Dataset schema (one row per source file):
hexsha: string (length 40)
size: int64 (3 to 1.03M)
ext: string (10 classes)
lang: string (1 class)
max_stars_repo_path: string (length 3 to 972)
max_stars_repo_name: string (length 6 to 130)
max_stars_repo_head_hexsha: string (length 40 to 78)
max_stars_repo_licenses: list (1 to 10 items)
max_stars_count: int64 (1 to 191k, nullable)
max_stars_repo_stars_event_min_datetime: string (length 24, nullable)
max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
max_issues_repo_path: string (length 3 to 972)
max_issues_repo_name: string (length 6 to 130)
max_issues_repo_head_hexsha: string (length 40 to 78)
max_issues_repo_licenses: list (1 to 10 items)
max_issues_count: int64 (1 to 116k, nullable)
max_issues_repo_issues_event_min_datetime: string (length 24, nullable)
max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
max_forks_repo_path: string (length 3 to 972)
max_forks_repo_name: string (length 6 to 130)
max_forks_repo_head_hexsha: string (length 40 to 78)
max_forks_repo_licenses: list (1 to 10 items)
max_forks_count: int64 (1 to 105k, nullable)
max_forks_repo_forks_event_min_datetime: string (length 24, nullable)
max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
content: string (length 3 to 1.03M)
avg_line_length: float64 (1.13 to 941k)
max_line_length: int64 (2 to 941k)
alphanum_fraction: float64 (0 to 1)
hexsha: 489d4dc8e5f3cc67a474dcb7feb4968126355d94 | size: 17,176 | ext: py | lang: Python
max_stars / max_issues / max_forks: path modules/tools/gen_vehicle_protocol/gen_protocols.py | repo yuyangapollo/apollo | head 8c3daa48edf457efe042207f543d3e165a642ad2 | licenses ["Apache-2.0"] | counts null | event datetimes null
content:
#!/usr/bin/env python3
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
#-*- coding:utf-8 -*-
import datetime
import os
import shutil
import sys
import yaml
def gen_report_header(car_type, protocol, output_dir):
"""
doc string:
"""
report_header_tpl_file = "template/report_protocol.h.tpl"
FMT = get_tpl_fmt(report_header_tpl_file)
report_header_file = output_dir + "%s.h" % protocol["name"]
with open(report_header_file, 'w') as h_fp:
fmt_val = {}
fmt_val["car_type_lower"] = car_type.lower()
fmt_val["car_type_upper"] = car_type.upper()
fmt_val["protocol_name_upper"] = protocol["name"].upper()
fmt_val["classname"] = protocol["name"].replace('_', '').capitalize()
func_declare_list = []
for var in protocol["vars"]:
fmt = """
// config detail: %s
%s %s(const std::uint8_t* bytes, const int32_t length) const;"""
returntype = var["type"]
if var["type"] == "enum":
returntype = protocol["name"].capitalize(
) + "::" + var["name"].capitalize() + "Type"
declare = fmt % (str(var), returntype, var["name"].lower())
func_declare_list.append(declare)
fmt_val["func_declare_list"] = "\n".join(func_declare_list)
h_fp.write(FMT % fmt_val)
def gen_report_cpp(car_type, protocol, output_dir):
"""
doc string:
"""
report_cpp_tpl_file = "template/report_protocol.cc.tpl"
FMT = get_tpl_fmt(report_cpp_tpl_file)
report_cpp_file = output_dir + "%s.cc" % protocol["name"]
with open(report_cpp_file, 'w') as fp:
fmt_val = {}
fmt_val["car_type_lower"] = car_type
fmt_val["protocol_name_lower"] = protocol["name"]
classname = protocol["name"].replace('_', '').capitalize()
fmt_val["classname"] = classname
protocol_id = int(protocol["id"].upper(), 16)
if protocol_id > 2048:
fmt_val["id_upper"] = gen_esd_can_extended(protocol["id"].upper())
else:
fmt_val["id_upper"] = protocol["id"].upper()
set_var_to_protocol_list = []
func_impl_list = []
for var in protocol["vars"]:
var["name"] = var["name"].lower()
returntype = var["type"]
if var["type"] == "enum":
returntype = protocol["name"].capitalize(
) + "::" + var["name"].capitalize() + "Type"
# gen func top
fmt = """
// config detail: %s
%s %s::%s(const std::uint8_t* bytes, int32_t length) const {"""
impl = fmt % (str(var), returntype, classname, var["name"])
byte_info = get_byte_info(var)
impl = impl + gen_parse_value_impl(var, byte_info)
impl = impl + gen_report_value_offset_precision(var, protocol)
impl = impl + "}"
func_impl_list.append(impl)
proto_set_fmt = " chassis->mutable_%s()->mutable_%s()->set_%s(%s(bytes, length));"
func_name = var["name"]
proto_set = proto_set_fmt % (car_type, protocol["name"], var["name"],
func_name)
set_var_to_protocol_list.append(proto_set)
fmt_val["set_var_to_protocol_list"] = "\n".join(
set_var_to_protocol_list)
fmt_val["func_impl_list"] = "\n".join(func_impl_list)
fp.write(FMT % fmt_val)
def gen_report_value_offset_precision(var, protocol):
"""
doc string:
"""
impl = ""
if var["is_signed_var"]:
fmt = "\n x <<= %d;\n x >>= %d;\n"
# x is an int32_t var
shift_bit = 32 - var["len"]
impl = impl + fmt % (shift_bit, shift_bit)
returntype = var["type"]
if var["type"] == "enum":
returntype = protocol["name"].capitalize() + "::" + var["name"].capitalize(
) + "Type"
impl = impl + "\n " + returntype + " ret = "
if var["type"] == "enum":
impl = impl + " static_cast<" + returntype + ">(x);\n"
else:
impl = impl + "x"
if var["precision"] != 1.0:
impl = impl + " * %f" % var["precision"]
if var["offset"] != 0.0:
impl = impl + " + %f" % (var["offset"])
impl = impl + ";\n"
return impl + " return ret;\n"
def gen_parse_value_impl(var, byte_info):
"""
doc string:
"""
impl = ""
fmt = "\n Byte t%d(bytes + %d);\n"
shift_bit = 0
for i in range(0, len(byte_info)):
info = byte_info[i]
impl = impl + fmt % (i, info["byte"])
if i == 0:
impl = impl + " int32_t x = t%d.get_byte(%d, %d);\n" %\
(i, info["start_bit"], info["len"])
elif i == 1:
impl = impl + " int32_t t = t%d.get_byte(%d, %d);\n x <<= %d;\n x |= t;\n" %\
(i, info["start_bit"], info["len"], info["len"])
else:
impl = impl + " t = t%d.get_byte(%d, %d);\n x <<= %d;\n x |= t;\n" %\
(i, info["start_bit"], info["len"], info["len"])
shift_bit = shift_bit + info["len"]
return impl
def gen_control_header(car_type, protocol, output_dir):
"""
doc string:
"""
control_header_tpl_file = "template/control_protocol.h.tpl"
FMT = get_tpl_fmt(control_header_tpl_file)
control_header_file = output_dir + "%s.h" % protocol["name"]
with open(control_header_file, 'w') as h_fp:
fmt_val = {}
fmt_val["car_type_lower"] = car_type
fmt_val["car_type_upper"] = car_type.upper()
fmt_val["protocol_name_upper"] = protocol["name"].upper()
classname = protocol["name"].replace('_', '').capitalize()
fmt_val["classname"] = classname
declare_public_func_list = []
declare_private_func_list = []
declare_private_var_list = []
fmtpub = "\n // config detail: %s\n %s* set_%s(%s %s);"
fmtpri = "\n // config detail: %s\n void set_p_%s(uint8_t* data, %s %s);"
for var in protocol["vars"]:
returntype = var["type"]
if var["type"] == "enum":
returntype = protocol["name"].capitalize(
) + "::" + var["name"].capitalize() + "Type"
private_var = ""
public_func_declare = fmtpub % (str(var), classname,
var["name"].lower(), returntype,
var["name"].lower())
private_func_declare = fmtpri % (str(var), var["name"].lower(),
returntype, var["name"].lower())
private_var = " %s %s_;" % (returntype, var["name"].lower())
declare_private_var_list.append(private_var)
declare_public_func_list.append(public_func_declare)
declare_private_func_list.append(private_func_declare)
fmt_val["declare_public_func_list"] = "\n".join(
declare_public_func_list)
fmt_val["declare_private_func_list"] = "\n".join(
declare_private_func_list)
fmt_val["declare_private_var_list"] = "\n".join(
declare_private_var_list)
h_fp.write(FMT % fmt_val)
def get_byte_info(var):
"""
doc string: https://wenku.baidu.com/view/3fe9a7a4dd3383c4bb4cd293.html
You can refer to this link to learn the difference between Motorola and Intel encoding.
Returns the byte layout of a variable in the protocol: how many bytes it occupies,
and, for every byte, how many bits are used and the start bit position.
To make parsing a value from a CAN frame easier, byte_info is arranged
from the msb byte to the lsb byte.
"""
bit = var["bit"]
byte_info = []
left_len = var["len"]
byte_idx = bit // 8
bit_start = bit % 8
if var["order"] == "motorola":
while left_len > 0:
info = {}
info["byte"] = byte_idx
info["len"] = min(bit_start + 1, left_len)
# start_bit is always the lowest bit
info["start_bit"] = bit_start - info["len"] + 1
byte_info.append(info)
left_len = left_len - info["len"]
byte_idx = byte_idx + 1
bit_start = 7
else:
while left_len > 0:
info = {}
info["byte"] = byte_idx
info["len"] = min(8 - bit_start, left_len)
info["start_bit"] = bit_start
byte_info.append(info)
left_len = left_len - info["len"]
byte_idx = byte_idx + 1
bit_start = 0
# byte_info is always constructed from the msb (most significant) byte down to the lsb byte
byte_info.reverse()
return byte_info
def gen_control_decode_offset_precision(var):
"""
doc string:
"""
impl = "\n"
range_info = get_range_info(var)
if var["type"] == "double":
if range_info["low"].find(".") == -1:
range_info["low"] = "%s.0" % range_info["low"]
if range_info["high"].find(".") == -1:
range_info["high"] = "%s.0" % range_info["high"]
if var["type"] != "enum" and var["type"] != "bool":
impl = impl + " %s = ProtocolData::BoundedValue(%s, %s, %s);\n" %\
(var["name"].lower(), range_info["low"],
range_info["high"], var["name"].lower())
impl = impl + " int x ="
if var["offset"] != 0.0:
impl = impl + " (%s - %f)" % (var["name"].lower(), var["offset"])
else:
impl = impl + " %s" % var["name"].lower()
if var["precision"] != 1.0:
impl = impl + " / %f" % var["precision"]
return impl + ";\n"
def gen_control_encode_one_byte_value_impl(var, byte_info):
"""
Only int and double occur here; an int can hold the value whether it is signed or unsigned.
"""
fmt = """
Byte to_set(data + %d);
to_set.set_value(x, %d, %d);
"""
return fmt % (byte_info["byte"], byte_info["start_bit"], byte_info["len"])
def get_range_info(var):
"""
doc string:
"""
info = {}
if "physical_range" not in var.keys():
return info
items = var["physical_range"].split('|')
info["low"] = items[0].split('[')[1]
info["high"] = items[1].split(']')[0]
return info
def gen_control_encode_value_impl(var, byte_info):
"""
doc string:
"""
impl = " uint8_t t = 0;\n"
fmt = """
t = x & %s;
Byte to_set%d(data + %d);
to_set%d.set_value(t, %d, %d);
"""
shift_bit = 0
for i in range(0, len(byte_info)):
info = byte_info[i]
if i != 0:
impl = impl + " x >>= %d;\n" % shift_bit
mask_bit = "0x%X" % ((1 << info["len"]) - 1)
impl = impl + fmt % (mask_bit, i, info["byte"], i, info["start_bit"],
info["len"])
shift_bit = info["len"]
return impl
def gen_control_value_func_impl(classname, var, protocol):
"""
doc string:
"""
impl = ""
if var["len"] > 32:
print("This generator not support big than four bytes var." + \
"protocol classname: %s, var_name:%s " % (
class_name, var["name"]))
return impl
fmt = """
%(classname)s* %(classname)s::set_%(var_name)s(
%(var_type)s %(var_name)s) {
%(var_name)s_ = %(var_name)s;
return this;
}
// config detail: %(config)s
void %(classname)s::set_p_%(var_name)s(uint8_t* data,
%(var_type)s %(var_name)s) {"""
fmt_val = {}
fmt_val["classname"] = classname
fmt_val["var_name"] = var["name"].lower()
returntype = var["type"]
if var["type"] == "enum":
returntype = protocol["name"].capitalize() + "::" + var["name"].capitalize(
) + "Type"
fmt_val["var_type"] = returntype
fmt_val["config"] = str(var)
impl = impl + fmt % fmt_val
impl = impl + gen_control_decode_offset_precision(var)
# get lsb to msb order
byte_info = get_byte_info(var)
byte_info.reverse()
if len(byte_info) == 1:
impl = impl + gen_control_encode_one_byte_value_impl(var, byte_info[0])
else:
impl = impl + gen_control_encode_value_impl(var, byte_info)
return impl + "}\n"
def gen_control_cpp(car_type, protocol, output_dir):
"""
doc string:
"""
control_cpp_tpl_file = "template/control_protocol.cc.tpl"
FMT = get_tpl_fmt(control_cpp_tpl_file)
control_cpp_file = output_dir + "%s.cc" % protocol["name"]
with open(control_cpp_file, 'w') as fp:
fmt_val = {}
fmt_val["car_type_lower"] = car_type
fmt_val["protocol_name_lower"] = protocol["name"]
protocol_id = int(protocol["id"].upper(), 16)
if protocol_id > 2048:
fmt_val["id_upper"] = gen_esd_can_extended(protocol["id"].upper())
else:
fmt_val["id_upper"] = protocol["id"].upper()
classname = protocol["name"].replace('_', '').capitalize()
fmt_val["classname"] = classname
set_private_var_list = []
set_private_var_init_list = []
set_func_impl_list = []
for var in protocol["vars"]:
func_impl = gen_control_value_func_impl(classname, var, protocol)
set_func_impl_list.append(func_impl)
set_private_var = " set_p_%s(data, %s_);" % (var["name"].lower(),
var["name"].lower())
set_private_var_list.append(set_private_var)
init_val = "0"
if var["type"] == "double":
init_val = "0.0"
elif var["type"] == "bool":
init_val = "false"
elif var["type"] == "enum":
if 0 in var["enum"]:
init_val = protocol["name"].capitalize(
) + "::" + var["enum"][0].upper()
else:
init_val = protocol["name"].capitalize(
) + "::" + list(var["enum"].values())[0].upper()
set_private_var_init_list.append(" %s_ = %s;" %
(var["name"].lower(), init_val))
fmt_val["set_private_var_list"] = "\n".join(set_private_var_list)
fmt_val["set_private_var_init_list"] = "\n".join(
set_private_var_init_list)
fmt_val["set_func_impl_list"] = "\n".join(set_func_impl_list)
fp.write(FMT % fmt_val)
def get_tpl_fmt(tpl_file):
"""
get fmt from tpl_file
"""
with open(tpl_file, 'r') as tpl:
fmt = tpl.readlines()
fmt = "".join(fmt)
return fmt
def gen_build_file(car_type, work_dir):
"""
doc string:
"""
build_tpl_file = "template/protocol_BUILD.tpl"
fmt = get_tpl_fmt(build_tpl_file)
with open(work_dir + "BUILD", "w") as build_fp:
fmt_var = {}
fmt_var["car_type"] = car_type.lower()
build_fp.write(fmt % fmt_var)
def gen_protocols(protocol_conf_file, protocol_dir):
"""
doc string:
"""
print("Generating protocols")
if not os.path.exists(protocol_dir):
os.makedirs(protocol_dir)
with open(protocol_conf_file, 'r') as fp:
content = yaml.safe_load(fp)
protocols = content["protocols"]
car_type = content["car_type"]
for p_name in protocols:
protocol = protocols[p_name]
if protocol["protocol_type"] == "report":
gen_report_header(car_type, protocol, protocol_dir)
gen_report_cpp(car_type, protocol, protocol_dir)
elif protocol["protocol_type"] == "control":
gen_control_header(car_type, protocol, protocol_dir)
gen_control_cpp(car_type, protocol, protocol_dir)
else:
print("Unknown protocol_type:%s" % protocol["protocol_type"])
gen_build_file(car_type, protocol_dir)
def gen_esd_can_extended(can_id):
"""
Convert a CAN id string to the ESD-CAN extended-frame form: keep the low
29 bits and set the 0x20000000 flag bit.
"""
int_id = int(can_id, 16)
int_id &= 0x1FFFFFFF
int_id |= 0x20000000
return hex(int_id).replace('0x', '')
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage:\npython %s some_config.yml" % sys.argv[0])
sys.exit(0)
with open(sys.argv[1], 'r') as fp:
conf = yaml.safe_load(fp)
protocol_conf = conf["protocol_conf"]
protocol_dir = conf["output_dir"] + "vehicle/" + conf["car_type"].lower(
) + "/protocol/"
shutil.rmtree(protocol_dir, True)
os.makedirs(protocol_dir)
gen_protocols(protocol_conf, protocol_dir)
avg_line_length: 35.561077 | max_line_length: 99 | alphanum_fraction: 0.552806
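For context, gen_protocols.py above is driven entirely by a YAML protocol description. A minimal sketch of the layout the script reads is shown below; the key names come from the generator code (car_type, protocols, name, id, protocol_type, vars and the per-signal fields), while the concrete car type, message name and values are hypothetical.

```python
# Hedged sketch of a protocol config for gen_protocols.py; all concrete values
# are made up, only the key names are taken from the generator code above.
import yaml

example_conf = {
    "car_type": "demo",
    "protocols": {
        "brake_command_101": {
            "name": "brake_command_101",
            "id": "0x101",
            "protocol_type": "control",   # or "report"
            "vars": [
                {
                    "name": "brake_pedal",
                    "type": "double",      # int / double / bool / enum
                    "bit": 7,
                    "len": 16,
                    "order": "motorola",   # or "intel"
                    "is_signed_var": False,
                    "precision": 0.1,
                    "offset": 0.0,
                    "physical_range": "[0|100]",
                },
            ],
        },
    },
}

# gen_protocols() reads this structure back from a YAML file.
with open("demo_protocol_conf.yml", "w") as fp:
    yaml.safe_dump(example_conf, fp)
```

The top-level file passed on the command line is a separate YAML that points to this one via protocol_conf and also provides output_dir and car_type.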
hexsha: 086cc045e27166e82fadb70c7bc3edaf4da553b4 | size: 1,716 | ext: py | lang: Python
max_stars / max_issues / max_forks: path certbot_dns_corenetworks/dns_corenetworks_test.py | repo MasinAD/certbot-dns-corenetworks | head 06924213d21d056da50716009b5bbf4ebcb8b12a | licenses ["Apache-2.0"] | counts null | event datetimes null
content:
"""Tests for certbot_dns_corenetworks.dns_corenetworks."""
import os
import unittest
import mock
from requests.exceptions import HTTPError
from certbot.plugins import dns_test_common
from certbot.plugins import dns_test_common_lexicon
from certbot.tests import util as test_util
LOGIN = 'foo'
PASSWORD = 'bar'
class AuthenticatorTest(test_util.TempDirTestCase,
dns_test_common_lexicon.BaseLexiconAuthenticatorTest):
def setUp(self):
super(AuthenticatorTest, self).setUp()
from certbot_dns_corenetworks.dns_corenetworks import Authenticator
path = os.path.join(self.tempdir, 'file.ini')
dns_test_common.write({"corenetworks_login": LOGIN, "corenetworks_password": PASSWORD }, path)
self.config = mock.MagicMock(corenetworks_credentials=path,
corenetworks_propagation_seconds=0) # don't wait during tests
self.auth = Authenticator(self.config, "corenetworks")
self.mock_client = mock.MagicMock()
# _get_corenetworks_client | pylint: disable=protected-access
self.auth._get_corenetworks_client = mock.MagicMock(return_value=self.mock_client)
class CoreNetworksLexiconClientTest(unittest.TestCase, dns_test_common_lexicon.BaseLexiconClientTest):
LOGIN_ERROR = HTTPError('401 Client Error: Unauthorized for url: ...')
def setUp(self):
from certbot_dns_corenetworks.dns_corenetworks import _CoreNetworksLexiconClient
self.client = _CoreNetworksLexiconClient(LOGIN, PASSWORD, 0)
self.provider_mock = mock.MagicMock()
self.client.provider = self.provider_mock
if __name__ == "__main__":
unittest.main() # pragma: no cover
avg_line_length: 33 | max_line_length: 102 | alphanum_fraction: 0.734266
hexsha: 64d0010a2cd52fd038e5843953c93fa654478ae3 | size: 10,972 | ext: py | lang: Python
max_stars: path lightkurve/seismology/stellar_estimators.py | repo burke86/lightkurve | head fda3e92544ccc3c6b38d003b2980a232fbcbbd0b | licenses ["MIT"] | count 1 | events 2021-05-07T10:42:01.000Z to 2021-05-07T10:42:01.000Z
max_issues: path lightkurve/seismology/stellar_estimators.py | repo burke86/lightkurve | head fda3e92544ccc3c6b38d003b2980a232fbcbbd0b | licenses ["MIT"] | count 7 | events 2018-07-14T17:49:36.000Z to 2020-09-24T19:58:13.000Z
max_forks: path lightkurve/seismology/stellar_estimators.py | repo barentsen/lightkurve | head 5b1693832bc509e42742d1b6f20224d131e62d8c | licenses ["MIT"] | count null | event datetimes null
content:
"""Functions to estimate stellar parameters (radius, mass, logg) using
asteroseismic scaling relations.
"""
from uncertainties import ufloat, umath
from astropy import units as u
from astropy import constants as const
from .utils import SeismologyQuantity
__all__ = ['estimate_radius', 'estimate_mass', 'estimate_logg']
"""Global parameters for the sun"""
NUMAX_SOL = ufloat(3090, 30) # microhertz | Huber et al. 2011
DELTANU_SOL = ufloat(135.1, 0.1) # microhertz | Huber et al. 2011
TEFF_SOL = ufloat(5772., 0.8) # Kelvin | Prsa et al. 2016
G_SOL = ((const.G * const.M_sun)/(const.R_sun)**2).to(u.cm/u.second**2)  # cm/s^2
def estimate_radius(numax, deltanu, teff, numax_err=None, deltanu_err=None, teff_err=None):
"""Returns a stellar radius estimate based on the scaling relations.
The two global observable seismic parameters, numax and deltanu, along with
temperature, scale with fundamental stellar properties (Brown et al. 1991;
Kjeldsen & Bedding 1995). These scaling relations can be rearranged to
calculate a stellar radius as
R = Rsol * (numax/numax_sol)(deltanu/deltanusol)^-2(Teff/Teffsol)^0.5
where R is the radius and Teff is the effective temperature, and the suffix
'sol' indicates a solar value. In this method we use the solar values for
numax and deltanu as given in Huber et al. (2011) and for Teff as given in
Prsa et al. (2016).
This code structure borrows from work done in Bellinger et al. (2019), which
also functions as an accessible explanation of seismic scaling relations.
If no value of effective temperature is given, this function will check the
meta data of the `Periodogram` object used to create the `Seismology` object.
These data will often contain an effective temperature from the Kepler Input
Catalogue (KIC, https://ui.adsabs.harvard.edu/abs/2011AJ....142..112B/abstract),
or from the EPIC or TIC for K2 and TESS respectively. The temperature values in these
catalogues are estimated using photometry, and so have large associated uncertainties
(roughly 200 K, see KIC). Spectroscopic measurements of temperature are often
more precise and will give better results.
NOTE: These scaling relations are scaled to the Sun, and therefore do not
always produce an entirely accurate result for more evolved stars.
Parameters
----------
numax : float
The frequency of maximum power of the seismic mode envelope. If not
given an astropy unit, assumed to be in units of microhertz.
deltanu : float
The frequency spacing between two consecutive overtones of equal radial
degree. If not given an astropy unit, assumed to be in units of
microhertz.
teff : float
The effective temperature of the star. In units of Kelvin.
numax_err : float
Error on numax. Assumed to be same units as numax
deltanu_err : float
Error on deltanu. Assumed to be same units as deltanu
teff_err : float
Error on Teff. Assumed to be same units as Teff.
Returns
-------
radius : SeismologyQuantity
An estimate of the stellar radius in solar radii.
"""
numax = u.Quantity(numax, u.microhertz).value
deltanu = u.Quantity(deltanu, u.microhertz).value
teff = u.Quantity(teff, u.Kelvin).value
if all(b is not None for b in [numax_err, deltanu_err, teff_err]):
numax_err = u.Quantity(numax_err, u.microhertz).value
deltanu_err = u.Quantity(deltanu_err, u.microhertz).value
teff_err = u.Quantity(teff_err, u.Kelvin).value
unumax = ufloat(numax, numax_err)
udeltanu = ufloat(deltanu, deltanu_err)
uteff = ufloat(teff, teff_err)
else:
unumax = ufloat(numax, 0)
udeltanu = ufloat(deltanu, 0)
uteff = ufloat(teff, 0)
uR = (unumax / NUMAX_SOL) * (udeltanu / DELTANU_SOL)**(-2.) * (uteff / TEFF_SOL)**(0.5)
result = SeismologyQuantity(uR.n * u.solRad,
error=uR.s * u.solRad,
name="radius",
method="Uncorrected Scaling Relations")
return result
def estimate_mass(numax, deltanu, teff, numax_err=None, deltanu_err=None, teff_err=None):
"""Calculates mass using the asteroseismic scaling relations.
The two global observable seismic parameters, numax and deltanu, along with
temperature, scale with fundamental stellar properties (Brown et al. 1991;
Kjeldsen & Bedding 1995). These scaling relations can be rearranged to
calculate a stellar mass as
M = Msol * (numax/numax_sol)^3(deltanu/deltanusol)^-4(Teff/Teffsol)^1.5
where M is the mass and Teff is the effective temperature, and the suffix
'sol' indicates a solar value. In this method we use the solar values for
numax and deltanu as given in Huber et al. (2011) and for Teff as given in
Prsa et al. (2016).
This code structure borrows from work done in Bellinger et al. (2019), which
also functions as an accessible explanation of seismic scaling relations.
If no value of effective temperature is given, this function will check the
meta data of the `Periodogram` object used to create the `Seismology` object.
These data will often contain an effective temperature from the Kepler Input
Catalogue (KIC, https://ui.adsabs.harvard.edu/abs/2011AJ....142..112B/abstract),
or from the EPIC or TIC for K2 and TESS respectively. The temperature values in these
catalogues are estimated using photometry, and so have large associated uncertainties
(roughly 200 K, see KIC). Spectroscopic measurements of temperature are often
more precise and will give better results.
NOTE: These scaling relations are scaled to the Sun, and therefore do not
always produce an entirely accurate result for more evolved stars.
Parameters
----------
numax : float
The frequency of maximum power of the seismic mode envelope. If not
given an astropy unit, assumed to be in units of microhertz.
deltanu : float
The frequency spacing between two consecutive overtones of equal radial
degree. If not given an astropy unit, assumed to be in units of
microhertz.
teff : float
The effective temperature of the star. In units of Kelvin.
numax_err : float
Error on numax. Assumed to be same units as numax
deltanu_err : float
Error on deltanu. Assumed to be same units as deltanu
teff_err : float
Error on Teff. Assumed to be same units as Teff.
Returns
-------
mass : SeismologyQuantity
An estimate of the stellar mass in solar masses.
"""
numax = u.Quantity(numax, u.microhertz).value
deltanu = u.Quantity(deltanu, u.microhertz).value
teff = u.Quantity(teff, u.Kelvin).value
if all(b is not None for b in [numax_err, deltanu_err, teff_err]):
numax_err = u.Quantity(numax_err, u.microhertz).value
deltanu_err = u.Quantity(deltanu_err, u.microhertz).value
teff_err = u.Quantity(teff_err, u.Kelvin).value
unumax = ufloat(numax, numax_err)
udeltanu = ufloat(deltanu, deltanu_err)
uteff = ufloat(teff, teff_err)
else:
unumax = ufloat(numax, 0)
udeltanu = ufloat(deltanu, 0)
uteff = ufloat(teff, 0)
uM = (unumax / NUMAX_SOL)**3. * (udeltanu / DELTANU_SOL)**(-4.) * (uteff / TEFF_SOL)**(1.5)
result = SeismologyQuantity(uM.n * u.solMass,
error=uM.s * u.solMass,
name="mass",
method="Uncorrected Scaling Relations")
return result
def estimate_logg(numax, teff, numax_err=None, teff_err=None):
"""Calculates the log of the surface gravity using the asteroseismic scaling
relations.
The two global observable seismic parameters, numax and deltanu, along with
temperature, scale with fundamental stellar properties (Brown et al. 1991;
Kjeldsen & Bedding 1995). These scaling relations can be rearranged to
calculate a stellar surface gravity as
g = gsol * (numax/numax_sol)(Teff/Teffsol)^0.5
where g is the surface gravity and Teff is the effective temperature,
and the suffix 'sol' indicates a solar value. In this method we use the
solar values for numax as given in Huber et al. (2011) and for Teff as given
in Prsa et al. (2016). The solar surface gravity is calculated from the
astropy constants for solar mass and radius and does not have an error.
The solar surface gravity is returned as log10(g) with units in dex, as is
common in the astrophysics literature.
This code structure borrows from work done in Bellinger et al. (2019), which
also functions as an accessible explanation of seismic scaling relations.
If no value of effective temperature is given, this function will check the
meta data of the `Periodogram` object used to create the `Seismology` object.
These data will often contain an effective temperature from the Kepler Input
Catalogue (KIC, https://ui.adsabs.harvard.edu/abs/2011AJ....142..112B/abstract),
or from the EPIC or TIC for K2 and TESS respectively. The temperature values in these
catalogues are estimated using photometry, and so have large associated uncertainties
(roughly 200 K, see KIC). Spectroscopic measurements of temperature are often
more precise and will give better results.
NOTE: These scaling relations are scaled to the Sun, and therefore do not
always produce an entirely accurate result for more evolved stars.
Parameters
----------
numax : float
The frequency of maximum power of the seismic mode envelope. If not
given an astropy unit, assumed to be in units of microhertz.
teff : float
The effective temperature of the star. In units of Kelvin.
numax_err : float
Error on numax. Assumed to be same units as numax
teff_err : float
Error on teff. Assumed to be same units as teff.
Returns
-------
logg : `.SeismologyQuantity`
The log10 of the surface gravity of the star.
"""
numax = u.Quantity(numax, u.microhertz).value
teff = u.Quantity(teff, u.Kelvin).value
if all(b is not None for b in [numax_err, teff_err]):
numax_err = u.Quantity(numax_err, u.microhertz).value
teff_err = u.Quantity(teff_err, u.Kelvin).value
unumax = ufloat(numax, numax_err)
uteff = ufloat(teff, teff_err)
else:
unumax = ufloat(numax, 0)
uteff = ufloat(teff, 0)
ug = G_SOL.value * (unumax / NUMAX_SOL) * (uteff / TEFF_SOL)**0.5
ulogg = umath.log(ug, 10)
result = SeismologyQuantity(ulogg.n * u.dex,
error=ulogg.s * u.dex,
name="logg",
method="Uncorrected Scaling Relations")
return result
avg_line_length: 44.241935 | max_line_length: 95 | alphanum_fraction: 0.685108
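As a sanity check of the scaling relations quoted in the docstrings above, the sketch below evaluates R, M and log g directly from the solar reference values defined in the module. The input numax, deltanu and Teff are arbitrary illustrative numbers for a roughly sun-like star, not values taken from the file.

```python
# Hedged numerical sketch of the asteroseismic scaling relations; inputs are made up.
from uncertainties import ufloat, umath
from astropy import units as u
from astropy import constants as const

NUMAX_SOL = ufloat(3090, 30)      # microhertz | Huber et al. 2011
DELTANU_SOL = ufloat(135.1, 0.1)  # microhertz | Huber et al. 2011
TEFF_SOL = ufloat(5772., 0.8)     # Kelvin | Prsa et al. 2016
G_SOL = ((const.G * const.M_sun) / const.R_sun**2).to(u.cm / u.second**2)

numax = ufloat(3000, 50)     # microhertz
deltanu = ufloat(130, 1)     # microhertz
teff = ufloat(5700, 80)      # Kelvin

radius = (numax / NUMAX_SOL) * (deltanu / DELTANU_SOL)**(-2.) * (teff / TEFF_SOL)**0.5
mass = (numax / NUMAX_SOL)**3. * (deltanu / DELTANU_SOL)**(-4.) * (teff / TEFF_SOL)**1.5
logg = umath.log(G_SOL.value * (numax / NUMAX_SOL) * (teff / TEFF_SOL)**0.5, 10)

print(radius, "solRad")   # close to 1 solRad
print(mass, "solMass")    # close to 1 solMass
print(logg, "dex")        # close to the solar log g of about 4.44 dex
```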
hexsha: ddb17b6e53147a98f8f014a55724656f0a4f7694 | size: 144 | ext: py | lang: Python
max_stars / max_issues / max_forks: path pipenv/vendor/requirementslib/__init__.py | repo craynic/pipenv | head e137d4334a5d225a06bf41b21e2eef746c19c3cb | licenses ["MIT"] | counts null | event datetimes null
content:
# -*- coding=utf-8 -*-
__version__ = '1.1.9.dev0'
from .exceptions import RequirementError
from .models import Requirement, Lockfile, Pipfile
avg_line_length: 20.571429 | max_line_length: 50 | alphanum_fraction: 0.736111
hexsha: 3d654d865f0c14088d9d37d31f5e46ff59154183 | size: 24,349 | ext: py | lang: Python
max_stars: path src/db-up/azext_db_up/vendored_sdks/azure_mgmt_rdbms/mariadb/operations/servers_operations.py | repo Mannan2812/azure-cli-extensions | head e2b34efe23795f6db9c59100534a40f0813c3d95 | licenses ["MIT"] | count 207 | events 2017-11-29T06:59:41.000Z to 2022-03-31T10:00:53.000Z
max_issues: path src/db-up/azext_db_up/vendored_sdks/azure_mgmt_rdbms/mariadb/operations/servers_operations.py | repo Mannan2812/azure-cli-extensions | head e2b34efe23795f6db9c59100534a40f0813c3d95 | licenses ["MIT"] | count 4,061 | events 2017-10-27T23:19:56.000Z to 2022-03-31T23:18:30.000Z
max_forks: path src/db-up/azext_db_up/vendored_sdks/azure_mgmt_rdbms/mariadb/operations/servers_operations.py | repo Mannan2812/azure-cli-extensions | head e2b34efe23795f6db9c59100534a40f0813c3d95 | licenses ["MIT"] | count 802 | events 2017-10-11T17:36:26.000Z to 2022-03-31T22:24:32.000Z
content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class ServersOperations(object):
"""ServersOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version to use for the request. Constant value: "2018-06-01-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2018-06-01-preview"
self.config = config
def _create_initial(
self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'ServerForCreate')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Server', response)
if response.status_code == 201:
deserialized = self._deserialize('Server', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create(
self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Creates a new server or updates an existing server. The update action
will overwrite the existing server.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param parameters: The required parameters for creating or updating a
server.
:type parameters: ~azure.mgmt.rdbms.mariadb.models.ServerForCreate
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns Server or
ClientRawResponse<Server> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.rdbms.mariadb.models.Server]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.rdbms.mariadb.models.Server]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_initial(
resource_group_name=resource_group_name,
server_name=server_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('Server', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMariaDB/servers/{serverName}'}
def _update_initial(
self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.update.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'ServerUpdateParameters')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Server', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def update(
self, resource_group_name, server_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Updates an existing server. The request body can contain one to many of
the properties present in the normal server definition.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param parameters: The required parameters for updating a server.
:type parameters:
~azure.mgmt.rdbms.mariadb.models.ServerUpdateParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns Server or
ClientRawResponse<Server> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.rdbms.mariadb.models.Server]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.rdbms.mariadb.models.Server]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._update_initial(
resource_group_name=resource_group_name,
server_name=server_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('Server', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMariaDB/servers/{serverName}'}
def _delete_initial(
self, resource_group_name, server_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, server_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Deletes a server.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
server_name=server_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMariaDB/servers/{serverName}'}
def get(
self, resource_group_name, server_name, custom_headers=None, raw=False, **operation_config):
"""Gets information about a server.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Server or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.rdbms.mariadb.models.Server or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Server', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMariaDB/servers/{serverName}'}
def list_by_resource_group(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""List all the servers in a given resource group.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Server
:rtype:
~azure.mgmt.rdbms.mariadb.models.ServerPaged[~azure.mgmt.rdbms.mariadb.models.Server]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.ServerPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ServerPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMariaDB/servers'}
def list(
self, custom_headers=None, raw=False, **operation_config):
"""List all the servers in a given subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Server
:rtype:
~azure.mgmt.rdbms.mariadb.models.ServerPaged[~azure.mgmt.rdbms.mariadb.models.Server]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.ServerPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ServerPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DBforMariaDB/servers'}
avg_line_length: 46.028355 | max_line_length: 156 | alphanum_fraction: 0.664586
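For orientation, the generated ServersOperations class above is normally reached through a service client that exposes it as an operations group; create/update/delete return an LROPoller whose result() blocks until the long-running operation finishes, while get and the list methods return the deserialized models directly. A hedged sketch follows; the client wiring, resource names and parameters object are assumptions, not taken from this file.

```python
# Hedged usage sketch; `client` is assumed to be any msrest-based service client
# that exposes the ServersOperations group above as `.servers`, and `parameters`
# is assumed to be a ServerForCreate model instance.
def create_then_list(client, parameters):
    poller = client.servers.create("my-resource-group", "my-mariadb-server", parameters)
    server = poller.result()  # block until the long-running create completes

    same_server = client.servers.get("my-resource-group", "my-mariadb-server")
    all_in_group = list(client.servers.list_by_resource_group("my-resource-group"))
    return server, same_server, all_in_group
```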
hexsha: 69c512413ca49c660d17343e10e4cd3cf51599e2 | size: 281 | ext: py | lang: Python
max_stars: path visualqc/__diffusion__.py | repo raamana/corticalqa | head 8bca13b2c624e7a230d0b0a5a6539357af641163 | licenses ["Apache-2.0"] | count 54 | events 2018-02-05T14:50:57.000Z to 2022-02-04T10:24:35.000Z
max_issues: path visualqc/__diffusion__.py | repo shannyfoo/visualqc | head 606d2b8770e619f8d990faeae24032bf4b152f29 | licenses ["Apache-2.0"] | count 45 | events 2018-02-09T02:46:32.000Z to 2022-03-30T22:36:59.000Z
max_forks: path visualqc/__diffusion__.py | repo shannyfoo/visualqc | head 606d2b8770e619f8d990faeae24032bf4b152f29 | licenses ["Apache-2.0"] | count 10 | events 2018-02-09T19:46:27.000Z to 2022-02-17T09:44:36.000Z
content:
from sys import version_info
if version_info.major > 2:
from visualqc import diffusion
else:
raise NotImplementedError('visualqc_diffusion requires Python 3 or higher!')
def main():
"""Entry point."""
diffusion.cli_run()
if __name__ == '__main__':
main()
avg_line_length: 16.529412 | max_line_length: 80 | alphanum_fraction: 0.697509
hexsha: 8bb10fde0388474b2ccaedabfdcc914c9b4e5aef | size: 1,700 | ext: py | lang: Python
max_stars: path tf2onnx/tflite/RNNOptions.py | repo gcunhase/tensorflow-onnx | head 8a61c99fbc39c36d70781f95e2c7c582f46ba2db | licenses ["Apache-2.0"] | count 1 | events 2021-04-30T15:26:06.000Z to 2021-04-30T15:26:06.000Z
max_issues: path tf2onnx/tflite/RNNOptions.py | repo gcunhase/tensorflow-onnx | head 8a61c99fbc39c36d70781f95e2c7c582f46ba2db | licenses ["Apache-2.0"] | count null | event datetimes null
max_forks: path tf2onnx/tflite/RNNOptions.py | repo gcunhase/tensorflow-onnx | head 8a61c99fbc39c36d70781f95e2c7c582f46ba2db | licenses ["Apache-2.0"] | count 1 | events 2021-05-11T21:51:52.000Z to 2021-05-11T21:51:52.000Z
content:
# SPDX-License-Identifier: Apache-2.0
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class RNNOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsRNNOptions(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = RNNOptions()
x.Init(buf, n + offset)
return x
@classmethod
def RNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
# RNNOptions
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# RNNOptions
def FusedActivationFunction(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
return 0
# RNNOptions
def AsymmetricQuantizeInputs(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
return False
def RNNOptionsStart(builder): builder.StartObject(2)
def RNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0)
def RNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(1, asymmetricQuantizeInputs, 0)
def RNNOptionsEnd(builder): return builder.EndObject()
avg_line_length: 36.170213 | max_line_length: 133 | alphanum_fraction: 0.726471
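To show how the generated accessors above are used, here is a hedged sketch that builds a small RNNOptions table with the module's builder helpers and reads it back; it assumes the flatbuffers package is installed and that the names above are in scope (for example after importing them from tf2onnx.tflite.RNNOptions), and the field values are arbitrary.

```python
# Hedged sketch: build an RNNOptions flatbuffer and read it back with the
# generated accessors; the written values are arbitrary.
import flatbuffers

builder = flatbuffers.Builder(0)
RNNOptionsStart(builder)
RNNOptionsAddFusedActivationFunction(builder, 1)
RNNOptionsAddAsymmetricQuantizeInputs(builder, True)
builder.Finish(RNNOptionsEnd(builder))

buf = builder.Output()
opts = RNNOptions.GetRootAsRNNOptions(buf, 0)
print(opts.FusedActivationFunction())    # -> 1
print(opts.AsymmetricQuantizeInputs())   # -> True
```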
hexsha: ed30226ea55b17e5b00ca86344177453a51f6af9 | size: 273 | ext: py | lang: Python
max_stars / max_issues / max_forks: path ABC/166/b.py | repo fumiyanll23/AtCoder | head 362ca9fcacb5415c1458bc8dee5326ba2cc70b65 | licenses ["MIT"] | counts null | event datetimes null
content:
N, K = map(int, input().split())
d = [0 for i in range(K)]
A = []
for i in range(K):
d[i] = int(input())
A.append(list(map(int, input().split())))
candy = [0 for i in range(N)]
for i in range(K):
for j in range(d[i]):
candy[A[i][j]-1] += 1
print(candy.count(0))
avg_line_length: 21 | max_line_length: 43 | alphanum_fraction: 0.553114
hexsha: d659e5f3e23a8e205074194264ce3c11b3b1eca4 | size: 19,732 | ext: py | lang: Python
max_stars: path gs_api_client/swagger/models/paas_service_update.py | repo gridscale/gridscale_api_client_python | head 755b8e8a017784a4f5c6b3a577338ff988c41a9a | licenses ["MIT"] | count 7 | events 2019-07-12T13:59:45.000Z to 2021-03-16T08:46:20.000Z
max_issues: path gs_api_client/swagger/models/paas_service_update.py | repo gridscale/gridscale_api_client_python | head 755b8e8a017784a4f5c6b3a577338ff988c41a9a | licenses ["MIT"] | count 13 | events 2020-01-23T07:50:29.000Z to 2022-03-21T14:32:40.000Z
max_forks: path gs_api_client/swagger/models/paas_service_update.py | repo gridscale/gridscale_api_client_python | head 755b8e8a017784a4f5c6b3a577338ff988c41a9a | licenses ["MIT"] | count null | event datetimes null
content:
# coding: utf-8
"""
API Specification
# Introduction Welcome to gridscales API documentation. A REST API is a programming interface that allows you to access and send data directly to our systems using HTTPS requests, without the need to use a web GUI. All the functionality you are already familiar with in your control panel is accessible through the API, including expert methods that are only available through the API. Allowing you to script any actions you require, regardless of their complexity. First we will start with a general overview about how the API works, followed by an extensive list of each endpoint, describing them in great detail. ## Requests For security, gridscale requires all API requests are made through the HTTPS protocol so that traffic is encrypted. The following table displays the different type of requests that the interface responds to, depending on the action you require. | Method | Description | | --- | --- | | GET | A simple search of information. The response is a JSON object. Requests using GET are always read-only. | | POST | Adds new objects and object relations. The POST request must contain all the required parameters in the form of a JSON object. | | PATCH | Changes an object or an object relation. The parameters in PATCH requests are usually optional, so only the changed parameters must be specified in a JSON object. | | DELETE | Deletes an object or object relation. The object is deleted if it exists. | | OPTIONS | Get an extensive list of the servers support methods and characteristics. We will not give example OPTION requests on each endpoint, as they are extensive and self-descriptive. | <aside class=\"notice\"> The methods PATCH and DELETE are idempotent - that is, a request with identical parameters can be sent several times, and it doesn't change the result. </aside> ## Status Codes | HTTP Status | `Message` | Description | | --- | --- | --- | | 200 | `OK` | The request has been successfully processed and the result of the request is transmitted in the response. | | 202 | `Accepted` | The request has been accepted, but will run at a later date. Meaning we can not guarantee the success of the request. You should poll the request to be notified once the resource has been provisioned - see the requests endpoint on how to poll. | | 204 | `No Content` | The request was successful, but the answer deliberately contains no data. | | 400 | `Bad Request` | The request message was built incorrectly. | | 401 | `Unauthorised` | The request can not be performed without a valid authentication. X-Auth UserId or X-Auth token HTTP header is not set or the userID / token is invalid. | | 402 | `Payment Required` | Action can not be executed - not provided any or invalid payment methods. | | 403 | `Forbidden` | The request was not carried out due to lack of authorization of the user or because an impossible action was requested. | | 404 | `Not Found` | The requested resource was not found. Will also be used if you do a resource exists, but the user does not have permission for it. | | 405 | `Method Not Allowed` | The request may be made only with other HTTP methods (eg GET rather than POST). | | 409 | `Conflict` | The request was made under false assumptions. For example, a user can not be created twice with the same email. | | 415 | `Unsupported Media Type` | The contents of the request have been submitted with an invalid media type. All POST or PATCH requests must have \"Content-Type : application / json\" as a header, and send a JSON object as a payload. 
| | 416 | `Requested Range Not Satisfiable` | The request could not be fulfilled. It is possible that a resource limit was reached or an IPv4 address pool is exhausted. | | 424 | `Failed Dependency` | The request could not be performed because the object is in the wrong status. | | 429 | `Too Many Requests` | The request has been rejected because rate limits have been exceeded. | <aside class=\"success\"> Status 200-204 indicates that the request has been accepted and is processed. </aside> <aside class=\"notice\"> Status 400-429 indicates that there was a problem with the request that originated on the client. You will find more information about the problem in the body of 4xx response. </aside> <aside class=\"warning\"> A status 500 means that there was a server-side problem and your request can not be processed now. </aside> ## Request Headers | Header | Description | | --- | --- | | Content-Type | Always \"application/json\". | | X-Auth-userId | The user UUID. This can be found in the panel under \"API\" and will never change ( even after the change of user e-mail). | | X-Auth-Token | Is generated from the API hash and must be sent with all API requests. Both the token and its permissions can be configured in the panel.| ## Response Headers | Header | Description | | --- | --- | | Content-Type | Always \"application/json\". | | X-Time-Provisioning | The time taken to process the request (in ms). | | X-Api-Identity | The currently active Provisioning API version. Useful when reporting bugs to us. | | X-Request-Id | The unique identifier of the request, be sure to include it when referring to a request. | | RateLimit-Limit | The number of requests that can be made per minute. | | RateLimit-Remaining | The number of requests that still remain before you hit your request limit. | | RateLimit-Reset | A [Unix timestamp](https://en.wikipedia.org/wiki/Unix_time) in milliseconds of when the rate limit will reset, or the time at which a request no longer will return 429 - Too Many Requests. | ## Timestamp Format All timestamps follow <a href=\"https://de.wikipedia.org/wiki/ISO_8601\" target=\"_blank_\">ISO 8601</a> and issued in <a href=\"https://www.timeanddate.de/zeitzonen/utc-gmt\" target=\"_blank_\">UTC</a> ## CORS ### Cross Origin Resource Sharing To allow API access from other domains that supports the API CORS (Cross Origin Resource Sharing). See: enable-cors.org/ . This allows direct use the API in the browser running a JavaScript web control panel. All this is done in the background by the browser. The following HTTP headers are set by the API: Header | Parameter | Description --- | --- | --- Access-Control-Allow-Methods | GET, POST, PUT, PATCH, DELETE, OPTIONS | Contains all available methods that may be used for queries. Access-Control-Allow-Credentials | true | Is set to \"true\". Allows the browser to send the authentication data via X-Auth HTTP header. Access-Control-Allow-Headers | Origin, X-Requested-With, Content-Type, Accept, X-Auth-UserId, X-Auth-Token, X-Exec-Time, X-API-Version, X-Api-Client | The HTTP headers available for requests. Access-Control-Allow-Origin | * | The domain sent by the browser as a source of demand. Access-Control-Expose-Headers | X-Exec-Time, X-Api-Version | The HTTP headers that can be used by a browser application. ## Rate Limits The number of requests that can be made through our API is currently limited to 210 requests per 60 seconds. The current state of rate limiting is returned within the response headers of each request. 
The relevant response headers are - RateLimit-Limit - RateLimit-Remaining - RateLimit-Reset See the Response Headers section for details. As long as the `RateLimit-Remaining` count is above zero, you will be able to make further requests. As soon as the `RateLimit-Remaining` header value is zero, subsequent requests will return the 429 status code. This will stay until the timestamp given in `RateLimit-Reset` has been reached. ### Example rate limiting response ```shell HTTP/1.0 429 TOO MANY REQUESTS Content-Length: 66 Content-Type: application/json; charset=utf-8 Date: Mon, 11 Nov 2019 11:11:33 GMT RateLimit-Limit: 210 RateLimit-Remaining: 0 RateLimit-Reset: 1573468299256 { \"id\": \"too_many_requests\", \"message\": \"API Rate limit exceeded.\" } ``` It is important to understand how rate limits are reset in order to use the API efficiently. Rate limits are reset for all counted requests at once. This means that that once the timestamp `RateLimit-Remaining` has arrived all counted request are reset and you can again start sending requests to the API. This allows for short burst of traffic. The downside is once you have hit the request limit no more requests are allowed until the rate limit duration is reset. ## Object Relations Relationships describe resource objects (storages, networks, IPs, etc.) that are connected to a server. These relationships are treated like objects themselves and can have properties specific to this relation. One example would be, that the MAC address of a private network connected to a server (Server-to-Network relation) can be found as property of the relation itself - the relation is the _network interface_ in the server. Another example is storage, where the SCSI LUN is also part of the Server-to-Storage relation object. This information is especially interesting if some kind of network boot is used on the servers, where the properties of the server need to be known beforehand. ## Deleted Objects Objects that are deleted are no longer visible on their *regular* endpoints. For historical reasons these objects are still available read-only on a special endpoint named /deleted. If objects have been deleted but have not yet been billed in the current period, the yet-to-be-billed price is still shown. <!-- #strip_js --> ## Node.js / Javascript Library We have a JavaScript library for you to use our API with ease. <a href=\"https://badge.fury.io/js/%40gridscale%2Fgsclient-js\"><img src=\"https://badge.fury.io/js/%40gridscale%2Fgsclient-js.svg\" alt=\"npm version\" height=\"18\"></a> <aside class=\"success\"> We want to make it even easier for you to manage your Infrastructure via our API - so feel free to contact us with any ideas, or languages you would like to see included. </aside> Requests with our Node.js lib return a little differently. Everything is the same except it allows you to add URL parameters to customize your requests. To get started <a href=\"https://www.npmjs.com/package/@gridscale/gsclient-js\" target=\"_blank\">click here</a> . <!-- #strip_js_end --> <!-- #strip_go --> ## Golang Library We also have a Golang library for Gophers. Requests with our Golang lib return a little differently. Everything is the same except it allows you to add URL parameters to customize your requests. To get started <a href=\"https://github.com/gridscale/gsclient-go\" target=\"_blank\">click here</a> . 
<!-- #strip_go_end --> <!-- #strip_python --> ## Python Library We have a Python library, that optionally also simplifies handling of asynchronous requests by mimicking synchronous blocking behaviour. To get started <a href=\"https://pypi.org/project/gs-api-client/\" target=\"_blank\">click here</a> . <!-- #strip_python_end --> # Authentication In order to use the API, the User-UUID and an API_Token are required. Both are available via the web GUI which can be found here on <a href=\"https://my.gridscale.io/APIs/\" target=\"_blank\">Your Account</a> <aside class=\"success\"> If you are logged in, your UUID and Token will be pulled dynamically from your account, so you can copy request examples straight into your code. </aside> The User-UUID remains the same, even if the users email address is changed. The API_Token is a randomly generated hash that allows read/write access. ## API_Token <table class=\"security-details\"><tbody><tr><th> Security scheme type: </th><td> API Key </td></tr><tr><th> header parameter name:</th><td> X-Auth-Token </td></tr></tbody></table> ## User_UUID <table class=\"security-details\"><tbody><tr><th> Security scheme type: </th><td> API Key </td></tr><tr><th> header parameter name:</th><td> X-Auth-UserId </td></tr></tbody></table> ## Examples <!-- #strip_js --> > Node.js ``` // to get started // read the docs @ https://www.npmjs.com/package/@gs_js_auth/api var gs_js_auth = require('@gs_js_auth/api').gs_js_auth; var client = new gs_js_auth.Client(\"##API_TOKEN##\",\"##USER_UUID##\"); ``` <!-- #strip_js_end --> <!-- #strip_go --> > Golang ``` // to get started // read the docs @ https://github.com/gridscale/gsclient-go config := gsclient.NewConfiguration( \"https://api.gridscale.io\", \"##USER_UUID##\", \"##API_TOKEN##\", false, //set debug mode ) client := gsclient.NewClient(config) ``` <!-- #strip_go_end --> > Shell Authentication Headers ``` -H \"X-Auth-UserId: ##USER_UUID##\" \\ -H \"X-Auth-Token: ##API_TOKEN##\" \\ ``` > Setting Authentication in your Environment variables ``` export API_TOKEN=\"##API_TOKEN##\" USER_UUID=\"##USER_UUID##\" ``` <aside class=\"notice\"> You must replace <code>USER_UUID</code> and <code>API_Token</code> with your personal UUID and API key respectively. </aside> # noqa: E501
OpenAPI spec version: 1.0.50
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from gs_api_client.swagger.models.paas_service_parameters import PaasServiceParameters # noqa: F401,E501
from gs_api_client.swagger.models.paas_service_resource_limits import PaasServiceResourceLimits # noqa: F401,E501
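# Illustrative sketch only (not part of the generated client): one way a caller
# might send the documented X-Auth headers and back off when the rate limit is
# exhausted, as described in the module docstring above. The endpoint path and
# the `requests` dependency are assumptions made for this example.
def _example_rate_limited_get(user_uuid, api_token,
                              url="https://api.gridscale.io/objects/paas/services"):
    import time
    import requests  # assumed third-party dependency, imported locally on purpose
    headers = {
        "Content-Type": "application/json",
        "X-Auth-UserId": user_uuid,
        "X-Auth-Token": api_token,
    }
    response = requests.get(url, headers=headers)
    if response.status_code == 429:
        # RateLimit-Reset is a Unix timestamp in milliseconds; sleep until then.
        reset_ms = int(response.headers.get("RateLimit-Reset", "0"))
        time.sleep(max(0.0, reset_ms / 1000.0 - time.time()))
        response = requests.get(url, headers=headers)
    return response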
class PaasServiceUpdate(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'labels': 'list[str]',
'parameters': 'PaasServiceParameters',
'resource_limits': 'PaasServiceResourceLimits',
'service_template_uuid': 'str'
}
attribute_map = {
'name': 'name',
'labels': 'labels',
'parameters': 'parameters',
'resource_limits': 'resource_limits',
'service_template_uuid': 'service_template_uuid'
}
def __init__(self, name=None, labels=None, parameters=None, resource_limits=None, service_template_uuid=None): # noqa: E501
"""PaasServiceUpdate - a model defined in Swagger""" # noqa: E501
self._name = None
self._labels = None
self._parameters = None
self._resource_limits = None
self._service_template_uuid = None
self.discriminator = None
if name is not None:
self.name = name
if labels is not None:
self.labels = labels
if parameters is not None:
self.parameters = parameters
if resource_limits is not None:
self.resource_limits = resource_limits
if service_template_uuid is not None:
self.service_template_uuid = service_template_uuid
@property
def name(self):
"""Gets the name of this PaasServiceUpdate. # noqa: E501
The human-readable name of the object. It supports the full UTF-8 character set, with a maximum of 64 characters. # noqa: E501
:return: The name of this PaasServiceUpdate. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this PaasServiceUpdate.
The human-readable name of the object. It supports the full UTF-8 character set, with a maximum of 64 characters. # noqa: E501
:param name: The name of this PaasServiceUpdate. # noqa: E501
:type: str
"""
self._name = name
@property
def labels(self):
"""Gets the labels of this PaasServiceUpdate. # noqa: E501
List of labels. # noqa: E501
:return: The labels of this PaasServiceUpdate. # noqa: E501
:rtype: list[str]
"""
return self._labels
@labels.setter
def labels(self, labels):
"""Sets the labels of this PaasServiceUpdate.
List of labels. # noqa: E501
:param labels: The labels of this PaasServiceUpdate. # noqa: E501
:type: list[str]
"""
self._labels = labels
@property
def parameters(self):
"""Gets the parameters of this PaasServiceUpdate. # noqa: E501
:return: The parameters of this PaasServiceUpdate. # noqa: E501
:rtype: PaasServiceParameters
"""
return self._parameters
@parameters.setter
def parameters(self, parameters):
"""Sets the parameters of this PaasServiceUpdate.
:param parameters: The parameters of this PaasServiceUpdate. # noqa: E501
:type: PaasServiceParameters
"""
self._parameters = parameters
@property
def resource_limits(self):
"""Gets the resource_limits of this PaasServiceUpdate. # noqa: E501
:return: The resource_limits of this PaasServiceUpdate. # noqa: E501
:rtype: PaasServiceResourceLimits
"""
return self._resource_limits
@resource_limits.setter
def resource_limits(self, resource_limits):
"""Sets the resource_limits of this PaasServiceUpdate.
:param resource_limits: The resource_limits of this PaasServiceUpdate. # noqa: E501
:type: PaasServiceResourceLimits
"""
self._resource_limits = resource_limits
@property
def service_template_uuid(self):
"""Gets the service_template_uuid of this PaasServiceUpdate. # noqa: E501
The template to which you want to update/upgrade your paas service. # noqa: E501
:return: The service_template_uuid of this PaasServiceUpdate. # noqa: E501
:rtype: str
"""
return self._service_template_uuid
@service_template_uuid.setter
def service_template_uuid(self, service_template_uuid):
"""Sets the service_template_uuid of this PaasServiceUpdate.
The template to which you want to update/upgrade your paas service. # noqa: E501
:param service_template_uuid: The service_template_uuid of this PaasServiceUpdate. # noqa: E501
:type: str
"""
self._service_template_uuid = service_template_uuid
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PaasServiceUpdate, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PaasServiceUpdate):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
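# Minimal usage sketch (an assumption about typical use, not generated code):
# build an update payload and serialize it to the plain dict an API client
# would send as the PATCH body.
if __name__ == "__main__":
    example_update = PaasServiceUpdate(name="my-paas-service", labels=["production"])
    print(example_update.to_dict())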
| 86.165939
| 12,787
| 0.69927
|
84ba3e53249def115ccc7410900325690502fb69
| 9,899
|
py
|
Python
|
jarvis_cd/launchers/lustre/package.py
|
lukemartinlogan/jarvis-cd
|
fe929a22aa9b93498bacd5494b15c7c6a7c49e37
|
[
"MIT"
] | null | null | null |
jarvis_cd/launchers/lustre/package.py
|
lukemartinlogan/jarvis-cd
|
fe929a22aa9b93498bacd5494b15c7c6a7c49e37
|
[
"MIT"
] | null | null | null |
jarvis_cd/launchers/lustre/package.py
|
lukemartinlogan/jarvis-cd
|
fe929a22aa9b93498bacd5494b15c7c6a7c49e37
|
[
"MIT"
] | null | null | null |
from jarvis_cd.echo_node import EchoNode
from jarvis_cd.exec_node import ExecNode
from jarvis_cd.hostfile import Hostfile
from jarvis_cd.launcher import Launcher, LauncherConfig
import os
import socket
from jarvis_cd.scp_node import SCPNode
from jarvis_cd.sleep_node import SleepNode
from jarvis_cd.ssh_node import SSHNode
class Lustre(Launcher):
def __init__(self, config_path=None, args=None):
super().__init__('lustre', config_path, args)
def _LoadConfig(self):
self.ssh_port = int(self.config['BASIC']['SSH_PORT'])
self.ssh_user = self.config['BASIC']['SSH_USER']
self.oss_hosts = Hostfile().LoadHostfile(self.config['OBJECT_STORAGE_SERVERS']['HOSTFILE'])
self.client_hosts = Hostfile().LoadHostfile(self.config['CLIENT']['HOSTFILE'])
self.num_ost_per_node = int(self.config['OBJECT_STORAGE_SERVERS']['NUM_OST_PER_NODE'])
def SetNumHosts(self, num_oss_hosts, num_client_hosts):
self.oss_hosts.SelectHosts(num_oss_hosts)
self.client_hosts.SelectHosts(num_client_hosts)
return
def _DefineClean(self):
nodes = []
#Remove Lustre Management Server
rm_mgt_cmd = f"rm -rf {self.config['MANAGEMENT_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode("rm_mgt",
self.config['MANAGEMENT_SERVER']['HOST'],
rm_mgt_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
        # Remove Lustre Metadata Server (MDT)
rm_mdt_cmd = f"rm -rf {self.config['METADATA_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode(
"make_mdt",
self.config['METADATA_SERVER']['HOST'],
rm_mdt_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
# Remove Lustre Object Storage Server (OSS) and Targets (OSTs)
for host in self.oss_hosts:
rm_ost_cmds = []
for i in range(self.num_ost_per_node):
ost_dir = f"{self.config['OBJECT_STORAGE_SERVERS']['MOUNT_POINT_BASE']}{i}"
rm_ost_cmds.append(f"rm -rf {ost_dir}")
rm_ost_cmd = ';'.join(rm_ost_cmds)
nodes.append(SSHNode("rm_ost",
host,
rm_ost_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
# Remove the Lustre PFS on the clients
rm_client_cmd = f"rm -rf {self.config['CLIENT']['MOUNT_POINT']}"
        nodes.append(SSHNode("rm_client",
self.client_hosts,
rm_client_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
return nodes
def _DefineStatus(self):
nodes = []
return nodes
def _DefineStop(self):
nodes = []
# Unmount the Lustre PFS on the clients
unmount_client_cmd = f"umount {self.config['CLIENT']['MOUNT_POINT']}"
nodes.append(SSHNode("unmount_client",
self.client_hosts,
unmount_client_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
# Unmount Lustre Object Storage Server (OSS) and Targets (OSTs)
index = 1
for host in self.oss_hosts:
unmount_ost_cmd = []
for i in range(self.num_ost_per_node):
ost_dir = f"{self.config['OBJECT_STORAGE_SERVERS']['MOUNT_POINT_BASE']}{i}"
unmount_ost_cmd.append(f"umount {ost_dir}")
index += 1
unmount_ost_cmd = ';'.join(unmount_ost_cmd)
nodes.append(SSHNode("unmount_ost",
host,
unmount_ost_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
        # Unmount Lustre Metadata Server (MDT)
unmount_mdt_cmd = f"umount {self.config['METADATA_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode(
"unmount_mdt",
self.config['METADATA_SERVER']['HOST'],
unmount_mdt_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
# Unmount Lustre Management Server (MGS)
unmount_mgt_cmd = f"umount {self.config['MANAGEMENT_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode("unmount_mgt",
self.config['MANAGEMENT_SERVER']['HOST'],
unmount_mgt_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
return nodes
def _DefineInit(self):
nodes = []
        # Make Lustre Management Server (MGS) and create its mount point
make_mgt_cmd = f"mkfs.lustre --reformat --mgs {self.config['MANAGEMENT_SERVER']['STORAGE']}"
mkdir_mgt_cmd = f"mkdir -p {self.config['MANAGEMENT_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode("make_mgt",
self.config['MANAGEMENT_SERVER']['HOST'],
f'{make_mgt_cmd};{mkdir_mgt_cmd}',
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
        # Make Lustre Metadata Server (MDT) and create its mount point
make_mdt_cmd = (
f"mkfs.lustre "
f"--fsname={self.config['BASIC']['FSNAME']} "
f"--reformat "
f"--mgsnode={self.config['MANAGEMENT_SERVER']['HOST']}@tcp "
f"--mdt "
f"--index=0 {self.config['METADATA_SERVER']['STORAGE']}"
)
mkdir_mdt_cmd = f"mkdir -p {self.config['METADATA_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode(
"make_mdt",
self.config['METADATA_SERVER']['HOST'],
f'{make_mdt_cmd};{mkdir_mdt_cmd}',
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
        # Make Lustre Object Storage Server (OSS) Targets (OSTs) and create their mount points
index = 1
for host in self.oss_hosts:
make_ost_cmd = []
mkdir_ost_cmd = []
for i in range(self.num_ost_per_node):
ost_id = f"OST{i}"
ost_dev = f"{self.config['OBJECT_STORAGE_SERVERS'][ost_id]}"
ost_dir = f"{self.config['OBJECT_STORAGE_SERVERS']['MOUNT_POINT_BASE']}{i}"
make_ost_cmd.append((
f"mkfs.lustre --ost "
f"--reformat "
f"--fsname={self.config['BASIC']['FSNAME']} "
f"--mgsnode={self.config['MANAGEMENT_SERVER']['HOST']}@tcp "
f"--index={index} {ost_dev}"
))
mkdir_ost_cmd.append(f"mkdir -p {ost_dir}")
index += 1
make_ost_cmd = ';'.join(make_ost_cmd)
mkdir_ost_cmd = ';'.join(mkdir_ost_cmd)
nodes.append(SSHNode("mount_ost",
host,
f'{make_ost_cmd};{mkdir_ost_cmd}',
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
        # Create the Lustre PFS mount point on the clients
mkdir_client_cmd = f"mkdir -p {self.config['CLIENT']['MOUNT_POINT']}"
nodes.append(SSHNode("mount_client",
self.client_hosts,
mkdir_client_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
return nodes
def _DefineStart(self):
nodes = []
        #Mount Lustre Management Server (MGS)
mount_mgt_cmd = f"mount -t lustre {self.config['MANAGEMENT_SERVER']['STORAGE']} {self.config['MANAGEMENT_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode("make_mgt",
self.config['MANAGEMENT_SERVER']['HOST'],
mount_mgt_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
        #Mount Lustre Metadata Server (MDT)
mount_mdt_cmd = f"mount -t lustre {self.config['METADATA_SERVER']['STORAGE']} {self.config['METADATA_SERVER']['MOUNT_POINT']}"
nodes.append(SSHNode(
"make_mdt",
self.config['METADATA_SERVER']['HOST'],
mount_mdt_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
        #Mount Lustre Object Storage Server (OSS) and Targets (OSTs)
index = 1
for host in self.oss_hosts:
mount_ost_cmd = []
for i in range(self.num_ost_per_node):
ost_id = f"OST{i}"
ost_dev = f"{self.config['OBJECT_STORAGE_SERVERS'][ost_id]}"
ost_dir = f"{self.config['OBJECT_STORAGE_SERVERS']['MOUNT_POINT_BASE']}{i}"
mount_ost_cmd.append(f"mount -t lustre {ost_dev} {ost_dir}")
index += 1
mount_ost_cmd = ';'.join(mount_ost_cmd)
nodes.append(SSHNode("mount_ost",
host,
mount_ost_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
#Mount the Lustre PFS on the clients
mount_client_cmd = f"mount -t lustre {self.config['MANAGEMENT_SERVER']['HOST']}@tcp:/{self.config['BASIC']['FSNAME']} {self.config['CLIENT']['MOUNT_POINT']}"
nodes.append(SSHNode("mount_client",
self.client_hosts,
mount_client_cmd,
username=self.ssh_user, port=self.ssh_port, print_output=True, sudo=True))
return nodes
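# Illustrative notes (assumptions, not part of jarvis-cd itself): the section
# and key names below are exactly the ones read by _LoadConfig() and the
# _Define*() methods above; the file format shown is only an example.
#
#   [BASIC]                   FSNAME, SSH_PORT, SSH_USER
#   [MANAGEMENT_SERVER]       HOST, STORAGE, MOUNT_POINT
#   [METADATA_SERVER]         HOST, STORAGE, MOUNT_POINT
#   [OBJECT_STORAGE_SERVERS]  HOSTFILE, NUM_OST_PER_NODE, MOUNT_POINT_BASE, OST0..OSTn
#   [CLIENT]                  HOSTFILE, MOUNT_POINT
#
# A hypothetical driver; the Launcher base class is assumed to execute the
# node lists returned by _DefineInit/_DefineStart/_DefineStop/_DefineClean.
if __name__ == "__main__":
    lustre = Lustre(config_path="lustre.ini")
    lustre.SetNumHosts(num_oss_hosts=2, num_client_hosts=4)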
| 45.828704
| 165
| 0.567532
|
6bd6ff4dc3fe404a35ec1fd6c0c699c74e8851e5
| 480
|
py
|
Python
|
app/modules/pinyin_compare/pinyin.py
|
KennyChenFight/AIBox-Server
|
e2bb4a9afff1db7596741c4084889769510eb27b
|
[
"MIT"
] | 1
|
2018-10-18T14:48:09.000Z
|
2018-10-18T14:48:09.000Z
|
app/modules/pinyin_compare/pinyin.py
|
KennyChenFight/AIBox-Server
|
e2bb4a9afff1db7596741c4084889769510eb27b
|
[
"MIT"
] | null | null | null |
app/modules/pinyin_compare/pinyin.py
|
KennyChenFight/AIBox-Server
|
e2bb4a9afff1db7596741c4084889769510eb27b
|
[
"MIT"
] | 1
|
2018-09-22T08:18:14.000Z
|
2018-09-22T08:18:14.000Z
|
from xpinyin import Pinyin
p = Pinyin()
def to_pinyin(word):
return p.get_pinyin(word)
# neither word_1 nor word_2 is pinyin
def compare(word_1, word_2):
pinyin_1 = p.get_pinyin(word_1)
pinyin_2 = p.get_pinyin(word_2)
    return pinyin_1 == pinyin_2
# word_1 is not pinyin, word_2 is pinyin
def compare_with_pinyin(word_1, word_2):
pinyin_1 = p.get_pinyin(word_1)
    return pinyin_1 == word_2
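# Quick usage sketch: xpinyin joins syllables with "-" by default, so
# to_pinyin("你好") yields "ni-hao"; compare() therefore treats words as equal
# when their full pinyin strings match, and compare_with_pinyin() expects the
# second argument to already be in that hyphenated form.
if __name__ == "__main__":
    print(to_pinyin("你好"))                      # ni-hao
    print(compare("你好", "妮豪"))                 # True if both map to ni-hao
    print(compare_with_pinyin("你好", "ni-hao"))   # True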
| 19.2
| 40
| 0.6625
|
8c3e094576ad5407cb186a4674c54c1bcdd8d3ec
| 14,985
|
py
|
Python
|
Data/Drawings.py
|
pracedru/PracedruDesign
|
e6e313ee3efb377a8e393e5276eb9daa172e1c58
|
[
"BSD-3-Clause"
] | 3
|
2019-01-09T09:32:56.000Z
|
2019-02-14T18:27:33.000Z
|
Data/Drawings.py
|
pracedru/PracedruDesign
|
e6e313ee3efb377a8e393e5276eb9daa172e1c58
|
[
"BSD-3-Clause"
] | null | null | null |
Data/Drawings.py
|
pracedru/PracedruDesign
|
e6e313ee3efb377a8e393e5276eb9daa172e1c58
|
[
"BSD-3-Clause"
] | null | null | null |
from enum import Enum
from Data.Paper import *
from Data.Events import ChangeEvent, ValueChangeEvent
from Data.Objects import ObservableObject, NamedObservableObject
from Data.Parameters import Parameters
from Data.Plane import Plane
from Data.Sketch import Sketch
from Data.Vertex import Vertex
class ViewType(Enum):
SketchView = 0
PartView = 1
class AnnotationType(Enum):
Dimension = 0
Surface = 1
Weld = 2
Baloon = 3
BirdCage = 4
class Drawings(ObservableObject):
def __init__(self, document):
ObservableObject.__init__(self)
self._headers = []
self._borders = []
self._drawings = []
self._doc = document
def create_header(self):
header = Sketch(self._doc)
header.name = "New Header"
self.changed(ChangeEvent(self, ChangeEvent.BeforeObjectAdded, header))
self._doc.get_geometries().add_geometry(header)
self._headers.append(header)
self.changed(ChangeEvent(self, ChangeEvent.ObjectAdded, header))
return header
def create_drawing(self, size, name, header, orientation):
drawing = Drawing(self._doc, size, name, header, orientation)
self.changed(ChangeEvent(self, ChangeEvent.BeforeObjectAdded, drawing))
self._drawings.append(drawing)
self.changed(ChangeEvent(self, ChangeEvent.ObjectAdded, drawing))
drawing.add_change_handler(self.drawing_changed)
return drawing
def get_headers(self):
return list(self._headers)
@property
def name(self):
return "Drawings"
@property
def items(self):
return list(self._drawings)
@property
def length(self):
return len(self._drawings)
	def item(self, index):
		return self._drawings[index]
def get_header_uids(self):
uids = []
for header in self._headers:
uids.append(header.uid)
return uids
def serialize_json(self):
return {
'headers': self.get_header_uids(),
'drawings': self._drawings
}
def drawing_changed(self, event):
self.changed(ChangeEvent(self, ChangeEvent.ObjectChanged, event.sender))
if event.type == ChangeEvent.Deleted:
self.changed(ChangeEvent(self, ChangeEvent.BeforeObjectRemoved, event.sender))
if type(event.sender) is Drawing:
self._drawings.remove(event.sender)
self.changed(ChangeEvent(self, ChangeEvent.ObjectRemoved, event.sender))
@staticmethod
def deserialize(data, document):
drawings = Drawings(document)
if data is not None:
drawings.deserialize_data(data)
return drawings
def deserialize_data(self, data):
for uid in data['headers']:
header = self._doc.get_geometries().get_geometry(uid)
self._headers.append(header)
for dwg_data in data['drawings']:
drawing = Drawing.deserialize(dwg_data, self._doc)
self._drawings.append(drawing)
drawing.add_change_handler(self.drawing_changed)
class Drawing(Paper, Parameters):
def __init__(self, document, size=[1, 1], name="New Drawing", header=None, orientation=Paper.Landscape):
Paper.__init__(self, size, orientation)
Parameters.__init__(self, name, document.get_parameters())
self._doc = document
self._views = []
self._border_sketch = Sketch(self)
self._header_sketch = header
self._margins = [0.02, 0.02, 0.02, 0.02]
self._fields = {}
self._annotations = []
self.generate_border()
Paper.add_change_handler(self, self.on_paper_changed)
def on_paper_changed(self, event):
if event.object is not None:
if hasattr(event.object, '__iter__'):
if "name" in event.object:
if event.object['name'] == "margins" or event.object['name'] == "size":
print("paper changed")
self.generate_border()
@property
def document(self):
return self._doc
@property
def header_sketch(self):
return self._header_sketch
@property
def header(self):
return self._header_sketch.name
@property
def border_sketch(self):
return self._border_sketch
def generate_border(self):
alpha = "ABCDEFGHIJKLMNOP"
sketch = self._border_sketch
sketch.clear()
self._doc.styles.get_edge_style_by_name("border").thickness = 0.0005
m = self._margins
sz = self.size
border_width = sz[0] - m[0] - m[2]
max_len = 0.1
divisions = round(border_width / max_len)
length = border_width / divisions
pnt1 = sketch.create_keypoint(m[0], m[3], 0)
pnt2 = sketch.create_keypoint(sz[0] - m[2], m[3], 0)
pnt3 = sketch.create_keypoint(sz[0] - m[2], sz[1] - m[1], 0)
pnt4 = sketch.create_keypoint(m[0], sz[1] - m[1], 0)
sketch.create_line_edge(pnt1, pnt2).style_name = "border"
sketch.create_line_edge(pnt2, pnt3).style_name = "border"
sketch.create_line_edge(pnt3, pnt4).style_name = "border"
sketch.create_line_edge(pnt4, pnt1).style_name = "border"
for i in range(0, divisions):
tkp = sketch.create_keypoint(m[0] + (i + 0.5) * length , 2 * m[3] / 3, 0)
sketch.create_text(tkp, alpha[i], 0.005)
tkp = sketch.create_keypoint(m[0] + (i + 0.5) * length,sz[1] - 2 * m[1] / 3, 0)
sketch.create_text(tkp, alpha[i], 0.005)
if i > 0:
pnt1 = sketch.create_keypoint(m[0] + i * length , m[3], 0)
pnt2 = sketch.create_keypoint(m[0] + i * length , m[3] / 2, 0)
sketch.create_line_edge(pnt1, pnt2).style_name = "border"
pnt1 = sketch.create_keypoint(m[0] + i * length , sz[1] - m[1], 0)
pnt2 = sketch.create_keypoint(m[0] + i * length , sz[1] - m[1] / 2, 0)
sketch.create_line_edge(pnt1, pnt2).style_name = "border"
border_height = sz[1] - m[1] - m[3]
divisions = round(border_height / max_len)
length = border_height / divisions
for i in range(0, divisions):
tkp = sketch.create_keypoint(2 * m[0] / 3, m[3] + (i + 0.5) * length, 0)
sketch.create_text(tkp, str(i+1), 0.005)
tkp = sketch.create_keypoint(sz[0] - 2 * m[2] / 3, m[3] + (i + 0.5) * length, 0)
sketch.create_text(tkp, str(i+1), 0.005)
if i > 0:
pnt1 = sketch.create_keypoint(m[0], m[3] + i * length, 0)
pnt2 = sketch.create_keypoint(m[0] / 2, m[3] + i * length, 0)
sketch.create_line_edge(pnt1, pnt2).style_name = "border"
pnt1 = sketch.create_keypoint(sz[0] - m[2], m[3] + i * length, 0)
pnt2 = sketch.create_keypoint(sz[0] - m[2] / 2, m[3] + i * length, 0)
sketch.create_line_edge(pnt1, pnt2).style_name = "border"
def create_sketch_view(self, sketch, scale, offset):
view = SketchView(self, sketch, scale, offset)
self.changed(ChangeEvent(self, ChangeEvent.BeforeObjectAdded, view))
self._views.append(view)
self.changed(ChangeEvent(self, ChangeEvent.ObjectAdded, view))
view.add_change_handler(self.on_view_changed)
return view
def create_part_view(self, part, scale, offset):
view = PartView(self, part, scale, offset)
self.changed(ChangeEvent(self, ChangeEvent.BeforeObjectAdded, view))
self._views.append(view)
self.changed(ChangeEvent(self, ChangeEvent.ObjectAdded, view))
view.add_change_handler(self.on_view_changed)
return view
def delete(self):
self.changed(ChangeEvent(self, ChangeEvent.Deleted, self))
def add_field(self, name, value):
field = Field(name, value)
self.changed(ChangeEvent(self, ChangeEvent.BeforeObjectAdded, field))
self._fields[name] = field
self.changed(ChangeEvent(self, ChangeEvent.ObjectAdded, field))
field.add_change_handler(self.on_field_changed)
def get_field(self, name):
if name in self._fields:
return self._fields[name]
else:
return None
def get_fields(self):
return dict(self._fields)
def get_views(self):
return list(self._views)
def on_view_changed(self, event):
self.changed(ChangeEvent(self, ChangeEvent.ObjectChanged, event.sender))
if event.type == ChangeEvent.Deleted:
self.changed(ChangeEvent(self, ChangeEvent.BeforeObjectRemoved, event.sender))
self._views.remove(event.sender)
self.changed(ChangeEvent(self, ChangeEvent.ObjectRemoved, event.sender))
event.sender.remove_change_handler(self.on_view_changed)
def on_field_changed(self, event):
self.changed(ChangeEvent(self, ChangeEvent.ObjectChanged, event.sender))
if event.object == "name":
self._fields.pop(event.old_value)
self._fields[event.sender.name] = event.sender
def serialize_json(self):
print("test")
return {
'paper': Paper.serialize_json(self),
'name': self._name,
'views': self._views,
'border_sketch': self._border_sketch,
'header_sketch': self._header_sketch.uid,
'fields': self._fields
}
@staticmethod
def deserialize(data, document):
drawing = Drawing(document)
if data is not None:
drawing.deserialize_data(data)
return drawing
def deserialize_data(self, data):
Paper.deserialize_data(self, data['paper'])
self._name = data.get('name', "No name")
self._header_sketch = self._doc.get_geometries().get_geometry(data['header_sketch'])
for field_data_tuple in data.get('fields', {}).items():
field_data = field_data_tuple[1]
field = Field.deserialize(field_data)
self._fields[field.name] = field
field.add_change_handler(self.on_field_changed)
for view_data in data.get('views', []):
if view_data['view']['type'] == ViewType.SketchView.value:
view = SketchView.deserialize(view_data, self)
self._views.append(view)
view.add_change_handler(self.on_view_changed)
if view_data['view']['type'] == ViewType.PartView.value:
view = PartView.deserialize(view_data, self)
self._views.append(view)
view.add_change_handler(self.on_view_changed)
self.generate_border()
class Field(NamedObservableObject):
def __init__(self, name="New Field", value="Field Value"):
NamedObservableObject.__init__(self, name)
self._value = value
@property
def value(self):
return self._value
@value.setter
def value(self, value):
old_value = self._value
self._value = value
self.changed(ValueChangeEvent(self, 'value', old_value, value))
def serialize_json(self):
return {
'name': NamedObservableObject.serialize_json(self),
'value': self.value
}
@staticmethod
def deserialize(data):
field = Field(data)
if data is not None:
field.deserialize_data(data)
return field
def deserialize_data(self, data):
NamedObservableObject.deserialize_data(self, data['name'])
self._value = data['value']
class View(NamedObservableObject):
def __init__(self, drawing, name="New view", scale=1, offset=Vertex()):
NamedObservableObject.__init__(self, name)
self._drawing = drawing
self._offset = offset
self._scale = float(scale)
self._rotation = 0.0
self._view_type = ViewType.SketchView
@property
def view_type(self):
return self._view_type
@property
def limits(self):
return [0.0, 0.0, 0.0, 0.0]
@property
def scale(self):
return self._scale
@scale.setter
def scale(self, value):
old_value = self._scale
self._scale = value
self.changed(ValueChangeEvent(self, 'scale', old_value, value))
@property
def scale_name(self):
if self._scale < 1:
return "1 : " + str(1 / self._scale)
else:
			return str(self._scale) + " : 1"
@scale_name.setter
def scale_name(self, value):
values = value.split(":")
if len(values) == 2:
self._scale = float(values[0]) / float(values[1])
@property
def offset(self):
return self._offset
@property
def offset_values(self):
return self._offset.xyz
@property
def rotation(self):
return self._rotation
@rotation.setter
def rotation(self, value):
self._rotation = float(value)
def serialize_json(self):
return {
'no': NamedObservableObject.serialize_json(self),
'scale': self._scale,
'offset': self._offset,
'rot': self._rotation,
'type': self._view_type.value
}
def deserialize_data(self, data):
NamedObservableObject.deserialize_data(self, data['no'])
self._scale = data['scale']
self._offset = Vertex.deserialize(data['offset'])
self._rotation = data.get('rot', 0.0)
class SketchView(View):
def __init__(self, drawing, sketch=None, scale=1, offset=Vertex()):
View.__init__(self, drawing, "New View", scale, offset)
self._sketch = sketch
self._view_type = ViewType.SketchView
if sketch is not None:
self._name = sketch.name
self._sketch.add_change_handler(self.on_sketch_changed)
@property
def sketch(self):
return self._sketch
@property
def limits(self):
if self._sketch is not None:
return self._sketch.get_limits()
		return super().limits
def delete(self):
self.changed(ChangeEvent(self, ChangeEvent.Deleted, self))
def on_sketch_changed(self, event):
self.changed(ChangeEvent(self, ChangeEvent.ObjectChanged, self._sketch))
def serialize_json(self):
return {
'view': View.serialize_json(self),
'sketch': self._sketch.uid,
}
@staticmethod
def deserialize(data, drawing):
sketch_view = SketchView(drawing)
if data is not None:
sketch_view.deserialize_data(data)
return sketch_view
def deserialize_data(self, data):
document = self._drawing.document
View.deserialize_data(self, data['view'])
self._sketch = document.get_geometries().get_geometry(data['sketch'])
self._sketch.add_change_handler(self.on_sketch_changed)
class PartView(View):
def __init__(self, drawing, part=None, scale=1, offset=Vertex()):
View.__init__(self, drawing, "New View", scale, offset)
self._part = part
self._view_type = ViewType.PartView
self._sketch = Sketch(drawing)
if part is not None:
self._name = part.name
self._part.add_change_handler(self.on_part_changed)
self.update_sketch()
@property
def sketch(self):
return self._sketch
@property
def part(self):
return self._part
def update_sketch(self):
if self._part.update_needed:
self._part.update_geometry()
section_datas = []
if self._part is not None:
self._sketch.clear()
for surface in self._part.get_surfaces():
plane = Plane()
section_data = surface.get_section_by_plane(plane)
if section_data is not None:
section_datas.append(section_data)
for section_data in section_datas:
coords = section_data['coords']
if len(coords) > 2:
kp1 = self._sketch.create_keypoint(coords[0][0], coords[0][1], coords[0][2])
for i in range(1, len(coords)):
kp2 = self._sketch.create_keypoint(coords[i][0], coords[i][1], coords[i][2])
self._sketch.create_line_edge(kp1, kp2)
kp1 = kp2
self.changed(ChangeEvent(self, ChangeEvent.ObjectChanged, self))
def delete(self):
self.changed(ChangeEvent(self, ChangeEvent.Deleted, self))
def on_part_changed(self, event):
self.changed(ChangeEvent(self, ChangeEvent.ObjectChanged, self._part))
self.update_sketch()
def serialize_json(self):
return {
'view': View.serialize_json(self),
'part': self._part.uid
}
@staticmethod
def deserialize(data, drawing):
part_view = PartView(drawing)
if data is not None:
part_view.deserialize_data(data)
return part_view
def deserialize_data(self, data):
doc = self._drawing.document
View.deserialize_data(self, data['view'])
self._part = doc.get_geometries().get_geometry(data['part'])
self._part.add_change_handler(self.on_part_changed)
self.update_sketch()
class Annotation(ObservableObject):
def __init__(self, type = AnnotationType.Dimension):
ObservableObject.__init__(self)
self.type = type
self.view = None
self.kps = []
self.edges = []
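# Minimal usage sketch for the Field helper defined above (assuming the
# Data.Objects base classes behave as their use in this module suggests):
# fields are simple name/value pairs that emit change events and round-trip
# through the same JSON-style dicts used by Drawing.serialize_json().
if __name__ == "__main__":
    field = Field("title", "Bracket, left hand")
    field.value = "Bracket, right hand"   # fires a ValueChangeEvent on the field
    data = field.serialize_json()
    restored = Field.deserialize(data)
    print(restored.value)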
| 29.267578
| 105
| 0.719319
|
b68f3d3f2abf99cbfce04160d16c49cb3057c97a
| 2,087
|
py
|
Python
|
tests/flask/test_query_params.py
|
skilkis/strawberry
|
05c94aea86d5c44e226d0f5a0d453079308ce8b9
|
[
"MIT"
] | null | null | null |
tests/flask/test_query_params.py
|
skilkis/strawberry
|
05c94aea86d5c44e226d0f5a0d453079308ce8b9
|
[
"MIT"
] | 3
|
2021-11-08T01:12:10.000Z
|
2022-03-21T06:33:38.000Z
|
tests/flask/test_query_params.py
|
uvicorn/strawberry
|
100e47fd430f0552c9ef9549306faceac610431d
|
[
"MIT"
] | null | null | null |
import json
def test_no_graphiql_empty_get(flask_client_no_graphiql):
response = flask_client_no_graphiql.get("/graphql")
assert response.status_code == 415
def test_no_query(flask_client):
params = {"variables": '{"name": "James"}'}
response = flask_client.get("/graphql", query_string=params)
assert response.status_code == 400
def test_get_with_query_params(flask_client):
params = {
"query": """
query {
hello
}
"""
}
response = flask_client.get("/graphql", query_string=params)
data = json.loads(response.data.decode())
assert response.status_code == 200
assert data["data"]["hello"] == "Hello world"
def test_can_pass_variables_with_query_params(flask_client):
params = {
"query": "query Hello($name: String!) { hello(name: $name) }",
"variables": '{"name": "James"}',
}
response = flask_client.get("/graphql", query_string=params)
data = json.loads(response.data.decode())
assert response.status_code == 200
assert data["data"]["hello"] == "Hello James"
def test_post_fails_with_query_params(flask_client):
params = {
"query": """
query {
hello
}
"""
}
response = flask_client.post("/graphql", query_string=params)
assert response.status_code == 415
def test_does_not_allow_mutation(flask_client):
query = {
"query": """
mutation {
hello
}
"""
}
response = flask_client.get("/graphql", query_string=query)
assert response.status_code == 400
assert "mutations are not allowed when using GET" in response.text
def test_fails_if_allow_queries_via_get_false(flask_client_no_get):
query = {
"query": """
query {
hello
}
"""
}
response = flask_client_no_get.get("/graphql", query_string=query)
assert response.status_code == 400
assert "queries are not allowed when using GET" in response.text
| 23.188889
| 70
| 0.609008
|
13b6bbbd28c2d096149ec60f15b2cc83ff0f13b9
| 1,449
|
py
|
Python
|
grr/tools/export_plugins/hash_file_store_plugin.py
|
theGreenJedi/grr
|
d9e11e304dc299d49c76b7fdf6fdbfcd4b8eec39
|
[
"Apache-2.0"
] | null | null | null |
grr/tools/export_plugins/hash_file_store_plugin.py
|
theGreenJedi/grr
|
d9e11e304dc299d49c76b7fdf6fdbfcd4b8eec39
|
[
"Apache-2.0"
] | null | null | null |
grr/tools/export_plugins/hash_file_store_plugin.py
|
theGreenJedi/grr
|
d9e11e304dc299d49c76b7fdf6fdbfcd4b8eec39
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""'hash_file_store' plugin for GRR export tool."""
from grr.lib import data_store
from grr.lib import rdfvalue
from grr.lib.aff4_objects import filestore
from grr.tools.export_plugins import plugin
class HashFileStoreExportPlugin(plugin.OutputPluginBasedExportPlugin):
"""Exports hashes from HashFileStore via given hunt output plugin."""
name = "hash_file_store"
description = "Exports HashFileStore contents."
  def ConfigureArgParser(self, parser):
    """Configures args parser for HashFileStoreExportPlugin."""
parser.add_argument("--threads",
type=int,
default=8,
help="Maximum number of threads to use.")
parser.add_argument("--batch",
type=int,
default=1000,
help="Size of batches processed by each thread.")
parser.add_argument("--checkpoint_every",
type=int,
default=1000 * 1000,
help="Flush the results every time after processing "
"this number of values.")
    super(HashFileStoreExportPlugin, self).ConfigureArgParser(parser)
def GetValuesSourceURN(self, unused_args):
return rdfvalue.RDFURN("aff4:/files/hash")
def GetValuesForExport(self, unused_args):
return filestore.HashFileStore.ListHashes(token=data_store.default_token)
| 32.931818
| 77
| 0.640442
|
b3fe47603d0debbca463ac8ea43ae98d10435b43
| 224
|
py
|
Python
|
board/filters/__init__.py
|
EliasEriksson/MinesweeperAI
|
3b881540baceea116eb3023c33bb355e6a67f8ce
|
[
"MIT"
] | 1
|
2020-04-11T21:29:24.000Z
|
2020-04-11T21:29:24.000Z
|
board/filters/__init__.py
|
EliasEriksson/MinesweeperAI
|
3b881540baceea116eb3023c33bb355e6a67f8ce
|
[
"MIT"
] | null | null | null |
board/filters/__init__.py
|
EliasEriksson/MinesweeperAI
|
3b881540baceea116eb3023c33bb355e6a67f8ce
|
[
"MIT"
] | null | null | null |
from .black_and_white_filter import black_and_white_filter as black_and_white
from .color_filter import color_filter as color
from .field_filter import field_filter as field
__all__ = ["black_and_white", "color", "field"]
| 32
| 77
| 0.825893
|
4250c19d146ad00ecc64c49ef45a35a451655022
| 11,052
|
py
|
Python
|
caluma/form/migrations/0001_initial.py
|
czosel/caluma
|
4a3e81b2000961ab934bfc1c6840ec00f0ba2c19
|
[
"MIT"
] | null | null | null |
caluma/form/migrations/0001_initial.py
|
czosel/caluma
|
4a3e81b2000961ab934bfc1c6840ec00f0ba2c19
|
[
"MIT"
] | 1
|
2020-07-11T01:07:44.000Z
|
2020-07-11T01:07:44.000Z
|
caluma/form/migrations/0001_initial.py
|
czosel/caluma
|
4a3e81b2000961ab934bfc1c6840ec00f0ba2c19
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2018-12-12 12:43
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import localized_fields.fields.field
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Answer",
fields=[
("created_at", models.DateTimeField(auto_now_add=True)),
("modified_at", models.DateTimeField(auto_now=True)),
(
"created_by_user",
models.CharField(blank=True, max_length=150, null=True),
),
(
"created_by_group",
models.CharField(blank=True, max_length=150, null=True),
),
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("value", django.contrib.postgres.fields.jsonb.JSONField()),
("meta", django.contrib.postgres.fields.jsonb.JSONField(default={})),
],
),
migrations.CreateModel(
name="Document",
fields=[
("created_at", models.DateTimeField(auto_now_add=True)),
("modified_at", models.DateTimeField(auto_now=True)),
(
"created_by_user",
models.CharField(blank=True, max_length=150, null=True),
),
(
"created_by_group",
models.CharField(blank=True, max_length=150, null=True),
),
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("meta", django.contrib.postgres.fields.jsonb.JSONField(default={})),
],
options={"abstract": False},
),
migrations.CreateModel(
name="Form",
fields=[
("created_at", models.DateTimeField(auto_now_add=True)),
("modified_at", models.DateTimeField(auto_now=True)),
(
"created_by_user",
models.CharField(blank=True, max_length=150, null=True),
),
(
"created_by_group",
models.CharField(blank=True, max_length=150, null=True),
),
("slug", models.SlugField(primary_key=True, serialize=False)),
("name", localized_fields.fields.field.LocalizedField(required=[])),
(
"description",
localized_fields.fields.field.LocalizedField(
blank=True, null=True, required=[]
),
),
("meta", django.contrib.postgres.fields.jsonb.JSONField(default={})),
("is_published", models.BooleanField(default=False)),
("is_archived", models.BooleanField(default=False)),
],
options={"abstract": False},
),
migrations.CreateModel(
name="FormQuestion",
fields=[
("created_at", models.DateTimeField(auto_now_add=True)),
("modified_at", models.DateTimeField(auto_now=True)),
(
"created_by_user",
models.CharField(blank=True, max_length=150, null=True),
),
(
"created_by_group",
models.CharField(blank=True, max_length=150, null=True),
),
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
(
"sort",
models.PositiveIntegerField(
db_index=True, default=0, editable=False
),
),
(
"form",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="form.Form"
),
),
],
options={"ordering": ("-sort", "id")},
),
migrations.CreateModel(
name="Option",
fields=[
("created_at", models.DateTimeField(auto_now_add=True)),
("modified_at", models.DateTimeField(auto_now=True)),
(
"created_by_user",
models.CharField(blank=True, max_length=150, null=True),
),
(
"created_by_group",
models.CharField(blank=True, max_length=150, null=True),
),
("slug", models.SlugField(primary_key=True, serialize=False)),
("label", localized_fields.fields.field.LocalizedField(required=[])),
("meta", django.contrib.postgres.fields.jsonb.JSONField(default={})),
],
options={"abstract": False},
),
migrations.CreateModel(
name="Question",
fields=[
("created_at", models.DateTimeField(auto_now_add=True)),
("modified_at", models.DateTimeField(auto_now=True)),
(
"created_by_user",
models.CharField(blank=True, max_length=150, null=True),
),
(
"created_by_group",
models.CharField(blank=True, max_length=150, null=True),
),
("slug", models.SlugField(primary_key=True, serialize=False)),
("label", localized_fields.fields.field.LocalizedField(required=[])),
(
"type",
models.CharField(
choices=[
("checkbox", "checkbox"),
("integer", "integer"),
("float", "float"),
("radio", "radio"),
("textarea", "textarea"),
("text", "text"),
],
max_length=10,
),
),
("is_required", models.TextField(default="false")),
("is_hidden", models.TextField(default="false")),
("is_archived", models.BooleanField(default=False)),
(
"configuration",
django.contrib.postgres.fields.jsonb.JSONField(default={}),
),
("meta", django.contrib.postgres.fields.jsonb.JSONField(default={})),
],
options={"abstract": False},
),
migrations.CreateModel(
name="QuestionOption",
fields=[
("created_at", models.DateTimeField(auto_now_add=True)),
("modified_at", models.DateTimeField(auto_now=True)),
(
"created_by_user",
models.CharField(blank=True, max_length=150, null=True),
),
(
"created_by_group",
models.CharField(blank=True, max_length=150, null=True),
),
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
(
"sort",
models.PositiveIntegerField(
db_index=True, default=0, editable=False
),
),
(
"option",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="form.Option"
),
),
(
"question",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="form.Question"
),
),
],
options={"ordering": ("-sort", "id")},
),
migrations.AddField(
model_name="question",
name="options",
field=models.ManyToManyField(
related_name="questions",
through="form.QuestionOption",
to="form.Option",
),
),
migrations.AddField(
model_name="formquestion",
name="question",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="form.Question"
),
),
migrations.AddField(
model_name="form",
name="questions",
field=models.ManyToManyField(
related_name="forms", through="form.FormQuestion", to="form.Question"
),
),
migrations.AddField(
model_name="document",
name="form",
field=models.ForeignKey(
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="documents",
to="form.Form",
),
),
migrations.AddField(
model_name="answer",
name="document",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="answers",
to="form.Document",
),
),
migrations.AddField(
model_name="answer",
name="question",
field=models.ForeignKey(
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="answers",
to="form.Question",
),
),
migrations.AlterUniqueTogether(
name="questionoption", unique_together=set([("option", "question")])
),
migrations.AlterUniqueTogether(
name="formquestion", unique_together=set([("form", "question")])
),
migrations.AlterUniqueTogether(
name="answer", unique_together=set([("document", "question")])
),
]
| 37.212121
| 87
| 0.440554
|
e3ff4211aed4e1270ff50cafc914f9c7068fd7fe
| 3,729
|
py
|
Python
|
tools/android/junction_test.py
|
sevki/bazel
|
b18915752a69fbbc6ed94e1710198167593565fc
|
[
"Apache-2.0"
] | 8
|
2015-12-25T16:16:53.000Z
|
2021-07-13T09:58:53.000Z
|
tools/android/junction_test.py
|
sevki/bazel
|
b18915752a69fbbc6ed94e1710198167593565fc
|
[
"Apache-2.0"
] | 67
|
2022-01-12T18:22:13.000Z
|
2022-01-12T18:24:28.000Z
|
tools/android/junction_test.py
|
sevki/bazel
|
b18915752a69fbbc6ed94e1710198167593565fc
|
[
"Apache-2.0"
] | 6
|
2016-02-10T20:07:36.000Z
|
2020-11-18T17:44:05.000Z
|
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for TempJunction."""
import os
import unittest
from src.test.py.bazel import test_base
from tools.android import junction
class JunctionTest(test_base.TestBase):
"""Unit tests for junction.py."""
def _AssertCreateJunctionWhenTargetsParentsDontExist(self, max_path=None):
def tempdir():
return self.ScratchDir("junc temp")
target = self.Path("this directory/should not\\yet exist")
self.assertFalse(os.path.exists(os.path.dirname(os.path.dirname(target))))
# Make the `target` path a non-normalized Windows path with a space in it
# which doesn't even exist.
# TempJunction should still work; it should:
# - normalize the path, and
# - create all directories on the path
# target = os.path.dirname(target) + "/junc target"
juncpath = None
with junction.TempJunction(
target, testonly_mkdtemp=tempdir, testonly_maxpath=max_path) as j:
juncpath = j
# Ensure that `j` created the junction.
self.assertTrue(os.path.exists(target))
self.assertTrue(os.path.exists(juncpath))
self.assertTrue(juncpath.endswith(os.path.join("junc temp", "j")))
self.assertTrue(os.path.isabs(juncpath))
# Create a file under the junction.
filepath = os.path.join(juncpath, "some file.txt")
with open(filepath, "w") as f:
f.write("hello")
# Ensure we can reach the file via the junction and the target directory.
self.assertTrue(os.path.exists(os.path.join(target, "some file.txt")))
self.assertTrue(os.path.exists(os.path.join(juncpath, "some file.txt")))
# Ensure that after the `with` block the junction and temp directories no
# longer exist, but we can still reach the file via the target directory.
self.assertTrue(os.path.exists(os.path.join(target, "some file.txt")))
self.assertFalse(os.path.exists(os.path.join(juncpath, "some file.txt")))
self.assertFalse(os.path.exists(juncpath))
self.assertFalse(os.path.exists(os.path.dirname(juncpath)))
def testCreateJunctionWhenTargetsParentsDontExistAndPathIsShort(self):
self._AssertCreateJunctionWhenTargetsParentsDontExist()
def testCreateJunctionWhenTargetsParentsDontExistAndPathIsLong(self):
self._AssertCreateJunctionWhenTargetsParentsDontExist(1)
def testCannotCreateJunction(self):
def tempdir():
return self.ScratchDir("junc temp")
target = self.ScratchDir("junc target")
# Make the `target` path a non-normalized Windows path with a space in it.
# TempJunction should still work.
target = os.path.dirname(target) + "/junc target"
with junction.TempJunction(target, testonly_mkdtemp=tempdir) as j:
self.assertTrue(os.path.exists(j))
try:
# Ensure that TempJunction raises a JunctionCreationError if it cannot
# create a junction. In this case the junction already exists in that
# directory.
with junction.TempJunction(target, testonly_mkdtemp=tempdir) as _:
self.fail("Expected exception")
except junction.JunctionCreationError:
pass # expected
if __name__ == "__main__":
unittest.main()
| 40.978022
| 79
| 0.721641
|
fd27dcc9675602820d1a055171e8df6d6c604d18
| 86,713
|
py
|
Python
|
gbpservice/neutron/services/grouppolicy/plugin.py
|
mr-smart/group-based-policy
|
e3fac66aa2d1eb3afd713ff763259e85f0e461af
|
[
"Apache-2.0"
] | null | null | null |
gbpservice/neutron/services/grouppolicy/plugin.py
|
mr-smart/group-based-policy
|
e3fac66aa2d1eb3afd713ff763259e85f0e461af
|
[
"Apache-2.0"
] | null | null | null |
gbpservice/neutron/services/grouppolicy/plugin.py
|
mr-smart/group-based-policy
|
e3fac66aa2d1eb3afd713ff763259e85f0e461af
|
[
"Apache-2.0"
] | 1
|
2019-12-03T15:28:24.000Z
|
2019-12-03T15:28:24.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
import six
from neutron.db import api as db_api
from neutron.extensions import portbindings
from neutron.plugins.common import constants as pconst
from neutron.quota import resource_registry
from neutron_lib import constants
from neutron_lib import context as n_ctx
from neutron_lib.plugins import directory
from oslo_log import helpers as log
from oslo_log import log as logging
from oslo_utils import excutils
from gbpservice.common import utils as gbp_utils
from gbpservice.neutron.db.grouppolicy import group_policy_db as gpdb
from gbpservice.neutron.db.grouppolicy import group_policy_mapping_db
from gbpservice.neutron import extensions as gbp_extensions
from gbpservice.neutron.extensions import group_policy as gpex
from gbpservice.neutron.services.grouppolicy import (
extension_manager as ext_manager)
from gbpservice.neutron.services.grouppolicy import (
group_policy_context as p_context)
from gbpservice.neutron.services.grouppolicy import (
policy_driver_manager as manager)
from gbpservice.neutron.services.grouppolicy.common import constants as gp_cts
from gbpservice.neutron.services.grouppolicy.common import exceptions as gp_exc
from gbpservice.neutron.services.grouppolicy.common import utils
from gbpservice.neutron.services.servicechain.plugins.ncp import (
model as ncp_model)
LOG = logging.getLogger(__name__)
STATUS = 'status'
STATUS_DETAILS = 'status_details'
STATUS_SET = set([STATUS, STATUS_DETAILS])
class GroupPolicyPlugin(group_policy_mapping_db.GroupPolicyMappingDbPlugin):
"""Implementation of the Group Policy Model Plugin.
This class manages the workflow of Group Policy request/response.
    Most DB related work is implemented in the class
db_group_policy_mapping.GroupPolicyMappingDbMixin.
"""
_supported_extension_aliases = ["group-policy", "group-policy-mapping"]
path_prefix = gp_cts.GBP_PREFIXES[pconst.GROUP_POLICY]
@property
def supported_extension_aliases(self):
if not hasattr(self, '_aliases'):
aliases = self._supported_extension_aliases[:]
aliases += self.extension_manager.extension_aliases()
self._aliases = aliases
return self._aliases
def start_rpc_listeners(self):
return self.policy_driver_manager.start_rpc_listeners()
@property
def servicechain_plugin(self):
# REVISIT(rkukura): Need initialization method after all
# plugins are loaded to grab and store plugin.
servicechain_plugin = directory.get_plugin(pconst.SERVICECHAIN)
if not servicechain_plugin:
LOG.error("No Servicechain service plugin found.")
raise gp_exc.GroupPolicyDeploymentError()
return servicechain_plugin
# Shared attribute validation rules:
# - A shared resource cannot use/link a non-shared resource
# - A shared resource cannot be reverted to non-shared if used/linked by
# other shared resources, or by any resource owned by any other tenant
    # The usage graph below specifies which resources have to be checked to
    # validate sharing policy conformity:
# usage_graph = {<to_check>: {<attribute>: <type>}, ...}
# <attribute> is the field on the <to_check> dictionary that can be used
# to retrieve the UUID/s of the specific object <type>
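    # For example, the 'l2_policy' entry below states that the value of an
    # L2 policy's 'l3_policy_id' attribute identifies an 'l3_policy' object
    # whose sharing settings must be validated against the L2 policy's.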
usage_graph = {'l3_policy': {'external_segments':
'external_segment'},
'l2_policy': {'l3_policy_id': 'l3_policy'},
'policy_target_group': {
'network_service_policy_id': 'network_service_policy',
'l2_policy_id': 'l2_policy',
'provided_policy_rule_sets': 'policy_rule_set',
'consumed_policy_rule_sets': 'policy_rule_set'},
'network_service_policy': {},
'policy_rule': {
'policy_classifier_id': 'policy_classifier',
'policy_actions': 'policy_action'},
'policy_action': {},
'policy_classifier': {},
'policy_rule_set': {
'parent_id': 'policy_rule_set',
'policy_rules': 'policy_rule'},
'external_segment': {},
'external_policy': {
'external_segments': 'external_segment',
'provided_policy_rule_sets': 'policy_rule_set',
'consumed_policy_rule_sets': 'policy_rule_set'},
'nat_pool': {'external_segment_id':
'external_segment'},
'policy_target': {'policy_target_group_id':
'policy_target_group'},
'application_policy_group': {}
}
@staticmethod
def _validate_shared_create(self, context, obj, identity):
# REVISIT(ivar): only validate new references
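        # Note: this is a staticmethod, so callers pass the plugin instance
        # explicitly as `self`. For each attribute listed in the usage graph
        # for this resource type, load the referenced objects, verify sharing
        # and tenancy consistency for each, and fail if any referenced id is
        # missing.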
links = self.usage_graph.get(identity, {})
for attr in links:
ids = obj[attr]
if ids:
if isinstance(ids, six.string_types):
ids = [ids]
ref_type = links[attr]
linked_objects = getattr(
self, 'get_%s' % gbp_extensions.get_plural(ref_type))(
context, filters={'id': ids})
link_ids = set()
for linked in linked_objects:
link_ids.add(linked['id'])
GroupPolicyPlugin._verify_sharing_consistency(
obj, linked, identity, ref_type, context.is_admin)
# Check for missing references
missing = set(ids) - link_ids
if missing:
raise gpex.GbpResourceNotFound(identity=ref_type,
id=str(missing))
@staticmethod
def _validate_shared_update(self, context, original, updated, identity):
# Need admin context to check sharing constraints
# Even though the shared attribute may not be changed, the objects
        # it is referring to might. For this reason we run the reference
# validation every time a shared resource is updated
# TODO(ivar): run only when relevant updates happen
self._validate_shared_create(self, context, updated, identity)
if updated.get('shared') != original.get('shared'):
context = context.elevated()
getattr(self, '_validate_%s_unshare' % identity)(context, updated)
@staticmethod
def _check_shared_or_different_tenant(context, obj, method, attr,
value=None):
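        # Reject the update if any object returned by `method` (filtered on
        # `attr`) is itself shared or belongs to a different tenant than the
        # object being unshared.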
tenant_id = obj['tenant_id']
refs = method(context, filters={attr: value or [obj['id']]})
for ref in refs:
if ref.get('shared') or tenant_id != ref['tenant_id']:
raise gp_exc.InvalidSharedAttributeUpdate(id=obj['id'],
rid=ref['id'])
def _validate_l3_policy_unshare(self, context, obj):
self._check_shared_or_different_tenant(
context, obj, self.get_l2_policies, 'l3_policy_id')
def _validate_l2_policy_unshare(self, context, obj):
self._check_shared_or_different_tenant(
context, obj, self.get_policy_target_groups, 'l2_policy_id')
def _validate_policy_target_group_unshare(self, context, obj):
self._check_shared_or_different_tenant(
context, obj, self.get_policy_targets, 'policy_target_group_id')
def _validate_network_service_policy_unshare(self, context, obj):
self._check_shared_or_different_tenant(
context, obj, self.get_policy_target_groups,
'network_service_policy_id')
def _validate_policy_rule_set_unshare(self, context, obj):
self._check_shared_or_different_tenant(
context, obj, self.get_policy_target_groups, 'id',
obj['providing_policy_target_groups'] +
obj['consuming_policy_target_groups'])
self._check_shared_or_different_tenant(
context, obj, self.get_external_policies, 'id',
obj['providing_external_policies'] +
obj['consuming_external_policies'])
def _validate_policy_classifier_unshare(self, context, obj):
self._check_shared_or_different_tenant(
context, obj, self.get_policy_rules, 'policy_classifier_id')
def _validate_policy_rule_unshare(self, context, obj):
c_ids = self._get_policy_rule_policy_rule_sets(context, obj['id'])
self._check_shared_or_different_tenant(
context, obj, self.get_policy_rule_sets, 'id', c_ids)
def _validate_policy_action_unshare(self, context, obj):
r_ids = self._get_policy_action_rules(context, obj['id'])
self._check_shared_or_different_tenant(
context, obj, self.get_policy_rules, 'id', r_ids)
def _validate_external_segment_unshare(self, context, obj):
self._check_shared_or_different_tenant(
context, obj, self.get_l3_policies, 'id', obj['l3_policies'])
self._check_shared_or_different_tenant(
context, obj, self.get_external_policies, 'id',
obj['external_policies'])
self._check_shared_or_different_tenant(
context, obj, self.get_nat_pools, 'external_segment_id')
def _validate_external_policy_unshare(self, context, obj):
pass
def _validate_nat_pool_unshare(self, context, obj):
pass
def _validate_routes(self, context, current, original=None):
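        # Only newly added (destination, nexthop) pairs are validated: they
        # must not overlap the ip_pool of any associated L3 policy, must not
        # duplicate routes of other external segments linked to the same L3
        # policy, and every next hop must fall inside this segment's cidr.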
if original:
added = (set((x['destination'], x['nexthop']) for x in
current['external_routes']) -
set((x['destination'], x['nexthop']) for x in
original['external_routes']))
else:
added = set((x['destination'], x['nexthop']) for x in
current['external_routes'])
if added:
# Verify new ones don't overlap with the existing L3P
added_dest = set(x[0] for x in added)
# Remove default routes
added_dest.discard('0.0.0.0/0')
added_dest.discard('::/0')
added_ipset = netaddr.IPSet(added_dest)
if current['l3_policies']:
l3ps = self.get_l3_policies(
context, filters={'id': current['l3_policies']})
for l3p in l3ps:
ip_pool_list = utils.convert_ip_pool_string_to_list(
l3p['ip_pool'])
if netaddr.IPSet(ip_pool_list) & added_ipset:
raise gp_exc.ExternalRouteOverlapsWithL3PIpPool(
destination=added_dest, l3p_id=l3p['id'],
es_id=current['id'])
es_list = [current]
es_list.extend(self.get_external_segments(
context.elevated(),
filters={'id': [e for e in l3p['external_segments']
if e != current['id']]}))
self._validate_identical_external_routes(es_list)
            # Verify each added next hop falls within the external segment cidr
added_nexthop = netaddr.IPSet(x[1] for x in added if x[1])
es_subnet = netaddr.IPSet([current['cidr']])
if added_nexthop & es_subnet != added_nexthop:
raise gp_exc.ExternalRouteNextHopNotInExternalSegment(
cidr=current['cidr'])
def _validate_l3p_es(self, context, current, original=None):
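        # Validate newly associated external segments: their routes and cidr
        # must not overlap the L3 policy ip_pool, explicitly requested
        # addresses must lie inside the segment subnet, and the resulting set
        # of segments must not expose identical external routes.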
if original:
added = (set(current['external_segments'].keys()) -
set(original['external_segments'].keys()))
else:
added = set(current['external_segments'].keys())
if added:
es_list = self.get_external_segments(context,
filters={'id': added})
ip_pool_list = utils.convert_ip_pool_string_to_list(
current['ip_pool'])
l3p_ipset = netaddr.IPSet(ip_pool_list)
for es in es_list:
# Verify no route overlap
dest_set = set(x['destination'] for x in
es['external_routes'])
dest_set.discard('0.0.0.0/0')
dest_set.discard('::/0')
if l3p_ipset & netaddr.IPSet(dest_set):
raise gp_exc.ExternalRouteOverlapsWithL3PIpPool(
destination=dest_set, l3p_id=current['id'],
es_id=es['id'])
# Verify segment CIDR doesn't overlap with L3P's
cidr = es['cidr']
if es['subnet_id']:
core_plugin = directory.get_plugin()
cidr = core_plugin.get_subnet(context,
es['subnet_id'])['cidr']
if l3p_ipset & netaddr.IPSet([cidr]):
raise gp_exc.ExternalSegmentSubnetOverlapsWithL3PIpPool(
subnet=cidr, l3p_id=current['id'],
es_id=current['id'])
                # Verify each explicitly allocated address is in the subnet
for addr in current['external_segments'][es['id']]:
if addr != gpdb.ADDRESS_NOT_SPECIFIED:
if addr not in netaddr.IPNetwork(cidr):
raise gp_exc.InvalidL3PExternalIPAddress(
ip=addr, es_id=es['id'], l3p_id=current['id'],
es_cidr=cidr)
es_list_all = self.get_external_segments(
context.elevated(),
filters={'id': current['external_segments'].keys()})
self._validate_identical_external_routes(es_list_all)
def _validate_identical_external_routes(self, es_list):
if len(es_list) < 2:
return
route_dict = {netaddr.IPNetwork(route['destination']).cidr: es
for es in es_list[1:]
for route in es['external_routes']}
for route in es_list[0]['external_routes']:
cidr = netaddr.IPNetwork(route['destination']).cidr
if cidr in route_dict:
raise gp_exc.IdenticalExternalRoute(
es1=es_list[0]['id'], es2=route_dict[cidr]['id'],
cidr=cidr)
def _validate_action_value(self, context, action):
if action.get('action_type') == gp_cts.GP_ACTION_REDIRECT:
if action.get('action_value'):
                # Verify the servicechain spec exists and its sharing is
                # consistent
spec = self.servicechain_plugin.get_servicechain_spec(
context, action['action_value'])
GroupPolicyPlugin._verify_sharing_consistency(
action, spec, 'policy_action', 'servicechain_spec',
context.is_admin)
@staticmethod
def _verify_sharing_consistency(primary, reference, primary_type,
reference_type, is_admin):
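        # A shared primary may only reference shared resources; when the
        # reference is not shared, non-admin users must own both objects
        # under the same tenant.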
if not reference.get('shared'):
if primary.get('shared'):
raise gp_exc.SharedResourceReferenceError(
res_type=primary_type, res_id=primary['id'],
ref_type=reference_type, ref_id=reference['id'])
if not is_admin:
if primary.get('tenant_id') != reference.get('tenant_id'):
raise gp_exc.InvalidCrossTenantReference(
res_type=primary_type, res_id=primary['id'],
ref_type=reference_type, ref_id=reference['id'])
def _get_status_from_drivers(self, context, context_name, resource_name,
resource_id, resource):
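        # Let the policy drivers recompute status/status_details for the
        # resource; if the values changed, persist them before returning the
        # refreshed resource.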
status = resource['status']
status_details = resource['status_details']
policy_context = getattr(p_context, context_name)(
self, context, resource, resource)
getattr(self.policy_driver_manager,
"get_" + resource_name + "_status")(policy_context)
_resource = getattr(policy_context, "_" + resource_name)
updated_status = _resource['status']
updated_status_details = _resource['status_details']
if status != updated_status or (
status_details != updated_status_details):
new_status = {resource_name: {'status': updated_status,
'status_details':
updated_status_details}}
session = context.session
with session.begin(subtransactions=True):
getattr(super(GroupPolicyPlugin, self),
"update_" + resource_name)(
context, _resource['id'], new_status)
resource['status'] = updated_status
resource['status_details'] = updated_status_details
return resource
def _get_resource(self, context, resource_name, resource_id,
gbp_context_name, fields=None):
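        # Fetch a single resource, apply extension data, and refresh its
        # status from the drivers unless the caller explicitly excluded the
        # status fields.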
session = context.session
with session.begin(subtransactions=True):
get_method = "".join(['get_', resource_name])
result = getattr(super(GroupPolicyPlugin, self), get_method)(
context, resource_id, None)
extend_resources_method = "".join(['extend_', resource_name,
'_dict'])
getattr(self.extension_manager, extend_resources_method)(
session, result)
# Invoke drivers only if status attributes are requested
if not fields or STATUS_SET.intersection(set(fields)):
result = self._get_status_from_drivers(
context, gbp_context_name, resource_name, resource_id, result)
return self._fields(result, fields)
def _get_resources(self, context, resource_name, gbp_context_name,
filters=None, fields=None, sorts=None, limit=None,
marker=None, page_reverse=False):
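        # List variant of _get_resource: extend every result with extension
        # data, re-apply the filters on the extended fields, and query the
        # drivers for status only when status fields were requested.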
session = context.session
with session.begin(subtransactions=True):
resource_plural = gbp_utils.get_resource_plural(resource_name)
get_resources_method = "".join(['get_', resource_plural])
results = getattr(super(GroupPolicyPlugin, self),
get_resources_method)(
context, filters, None, sorts, limit, marker, page_reverse)
filtered_results = []
for result in results:
extend_resources_method = "".join(['extend_', resource_name,
'_dict'])
getattr(self.extension_manager, extend_resources_method)(
session, result)
filtered = self._filter_extended_result(result, filters)
if filtered:
filtered_results.append(filtered)
new_filtered_results = []
# Invoke drivers only if status attributes are requested
if not fields or STATUS_SET.intersection(set(fields)):
for result in filtered_results:
result = self._get_status_from_drivers(
context, gbp_context_name, resource_name, result['id'],
result)
new_filtered_results.append(result)
new_filtered_results = new_filtered_results or filtered_results
return [self._fields(nfresult, fields) for nfresult in
new_filtered_results]
@resource_registry.tracked_resources(
l3_policy=group_policy_mapping_db.L3PolicyMapping,
l2_policy=group_policy_mapping_db.L2PolicyMapping,
policy_target=group_policy_mapping_db.PolicyTargetMapping,
policy_target_group=group_policy_mapping_db.PolicyTargetGroupMapping,
application_policy_group=gpdb.ApplicationPolicyGroup,
policy_classifier=gpdb.PolicyClassifier,
policy_action=gpdb.PolicyAction,
policy_rule=gpdb.PolicyRule,
policy_rule_set=gpdb.PolicyRuleSet,
external_policy=gpdb.ExternalPolicy,
external_segment=group_policy_mapping_db.ExternalSegmentMapping,
nat_pool=group_policy_mapping_db.NATPoolMapping,
network_service_policy=gpdb.NetworkServicePolicy)
def __init__(self):
self.extension_manager = ext_manager.ExtensionManager()
self.policy_driver_manager = manager.PolicyDriverManager()
super(GroupPolicyPlugin, self).__init__()
self.extension_manager.initialize()
self.policy_driver_manager.initialize()
def _filter_extended_result(self, result, filters):
filters = filters or {}
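        # for/else: the result is returned only when no known filter field
        # mismatches; a break (mismatch) makes this method return None, so
        # the caller drops the result.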
for field in filters:
# Ignore unknown fields
if field in result:
if result[field] not in filters[field]:
break
else:
return result
def _add_fixed_ips_to_port_attributes(self, policy_target):
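        # If explicit fixed_ips were requested for the policy target, copy
        # them into a 'port_attributes' dict so that they can be applied to
        # the underlying port (presumably by the policy drivers).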
if 'fixed_ips' in policy_target['policy_target'] and (
policy_target['policy_target']['fixed_ips'] is not (
constants.ATTR_NOT_SPECIFIED)):
port_attributes = {'fixed_ips': policy_target[
'policy_target']['fixed_ips']}
policy_target['policy_target'].update(
{'port_attributes': port_attributes})
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_policy_target(self, context, policy_target):
self._ensure_tenant(context, policy_target['policy_target'])
self._add_fixed_ips_to_port_attributes(policy_target)
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_policy_target(context, policy_target)
self.extension_manager.process_create_policy_target(
session, policy_target, result)
self._validate_shared_create(
self, context, result, 'policy_target')
policy_context = p_context.PolicyTargetContext(self, context,
result)
self.policy_driver_manager.create_policy_target_precommit(
policy_context)
try:
self.policy_driver_manager.create_policy_target_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("create_policy_target_postcommit "
"failed, deleting policy_target %s",
result['id'])
self.delete_policy_target(context, result['id'])
return self.get_policy_target(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_policy_target(self, context, policy_target_id, policy_target):
self._add_fixed_ips_to_port_attributes(policy_target)
session = context.session
with session.begin(subtransactions=True):
original_policy_target = self.get_policy_target(context,
policy_target_id)
updated_policy_target = super(
GroupPolicyPlugin, self).update_policy_target(
context, policy_target_id, policy_target)
self.extension_manager.process_update_policy_target(
session, policy_target, updated_policy_target)
self._validate_shared_update(self, context, original_policy_target,
updated_policy_target,
'policy_target')
policy_context = p_context.PolicyTargetContext(
self, context, updated_policy_target,
original_policy_target=original_policy_target)
self.policy_driver_manager.update_policy_target_precommit(
policy_context)
self.policy_driver_manager.update_policy_target_postcommit(
policy_context)
return self.get_policy_target(context, policy_target_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_policy_target(self, context, policy_target_id):
session = context.session
with session.begin(subtransactions=True):
policy_target = self.get_policy_target(context, policy_target_id)
policy_context = p_context.PolicyTargetContext(
self, context, policy_target)
self.policy_driver_manager.delete_policy_target_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_policy_target(
context, policy_target_id)
try:
self.policy_driver_manager.delete_policy_target_postcommit(
policy_context)
except Exception:
LOG.exception("delete_policy_target_postcommit failed "
"for policy_target %s",
policy_target_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def get_policy_target(self, context, policy_target_id, fields=None):
return self._get_resource(context, 'policy_target', policy_target_id,
'PolicyTargetContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def get_policy_targets(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'policy_target', 'PolicyTargetContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_policy_target_group(self, context, policy_target_group):
self._ensure_tenant(context,
policy_target_group['policy_target_group'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_policy_target_group(
context, policy_target_group)
self.extension_manager.process_create_policy_target_group(
session, policy_target_group, result)
self._validate_shared_create(self, context, result,
'policy_target_group')
policy_context = p_context.PolicyTargetGroupContext(
self, context, result)
self.policy_driver_manager.create_policy_target_group_precommit(
policy_context)
try:
self.policy_driver_manager.create_policy_target_group_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("create_policy_target_group_postcommit "
"failed, deleting policy_target_group %s",
result['id'])
self.delete_policy_target_group(context, result['id'])
return self.get_policy_target_group(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_policy_target_group(self, context, policy_target_group_id,
policy_target_group):
session = context.session
with session.begin(subtransactions=True):
original_policy_target_group = self.get_policy_target_group(
context, policy_target_group_id)
updated_policy_target_group = super(
GroupPolicyPlugin, self).update_policy_target_group(
context, policy_target_group_id, policy_target_group)
# REVISIT(rkukura): We could potentially allow updates to
# l2_policy_id when no policy targets exist. This would
# involve removing each old subnet from the l3_policy's
# router, deleting each old subnet, creating a new subnet on
# the new l2_policy's network, and adding that subnet to the
            # l3_policy's router in postcommit. It's also possible that new
# subnet[s] would be provided explicitly as part of the
# update.
old_l2p = original_policy_target_group['l2_policy_id']
new_l2p = updated_policy_target_group['l2_policy_id']
if old_l2p and old_l2p != new_l2p:
raise gp_exc.L2PolicyUpdateOfPolicyTargetGroupNotSupported()
self.extension_manager.process_update_policy_target_group(
session, policy_target_group, updated_policy_target_group)
self._validate_shared_update(
self, context, original_policy_target_group,
updated_policy_target_group, 'policy_target_group')
policy_context = p_context.PolicyTargetGroupContext(
self, context, updated_policy_target_group,
original_policy_target_group=original_policy_target_group)
self.policy_driver_manager.update_policy_target_group_precommit(
policy_context)
self.policy_driver_manager.update_policy_target_group_postcommit(
policy_context)
return self.get_policy_target_group(context, policy_target_group_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_policy_target_group(self, context, policy_target_group_id):
session = context.session
with session.begin(subtransactions=True):
policy_target_group = self.get_policy_target_group(
context, policy_target_group_id)
pt_ids = policy_target_group['policy_targets']
for pt in self.get_policy_targets(context.elevated(),
{'id': pt_ids}):
if (pt['port_id'] and self._is_port_bound(pt['port_id'])
and not (self._is_service_target(context, pt['id']))):
raise gp_exc.PolicyTargetGroupInUse(
policy_target_group=policy_target_group_id)
policy_context = p_context.PolicyTargetGroupContext(
self, context, policy_target_group)
self.policy_driver_manager.delete_policy_target_group_precommit(
policy_context)
        # Disassociate all the PRSs first; this will trigger service chain
        # deletion.
self.update_policy_target_group(
context, policy_target_group_id,
{'policy_target_group': {'provided_policy_rule_sets': {},
'consumed_policy_rule_sets': {}}})
policy_context.current['provided_policy_rule_sets'] = []
policy_context.current['consumed_policy_rule_sets'] = []
# Proxy PTGs must be deleted before the group itself
if policy_target_group.get('proxy_group_id'):
try:
self.delete_policy_target_group(
context, policy_target_group['proxy_group_id'])
except gpex.PolicyTargetGroupNotFound:
LOG.warning('PTG %s already deleted',
policy_target_group['proxy_group_id'])
with session.begin(subtransactions=True):
for pt in self.get_policy_targets(context, {'id': pt_ids}):
# We will allow PTG deletion if all PTs are unused.
# We could have cleaned these opportunistically in
# the previous loop, but we will keep it simple,
# such that either all unused PTs are deleted
# or nothing is.
self.delete_policy_target(context, pt['id'])
super(GroupPolicyPlugin, self).delete_policy_target_group(
context, policy_target_group_id)
try:
self.policy_driver_manager.delete_policy_target_group_postcommit(
policy_context)
except Exception:
LOG.exception("delete_policy_target_group_postcommit failed "
"for policy_target_group %s",
policy_target_group_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_target_group(self, context, policy_target_group_id,
fields=None):
return self._get_resource(context, 'policy_target_group',
policy_target_group_id,
'PolicyTargetGroupContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_target_groups(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'policy_target_group', 'PolicyTargetGroupContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_application_policy_group(self, context,
application_policy_group):
self._ensure_tenant(
context, application_policy_group['application_policy_group'])
session = context.session
pdm = self.policy_driver_manager
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_application_policy_group(
context, application_policy_group)
self.extension_manager.process_create_application_policy_group(
session, application_policy_group, result)
self._validate_shared_create(self, context, result,
'application_policy_group')
policy_context = p_context.ApplicationPolicyGroupContext(
self, context, result)
pdm.create_application_policy_group_precommit(policy_context)
try:
pdm.create_application_policy_group_postcommit(policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("create_application_policy_group_postcommit "
"failed, deleting APG %s",
result['id'])
self.delete_application_policy_group(context, result['id'])
return self.get_application_policy_group(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_application_policy_group(self, context,
application_policy_group_id,
application_policy_group):
session = context.session
pdm = self.policy_driver_manager
with session.begin(subtransactions=True):
original_application_policy_group = (
self.get_application_policy_group(
context, application_policy_group_id))
updated_application_policy_group = super(
GroupPolicyPlugin, self).update_application_policy_group(
context, application_policy_group_id,
application_policy_group)
self.extension_manager.process_update_application_policy_group(
session, application_policy_group,
updated_application_policy_group)
self._validate_shared_update(
self, context, original_application_policy_group,
updated_application_policy_group, 'application_policy_group')
policy_context = p_context.ApplicationPolicyGroupContext(
self, context, updated_application_policy_group,
original_application_policy_group=
original_application_policy_group)
pdm.update_application_policy_group_precommit(policy_context)
pdm.update_application_policy_group_postcommit(policy_context)
return self.get_application_policy_group(context,
application_policy_group_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_application_policy_group(self, context,
application_policy_group_id):
session = context.session
pdm = self.policy_driver_manager
with session.begin(subtransactions=True):
application_policy_group = self.get_application_policy_group(
context, application_policy_group_id)
policy_context = p_context.ApplicationPolicyGroupContext(
self, context, application_policy_group)
pdm.delete_application_policy_group_precommit(policy_context)
super(GroupPolicyPlugin, self).delete_application_policy_group(
context, application_policy_group_id)
try:
pdm.delete_application_policy_group_postcommit(policy_context)
except Exception:
LOG.exception("delete_application_policy_group_postcommit "
"failed for application_policy_group %s",
application_policy_group_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_application_policy_group(self, context,
application_policy_group_id, fields=None):
return self._get_resource(context, 'application_policy_group',
application_policy_group_id,
'ApplicationPolicyGroupContext',
fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_application_policy_groups(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'application_policy_group',
'ApplicationPolicyGroupContext', filters=filters, fields=fields,
sorts=sorts, limit=limit, marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_l2_policy(self, context, l2_policy):
self._ensure_tenant(context, l2_policy['l2_policy'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_l2_policy(context, l2_policy)
self.extension_manager.process_create_l2_policy(
session, l2_policy, result)
self._validate_shared_create(self, context, result, 'l2_policy')
policy_context = p_context.L2PolicyContext(self, context, result)
self.policy_driver_manager.create_l2_policy_precommit(
policy_context)
try:
self.policy_driver_manager.create_l2_policy_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("create_l2_policy_postcommit "
"failed, deleting l2_policy %s",
result['id'])
self.delete_l2_policy(context, result['id'])
return self.get_l2_policy(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_l2_policy(self, context, l2_policy_id, l2_policy):
session = context.session
with session.begin(subtransactions=True):
original_l2_policy = self.get_l2_policy(context, l2_policy_id)
updated_l2_policy = super(GroupPolicyPlugin,
self).update_l2_policy(
context, l2_policy_id, l2_policy)
self.extension_manager.process_update_l2_policy(
session, l2_policy, updated_l2_policy)
self._validate_shared_update(self, context, original_l2_policy,
updated_l2_policy, 'l2_policy')
policy_context = p_context.L2PolicyContext(
self, context, updated_l2_policy,
original_l2_policy=original_l2_policy)
self.policy_driver_manager.update_l2_policy_precommit(
policy_context)
self.policy_driver_manager.update_l2_policy_postcommit(
policy_context)
return self.get_l2_policy(context, l2_policy_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_l2_policy(self, context, l2_policy_id):
session = context.session
with session.begin(subtransactions=True):
l2_policy = self.get_l2_policy(context, l2_policy_id)
policy_context = p_context.L2PolicyContext(self, context,
l2_policy)
self.policy_driver_manager.delete_l2_policy_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_l2_policy(context,
l2_policy_id)
try:
self.policy_driver_manager.delete_l2_policy_postcommit(
policy_context)
except Exception:
LOG.exception("delete_l2_policy_postcommit failed "
"for l2_policy %s", l2_policy_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_l2_policy(self, context, l2_policy_id, fields=None):
return self._get_resource(context, 'l2_policy',
l2_policy_id,
'L2PolicyContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_l2_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'l2_policy', 'L2PolicyContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_network_service_policy(self, context, network_service_policy):
self._ensure_tenant(
context, network_service_policy['network_service_policy'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_network_service_policy(
context, network_service_policy)
self.extension_manager.process_create_network_service_policy(
session, network_service_policy, result)
self._validate_shared_create(self, context, result,
'network_service_policy')
policy_context = p_context.NetworkServicePolicyContext(
self, context, result)
pdm = self.policy_driver_manager
pdm.create_network_service_policy_precommit(
policy_context)
try:
pdm.create_network_service_policy_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(
"create_network_service_policy_postcommit "
"failed, deleting network_service_policy %s",
result['id'])
self.delete_network_service_policy(context, result['id'])
return self.get_network_service_policy(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_network_service_policy(self, context, network_service_policy_id,
network_service_policy):
session = context.session
with session.begin(subtransactions=True):
original_network_service_policy = super(
GroupPolicyPlugin, self).get_network_service_policy(
context, network_service_policy_id)
updated_network_service_policy = super(
GroupPolicyPlugin, self).update_network_service_policy(
context, network_service_policy_id, network_service_policy)
self.extension_manager.process_update_network_service_policy(
session, network_service_policy,
updated_network_service_policy)
self._validate_shared_update(
self, context, original_network_service_policy,
updated_network_service_policy, 'network_service_policy')
policy_context = p_context.NetworkServicePolicyContext(
self, context, updated_network_service_policy,
original_network_service_policy=
original_network_service_policy)
self.policy_driver_manager.update_network_service_policy_precommit(
policy_context)
self.policy_driver_manager.update_network_service_policy_postcommit(
policy_context)
return self.get_network_service_policy(context,
network_service_policy_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_network_service_policy(
self, context, network_service_policy_id):
session = context.session
with session.begin(subtransactions=True):
network_service_policy = self.get_network_service_policy(
context, network_service_policy_id)
policy_context = p_context.NetworkServicePolicyContext(
self, context, network_service_policy)
self.policy_driver_manager.delete_network_service_policy_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_network_service_policy(
context, network_service_policy_id)
try:
pdm = self.policy_driver_manager
pdm.delete_network_service_policy_postcommit(policy_context)
except Exception:
LOG.exception(
"delete_network_service_policy_postcommit failed "
"for network_service_policy %s", network_service_policy_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_network_service_policy(self, context, network_service_policy_id,
fields=None):
return self._get_resource(context, 'network_service_policy',
network_service_policy_id,
'NetworkServicePolicyContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_network_service_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'network_service_policy', 'NetworkServicePolicyContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_l3_policy(self, context, l3_policy):
self._ensure_tenant(context, l3_policy['l3_policy'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_l3_policy(context, l3_policy)
self.extension_manager.process_create_l3_policy(
session, l3_policy, result)
self._validate_shared_create(self, context, result, 'l3_policy')
self._validate_l3p_es(context, result)
policy_context = p_context.L3PolicyContext(self, context,
result)
self.policy_driver_manager.create_l3_policy_precommit(
policy_context)
try:
self.policy_driver_manager.create_l3_policy_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("create_l3_policy_postcommit "
"failed, deleting l3_policy %s",
result['id'])
self.delete_l3_policy(context, result['id'])
return self.get_l3_policy(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_l3_policy(self, context, l3_policy_id, l3_policy):
session = context.session
with session.begin(subtransactions=True):
original_l3_policy = self.get_l3_policy(context, l3_policy_id)
updated_l3_policy = super(
GroupPolicyPlugin, self).update_l3_policy(
context, l3_policy_id, l3_policy)
self.extension_manager.process_update_l3_policy(
session, l3_policy, updated_l3_policy)
self._validate_shared_update(self, context, original_l3_policy,
updated_l3_policy, 'l3_policy')
self._validate_l3p_es(context, updated_l3_policy,
original_l3_policy)
policy_context = p_context.L3PolicyContext(
self, context, updated_l3_policy,
original_l3_policy=original_l3_policy)
self.policy_driver_manager.update_l3_policy_precommit(
policy_context)
self.policy_driver_manager.update_l3_policy_postcommit(
policy_context)
return self.get_l3_policy(context, l3_policy_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_l3_policy(self, context, l3_policy_id, check_unused=False):
session = context.session
with session.begin(subtransactions=True):
if (check_unused and
(session.query(group_policy_mapping_db.L2PolicyMapping).
filter_by(l3_policy_id=l3_policy_id).count())):
return False
l3_policy = self.get_l3_policy(context, l3_policy_id)
policy_context = p_context.L3PolicyContext(self, context,
l3_policy)
self.policy_driver_manager.delete_l3_policy_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_l3_policy(context,
l3_policy_id)
try:
self.policy_driver_manager.delete_l3_policy_postcommit(
policy_context)
except Exception:
LOG.exception("delete_l3_policy_postcommit failed "
"for l3_policy %s", l3_policy_id)
return True
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_l3_policy(self, context, l3_policy_id, fields=None):
return self._get_resource(context, 'l3_policy',
l3_policy_id,
'L3PolicyContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_l3_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'l3_policy', 'L3PolicyContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_policy_classifier(self, context, policy_classifier):
self._ensure_tenant(context,
policy_classifier['policy_classifier'])
session = context.session
with session.begin(subtransactions=True):
result = super(
GroupPolicyPlugin, self).create_policy_classifier(
context, policy_classifier)
self.extension_manager.process_create_policy_classifier(
session, policy_classifier, result)
self._validate_shared_create(
self, context, result, 'policy_classifier')
policy_context = p_context.PolicyClassifierContext(self, context,
result)
self.policy_driver_manager.create_policy_classifier_precommit(
policy_context)
try:
self.policy_driver_manager.create_policy_classifier_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(
"policy_driver_manager.create_policy_classifier_postcommit"
" failed, deleting policy_classifier %s", result['id'])
self.delete_policy_classifier(context, result['id'])
return self.get_policy_classifier(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_policy_classifier(self, context, id, policy_classifier):
session = context.session
with session.begin(subtransactions=True):
original_policy_classifier = super(
GroupPolicyPlugin, self).get_policy_classifier(context, id)
updated_policy_classifier = super(
GroupPolicyPlugin, self).update_policy_classifier(
context, id, policy_classifier)
self.extension_manager.process_update_policy_classifier(
session, policy_classifier, updated_policy_classifier)
self._validate_shared_update(
self, context, original_policy_classifier,
updated_policy_classifier, 'policy_classifier')
policy_context = p_context.PolicyClassifierContext(
self, context, updated_policy_classifier,
original_policy_classifier=original_policy_classifier)
self.policy_driver_manager.update_policy_classifier_precommit(
policy_context)
self.policy_driver_manager.update_policy_classifier_postcommit(
policy_context)
return self.get_policy_classifier(context, id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_policy_classifier(self, context, id):
session = context.session
with session.begin(subtransactions=True):
policy_classifier = self.get_policy_classifier(context, id)
policy_context = p_context.PolicyClassifierContext(
self, context, policy_classifier)
self.policy_driver_manager.delete_policy_classifier_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_policy_classifier(
context, id)
try:
self.policy_driver_manager.delete_policy_classifier_postcommit(
policy_context)
except Exception:
LOG.exception("delete_policy_classifier_postcommit failed "
"for policy_classifier %s", id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_classifier(self, context, policy_classifier_id,
fields=None):
return self._get_resource(context, 'policy_classifier',
policy_classifier_id,
'PolicyClassifierContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_classifiers(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'policy_classifier', 'PolicyClassifierContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_policy_action(self, context, policy_action):
self._ensure_tenant(context, policy_action['policy_action'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_policy_action(context, policy_action)
self.extension_manager.process_create_policy_action(
session, policy_action, result)
self._validate_shared_create(self, context, result,
'policy_action')
self._validate_action_value(context, result)
policy_context = p_context.PolicyActionContext(self, context,
result)
self.policy_driver_manager.create_policy_action_precommit(
policy_context)
try:
self.policy_driver_manager.create_policy_action_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(
"policy_driver_manager.create_policy_action_postcommit "
"failed, deleting policy_action %s", result['id'])
self.delete_policy_action(context, result['id'])
return self.get_policy_action(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_policy_action(self, context, id, policy_action):
session = context.session
with session.begin(subtransactions=True):
original_policy_action = super(
GroupPolicyPlugin, self).get_policy_action(context, id)
updated_policy_action = super(
GroupPolicyPlugin, self).update_policy_action(context, id,
policy_action)
self.extension_manager.process_update_policy_action(
session, policy_action, updated_policy_action)
self._validate_shared_update(self, context, original_policy_action,
updated_policy_action,
'policy_action')
self._validate_action_value(context, updated_policy_action)
policy_context = p_context.PolicyActionContext(
self, context, updated_policy_action,
original_policy_action=original_policy_action)
self.policy_driver_manager.update_policy_action_precommit(
policy_context)
self.policy_driver_manager.update_policy_action_postcommit(
policy_context)
return self.get_policy_action(context, id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_policy_action(self, context, id):
session = context.session
with session.begin(subtransactions=True):
policy_action = self.get_policy_action(context, id)
policy_context = p_context.PolicyActionContext(self, context,
policy_action)
self.policy_driver_manager.delete_policy_action_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_policy_action(context, id)
try:
self.policy_driver_manager.delete_policy_action_postcommit(
policy_context)
except Exception:
LOG.exception("delete_policy_action_postcommit failed "
"for policy_action %s", id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_action(self, context, policy_action_id, fields=None):
return self._get_resource(context, 'policy_action',
policy_action_id,
'PolicyActionContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_actions(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'policy_action', 'PolicyActionContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_policy_rule(self, context, policy_rule):
self._ensure_tenant(context, policy_rule['policy_rule'])
session = context.session
with session.begin(subtransactions=True):
result = super(
GroupPolicyPlugin, self).create_policy_rule(
context, policy_rule)
self.extension_manager.process_create_policy_rule(
session, policy_rule, result)
self._validate_shared_create(self, context, result, 'policy_rule')
policy_context = p_context.PolicyRuleContext(self, context,
result)
self.policy_driver_manager.create_policy_rule_precommit(
policy_context)
try:
self.policy_driver_manager.create_policy_rule_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(
"policy_driver_manager.create_policy_rule_postcommit"
" failed, deleting policy_rule %s", result['id'])
self.delete_policy_rule(context, result['id'])
return self.get_policy_rule(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_policy_rule(self, context, id, policy_rule):
session = context.session
with session.begin(subtransactions=True):
original_policy_rule = super(
GroupPolicyPlugin, self).get_policy_rule(context, id)
updated_policy_rule = super(
GroupPolicyPlugin, self).update_policy_rule(
context, id, policy_rule)
self.extension_manager.process_update_policy_rule(
session, policy_rule, updated_policy_rule)
self._validate_shared_update(self, context, original_policy_rule,
updated_policy_rule, 'policy_rule')
policy_context = p_context.PolicyRuleContext(
self, context, updated_policy_rule,
original_policy_rule=original_policy_rule)
self.policy_driver_manager.update_policy_rule_precommit(
policy_context)
self.policy_driver_manager.update_policy_rule_postcommit(
policy_context)
return self.get_policy_rule(context, id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_policy_rule(self, context, id):
session = context.session
with session.begin(subtransactions=True):
policy_rule = self.get_policy_rule(context, id)
policy_context = p_context.PolicyRuleContext(self, context,
policy_rule)
self.policy_driver_manager.delete_policy_rule_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_policy_rule(
context, id)
try:
self.policy_driver_manager.delete_policy_rule_postcommit(
policy_context)
except Exception:
LOG.exception("delete_policy_rule_postcommit failed "
"for policy_rule %s", id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_rule(self, context, policy_rule_id, fields=None):
return self._get_resource(context, 'policy_rule',
policy_rule_id,
'PolicyRuleContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_rules(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'policy_rule', 'PolicyRuleContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_policy_rule_set(self, context, policy_rule_set):
self._ensure_tenant(context, policy_rule_set['policy_rule_set'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_policy_rule_set(
context, policy_rule_set)
self.extension_manager.process_create_policy_rule_set(
session, policy_rule_set, result)
self._validate_shared_create(
self, context, result, 'policy_rule_set')
policy_context = p_context.PolicyRuleSetContext(
self, context, result)
self.policy_driver_manager.create_policy_rule_set_precommit(
policy_context)
try:
self.policy_driver_manager.create_policy_rule_set_postcommit(
policy_context)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(
"policy_driver_manager.create_policy_rule_set_postcommit "
"failed, deleting policy_rule_set %s", result['id'])
self.delete_policy_rule_set(context, result['id'])
return self.get_policy_rule_set(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_policy_rule_set(self, context, id, policy_rule_set):
session = context.session
with session.begin(subtransactions=True):
original_policy_rule_set = super(
GroupPolicyPlugin, self).get_policy_rule_set(context, id)
updated_policy_rule_set = super(
GroupPolicyPlugin, self).update_policy_rule_set(
context, id, policy_rule_set)
self.extension_manager.process_update_policy_rule_set(
session, policy_rule_set, updated_policy_rule_set)
self._validate_shared_update(
self, context, original_policy_rule_set,
updated_policy_rule_set, 'policy_rule_set')
policy_context = p_context.PolicyRuleSetContext(
self, context, updated_policy_rule_set,
original_policy_rule_set=original_policy_rule_set)
self.policy_driver_manager.update_policy_rule_set_precommit(
policy_context)
self.policy_driver_manager.update_policy_rule_set_postcommit(
policy_context)
return self.get_policy_rule_set(context, id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_policy_rule_set(self, context, id):
session = context.session
with session.begin(subtransactions=True):
policy_rule_set = self.get_policy_rule_set(context, id)
policy_context = p_context.PolicyRuleSetContext(
self, context, policy_rule_set)
self.policy_driver_manager.delete_policy_rule_set_precommit(
policy_context)
super(GroupPolicyPlugin, self).delete_policy_rule_set(context, id)
try:
self.policy_driver_manager.delete_policy_rule_set_postcommit(
policy_context)
except Exception:
LOG.exception("delete_policy_rule_set_postcommit failed "
"for policy_rule_set %s", id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_rule_set(self, context, policy_rule_set_id, fields=None):
return self._get_resource(context, 'policy_rule_set',
policy_rule_set_id,
'PolicyRuleSetContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_policy_rule_sets(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'policy_rule_set', 'PolicyRuleSetContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_external_segment(self, context, external_segment):
self._ensure_tenant(context, external_segment['external_segment'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_external_segment(context,
external_segment)
self.extension_manager.process_create_external_segment(
session, external_segment, result)
self._validate_shared_create(self, context, result,
'external_segment')
policy_context = p_context.ExternalSegmentContext(
self, context, result)
(self.policy_driver_manager.
create_external_segment_precommit(policy_context))
            # Validate the routes after the drivers have had the chance to
            # fill in the cidr field.
self._validate_routes(context, result)
try:
(self.policy_driver_manager.
create_external_segment_postcommit(policy_context))
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("create_external_segment_postcommit "
"failed, deleting external_segment "
"%s", result['id'])
self.delete_external_segment(context, result['id'])
return self.get_external_segment(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_external_segment(self, context, external_segment_id,
external_segment):
session = context.session
with session.begin(subtransactions=True):
original_external_segment = super(
GroupPolicyPlugin, self).get_external_segment(
context, external_segment_id)
updated_external_segment = super(
GroupPolicyPlugin, self).update_external_segment(
context, external_segment_id,
external_segment)
self.extension_manager.process_update_external_segment(
session, external_segment, updated_external_segment)
self._validate_shared_update(
self, context, original_external_segment,
updated_external_segment, 'external_segment')
self._validate_routes(context, updated_external_segment,
original_external_segment)
# TODO(ivar): Validate Routes' GW in es subnet
policy_context = p_context.ExternalSegmentContext(
self, context, updated_external_segment,
original_external_segment)
(self.policy_driver_manager.
update_external_segment_precommit(policy_context))
self.policy_driver_manager.update_external_segment_postcommit(
policy_context)
return self.get_external_segment(context, external_segment_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_external_segment(self, context, external_segment_id):
session = context.session
with session.begin(subtransactions=True):
es = self.get_external_segment(context, external_segment_id)
if es['l3_policies'] or es['nat_pools'] or es['external_policies']:
raise gpex.ExternalSegmentInUse(es_id=es['id'])
policy_context = p_context.ExternalSegmentContext(
self, context, es)
(self.policy_driver_manager.
delete_external_segment_precommit(policy_context))
super(GroupPolicyPlugin, self).delete_external_segment(
context, external_segment_id)
try:
(self.policy_driver_manager.
delete_external_segment_postcommit(policy_context))
except Exception:
LOG.exception("delete_external_segment_postcommit failed "
"for external_segment %s",
external_segment_id)
return True
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_external_segment(self, context, external_segment_id, fields=None):
return self._get_resource(context, 'external_segment',
external_segment_id,
'ExternalSegmentContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_external_segments(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'external_segment', 'ExternalSegmentContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_external_policy(self, context, external_policy):
self._ensure_tenant(context, external_policy['external_policy'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin,
self).create_external_policy(
context, external_policy)
self.extension_manager.process_create_external_policy(
session, external_policy, result)
self._validate_shared_create(self, context, result,
'external_policy')
policy_context = p_context.ExternalPolicyContext(
self, context, result)
(self.policy_driver_manager.
create_external_policy_precommit(policy_context))
try:
(self.policy_driver_manager.
create_external_policy_postcommit(policy_context))
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("create_external_policy_postcommit "
"failed, deleting external_policy "
"%s", result['id'])
self.delete_external_policy(context, result['id'])
return self.get_external_policy(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_external_policy(self, context, external_policy_id,
external_policy):
session = context.session
with session.begin(subtransactions=True):
original_external_policy = super(
GroupPolicyPlugin, self).get_external_policy(
context, external_policy_id)
updated_external_policy = super(
GroupPolicyPlugin, self).update_external_policy(
context, external_policy_id,
external_policy)
self.extension_manager.process_update_external_policy(
session, external_policy, updated_external_policy)
self._validate_shared_update(
self, context, original_external_policy,
updated_external_policy, 'external_policy')
policy_context = p_context.ExternalPolicyContext(
self, context, updated_external_policy,
original_external_policy)
(self.policy_driver_manager.
update_external_policy_precommit(policy_context))
self.policy_driver_manager.update_external_policy_postcommit(
policy_context)
return self.get_external_policy(context, external_policy_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_external_policy(self, context, external_policy_id,
check_unused=False):
session = context.session
with session.begin(subtransactions=True):
es = self.get_external_policy(context, external_policy_id)
policy_context = p_context.ExternalPolicyContext(
self, context, es)
(self.policy_driver_manager.
delete_external_policy_precommit(policy_context))
super(GroupPolicyPlugin, self).delete_external_policy(
context, external_policy_id)
try:
self.policy_driver_manager.delete_external_policy_postcommit(
policy_context)
except Exception:
LOG.exception("delete_external_policy_postcommit failed "
"for external_policy %s", external_policy_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_external_policy(self, context, external_policy_id, fields=None):
return self._get_resource(context, 'external_policy',
external_policy_id,
'ExternalPolicyContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_external_policies(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'external_policy', 'ExternalPolicyContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def create_nat_pool(self, context, nat_pool):
self._ensure_tenant(context, nat_pool['nat_pool'])
session = context.session
with session.begin(subtransactions=True):
result = super(GroupPolicyPlugin, self).create_nat_pool(
context, nat_pool)
self.extension_manager.process_create_nat_pool(session, nat_pool,
result)
self._validate_shared_create(self, context, result, 'nat_pool')
policy_context = p_context.NatPoolContext(self, context, result)
(self.policy_driver_manager.
create_nat_pool_precommit(policy_context))
try:
(self.policy_driver_manager.
create_nat_pool_postcommit(policy_context))
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(
"create_nat_pool_postcommit failed, deleting "
"nat_pool %s", result['id'])
self.delete_nat_pool(context, result['id'])
return self.get_nat_pool(context, result['id'])
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def update_nat_pool(self, context, nat_pool_id, nat_pool):
session = context.session
with session.begin(subtransactions=True):
original_nat_pool = super(
GroupPolicyPlugin, self).get_nat_pool(context, nat_pool_id)
updated_nat_pool = super(
GroupPolicyPlugin, self).update_nat_pool(context, nat_pool_id,
nat_pool)
self.extension_manager.process_update_nat_pool(
session, nat_pool, updated_nat_pool)
self._validate_shared_update(self, context, original_nat_pool,
updated_nat_pool, 'nat_pool')
policy_context = p_context.NatPoolContext(
self, context, updated_nat_pool, original_nat_pool)
(self.policy_driver_manager.
update_nat_pool_precommit(policy_context))
self.policy_driver_manager.update_nat_pool_postcommit(policy_context)
return self.get_nat_pool(context, nat_pool_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
@gbp_extensions.disable_transaction_guard
def delete_nat_pool(self, context, nat_pool_id, check_unused=False):
session = context.session
with session.begin(subtransactions=True):
es = self.get_nat_pool(context, nat_pool_id)
policy_context = p_context.NatPoolContext(self, context, es)
(self.policy_driver_manager.delete_nat_pool_precommit(
policy_context))
super(GroupPolicyPlugin, self).delete_nat_pool(context,
nat_pool_id)
try:
self.policy_driver_manager.delete_nat_pool_postcommit(
policy_context)
except Exception:
LOG.exception("delete_nat_pool_postcommit failed "
"for nat_pool %s",
nat_pool_id)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_nat_pool(self, context, nat_pool_id, fields=None):
return self._get_resource(context, 'nat_pool',
nat_pool_id,
'NatPoolContext', fields=fields)
@log.log_method_call
@db_api.retry_if_session_inactive()
def get_nat_pools(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
return self._get_resources(
context, 'nat_pool', 'NatPoolContext',
filters=filters, fields=fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
def _is_port_bound(self, port_id):
# REVISIT(ivar): This operation shouldn't be done within a DB lock
# once we refactor the server.
not_bound = [portbindings.VIF_TYPE_UNBOUND,
portbindings.VIF_TYPE_BINDING_FAILED]
context = n_ctx.get_admin_context()
port = directory.get_plugin().get_port(context, port_id)
return (port.get('binding:vif_type') not in not_bound) and port.get(
'binding:host_id') and (port['device_owner'] or port['device_id'])
def _is_service_target(self, context, pt_id):
return bool(ncp_model.get_service_targets_count(
context.session, pt_id))
def _ensure_tenant(self, context, resource):
# TODO(Sumit): This check is ideally not required, but a bunch of UTs
# are not setup correctly to populate the tenant_id, hence we
# temporarily need to perform this check. This will go with the fix
# for the deprecated get_tenant_id_for_create method.
if 'tenant_id' in resource:
tenant_id = resource['tenant_id']
self.policy_driver_manager.ensure_tenant(context, tenant_id)
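# A hedged sketch (not part of the plugin above) of the create-side
# precommit/postcommit pattern the plugin repeats for every resource type:
# persist and validate inside one transaction, run driver side effects outside
# it, and clean the row up if the postcommit step fails. `db_create`,
# `precommit`, `postcommit` and `db_delete` are hypothetical stand-ins for the
# plugin and policy_driver_manager calls.
def create_with_commit_hooks(session, resource,
                             db_create, precommit, postcommit, db_delete):
    with session.begin(subtransactions=True):
        result = db_create(resource)        # DB write, still inside the txn
        precommit(result)                   # driver validation inside the txn
    try:
        postcommit(result)                  # side effects after the txn commits
    except Exception:
        db_delete(result['id'])             # roll the resource back on failure
        raise
    return result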
| 47.907735 | 79 | 0.628971 |
8afbb38d5dc19e75687457493a772cd2f94e3b67 | 24,466 | py | Python | cw.py | kkew3/pytorch-cw2 | 44993391ac9444b9941596d7fec6627fe6673910 | ["MIT"] | 55 | 2018-08-30T16:14:41.000Z | 2022-03-28T13:26:49.000Z | cw.py | Mrzhouqifei/pytorch-cw2 | 44993391ac9444b9941596d7fec6627fe6673910 | ["MIT"] | 1 | 2018-12-11T03:37:04.000Z | 2018-12-19T17:13:07.000Z | cw.py | kkew3/pytorch-cw2 | 44993391ac9444b9941596d7fec6627fe6673910 | ["MIT"] | 15 | 2019-04-02T07:29:51.000Z | 2022-03-09T04:23:37.000Z |
"""
Carlini-Wagner attack (http://arxiv.org/abs/1608.04644).
Referential implementation:
- https://github.com/carlini/nn_robust_attacks.git (the original implementation)
- https://github.com/rwightman/pytorch-nips2017-attack-example.git
"""
import operator as op
from typing import Union, Tuple
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
import runutils
def _var2numpy(var):
"""
    Convert a Variable to a numpy array. No transposition is performed.
:param var: Variable instance on whatever device
:type var: Variable
:return: the corresponding numpy array
:rtype: np.ndarray
"""
return var.data.cpu().numpy()
def atanh(x, eps=1e-6):
"""
The inverse hyperbolic tangent function, missing in pytorch.
:param x: a tensor or a Variable
:param eps: used to enhance numeric stability
:return: :math:`\\tanh^{-1}{x}`, of the same type as ``x``
"""
x = x * (1 - eps)
return 0.5 * torch.log((1.0 + x) / (1.0 - x))
def to_tanh_space(x, box):
# type: (Union[Variable, torch.FloatTensor], Tuple[float, float]) -> Union[Variable, torch.FloatTensor]
"""
Convert a batch of tensors to tanh-space. This method complements the
implementation of the change-of-variable trick in terms of tanh.
:param x: the batch of tensors, of dimension [B x C x H x W]
:param box: a tuple of lower bound and upper bound of the box constraint
:return: the batch of tensors in tanh-space, of the same dimension;
the returned tensor is on the same device as ``x``
"""
_box_mul = (box[1] - box[0]) * 0.5
_box_plus = (box[1] + box[0]) * 0.5
return atanh((x - _box_plus) / _box_mul)
def from_tanh_space(x, box):
# type: (Union[Variable, torch.FloatTensor], Tuple[float, float]) -> Union[Variable, torch.FloatTensor]
"""
    Convert a batch of tensors from tanh-space to ordinary image space.
This method complements the implementation of the change-of-variable trick
in terms of tanh.
:param x: the batch of tensors, of dimension [B x C x H x W]
:param box: a tuple of lower bound and upper bound of the box constraint
:return: the batch of tensors in ordinary image space, of the same
dimension; the returned tensor is on the same device as ``x``
"""
_box_mul = (box[1] - box[0]) * 0.5
_box_plus = (box[1] + box[0]) * 0.5
return torch.tanh(x) * _box_mul + _box_plus
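# A minimal sanity-check sketch (not part of the original module): it reuses
# the helpers above to verify that to_tanh_space and from_tanh_space invert
# each other on a random batch inside the box; `_tanh_space_roundtrip_demo`
# is an illustrative name only.
def _tanh_space_roundtrip_demo(box=(-1., 1.)):
    x = torch.rand(2, 3, 4, 4) * (box[1] - box[0]) + box[0]  # values inside the box
    w = to_tanh_space(x, box)          # unconstrained tanh-space representation
    x_back = from_tanh_space(w, box)   # mapped back into the image-space box
    # the round trip should reproduce x up to the atanh eps and float error
    return float((x - x_back).abs().max())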
class L2Adversary(object):
"""
The L2 attack adversary. To enforce the box constraint, the
change-of-variable trick using tanh-space is adopted.
The loss function to optimize:
.. math::
\\|\\delta\\|_2^2 + c \\cdot f(x + \\delta)
where :math:`f` is defined as
.. math::
f(x') = \\max\\{0, (\\max_{i \\ne t}{Z(x')_i} - Z(x')_t) \\cdot \\tau + \\kappa\\}
where :math:`\\tau` is :math:`+1` if the adversary performs targeted attack;
otherwise it's :math:`-1`.
Usage::
attacker = L2Adversary()
# inputs: a batch of input tensors
# targets: a batch of attack targets
# model: the model to attack
advx = attacker(model, inputs, targets)
The change-of-variable trick
++++++++++++++++++++++++++++
Let :math:`a` be a proper affine transformation.
1. Given input :math:`x` in image space, map :math:`x` to "tanh-space" by
.. math:: \\hat{x} = \\tanh^{-1}(a^{-1}(x))
2. Optimize an adversarial perturbation :math:`m` without constraint in the
"tanh-space", yielding an adversarial example :math:`w = \\hat{x} + m`; and
3. Map :math:`w` back to the same image space as the one where :math:`x`
resides:
.. math::
x' = a(\\tanh(w))
where :math:`x'` is the adversarial example, and :math:`\\delta = x' - x`
is the adversarial perturbation.
Since the composition of affine transformation and hyperbolic tangent is
strictly monotonic, $\\delta = 0$ if and only if $m = 0$.
Symbols used in docstring
+++++++++++++++++++++++++
- ``B``: the batch size
- ``C``: the number of channels
- ``H``: the height
- ``W``: the width
- ``M``: the number of classes
"""
def __init__(self, targeted=True, confidence=0.0, c_range=(1e-3, 1e10),
search_steps=5, max_steps=1000, abort_early=True,
box=(-1., 1.), optimizer_lr=1e-2, init_rand=False):
"""
:param targeted: ``True`` to perform targeted attack in ``self.run``
method
:type targeted: bool
:param confidence: the confidence constant, i.e. the $\\kappa$ in paper
:type confidence: float
:param c_range: the search range of the constant :math:`c`; should be a
tuple of form (lower_bound, upper_bound)
:type c_range: Tuple[float, float]
:param search_steps: the number of steps to perform binary search of
the constant :math:`c` over ``c_range``
:type search_steps: int
:param max_steps: the maximum number of optimization steps for each
constant :math:`c`
:type max_steps: int
:param abort_early: ``True`` to abort early in process of searching for
:math:`c` when the loss virtually stops increasing
:type abort_early: bool
:param box: a tuple of lower bound and upper bound of the box
:type box: Tuple[float, float]
:param optimizer_lr: the base learning rate of the Adam optimizer used
over the adversarial perturbation in clipped space
:type optimizer_lr: float
:param init_rand: ``True`` to initialize perturbation to small Gaussian;
False is consistent with the original paper, where the
perturbation is initialized to zero
:type init_rand: bool
:rtype: None
        Why does ``box`` default to (-1., 1.) rather than (0., 1.)? TL;DR the
domain of the problem in pytorch is [-1, 1] instead of [0, 1].
According to Xiang Xu (samxucmu@gmail.com)::
> The reason is that in pytorch a transformation is applied first
> before getting the input from the data loader. So image in range [0,1]
> will subtract some mean and divide by std. The normalized input image
> will now be in range [-1,1]. For this implementation, clipping is
> actually performed on the image after normalization, not on the
> original image.
        Why does ``optimizer_lr`` default to 1e-2? The optimizer used in Carlini's
code adopts 1e-2. In another pytorch implementation
(https://github.com/rwightman/pytorch-nips2017-attack-example.git),
though, the learning rate is set to 5e-4.
"""
if len(c_range) != 2:
raise TypeError('c_range ({}) should be of form '
'tuple([lower_bound, upper_bound])'
.format(c_range))
if c_range[0] >= c_range[1]:
raise ValueError('c_range lower bound ({}) is expected to be less '
'than c_range upper bound ({})'.format(*c_range))
if len(box) != 2:
raise TypeError('box ({}) should be of form '
'tuple([lower_bound, upper_bound])'
.format(box))
if box[0] >= box[1]:
raise ValueError('box lower bound ({}) is expected to be less than '
'box upper bound ({})'.format(*box))
self.targeted = targeted
self.confidence = float(confidence)
self.c_range = (float(c_range[0]), float(c_range[1]))
self.binary_search_steps = search_steps
self.max_steps = max_steps
self.abort_early = abort_early
self.ae_tol = 1e-4 # tolerance of early abort
self.box = tuple(map(float, box)) # type: Tuple[float, float]
self.optimizer_lr = optimizer_lr
        # `self.init_rand` is not in Carlini's code; it's an attempt in the
        # referenced pytorch implementation to improve the quality of attacks.
self.init_rand = init_rand
        # Since a larger `scale_const` makes a successful attack more likely,
        # `self.repeat` guarantees that the largest `scale_const` is attempted
        # at least once. Moreover, because the optimality criterion is the L2
        # norm of the perturbation, and a larger `scale_const` tends to yield a
        # larger (hence less optimal) L2 norm, this final attempt at the largest
        # `scale_const` cannot ruin the best solution found so far.
self.repeat = (self.binary_search_steps >= 10)
def __call__(self, model, inputs, targets, to_numpy=True):
"""
Produce adversarial examples for ``inputs``.
:param model: the model to attack
:type model: nn.Module
:param inputs: the original images tensor, of dimension [B x C x H x W].
``inputs`` can be on either CPU or GPU, but it will eventually be
moved to the same device as the one the parameters of ``model``
reside
:type inputs: torch.FloatTensor
:param targets: the original image labels, or the attack targets, of
dimension [B]. If ``self.targeted`` is ``True``, then ``targets``
is treated as the attack targets, otherwise the labels.
``targets`` can be on either CPU or GPU, but it will eventually
be moved to the same device as the one the parameters of
``model`` reside
:type targets: torch.LongTensor
:param to_numpy: True to return an `np.ndarray`, otherwise,
`torch.FloatTensor`
:type to_numpy: bool
:return: the adversarial examples on CPU, of dimension [B x C x H x W]
"""
# sanity check
assert isinstance(model, nn.Module)
assert len(inputs.size()) == 4
assert len(targets.size()) == 1
# get a copy of targets in numpy before moving to GPU, used when doing
# the binary search on `scale_const`
targets_np = targets.clone().cpu().numpy() # type: np.ndarray
# the type annotations here are used only for type hinting and do
# not indicate the actual type (cuda or cpu); same applies to all codes
# below
inputs = runutils.make_cuda_consistent(model, inputs)[0] # type: torch.FloatTensor
targets = runutils.make_cuda_consistent(model, targets)[0] # type: torch.FloatTensor
# run the model a little bit to get the `num_classes`
num_classes = model(Variable(inputs[0][None, :], requires_grad=False)).size(1) # type: int
batch_size = inputs.size(0) # type: int
# `lower_bounds_np`, `upper_bounds_np` and `scale_consts_np` are used
# for binary search of each `scale_const` in the batch. The element-wise
        # inequality holds: lower_bounds_np < scale_consts_np <= upper_bounds_np
lower_bounds_np = np.zeros(batch_size)
upper_bounds_np = np.ones(batch_size) * self.c_range[1]
scale_consts_np = np.ones(batch_size) * self.c_range[0]
# Optimal attack to be found.
# The three "placeholders" are defined as:
# - `o_best_l2`: the least L2 norms
# - `o_best_l2_ppred`: the perturbed predictions made by the adversarial
# perturbations with the least L2 norms
# - `o_best_advx`: the underlying adversarial example of
# `o_best_l2_ppred`
o_best_l2 = np.ones(batch_size) * np.inf
o_best_l2_ppred = -np.ones(batch_size)
o_best_advx = inputs.clone().cpu().numpy() # type: np.ndarray
# convert `inputs` to tanh-space
inputs_tanh = self._to_tanh_space(inputs) # type: torch.FloatTensor
inputs_tanh_var = Variable(inputs_tanh, requires_grad=False)
# the one-hot encoding of `targets`
targets_oh = torch.zeros(targets.size() + (num_classes,)) # type: torch.FloatTensor
targets_oh = runutils.make_cuda_consistent(model, targets_oh)[0]
targets_oh.scatter_(1, targets.unsqueeze(1), 1.0)
targets_oh_var = Variable(targets_oh, requires_grad=False)
# the perturbation variable to optimize.
# `pert_tanh` is essentially the adversarial perturbation in tanh-space.
# In Carlini's code it's denoted as `modifier`
pert_tanh = torch.zeros(inputs.size()) # type: torch.FloatTensor
if self.init_rand:
nn.init.normal(pert_tanh, mean=0, std=1e-3)
pert_tanh = runutils.make_cuda_consistent(model, pert_tanh)[0]
pert_tanh_var = Variable(pert_tanh, requires_grad=True)
optimizer = optim.Adam([pert_tanh_var], lr=self.optimizer_lr)
for sstep in range(self.binary_search_steps):
if self.repeat and sstep == self.binary_search_steps - 1:
scale_consts_np = upper_bounds_np
scale_consts = torch.from_numpy(np.copy(scale_consts_np)).float() # type: torch.FloatTensor
scale_consts = runutils.make_cuda_consistent(model, scale_consts)[0]
scale_consts_var = Variable(scale_consts, requires_grad=False)
            print('Using scale consts: {}'.format(list(scale_consts_np)))  # FIXME
# the minimum L2 norms of perturbations found during optimization
best_l2 = np.ones(batch_size) * np.inf
# the perturbed predictions corresponding to `best_l2`, to be used
# in binary search of `scale_const`
best_l2_ppred = -np.ones(batch_size)
# previous (summed) batch loss, to be used in early stopping policy
prev_batch_loss = np.inf # type: float
for optim_step in range(self.max_steps):
batch_loss, pert_norms_np, pert_outputs_np, advxs_np = \
self._optimize(model, optimizer, inputs_tanh_var,
pert_tanh_var, targets_oh_var,
scale_consts_var)
                if optim_step % 10 == 0: print('batch [{}] loss: {}'.format(optim_step, batch_loss))  # FIXME
if self.abort_early and not optim_step % (self.max_steps // 10):
if batch_loss > prev_batch_loss * (1 - self.ae_tol):
break
prev_batch_loss = batch_loss
# update best attack found during optimization
pert_predictions_np = np.argmax(pert_outputs_np, axis=1)
comp_pert_predictions_np = np.argmax(
self._compensate_confidence(pert_outputs_np,
targets_np),
axis=1)
for i in range(batch_size):
l2 = pert_norms_np[i]
cppred = comp_pert_predictions_np[i]
ppred = pert_predictions_np[i]
tlabel = targets_np[i]
ax = advxs_np[i]
if self._attack_successful(cppred, tlabel):
assert cppred == ppred
if l2 < best_l2[i]:
best_l2[i] = l2
best_l2_ppred[i] = ppred
if l2 < o_best_l2[i]:
o_best_l2[i] = l2
o_best_l2_ppred[i] = ppred
o_best_advx[i] = ax
# binary search of `scale_const`
for i in range(batch_size):
tlabel = targets_np[i]
assert best_l2_ppred[i] == -1 or \
self._attack_successful(best_l2_ppred[i], tlabel)
assert o_best_l2_ppred[i] == -1 or \
self._attack_successful(o_best_l2_ppred[i], tlabel)
if best_l2_ppred[i] != -1:
                    # successful; tighten the upper bound and bisect the search interval
if scale_consts_np[i] < upper_bounds_np[i]:
upper_bounds_np[i] = scale_consts_np[i]
                    # `upper_bounds_np[i]` still being as large as `c_range[1]`
                    # means the upper bound has never been tightened; only
                    # bisect once it has dropped below `0.1 * c_range[1]`,
                    # otherwise keep the current `scale_consts_np[i]`
if upper_bounds_np[i] < self.c_range[1] * 0.1:
scale_consts_np[i] = (lower_bounds_np[i] + upper_bounds_np[i]) / 2
else:
# failure; multiply `scale_const` by ten if no solution
# found; otherwise do binary search
if scale_consts_np[i] > lower_bounds_np[i]:
lower_bounds_np[i] = scale_consts_np[i]
if upper_bounds_np[i] < self.c_range[1] * 0.1:
scale_consts_np[i] = (lower_bounds_np[i] + upper_bounds_np[i]) / 2
else:
scale_consts_np[i] *= 10
if not to_numpy:
o_best_advx = torch.from_numpy(o_best_advx).float()
return o_best_advx
def _optimize(self, model, optimizer, inputs_tanh_var, pert_tanh_var,
targets_oh_var, c_var):
"""
Optimize for one step.
:param model: the model to attack
:type model: nn.Module
:param optimizer: the Adam optimizer to optimize ``modifier_var``
:type optimizer: optim.Adam
:param inputs_tanh_var: the input images in tanh-space
:type inputs_tanh_var: Variable
:param pert_tanh_var: the perturbation to optimize in tanh-space,
``pert_tanh_var.requires_grad`` flag must be set to True
:type pert_tanh_var: Variable
:param targets_oh_var: the one-hot encoded target tensor (the attack
targets if self.targeted else image labels)
:type targets_oh_var: Variable
:param c_var: the constant :math:`c` for each perturbation of a batch,
a Variable of FloatTensor of dimension [B]
:type c_var: Variable
:return: the batch loss, squared L2-norm of adversarial perturbations
(of dimension [B]), the perturbed activations (of dimension
[B]), the adversarial examples (of dimension [B x C x H x W])
"""
# the adversarial examples in the image space
# of dimension [B x C x H x W]
advxs_var = self._from_tanh_space(inputs_tanh_var + pert_tanh_var) # type: Variable
# the perturbed activation before softmax
pert_outputs_var = model(advxs_var) # type: Variable
# the original inputs
inputs_var = self._from_tanh_space(inputs_tanh_var) # type: Variable
perts_norm_var = torch.pow(advxs_var - inputs_var, 2)
perts_norm_var = torch.sum(perts_norm_var.view(
perts_norm_var.size(0), -1), 1)
# In Carlini's code, `target_activ_var` is called `real`.
# It should be a Variable of tensor of dimension [B], such that the
# `target_activ_var[i]` is the final activation (right before softmax)
# of the $t$th class, where $t$ is the attack target or the image label
#
# noinspection PyArgumentList
target_activ_var = torch.sum(targets_oh_var * pert_outputs_var, 1)
inf = 1e4 # sadly pytorch does not work with np.inf;
# 1e4 is also used in Carlini's code
# In Carlini's code, `maxother_activ_var` is called `other`.
# It should be a Variable of tensor of dimension [B], such that the
# `maxother_activ_var[i]` is the maximum final activation of all classes
# other than class $t$, where $t$ is the attack target or the image
# label.
#
# The assertion here ensures (sufficiently yet not necessarily) the
# assumption behind the trick to get `maxother_activ_var` holds, that
# $\max_{i \ne t}{o_i} \ge -\text{_inf}$, where $t$ is the target and
# $o_i$ the $i$th element along axis=1 of `pert_outputs_var`.
#
# noinspection PyArgumentList
assert (pert_outputs_var.max(1)[0] >= -inf).all(), 'assumption failed'
# noinspection PyArgumentList
maxother_activ_var = torch.max(((1 - targets_oh_var) * pert_outputs_var
- targets_oh_var * inf), 1)[0]
# Compute $f(x')$, where $x'$ is the adversarial example in image space.
# The result `f_var` should be of dimension [B]
if self.targeted:
# if targeted, optimize to make `target_activ_var` larger than
# `maxother_activ_var` by `self.confidence`
#
# noinspection PyArgumentList
f_var = torch.clamp(maxother_activ_var - target_activ_var
+ self.confidence, min=0.0)
else:
# if not targeted, optimize to make `maxother_activ_var` larger than
# `target_activ_var` (the ground truth image labels) by
# `self.confidence`
#
# noinspection PyArgumentList
f_var = torch.clamp(target_activ_var - maxother_activ_var
+ self.confidence, min=0.0)
# the total loss of current batch, should be of dimension [1]
batch_loss_var = torch.sum(perts_norm_var + c_var * f_var) # type: Variable
# Do optimization for one step
optimizer.zero_grad()
batch_loss_var.backward()
optimizer.step()
# Make some records in python/numpy on CPU
batch_loss = batch_loss_var.data[0] # type: float
pert_norms_np = _var2numpy(perts_norm_var)
pert_outputs_np = _var2numpy(pert_outputs_var)
advxs_np = _var2numpy(advxs_var)
return batch_loss, pert_norms_np, pert_outputs_np, advxs_np
def _attack_successful(self, prediction, target):
"""
See whether the underlying attack is successful.
:param prediction: the prediction of the model on an input
:type prediction: int
:param target: either the attack target or the ground-truth image label
:type target: int
:return: ``True`` if the attack is successful
:rtype: bool
"""
if self.targeted:
return prediction == target
else:
return prediction != target
# noinspection PyUnresolvedReferences
def _compensate_confidence(self, outputs, targets):
"""
Compensate for ``self.confidence`` and returns a new weighted sum
vector.
:param outputs: the weighted sum right before the last layer softmax
normalization, of dimension [B x M]
:type outputs: np.ndarray
:param targets: either the attack targets or the real image labels,
depending on whether or not ``self.targeted``, of dimension [B]
:type targets: np.ndarray
:return: the compensated weighted sum of dimension [B x M]
:rtype: np.ndarray
"""
outputs_comp = np.copy(outputs)
rng = np.arange(targets.shape[0])
if self.targeted:
# for each image $i$:
# if targeted, `outputs[i, target_onehot]` should be larger than
# `max(outputs[i, ~target_onehot])` by `self.confidence`
outputs_comp[rng, targets] -= self.confidence
else:
# for each image $i$:
# if not targeted, `max(outputs[i, ~target_onehot]` should be larger
# than `outputs[i, target_onehot]` (the ground truth image labels)
# by `self.confidence`
outputs_comp[rng, targets] += self.confidence
return outputs_comp
def _to_tanh_space(self, x):
"""
Convert a batch of tensors to tanh-space.
:param x: the batch of tensors, of dimension [B x C x H x W]
:return: the batch of tensors in tanh-space, of the same dimension
"""
return to_tanh_space(x, self.box)
def _from_tanh_space(self, x):
"""
Convert a batch of tensors from tanh-space to input space.
:param x: the batch of tensors, of dimension [B x C x H x W]
:return: the batch of tensors in tanh-space, of the same dimension;
the returned tensor is on the same device as ``x``
"""
return from_tanh_space(x, self.box)
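# A hedged usage sketch (not from the original repository) showing the call
# signature of L2Adversary on a tiny hypothetical classifier; `_ToyNet` and
# `_l2_adversary_demo` are illustrative names only, and running it depends on
# the old pytorch/Variable API this module already targets.
class _ToyNet(nn.Module):
    """Hypothetical toy classifier used only to illustrate the attack API."""
    def __init__(self, num_classes=10):
        super(_ToyNet, self).__init__()
        self.fc = nn.Linear(3 * 8 * 8, num_classes)
    def forward(self, x):
        return self.fc(x.view(x.size(0), -1))
def _l2_adversary_demo():
    model = _ToyNet()
    inputs = torch.rand(4, 3, 8, 8) * 2 - 1        # a batch already inside the box (-1, 1)
    labels = torch.LongTensor(4).random_(0, 10)    # untargeted attack: ground-truth labels
    attacker = L2Adversary(targeted=False, search_steps=2, max_steps=50)
    advx = attacker(model, inputs, labels, to_numpy=True)
    return advx.shape                              # expected: (4, 3, 8, 8)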
| 45.307407 | 108 | 0.60394 |
5538b29c4d0145016101d5a35e3faa5680fec014 | 530 | py | Python | Chapter02/urllib/urllib_form_data.py | yangwawa0323/Learning-Python-Networking-Second-Edition | 5460fe4fb6acc5d0df19bf36e52ac09e9a11eb8b | ["MIT"] | 52 | 2018-12-17T19:33:06.000Z | 2022-03-25T18:14:02.000Z | Chapter02/urllib/urllib_form_data.py | barretthugh/Learning-Python-Networking-Second-Edition | 0f00b8b20c1c85e76754e47113dff8ca9e99d5ca | ["MIT"] | null | null | null | Chapter02/urllib/urllib_form_data.py | barretthugh/Learning-Python-Networking-Second-Edition | 0f00b8b20c1c85e76754e47113dff8ca9e99d5ca | ["MIT"] | 38 | 2018-12-18T09:08:43.000Z | 2022-02-06T02:53:05.000Z |
#!/usr/bin/env python3
from urllib.request import urlopen
from urllib.parse import urlencode
from urllib.request import Request
import json
data_dict = {'custname': 'customer', 'custtel': '323232',
             'size': 'large', 'custemail': 'email@domain.com'}
data = urlencode(data_dict).encode('utf-8')
print(data)
req = Request('http://httpbin.org/post',data=data)
req.add_header('Content-Type', 'application/x-www-form-urlencoded;charset=UTF-8')
response = urlopen(req)
response_dictionary = json.load(response)
print(response_dictionary)
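# A hedged follow-up sketch (assumption: httpbin.org is reachable and, as it
# normally does, echoes urlencoded fields back under the 'form' key). It reuses
# the imports above; `post_form_and_check` is an illustrative name only.
def post_form_and_check(url='http://httpbin.org/post'):
    payload = {'custname': 'customer', 'custtel': '323232',
               'size': 'large', 'custemail': 'email@domain.com'}
    body = urlencode(payload).encode('utf-8')
    request = Request(url, data=body)
    request.add_header('Content-Type',
                       'application/x-www-form-urlencoded;charset=UTF-8')
    with urlopen(request) as resp:
        echoed = json.load(resp)
    return echoed.get('form') == payload   # True if every field round-tripped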
| 29.444444 | 80 | 0.760377 |
642efa8c9a14a30508aed9efe268ce0ab1874041 | 1,267 | py | Python | tests/test_extraction.py | Kabongosalomon/axcell | f9c74910561f6064a04a10118824c99e871f8a38 | ["Apache-2.0"] | 335 | 2020-05-07T19:57:36.000Z | 2022-03-16T07:05:51.000Z | tests/test_extraction.py | doc22940/axcell | b41c1623377d89c3c45a61907f0a47ea029269de | ["Apache-2.0"] | 16 | 2020-06-12T16:43:29.000Z | 2021-11-24T11:19:09.000Z | tests/test_extraction.py | doc22940/axcell | b41c1623377d89c3c45a61907f0a47ea029269de | ["Apache-2.0"] | 50 | 2020-05-07T20:35:18.000Z | 2022-02-16T06:37:31.000Z |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import pytest
from pathlib import Path
from axcell.helpers.paper_extractor import PaperExtractor
from axcell.data.paper_collection import PaperCollection
from shutil import copyfileobj
import gzip
def test_extraction(tmpdir):
# pack main.tex to an archive
tmpdir = Path(tmpdir)
source = Path(__file__).resolve().parent / "data" / "main.tex"
paper_id = "1234.56789"
archive = tmpdir / "sources" / paper_id
archive.parent.mkdir()
with source.open("rb") as src, gzip.open(archive, "wb") as dst:
copyfileobj(src, dst)
extract = PaperExtractor(tmpdir)
status = extract(archive)
assert status == "success"
pc = PaperCollection.from_files(tmpdir / "papers")
extracted = len(pc)
assert extracted == 1, f"Expected to extract exactly one paper, found {extracted}"
paper = pc[0]
assert paper.paper_id == paper_id
assert paper.text.title == "DILBERT: Distilling Inner Latent BERT variables"
assert len(paper.tables) == 2
assert paper.tables[0].caption == "Table 1: A table."
assert paper.tables[1].caption == "Table 2: A table."
assert paper.tables[0].shape == (5, 3)
assert paper.tables[1].shape == (4, 3)
| 32.487179 | 86 | 0.692186 |
9788f76ff8a41bfa0885c48722f13afb39d6305b | 7,422 | py | Python | game/state.py | Areking-RS/Code-jam-2021 | 67b2a4526ededa7bbef7a6963443dbb1c5d63d37 | ["MIT"] | null | null | null | game/state.py | Areking-RS/Code-jam-2021 | 67b2a4526ededa7bbef7a6963443dbb1c5d63d37 | ["MIT"] | null | null | null | game/state.py | Areking-RS/Code-jam-2021 | 67b2a4526ededa7bbef7a6963443dbb1c5d63d37 | ["MIT"] | null | null | null |
from typing import Generator, Optional, Union
from blessed import Terminal
from game.components import (
Ascii, FollowAI, Movement, PlayerInput, Renderable, Text, TimeToLive,
Transform
)
from game.cutscenes import CutsceneFrame, CutsceneSequence, ordered_cutscenes
from game.ecs.world import World
from game.mapgeneration import MapType, mapgenerator
from game.processors import (
ascii_renderer, enemy_movement, input_processor, movement_processor,
render_system, text_renderer, ttl_processor
)
from game.utils import Vector2, echo
def _level_progression() -> Generator[Union['Cutscene', 'GameLevel'], None, None]:
for cutscene in ordered_cutscenes:
yield Cutscene(cutscene)
next_map, spawn = mapgenerator(
map_width=50,
map_height=50,
room_frequency=10,
room_size=30,
path_width=5
)
yield GameLevel(next_map, spawn)
# TODO: Once we're out of levels, spawn a credits or some story ending
# This generator outputs our level progression
story_progression = _level_progression()
class Screen(object):
"""Base class for all game screens"""
def __init__(self, world: Optional[World] = None):
if world is None:
world = World()
self.world = world
def setup(self, term: Terminal) -> None:
"""
Run setup for the screen before beginning ticks.
:param term: Terminal reference for running setup operations
:return: None
"""
pass
def tick(self, term: Terminal, dt: float, inp: str) -> Optional['Screen']:
"""
Tick (update) the screen
:param term: Terminal reference
:param dt: Delta between game loop iterations
:param inp: Keyboard input
:return: Optional next screen
"""
# TODO: Globally before any processors run, blanking the screen makes some sense
# but it may not always be appropriate
color_bg = term.on_blue
echo(term.move_yx(0, 0))
echo(color_bg(term.clear))
self.world.tick(term, dt, inp)
return None
class Intro(Screen):
"""Intro screen for the game"""
def __init__(self):
super(Intro, self).__init__()
self.text_entity: Optional[int] = None
self.ttl_component: Optional[TimeToLive] = None
def setup(self, term: Terminal) -> None:
"""
Run setup for the screen before beginning ticks.
:param term: Terminal reference for running setup operations
:return: None
"""
text = Text(text_string='Dedicated Dugongs', v_align=Text.VerticalAlign.CENTER)
self.ttl_component = TimeToLive(expires_after=1)
self.text_entity = self.world.create_entity(text, self.ttl_component)
self.world.register_processor(text_renderer)
self.world.register_processor(ttl_processor)
def tick(self, term: Terminal, dt: float, inp: str) -> Optional['Screen']:
"""
Tick (update) the screen
:param term: Terminal reference
:param dt: Delta between game loop iterations
:param inp: Keyboard input
:return: Optional next screen
"""
# TODO: Blank the screen
super(Intro, self).tick(term, dt, inp)
if self.ttl_component.expired:
return next(story_progression)
class GameLevel(Screen):
"""Screen that plays out a game level"""
def __init__(self, level: MapType, spawn_location: int):
super(GameLevel, self).__init__()
self.level = level
self.spawn_location = spawn_location
def setup(self, term: Terminal) -> None:
"""
Run setup for the screen before beginning ticks.
:param term: Terminal reference for running setup operations
:return: None
"""
self.world.register_processor(input_processor)
player_transform = Transform(position=Vector2(x=self.spawn_location))
self.world.create_entity(
player_transform,
Movement(direction=Vector2.RIGHT),
PlayerInput(),
Renderable(w=1, h=1, character=u'^')
)
self.world.create_entity(
Transform(position=Vector2(x=self.spawn_location + 2)),
Movement(direction=Vector2.RIGHT),
FollowAI(follow_transform=player_transform),
Renderable(w=1, h=1, character=u'O')
)
self.world.register_processor(enemy_movement(self.level))
self.world.register_processor(movement_processor(self.level))
self.world.register_processor(render_system(self.level))
def tick(self, term: Terminal, dt: float, inp: str) -> Optional['Screen']:
"""
Tick (update) the screen
:param term: Terminal reference
:param dt: Delta between game loop iterations
:param inp: Keyboard input
:return: Optional next screen
"""
try:
super(GameLevel, self).tick(term, dt, inp)
except IndexError:
next_screen = next(story_progression)
return next_screen
class Cutscene(Screen):
"""A screen that handles displaying cutscenes"""
def __init__(self, sequence: CutsceneSequence):
super(Cutscene, self).__init__()
self.sequence: CutsceneSequence = sequence
self.ascii: Optional[Ascii] = None
self.art_ttl: Optional[TimeToLive] = None
self.text: Optional[Text] = None
def _next_scene(self) -> Optional[CutsceneFrame]:
"""
Get the next frame of the cutscene
:return: The CutsceneFrame or None if there are no more frames
"""
try:
return self.sequence.pop(0)
except IndexError:
return None
def setup(self, term: Terminal) -> None:
"""
Run setup for the screen before beginning ticks.
:param term: Terminal reference for running setup operations
:return: None
"""
scene = self._next_scene()
if scene is None:
raise ValueError('Cutscene empty on setup')
art, timing, text = scene
self.ascii = Ascii(art=art)
self.art_ttl = TimeToLive(expires_after=timing)
self.text = Text(text_string=text)
self.world.create_entity(
self.ascii,
self.art_ttl,
self.text
)
self.world.register_processor(text_renderer)
self.world.register_processor(ttl_processor)
self.world.register_processor(ascii_renderer)
def tick(self, term: Terminal, dt: float, inp: str) -> Optional['Screen']:
"""
Tick (update) the screen
:param term: Terminal reference
:param dt: Delta between game loop iterations
:param inp: Keyboard input
:return: Optional next screen
"""
super(Cutscene, self).tick(term, dt, inp)
if self.art_ttl.expired:
scene = self._next_scene()
if scene is None:
next_screen = next(story_progression)
return next_screen
# Switch out the frame data on the components
art, timing, text = scene
self.ascii.art = art
self.art_ttl.expires_after = timing
self.art_ttl.start_time = None
self.art_ttl.current_time = None
self.text.text_string = text
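# A hedged sketch (not the project's real entry point) of how a Screen subclass
# above could be driven: call setup() once, then tick() each frame, switching to
# whatever Screen tick() returns; `run_screens` and `max_seconds` are
# illustrative names, and blessed's cbreak/hidden_cursor/inkey are assumed.
import time as _time
def run_screens(first_screen: Screen, max_seconds: float = 60.0) -> None:
    term = Terminal()
    screen = first_screen
    screen.setup(term)
    last = start = _time.monotonic()
    with term.cbreak(), term.hidden_cursor():
        while _time.monotonic() - start < max_seconds:   # crude exit condition
            now = _time.monotonic()
            dt, last = now - last, now
            key = term.inkey(timeout=0.05)               # '' on timeout
            next_screen = screen.tick(term, dt, str(key))
            if next_screen is not None:                  # tick() requested a transition
                screen = next_screen
                screen.setup(term)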
| 31.991379 | 88 | 0.626381 |
4550934031aec77ba2517b33be10f8f89bdf83e8 | 51,816 | py | Python | tests/engine/test_flow_runner.py | davidfhaines/prefect | 47e49bd685920b09164e8397a2b29f91197cebbc | ["ECL-2.0", "Apache-2.0"] | null | null | null | tests/engine/test_flow_runner.py | davidfhaines/prefect | 47e49bd685920b09164e8397a2b29f91197cebbc | ["ECL-2.0", "Apache-2.0"] | null | null | null | tests/engine/test_flow_runner.py | davidfhaines/prefect | 47e49bd685920b09164e8397a2b29f91197cebbc | ["ECL-2.0", "Apache-2.0"] | null | null | null |
import collections
import datetime
import queue
import random
import time
from unittest.mock import MagicMock
import pendulum
import pytest
import prefect
from prefect.core import Flow, Parameter, Task
from prefect.engine import signals
from prefect.engine.cache_validators import duration_only
from prefect.engine.executors import Executor, LocalExecutor
from prefect.engine.flow_runner import ENDRUN, FlowRunner, FlowRunnerInitializeResult
from prefect.engine.result import NoResult, Result
from prefect.engine.state import (
Cached,
Failed,
Finished,
Mapped,
Paused,
Pending,
Resume,
Retrying,
Running,
Scheduled,
Skipped,
State,
Success,
TimedOut,
TriggerFailed,
)
from prefect.tasks.secrets import PrefectSecret
from prefect.triggers import manual_only
from prefect.utilities.debug import raise_on_exception
class SuccessTask(Task):
def run(self):
return 1
class AddTask(Task):
def run(self, x, y): # pylint: disable=W0221
return x + y
class CountTask(Task):
call_count = 0
def run(self):
self.call_count += 1
return self.call_count
class ErrorTask(Task):
def run(self):
raise ValueError("custom-error-message")
class RaiseFailTask(Task):
def run(self):
raise prefect.engine.signals.FAIL("custom-fail-message")
raise ValueError("custom-error-message") # pylint: disable=W0101
class RaiseSkipTask(Task):
def run(self):
raise prefect.engine.signals.SKIP()
raise ValueError() # pylint: disable=W0101
class RaiseSuccessTask(Task):
def run(self):
raise prefect.engine.signals.SUCCESS()
raise ValueError() # pylint: disable=W0101
class RaiseRetryTask(Task):
def run(self):
raise prefect.engine.signals.RETRY()
raise ValueError() # pylint: disable=W0101
class ReturnTask(Task):
def run(self, x):
return 1 / (x - 1)
class SlowTask(Task):
def run(self, secs):
time.sleep(secs)
def test_flow_runner_has_logger():
r = FlowRunner(Flow(name="test"))
assert r.logger.name == "prefect.FlowRunner"
def test_flow_runner_runs_basic_flow_with_1_task():
flow = Flow(name="test")
task = SuccessTask()
flow.add_task(task)
flow_runner = FlowRunner(flow=flow)
state = flow_runner.run(return_tasks=[task])
assert state == Success(result={task: Success(result=1)})
def test_flow_runner_with_no_return_tasks():
"""
Make sure FlowRunner accepts return_tasks=None and doesn't raise early error
"""
flow = Flow(name="test")
task = SuccessTask()
flow.add_task(task)
flow_runner = FlowRunner(flow=flow)
assert flow_runner.run(return_tasks=None)
def test_flow_runner_with_invalid_return_tasks():
flow = Flow(name="test")
task = SuccessTask()
flow.add_task(task)
flow_runner = FlowRunner(flow=flow)
state = flow_runner.run(return_tasks=[1])
assert state.is_failed()
def test_flow_runner_runs_basic_flow_with_2_independent_tasks():
flow = Flow(name="test")
task1 = SuccessTask()
task2 = SuccessTask()
flow.add_task(task1)
flow.add_task(task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert isinstance(flow_state, Success)
assert flow_state.result[task1] == Success(result=1)
assert flow_state.result[task2] == Success(result=1)
def test_flow_runner_runs_basic_flow_with_2_dependent_tasks():
flow = Flow(name="test")
task1 = SuccessTask()
task2 = SuccessTask()
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert isinstance(flow_state, Success)
assert flow_state.result[task1] == Success(result=1)
assert flow_state.result[task2] == Success(result=1)
def test_flow_runner_runs_base_task_class():
flow = Flow(name="test")
task1 = Task()
task2 = Task()
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert isinstance(flow_state, Success)
assert isinstance(flow_state.result[task1], Success)
assert isinstance(flow_state.result[task2], Success)
def test_flow_runner_runs_basic_flow_with_2_dependent_tasks_and_first_task_fails():
flow = Flow(name="test")
task1 = ErrorTask()
task2 = SuccessTask()
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert isinstance(flow_state, Failed)
assert isinstance(flow_state.result[task1], Failed)
assert isinstance(flow_state.result[task2], TriggerFailed)
def test_flow_runner_runs_flow_with_2_dependent_tasks_and_first_task_fails_and_second_has_trigger():
flow = Flow(name="test")
task1 = ErrorTask()
task2 = SuccessTask(trigger=prefect.triggers.all_failed)
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert isinstance(
flow_state, Success
) # flow state is determined by terminal states
assert isinstance(flow_state.result[task1], Failed)
assert isinstance(flow_state.result[task2], Success)
def test_flow_runner_runs_basic_flow_with_2_dependent_tasks_and_first_task_fails_with_FAIL():
flow = Flow(name="test")
task1 = RaiseFailTask()
task2 = SuccessTask()
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert isinstance(flow_state, Failed)
assert isinstance(flow_state.result[task1], Failed)
assert not isinstance(flow_state.result[task1], TriggerFailed)
assert isinstance(flow_state.result[task2], TriggerFailed)
def test_flow_runner_runs_basic_flow_with_2_dependent_tasks_and_second_task_fails():
flow = Flow(name="test")
task1 = SuccessTask()
task2 = ErrorTask()
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert isinstance(flow_state, Failed)
assert isinstance(flow_state.result[task1], Success)
assert isinstance(flow_state.result[task2], Failed)
def test_flow_runner_does_not_return_task_states_when_it_doesnt_run():
flow = Flow(name="test")
task1 = SuccessTask()
task2 = ErrorTask()
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(
state=Success(result=5), return_tasks=[task1, task2]
)
assert isinstance(flow_state, Success)
assert flow_state.result == 5
def test_flow_run_method_returns_task_states_even_if_it_doesnt_run():
# https://github.com/PrefectHQ/prefect/issues/19
flow = Flow(name="test")
task1 = SuccessTask()
task2 = ErrorTask()
flow.add_edge(task1, task2)
flow_state = flow.run(state=Success())
assert flow_state.is_successful()
assert flow_state.result[task1].is_pending()
assert flow_state.result[task2].is_pending()
def test_flow_runner_remains_running_if_tasks_are_retrying():
# https://github.com/PrefectHQ/prefect/issues/19
flow = Flow(name="test")
task1 = SuccessTask()
task2 = ErrorTask(max_retries=1, retry_delay=datetime.timedelta(0))
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
assert flow_state.is_running()
assert flow_state.result[task1].is_successful()
assert flow_state.result[task2].is_retrying()
def test_secrets_dynamically_pull_from_context():
flow = Flow(name="test")
task1 = PrefectSecret("foo", max_retries=1, retry_delay=datetime.timedelta(0))
flow.add_task(task1)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1])
assert flow_state.is_running()
assert flow_state.result[task1].is_retrying()
with prefect.context(secrets=dict(foo=42)):
time.sleep(1)
flow_state = FlowRunner(flow=flow).run(task_states=flow_state.result)
assert flow_state.is_successful()
def test_flow_runner_doesnt_return_by_default():
flow = Flow(name="test")
task1 = SuccessTask()
task2 = SuccessTask()
flow.add_edge(task1, task2)
res = FlowRunner(flow=flow).run()
assert res.result == {}
def test_flow_runner_does_return_tasks_when_requested():
flow = Flow(name="test")
task1 = SuccessTask()
task2 = SuccessTask()
flow.add_edge(task1, task2)
flow_state = FlowRunner(flow=flow).run(return_tasks=[task1])
assert isinstance(flow_state, Success)
assert isinstance(flow_state.result[task1], Success)
def test_required_parameters_must_be_provided():
flow = Flow(name="test")
y = prefect.Parameter("y")
flow.add_task(y)
flow_state = FlowRunner(flow=flow).run(return_tasks=[y])
assert isinstance(flow_state, Failed)
assert isinstance(flow_state.result[y], Failed)
assert "required but not provided" in str(flow_state.result[y]).lower()
def test_parameters_are_placed_into_context():
flow = Flow(name="test")
y = prefect.Parameter("y", default=99)
flow.add_task(y)
flow_state = FlowRunner(flow=flow).run(return_tasks=[y], parameters=dict(y=42))
assert isinstance(flow_state, Success)
assert flow_state.result[y].result == 42
def test_parameters_are_placed_into_context_including_defaults():
@prefect.task
def whats_in_ctx():
return prefect.context.parameters
y = prefect.Parameter("y", default=99)
z = prefect.Parameter("z", default=19)
flow = Flow(name="test", tasks=[y, z, whats_in_ctx])
flow_state = FlowRunner(flow=flow).run(
return_tasks=[whats_in_ctx], parameters=dict(y=42)
)
assert isinstance(flow_state, Success)
assert flow_state.result[whats_in_ctx].result == dict(y=42, z=19)
def test_parameters_are_placed_into_context_and_override_current_context():
flow = Flow(name="test")
y = prefect.Parameter("y", default=99)
flow.add_task(y)
with prefect.context(parameters=dict(y=88, z=55)):
flow_state = FlowRunner(flow=flow).run(return_tasks=[y], parameters=dict(y=42))
assert isinstance(flow_state, Success)
assert flow_state.result[y].result == 42
def test_flow_run_state_determined_by_reference_tasks():
flow = Flow(name="test")
t1 = ErrorTask()
t2 = SuccessTask(trigger=prefect.triggers.all_finished)
flow.add_edge(t1, t2)
flow.set_reference_tasks([t1])
flow_state = flow.run()
assert isinstance(flow_state, Failed)
assert isinstance(flow_state.result[t1], Failed)
assert isinstance(flow_state.result[t2], Success)
def test_flow_run_state_not_determined_by_reference_tasks_if_terminal_tasks_are_not_finished():
flow = Flow(name="test")
t1 = ErrorTask()
t2 = RaiseRetryTask(trigger=prefect.triggers.all_finished)
flow.add_edge(t1, t2)
flow.set_reference_tasks([t1])
flow_state = FlowRunner(flow=flow).run(return_tasks=flow.tasks)
assert flow_state.is_running()
assert flow_state.result[t1].is_failed()
assert flow_state.result[t2].is_retrying()
def test_flow_with_multiple_retry_tasks_doesnt_run_them_early():
"""
t1 -> t2
t1 -> t3
Both t2 and t3 fail on initial run and request a retry. Starting the flow at t1 should
only run t3, which requests an immediate retry, and not t2, which requests a retry in
10 minutes.
This tests a check on the TaskRunner, but which matters in Flows like this.
"""
flow = Flow(name="test")
t1 = Task()
t2 = ErrorTask(retry_delay=datetime.timedelta(minutes=10), max_retries=1)
t3 = ErrorTask(retry_delay=datetime.timedelta(minutes=0), max_retries=1)
flow.add_edge(t1, t2)
flow.add_edge(t1, t3)
state1 = FlowRunner(flow=flow).run(return_tasks=flow.tasks)
assert state1.result[t2].is_retrying()
assert state1.result[t3].is_retrying()
state2 = FlowRunner(flow=flow).run(
return_tasks=flow.tasks, task_states=state1.result
)
assert state2.result[t2].is_retrying()
assert state2.result[t2] == state1.result[t2] # state is not modified at all
assert isinstance(state2.result[t3], Failed) # this task ran
def test_flow_runner_makes_copy_of_task_results_dict():
"""
Ensure the flow runner copies the task_results dict rather than modifying it inplace
"""
flow = Flow(name="test")
t1, t2 = Task(), Task()
flow.add_edge(t1, t2)
task_states = {t1: Pending()}
state = flow.run(task_states=task_states)
assert state.result[t1] == Success(result=None)
assert task_states == {t1: Pending()}
class TestCheckFlowPendingOrRunning:
@pytest.mark.parametrize("state", [Pending(), Running(), Retrying(), Scheduled()])
def test_pending_or_running_are_ok(self, state):
flow = Flow(name="test", tasks=[Task()])
new_state = FlowRunner(flow=flow).check_flow_is_pending_or_running(state=state)
assert new_state is state
@pytest.mark.parametrize(
"state", [Finished(), Success(), Failed(), Skipped(), State()]
)
def test_not_pending_or_running_raise_endrun(self, state):
flow = Flow(name="test", tasks=[Task()])
with pytest.raises(ENDRUN):
FlowRunner(flow=flow).check_flow_is_pending_or_running(state=state)
class TestCheckScheduledStep:
@pytest.mark.parametrize("state", [Failed(), Pending(), Running(), Success()])
def test_non_scheduled_states(self, state):
assert (
FlowRunner(flow=Flow(name="test")).check_flow_reached_start_time(
state=state
)
is state
)
def test_scheduled_states_without_start_time(self):
state = Scheduled(start_time=None)
assert (
FlowRunner(flow=Flow(name="test")).check_flow_reached_start_time(
state=state
)
is state
)
def test_scheduled_states_with_future_start_time(self):
state = Scheduled(
start_time=pendulum.now("utc") + datetime.timedelta(minutes=10)
)
with pytest.raises(ENDRUN) as exc:
FlowRunner(flow=Flow(name="test")).check_flow_reached_start_time(
state=state
)
assert exc.value.state is state
def test_scheduled_states_with_past_start_time(self):
state = Scheduled(
start_time=pendulum.now("utc") - datetime.timedelta(minutes=1)
)
assert (
FlowRunner(flow=Flow(name="test")).check_flow_reached_start_time(
state=state
)
is state
)
class TestSetFlowToRunning:
@pytest.mark.parametrize("state", [Pending(), Retrying()])
def test_pending_becomes_running(self, state):
flow = Flow(name="test", tasks=[Task()])
new_state = FlowRunner(flow=flow).set_flow_to_running(state=state)
assert new_state.is_running()
def test_running_stays_running(self):
state = Running()
flow = Flow(name="test", tasks=[Task()])
new_state = FlowRunner(flow=flow).set_flow_to_running(state=state)
assert new_state.is_running()
@pytest.mark.parametrize("state", [Finished(), Success(), Failed(), Skipped()])
def test_other_states_raise_endrun(self, state):
flow = Flow(name="test", tasks=[Task()])
with pytest.raises(ENDRUN):
FlowRunner(flow=flow).set_flow_to_running(state=state)
class TestRunFlowStep:
def test_running_state_finishes(self):
flow = Flow(name="test", tasks=[Task()])
new_state = FlowRunner(flow=flow).get_flow_run_state(
state=Running(),
task_states={},
task_contexts={},
return_tasks=set(),
task_runner_state_handlers=[],
executor=LocalExecutor(),
)
assert new_state.is_successful()
@pytest.mark.parametrize(
"state", [Pending(), Retrying(), Finished(), Success(), Failed(), Skipped()]
)
def test_other_states_raise_endrun(self, state):
flow = Flow(name="test", tasks=[Task()])
with pytest.raises(ENDRUN):
FlowRunner(flow=flow).get_flow_run_state(
state=state,
task_states={},
task_contexts={},
return_tasks=set(),
task_runner_state_handlers=[],
executor=Executor(),
)
def test_determine_final_state_has_final_say(self):
class MyFlowRunner(FlowRunner):
def determine_final_state(self, *args, **kwargs):
return Failed("Very specific error message")
flow = Flow(name="test", tasks=[Task()])
new_state = MyFlowRunner(flow=flow).get_flow_run_state(
state=Running(),
task_states={},
task_contexts={},
return_tasks=set(),
task_runner_state_handlers=[],
executor=LocalExecutor(),
)
assert new_state.is_failed()
assert new_state.message == "Very specific error message"
def test_determine_final_state_preserves_running_states_when_tasks_still_running(
self,
):
task = Task()
flow = Flow(name="test", tasks=[task])
old_state = Running()
new_state = FlowRunner(flow=flow).get_flow_run_state(
state=old_state,
task_states={task: Retrying(start_time=pendulum.now("utc").add(days=1))},
task_contexts={},
return_tasks=set(),
task_runner_state_handlers=[],
executor=LocalExecutor(),
)
assert new_state is old_state
class TestInputCaching:
@pytest.mark.parametrize(
"executor", ["local", "sync", "mproc", "mthread"], indirect=True
)
def test_retries_use_cached_inputs(self, executor):
with Flow(name="test") as f:
a = CountTask()
b = ReturnTask(max_retries=1, retry_delay=datetime.timedelta(0))
a_res = a()
b_res = b(a_res)
first_state = FlowRunner(flow=f).run(executor=executor, return_tasks=f.tasks)
assert first_state.is_running()
a_state = first_state.result[a_res]
a_state.result = (
NoResult # remove the result to see if the cached results are picked up
)
b_state = first_state.result[b_res]
b_state.cached_inputs = dict(x=Result(2)) # artificially alter state
with raise_on_exception(): # without caching we'd expect a KeyError
second_state = FlowRunner(flow=f).run(
executor=executor, return_tasks=[b_res], task_states=first_state.result
)
assert isinstance(second_state, Success)
assert second_state.result[b_res].result == 1
@pytest.mark.parametrize(
"executor", ["local", "sync", "mproc", "mthread"], indirect=True
)
def test_retries_cache_parameters_as_well(self, executor):
with Flow(name="test") as f:
a = Parameter("a")
b = ReturnTask(max_retries=1, retry_delay=datetime.timedelta(0))
a_res = a()
b_res = b(a_res)
first_state = FlowRunner(flow=f).run(
executor=executor, parameters=dict(a=1), return_tasks=f.tasks
)
assert first_state.is_running()
a_state = first_state.result[a_res]
a_state.result = (
NoResult # remove the result to see if the cached results are picked up
)
b_state = first_state.result[b_res]
b_state.cached_inputs = dict(x=Result(2)) # artificially alter state
with raise_on_exception(): # without caching we'd expect a KeyError
second_state = FlowRunner(flow=f).run(
executor=executor, return_tasks=[b_res], task_states=first_state.result
)
assert isinstance(second_state, Success)
assert second_state.result[b_res].result == 1
@pytest.mark.parametrize(
"executor", ["local", "sync", "mproc", "mthread"], indirect=True
)
def test_retries_ignore_cached_inputs_if_upstream_results_are_available(
self, executor
):
with Flow(name="test") as f:
a = CountTask()
b = ReturnTask(max_retries=1, retry_delay=datetime.timedelta(0))
a_res = a()
b_res = b(a_res)
first_state = FlowRunner(flow=f).run(executor=executor, return_tasks=f.tasks)
assert first_state.is_running()
a_state = first_state.result[a_res]
a_state.result = 100 # modify the result
b_state = first_state.result[b_res]
b_state.cached_inputs = dict(x=Result(2)) # artificially alter state
with raise_on_exception(): # without caching we'd expect a KeyError
second_state = FlowRunner(flow=f).run(
executor=executor, return_tasks=[b_res], task_states=first_state.result
)
assert isinstance(second_state, Success)
assert second_state.result[b_res].result == 1 / 99
@pytest.mark.parametrize(
"executor", ["local", "sync", "mproc", "mthread"], indirect=True
)
def test_manual_only_trigger_caches_inputs(self, executor):
with Flow(name="test") as f:
x = Parameter("x")
inp = SuccessTask()
t = AddTask(trigger=manual_only)
res = t(x, inp)
first_state = FlowRunner(flow=f).run(
executor=executor, parameters=dict(x=11), return_tasks=f.tasks
)
assert first_state.is_running()
first_state.result.update(
{res: Resume(cached_inputs=first_state.result[res].cached_inputs)}
)
second_state = FlowRunner(flow=f).run(
executor=executor,
parameters=dict(x=1),
return_tasks=[res],
task_states=first_state.result,
)
assert isinstance(second_state, Success)
assert second_state.result[res].result == 12
class TestOutputCaching:
@pytest.mark.parametrize(
"executor", ["local", "sync", "mproc", "mthread"], indirect=True
)
def test_providing_cachedstate_with_simple_example(self, executor):
class TestTask(Task):
call_count = 0
def run(self, x, s):
self.call_count += 1
return self.call_count
with Flow(name="test") as f:
y = TestTask(
cache_validator=duration_only, cache_for=datetime.timedelta(days=1)
)
x = Parameter("x")
s = SuccessTask()
f.add_edge(x, y, key="x")
f.add_edge(s, y, key="s")
state = Cached(
cached_result_expiration=pendulum.now("utc") + datetime.timedelta(days=1),
result=100,
)
flow_state = FlowRunner(flow=f).run(
executor=executor,
parameters=dict(x=1),
return_tasks=[y],
task_states={y: state},
)
assert isinstance(flow_state, Success)
assert flow_state.result[y].result == 100
class TestCachingFromContext:
def test_caches_do_not_persist_across_flow_runner_runs(self):
@prefect.task(cache_for=datetime.timedelta(seconds=10))
def test_task():
return random.random()
with Flow("test_cache") as flow:
t = test_task()
flow_state = FlowRunner(flow=flow).run(return_tasks=[t])
first_result = flow_state.result[t].result
flow_state = FlowRunner(flow=flow).run(return_tasks=[t])
second_result = flow_state.result[t].result
assert first_result != second_result
class TestInitializeRun:
def test_initialize_sets_none_to_pending(self):
result = FlowRunner(Flow(name="test")).initialize_run(
state=None, task_states={}, context={}, task_contexts={}, parameters={}
)
assert result.state.is_pending()
@pytest.mark.parametrize("state", [Pending(), Running()])
def test_initialize_returns_state_if_provided(self, state):
result = FlowRunner(Flow(name="test")).initialize_run(
state=state, task_states={}, context={}, task_contexts={}, parameters={}
)
assert result.state is state
def test_initialize_sets_task_contexts(self):
t1 = Task(name="t1")
t2 = Parameter(name="x")
flow = Flow(name="test", tasks=[t1, t2])
result = FlowRunner(flow).initialize_run(
state=Pending(), task_states={}, context={}, task_contexts={}, parameters={}
)
assert result.task_contexts == {
t: dict(task_name=t.name, task_slug=t.slug) for t in flow.tasks
}
def test_initialize_puts_parameters_in_context(self):
x = Parameter(name="x")
flow = Flow(name="test", tasks=[x])
result = FlowRunner(flow).initialize_run(
state=Pending(),
task_states={},
context={},
task_contexts={},
parameters={"x": 1},
)
assert result.context["parameters"] == {"x": 1}
def test_parameter_precedance(self):
x = Parameter(name="x")
flow = Flow(name="test", tasks=[x])
result = FlowRunner(flow).initialize_run(
state=Pending(),
task_states={},
context={"parameters": {"x": 2, "y": 1}},
task_contexts={},
parameters={"x": 1},
)
assert result.context["parameters"] == {"x": 1, "y": 1}
class TestRunCount:
def test_run_count_updates_after_each_retry(self):
flow = Flow(name="test")
t1 = ErrorTask(max_retries=2, retry_delay=datetime.timedelta(0))
flow.add_task(t1)
state1 = FlowRunner(flow=flow).run(return_tasks=[t1])
assert state1.result[t1].is_retrying()
assert state1.result[t1].run_count == 1
state2 = FlowRunner(flow=flow).run(return_tasks=[t1], task_states=state1.result)
assert state2.result[t1].is_retrying()
assert state2.result[t1].run_count == 2
def test_run_count_tracked_via_retry_states(self):
flow = Flow(name="test")
t1 = ErrorTask(max_retries=1, retry_delay=datetime.timedelta(0))
t2 = ErrorTask(max_retries=2, retry_delay=datetime.timedelta(0))
flow.add_task(t1)
flow.add_task(t2)
# first run
state1 = FlowRunner(flow=flow).run(return_tasks=[t1, t2])
assert state1.is_running()
assert state1.result[t1].is_retrying()
assert state1.result[t1].run_count == 1
assert state1.result[t2].is_retrying()
assert state1.result[t2].run_count == 1
# second run
state2 = FlowRunner(flow=flow).run(
task_states=state1.result, return_tasks=[t1, t2]
)
assert state2.is_running()
assert isinstance(state2.result[t1], Failed)
assert state2.result[t2].is_retrying()
assert state2.result[t2].run_count == 2
# third run
state3 = FlowRunner(flow=flow).run(
task_states=state2.result, return_tasks=[t1, t2]
)
assert state3.is_failed()
assert isinstance(state3.result[t1], Failed)
assert isinstance(state3.result[t2], Failed)
def test_flow_runner_uses_user_provided_executor():
t = SuccessTask()
with Flow(name="test") as f:
result = t()
with raise_on_exception():
with pytest.raises(NotImplementedError):
FlowRunner(flow=f).run(executor=Executor())
@pytest.mark.parametrize("executor", ["mproc", "mthread"], indirect=True)
def test_flow_runner_captures_and_exposes_dask_errors(executor):
q = queue.Queue()
@prefect.task
def put():
q.put(55)
f = Flow(name="test", tasks=[put])
state = f.run(executor=executor)
assert state.is_failed()
assert isinstance(state.result, TypeError)
# assert two possible result outputs for different Python versions
assert str(state.result) in [
"can't pickle _thread.lock objects",
"cannot pickle '_thread.lock' object",
]
@pytest.mark.xfail(
reason="This test fails on CircleCI for Python 3.5+ if not enough cores/workers are available."
)
@pytest.mark.parametrize("executor", ["mproc", "mthread"], indirect=True)
def test_flow_runner_allows_for_parallelism_with_times(executor):
# related:
# "https://stackoverflow.com/questions/52121686/why-is-dask-distributed-not-parallelizing-the-first-run-of-my-workflow"
@prefect.task
def record_times():
res = []
pause = random.randint(0, 75)
for i in range(75):
if i == pause:
time.sleep(0.1) # add a little noise
res.append(time.time())
return res
with Flow(name="test") as flow:
a, b = record_times(), record_times()
state = flow.run(executor=executor)
assert state.is_successful()
times = [("alice", t) for t in state.result[a].result] + [
("bob", t) for t in state.result[b].result
]
names = [name for name, time in sorted(times, key=lambda x: x[1])]
alice_first = ["alice"] * 75 + ["bob"] * 75
bob_first = ["bob"] * 75 + ["alice"] * 75
assert names != alice_first
assert names != bob_first
@pytest.mark.parametrize(
"executor", ["local", "mproc", "mthread", "sync"], indirect=True
)
def test_flow_runner_properly_provides_context_to_task_runners(executor):
@prefect.task
def my_name():
return prefect.context.get("my_name")
@prefect.task
def flow_name():
return prefect.context.get("flow_name")
flow = Flow(name="test-dummy", tasks=[flow_name, my_name])
with prefect.context(my_name="marvin"):
res = flow.run(executor=executor)
assert res.result[flow_name].result == "test-dummy"
assert res.result[my_name].result == "marvin"
with Flow("test-map") as f:
tt = flow_name.map(upstream_tasks=[my_name])
with prefect.context(my_name="mapped-marvin"):
res = f.run(executor=executor)
assert res.result[my_name].result == "mapped-marvin"
assert res.result[tt].result[0] == "test-map"
@pytest.mark.parametrize("executor", ["local", "mthread", "sync"], indirect=True)
def test_flow_runner_handles_timeouts(executor):
sleeper = SlowTask(timeout=1)
with Flow(name="test") as flow:
res = sleeper(3)
state = FlowRunner(flow=flow).run(return_tasks=[res], executor=executor)
assert state.is_failed()
assert isinstance(state.result[res], TimedOut)
assert "timed out" in state.result[res].message
assert isinstance(state.result[res].result, TimeoutError)
def test_flow_runner_handles_timeout_error_with_mproc(mproc):
sleeper = SlowTask(timeout=1)
with Flow(name="test") as flow:
res = sleeper(2)
state = FlowRunner(flow=flow).run(return_tasks=[res], executor=mproc)
assert state.is_failed()
assert isinstance(state.result[res], TimedOut)
assert isinstance(state.result[res].result, TimeoutError)
handler_results = collections.defaultdict(lambda: 0)
@pytest.fixture(autouse=True)
def clear_handler_results():
handler_results.clear()
def flow_handler(flow, old_state, new_state):
"""state change handler for flows that increments a value by 1"""
assert isinstance(flow, Flow)
assert isinstance(old_state, State)
assert isinstance(new_state, State)
handler_results["Flow"] += 1
return new_state
def flow_runner_handler(flow_runner, old_state, new_state):
"""state change handler for flow runners that increments a value by 1"""
assert isinstance(flow_runner, FlowRunner)
assert isinstance(old_state, State)
assert isinstance(new_state, State)
handler_results["FlowRunner"] += 1
return new_state
class TestFlowStateHandlers:
def test_flow_handlers_are_called(self):
flow = Flow(name="test", state_handlers=[flow_handler])
FlowRunner(flow=flow).run()
# the flow changed state twice: Pending -> Running -> Success
assert handler_results["Flow"] == 2
def test_flow_handlers_are_called_even_when_initialize_run_fails(self):
class BadRunner(FlowRunner):
def initialize_run(self, *args, **kwargs):
raise SyntaxError("bad")
def handler(runner, old, new):
handler_results["Flow"] += 1
return new
flow = Flow(name="test", state_handlers=[handler])
BadRunner(flow=flow).run()
        # the flow changed state once: Pending -> Failed
assert handler_results["Flow"] == 1
def test_flow_handlers_can_return_none(self):
flow_handler = MagicMock(side_effect=lambda t, o, n: None)
flow = Flow(name="test", state_handlers=[flow_handler])
flow_state = FlowRunner(flow=flow).run()
assert flow_state.is_successful()
# the flow changed state twice: Pending -> Running -> Success
assert flow_handler.call_count == 2
def test_flow_on_failure_is_not_called(self):
on_failure = MagicMock()
flow = Flow(name="test", on_failure=on_failure, tasks=[Task()])
FlowRunner(flow=flow).run()
assert not on_failure.called
def test_task_on_failure_is_called(self):
on_failure = MagicMock()
flow = Flow(name="test", tasks=[ErrorTask()], on_failure=on_failure)
FlowRunner(flow=flow).run()
assert on_failure.call_count == 1
assert on_failure.call_args[0][0] is flow
assert on_failure.call_args[0][1].is_failed()
def test_multiple_flow_handlers_are_called(self):
flow = Flow(name="test", state_handlers=[flow_handler, flow_handler])
FlowRunner(flow=flow).run()
        # the flow changed state twice (Pending -> Running -> Success) and both handlers ran on each change
assert handler_results["Flow"] == 4
def test_multiple_flow_handlers_are_called_in_sequence(self):
# the second flow handler will assert the result of the first flow handler is a state
# and raise an error, as long as the flow_handlers are called in sequence on the
# previous result
flow = Flow(name="test", state_handlers=[lambda *a: True, flow_handler])
with pytest.raises(AssertionError):
with prefect.utilities.debug.raise_on_exception():
FlowRunner(flow=flow).run()
def test_task_handler_that_doesnt_return_state_or_none(self):
flow = Flow(name="test", state_handlers=[lambda *a: True])
# raises an attribute error because it tries to access a property of the state that
# doesn't exist on None
with pytest.raises(AttributeError):
with prefect.utilities.debug.raise_on_exception():
FlowRunner(flow=flow).run()
class TestFlowRunnerStateHandlers:
def test_task_runner_handlers_are_called(self):
FlowRunner(flow=Flow(name="test"), state_handlers=[flow_runner_handler]).run()
# the flow changed state twice: Pending -> Running -> Success
assert handler_results["FlowRunner"] == 2
def test_multiple_task_runner_handlers_are_called(self):
FlowRunner(
flow=Flow(name="test"),
state_handlers=[flow_runner_handler, flow_runner_handler],
).run()
        # the flow changed state twice (Pending -> Running -> Success) and both runner handlers ran on each change
assert handler_results["FlowRunner"] == 4
def test_multiple_task_runner_handlers_are_called_in_sequence(self):
# the second flow handler will assert the result of the first flow handler is a state
# and raise an error, as long as the flow_handlers are called in sequence on the
# previous result
with pytest.raises(AssertionError):
with prefect.utilities.debug.raise_on_exception():
FlowRunner(
flow=Flow(name="test"),
state_handlers=[lambda *a: True, flow_runner_handler],
).run()
def test_task_runner_handler_that_doesnt_return_state_or_none(self):
# raises an attribute error because it tries to access a property of the state that
# doesn't exist on None
with pytest.raises(AttributeError):
with prefect.utilities.debug.raise_on_exception():
FlowRunner(
flow=Flow(name="test"), state_handlers=[lambda *a: True]
).run()
def test_task_handler_that_raises_signal_is_trapped(self):
def handler(flow, old, new):
raise signals.FAIL()
flow = Flow(name="test", state_handlers=[handler])
state = FlowRunner(flow=flow).run()
assert state.is_failed()
def test_task_handler_that_has_error_is_trapped(self):
def handler(flow, old, new):
1 / 0
flow = Flow(name="test", state_handlers=[handler])
state = FlowRunner(flow=flow).run()
assert state.is_failed()
def test_improper_use_of_unmapped_fails_gracefully():
add = AddTask()
x = Parameter("x", default=[1, 2, 3])
with Flow(name="test") as f:
res = add.map(
x, y=prefect.tasks.core.constants.Constant(8)
) # incorrect, should use `unmapped`
state = FlowRunner(flow=f).run(return_tasks=f.tasks)
assert state.is_failed()
# make sure tasks were still returned with the correct states
x_state = state.result.pop(x)
res_state = state.result.pop(res)
y_state = state.result.popitem()[1]
assert x_state.is_successful()
assert x_state.result == [1, 2, 3]
assert y_state.is_successful()
assert y_state.result == 8
assert res_state.is_failed()
def test_all_pipeline_method_steps_are_called():
pipeline = [
"initialize_run",
"check_flow_is_pending_or_running",
"set_flow_to_running",
"get_flow_run_state",
]
runner = FlowRunner(Flow(name="test"))
for method in pipeline:
setattr(runner, method, MagicMock())
    # initialize_run's result is unpacked, which MagicMocks don't support
runner.initialize_run = MagicMock(
return_value=FlowRunnerInitializeResult(
MagicMock(), MagicMock(), MagicMock(), MagicMock()
)
)
runner.run()
for method in pipeline:
assert getattr(runner, method).call_count == 1
def test_endrun_raised_in_initialize_is_caught_correctly():
class BadInitializeRunner(FlowRunner):
def initialize_run(self, *args, **kwargs):
raise ENDRUN(state=Pending())
res = BadInitializeRunner(Flow(name="test")).run()
assert res.is_pending()
def test_task_runner_cls_uses_default_function_if_none():
fr = FlowRunner(flow=None, task_runner_cls=None)
assert fr.task_runner_cls is prefect.engine.get_default_task_runner_class()
with prefect.utilities.configuration.set_temporary_config(
{"engine.task_runner.default_class": "prefect.engine.cloud.CloudTaskRunner"}
):
fr = FlowRunner(flow=None, task_runner_cls=None)
assert fr.task_runner_cls is prefect.engine.get_default_task_runner_class()
def test_flow_run_uses_default_flow_runner(monkeypatch):
x = MagicMock()
monkeypatch.setattr("prefect.engine.flow_runner.FlowRunner", x)
with prefect.utilities.configuration.set_temporary_config(
{"engine.flow_runner.default_class": "prefect.engine.x"}
):
with pytest.warns(UserWarning):
Flow(name="test").run()
assert x.call_count == 1
def test_parameters_can_be_set_in_context_if_none_passed():
x = prefect.Parameter("x")
f = FlowRunner(Flow(name="test", tasks=[x]))
state = f.run(parameters={}, context={"parameters": {"x": 5}}, return_tasks=[x])
assert state.result[x].result == 5
def test_parameters_overwrite_context():
x = prefect.Parameter("x")
f = FlowRunner(Flow(name="test", tasks=[x]))
state = f.run(
parameters={"x": 2}, context={"parameters": {"x": 5}}, return_tasks=[x]
)
assert state.result[x].result == 2
def test_parameters_overwrite_context_only_if_key_matches():
x = prefect.Parameter("x")
y = prefect.Parameter("y")
f = FlowRunner(Flow(name="test", tasks=[x, y]))
state = f.run(
parameters={"x": 2},
context={"parameters": {"x": 5, "y": 6}},
return_tasks=[x, y],
)
assert state.result[x].result == 2
assert state.result[y].result == 6
class TestMapping:
@pytest.mark.parametrize(
"executor", ["local", "mthread", "mproc", "sync"], indirect=True
)
def test_terminal_mapped_states_are_used_for_flow_state(self, executor):
with Flow(name="test") as flow:
res = ReturnTask().map([0, 1])
state = FlowRunner(flow=flow).run(return_tasks=[res], executor=executor)
assert state.is_failed()
assert state.result[res].map_states[0].is_successful()
assert state.result[res].map_states[1].is_failed()
@pytest.mark.parametrize(
"executor", ["local", "mthread", "mproc", "sync"], indirect=True
)
def test_mapped_will_use_existing_map_states_if_available(self, executor):
with Flow(name="test") as flow:
res = ReturnTask().map([0, 1])
state = FlowRunner(flow=flow).run(
return_tasks=[res],
executor=executor,
task_states={res: Mapped(map_states=[Success(), Success(result=100)])},
)
assert state.is_successful()
assert state.result[res].map_states[1].is_successful()
assert state.result[res].map_states[1].result == 100
@pytest.mark.parametrize(
"executor", ["local", "mthread", "mproc", "sync"], indirect=True
)
def test_mapped_will_use_partial_existing_map_states_if_available(self, executor):
with Flow(name="test") as flow:
res = ReturnTask().map([1, 1])
state = FlowRunner(flow=flow).run(
return_tasks=[res],
executor=executor,
task_states={res: Mapped(map_states=[None, Success(result=100)])},
)
assert state.is_failed()
assert state.result[res].map_states[0].is_failed()
assert state.result[res].map_states[1].is_successful()
assert state.result[res].map_states[1].result == 100
@pytest.mark.parametrize(
"executor", ["local", "mthread", "mproc", "sync"], indirect=True
)
def test_mapped_tasks_dont_run_if_upstream_pending(self, executor):
with Flow(name="test") as flow:
ups = SuccessTask()
res = ReturnTask().map([ups])
state = FlowRunner(flow=flow).run(
return_tasks=flow.tasks,
executor=executor,
task_states={ups: Retrying(start_time=pendulum.now().add(hours=1))},
)
assert state.is_running()
assert state.result[ups].is_pending()
assert state.result[res].is_pending()
@pytest.mark.parametrize(
"executor", ["local", "mthread", "mproc", "sync"], indirect=True
)
def test_mapped_task_can_be_scheduled(self, executor):
with Flow(name="test") as flow:
res = ReturnTask().map([0, 0])
state = FlowRunner(flow=flow).run(
return_tasks=[res],
executor=executor,
task_states={res: Scheduled(start_time=pendulum.now().subtract(minutes=1))},
)
assert state.is_successful()
@pytest.mark.parametrize(
"executor", ["local", "mthread", "mproc", "sync"], indirect=True
)
def test_mapped_task_can_be_scheduled_for_future(self, executor):
with Flow(name="test") as flow:
res = ReturnTask().map([0, 0])
state = FlowRunner(flow=flow).run(
return_tasks=[res],
executor=executor,
task_states={res: Scheduled(start_time=pendulum.now().add(hours=1))},
)
assert state.is_running()
assert isinstance(state.result[res], Scheduled)
def test_task_contexts_are_provided_to_tasks():
@prefect.task(name="rc", slug="rc")
def return_context():
return prefect.context.to_dict()
with Flow(name="test") as flow:
rc = return_context()
state = FlowRunner(flow=flow).run(return_tasks=[rc])
ctx = state.result[rc].result
assert ctx["task_name"] == rc.name
assert ctx["task_slug"] == rc.slug
def test_paused_tasks_stay_paused_when_run():
t = Task()
f = Flow(name="test", tasks=[t])
state = FlowRunner(flow=f).run(task_states={t: Paused()}, return_tasks=[t])
assert state.is_running()
assert isinstance(state.result[t], Paused)
class TestContext:
def test_flow_runner_passes_along_its_run_context_to_tasks(self):
@prefect.task
def grab_key():
return prefect.context["THE_ANSWER"]
with prefect.context(THE_ANSWER=42):
runner = FlowRunner(Flow(name="test", tasks=[grab_key]))
flow_state = runner.run(return_tasks=[grab_key])
assert flow_state.is_successful()
assert flow_state.result[grab_key].result == 42
def test_flow_runner_provides_scheduled_start_time(self):
@prefect.task
def return_scheduled_start_time():
return prefect.context.get("scheduled_start_time")
f = Flow(name="test", tasks=[return_scheduled_start_time])
res = f.run()
assert res.is_successful()
assert res.result[return_scheduled_start_time].is_successful()
assert isinstance(
res.result[return_scheduled_start_time].result, datetime.datetime
)
@pytest.mark.parametrize("run_on_schedule", [True, False])
def test_flow_runner_doesnt_override_scheduled_start_time_when_running_on_schedule(
self, run_on_schedule
):
@prefect.task
def return_scheduled_start_time():
return prefect.context.get("scheduled_start_time")
f = Flow(name="test", tasks=[return_scheduled_start_time])
res = f.run(
context=dict(scheduled_start_time=42), run_on_schedule=run_on_schedule
)
assert res.is_successful()
assert res.result[return_scheduled_start_time].result != 42
@pytest.mark.parametrize(
"date", ["today_nodash", "tomorrow_nodash", "yesterday_nodash"]
)
def test_context_contains_nodash_date_formats(self, date):
@prefect.task
def return_ctx_key():
return prefect.context.get(date)
f = Flow(name="test", tasks=[return_ctx_key])
res = f.run()
assert res.is_successful()
output = res.result[return_ctx_key].result
assert isinstance(output, str)
assert len(output) == 8
@pytest.mark.parametrize("date", ["today", "tomorrow", "yesterday"])
def test_context_contains_date_formats(self, date):
@prefect.task
def return_ctx_key():
return prefect.context.get(date)
f = Flow(name="test", tasks=[return_ctx_key])
res = f.run()
assert res.is_successful()
output = res.result[return_ctx_key].result
assert isinstance(output, str)
assert len(output) == 10
def test_context_includes_date(self):
@prefect.task
def return_ctx_key():
return prefect.context.get("date")
f = Flow(name="test", tasks=[return_ctx_key])
res = f.run()
assert res.is_successful()
output = res.result[return_ctx_key].result
assert isinstance(output, datetime.datetime)
def test_user_provided_context_is_prioritized(self):
@prefect.task
def return_ctx_key():
return prefect.context.get("date")
f = Flow(name="test", tasks=[return_ctx_key])
res = f.run(context={"date": "42"})
assert res.is_successful()
output = res.result[return_ctx_key].result
assert output == "42"
@pytest.mark.parametrize(
"executor", ["local", "sync", "mproc", "mthread"], indirect=True
)
def test_task_logs_survive_if_timeout_is_used(caplog, executor):
@prefect.task(timeout=2)
def log_stuff():
logger = prefect.context.get("logger")
logger.critical("important log right here")
f = Flow(name="logs", tasks=[log_stuff])
res = f.run()
assert res.is_successful()
assert len([r for r in caplog.records if r.levelname == "CRITICAL"]) == 1
def test_task_runners_submitted_to_remote_machines_respect_original_config(monkeypatch):
"""
This test is meant to simulate the behavior of running a Cloud Flow against an external
cluster which has _not_ been configured for Prefect. The idea is that the configuration
settings which were present on the original machine are respected in the remote job, reflected
here by having the CloudHandler called during logging and the special values present in context.
"""
from prefect.engine.flow_runner import run_task
def my_run_task(*args, **kwargs):
with prefect.utilities.configuration.set_temporary_config(
{"logging.log_to_cloud": False, "cloud.auth_token": ""}
):
return run_task(*args, **kwargs)
calls = []
class Client:
def write_run_logs(self, *args, **kwargs):
calls.append(args)
monkeypatch.setattr("prefect.engine.flow_runner.run_task", my_run_task)
monkeypatch.setattr("prefect.client.Client", Client)
@prefect.task
def log_stuff():
logger = prefect.context.get("logger")
logger.critical("important log right here")
return (
prefect.context.config.special_key,
prefect.context.config.cloud.auth_token,
)
with prefect.utilities.configuration.set_temporary_config(
{
"logging.log_to_cloud": True,
"special_key": 42,
"cloud.auth_token": "original",
}
):
# captures config at init
flow = Flow("test", tasks=[log_stuff])
flow_state = flow.run(task_contexts={log_stuff: dict(special_key=99)})
assert flow_state.is_successful()
assert flow_state.result[log_stuff].result == (42, "original")
time.sleep(0.75)
assert len(calls) >= 1
assert len([log for call in calls for log in call[0]]) == 6 # actual number of logs
loggers = [log["name"] for call in calls for log in call[0]]
assert set(loggers) == {
"prefect.TaskRunner",
"prefect.FlowRunner",
"prefect.log_stuff",
}
def test_constant_tasks_arent_submitted(caplog):
calls = []
class TrackSubmissions(LocalExecutor):
def submit(self, *args, **kwargs):
calls.append(kwargs)
return super().submit(*args, **kwargs)
@prefect.task
def add(x):
return x + 1
with Flow("constants") as flow:
output = add(5)
runner = FlowRunner(flow=flow)
flow_state = runner.run(return_tasks=[output], executor=TrackSubmissions())
assert flow_state.is_successful()
assert flow_state.result[output].result == 6
## only add was submitted
assert len(calls) == 1
## to be safe, ensure '5' isn't in the logs
assert len([log.message for log in caplog.records if "5" in log.message]) == 0
def test_constant_tasks_arent_submitted_when_mapped(caplog):
calls = []
class TrackSubmissions(LocalExecutor):
def submit(self, *args, **kwargs):
calls.append(kwargs)
return super().submit(*args, **kwargs)
@prefect.task
def add(x):
return x + 1
with Flow("constants") as flow:
output = add.map([99] * 10)
runner = FlowRunner(flow=flow)
flow_state = runner.run(return_tasks=[output], executor=TrackSubmissions())
assert flow_state.is_successful()
assert flow_state.result[output].result == [100] * 10
    ## the add task was submitted 11 times: once for the parent and once for each of the 10 mapped children
assert len(calls) == 11
    ## to be safe, ensure '99' isn't in the logs
assert len([log.message for log in caplog.records if "99" in log.message]) == 0
| 33.408124 | 123 | 0.656187 |
294045377a0564537638366a8a114a1a4b987573 | 701 | py | Python |
setup.py | tushar2488/sos | d97e57453f98394a0957a5a00675b44b3e3c37df | ["MIT"] | null | null | null |
setup.py | tushar2488/sos | d97e57453f98394a0957a5a00675b44b3e3c37df | ["MIT"] | null | null | null |
setup.py | tushar2488/sos | d97e57453f98394a0957a5a00675b44b3e3c37df | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import re, ast
with open('requirements.txt') as f:
install_requires = f.read().strip().split('\n')
# get version from __version__ variable in sos/__init__.py
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('sos/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
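# Illustrative note (added for clarity; not part of the original file): the regex above
# expects sos/__init__.py to contain a line like the hypothetical one below;
# ast.literal_eval then turns the matched string literal into a plain str.
#
#     __version__ = '0.0.1'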
setup(
name='sos',
version=version,
    description='This app was created for SOS Custom docs',
author='Youtility Technologies Pvt. Ltd',
author_email='tushar.tajne@youtility.in',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=install_requires
)
| 26.961538 | 58 | 0.730385 |
290aabaee00322eff09521619fcfdf5d516c1d3c | 4,706 | py | Python |
src/util/graph_definition.py | imatge-upc/skiprnn-2017-tfm | 63f93a539a3f2c7a713089fdd2c38bb7b0c581ca | ["MIT"] | 129 | 2017-08-24T00:27:47.000Z | 2022-03-24T21:42:37.000Z |
src/util/graph_definition.py | imatge-upc/skiprnn-2018-iclr | 63f93a539a3f2c7a713089fdd2c38bb7b0c581ca | ["MIT"] | 8 | 2018-02-28T15:05:48.000Z | 2022-02-09T23:30:21.000Z |
src/util/graph_definition.py | imatge-upc/skiprnn-2018-iclr | 63f93a539a3f2c7a713089fdd2c38bb7b0c581ca | ["MIT"] | 45 | 2017-08-24T13:16:11.000Z | 2021-05-13T02:36:59.000Z |
"""
Graph creation functions.
"""
from __future__ import print_function
from __future__ import absolute_import
import tensorflow as tf
from rnn_cells.basic_rnn_cells import BasicLSTMCell, BasicGRUCell
from rnn_cells.skip_rnn_cells import SkipLSTMCell, MultiSkipLSTMCell
from rnn_cells.skip_rnn_cells import SkipGRUCell, MultiSkipGRUCell
def create_generic_flags():
"""
Create flags which are shared by all experiments
"""
# Generic flags
tf.app.flags.DEFINE_string('model', 'lstm', "Select RNN cell: {lstm, gru, skip_lstm, skip_gru}")
tf.app.flags.DEFINE_integer("rnn_cells", 110, "Number of RNN cells.")
tf.app.flags.DEFINE_integer("rnn_layers", 1, "Number of RNN layers.")
tf.app.flags.DEFINE_integer('batch_size', 256, "Batch size.")
tf.app.flags.DEFINE_float('learning_rate', 0.0001, "Learning rate.")
tf.app.flags.DEFINE_float('grad_clip', 1., "Clip gradients at this value. Set to <=0 to disable clipping.")
tf.app.flags.DEFINE_string('logdir', '../logs', "Directory where TensorBoard logs will be stored.")
# Flags for the Skip RNN cells
tf.app.flags.DEFINE_float('cost_per_sample', 0., "Cost per used sample. Set to 0 to disable this option.")
def compute_gradients(loss, learning_rate, gradient_clipping=-1):
"""
Create optimizer, compute gradients and (optionally) apply gradient clipping
"""
opt = tf.train.AdamOptimizer(learning_rate)
if gradient_clipping > 0:
vars_to_optimize = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(loss, vars_to_optimize), clip_norm=gradient_clipping)
grads_and_vars = list(zip(grads, vars_to_optimize))
else:
grads_and_vars = opt.compute_gradients(loss)
return opt, grads_and_vars
def create_model(model, num_cells, batch_size, learn_initial_state=True):
"""
Returns a tuple of (cell, initial_state) to use with dynamic_rnn.
If num_cells is an integer, a single RNN cell will be created. If it is a list, a stack of len(num_cells)
cells will be created.
"""
    if model not in ['lstm', 'gru', 'skip_lstm', 'skip_gru']:
raise ValueError('The specified model is not supported. Please use {lstm, gru, skip_lstm, skip_gru}.')
if isinstance(num_cells, list) and len(num_cells) > 1:
if model == 'skip_lstm':
cells = MultiSkipLSTMCell(num_cells)
elif model == 'skip_gru':
cells = MultiSkipGRUCell(num_cells)
elif model == 'lstm':
cell_list = [BasicLSTMCell(n) for n in num_cells]
cells = tf.contrib.rnn.MultiRNNCell(cell_list)
elif model == 'gru':
cell_list = [BasicGRUCell(n) for n in num_cells]
cells = tf.contrib.rnn.MultiRNNCell(cell_list)
if learn_initial_state:
if model == 'skip_lstm' or model == 'skip_gru':
initial_state = cells.trainable_initial_state(batch_size)
else:
initial_state = []
for idx, cell in enumerate(cell_list):
with tf.variable_scope('layer_%d' % (idx + 1)):
initial_state.append(cell.trainable_initial_state(batch_size))
initial_state = tuple(initial_state)
else:
initial_state = None
return cells, initial_state
else:
if isinstance(num_cells, list):
num_cells = num_cells[0]
if model == 'skip_lstm':
cell = SkipLSTMCell(num_cells)
elif model == 'skip_gru':
cell = SkipGRUCell(num_cells)
elif model == 'lstm':
cell = BasicLSTMCell(num_cells)
elif model == 'gru':
cell = BasicGRUCell(num_cells)
if learn_initial_state:
initial_state = cell.trainable_initial_state(batch_size)
else:
initial_state = None
return cell, initial_state
def using_skip_rnn(model):
"""
    Helper function determining whether a Skip RNN model is being used
"""
return model.lower() == 'skip_lstm' or model.lower() == 'skip_gru'
def split_rnn_outputs(model, rnn_outputs):
"""
Split the output of dynamic_rnn into the actual RNN outputs and the state update gate
"""
if using_skip_rnn(model):
return rnn_outputs.h, rnn_outputs.state_gate
else:
return rnn_outputs, tf.no_op()
def compute_budget_loss(model, loss, updated_states, cost_per_sample):
"""
Compute penalization term on the number of updated states (i.e. used samples)
"""
if using_skip_rnn(model):
return tf.reduce_mean(tf.reduce_sum(cost_per_sample * updated_states, 1), 0)
else:
return tf.zeros(loss.get_shape())
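# --- Illustrative usage sketch (added for clarity; not part of the original file) ---
# Wires the helpers above together: build a cell with create_model, unroll it with
# tf.nn.dynamic_rnn, split the Skip RNN outputs, and add the budget penalty to the task
# loss. The input/label placeholders, the dense head and every hyper-parameter value
# below are assumptions for illustration only.
def _example_skip_rnn_graph(batch_size=256, num_features=10, num_classes=2):
    """Sketch of a small training graph built from the helpers in this module."""
    inputs = tf.placeholder(tf.float32, [batch_size, None, num_features])
    labels = tf.placeholder(tf.int64, [batch_size])
    cells, initial_state = create_model('skip_lstm', [110], batch_size)
    rnn_outputs, _ = tf.nn.dynamic_rnn(cells, inputs, initial_state=initial_state)
    outputs, updated_states = split_rnn_outputs('skip_lstm', rnn_outputs)
    logits = tf.layers.dense(outputs[:, -1, :], num_classes)  # hypothetical task head
    loss = tf.reduce_mean(
        tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits))
    loss += compute_budget_loss('skip_lstm', loss, updated_states, cost_per_sample=1e-4)
    opt, grads_and_vars = compute_gradients(loss, learning_rate=1e-4, gradient_clipping=1.)
    return opt.apply_gradients(grads_and_vars)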
| 38.892562 | 111 | 0.664046 |
744c8f6c5d7b7314da0d32771da32d0c43e4f5e3 | 2,247 | py | Python |
services/web/src/auth/forms.py | dmenezesgabriel/flask-notes | 6a18f23e841b7d8ff641c0840f4cb9814dbcccb7 | ["MIT"] | null | null | null |
services/web/src/auth/forms.py | dmenezesgabriel/flask-notes | 6a18f23e841b7d8ff641c0840f4cb9814dbcccb7 | ["MIT"] | null | null | null |
services/web/src/auth/forms.py | dmenezesgabriel/flask-notes | 6a18f23e841b7d8ff641c0840f4cb9814dbcccb7 | ["MIT"] | null | null | null |
from flask_wtf import FlaskForm
from flask_babel import lazy_gettext as _l
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import (
DataRequired, length, EqualTo, Email, ValidationError)
from src.models import User
class Register(FlaskForm):
username = StringField(
_l('Username'), validators=[DataRequired(), length(min=3, max=20)])
email = StringField(
_l('Email'), validators=[DataRequired(), Email()]
)
password = PasswordField(
_l('Password'), validators=[DataRequired(), length(min=3, max=20)])
confirm_password = PasswordField(
_l('Confirm Password'),
validators=[DataRequired(), EqualTo('password')]
)
submit = SubmitField(_l('Submit'))
# When you add any methods that match the pattern validate_<field_name>,
# WTForms takes those as custom validators and invokes them in addition
# to the stock validators.
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError(_l('Please use a different username.'))
def validate_email(self, email):
user = User.query.filter_by(email=email.data).first()
if user is not None:
raise ValidationError(_l('Please use a different email address.'))
class Login(FlaskForm):
username = StringField(
_l('Username'), validators=[DataRequired(), length(min=3, max=20)])
password = PasswordField(
_l('Password'), validators=[DataRequired(), length(min=3, max=20)])
remember_me = BooleanField(_l('Remember Me'))
submit = SubmitField(_l('Submit'))
class ResetPasswordRequestForm(FlaskForm):
email = StringField(_l('Email'), validators=[DataRequired(), Email()])
submit = SubmitField(_l('Request Password Reset'))
class ResetPasswordForm(FlaskForm):
password = PasswordField(
_l('Password'), validators=[DataRequired(), length(min=3, max=20)])
confirm_password = PasswordField(
_l('Confirm Password'),
validators=[DataRequired(), EqualTo('password')]
)
    submit = SubmitField(_l('Request Password Reset'))
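# --- Illustrative usage sketch (added for clarity; not part of the original file) ---
# How the validate_<field_name> convention above is exercised in a view: Flask-WTF's
# validate_on_submit() runs the stock validators and then the custom validate_username /
# validate_email methods. The blueprint, template name and redirect below are assumptions.
#
# @auth.route('/register', methods=['GET', 'POST'])
# def register():
#     form = Register()
#     if form.validate_on_submit():
#         ...  # create the user, commit, then redirect to the login page
#     return render_template('register.html', form=form)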
| 37.45 | 78 | 0.684913 |
ca767602b03ef6be32e9c7e13c362924c5d02091 | 1,116 | py | Python |
ex24.py | thinmarwin/python-exercises | 2d8ccdf9b0fcf73802b161ca31dd0428e92bbc66 | ["MIT"] | null | null | null |
ex24.py | thinmarwin/python-exercises | 2d8ccdf9b0fcf73802b161ca31dd0428e92bbc66 | ["MIT"] | null | null | null |
ex24.py | thinmarwin/python-exercises | 2d8ccdf9b0fcf73802b161ca31dd0428e92bbc66 | ["MIT"] | null | null | null |
print("Let's priactive everything.")
print('You\'d need to know \'bout escapes with \\ that do:')
print('\n newlines and \t tabs.')
poem="""
\tThe lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print("----------")
print(poem)
print("----------")
five = 10 - 2 + 3 - 6
print(f"This should be five: {five}")
def secret_formula(started):
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
# remember that this is another way to format a string
print("With a starting point of: {}".format(start_point))
# it's just like with an f"" string
print(f"We'd have {beans} beans, {jars} jars, and {crates} crates.")
start_point = start_point / 10
print("We can also do that this way:")
formula = secret_formula(start_point)
# this is an easy way to unpack a sequence into a format string
print("We'd have {} beans, {} jars, and {} crates.".format(*formula))
| 27.219512 | 69 | 0.693548 |
62a993455b9be52df51eee21ca3a37bb45cad3d3 | 30,700 | py | Python |
sdk/python/pulumi_azure_native/network/v20191101/public_ip_address.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | ["Apache-2.0"] | null | null | null |
sdk/python/pulumi_azure_native/network/v20191101/public_ip_address.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | ["Apache-2.0"] | null | null | null |
sdk/python/pulumi_azure_native/network/v20191101/public_ip_address.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['PublicIPAddressInitArgs', 'PublicIPAddress']
@pulumi.input_type
class PublicIPAddressInitArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
ddos_settings: Optional[pulumi.Input['DdosSettingsArgs']] = None,
dns_settings: Optional[pulumi.Input['PublicIPAddressDnsSettingsArgs']] = None,
id: Optional[pulumi.Input[str]] = None,
idle_timeout_in_minutes: Optional[pulumi.Input[int]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
ip_tags: Optional[pulumi.Input[Sequence[pulumi.Input['IpTagArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
public_ip_address_version: Optional[pulumi.Input[Union[str, 'IPVersion']]] = None,
public_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
public_ip_prefix: Optional[pulumi.Input['SubResourceArgs']] = None,
public_ip_address_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input['PublicIPAddressSkuArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a PublicIPAddress resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input['DdosSettingsArgs'] ddos_settings: The DDoS protection custom policy associated with the public IP address.
:param pulumi.Input['PublicIPAddressDnsSettingsArgs'] dns_settings: The FQDN of the DNS record associated with the public IP address.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[int] idle_timeout_in_minutes: The idle timeout of the public IP address.
:param pulumi.Input[str] ip_address: The IP address associated with the public IP address resource.
:param pulumi.Input[Sequence[pulumi.Input['IpTagArgs']]] ip_tags: The list of tags associated with the public IP address.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[Union[str, 'IPVersion']] public_ip_address_version: The public IP address version.
:param pulumi.Input[Union[str, 'IPAllocationMethod']] public_ip_allocation_method: The public IP address allocation method.
:param pulumi.Input['SubResourceArgs'] public_ip_prefix: The Public IP Prefix this Public IP Address should be allocated from.
:param pulumi.Input[str] public_ip_address_name: The name of the public IP address.
:param pulumi.Input['PublicIPAddressSkuArgs'] sku: The public IP address SKU.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[Sequence[pulumi.Input[str]]] zones: A list of availability zones denoting the IP allocated for the resource needs to come from.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if ddos_settings is not None:
pulumi.set(__self__, "ddos_settings", ddos_settings)
if dns_settings is not None:
pulumi.set(__self__, "dns_settings", dns_settings)
if id is not None:
pulumi.set(__self__, "id", id)
if idle_timeout_in_minutes is not None:
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if ip_tags is not None:
pulumi.set(__self__, "ip_tags", ip_tags)
if location is not None:
pulumi.set(__self__, "location", location)
if public_ip_address_version is not None:
pulumi.set(__self__, "public_ip_address_version", public_ip_address_version)
if public_ip_allocation_method is not None:
pulumi.set(__self__, "public_ip_allocation_method", public_ip_allocation_method)
if public_ip_prefix is not None:
pulumi.set(__self__, "public_ip_prefix", public_ip_prefix)
if public_ip_address_name is not None:
pulumi.set(__self__, "public_ip_address_name", public_ip_address_name)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if zones is not None:
pulumi.set(__self__, "zones", zones)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="ddosSettings")
def ddos_settings(self) -> Optional[pulumi.Input['DdosSettingsArgs']]:
"""
The DDoS protection custom policy associated with the public IP address.
"""
return pulumi.get(self, "ddos_settings")
@ddos_settings.setter
def ddos_settings(self, value: Optional[pulumi.Input['DdosSettingsArgs']]):
pulumi.set(self, "ddos_settings", value)
@property
@pulumi.getter(name="dnsSettings")
def dns_settings(self) -> Optional[pulumi.Input['PublicIPAddressDnsSettingsArgs']]:
"""
The FQDN of the DNS record associated with the public IP address.
"""
return pulumi.get(self, "dns_settings")
@dns_settings.setter
def dns_settings(self, value: Optional[pulumi.Input['PublicIPAddressDnsSettingsArgs']]):
pulumi.set(self, "dns_settings", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[pulumi.Input[int]]:
"""
The idle timeout of the public IP address.
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@idle_timeout_in_minutes.setter
def idle_timeout_in_minutes(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "idle_timeout_in_minutes", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address associated with the public IP address resource.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter(name="ipTags")
def ip_tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IpTagArgs']]]]:
"""
The list of tags associated with the public IP address.
"""
return pulumi.get(self, "ip_tags")
@ip_tags.setter
def ip_tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IpTagArgs']]]]):
pulumi.set(self, "ip_tags", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="publicIPAddressVersion")
def public_ip_address_version(self) -> Optional[pulumi.Input[Union[str, 'IPVersion']]]:
"""
The public IP address version.
"""
return pulumi.get(self, "public_ip_address_version")
@public_ip_address_version.setter
def public_ip_address_version(self, value: Optional[pulumi.Input[Union[str, 'IPVersion']]]):
pulumi.set(self, "public_ip_address_version", value)
@property
@pulumi.getter(name="publicIPAllocationMethod")
def public_ip_allocation_method(self) -> Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]:
"""
The public IP address allocation method.
"""
return pulumi.get(self, "public_ip_allocation_method")
@public_ip_allocation_method.setter
def public_ip_allocation_method(self, value: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]]):
pulumi.set(self, "public_ip_allocation_method", value)
@property
@pulumi.getter(name="publicIPPrefix")
def public_ip_prefix(self) -> Optional[pulumi.Input['SubResourceArgs']]:
"""
The Public IP Prefix this Public IP Address should be allocated from.
"""
return pulumi.get(self, "public_ip_prefix")
@public_ip_prefix.setter
def public_ip_prefix(self, value: Optional[pulumi.Input['SubResourceArgs']]):
pulumi.set(self, "public_ip_prefix", value)
@property
@pulumi.getter(name="publicIpAddressName")
def public_ip_address_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the public IP address.
"""
return pulumi.get(self, "public_ip_address_name")
@public_ip_address_name.setter
def public_ip_address_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "public_ip_address_name", value)
@property
@pulumi.getter
def sku(self) -> Optional[pulumi.Input['PublicIPAddressSkuArgs']]:
"""
The public IP address SKU.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: Optional[pulumi.Input['PublicIPAddressSkuArgs']]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of availability zones denoting the IP allocated for the resource needs to come from.
"""
return pulumi.get(self, "zones")
@zones.setter
def zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "zones", value)
class PublicIPAddress(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
ddos_settings: Optional[pulumi.Input[pulumi.InputType['DdosSettingsArgs']]] = None,
dns_settings: Optional[pulumi.Input[pulumi.InputType['PublicIPAddressDnsSettingsArgs']]] = None,
id: Optional[pulumi.Input[str]] = None,
idle_timeout_in_minutes: Optional[pulumi.Input[int]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
ip_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpTagArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
public_ip_address_version: Optional[pulumi.Input[Union[str, 'IPVersion']]] = None,
public_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
public_ip_prefix: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
public_ip_address_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['PublicIPAddressSkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
Public IP address resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['DdosSettingsArgs']] ddos_settings: The DDoS protection custom policy associated with the public IP address.
:param pulumi.Input[pulumi.InputType['PublicIPAddressDnsSettingsArgs']] dns_settings: The FQDN of the DNS record associated with the public IP address.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[int] idle_timeout_in_minutes: The idle timeout of the public IP address.
:param pulumi.Input[str] ip_address: The IP address associated with the public IP address resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpTagArgs']]]] ip_tags: The list of tags associated with the public IP address.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[Union[str, 'IPVersion']] public_ip_address_version: The public IP address version.
:param pulumi.Input[Union[str, 'IPAllocationMethod']] public_ip_allocation_method: The public IP address allocation method.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] public_ip_prefix: The Public IP Prefix this Public IP Address should be allocated from.
:param pulumi.Input[str] public_ip_address_name: The name of the public IP address.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[pulumi.InputType['PublicIPAddressSkuArgs']] sku: The public IP address SKU.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[Sequence[pulumi.Input[str]]] zones: A list of availability zones denoting the IP allocated for the resource needs to come from.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PublicIPAddressInitArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Public IP address resource.
:param str resource_name: The name of the resource.
:param PublicIPAddressInitArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PublicIPAddressInitArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
ddos_settings: Optional[pulumi.Input[pulumi.InputType['DdosSettingsArgs']]] = None,
dns_settings: Optional[pulumi.Input[pulumi.InputType['PublicIPAddressDnsSettingsArgs']]] = None,
id: Optional[pulumi.Input[str]] = None,
idle_timeout_in_minutes: Optional[pulumi.Input[int]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
ip_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpTagArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
public_ip_address_version: Optional[pulumi.Input[Union[str, 'IPVersion']]] = None,
public_ip_allocation_method: Optional[pulumi.Input[Union[str, 'IPAllocationMethod']]] = None,
public_ip_prefix: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
public_ip_address_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['PublicIPAddressSkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PublicIPAddressInitArgs.__new__(PublicIPAddressInitArgs)
__props__.__dict__["ddos_settings"] = ddos_settings
__props__.__dict__["dns_settings"] = dns_settings
__props__.__dict__["id"] = id
__props__.__dict__["idle_timeout_in_minutes"] = idle_timeout_in_minutes
__props__.__dict__["ip_address"] = ip_address
__props__.__dict__["ip_tags"] = ip_tags
__props__.__dict__["location"] = location
__props__.__dict__["public_ip_address_version"] = public_ip_address_version
__props__.__dict__["public_ip_allocation_method"] = public_ip_allocation_method
__props__.__dict__["public_ip_prefix"] = public_ip_prefix
__props__.__dict__["public_ip_address_name"] = public_ip_address_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["sku"] = sku
__props__.__dict__["tags"] = tags
__props__.__dict__["zones"] = zones
__props__.__dict__["etag"] = None
__props__.__dict__["ip_configuration"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["resource_guid"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20191101:PublicIPAddress"), pulumi.Alias(type_="azure-native:network:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20150501preview:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20150615:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20150615:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20160330:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20160330:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20160601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20160601:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20160901:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20160901:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20161201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20161201:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20170301:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170301:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20170601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170601:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20170801:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170801:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20170901:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20170901:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20171001:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20171001:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20171101:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20171101:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20180101:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180101:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20180201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180201:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20180401:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180401:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20180601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180601:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20180701:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180701:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20180801:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20180801:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20181001:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20181001:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20181101:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20181101:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20181201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20181201:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20190201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190201:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20190401:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190401:PublicIPAddress"), 
pulumi.Alias(type_="azure-native:network/v20190601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190601:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20190701:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190701:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20190801:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190801:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20190901:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20190901:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20191201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20191201:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20200301:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200301:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20200401:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200401:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20200501:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200501:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20200601:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200601:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20200701:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200701:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20200801:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20200801:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20201101:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20201101:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20210201:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20210201:PublicIPAddress"), pulumi.Alias(type_="azure-native:network/v20210301:PublicIPAddress"), pulumi.Alias(type_="azure-nextgen:network/v20210301:PublicIPAddress")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(PublicIPAddress, __self__).__init__(
'azure-native:network/v20191101:PublicIPAddress',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'PublicIPAddress':
"""
Get an existing PublicIPAddress resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = PublicIPAddressInitArgs.__new__(PublicIPAddressInitArgs)
__props__.__dict__["ddos_settings"] = None
__props__.__dict__["dns_settings"] = None
__props__.__dict__["etag"] = None
__props__.__dict__["idle_timeout_in_minutes"] = None
__props__.__dict__["ip_address"] = None
__props__.__dict__["ip_configuration"] = None
__props__.__dict__["ip_tags"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["public_ip_address_version"] = None
__props__.__dict__["public_ip_allocation_method"] = None
__props__.__dict__["public_ip_prefix"] = None
__props__.__dict__["resource_guid"] = None
__props__.__dict__["sku"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["zones"] = None
return PublicIPAddress(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="ddosSettings")
def ddos_settings(self) -> pulumi.Output[Optional['outputs.DdosSettingsResponse']]:
"""
The DDoS protection custom policy associated with the public IP address.
"""
return pulumi.get(self, "ddos_settings")
@property
@pulumi.getter(name="dnsSettings")
def dns_settings(self) -> pulumi.Output[Optional['outputs.PublicIPAddressDnsSettingsResponse']]:
"""
The FQDN of the DNS record associated with the public IP address.
"""
return pulumi.get(self, "dns_settings")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> pulumi.Output[Optional[int]]:
"""
The idle timeout of the public IP address.
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> pulumi.Output[Optional[str]]:
"""
The IP address associated with the public IP address resource.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="ipConfiguration")
def ip_configuration(self) -> pulumi.Output['outputs.IPConfigurationResponse']:
"""
The IP configuration associated with the public IP address.
"""
return pulumi.get(self, "ip_configuration")
@property
@pulumi.getter(name="ipTags")
def ip_tags(self) -> pulumi.Output[Optional[Sequence['outputs.IpTagResponse']]]:
"""
The list of tags associated with the public IP address.
"""
return pulumi.get(self, "ip_tags")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of the public IP address resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicIPAddressVersion")
def public_ip_address_version(self) -> pulumi.Output[Optional[str]]:
"""
The public IP address version.
"""
return pulumi.get(self, "public_ip_address_version")
@property
@pulumi.getter(name="publicIPAllocationMethod")
def public_ip_allocation_method(self) -> pulumi.Output[Optional[str]]:
"""
The public IP address allocation method.
"""
return pulumi.get(self, "public_ip_allocation_method")
@property
@pulumi.getter(name="publicIPPrefix")
def public_ip_prefix(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The Public IP Prefix this Public IP Address should be allocated from.
"""
return pulumi.get(self, "public_ip_prefix")
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> pulumi.Output[str]:
"""
The resource GUID property of the public IP address resource.
"""
return pulumi.get(self, "resource_guid")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[Optional['outputs.PublicIPAddressSkuResponse']]:
"""
The public IP address SKU.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of availability zones denoting the IP allocated for the resource needs to come from.
"""
return pulumi.get(self, "zones")
| 54.144621
| 5,476
| 0.685993
|
5b57343df9fc48ccb5fb4c3e1682795dde4ef268
| 3,052
|
py
|
Python
|
packages/python/plotly/plotly/validators/bar/insidetextfont/__init__.py
|
sgn/plotly.py
|
587075c9f5a57a3dd60b03b2d47d925fbbb9b9b6
|
[
"MIT"
] | 3
|
2020-02-04T21:39:20.000Z
|
2020-11-17T19:07:07.000Z
|
packages/python/plotly/plotly/validators/bar/insidetextfont/__init__.py
|
sgn/plotly.py
|
587075c9f5a57a3dd60b03b2d47d925fbbb9b9b6
|
[
"MIT"
] | 12
|
2020-06-06T01:22:26.000Z
|
2022-03-12T00:13:42.000Z
|
packages/python/plotly/plotly/validators/bar/insidetextfont/__init__.py
|
sgn/plotly.py
|
587075c9f5a57a3dd60b03b2d47d925fbbb9b9b6
|
[
"MIT"
] | 17
|
2019-11-21T14:11:29.000Z
|
2019-11-21T15:26:23.000Z
|
import _plotly_utils.basevalidators
class SizesrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="sizesrc", parent_name="bar.insidetextfont", **kwargs
):
super(SizesrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="size", parent_name="bar.insidetextfont", **kwargs):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 1),
role=kwargs.pop("role", "style"),
**kwargs
)
import _plotly_utils.basevalidators
class FamilysrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="familysrc", parent_name="bar.insidetextfont", **kwargs
):
super(FamilysrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self, plotly_name="family", parent_name="bar.insidetextfont", **kwargs
):
super(FamilyValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
no_blank=kwargs.pop("no_blank", True),
role=kwargs.pop("role", "style"),
strict=kwargs.pop("strict", True),
**kwargs
)
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="colorsrc", parent_name="bar.insidetextfont", **kwargs
):
super(ColorsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(self, plotly_name="color", parent_name="bar.insidetextfont", **kwargs):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "style"),
role=kwargs.pop("role", "style"),
**kwargs
)
| 31.463918
| 88
| 0.632372
|
a7c3800fa014f8f56a5e27d139eaa2bbd3dd7ded
| 2,841
|
py
|
Python
|
mod/scene_graph.py
|
MarioA-PM/Snake2D
|
b5a196360331b88955e478f8f98e35b85c2c6ce5
|
[
"CC0-1.0"
] | null | null | null |
mod/scene_graph.py
|
MarioA-PM/Snake2D
|
b5a196360331b88955e478f8f98e35b85c2c6ce5
|
[
"CC0-1.0"
] | null | null | null |
mod/scene_graph.py
|
MarioA-PM/Snake2D
|
b5a196360331b88955e478f8f98e35b85c2c6ce5
|
[
"CC0-1.0"
] | null | null | null |
# coding=utf-8
"""
Daniel Calderon, CC3501, 2019-2
A simple scene graph class and functionality
"""
from OpenGL.GL import *
import numpy as np
from mod import easy_shaders as es, transformations as tr
# A simple class to handle a scene graph
# Each node represents a group of objects
# Each leaf represents a basic figure (GPUShape)
# To identify each node properly, it MUST have a unique name
class SceneGraphNode:
def __init__(self, name):
self.name = name
self.transform = tr.identity()
self.childs = []
def findNode(node, name):
# The name was not found in this path
if isinstance(node, es.GPUShape):
return None
# This is the requested node
if node.name == name:
return node
    # All children are checked for the requested name
for child in node.childs:
foundNode = findNode(child, name)
if foundNode != None:
return foundNode
# No child of this node had the requested name
return None
def findTransform(node, name, parentTransform=tr.identity()):
# The name was not found in this path
if isinstance(node, es.GPUShape):
return None
newTransform = np.matmul(parentTransform, node.transform)
# This is the requested node
if node.name == name:
return newTransform
    # All children are checked for the requested name
for child in node.childs:
foundTransform = findTransform(child, name, newTransform)
if isinstance(foundTransform, (np.ndarray, np.generic) ):
return foundTransform
# No child of this node had the requested name
return None
def findPosition(node, name, parentTransform=tr.identity()):
foundTransform = findTransform(node, name, parentTransform)
if isinstance(foundTransform, (np.ndarray, np.generic) ):
zero = np.array([[0,0,0,1]], dtype=np.float32).T
foundPosition = np.matmul(foundTransform, zero)
return foundPosition
return None
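# Illustrative sketch (not part of the original module): composing the helpers above.
# The node names and the tr.translate call are assumptions made for this example only.
def exampleWheelPosition():
    car = SceneGraphNode("car")
    car.transform = tr.translate(1, 0, 0)
    wheel = SceneGraphNode("wheel")
    wheel.transform = tr.translate(0, -0.5, 0)
    car.childs += [wheel]
    # findPosition composes car.transform with wheel.transform, so the wheel center
    # ends up at homogeneous coordinates (1, -0.5, 0, 1)
    return findPosition(car, "wheel")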
def drawSceneGraphNode(node, pipeline, transformName, parentTransform=tr.identity()):
assert(isinstance(node, SceneGraphNode))
# Composing the transformations through this path
newTransform = np.matmul(parentTransform, node.transform)
# If the child node is a leaf, it should be a GPUShape.
# Hence, it can be drawn with drawShape
if len(node.childs) == 1 and isinstance(node.childs[0], es.GPUShape):
leaf = node.childs[0]
glUniformMatrix4fv(glGetUniformLocation(pipeline.shaderProgram, transformName), 1, GL_TRUE, newTransform)
pipeline.drawShape(leaf)
# If the child node is not a leaf, it MUST be a SceneGraphNode,
# so this draw function is called recursively
else:
for child in node.childs:
drawSceneGraphNode(child, pipeline, transformName, newTransform)
| 29.59375
| 113
| 0.687434
|
d056d96cb70fc1521719c4b671440d61404b7cbf
| 1,929
|
py
|
Python
|
setup.py
|
astrotransients/sne
|
11abc3131c6366ecd23964369e55ff264add7805
|
[
"MIT"
] | 30
|
2016-07-21T20:41:52.000Z
|
2022-03-17T20:52:49.000Z
|
setup.py
|
astrotransients/sne
|
11abc3131c6366ecd23964369e55ff264add7805
|
[
"MIT"
] | 48
|
2016-07-07T12:02:50.000Z
|
2020-10-14T21:20:02.000Z
|
setup.py
|
astrocatalogs/astrocats
|
11abc3131c6366ecd23964369e55ff264add7805
|
[
"MIT"
] | 9
|
2016-07-22T08:19:44.000Z
|
2022-03-17T23:26:08.000Z
|
"""WARNING : THIS SCRIPT IS NOT CURRENTLY OPERATIONAL.
"""
import logging
import os
from setuptools import find_packages, setup
from setuptools.command.develop import develop
from setuptools.command.install import install
with open('requirements.txt') as f:
required = f.read().splitlines()
dir_path = os.path.dirname(os.path.realpath(__file__))
exec(open(os.path.join(dir_path, 'astrocats', '__init__.py')).read())
def read(fname):
return open(
os.path.join(os.path.dirname(os.path.abspath(__file__)), fname)).read()
def setup_uc():
from astrocats.main import setup_user_config
setup_user_config(logging.getLogger())
class PostDevelopCommand(develop):
"""Post-develop command."""
def run(self):
setup_uc()
develop.run(self)
class PostInstallCommand(install):
"""Post-installation command."""
def run(self):
setup_uc()
install.run(self)
setup(
name="astrocats",
packages=find_packages(exclude=('*tidaldisruptions*',
'*novae*', '*faststars*')),
include_package_data=True,
version=__version__, # noqa
description=("Package for downloading, analyzing, and constructing open "
"astronomy catalogs."),
license=__license__, # noqa
author=__author__, # noqa
author_email="guillochon@gmail.com",
install_requires=required,
setup_requires=required,
url="https://github.com/astrocatalogs/astrocats",
download_url=(
'https://github.com/astrocatalogs/astrocats/tarball/' +
__version__ # noqa
),
cmdclass={
'develop': PostDevelopCommand,
'install': PostInstallCommand,
},
keywords="astronomy",
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.5"
])
| 26.067568
| 79
| 0.656299
|
76742909624fba012094f65a37a5c32b13662103
| 18,971
|
py
|
Python
|
basenji/vcf.py
|
shtoneyan/basenji
|
b220dc72069c3d8c250f36cb09799b337daac2fe
|
[
"Apache-2.0"
] | null | null | null |
basenji/vcf.py
|
shtoneyan/basenji
|
b220dc72069c3d8c250f36cb09799b337daac2fe
|
[
"Apache-2.0"
] | null | null | null |
basenji/vcf.py
|
shtoneyan/basenji
|
b220dc72069c3d8c250f36cb09799b337daac2fe
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Calico LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import print_function
import gzip
import os
import pdb
import subprocess
import sys
import tempfile
import numpy as np
import pandas as pd
import pysam
import basenji.dna_io
"""vcf.py
Methods and classes to support .vcf SNP analysis.
"""
def cap_allele(allele, cap=5):
""" Cap the length of an allele in the figures """
if len(allele) > cap:
allele = allele[:cap] + '*'
return allele
def intersect_seqs_snps(vcf_file, gene_seqs, vision_p=1):
""" Intersect a VCF file with a list of sequence coordinates.
In
vcf_file:
gene_seqs: list of GeneSeq's
vision_p: proportion of sequences visible to center genes.
Out
seqs_snps: list of list mapping segment indexes to overlapping SNP indexes
"""
# print segments to BED
# hash segments to indexes
seq_temp = tempfile.NamedTemporaryFile()
seq_bed_file = seq_temp.name
seq_bed_out = open(seq_bed_file, 'w')
seq_indexes = {}
for si in range(len(gene_seqs)):
gs = gene_seqs[si]
gene_seq_key = (gs.chrom, gs.start)
seq_indexes[gene_seq_key] = si
print('%s\t%d\t%d' % (gs.chrom, gs.start, gs.end), file=seq_bed_out)
seq_bed_out.close()
# hash SNPs to indexes
snp_indexes = {}
si = 0
vcf_in = open(vcf_file)
line = vcf_in.readline()
while line[0] == '#':
line = vcf_in.readline()
while line:
a = line.split()
snp_id = a[2]
if snp_id in snp_indexes:
raise Exception('Duplicate SNP id %s will break the script' % snp_id)
snp_indexes[snp_id] = si
si += 1
line = vcf_in.readline()
vcf_in.close()
# initialize list of lists
seqs_snps = []
for _ in range(len(gene_seqs)):
seqs_snps.append([])
# intersect
p = subprocess.Popen(
'bedtools intersect -wo -a %s -b %s' % (vcf_file, seq_bed_file),
shell=True,
stdout=subprocess.PIPE)
for line in p.stdout:
line = line.decode('UTF-8')
a = line.split()
pos = int(a[1])
snp_id = a[2]
seq_chrom = a[-4]
seq_start = int(a[-3])
seq_end = int(a[-2])
seq_key = (seq_chrom, seq_start)
vision_buffer = (seq_end - seq_start) * (1 - vision_p) // 2
if seq_start + vision_buffer < pos < seq_end - vision_buffer:
seqs_snps[seq_indexes[seq_key]].append(snp_indexes[snp_id])
p.communicate()
return seqs_snps
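# Illustrative note (not part of basenji): for a 1000 bp segment with vision_p=0.5,
# vision_buffer = 1000 * (1 - 0.5) // 2 = 250, so only SNPs falling in the central
# 500 bp are assigned to that segment; vision_p=1 keeps the whole segment visible.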
def intersect_snps_seqs(vcf_file, seq_coords, vision_p=1):
""" Intersect a VCF file with a list of sequence coordinates.
In
vcf_file:
seq_coords: list of sequence coordinates
vision_p: proportion of sequences visible to center genes.
Out
snp_segs: list of list mapping SNP indexes to overlapping sequence indexes
"""
# print segments to BED
# hash segments to indexes
seg_temp = tempfile.NamedTemporaryFile()
seg_bed_file = seg_temp.name
seg_bed_out = open(seg_bed_file, 'w')
segment_indexes = {}
for si in range(len(seq_coords)):
segment_indexes[seq_coords[si]] = si
print('%s\t%d\t%d' % seq_coords[si], file=seg_bed_out)
seg_bed_out.close()
# hash SNPs to indexes
snp_indexes = {}
si = 0
vcf_in = open(vcf_file)
line = vcf_in.readline()
while line[0] == '#':
line = vcf_in.readline()
while line:
a = line.split()
snp_id = a[2]
if snp_id in snp_indexes:
raise Exception('Duplicate SNP id %s will break the script' % snp_id)
snp_indexes[snp_id] = si
si += 1
line = vcf_in.readline()
vcf_in.close()
# initialize list of lists
snp_segs = []
for i in range(len(snp_indexes)):
snp_segs.append([])
# intersect
p = subprocess.Popen(
'bedtools intersect -wo -a %s -b %s' % (vcf_file, seg_bed_file),
shell=True,
stdout=subprocess.PIPE)
for line in p.stdout:
line = line.decode('UTF-8')
a = line.split()
pos = int(a[1])
snp_id = a[2]
seg_chrom = a[-4]
seg_start = int(a[-3])
seg_end = int(a[-2])
seg_key = (seg_chrom, seg_start, seg_end)
vision_buffer = (seg_end - seg_start) * (1 - vision_p) // 2
if seg_start + vision_buffer < pos < seg_end - vision_buffer:
snp_segs[snp_indexes[snp_id]].append(segment_indexes[seg_key])
p.communicate()
return snp_segs
def snp_seq1(snp, seq_len, genome_open):
""" Produce a one hot coded sequences for a SNP.
Attrs:
snp [SNP] :
seq_len (int) : sequence length to code
genome_open (File) : open genome FASTA file
Return:
seq_vecs_list [array] : list of one hot coded sequences surrounding the
SNP
"""
left_len = seq_len // 2 - 1
right_len = seq_len // 2
# initialize one hot coded vector list
seq_vecs_list = []
# specify positions in GFF-style 1-based
seq_start = snp.pos - left_len
seq_end = snp.pos + right_len + max(0,
len(snp.ref_allele) - snp.longest_alt())
# extract sequence as BED style
if seq_start < 0:
seq = 'N'*(1-seq_start) + genome_open.fetch(snp.chr, 0, seq_end).upper()
else:
seq = genome_open.fetch(snp.chr, seq_start - 1, seq_end).upper()
# extend to full length
if len(seq) < seq_end - seq_start:
seq += 'N' * (seq_end - seq_start - len(seq))
# verify that ref allele matches ref sequence
seq_ref = seq[left_len:left_len + len(snp.ref_allele)]
ref_found = True
if seq_ref != snp.ref_allele:
# search for reference allele in alternatives
ref_found = False
# for each alternative allele
for alt_al in snp.alt_alleles:
# grab reference sequence matching alt length
seq_ref_alt = seq[left_len:left_len + len(alt_al)]
if seq_ref_alt == alt_al:
# found it!
ref_found = True
# warn user
print(
'WARNING: %s - alt (as opposed to ref) allele matches reference genome; changing reference genome to match.'
% (snp.rsid),
file=sys.stderr)
# remove alt allele and include ref allele
seq = seq[:left_len] + snp.ref_allele + seq[left_len + len(alt_al):]
break
if not ref_found:
print('WARNING: %s - reference genome does not match any allele' % snp.rsid, file=sys.stderr)
else:
# one hot code ref allele
seq_vecs_ref, seq_ref = dna_length_1hot(seq, seq_len)
seq_vecs_list.append(seq_vecs_ref)
for alt_al in snp.alt_alleles:
# remove ref allele and include alt allele
seq_alt = seq[:left_len] + alt_al + seq[left_len + len(snp.ref_allele):]
# one hot code
seq_vecs_alt, seq_alt = dna_length_1hot(seq_alt, seq_len)
seq_vecs_list.append(seq_vecs_alt)
return seq_vecs_list
def snps_seq1(snps, seq_len, genome_fasta, return_seqs=False):
""" Produce an array of one hot coded sequences for a list of SNPs.
Attrs:
snps [SNP] : list of SNPs
seq_len (int) : sequence length to code
genome_fasta (str) : genome FASTA file
Return:
seq_vecs (array) : one hot coded sequences surrounding the SNPs
seq_headers [str] : headers for sequences
seq_snps [SNP] : list of used SNPs
"""
left_len = seq_len // 2 - 1
right_len = seq_len // 2
# initialize one hot coded vector list
seq_vecs_list = []
# save successful SNPs
seq_snps = []
# save sequence strings, too
seqs = []
# name sequences
seq_headers = []
# open genome FASTA
genome_open = pysam.Fastafile(genome_fasta)
for snp in snps:
# specify positions in GFF-style 1-based
seq_start = snp.pos - left_len
seq_end = snp.pos + right_len + max(0,
len(snp.ref_allele) - snp.longest_alt())
# extract sequence as BED style
if seq_start < 0:
seq = 'N' * (-seq_start) + genome_open.fetch(snp.chr, 0,
seq_end).upper()
else:
seq = genome_open.fetch(snp.chr, seq_start - 1, seq_end).upper()
# extend to full length
if len(seq) < seq_end - seq_start:
seq += 'N' * (seq_end - seq_start - len(seq))
# verify that ref allele matches ref sequence
seq_ref = seq[left_len:left_len + len(snp.ref_allele)]
if seq_ref != snp.ref_allele:
# search for reference allele in alternatives
ref_found = False
# for each alternative allele
for alt_al in snp.alt_alleles:
# grab reference sequence matching alt length
seq_ref_alt = seq[left_len:left_len + len(alt_al)]
if seq_ref_alt == alt_al:
# found it!
ref_found = True
# warn user
print(
'WARNING: %s - alt (as opposed to ref) allele matches reference genome; changing reference genome to match.'
% (snp.rsid),
file=sys.stderr)
# remove alt allele and include ref allele
seq = seq[:left_len] + snp.ref_allele + seq[left_len + len(alt_al):]
break
if not ref_found:
print(
'WARNING: %s - reference genome %s does not match any allele; skipping'
% (seq_ref, snp.rsid),
file=sys.stderr)
continue
seq_snps.append(snp)
# one hot code ref allele
seq_vecs_ref, seq_ref = dna_length_1hot(seq, seq_len)
seq_vecs_list.append(seq_vecs_ref)
if return_seqs:
seqs.append(seq_ref)
# name ref allele
seq_headers.append('%s_%s' % (snp.rsid, cap_allele(snp.ref_allele)))
for alt_al in snp.alt_alleles:
# remove ref allele and include alt allele
seq_alt = seq[:left_len] + alt_al + seq[left_len + len(snp.ref_allele):]
# one hot code
seq_vecs_alt, seq_alt = dna_length_1hot(seq_alt, seq_len)
seq_vecs_list.append(seq_vecs_alt)
if return_seqs:
seqs.append(seq_alt) # not using right now
# name
seq_headers.append('%s_%s' % (snp.rsid, cap_allele(alt_al)))
# convert to array
seq_vecs = np.array(seq_vecs_list)
if return_seqs:
return seq_vecs, seq_headers, seq_snps, seqs
else:
return seq_vecs, seq_headers, seq_snps
def snps2_seq1(snps, seq_len, genome1_fasta, genome2_fasta, return_seqs=False):
""" Produce an array of one hot coded sequences for a list of SNPs.
Attrs:
snps [SNP] : list of SNPs
seq_len (int) : sequence length to code
    genome1_fasta (str) : major allele genome FASTA file
genome2_fasta (str) : minor allele genome FASTA file
Return:
seq_vecs (array) : one hot coded sequences surrounding the SNPs
seq_headers [str] : headers for sequences
seq_snps [SNP] : list of used SNPs
"""
left_len = seq_len // 2 - 1
right_len = seq_len // 2
# open genome FASTA
genome1 = pysam.Fastafile(genome1_fasta)
genome2 = pysam.Fastafile(genome2_fasta)
# initialize one hot coded vector list
seq_vecs_list = []
# save successful SNPs
seq_snps = []
# save sequence strings, too
seqs = []
# name sequences
seq_headers = []
for snp in snps:
if len(snp.alt_alleles) > 1:
raise Exception(
'Major/minor genome mode requires only two alleles: %s' % snp.rsid)
alt_al = snp.alt_alleles[0]
# specify positions in GFF-style 1-based
seq_start = snp.pos - left_len
seq_end = snp.pos + right_len + len(snp.ref_allele)
# extract sequence as BED style
if seq_start < 0:
seq_ref = 'N' * (-seq_start) + genome1.fetch(snp.chr, 0,
seq_end).upper()
else:
seq_ref = genome1.fetch(snp.chr, seq_start - 1, seq_end).upper()
# extend to full length
if len(seq_ref) < seq_end - seq_start:
seq_ref += 'N' * (seq_end - seq_start - len(seq_ref))
# verify that ref allele matches ref sequence
seq_ref_snp = seq_ref[left_len:left_len + len(snp.ref_allele)]
if seq_ref_snp != snp.ref_allele:
raise Exception(
'WARNING: Major allele SNP %s doesnt match reference genome: %s vs %s'
% (snp.rsid, snp.ref_allele, seq_ref_snp))
# specify positions in GFF-style 1-based
seq_start = snp.pos2 - left_len
seq_end = snp.pos2 + right_len + len(alt_al)
# extract sequence as BED style
if seq_start < 0:
seq_alt = 'N' * (-seq_start) + genome2.fetch(snp.chr, 0,
seq_end).upper()
else:
seq_alt = genome2.fetch(snp.chr, seq_start - 1, seq_end).upper()
# extend to full length
if len(seq_alt) < seq_end - seq_start:
seq_alt += 'N' * (seq_end - seq_start - len(seq_alt))
# verify that ref allele matches ref sequence
seq_alt_snp = seq_alt[left_len:left_len + len(alt_al)]
if seq_alt_snp != alt_al:
raise Exception(
'WARNING: Minor allele SNP %s doesnt match reference genome: %s vs %s'
% (snp.rsid, snp.alt_alleles[0], seq_alt_snp))
seq_snps.append(snp)
# one hot code ref allele
seq_vecs_ref, seq_ref = dna_length_1hot(seq_ref, seq_len)
seq_vecs_list.append(seq_vecs_ref)
if return_seqs:
seqs.append(seq_ref)
# name ref allele
seq_headers.append('%s_%s' % (snp.rsid, cap_allele(snp.ref_allele)))
# one hot code alt allele
seq_vecs_alt, seq_alt = dna_length_1hot(seq_alt, seq_len)
seq_vecs_list.append(seq_vecs_alt)
if return_seqs:
seqs.append(seq_alt)
# name
seq_headers.append('%s_%s' % (snp.rsid, cap_allele(alt_al)))
# convert to array
seq_vecs = np.array(seq_vecs_list)
if return_seqs:
return seq_vecs, seq_headers, seq_snps, seqs
else:
return seq_vecs, seq_headers, seq_snps
def dna_length_1hot(seq, length):
""" Adjust the sequence length and compute
a 1hot coding. """
if length < len(seq):
# trim the sequence
seq_trim = (len(seq) - length) // 2
seq = seq[seq_trim:seq_trim + length]
elif length > len(seq):
# extend with N's
nfront = (length - len(seq)) // 2
nback = length - len(seq) - nfront
seq = 'N' * nfront + seq + 'N' * nback
# n_uniform required to avoid different
# random nucleotides for each allele
seq_1hot = basenji.dna_io.dna_1hot(seq, n_uniform=True)
return seq_1hot, seq
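# Illustrative sketch (not part of basenji): the centering arithmetic used above,
# shown on plain strings so it can be checked without a genome or a model.
def example_center_seq(seq='ACGTACGT', length=4):
  if length < len(seq):
    seq_trim = (len(seq) - length) // 2
    return seq[seq_trim:seq_trim + length]  # 'ACGTACGT', 4 -> 'GTAC'
  nfront = (length - len(seq)) // 2
  nback = length - len(seq) - nfront
  return 'N' * nfront + seq + 'N' * nback  # 'ACGT', 8 -> 'NNACGTNN'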
def vcf_count(vcf_file):
""" Count SNPs in a VCF file """
if vcf_file[-3:] == '.gz':
vcf_in = gzip.open(vcf_file, 'rt')
else:
vcf_in = open(vcf_file)
# read through header
line = vcf_in.readline()
while line[0] == '#':
line = vcf_in.readline()
# count SNPs
num_snps = 0
while line:
num_snps += 1
line = vcf_in.readline()
vcf_in.close()
return num_snps
def vcf_snps(vcf_file, require_sorted=False, validate_ref_fasta=None,
flip_ref=False, pos2=False, start_i=None, end_i=None):
""" Load SNPs from a VCF file """
if vcf_file[-3:] == '.gz':
vcf_in = gzip.open(vcf_file, 'rt')
else:
vcf_in = open(vcf_file)
# read through header
line = vcf_in.readline()
while line[0] == '#':
line = vcf_in.readline()
# to check sorted
if require_sorted:
seen_chrs = set()
prev_chr = None
prev_pos = -1
# to check reference
if validate_ref_fasta is not None:
genome_open = pysam.Fastafile(validate_ref_fasta)
# read in SNPs
snps = []
si = 0
while line:
if start_i is None or start_i <= si < end_i:
snps.append(SNP(line, pos2))
if require_sorted:
if prev_chr is not None:
# same chromosome
if prev_chr == snps[-1].chr:
if snps[-1].pos < prev_pos:
print('Sorted VCF required. Mis-ordered position: %s' % line.rstrip(),
file=sys.stderr)
exit(1)
elif snps[-1].chr in seen_chrs:
print('Sorted VCF required. Mis-ordered chromosome: %s' % line.rstrip(),
file=sys.stderr)
exit(1)
seen_chrs.add(snps[-1].chr)
prev_chr = snps[-1].chr
prev_pos = snps[-1].pos
if validate_ref_fasta is not None:
ref_n = len(snps[-1].ref_allele)
snp_pos = snps[-1].pos-1
ref_snp = genome_open.fetch(snps[-1].chr, snp_pos, snp_pos+ref_n)
if snps[-1].ref_allele != ref_snp:
if not flip_ref:
# bail
print('ERROR: %s does not match reference %s' % (snps[-1], ref_snp), file=sys.stderr)
exit(1)
else:
alt_n = len(snps[-1].alt_alleles[0])
ref_snp = genome_open.fetch(snps[-1].chr, snp_pos, snp_pos+alt_n)
# if alt matches fasta reference
if snps[-1].alt_alleles[0] == ref_snp:
# flip alleles
snps[-1].flip_alleles()
else:
# bail
print('ERROR: %s does not match reference %s' % (snps[-1], ref_snp), file=sys.stderr)
exit(1)
si += 1
line = vcf_in.readline()
vcf_in.close()
return snps
def vcf_sort(vcf_file):
# move
os.rename(vcf_file, '%s.tmp' % vcf_file)
# print header
vcf_out = open(vcf_file, 'w')
print('##fileformat=VCFv4.0', file=vcf_out)
vcf_out.close()
# sort
subprocess.call(
'bedtools sort -i %s.tmp >> %s' % (vcf_file, vcf_file), shell=True)
# clean
os.remove('%s.tmp' % vcf_file)
class SNP:
""" SNP
Represent SNPs read in from a VCF file
Attributes:
vcf_line (str)
"""
def __init__(self, vcf_line, pos2=False):
a = vcf_line.split()
if a[0].startswith('chr'):
self.chr = a[0]
else:
self.chr = 'chr%s' % a[0]
self.pos = int(a[1])
self.rsid = a[2]
self.ref_allele = a[3]
self.alt_alleles = a[4].split(',')
# self.alt_allele = self.alt_alleles[0]
self.flipped = False
if self.rsid == '.':
self.rsid = '%s:%d' % (self.chr, self.pos)
self.pos2 = None
if pos2:
self.pos2 = int(a[5])
def flip_alleles(self):
""" Flip reference and first alt allele."""
assert(len(self.alt_alleles) == 1)
self.ref_allele, self.alt_alleles[0] = self.alt_alleles[0], self.ref_allele
self.flipped = True
def get_alleles(self):
""" Return a list of all alleles """
alleles = [self.ref_allele] + self.alt_alleles
return alleles
def longest_alt(self):
""" Return the longest alt allele. """
return max([len(al) for al in self.alt_alleles])
def __str__(self):
return 'SNP(%s, %s:%d, %s/%s)' % (self.rsid, self.chr, self.pos,
self.ref_allele,
','.join(self.alt_alleles))
| 27.980826
| 122
| 0.624743
|
31aa5ed79b43acf4d436a1dde60f9ec5f4e2b569
| 1,479
|
py
|
Python
|
L1Trigger/L1THGCal/python/hgcalTowerMapProducer_cfi.py
|
bkilian15/cmssw
|
ad9b85e79d89f4f6c393c8cc648261366d82adca
|
[
"Apache-2.0"
] | null | null | null |
L1Trigger/L1THGCal/python/hgcalTowerMapProducer_cfi.py
|
bkilian15/cmssw
|
ad9b85e79d89f4f6c393c8cc648261366d82adca
|
[
"Apache-2.0"
] | null | null | null |
L1Trigger/L1THGCal/python/hgcalTowerMapProducer_cfi.py
|
bkilian15/cmssw
|
ad9b85e79d89f4f6c393c8cc648261366d82adca
|
[
"Apache-2.0"
] | null | null | null |
import FWCore.ParameterSet.Config as cms
import math
L1TTriggerTowerConfig_etaphi = cms.PSet(readMappingFile=cms.bool(False),
minEta=cms.double(1.479),
maxEta=cms.double(3.0),
minPhi=cms.double(-1*math.pi),
maxPhi=cms.double(math.pi),
nBinsEta=cms.int32(18),
nBinsPhi=cms.int32(72),
binsEta=cms.vdouble(),
binsPhi=cms.vdouble())
towerMap2D_parValues = cms.PSet( #nEtaBins = cms.int32(18),
#nPhiBins = cms.int32(72),
#etaBins = cms.vdouble(),
#phiBins = cms.vdouble(),
useLayerWeights = cms.bool(False),
layerWeights = cms.vdouble(),
L1TTriggerTowerConfig = L1TTriggerTowerConfig_etaphi
)
tower_map = cms.PSet( ProcessorName = cms.string('HGCalTowerMapProcessor'),
towermap_parameters = towerMap2D_parValues.clone()
)
hgcalTowerMapProducer = cms.EDProducer(
"HGCalTowerMapProducer",
InputTriggerCells = cms.InputTag('hgcalVFEProducer:HGCalVFEProcessorSums'),
ProcessorParameters = tower_map.clone()
)
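# Hedged usage sketch (not part of the original cfi): a configuration could clone the
# producer and override the tower binning; the finer phi segmentation below is made up.
#
#   hgcalTowerMapProducerFinePhi = hgcalTowerMapProducer.clone()
#   hgcalTowerMapProducerFinePhi.ProcessorParameters.towermap_parameters.L1TTriggerTowerConfig.nBinsPhi = 90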
| 46.21875
| 85
| 0.478702
|
20dcdfbc43738e8db381d416a52c1e23d9a8fed5
| 1,377
|
py
|
Python
|
tests/tests_dezero/test_layers.py
|
daisuke19891023/dl-from-scratch-3
|
4fb9441cfcceca96ac07c602053e608d92c80838
|
[
"MIT"
] | null | null | null |
tests/tests_dezero/test_layers.py
|
daisuke19891023/dl-from-scratch-3
|
4fb9441cfcceca96ac07c602053e608d92c80838
|
[
"MIT"
] | 2
|
2020-05-30T15:04:54.000Z
|
2020-05-30T15:05:20.000Z
|
tests/tests_dezero/test_layers.py
|
daisuke19891023/dl-from-scratch-3
|
4fb9441cfcceca96ac07c602053e608d92c80838
|
[
"MIT"
] | null | null | null |
if '__file__' in globals():
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import numpy as np
import matplotlib.pyplot as plt
from dezero import Variable
from dezero import setup_variable
from dezero.utils import plot_dot_graph
import dezero.functions as F
import dezero.layers as L
import pytest
setup_variable()
np.random.seed(0)
lr = 0.2
iters = 10001
@pytest.fixture(scope="function", autouse=True)
def linear_object():
x = np.random.rand(100, 1)
y = np.sin(2 * np.pi * x) + np.random.rand(100, 1)
yield x, y
def predict(x, l1, l2):
y = l1(x)
y = F.sigmoid(y)
y = l2(y)
return y
class TestLinear:
def test_linear_forward(self, linear_object):
l1 = L.Linear(10)
l2 = L.Linear(1)
y_pred = predict(linear_object[0], l1, l2)
loss = F.mean_squared_error(linear_object[1], y_pred)
assert np.allclose(loss.data, 0.81651785)
def test_linear_backward(self, linear_object):
l1 = L.Linear(10)
l2 = L.Linear(1)
y_pred = predict(linear_object[0], l1, l2)
loss = F.mean_squared_error(linear_object[1], y_pred)
l1.cleargrads()
l2.cleargrads()
loss.backward()
for l in [l1, l2]:
for p in l.params():
assert p.grad.data is not None
| 25.5
| 66
| 0.620189
|
49eb7eda46176ff9623ba0ca9acedcce8076e756
| 6,274
|
py
|
Python
|
features/steps/inventory_steps.py
|
devops-inventory/inventory
|
9febddf89f16837f83602da290349bdfbcd72cff
|
[
"Apache-2.0"
] | 1
|
2021-08-12T16:06:07.000Z
|
2021-08-12T16:06:07.000Z
|
features/steps/inventory_steps.py
|
devops-inventory/inventory
|
9febddf89f16837f83602da290349bdfbcd72cff
|
[
"Apache-2.0"
] | 108
|
2019-02-26T19:52:27.000Z
|
2019-05-13T23:37:36.000Z
|
features/steps/inventory_steps.py
|
devops-inventory/inventory
|
9febddf89f16837f83602da290349bdfbcd72cff
|
[
"Apache-2.0"
] | 2
|
2019-02-26T19:46:07.000Z
|
2021-02-07T21:45:54.000Z
|
"""
Inventory Steps
Steps file for Inventory.feature
"""
from os import getenv
import json
import logging
import requests
from behave import *
from compare import expect, ensure
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
WAIT_SECONDS = int(getenv('WAIT_SECONDS', '30'))
@given('the following inventory')
def step_impl(context):
""" Delete all Inventory and load new ones """
headers = {'Content-Type': 'application/json'}
#context.resp = requests.delete(context.base_url + '/inventory/reset', headers=headers)
#expect(context.resp.status_code).to_equal(204)
create_url = context.base_url + '/inventory'
for row in context.table:
data = {
"name": row['name'],
"category": row['category'],
"available": row['available'],
"condition": row['condition'],
"count": row['count']
}
payload = json.dumps(data)
context.resp = requests.post(create_url, data=payload, headers=headers)
expect(context.resp.status_code).to_equal(201)
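# Illustrative note (not part of the original steps): the step above is driven by a
# Gherkin table in the feature file; the row values here are hypothetical.
#
#   Given the following inventory
#       | name   | category | available | condition | count |
#       | widget | tools    | True      | new       | 10    |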
@when('I visit the "home page"')
def step_impl(context):
""" Make a call to the base URL """
context.driver.get(context.base_url)
#context.driver.save_screenshot('home_page.png')
@then('I should see "{message}" in the title')
def step_impl(context, message):
""" Check the document title for a message """
expect(context.driver.title).to_contain(message)
@then('I should not see "{message}"')
def step_impl(context, message):
error_msg = "I should not see '%s' in '%s'" % (message, context.resp.text)
ensure(message in context.resp.text, False, error_msg)
@when('I set the "{element_name}" to "{text_string}"')
def step_impl(context, element_name, text_string):
element_id = 'inventory_' + element_name.lower()
element = context.driver.find_element_by_id(element_id)
element.clear()
element.send_keys(text_string)
@when('I choose "{element_name}" as "{true_false}"')
def step_impl(context, element_name, true_false):
element_id = 'inventory_' + element_name.lower()
element = context.driver.find_element_by_id(element_id)
element.send_keys(true_false)
##################################################################
# This code works because of the following naming convention:
# The buttons have an id in the html that is the button text
# in lowercase followed by '-btn' so the Clear button has an id of
# id='clear-btn'. That allows us to lowercase the name and add '-btn'
# to get the element id of any button
##################################################################
@when('I press the "{button}" button')
def step_impl(context, button):
button_id = button.lower() + '-btn'
context.driver.find_element_by_id(button_id).click()
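# Illustrative sketch (not part of the original steps): the id convention described
# above is plain string manipulation; the button names in the comment are hypothetical.
def _example_button_id(button_text):
    # 'Search' -> 'search-btn', 'Clear' -> 'clear-btn', 'Retrieve' -> 'retrieve-btn'
    return button_text.lower() + '-btn'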
@then('I should see "{name}" in the results')
def step_impl(context, name):
found = WebDriverWait(context.driver, WAIT_SECONDS).until(
expected_conditions.text_to_be_present_in_element(
(By.ID, 'search_results'),
name))
expect(found).to_be(True)
@then('I should not see "{name}" in the results')
def step_impl(context, name):
element = context.driver.find_element_by_id('search_results')
error_msg = "I should not see '%s' in '%s'" % (name, element.text)
ensure(name in element.text, False, error_msg)
@then('The "{element_name}" field should be empty')
def step_impl(context, element_name):
    element_id = 'inventory_' + element_name.lower()
    element = context.driver.find_element_by_id(element_id)
    expect(element.get_attribute('value')).to_equal(u'')
@then('I should see the message "{message}"')
def step_impl(context, message):
found = WebDriverWait(context.driver, WAIT_SECONDS).until(
expected_conditions.text_to_be_present_in_element(
(By.ID, 'flash_message'),
message
)
)
expect(found).to_be(True)
##################################################################
# This code works because of the following naming convention:
# The id field for text input in the html is the element name
# prefixed by 'inventory_' so the Name field has an id='inventory_name'
# We can then lowercase the name and prefix with inventory_ to get the id
##################################################################
@when('I change "{element_name}" to "{text_string}"')
def step_impl(context, element_name, text_string):
element_id = 'inventory_' + element_name.lower()
element = context.driver.find_element_by_id(element_id)
# element = WebDriverWait(context.driver, WAIT_SECONDS).until(
# expected_conditions.presence_of_element_located((By.ID, element_id))
# )
element.clear()
element.send_keys(text_string)
@when('I switch "{key}" to "{value}"')
def step_impl(context, key, value):
context.data[key] = value
@then('I should see "{text_string}" in the "{element_name}" field')
def step_impl(context, text_string, element_name):
    element_id = 'inventory_' + element_name.lower()
    found = WebDriverWait(context.driver, WAIT_SECONDS).until(
expected_conditions.text_to_be_present_in_element_value(
(By.ID, element_id),
text_string
)
)
expect(found).to_be(True)
@then('I should not see "{text_string}" in the "{element_name}" field')
def step_impl(context, text_string, element_name):
element_id = 'inventory_' + element_name.lower()
element = context.driver.find_element_by_id(element_id)
error_msg = "I should not see '%s' in '%s'" % (text_string, element_name)
ensure(text_string in element_name, False, error_msg)
@when('I copy the "{element_name}" field')
def step_impl(context, element_name):
element_id = 'inventory_' + element_name.lower()
element = context.driver.find_element_by_id(element_id)
context.clipboard = element.get_attribute('value')
logging.info('Clipboard contains: %s', context.clipboard)
@when('I paste the "{element_name}" field')
def step_impl(context, element_name):
element_id = 'inventory_' + element_name.lower()
element = context.driver.find_element_by_id(element_id)
element.clear()
element.send_keys(context.clipboard)
| 38.490798
| 91
| 0.674211
|
a4343b770837d566842a92b6c352bbc51321b2c2
| 1,884
|
py
|
Python
|
edna2/utils/test/UtilsTest_exec_test.py
|
gsantoni/edna2
|
0aad63a3ea8091ce62118f0b2c8ac78a2286da9e
|
[
"CC0-1.0",
"MIT"
] | null | null | null |
edna2/utils/test/UtilsTest_exec_test.py
|
gsantoni/edna2
|
0aad63a3ea8091ce62118f0b2c8ac78a2286da9e
|
[
"CC0-1.0",
"MIT"
] | 2
|
2020-04-06T10:39:50.000Z
|
2021-04-14T19:24:37.000Z
|
edna2/utils/test/UtilsTest_exec_test.py
|
gsantoni/edna2
|
0aad63a3ea8091ce62118f0b2c8ac78a2286da9e
|
[
"CC0-1.0",
"MIT"
] | 5
|
2019-06-14T07:28:38.000Z
|
2021-04-28T13:10:39.000Z
|
#
# Copyright (c) European Synchrotron Radiation Facility (ESRF)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__authors__ = ["O. Svensson"]
__license__ = "MIT"
__date__ = "21/04/2019"
import os
import unittest
from edna2.utils import UtilsTest
class UtilsTestExecTest(unittest.TestCase):
def test_loadTestImages(self):
imageFileName = "FAE_1_1_00001.cbf"
pathImage = UtilsTest.getTestImageDirPath() / imageFileName
if pathImage.exists():
os.remove(str(pathImage))
UtilsTest.loadTestImage(imageFileName)
self.assertTrue(pathImage.exists())
def test_substitueTestData(self):
inData = {
"image": "$EDNA2_TESTDATA_IMAGES/ref-2m_RNASE_1_0001.cbf"
}
newInData = UtilsTest.substitueTestData(inData)
self.assertTrue(os.path.exists(newInData["image"]))
| 38.44898
| 82
| 0.734607
|
a019d5b25ab8537adb7e05b2c00a3ddf93465d04
| 3,404
|
py
|
Python
|
airflow/providers/amazon/aws/hooks/sns.py
|
shashijangra/airflow-1
|
c3e340584bf1892c4f73aa9e7495b5823dab0c40
|
[
"Apache-2.0"
] | 2
|
2021-07-30T17:25:56.000Z
|
2021-08-03T13:51:09.000Z
|
airflow/providers/amazon/aws/hooks/sns.py
|
shashijangra/airflow-1
|
c3e340584bf1892c4f73aa9e7495b5823dab0c40
|
[
"Apache-2.0"
] | null | null | null |
airflow/providers/amazon/aws/hooks/sns.py
|
shashijangra/airflow-1
|
c3e340584bf1892c4f73aa9e7495b5823dab0c40
|
[
"Apache-2.0"
] | 1
|
2020-11-06T01:26:29.000Z
|
2020-11-06T01:26:29.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains AWS SNS hook
"""
import json
from typing import Optional, Union, Dict
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
def _get_message_attribute(o):
if isinstance(o, bytes):
return {'DataType': 'Binary', 'BinaryValue': o}
if isinstance(o, str):
return {'DataType': 'String', 'StringValue': o}
if isinstance(o, (int, float)):
return {'DataType': 'Number', 'StringValue': str(o)}
if hasattr(o, '__iter__'):
return {'DataType': 'String.Array', 'StringValue': json.dumps(o)}
raise TypeError(
'Values in MessageAttributes must be one of bytes, str, int, float, or iterable; ' f'got {type(o)}'
)
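# Illustrative sketch (not part of the original hook): how the helper above maps plain
# Python values onto SNS MessageAttributes entries.
def _example_message_attributes():
    assert _get_message_attribute('high') == {'DataType': 'String', 'StringValue': 'high'}
    assert _get_message_attribute(3) == {'DataType': 'Number', 'StringValue': '3'}
    assert _get_message_attribute(b'\x01') == {'DataType': 'Binary', 'BinaryValue': b'\x01'}
    assert _get_message_attribute(['a', 'b']) == {'DataType': 'String.Array', 'StringValue': json.dumps(['a', 'b'])}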
class AwsSnsHook(AwsBaseHook):
"""
Interact with Amazon Simple Notification Service.
Additional arguments (such as ``aws_conn_id``) may be specified and
are passed down to the underlying AwsBaseHook.
.. seealso::
:class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
"""
def __init__(self, *args, **kwargs):
super().__init__(client_type='sns', *args, **kwargs)
def publish_to_target(
self,
target_arn: str,
message: str,
subject: Optional[str] = None,
message_attributes: Optional[dict] = None,
):
"""
Publish a message to a topic or an endpoint.
:param target_arn: either a TopicArn or an EndpointArn
:type target_arn: str
:param message: the default message you want to send
        :type message: str
:param subject: subject of message
:type subject: str
:param message_attributes: additional attributes to publish for message filtering. This should be
a flat dict; the DataType to be sent depends on the type of the value:
- bytes = Binary
- str = String
- int, float = Number
- iterable = String.Array
:type message_attributes: dict
"""
publish_kwargs: Dict[str, Union[str, dict]] = {
'TargetArn': target_arn,
'MessageStructure': 'json',
'Message': json.dumps({'default': message}),
}
# Construct args this way because boto3 distinguishes from missing args and those set to None
if subject:
publish_kwargs['Subject'] = subject
if message_attributes:
publish_kwargs['MessageAttributes'] = {
key: _get_message_attribute(val) for key, val in message_attributes.items()
}
return self.get_conn().publish(**publish_kwargs)
| 35.092784
| 107
| 0.655993
|
55b5c9e8f3141c3abf87e080cf9e394503061d13
| 457
|
py
|
Python
|
main/migrations/0017_auto_20201127_1650.py
|
zahir1509/project-ap-etravel
|
2113a84ae4340be0e8cfa2676f448878c625e3e3
|
[
"MIT"
] | 1
|
2020-12-06T17:49:11.000Z
|
2020-12-06T17:49:11.000Z
|
main/migrations/0017_auto_20201127_1650.py
|
zahir1509/project-ap-etravel
|
2113a84ae4340be0e8cfa2676f448878c625e3e3
|
[
"MIT"
] | null | null | null |
main/migrations/0017_auto_20201127_1650.py
|
zahir1509/project-ap-etravel
|
2113a84ae4340be0e8cfa2676f448878c625e3e3
|
[
"MIT"
] | 1
|
2020-12-07T14:20:41.000Z
|
2020-12-07T14:20:41.000Z
|
# Generated by Django 3.1.3 on 2020-11-27 11:20
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0016_auto_20201127_1650'),
]
operations = [
migrations.AlterField(
model_name='reservation',
name='timestamp',
field=models.DateTimeField(default=datetime.datetime(2020, 11, 27, 16, 50, 53, 149123)),
),
]
| 22.85
| 100
| 0.623632
|
7b117f708d10b444361261314f52b9dc8c44bad0
| 2,096
|
py
|
Python
|
app.py
|
Annamalaisaravanan/Volume-Controller-using-OpenCV
|
eaed20e16ed4e72ce9a04542795581b7f9045cd0
|
[
"MIT"
] | null | null | null |
app.py
|
Annamalaisaravanan/Volume-Controller-using-OpenCV
|
eaed20e16ed4e72ce9a04542795581b7f9045cd0
|
[
"MIT"
] | null | null | null |
app.py
|
Annamalaisaravanan/Volume-Controller-using-OpenCV
|
eaed20e16ed4e72ce9a04542795581b7f9045cd0
|
[
"MIT"
] | null | null | null |
import keras
from keras.models import load_model
import cv2
import pyautogui as p # used to simulate keyboard key presses
import numpy as np
import operator
#load the trained model
model=load_model("path/to/the/directory/which/contains/volume_up_new_model.h5")
#initialise the camera
cap=cv2.VideoCapture(0)
#declaring the categorical output
categories={0:"volume up",1:"volumn down",2:'nothing'}
#initialise infinite loop
while True:
_,frame=cap.read() #reading video from a camera
frame=cv2.flip(frame,1) #fliping the camera output
x1=int(0.5*frame.shape[1]) #to create a ROI on the screen
y1=10
x2=frame.shape[1]-10
y2=int(0.5*frame.shape[1])
cv2.rectangle(frame,(x1-1,y1-1),(x2-1,y2-1),(255,0,0),1) # drawing the rectangle in the camera window
roi=frame[y1:y2,x1:x2]
roi=cv2.resize(roi,(50,50)) #resize the image
    # keep the 3-channel BGR ROI; the model expects input shaped (1, 50, 50, 3)
result=model.predict(roi.reshape(1,50,50,3)) #making prediction from the loaded model
prediction={
"volume up":result[0][0],
"volumn down":result[0][1],
'nothing':result[0][2]
}
prediction=sorted(prediction.items(),key=operator.itemgetter(1),reverse=True) #sorting of prediction on the basis of max accuracy
    cv2.putText(frame,prediction[0][0],(x1+100,y2+30),cv2.FONT_HERSHEY_PLAIN,2,(0,255,0),3) #showing the predicted label on the screen
cv2.imshow("Frame",frame)
print(prediction[0][0])
if prediction[0][0]=="volume up":
p.press('up') # it'll press the up arrow key to increase volume
if prediction[0][0]=='volumn down':
p.press('down') # it'll press the down arrow key to decrease volume
if prediction[0][0]=='nothing':
pass
key=cv2.waitKey(10)
if key & 0xFF == 27: #press esc for break
break
cap.release() #switch off the camera
cv2.destroyAllWindows() #destroy camera windows
| 37.428571
| 137
| 0.616889
|
f838bb938d69cebc8395a221a394fb2886ffa623
| 15,258
|
py
|
Python
|
beacon_export_plugin/operators/beacon_metric_exporter_operator.py
|
jgruberf5/beacon-airflow
|
8dddb57a966901d47e2c1d443fb6f63dcb0f2f9d
|
[
"Apache-2.0"
] | null | null | null |
beacon_export_plugin/operators/beacon_metric_exporter_operator.py
|
jgruberf5/beacon-airflow
|
8dddb57a966901d47e2c1d443fb6f63dcb0f2f9d
|
[
"Apache-2.0"
] | 8
|
2020-12-17T21:21:03.000Z
|
2022-03-29T22:28:19.000Z
|
beacon_export_plugin/operators/beacon_metric_exporter_operator.py
|
jgruberf5/beacon-airflow
|
8dddb57a966901d47e2c1d443fb6f63dcb0f2f9d
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# pylint: disable=broad-except
# Copyright (c) 2016-2018, F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains F5 Beacon metric API exporter
"""
import time
import datetime
import json
import os
import re
import subprocess
from airflow.models.baseoperator import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.exceptions import AirflowException
from beacon_export_plugin.hooks.beacon_hook import BeaconHook
from requests.exceptions import HTTPError
from line_protocol_parser import parse_line
# schema test7
SCHEMA_SKEL = [
{'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'REQUIRED'},
{'name': 'account_id', 'type': 'STRING', 'mode': 'REQUIRED'},
{'name': 'source_id', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'evt', 'type': 'RECORD', 'mode': 'NULLABLE',
'fields': [
{'name': 'version', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'sourceName', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'sourceDescription', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'fields', 'type': 'RECORD', 'mode': 'NULLABLE', 'fields': []},
{'name': 'tags', 'type': 'RECORD', 'mode': 'NULLABLE', 'fields': []},
{'name': 'timestamp', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'}
]
}
]
WRITE_FILE_DELAY_SECS = 1
class F5BeaconMetricQueryExporterOperator(BaseOperator):
@apply_defaults
def __init__(self, # pylint: disable=too-many-arguments
beacon_conn_id: str = 'f5_beacon_default',
destination_dir: str = '/home/airflow/gcs/data',
start_timestamp: int = 0,
stop_timestamp: int = 0,
metric_file: str = None,
schema_file: str = None,
*args,
**kwargs) -> None:
super().__init__(*args, **kwargs)
self.beacon_conn_id = beacon_conn_id
self.destination_dir = destination_dir
self.start_timestamp = start_timestamp
self.stop_timestamp = stop_timestamp
self.beacon_hook = BeaconHook(beacon_conn_id)
self.schema = SCHEMA_SKEL
self.mfn = metric_file
self.sfn = schema_file
self.tags = {}
self.fields = {}
def execute(self, context):
if not self.mfn:
self.mfn = self.get_metrics_fn(context['dag_run'].run_id)
if not self.sfn:
self.sfn = self.get_schema_fn(context['dag_run'].run_id)
conn = self.beacon_hook.get_conn()
account_id = 'primary'
if 'account_id' in conn.extra_dejson:
account_id = conn.extra_dejson['account_id']
self.log.info('Executing extract metrics from f5 Beacon account %s between %s:%s into: %s',
account_id, self.start_timestamp, self.stop_timestamp, self.destination_dir)
known_measurements = self.beacon_hook.get_measurements()
self.log.info('found %s measurement for f5 Beacon account %s',
known_measurements, account_id)
if os.path.exists(self.mfn):
os.unlink(self.mfn)
for measurement in known_measurements:
self.get_measurement_records(
measurement, context['dag_run'].run_id)
self.write_schema(context['dag_run'].run_id)
def get_metrics_fn(self, run_id):
dest_dir = os.path.join(self.destination_dir, run_id)
os.makedirs(dest_dir, exist_ok=True)
return os.path.join(dest_dir, 'line_metrics.json')
def get_schema_fn(self, run_id):
dest_dir = os.path.join(self.destination_dir, run_id)
os.makedirs(dest_dir, exist_ok=True)
return os.path.join(dest_dir, 'line_schema.json')
def get_field(self, field_name):
field_name = self.format_col_name(field_name)
key = field_name.lower()
if key in self.fields:
return self.fields[key]
return None
def get_tag(self, tag_name):
tag_name = self.format_col_name(tag_name)
key = tag_name.lower()
if key in self.tags:
return self.tags[key]
return None
def get_measurement_records(self, account_id, measurement, run_id):
batch_size = 9000
have_records = True
start_timestamp = int(self.start_timestamp)
stop_timestamp = int(self.stop_timestamp) + 1
offset_seconds = 0
self.load_schema()
while have_records:
if offset_seconds > 0:
start_timestamp = offset_seconds
query = "SELECT * FROM \"%s\" WHERE time > %s000000000 and time < %s000000000 ORDER BY time LIMIT %s" % (
measurement, start_timestamp, stop_timestamp, batch_size)
self.log.info(
'submitting query: %s to f5 Beacon metric API.', query)
try:
line_data = self.beacon_hook.query_metric(
query, output_line=True)
records = line_data.split("\n")
number_of_records = len(records)
if number_of_records:
self.log.info('writing %d records from f5 Beacon metrics API to %s',
number_of_records, self.destination_dir)
offset_seconds = self.output_to_file(
records, run_id, account_id)
if number_of_records < batch_size:
have_records = False
except HTTPError as he:
self.log.error(he)
if batch_size > 1:
batch_size = int(batch_size*0.9)
else:
raise AirflowException(
'could not export f5 Beacon metric for measurement %s after reducing record limit to 1', measurement)
self.save_schema()
def load_schema(self):
if os.path.exists(self.sfn):
with open(self.sfn, 'r') as sf:
try:
self.schema = json.load(sf)
if not self.schema:
self.schema = SCHEMA_SKEL
except json.JSONDecodeError:
self.schema = SCHEMA_SKEL
self.populate_cols_from_schema()
def populate_cols_from_schema(self):
# reduce tags and fields through dict
for col in self.schema:
if col['name'] == 'evt':
for event_cols in col['fields']:
if event_cols['name'] == 'fields':
for field_cols in event_cols['fields']:
key = field_cols['name'].lower()
self.fields[key] = field_cols['name']
if event_cols['name'] == 'tags':
for tag_cols in event_cols['fields']:
key = tag_cols['name'].lower()
self.tags[key] = tag_cols['name']
def save_schema(self):
with open(self.sfn, 'w+') as sf:
json.dump(self.schema, sf, indent=4,
separators=(',', ': '))
time.sleep(WRITE_FILE_DELAY_SECS)
def format_col_name(self, existing_tag):
# converting to Camel case or all lower case
components = existing_tag.split('_')
converted = components[0] + ''.join(x.title() for x in components[1:])
components = converted.split('-')
converted = components[0] + ''.join(x.title() for x in components[1:])
if converted.isupper():
converted = converted.lower()
if converted[0].isalpha() and converted[0].isupper():
converted = converted[0].lower() + converted[1:]
if not (converted[0].isalpha() or converted[0] == '_'):
converted = "beacon%s" % converted
return converted
def add_field_to_schema(self, field_name, field_type):
field_name = self.format_col_name(field_name)
for col in self.schema:
if col['name'] == 'evt':
for c in col['fields']:
if c['name'] == 'fields':
c['fields'].append(
{
'name': field_name,
'type': field_type,
'mode': 'NULLABLE'
}
)
key = field_name.lower()
self.fields[key] = field_name
return field_name
def add_tag_to_schema(self, tag_name):
tag_name = self.format_col_name(tag_name)
for col in self.schema:
if col['name'] == 'evt':
for c in col['fields']:
if c['name'] == 'tags':
c['fields'].append(
{
'name': tag_name,
'type': 'STRING',
'mode': 'NULLABLE'
}
)
key = tag_name.lower()
self.tags[key] = tag_name
return tag_name
def output_to_file(self, lines, run_id, account_id):
df = open(self.mfn, 'a+')
largest_timestamp = 0
for line in lines:
if line:
data = parse_line(line)
# Transform
ms_timestamp = float(int(data['time']) / 1000000000)
fields_dict = {}
for fn in data['fields']:
val = data['fields'][fn]
tfn = self.get_field(fn)
if not tfn:
if type(val) == bool:
tfn = self.add_field_to_schema(fn, 'BOOL')
elif type(val) == int:
tfn = self.add_field_to_schema(fn, 'INT64')
elif type(val) == float:
tfn = self.add_field_to_schema(fn, 'FLOAT64')
else:
tfn = self.add_field_to_schema(fn, 'STRING')
fields_dict[tfn] = val
tags_dict = {}
for tn in data['tags']:
ttn = self.get_tag(tn)
if not ttn:
ttn = self.add_tag_to_schema(tn)
tags_dict[ttn] = str(data['tags'][tn])
transformed_data = {
'ts': ms_timestamp,
'account_id': "urn:f5_cs::acccount:%s" % account_id,
'source_id': None,
'evt': {
'version': '1.0',
'sourceName': data['measurement'],
'sourceDescription': "data imported from beacon for account %s" % account_id,
'fields': fields_dict,
'tags': tags_dict,
'timestamp': ms_timestamp
}
}
df.write("%s\n" % json.dumps(transformed_data))
if ms_timestamp > largest_timestamp:
largest_timestamp = int(ms_timestamp)
time.sleep(WRITE_FILE_DELAY_SECS)
df.close()
return largest_timestamp
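    # Illustrative note (not part of the original operator): a line-protocol record such as
    #   system,host=lab1 cpuUsage=0.42 1609459200000000000
    # (hypothetical measurement, tag and field) is parsed by parse_line() into its
    # measurement/tags/fields/time parts and re-emitted above as one JSON event per line,
    # with 'ts' converted to seconds and the tags and fields nested under 'evt'.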
class F5BeaconMetricQueryDailyExporterOperator(F5BeaconMetricQueryExporterOperator):
@apply_defaults
def __init__(self, # pylint: disable=too-many-arguments
beacon_conn_id: str = 'f5_beacon_default',
destination_dir="/home/airflow/gcs/data",
*args,
**kwargs) -> None:
super().__init__(*args, **kwargs)
self.beacon_conn_id = beacon_conn_id
self.destination_dir = destination_dir
self.beacon_hook = BeaconHook(self.beacon_conn_id)
self.date = None
def execute(self, context):
if not self.mfn:
self.mfn = self.get_metrics_fn(context['dag_run'].run_id)
if not self.sfn:
self.sfn = self.get_schema_fn(context['dag_run'].run_id)
self.date = str(context.get("execution_date").date())
conn = self.beacon_hook.get_conn()
account_id = 'primary'
if 'account_id' in conn.extra_dejson:
account_id = conn.extra_dejson['account_id']
self.log.info('Executing extract metrics from f5 Beacon account %s on %s into: %s',
account_id, self.date, self.destination_dir)
known_measurements = self.beacon_hook.get_measurements()
self.log.info('found %s measurement for f5 Beacon account %s',
known_measurements, account_id)
self.start_timestamp = start_timestamp = int(time.mktime(
datetime.datetime.strptime(self.date, '%Y-%m-%d').timetuple()))
self.stop_timestamp = start_timestamp + 86400
if os.path.exists(self.mfn):
os.unlink(self.mfn)
for measurement in known_measurements:
self.get_measurement_records(
account_id, measurement, context['dag_run'].run_id)
class F5BeaconMetricQueryHourlyExporterOperator(F5BeaconMetricQueryExporterOperator):
@apply_defaults
def __init__(self, # pylint: disable=too-many-arguments
beacon_conn_id: str = 'f5_beacon_default',
destination_dir="/home/airflow/gcs/data",
*args,
**kwargs) -> None:
super().__init__(*args, **kwargs)
self.beacon_conn_id = beacon_conn_id
self.destination_dir = destination_dir
self.beacon_hook = BeaconHook(self.beacon_conn_id)
def execute(self, context):
if not self.mfn:
self.mfn = self.get_metrics_fn(context['dag_run'].run_id)
if not self.sfn:
self.sfn = self.get_schema_fn(context['dag_run'].run_id)
conn = self.beacon_hook.get_conn()
account_id = 'primary'
if 'account_id' in conn.extra_dejson:
account_id = conn.extra_dejson['account_id']
self.stop_timestamp = int(time.mktime(
context.get("execution_date").timetuple()))
self.start_timestamp = self.stop_timestamp - 3600
        self.log.info('Extracting metrics from f5 Beacon account %s for %s - %s into: %s',
                      account_id, self.start_timestamp, self.stop_timestamp, self.destination_dir)
        known_measurements = self.beacon_hook.get_measurements()
        self.log.info('found %s measurements for f5 Beacon account %s',
                      known_measurements, account_id)
if os.path.exists(self.mfn):
os.unlink(self.mfn)
for measurement in known_measurements:
self.get_measurement_records(
account_id, measurement, context['dag_run'].run_id)
| 41.349593
| 125
| 0.559772
|
7d5a033bc6cb4cec59e71d660b96d1ead0dee0e0
| 2,086
|
py
|
Python
|
lhotse/bin/modes/recipes/icsi.py
|
glynpu/lhotse
|
1d7807025575fdaa96cb907c451db0fb0fd23cde
|
[
"Apache-2.0"
] | 64
|
2020-04-27T14:55:15.000Z
|
2020-10-25T06:57:56.000Z
|
lhotse/bin/modes/recipes/icsi.py
|
glynpu/lhotse
|
1d7807025575fdaa96cb907c451db0fb0fd23cde
|
[
"Apache-2.0"
] | 85
|
2020-04-26T06:29:47.000Z
|
2020-10-19T20:28:52.000Z
|
lhotse/bin/modes/recipes/icsi.py
|
glynpu/lhotse
|
1d7807025575fdaa96cb907c451db0fb0fd23cde
|
[
"Apache-2.0"
] | 17
|
2020-06-19T06:26:33.000Z
|
2020-10-12T15:19:15.000Z
|
import click
from lhotse.bin.modes import download, prepare
from lhotse.recipes.icsi import download_icsi, prepare_icsi
from lhotse.utils import Pathlike
__all__ = ["icsi"]
@download.command(context_settings=dict(show_default=True))
@click.argument("audio_dir", type=click.Path())
@click.option(
"--transcripts-dir",
type=click.Path(),
default=None,
help="To download annotations in a different directory than audio.",
)
@click.option(
"--mic",
type=click.Choice(["ihm", "ihm-mix", "sdm", "mdm"], case_sensitive=False),
default="ihm",
help="ICSI microphone setting.",
)
@click.option(
"--url",
type=str,
default="http://groups.inf.ed.ac.uk/ami",
help="ICSI data downloading URL.",
)
@click.option(
"--force-download",
type=bool,
default=False,
help="If True, download even if file is present.",
)
def icsi(
audio_dir: Pathlike,
transcripts_dir: Pathlike,
mic: str,
url: str,
force_download: bool,
):
"""ICSI data download."""
download_icsi(
audio_dir,
transcripts_dir=transcripts_dir,
mic=mic,
url=url,
force_download=force_download,
)
@prepare.command(context_settings=dict(show_default=True))
@click.argument("audio_dir", type=click.Path(exists=True, dir_okay=True))
@click.argument("output_dir", type=click.Path())
@click.option(
"--transcripts-dir", type=click.Path(exists=True, dir_okay=True), default=None
)
@click.option(
"--mic",
type=click.Choice(["ihm", "ihm-mix", "sdm", "mdm"], case_sensitive=False),
default="ihm",
help="ICSI microphone setting.",
)
@click.option(
"--normalize-text",
is_flag=True,
help="If set, convert all text annotations to upper case (similar to Kaldi)",
)
def icsi(
    audio_dir: Pathlike,
    transcripts_dir: Pathlike,
    output_dir: Pathlike,
    mic: str,
    normalize_text: bool,
):
    """ICSI data preparation."""
    prepare_icsi(
        audio_dir,
        transcripts_dir,
output_dir=output_dir,
mic=mic,
normalize_text=normalize_text,
)
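# Hypothetical command-line usage of the two commands above (assuming the
# standard `lhotse` entry point; paths are placeholders):
#   lhotse download icsi /data/icsi --mic ihm
#   lhotse prepare icsi /data/icsi /data/manifests --mic ihm --normalize-text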
| 24.541176
| 82
| 0.658198
|
1e72c48b2a7e2cb8bbf46e10ec7bdaab0dddffdd
| 2,604
|
py
|
Python
|
lib/btstack/tool/bluetooth_data_types.py
|
thiamchi/micropython_custom
|
b2d3ac286268502029a2215adb710c15f21ccae0
|
[
"MIT"
] | 3
|
2021-06-05T01:04:56.000Z
|
2021-12-19T18:08:08.000Z
|
lib/btstack/tool/bluetooth_data_types.py
|
thiamchi/micropython_custom
|
b2d3ac286268502029a2215adb710c15f21ccae0
|
[
"MIT"
] | 1
|
2021-06-19T19:03:10.000Z
|
2021-06-19T19:29:10.000Z
|
lib/btstack/tool/bluetooth_data_types.py
|
thiamchi/micropython_custom
|
b2d3ac286268502029a2215adb710c15f21ccae0
|
[
"MIT"
] | 3
|
2021-06-12T12:26:34.000Z
|
2022-02-22T11:05:01.000Z
|
#!/usr/bin/env python
#
# Scrape GAP Data Types from Bluetooth SIG page
# Copyright 2016 BlueKitchen GmbH
#
from lxml import html
import datetime
import re
import requests
import sys
import os
headers = {'user-agent': 'curl/7.63.0'}
program_info = '''
BTstack Data Types Scraper for BTstack
Copyright 2016, BlueKitchen GmbH
'''
header = '''/**
* bluetooth_data_types.h generated from Bluetooth SIG website for BTstack by tool/bluetooth_data_types.py
* {url}
* {datetime}
*/
#ifndef BLUETOOTH_DATA_TYPES_H
#define BLUETOOTH_DATA_TYPES_H
'''
trailer = '''
#endif
'''
def clean(tag):
# << 0xab
# >> 0xbb
# \n
# non-visible whitespace 0x200b
    # non-visible whitespace 0xa0
return tag.replace(u'\xab','').replace(u'\xbb','').replace(u'\u200b','').replace('\n','').replace(u'\xa0',' ').strip()
def scrape_page(fout, url):
print("Parsing %s" % url)
page = requests.get(url, headers=headers)
tree = html.fromstring(page.content)
print('')
print('%-48s | %s' % ("Data Type Name", "Data Type Value"))
print('-' * 70)
    # get all <tr> elements inside <tbody> of every table on the page
rows = tree.xpath('//table/tbody/tr')
for row in rows:
children = row.getchildren()
data_type_value = children[0].text_content()
data_type_name = children[1].text_content()
# table with references to where it was used
if (data_type_value == 'Data Type Value'):
continue
# clean up
data_type_name = clean(data_type_name)
data_type_value = clean(data_type_value)
tag = data_type_name
        # upper case
        tag = tag.upper()
        # collapse ' - ' into ' '
        tag = tag.replace(' - ', ' ')
        # drop dashes otherwise
        tag = tag.replace('-', ' ')
        # collapse multiple spaces into one
        tag = re.sub(r'\s+', ' ', tag).strip()
        # replace spaces with underscores
        tag = tag.replace(' ', '_')
fout.write("#define BLUETOOTH_DATA_TYPE_%-50s %s // %s\n" % (tag, data_type_value, data_type_name))
print("%-48s | %s" % (data_type_name, data_type_value))
btstack_root = os.path.abspath(os.path.dirname(sys.argv[0]) + '/..')
gen_path = btstack_root + '/src/bluetooth_data_types.h'
print(program_info)
with open(gen_path, 'wt') as fout:
url = 'https://www.bluetooth.com/specifications/assigned-numbers/generic-access-profile'
fout.write(header.format(datetime=str(datetime.datetime.now()), url=url.replace('https://','')))
scrape_page(fout, url)
fout.write(trailer)
print('')
print('Scraping successful into %s!\n' % gen_path)
| 27.410526
| 122
| 0.631336
|
7089b6a943c5ffe06e969b8e0cd598262f46aa89
| 1,410
|
py
|
Python
|
tests/tests_mlflow_turing_scoring_server/base/base_test.py
|
jose-turintech/mlflow-turing-scoring-server
|
2a4ccdc94fa6313f401dad37fd40b232a36dc530
|
[
"BSD-3-Clause"
] | null | null | null |
tests/tests_mlflow_turing_scoring_server/base/base_test.py
|
jose-turintech/mlflow-turing-scoring-server
|
2a4ccdc94fa6313f401dad37fd40b232a36dc530
|
[
"BSD-3-Clause"
] | null | null | null |
tests/tests_mlflow_turing_scoring_server/base/base_test.py
|
jose-turintech/mlflow-turing-scoring-server
|
2a4ccdc94fa6313f401dad37fd40b232a36dc530
|
[
"BSD-3-Clause"
] | null | null | null |
"""
This module aims to implement the base class used by all test classes with common methods.
"""
# ────────────────────────────────────────── imports ────────────────────────────────────────── #
import unittest
from pathlib import Path
from tests_mlflow_turing_scoring_server.conftest import data_mgr
# ───────────────────────────────────────────────────────────────────────────────────────────── #
# Base Test Class #
# ───────────────────────────────────────────────────────────────────────────────────────────── #
class BaseTest:
"""
Base Tests Class
"""
case = unittest.TestCase()
method: str = None
_data_path: Path = data_mgr.data_path
# -------------------------------------------------------------------------------------------
@classmethod
def setup_class(cls):
""" Configuration called when initializing the class """
@classmethod
def teardown_class(cls):
""" Configuration called when destroying the class """
def setup_method(self, method):
""" Configuration called for every method """
self.method = method.__name__
def teardown_method(self, method):
""" Configuration called at the end of the method execution """
# -------------------------------------------------------------------------------------------
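    # Hypothetical usage sketch (not part of this module): a pytest test class can
    # inherit from BaseTest to reuse the shared helpers, e.g.
    #
    #   class TestSomething(BaseTest):
    #       def test_method_name_is_recorded(self):
    #           # setup_method has already stored the current test name
    #           self.case.assertEqual(self.method, "test_method_name_is_recorded")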
| 32.790698
| 97
| 0.411348
|
fb627dbaf26ba4cb7a17ce25f654366db8f3fa37
| 7,420
|
py
|
Python
|
app/core/models.py
|
rach4you/secteur-app-api
|
c32093bc75db0ce84681df6cec5862542132e519
|
[
"MIT"
] | null | null | null |
app/core/models.py
|
rach4you/secteur-app-api
|
c32093bc75db0ce84681df6cec5862542132e519
|
[
"MIT"
] | 3
|
2021-03-10T22:56:54.000Z
|
2022-02-27T07:00:47.000Z
|
app/core/models.py
|
rach4you/secteur-app-api
|
c32093bc75db0ce84681df6cec5862542132e519
|
[
"MIT"
] | null | null | null |
import uuid
import os
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, \
PermissionsMixin
from django.conf import settings
def user_image_file_path(instance, filename):
"""Generate file path for new user image"""
ext = filename.split('.')[-1]
filename = f'{uuid.uuid4()}.{ext}'
return os.path.join('uploads/user/', filename)
class UserManager(BaseUserManager):
def create_user(self, email, password=None, **extra_fields):
"""Creates and saves a new User"""
if not email:
raise ValueError('Users must have an email address')
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password):
"""Creates and saves a new super user"""
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user
class User(AbstractBaseUser, PermissionsMixin):
"""Custom user model that supports using email instead of username"""
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = 'email'
class Employe(models.Model):
"""Entreprise to be used for a recipe"""
class Meta:
db_table = 'employes'
nom_employe = models.CharField(max_length=300)
prenom_employe = models.CharField(max_length=300)
tel = models.CharField(max_length=300, blank=True)
image = models.ImageField(null=True, upload_to=user_image_file_path)
user = models.OneToOneField(User, on_delete=models.DO_NOTHING)
agence = models.CharField(max_length=300, blank=True)
def __str__(self):
return self.nom_employe
class Entreprise(models.Model):
"""Entreprise to be used for a recipe"""
class Meta:
db_table = 'entreprises'
raison_sociale = models.CharField(max_length=300)
def __str__(self):
return self.raison_sociale
class Operateur(models.Model):
class Meta:
db_table = 'operateurs'
operateur = models.CharField(max_length=300)
def __str__(self):
return f"{self.operateur}"
class Devise(models.Model):
class Meta:
db_table = 'devises'
devise = models.CharField(max_length=300)
def __str__(self):
return f"{self.devise}"
class Secteur(models.Model):
class Meta:
db_table = 'secteurs'
secteur = models.CharField(max_length=300)
def __str__(self):
return f"{self.secteur}"
class Filiere(models.Model):
class Meta:
db_table = 'filieres'
filiere = models.CharField(max_length=300)
secteur = models.ForeignKey(Secteur,
on_delete=models.CASCADE,
related_name="filieres")
def __str__(self):
return f"{self.filiere}"
class CreditAlloue(models.Model):
class Meta:
db_table = 'credit_alloues'
fe = models.IntegerField()
fc = models.IntegerField()
filiere = models.ForeignKey(Filiere,
on_delete=models.DO_NOTHING,
related_name="credit_alloue")
def __str__(self):
return f"{self.filiere}"
class Formulaire(models.Model):
class Meta:
db_table = 'formulaires'
code = models.CharField(max_length=300)
theme = models.CharField(max_length=300)
lieu = models.CharField(max_length=300)
secteur = models.ForeignKey(Secteur, on_delete=models.DO_NOTHING)
filiere = models.ForeignKey(Filiere, on_delete=models.DO_NOTHING)
operateur = models.ForeignKey(Operateur, on_delete=models.DO_NOTHING)
entreprise = models.ForeignKey(Entreprise, on_delete=models.DO_NOTHING)
devise = models.ForeignKey(Devise, on_delete=models.DO_NOTHING)
date_creation = models.DateTimeField(auto_now_add=True)
date_depot = models.DateField()
date_demarrage = models.DateField()
date_achevement = models.DateField()
montant = models.FloatField()
competence = models.CharField(max_length=300, null=True)
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
blank=True,
null=True, on_delete=models.DO_NOTHING)
def __str__(self):
return f"{self.code}"
class Module(models.Model):
class Meta:
db_table = 'modules'
module = models.CharField(max_length=300)
horaire = models.IntegerField(default=0)
formulaire = models.ForeignKey(Formulaire, on_delete=models.CASCADE, related_name="modules")
def __str__(self):
return f"{self.formulaire}"
class Beneficiaire(models.Model):
class Meta:
db_table = 'beneficiaires'
cin = models.CharField(max_length=300)
nom = models.CharField(max_length=300)
prenom = models.CharField(max_length=300)
tel = models.CharField(max_length=300, blank=True)
email = models.CharField(max_length=300, blank=True)
cnss = models.CharField(max_length=300, blank=True)
ancien = models.BooleanField(default=True)
date_creation = models.DateTimeField(auto_now_add=True, blank=True, null=True)
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
blank=True,
null=True, on_delete=models.DO_NOTHING)
def __str__(self):
return f"{self.cin}"
class BeneficiaireFormulaire(models.Model):
class Meta:
db_table = 'beneficiaire_formulaires'
diplome = models.CharField(max_length=300)
profil_bareme = models.CharField(max_length=300)
type = models.CharField(max_length=10)
contrat = models.CharField(max_length=10)
beneficier = models.BooleanField(default=False)
non_conforme = models.BooleanField(default=False)
engagement = models.FloatField()
consommation = models.FloatField(default=0)
date_dembauche = models.DateField()
beneficiaire = models.ForeignKey(Beneficiaire, on_delete=models.DO_NOTHING, related_name="formulaires")
formulaire = models.ForeignKey(Formulaire, on_delete=models.CASCADE, related_name="beneficiaires")
def __str__(self):
return f"{self.formulaire}"
class Facture(models.Model):
class Meta:
db_table = 'facturations'
num_facture = models.CharField(max_length=100)
commentaire = models.CharField(max_length=300, blank=True)
mttc = models.FloatField()
montant_dh = models.FloatField(default=0)
taux = models.FloatField(default=0)
date_creation = models.DateTimeField(auto_now_add=True)
date_facture = models.DateField(default=None)
formulaire = models.ForeignKey(Formulaire, on_delete=models.CASCADE, related_name="facture")
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
blank=True,
null=True, on_delete=models.DO_NOTHING)
def __str__(self):
return f"{self.num_facture}"
class DiplomeFiliere(models.Model):
class Meta:
db_table = 'diplome_filieres'
diplome = models.CharField(max_length=100)
filiere = models.ForeignKey(Filiere,
on_delete=models.DO_NOTHING,
related_name="diplome_filiere")
def __str__(self):
return f"{self.filiere}"
| 30.916667
| 107
| 0.683827
|
43a7a610de8444988ef65873e7e32db5d5ab6a32
| 4,309
|
py
|
Python
|
tests/test_tutorial/test_path_operation_configurations/test_tutorial005_py310.py
|
ssensalo/fastapi
|
146f57b8f70c5757dc20edc716dba1b96936a8d6
|
[
"MIT"
] | 1
|
2021-11-26T15:27:59.000Z
|
2021-11-26T15:27:59.000Z
|
tests/test_tutorial/test_path_operation_configurations/test_tutorial005_py310.py
|
ssensalo/fastapi
|
146f57b8f70c5757dc20edc716dba1b96936a8d6
|
[
"MIT"
] | 1
|
2022-01-07T21:04:04.000Z
|
2022-01-07T21:04:04.000Z
|
tests/test_tutorial/test_path_operation_configurations/test_tutorial005_py310.py
|
ssensalo/fastapi
|
146f57b8f70c5757dc20edc716dba1b96936a8d6
|
[
"MIT"
] | null | null | null |
import pytest
from fastapi.testclient import TestClient
from ...utils import needs_py310
openapi_schema = {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/items/": {
"post": {
"responses": {
"200": {
"description": "The created item",
"content": {
"application/json": {
"schema": {"$ref": "#/components/schemas/Item"}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"summary": "Create an item",
"description": "Create an item with all the information:\n\n- **name**: each item must have a name\n- **description**: a long description\n- **price**: required\n- **tax**: if the item doesn't have tax, you can omit this\n- **tags**: a set of unique tag strings for this item",
"operationId": "create_item_items__post",
"requestBody": {
"content": {
"application/json": {
"schema": {"$ref": "#/components/schemas/Item"}
}
},
"required": True,
},
}
}
},
"components": {
"schemas": {
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
"description": {"title": "Description", "type": "string"},
"tax": {"title": "Tax", "type": "number"},
"tags": {
"title": "Tags",
"uniqueItems": True,
"type": "array",
"items": {"type": "string"},
"default": [],
},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {"anyOf": [{"type": "string"}, {"type": "integer"}]},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
@pytest.fixture(name="client")
def get_client():
from docs_src.path_operation_configuration.tutorial005_py310 import app
client = TestClient(app)
return client
@needs_py310
def test_openapi_schema(client: TestClient):
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == openapi_schema
@needs_py310
def test_query_params_str_validations(client: TestClient):
response = client.post("/items/", json={"name": "Foo", "price": 42})
assert response.status_code == 200, response.text
assert response.json() == {
"name": "Foo",
"price": 42,
"description": None,
"tax": None,
"tags": [],
}
| 35.319672
| 293
| 0.387561
|
1155e10055a31211a92d353787e5a681d5aaa47f
| 4,138
|
py
|
Python
|
dubbo_client/rpclib.py
|
wuwen5/dubbo-python
|
d86ad6acd30592c2389c6aba4360e0b61b8396dc
|
[
"Apache-2.0"
] | 1
|
2020-12-21T06:58:10.000Z
|
2020-12-21T06:58:10.000Z
|
dubbo_client/rpclib.py
|
wuwen5/dubbo-python
|
d86ad6acd30592c2389c6aba4360e0b61b8396dc
|
[
"Apache-2.0"
] | null | null | null |
dubbo_client/rpclib.py
|
wuwen5/dubbo-python
|
d86ad6acd30592c2389c6aba4360e0b61b8396dc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
"""
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from urllib2 import HTTPError
from pyjsonrpc import HttpClient, JsonRpcError
from dubbo.connection.connections import connection_pool
from dubbo_client.registry import Registry
from dubbo_client.rpcerror import ConnectionFail, dubbo_client_errors, InternalError, DubboClientError
class DubboClient(object):
interface = ''
group = ''
version = ''
protocol = ''
protocol_methods = {}
class _Method(object):
def __init__(self, client_instance, method):
self.client_instance = client_instance
self.method = method
def __call__(self, *args, **kwargs):
return self.client_instance.call(self.method, *args, **kwargs)
def __init__(self, interface, registry, **kwargs):
assert isinstance(registry, Registry)
self.interface = interface
self.registry = registry
self.group = kwargs.get('group', '')
self.version = kwargs.get('version', '')
self.protocol = kwargs.get('protocol', '')
self.registry.subscribe(interface)
self.registry.register(interface)
self.protocol_methods = {'jsonrpc': self.__jsonrpc_call__, 'dubbo': self.__dubbo_call__}
def call(self, method, *args, **kwargs):
provider = self.registry.get_random_provider(self.interface, version=self.version, group=self.group,
protocol=self.protocol)
timeout = int(provider.timeout) / 1000
return self.protocol_methods[provider.protocol](method, provider, timeout, *args, **kwargs)
def __dubbo_call__(self, method, provider, timeout, *args, **kwargs):
if not isinstance(args, (list, tuple)):
args = [args]
request_param = {
'dubbo_version': provider.dubbo,
'version': provider.version,
'path': provider.interface if not provider.group else provider.group + '/' + provider.interface,
'method': method,
'arguments': args
}
result = connection_pool.get(provider.location, request_param, timeout, **kwargs)
return result
def __jsonrpc_call__(self, method, provider, timeout, *args, **kwargs):
client = HttpClient(url="http://{0}{1}".format(provider.location, provider.path), timeout=timeout)
try:
return client.call(method, *args, **kwargs)
except HTTPError, e:
raise ConnectionFail(None, e.filename)
except JsonRpcError, error:
if error.code in dubbo_client_errors:
raise dubbo_client_errors[error.code](message=error.message, data=error.data)
else:
raise DubboClientError(code=error.code, message=error.message, data=error.data)
except Exception, ue:
if hasattr(ue, 'reason'):
raise InternalError(ue.message, ue.reason)
else:
raise InternalError(ue.message, None)
def __call__(self, method, *args, **kwargs):
"""
Redirects the direct call to *self.call*
"""
return self.call(method, *args, **kwargs)
def __getattr__(self, method):
"""
Allows the usage of attributes as *method* names.
"""
return self._Method(client_instance=self, method=method)
if __name__ == '__main__':
pass
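    # Hypothetical usage sketch (not from the original module); how the Registry is
    # constructed depends on dubbo_client.registry and is assumed here:
    #   registry = ...  # e.g. a registry instance pointing at the registry address
    #   client = DubboClient('com.example.UserService', registry, version='1.0.0')
    #   # thanks to __getattr__, remote methods can be called as attributes:
    #   user = client.getUserById('42')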
| 36.946429
| 108
| 0.656839
|
8dbbe1eba0b6d757452a8466f8e9d111c35011d2
| 384
|
py
|
Python
|
code/List/symmetric_difference.py
|
jumploop/30-seconds-of-python
|
bfcc5a35d9bd0bba67e81de5715dba21e1ba43be
|
[
"CC0-1.0"
] | null | null | null |
code/List/symmetric_difference.py
|
jumploop/30-seconds-of-python
|
bfcc5a35d9bd0bba67e81de5715dba21e1ba43be
|
[
"CC0-1.0"
] | null | null | null |
code/List/symmetric_difference.py
|
jumploop/30-seconds-of-python
|
bfcc5a35d9bd0bba67e81de5715dba21e1ba43be
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Purpose: return the symmetric difference between two iterables, without filtering out duplicate values.
How it works:
Create a set from each list.
Use a list comprehension over each list to keep only the values that are not in the set built from the other list.
"""
def symmetric_difference(a, b):
(_a, _b) = (set(a), set(b))
return [item for item in a if item not in _b] + [item for item in b if item not in _a]
# Examples
print(symmetric_difference([1, 2, 3], [1, 2, 4]))
# output:
# [3, 4]
| 17.454545
| 90
| 0.643229
|
d03c10542c895a34900c7151f2abb769ff1e90fd
| 340
|
py
|
Python
|
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_projects/migrations/0011_remove_project_screenshot.py
|
michellechena/azure-intelligent-edge-patterns
|
b1260b962b208880532391e7ef2148d240f489f8
|
[
"MIT"
] | null | null | null |
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_projects/migrations/0011_remove_project_screenshot.py
|
michellechena/azure-intelligent-edge-patterns
|
b1260b962b208880532391e7ef2148d240f489f8
|
[
"MIT"
] | null | null | null |
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_projects/migrations/0011_remove_project_screenshot.py
|
michellechena/azure-intelligent-edge-patterns
|
b1260b962b208880532391e7ef2148d240f489f8
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.8 on 2021-08-26 05:38
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('azure_projects', '0010_auto_20210825_1021'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='screenshot',
),
]
| 18.888889
| 54
| 0.605882
|
cb72007062a87f18172db9b811bdf63a789162a4
| 7,249
|
py
|
Python
|
EvoOpt/solvers/SimulatedAnnealing.py
|
tsyet12/EvoOpt
|
2e70f29ee58217a667430571ac81bae7112e9336
|
[
"BSD-2-Clause"
] | 9
|
2019-09-27T19:14:05.000Z
|
2022-03-13T00:42:34.000Z
|
EvoOpt/solvers/SimulatedAnnealing.py
|
tsyet12/Moptimum
|
2e70f29ee58217a667430571ac81bae7112e9336
|
[
"BSD-2-Clause"
] | 4
|
2019-05-31T14:32:02.000Z
|
2021-02-09T20:30:27.000Z
|
EvoOpt/solvers/SimulatedAnnealing.py
|
tsyet12/Moptimum
|
2e70f29ee58217a667430571ac81bae7112e9336
|
[
"BSD-2-Clause"
] | 1
|
2019-11-15T18:37:55.000Z
|
2019-11-15T18:37:55.000Z
|
'''Simulated Annealing
###
###Code and Implementation by Sin Yong, Teng
###
###
###Implemented on 08/06/2019
'''
import numpy as np
import matplotlib.pyplot as mp
class SimulatedAnnealing():
def __init__(self,f,x,lb,ub,pop=200,max_gen=50,nsize=1,normal_neighbour=True,verbose=True):
self.f=np.vectorize(f)
self.x=x
self.lb=lb
self.ub=ub
self.pop=pop
self.verbose=verbose
self.normal_neighbour=normal_neighbour
self.nsize=nsize
self.max_gen=max_gen
self.pop_mat=np.tile(self.lb,(pop,1))+np.random.rand(pop,len(x)).astype(np.longdouble)*(np.tile(self.ub,(pop,1))-np.tile(self.lb,(pop,1)))
self.plotgen=[]
self.average_fit=[]
self.history1=self.pop_mat[:,0]
self.history2=self.pop_mat[:,1]
self.best_result=[]
self.best_domain=[]
self.overall_best=[]
self.overall_bestdomain=[]
def solve(self):
self.evaluate(initial=True)
for i in range(self.max_gen+1):
self.update(generation=i)
self.evaluate()
if self.verbose:
self.log_result(generation=i)
def evaluate(self,initial=False):
#get fitness of all population
if initial:
self.pop_mat_fit=self.f(*self.pop_mat.T)
#concatenate and sort population by fitness
temp_mat=np.concatenate((np.asarray(self.pop_mat_fit).reshape(self.pop_mat_fit.shape[0],1),self.pop_mat),axis=1)
#sort new points by fitness
temp_mat=temp_mat[temp_mat[:,0].argsort()]
#return the sorted values to pop matrix
self.pop_mat_fit, self.pop_mat= np.copy(temp_mat[:,0]), np.copy(temp_mat[:,1:])
def update(self,generation):
#neighbours=np.tile(self.lb,(self.pop,1))+np.random.rand(self.pop,len(self.x)).astype(np.longdouble)*(np.tile(self.ub,(self.pop,1))-np.tile(self.lb,(self.pop,1)))
if self.normal_neighbour:
neighbours=np.clip(np.tile(self.lb,(self.pop,1))+np.random.uniform(0,1,(self.pop,len(self.x))).astype(np.longdouble)*(np.tile(self.ub,(self.pop,1))-np.tile(self.lb,(self.pop,1)))/self.nsize,a_min=self.lb,a_max=self.ub)
else:
neighbours=np.clip(np.tile(self.lb,(self.pop,1))+np.random.rand(self.pop,len(self.x)).astype(np.longdouble)*(np.tile(self.ub,(self.pop,1))-np.tile(self.lb,(self.pop,1)))/self.nsize,a_min=self.lb,a_max=self.ub)
neighbour_fit=self.f(*neighbours.T)
#print('nf=',neighbour_fit)
#print('pop_mat_fit',self.pop_mat_fit)
p=np.random.rand(*self.pop_mat_fit.shape).astype(np.longdouble)
condition=(p<=np.clip(np.exp((self.pop_mat_fit-neighbour_fit)/(self.max_gen/(generation+1))).astype(np.longdouble),a_min=0,a_max=1)).reshape(self.pop_mat_fit.shape)
self.pop_mat=np.repeat((~condition).astype(int),len(self.x)).reshape(self.pop_mat.shape)*self.pop_mat+np.repeat((condition).astype(int),len(self.x)).reshape(self.pop_mat.shape)*neighbours
self.pop_mat_fit=(~condition).astype(int)*self.pop_mat_fit+(condition).astype(int)*neighbour_fit
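        # Acceptance rule used above (Metropolis-style): a neighbour replaces the
        # current point when p <= exp((f_current - f_neighbour) / T), with the
        # "temperature" T = max_gen / (generation + 1), so worse moves are accepted
        # less often as the generation counter grows; better moves (positive
        # numerator) are always accepted because the clipped probability is 1.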
def log_result(self,generation):
print("Generation #",generation,"Best Fitness=", self.pop_mat_fit[0], "Answer=", self.pop_mat[0])
self.plotgen.append(generation)
self.best_result.append(self.pop_mat_fit[0])
self.best_domain.append(self.pop_mat[0])
self.overall_best.append(min(self.best_result))
if self.overall_best[-1]==self.best_result[-1]:
self.overall_bestdomain.append(self.best_domain[-1])
else:
self.overall_bestdomain.append(self.overall_bestdomain[-1])
self.average_fit.append(np.average(self.pop_mat_fit))
self.history1=np.concatenate((self.history1,self.pop_mat[:,0]),axis=0)
self.history2=np.concatenate((self.history2,self.pop_mat[:,1]),axis=0)
if generation==self.max_gen:
print("Final Best Fitness=",self.overall_best[-1],"Answer=",self.best_domain[-1])
def plot_result(self,contour_density=50):
subtitle_font=16
axis_font=14
title_weight="bold"
axis_weight="bold"
tick_font=14
fig=mp.figure()
fig.suptitle("Simulated Annealing Optimization", fontsize=20, fontweight=title_weight)
fig.tight_layout()
mp.subplots_adjust(hspace=0.3,wspace=0.3)
mp.rc('xtick',labelsize=tick_font)
mp.rc('ytick',labelsize=tick_font)
mp.subplot(2,2,1)
mp.plot(self.plotgen,self.overall_best)
mp.title("Convergence Curve", fontsize=subtitle_font,fontweight=title_weight)
mp.xlabel("Number of Generation",fontsize=axis_font, fontweight=axis_weight)
mp.ylabel("Fitness of Best Solution",fontsize=axis_font, fontweight=axis_weight)
mp.autoscale()
mp.subplot(2,2,2)
mp.plot(self.plotgen,[x[0] for x in self.overall_bestdomain])
mp.title("Trajectory in the First Dimension",fontsize=subtitle_font,fontweight=title_weight)
mp.xlabel("Number of Generation",fontsize=axis_font, fontweight=axis_weight)
mp.ylabel("Variable in the First Dimension",fontsize=axis_font, fontweight=axis_weight)
mp.autoscale()
mp.subplot(2,2,3)
mp.plot(self.plotgen, self.average_fit)
mp.title("Average Fitness during Convergence",fontsize=subtitle_font,fontweight=title_weight)
mp.xlabel("Number of Generation",fontsize=axis_font, fontweight=axis_weight)
mp.ylabel("Average Fitness of Population",fontsize=axis_font, fontweight=axis_weight)
mp.subplot(2,2,4)
cont_x=[]
cont_y=[]
cont_z=[]
tempx=self.lb[0]
tempy=self.lb[1]
average_other=list(map(lambda x,y:(x+y)/2,self.lb[2:],self.ub[2:]))
for x in range(contour_density):
tempx=tempx+(self.ub[0]-self.lb[0])/contour_density
tempy=tempy+(self.ub[1]-self.lb[1])/contour_density
cont_x.append(tempx)
cont_y.append(tempy)
for y in cont_y:
cont_z.append([self.f(x,y,*average_other) for x in cont_x])
mp.plot(self.history1,self.history2,'bo', markersize=3, alpha=0.4)
CS=mp.contour(cont_x,cont_y,cont_z)
mp.clabel(CS,inline=True,inline_spacing=-10,fontsize=10)
mp.title("Points Evaluated",fontsize=subtitle_font,fontweight=title_weight)
mp.ylabel("Second Dimension",fontsize=axis_font, fontweight=axis_weight)
mp.xlabel("First Dimension",fontsize=axis_font, fontweight=axis_weight)
mp.autoscale()
try:
mng = mp.get_current_fig_manager()
mng.window.state('zoomed')
except:
print("Format your plot using: matplotlib.rcParams['figure.figsize'] = [width, height]")
mp.show()
if __name__=="__main__":
def f(x1,x2,x3):
y=(x1-50)**2+(x2-20)**2+(x3-50)**2
return y
sa=SimulatedAnnealing(f,["x1","x2","x3"],[0,0,5],[100,50,100],pop=200,max_gen=50,nsize=1,normal_neighbour=True)
sa.solve()
sa.plot_result()
| 42.391813
| 230
| 0.637053
|
0d011f719b84fa9bc4601f444ee192014249614a
| 1,674
|
py
|
Python
|
problems/lt126.py
|
jaxonwang/my_leetcode
|
f55c664050a27f583abeb534564b5c3af48f6192
|
[
"MIT"
] | null | null | null |
problems/lt126.py
|
jaxonwang/my_leetcode
|
f55c664050a27f583abeb534564b5c3af48f6192
|
[
"MIT"
] | null | null | null |
problems/lt126.py
|
jaxonwang/my_leetcode
|
f55c664050a27f583abeb534564b5c3af48f6192
|
[
"MIT"
] | null | null | null |
import queue
import itertools
class Solution(object):
def findLadders(self, beginWord, endWord, wordList):
"""
:type beginWord: str
:type endWord: str
:type wordList: List[str]
:rtype: List[List[str]]
"""
parent = {}
def append_to_parent(p, child):
if child not in parent:
parent[child] = [p]
else:
parent[child].append(p)
q = queue.Queue()
q.put([beginWord])
no_visit = set(wordList)
if beginWord in no_visit:
no_visit.remove(beginWord)
saw_end = False
while not q.empty():
layer = q.get()
next_layer = set()
for i in layer:
for k in range(len(i)):
for c in 'abcdefghijklmnopqrstuvwxyz':
neww = i[:k]+c+i[k+1:]
if neww in no_visit:
next_layer.add(neww)
append_to_parent(i, neww)
if neww == endWord:
saw_end = True
if saw_end:
break
if next_layer:
q.put(next_layer)
for i in next_layer:
no_visit.remove(i)
if not saw_end:
return []
def make_path(root):
if root == beginWord:
return [[beginWord]]
pathes = list(itertools.chain(*[make_path(p) for p in parent[root]]))
for p in pathes:
p.append(root)
return pathes
return make_path(endWord)
# example invocation (standard LeetCode 126 test case)
ret = Solution().findLadders("hit", "cog", ["hot", "dot", "dog", "lot", "log", "cog"])
print(ret)
| 29.368421
| 81
| 0.457587
|
4583eb29e8a8f729683cd5c35166547bea5e6b84
| 17,833
|
py
|
Python
|
src/datasets/inspect.py
|
KhalidAlt/datasets
|
f013aff823b831fb48b458a7bbb3bb11c2981326
|
[
"Apache-2.0"
] | null | null | null |
src/datasets/inspect.py
|
KhalidAlt/datasets
|
f013aff823b831fb48b458a7bbb3bb11c2981326
|
[
"Apache-2.0"
] | null | null | null |
src/datasets/inspect.py
|
KhalidAlt/datasets
|
f013aff823b831fb48b458a7bbb3bb11c2981326
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
""" List and inspect datasets and metrics."""
from typing import Dict, List, Mapping, Optional, Sequence, Union
import huggingface_hub
from .info import DatasetInfo
from .load import (
dataset_module_factory,
extend_dataset_builder_for_streaming,
import_main_class,
load_dataset_builder,
metric_module_factory,
)
from .utils import DownloadConfig
from .utils.download_manager import DownloadMode
from .utils.logging import get_logger
from .utils.streaming_download_manager import StreamingDownloadManager
from .utils.version import Version
logger = get_logger(__name__)
class SplitsNotFoundError(ValueError):
pass
def list_datasets(with_community_datasets=True, with_details=False):
"""List all the datasets scripts available on the Hugging Face Hub.
Args:
with_community_datasets (``bool``, optional, default ``True``): Include the community provided datasets.
with_details (``bool``, optional, default ``False``): Return the full details on the datasets instead of only the short name.
"""
datasets = huggingface_hub.list_datasets(full=with_details)
if not with_community_datasets:
datasets = [dataset for dataset in datasets if "/" not in dataset.id]
if not with_details:
datasets = [dataset.id for dataset in datasets]
return datasets
def list_metrics(with_community_metrics=True, with_details=False):
"""List all the metrics script available on the Hugging Face Hub.
Args:
with_community_metrics (Optional ``bool``, default ``True``): Include the community provided metrics.
with_details (Optional ``bool``, default ``False``): Return the full details on the metrics instead of only the short name.
"""
metrics = huggingface_hub.list_metrics()
if not with_community_metrics:
metrics = [metric for metric in metrics if "/" not in metric.id]
if not with_details:
metrics = [metric.id for metric in metrics]
return metrics
def inspect_dataset(path: str, local_path: str, download_config: Optional[DownloadConfig] = None, **download_kwargs):
r"""
    Allow inspection/modification of a dataset script by copying it on local drive at local_path.
Args:
path (``str``): path to the dataset processing script with the dataset builder. Can be either:
- a local path to processing script or the directory containing the script (if the script has the same name as the directory),
e.g. ``'./dataset/squad'`` or ``'./dataset/squad/squad.py'``
- a dataset identifier on the Hugging Face Hub (list all available datasets and ids with ``datasets.list_datasets()``)
e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'``
        local_path (``str``): path to the local folder to copy the dataset script to.
        download_config (Optional ``datasets.DownloadConfig``): specific download configuration parameters.
**download_kwargs: optional attributes for DownloadConfig() which will override the attributes in download_config if supplied.
"""
dataset_module = dataset_module_factory(
path, download_config=download_config, force_local_path=local_path, **download_kwargs
)
print(
f"The processing script for dataset {path} can be inspected at {local_path}. "
f"The main class is in {dataset_module.module_path}. "
f"You can modify this processing script and use it with `datasets.load_dataset({local_path})`."
)
def inspect_metric(path: str, local_path: str, download_config: Optional[DownloadConfig] = None, **download_kwargs):
r"""
Allow inspection/modification of a metric script by copying it on local drive at local_path.
Args:
path (``str``): path to the dataset processing script with the dataset builder. Can be either:
- a local path to processing script or the directory containing the script (if the script has the same name as the directory),
e.g. ``'./dataset/squad'`` or ``'./dataset/squad/squad.py'``
- a dataset identifier on the Hugging Face Hub (list all available datasets and ids with ``datasets.list_datasets()``)
e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'``
        local_path (``str``): path to the local folder to copy the dataset script to.
        download_config (Optional ``datasets.DownloadConfig``): specific download configuration parameters.
**download_kwargs: optional attributes for DownloadConfig() which will override the attributes in download_config if supplied.
"""
metric_module = metric_module_factory(
path, download_config=download_config, force_local_path=local_path, **download_kwargs
)
print(
f"The processing scripts for metric {path} can be inspected at {local_path}. "
f"The main class is in {metric_module.module_path}. "
f"You can modify this processing scripts and use it with `datasets.load_metric({local_path})`."
)
def get_dataset_infos(
path: str,
data_files: Optional[Union[Dict, List, str]] = None,
download_config: Optional[DownloadConfig] = None,
download_mode: Optional[DownloadMode] = None,
revision: Optional[Union[str, Version]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
**config_kwargs,
):
"""Get the meta information about a dataset, returned as a dict mapping config name to DatasetInfoDict.
Args:
path (``str``): path to the dataset processing script with the dataset builder. Can be either:
- a local path to processing script or the directory containing the script (if the script has the same name as the directory),
e.g. ``'./dataset/squad'`` or ``'./dataset/squad/squad.py'``
- a dataset identifier on the Hugging Face Hub (list all available datasets and ids with ``datasets.list_datasets()``)
e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'``
revision (Optional ``Union[str, datasets.Version]``):
If specified, the dataset module will be loaded from the datasets repository at this version.
By default:
- it is set to the local version of the lib.
- it will also try to load it from the master branch if it's not available at the local version of the lib.
Specifying a version that is different from your local version of the lib might cause compatibility issues.
download_config (:class:`DownloadConfig`, optional): Specific download configuration parameters.
download_mode (:class:`DownloadMode`, default ``REUSE_DATASET_IF_EXISTS``): Download/generate mode.
data_files (:obj:`Union[Dict, List, str]`, optional): Defining the data_files of the dataset configuration.
use_auth_token (``str`` or ``bool``, optional): Optional string or boolean to use as Bearer token for remote files on the Datasets Hub.
If True, will get token from `"~/.huggingface"`.
config_kwargs: optional attributes for builder class which will override the attributes if supplied.
"""
config_names = get_dataset_config_names(
path=path,
revision=revision,
download_config=download_config,
download_mode=download_mode,
data_files=data_files,
)
return {
config_name: get_dataset_config_info(
path=path,
config_name=config_name,
data_files=data_files,
download_config=download_config,
download_mode=download_mode,
revision=revision,
use_auth_token=use_auth_token,
**config_kwargs,
)
for config_name in config_names
}
def get_dataset_config_names(
path: str,
revision: Optional[Union[str, Version]] = None,
download_config: Optional[DownloadConfig] = None,
download_mode: Optional[DownloadMode] = None,
force_local_path: Optional[str] = None,
dynamic_modules_path: Optional[str] = None,
data_files: Optional[Union[Dict, List, str]] = None,
**download_kwargs,
):
"""Get the list of available config names for a particular dataset.
Args:
path (``str``): path to the dataset processing script with the dataset builder. Can be either:
- a local path to processing script or the directory containing the script (if the script has the same name as the directory),
e.g. ``'./dataset/squad'`` or ``'./dataset/squad/squad.py'``
- a dataset identifier on the Hugging Face Hub (list all available datasets and ids with ``datasets.list_datasets()``)
e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'``
revision (Optional ``Union[str, datasets.Version]``):
If specified, the dataset module will be loaded from the datasets repository at this version.
By default:
- it is set to the local version of the lib.
- it will also try to load it from the master branch if it's not available at the local version of the lib.
Specifying a version that is different from your local version of the lib might cause compatibility issues.
download_config (:class:`DownloadConfig`, optional): Specific download configuration parameters.
download_mode (:class:`DownloadMode`, default ``REUSE_DATASET_IF_EXISTS``): Download/generate mode.
force_local_path (Optional str): Optional path to a local path to download and prepare the script to.
Used to inspect or modify the script folder.
dynamic_modules_path (Optional str, defaults to HF_MODULES_CACHE / "datasets_modules", i.e. ~/.cache/huggingface/modules/datasets_modules):
Optional path to the directory in which the dynamic modules are saved. It must have been initialized with :obj:`init_dynamic_modules`.
By default the datasets and metrics are stored inside the `datasets_modules` module.
data_files (:obj:`Union[Dict, List, str]`, optional): Defining the data_files of the dataset configuration.
download_kwargs: optional attributes for DownloadConfig() which will override the attributes in download_config if supplied,
for example ``use_auth_token``
"""
dataset_module = dataset_module_factory(
path,
revision=revision,
download_config=download_config,
download_mode=download_mode,
force_local_path=force_local_path,
dynamic_modules_path=dynamic_modules_path,
data_files=data_files,
**download_kwargs,
)
builder_cls = import_main_class(dataset_module.module_path)
return list(builder_cls.builder_configs.keys()) or [dataset_module.builder_kwargs.get("name", "default")]
def get_dataset_config_info(
path: str,
config_name: Optional[str] = None,
data_files: Optional[Union[str, Sequence[str], Mapping[str, Union[str, Sequence[str]]]]] = None,
download_config: Optional[DownloadConfig] = None,
download_mode: Optional[DownloadMode] = None,
revision: Optional[Union[str, Version]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
**config_kwargs,
) -> DatasetInfo:
"""Get the meta information (DatasetInfo) about a dataset for a particular config
Args:
path (``str``): path to the dataset processing script with the dataset builder. Can be either:
- a local path to processing script or the directory containing the script (if the script has the same name as the directory),
e.g. ``'./dataset/squad'`` or ``'./dataset/squad/squad.py'``
- a dataset identifier on the Hugging Face Hub (list all available datasets and ids with ``datasets.list_datasets()``)
e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'``
config_name (:obj:`str`, optional): Defining the name of the dataset configuration.
data_files (:obj:`str` or :obj:`Sequence` or :obj:`Mapping`, optional): Path(s) to source data file(s).
download_config (:class:`~utils.DownloadConfig`, optional): Specific download configuration parameters.
download_mode (:class:`DownloadMode`, default ``REUSE_DATASET_IF_EXISTS``): Download/generate mode.
revision (:class:`~utils.Version` or :obj:`str`, optional): Version of the dataset script to load:
- For datasets in the `huggingface/datasets` library on GitHub like "squad", the default version of the module is the local version of the lib.
You can specify a different version from your local version of the lib (e.g. "master" or "1.2.0") but it might cause compatibility issues.
- For community datasets like "lhoestq/squad" that have their own git repository on the Datasets Hub, the default version "main" corresponds to the "main" branch.
              You can specify a different version than the default "main" by using a commit sha or a git tag of the dataset repository.
use_auth_token (``str`` or ``bool``, optional): Optional string or boolean to use as Bearer token for remote files on the Datasets Hub.
If True, will get token from `"~/.huggingface"`.
config_kwargs: optional attributes for builder class which will override the attributes if supplied.
"""
builder = load_dataset_builder(
path,
name=config_name,
data_files=data_files,
download_config=download_config,
download_mode=download_mode,
revision=revision,
use_auth_token=use_auth_token,
**config_kwargs,
)
extend_dataset_builder_for_streaming(builder, use_auth_token=use_auth_token)
info = builder.info
if info.splits is None:
try:
download_config = download_config.copy() if download_config else DownloadConfig()
if use_auth_token is not None:
download_config.use_auth_token = use_auth_token
info.splits = {
split_generator.name: {"name": split_generator.name, "dataset_name": path}
for split_generator in builder._split_generators(
StreamingDownloadManager(base_path=builder.base_path, download_config=download_config)
)
}
except Exception as err:
raise SplitsNotFoundError("The split names could not be parsed from the dataset config.") from err
return info
def get_dataset_split_names(
path: str,
config_name: Optional[str] = None,
data_files: Optional[Union[str, Sequence[str], Mapping[str, Union[str, Sequence[str]]]]] = None,
download_config: Optional[DownloadConfig] = None,
download_mode: Optional[DownloadMode] = None,
revision: Optional[Union[str, Version]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
**config_kwargs,
):
"""Get the list of available splits for a particular config and dataset.
Args:
path (``str``): path to the dataset processing script with the dataset builder. Can be either:
- a local path to processing script or the directory containing the script (if the script has the same name as the directory),
e.g. ``'./dataset/squad'`` or ``'./dataset/squad/squad.py'``
- a dataset identifier on the Hugging Face Hub (list all available datasets and ids with ``datasets.list_datasets()``)
e.g. ``'squad'``, ``'glue'`` or ``'openai/webtext'``
config_name (:obj:`str`, optional): Defining the name of the dataset configuration.
data_files (:obj:`str` or :obj:`Sequence` or :obj:`Mapping`, optional): Path(s) to source data file(s).
download_config (:class:`~utils.DownloadConfig`, optional): Specific download configuration parameters.
download_mode (:class:`DownloadMode`, default ``REUSE_DATASET_IF_EXISTS``): Download/generate mode.
revision (:class:`~utils.Version` or :obj:`str`, optional): Version of the dataset script to load:
- For datasets in the `huggingface/datasets` library on GitHub like "squad", the default version of the module is the local version of the lib.
You can specify a different version from your local version of the lib (e.g. "master" or "1.2.0") but it might cause compatibility issues.
- For community datasets like "lhoestq/squad" that have their own git repository on the Datasets Hub, the default version "main" corresponds to the "main" branch.
              You can specify a different version than the default "main" by using a commit sha or a git tag of the dataset repository.
use_auth_token (``str`` or ``bool``, optional): Optional string or boolean to use as Bearer token for remote files on the Datasets Hub.
If True, will get token from `"~/.huggingface"`.
config_kwargs: optional attributes for builder class which will override the attributes if supplied.
"""
info = get_dataset_config_info(
path,
config_name=config_name,
data_files=data_files,
download_config=download_config,
download_mode=download_mode,
revision=revision,
use_auth_token=use_auth_token,
**config_kwargs,
)
return list(info.splits.keys())
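# Hypothetical usage sketch of the helpers above (dataset name chosen for
# illustration; the returned lists depend on the dataset):
#   from datasets import get_dataset_config_names, get_dataset_split_names
#   configs = get_dataset_config_names("glue")        # e.g. ['cola', 'sst2', ...]
#   splits = get_dataset_split_names("glue", "cola")  # e.g. ['train', 'validation', 'test']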
| 53.392216
| 174
| 0.690462
|
81c7485c9edd7b1b93dea57337ced0bf6a369949
| 3,240
|
py
|
Python
|
delta/data/frontend/plp.py
|
didichuxing/delta
|
31dfebc8f20b7cb282b62f291ff25a87e403cc86
|
[
"Apache-2.0"
] | 1,442
|
2019-07-09T07:34:28.000Z
|
2020-11-15T09:52:09.000Z
|
delta/data/frontend/plp.py
|
didichuxing/delta
|
31dfebc8f20b7cb282b62f291ff25a87e403cc86
|
[
"Apache-2.0"
] | 93
|
2019-07-22T09:20:20.000Z
|
2020-11-13T01:59:30.000Z
|
delta/data/frontend/plp.py
|
didichuxing/delta
|
31dfebc8f20b7cb282b62f291ff25a87e403cc86
|
[
"Apache-2.0"
] | 296
|
2019-07-09T07:35:28.000Z
|
2020-11-16T02:27:51.000Z
|
# Copyright (C) 2017 Beijing Didi Infinity Technology and Development Co.,Ltd.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This model extracts PLP features per frame."""
import delta.compat as tf
from core.ops import py_x_ops
from delta.utils.hparam import HParams
from delta.data.frontend.base_frontend import BaseFrontend
class Plp(BaseFrontend):
"""
Compute PLP features of every frame in speech, return a float tensor
with size (num_frames, plp_order + 1).
"""
def __init__(self, config: dict):
super().__init__(config)
@classmethod
def params(cls, config=None):
"""
Set params.
:param config: contains four optional parameters:
--sample_rate : Waveform data sample frequency (must match the waveform
file, if specified there). (float, default = 16000)
--window_length : Window length in seconds. (float, default = 0.025)
--frame_length : Hop length in seconds. (float, default = 0.010)
--plp_order : Plp order. (int, default=12).
:return:An object of class HParams, which is a set of hyperparameters as name-value pairs.
"""
window_length = 0.025
frame_length = 0.010
plp_order = 12
sample_rate = 16000
hparams = HParams(cls=cls)
hparams.add_hparam('window_length', window_length)
hparams.add_hparam('frame_length', frame_length)
hparams.add_hparam('plp_order', plp_order)
hparams.add_hparam('sample_rate', sample_rate)
if config is not None:
hparams.override_from_dict(config)
return hparams
def call(self, audio_data, sample_rate=None):
"""
    Calculate PLP features of audio data.
    :param audio_data: the audio signal from which to compute the spectrum.
                       Should be a (1, N) tensor.
    :param sample_rate: [optional] the sample rate of the signal we are working
                        with; the default is 16kHz.
:return:A float tensor of size (num_frames, (plp_order + 1)) containing plp
features of every frame in speech.
"""
p = self.config
with tf.name_scope('plp'):
if sample_rate == None:
sample_rate = tf.constant(p.sample_rate, dtype=tf.int32)
assert_op = tf.assert_equal(
tf.constant(p.sample_rate), tf.cast(sample_rate, dtype=tf.int32))
with tf.control_dependencies([assert_op]):
sample_rate = tf.cast(sample_rate, dtype=float)
plp = py_x_ops.plp(
audio_data,
sample_rate,
window_length=p.window_length,
frame_length=p.frame_length,
plp_order=p.plp_order)
return plp
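# Hypothetical usage sketch (not from the original module); assumes the HParams
# object returned by params() is accepted as the frontend config and that
# audio_data is a (1, N) float tensor sampled at 16 kHz:
#   config = {'plp_order': 13, 'window_length': 0.025}
#   plp_frontend = Plp(Plp.params(config))
#   plp_feats = plp_frontend.call(audio_data)  # shape: (num_frames, plp_order + 1)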
| 35.217391
| 94
| 0.658951
|
830f69e4218387da7b75c53eae46f376e920f7de
| 67,437
|
py
|
Python
|
wikum-env3/lib/python3.7/site-packages/praw/models/reddit/widgets.py
|
xuericlin/wikum
|
f0171f1697efa91d6957f976f473c9201db85648
|
[
"MIT"
] | 1
|
2019-12-24T07:49:18.000Z
|
2019-12-24T07:49:18.000Z
|
praw/models/reddit/widgets.py
|
TebelloX/praw
|
ffefec2b514d2d4df1bb3090a0a6e3f1c43c8085
|
[
"BSD-2-Clause"
] | 1
|
2019-11-04T16:26:26.000Z
|
2019-11-06T12:09:24.000Z
|
praw/models/reddit/widgets.py
|
TebelloX/praw
|
ffefec2b514d2d4df1bb3090a0a6e3f1c43c8085
|
[
"BSD-2-Clause"
] | null | null | null |
"""Provide classes related to widgets."""
import os.path
from json import dumps, JSONEncoder
from ...const import API_PATH
from ...util.cache import cachedproperty
from ..base import PRAWBase
from ..list.base import BaseList
class Button(PRAWBase):
"""Class to represent a single button inside a :class:`.ButtonWidget`.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``color`` The hex color used to outline the button.
``height`` Image height. Only present on image buttons.
``hoverState`` A ``dict`` describing the state of the button when
hovered over. Optional.
``kind`` Either ``'text'`` or ``'image'``.
``linkUrl`` A link that can be visited by clicking the button.
Only present on image buttons.
``text`` The text displayed on the button.
``url`` If the button is a text button, a link that can be
visited by clicking the button.
If the button is an image button, the URL of a
Reddit-hosted image.
``width`` Image width. Only present on image buttons.
======================= ===================================================
"""
class Image(PRAWBase):
"""Class to represent an image that's part of a :class:`.ImageWidget`.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``height`` Image height.
``linkUrl`` A link that can be visited by clicking the image.
``url`` The URL of the (Reddit-hosted) image.
``width`` Image width.
======================= ===================================================
"""
class ImageData(PRAWBase):
"""Class for image data that's part of a :class:`.CustomWidget`.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``height`` The image height.
``name`` The image name.
``url`` The URL of the image on Reddit's servers.
``width`` The image width.
======================= ===================================================
"""
class MenuLink(PRAWBase):
"""Class to represent a single link inside a menu or submenu.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``text`` The text of the menu link.
``url`` The URL that the menu item links to.
======================= ===================================================
"""
class Submenu(BaseList):
r"""Class to represent a submenu of links inside a menu.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``children`` A list of the :class:`.MenuLink`\ s in this
submenu. Can be iterated over by iterating over the
:class:`.Submenu` (e.g. ``for menu_link in
submenu``).
``text`` The name of the submenu.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "children"
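    # Note: ``BaseList`` internals are not defined in this module; the
    # assumption here is that ``CHILD_ATTRIBUTE`` names the attribute that
    # iteration delegates to, so ``for link in submenu`` walks
    # ``submenu.children``. The widget classes below reuse the same pattern.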
class SubredditWidgets(PRAWBase):
"""Class to represent a subreddit's widgets.
Create an instance like so:
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
Data will be lazy-loaded. By default, PRAW will not request progressively
loading images from Reddit. To enable this, instantiate a SubredditWidgets
object, then set the attribute ``progressive_images`` to ``True`` before
performing any action that would result in a network request.
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
widgets.progressive_images = True
for widget in widgets.sidebar:
# do something
Access a subreddit's widgets with the following attributes:
.. code-block:: python
print(widgets.id_card)
print(widgets.moderators_widget)
print(widgets.sidebar)
print(widgets.topbar)
The attribute :attr:`.id_card` contains the subreddit's ID card,
which displays information like the number of subscribers.
The attribute :attr:`.moderators_widget` contains the subreddit's
moderators widget, which lists the moderators of the subreddit.
The attribute :attr:`.sidebar` contains a list of widgets which make up
the sidebar of the subreddit.
The attribute :attr:`.topbar` contains a list of widgets which make up
the top bar of the subreddit.
To edit a subreddit's widgets, use :attr:`~.SubredditWidgets.mod`. For
example:
.. code-block:: python
widgets.mod.add_text_area('My title', '**bold text**',
{'backgroundColor': '#FFFF66',
'headerColor': '#3333EE'})
For more information, see :class:`.SubredditWidgetsModeration`.
To edit a particular widget, use ``.mod`` on the widget. For example:
.. code-block:: python
for widget in widgets.sidebar:
widget.mod.update(shortName='Exciting new name')
For more information, see :class:`.WidgetModeration`.
**Currently available Widgets**:
- :class:`.ButtonWidget`
- :class:`.Calendar`
- :class:`.CommunityList`
- :class:`.CustomWidget`
- :class:`.IDCard`
- :class:`.ImageWidget`
- :class:`.Menu`
- :class:`.ModeratorsWidget`
- :class:`.PostFlairWidget`
- :class:`.RulesWidget`
- :class:`.TextArea`
"""
@cachedproperty
def id_card(self):
"""Get this subreddit's :class:`.IDCard` widget."""
return self.items[self.layout["idCardWidget"]]
@cachedproperty
def items(self):
"""Get this subreddit's widgets as a dict from ID to widget."""
items = {}
for item_name, data in self._raw_items.items():
data["subreddit"] = self.subreddit
items[item_name] = self._reddit._objector.objectify(data)
return items
@cachedproperty
def mod(self):
"""Get an instance of :class:`.SubredditWidgetsModeration`.
.. note::
Using any of the methods of :class:`.SubredditWidgetsModeration`
will likely result in the data of this :class:`.SubredditWidgets`
being outdated. To re-sync, call :meth:`.refresh`.
"""
return SubredditWidgetsModeration(self.subreddit, self._reddit)
@cachedproperty
def moderators_widget(self):
"""Get this subreddit's :class:`.ModeratorsWidget`."""
return self.items[self.layout["moderatorWidget"]]
@cachedproperty
def sidebar(self):
"""Get a list of Widgets that make up the sidebar."""
return [
self.items[widget_name]
for widget_name in self.layout["sidebar"]["order"]
]
@cachedproperty
def topbar(self):
"""Get a list of Widgets that make up the top bar."""
return [
self.items[widget_name]
for widget_name in self.layout["topbar"]["order"]
]
def refresh(self):
"""Refresh the subreddit's widgets.
By default, PRAW will not request progressively
loading images from Reddit. To enable this,
set the attribute ``progressive_images`` to ``True`` prior to
calling ``refresh()``.
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
widgets.progressive_images = True
widgets.refresh()
"""
self._fetch()
def __getattr__(self, attr):
"""Return the value of `attr`."""
if not attr.startswith("_") and not self._fetched:
self._fetch()
return getattr(self, attr)
raise AttributeError(
"{!r} object has no attribute {!r}".format(
self.__class__.__name__, attr
)
)
def __init__(self, subreddit):
"""Initialize the class.
:param subreddit: The :class:`.Subreddit` the widgets belong to.
"""
self._raw_items = None
self._fetched = False
self.subreddit = subreddit
self.progressive_images = False
super(SubredditWidgets, self).__init__(subreddit._reddit, {})
def __repr__(self):
"""Return an object initialization representation of the object."""
return "SubredditWidgets(subreddit={subreddit!r})".format(
subreddit=self.subreddit
)
def _fetch(self):
data = self._reddit.get(
API_PATH["widgets"].format(subreddit=self.subreddit),
params={"progressive_images": self.progressive_images},
)
self._raw_items = data.pop("items")
super(SubredditWidgets, self).__init__(self.subreddit._reddit, data)
cached_property_names = [
"id_card",
"moderators_widget",
"sidebar",
"topbar",
"items",
]
inst_dict_pop = self.__dict__.pop
for name in cached_property_names:
inst_dict_pop(name, None)
self._fetched = True
class SubredditWidgetsModeration:
"""Class for moderating a subreddit's widgets.
Get an instance of this class from :attr:`.SubredditWidgets.mod`.
Example usage:
.. code-block:: python
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
reddit.subreddit('learnpython').widgets.mod.add_text_area(
'My title', '**bold text**', styles)
.. note::
To use this class's methods, the authenticated user must be a moderator
with appropriate permissions.
"""
def __init__(self, subreddit, reddit):
"""Initialize the class."""
self._subreddit = subreddit
self._reddit = reddit
def _create_widget(self, payload):
path = API_PATH["widget_create"].format(subreddit=self._subreddit)
widget = self._reddit.post(
path, data={"json": dumps(payload, cls=WidgetEncoder)}
)
widget.subreddit = self._subreddit
return widget
def add_button_widget(
self, short_name, description, buttons, styles, **other_settings
):
r"""Add and return a :class:`.ButtonWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param description: Markdown text to describe the widget.
:param buttons: A ``list`` of ``dict``\ s describing buttons, as
specified in `Reddit docs`_. As of this writing, the format is:
Each button is either a text button or an image button. A text
button looks like this:
.. code-block:: none
{
"kind": "text",
"text": a string no longer than 30 characters,
"url": a valid URL,
"color": a 6-digit rgb hex color, e.g. `#AABBCC`,
"textColor": a 6-digit rgb hex color, e.g. `#AABBCC`,
"fillColor": a 6-digit rgb hex color, e.g. `#AABBCC`,
"hoverState": {...}
}
An image button looks like this:
.. code-block:: none
{
"kind": "image",
"text": a string no longer than 30 characters,
"linkUrl": a valid URL,
"url": a valid URL of a reddit-hosted image,
"height": an integer,
"width": an integer,
"hoverState": {...}
}
            Both types of buttons accept an optional ``hoverState`` field. If
            it is included, it can be one of two types: text or image. A text
            ``hoverState`` looks like this:
.. code-block:: none
{
"kind": "text",
"text": a string no longer than 30 characters,
"color": a 6-digit rgb hex color, e.g. `#AABBCC`,
"textColor": a 6-digit rgb hex color, e.g. `#AABBCC`,
"fillColor": a 6-digit rgb hex color, e.g. `#AABBCC`
}
An image ``hoverState`` looks like this:
.. code-block:: none
{
"kind": "image",
"url": a valid URL of a reddit-hosted image,
"height": an integer,
"width": an integer
}
.. note::
The method :meth:`.upload_image` can be used to upload images to
Reddit for a ``url`` field that holds a Reddit-hosted image.
.. note::
                An image ``hoverState`` may be paired with a text button, and
                a text ``hoverState`` may be paired with an image button.
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
my_image = widget_moderation.upload_image('/path/to/pic.jpg')
buttons = [
{
'kind': 'text',
'text': 'View source',
'url': 'https://github.com/praw-dev/praw',
'color': '#FF0000',
'textColor': '#00FF00',
'fillColor': '#0000FF',
'hoverState': {
'kind': 'text',
'text': 'ecruos weiV',
'color': '#FFFFFF',
'textColor': '#000000',
'fillColor': '#0000FF'
}
},
{
'kind': 'image',
'text': 'View documentation',
'linkUrl': 'https://praw.readthedocs.io',
'url': my_image,
'height': 200,
'width': 200,
'hoverState': {
'kind': 'image',
'url': my_image,
'height': 200,
'width': 200
}
}
]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_button_widget(
'Things to click', 'Click some of these *cool* links!',
buttons, styles)
"""
button_widget = {
"buttons": buttons,
"description": description,
"kind": "button",
"shortName": short_name,
"styles": styles,
}
button_widget.update(other_settings)
return self._create_widget(button_widget)
def add_calendar(
self,
short_name,
google_calendar_id,
requires_sync,
configuration,
styles,
**other_settings
):
"""Add and return a :class:`.Calendar` widget.
:param short_name: A name for the widget, no longer than 30 characters.
:param google_calendar_id: An email-style calendar ID. To share a
Google Calendar, make it public,
then find the "Calendar ID."
:param requires_sync: A ``bool``.
:param configuration: A ``dict`` as specified in `Reddit docs`_.
For example:
.. code-block:: python
{'numEvents': 10,
'showDate': True,
'showDescription': False,
'showLocation': False,
'showTime': True,
'showTitle': True}
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
config = {'numEvents': 10,
'showDate': True,
'showDescription': False,
'showLocation': False,
'showTime': True,
'showTitle': True}
cal_id = 'y6nm89jy427drk8l71w75w9wjn@group.calendar.google.com'
new_widget = widget_moderation.add_calendar('Upcoming Events',
cal_id, True,
config, styles)
"""
calendar = {
"shortName": short_name,
"googleCalendarId": google_calendar_id,
"requiresSync": requires_sync,
"configuration": configuration,
"styles": styles,
"kind": "calendar",
}
calendar.update(other_settings)
return self._create_widget(calendar)
def add_community_list(
self, short_name, data, styles, description="", **other_settings
):
"""Add and return a :class:`.CommunityList` widget.
:param short_name: A name for the widget, no longer than 30 characters.
:param data: A ``list`` of subreddits. Subreddits can be represented as
``str`` (e.g. the string ``'redditdev'``) or as
:class:`.Subreddit` (e.g.
``reddit.subreddit('redditdev')``). These types may be
mixed within the list.
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
:param description: A ``str`` containing Markdown (default: ``''``).
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
subreddits = ['learnpython', reddit.subreddit('redditdev')]
new_widget = widget_moderation.add_community_list('My fav subs',
subreddits,
styles,
'description')
"""
community_list = {
"data": data,
"kind": "community-list",
"shortName": short_name,
"styles": styles,
"description": description,
}
community_list.update(other_settings)
return self._create_widget(community_list)
def add_custom_widget(
self,
short_name,
text,
css,
height,
image_data,
styles,
**other_settings
):
r"""Add and return a :class:`.CustomWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param text: The Markdown text displayed in the widget.
:param css: The CSS for the widget, no longer than 100000 characters.
.. note::
As of this writing, Reddit will not accept empty CSS. If you
wish to create a custom widget without CSS, consider using
``'/**/'`` (an empty comment) as your CSS.
:param height: The height of the widget, between 50 and 500.
:param image_data: A ``list`` of ``dict``\ s as specified in
`Reddit docs`_. Each ``dict`` represents an image and has the
key ``'url'`` which maps to the URL of an image hosted on
Reddit's servers. Images should be uploaded using
:meth:`.upload_image`.
For example:
.. code-block:: python
[{'url': 'https://some.link', # from upload_image()
'width': 600, 'height': 450,
'name': 'logo'},
{'url': 'https://other.link', # from upload_image()
'width': 450, 'height': 600,
'name': 'icon'}]
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
image_paths = ['/path/to/image1.jpg', '/path/to/image2.png']
image_urls = [widget_moderation.upload_image(img_path)
for img_path in image_paths]
image_dicts = [{'width': 600, 'height': 450, 'name': 'logo',
'url': image_urls[0]},
{'width': 450, 'height': 600, 'name': 'icon',
'url': image_urls[1]}]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_custom_widget('My widget',
'# Hello world!',
'/**/', 200,
image_dicts, styles)
"""
custom_widget = {
"css": css,
"height": height,
"imageData": image_data,
"kind": "custom",
"shortName": short_name,
"styles": styles,
"text": text,
}
custom_widget.update(other_settings)
return self._create_widget(custom_widget)
def add_image_widget(self, short_name, data, styles, **other_settings):
r"""Add and return an :class:`.ImageWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param data: A ``list`` of ``dict``\ s as specified in `Reddit docs`_.
Each ``dict`` has the key ``'url'`` which maps to the URL
of an image hosted on Reddit's servers. Images should
be uploaded using :meth:`.upload_image`.
For example:
.. code-block:: python
[{'url': 'https://some.link', # from upload_image()
'width': 600, 'height': 450,
'linkUrl': 'https://github.com/praw-dev/praw'},
{'url': 'https://other.link', # from upload_image()
'width': 450, 'height': 600,
'linkUrl': 'https://praw.readthedocs.io'}]
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}``.
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
image_paths = ['/path/to/image1.jpg', '/path/to/image2.png']
image_dicts = [{'width': 600, 'height': 450, 'linkUrl': '',
'url': widget_moderation.upload_image(img_path)}
for img_path in image_paths]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_image_widget('My cool pictures',
image_dicts, styles)
"""
image_widget = {
"data": data,
"kind": "image",
"shortName": short_name,
"styles": styles,
}
image_widget.update(other_settings)
return self._create_widget(image_widget)
def add_menu(self, data, **other_settings):
r"""Add and return a :class:`.Menu` widget.
:param data: A ``list`` of ``dict``\ s describing menu contents, as
specified in `Reddit docs`_. As of this writing, the format is:
.. code-block:: none
[
{
"text": a string no longer than 20 characters,
"url": a valid URL
},
OR
{
"children": [
{
"text": a string no longer than 20 characters,
"url": a valid URL,
},
...
],
"text": a string no longer than 20 characters,
},
...
]
.. _Reddit docs: https://www.reddit.com/dev/api#POST_api_widget
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
menu_contents = [
{'text': 'My homepage', 'url': 'https://example.com'},
{'text': 'Python packages',
'children': [
{'text': 'PRAW', 'url': 'https://praw.readthedocs.io/'},
{'text': 'requests', 'url': 'http://python-requests.org'}
]},
{'text': 'Reddit homepage', 'url': 'https://reddit.com'}
]
new_widget = widget_moderation.add_menu(menu_contents)
"""
menu = {"data": data, "kind": "menu"}
menu.update(other_settings)
return self._create_widget(menu)
def add_post_flair_widget(
self, short_name, display, order, styles, **other_settings
):
"""Add and return a :class:`.PostFlairWidget`.
:param short_name: A name for the widget, no longer than 30 characters.
:param display: Display style. Either ``'cloud'`` or ``'list'``.
:param order: A ``list`` of flair template IDs. You can get all flair
template IDs in a subreddit with:
.. code-block:: python
flairs = [f['id'] for f in subreddit.flair.link_templates]
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
Example usage:
.. code-block:: python
subreddit = reddit.subreddit('mysub')
widget_moderation = subreddit.widgets.mod
flairs = [f['id'] for f in subreddit.flair.link_templates]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_post_flair_widget('Some flairs',
'list',
flairs, styles)
"""
post_flair = {
"kind": "post-flair",
"display": display,
"shortName": short_name,
"order": order,
"styles": styles,
}
post_flair.update(other_settings)
return self._create_widget(post_flair)
def add_text_area(self, short_name, text, styles, **other_settings):
"""Add and return a :class:`.TextArea` widget.
:param short_name: A name for the widget, no longer than 30 characters.
:param text: The Markdown text displayed in the widget.
:param styles: A ``dict`` with keys ``backgroundColor`` and
``headerColor``, and values of hex colors. For example,
``{'backgroundColor': '#FFFF66', 'headerColor':
'#3333EE'}``.
Example usage:
.. code-block:: python
widget_moderation = reddit.subreddit('mysub').widgets.mod
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
new_widget = widget_moderation.add_text_area('My cool title',
'*Hello* **world**!',
styles)
"""
text_area = {
"shortName": short_name,
"text": text,
"styles": styles,
"kind": "textarea",
}
text_area.update(other_settings)
return self._create_widget(text_area)
def reorder(self, new_order, section="sidebar"):
"""Reorder the widgets.
:param new_order: A list of widgets. Represented as a ``list`` that
contains ``Widget`` objects, or widget IDs as strings. These types
may be mixed.
:param section: The section to reorder. (default: ``'sidebar'``)
Example usage:
.. code-block:: python
widgets = reddit.subreddit('mysub').widgets
order = list(widgets.sidebar)
order.reverse()
widgets.mod.reorder(order)
"""
order = [
thing.id if isinstance(thing, Widget) else str(thing)
for thing in new_order
]
path = API_PATH["widget_order"].format(
subreddit=self._subreddit, section=section
)
self._reddit.patch(
path, data={"json": dumps(order), "section": section}
)
def upload_image(self, file_path):
"""Upload an image to Reddit and get the URL.
:param file_path: The path to the local file.
:returns: The URL of the uploaded image as a ``str``.
This method is used to upload images for widgets. For example,
it can be used in conjunction with :meth:`.add_image_widget`,
:meth:`.add_custom_widget`, and :meth:`.add_button_widget`.
Example usage:
.. code-block:: python
my_sub = reddit.subreddit('my_sub')
image_url = my_sub.widgets.mod.upload_image('/path/to/image.jpg')
images = [{'width': 300, 'height': 300,
'url': image_url, 'linkUrl': ''}]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
my_sub.widgets.mod.add_image_widget('My cool pictures', images,
styles)
"""
img_data = {
"filepath": os.path.basename(file_path),
"mimetype": "image/jpeg",
}
if file_path.lower().endswith(".png"):
img_data["mimetype"] = "image/png"
url = API_PATH["widget_lease"].format(subreddit=self._subreddit)
# until we learn otherwise, assume this request always succeeds
upload_lease = self._reddit.post(url, data=img_data)["s3UploadLease"]
upload_data = {
item["name"]: item["value"] for item in upload_lease["fields"]
}
upload_url = "https:{}".format(upload_lease["action"])
with open(file_path, "rb") as image:
response = self._reddit._core._requestor._http.post(
upload_url, data=upload_data, files={"file": image}
)
response.raise_for_status()
return upload_url + "/" + upload_data["key"]
class Widget(PRAWBase):
"""Base class to represent a Widget."""
@property
def mod(self):
"""Get an instance of :class:`.WidgetModeration` for this widget.
.. note::
Using any of the methods of :class:`.WidgetModeration` will likely
make outdated the data in the :class:`.SubredditWidgets` that this
widget belongs to. To remedy this, call
:meth:`~.SubredditWidgets.refresh`.
"""
if self._mod is None:
self._mod = WidgetModeration(self, self.subreddit, self._reddit)
return self._mod
def __eq__(self, other):
"""Check equality against another object."""
if isinstance(other, Widget):
return self.id.lower() == other.id.lower()
return str(other).lower() == self.id.lower()
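    # For example (this follows directly from the comparison above): a widget
    # compares equal to its own ID string, i.e. ``widget == widget.id`` is
    # True, and the comparison is case-insensitive.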
# pylint: disable=invalid-name
def __init__(self, reddit, _data):
"""Initialize an instance of the class."""
self.subreddit = "" # in case it isn't in _data
self.id = "" # in case it isn't in _data
super(Widget, self).__init__(reddit, _data=_data)
self._mod = None
class ButtonWidget(Widget, BaseList):
r"""Class to represent a widget containing one or more buttons.
Find an existing one:
.. code-block:: python
button_widget = None
widgets = reddit.subreddit('redditdev').widgets
for widget in widgets.sidebar:
if isinstance(widget, praw.models.ButtonWidget):
button_widget = widget
break
for button in button_widget:
print(button.text, button.url)
Create one (requires proper moderator permissions):
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
buttons = [
{
'kind': 'text',
'text': 'View source',
'url': 'https://github.com/praw-dev/praw',
'color': '#FF0000',
'textColor': '#00FF00',
'fillColor': '#0000FF',
'hoverState': {
'kind': 'text',
'text': 'ecruos weiV',
'color': '#000000',
'textColor': '#FFFFFF',
'fillColor': '#0000FF'
}
},
{
'kind': 'text',
'text': 'View documentation',
'url': 'https://praw.readthedocs.io',
'color': '#FFFFFF',
'textColor': '#FFFF00',
'fillColor': '#0000FF'
},
]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
button_widget = widgets.mod.add_button_widget(
'Things to click', 'Click some of these *cool* links!',
buttons, styles)
For more information on creation, see :meth:`.add_button_widget`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
button_widget = button_widget.mod.update(shortName='My fav buttons',
styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
button_widget.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``buttons`` A ``list`` of :class:`.Button`\ s. These can also
be accessed just by iterating over the
:class:`.ButtonWidget` (e.g. ``for button in
button_widget``).
``description`` The description, in Markdown.
``description_html`` The description, in HTML.
``id`` The widget ID.
``kind`` The widget kind (always ``'button'``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'`` and
``'headerColor'``.
``subreddit`` The :class:`.Subreddit` the button widget belongs
to.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "buttons"
class Calendar(Widget):
r"""Class to represent a calendar widget.
Find an existing one:
.. code-block:: python
calendar = None
widgets = reddit.subreddit('redditdev').widgets
for widget in widgets.sidebar:
if isinstance(widget, praw.models.Calendar):
calendar = widget
break
print(calendar.googleCalendarId)
Create one (requires proper moderator permissions):
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
config = {'numEvents': 10,
'showDate': True,
'showDescription': False,
'showLocation': False,
'showTime': True,
'showTitle': True}
cal_id = 'y6nm89jy427drk8l71w75w9wjn@group.calendar.google.com'
calendar = widgets.mod.add_calendar(
'Upcoming Events', cal_id, True, config, styles)
For more information on creation, see :meth:`.add_calendar`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
calendar = calendar.mod.update(shortName='My fav events',
styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
calendar.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``configuration`` A ``dict`` describing the calendar configuration.
``data`` A ``list`` of ``dict``\ s that represent events.
``id`` The widget ID.
``kind`` The widget kind (always ``'calendar'``).
``requiresSync`` A ``bool``.
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'`` and
``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the widget belongs
to.
======================= ===================================================
"""
class CommunityList(Widget, BaseList):
r"""Class to represent a Related Communities widget.
Find an existing one:
.. code-block:: python
community_list = None
widgets = reddit.subreddit('redditdev').widgets
for widget in widgets.sidebar:
if isinstance(widget, praw.models.CommunityList):
community_list = widget
break
print(community_list)
Create one (requires proper moderator permissions):
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
subreddits = ['learnpython', reddit.subreddit('announcements')]
community_list = widgets.mod.add_community_list('Related subreddits',
subreddits, styles,
'description')
For more information on creation, see :meth:`.add_community_list`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
community_list = community_list.mod.update(shortName='My fav subs',
styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
community_list.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``data`` A ``list`` of :class:`.Subreddit`\ s. These can
also be iterated over by iterating over the
:class:`.CommunityList` (e.g. ``for sub in
community_list``).
``id`` The widget ID.
``kind`` The widget kind (always ``'community-list'``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'`` and
``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the widget belongs
to.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "data"
class CustomWidget(Widget):
"""Class to represent a custom widget.
Find an existing one:
.. code-block:: python
custom = None
widgets = reddit.subreddit('redditdev').widgets
for widget in widgets.sidebar:
if isinstance(widget, praw.models.CustomWidget):
custom = widget
break
print(custom.text)
print(custom.css)
Create one (requires proper moderator permissions):
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
custom = widgets.mod.add_custom_widget(
'My custom widget', '# Hello world!', '/**/', 200, [], styles)
For more information on creation, see :meth:`.add_custom_widget`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
custom = custom.mod.update(shortName='My fav customization',
styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
custom.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``css`` The CSS of the widget, as a ``str``.
``height`` The height of the widget, as an ``int``.
``id`` The widget ID.
``imageData`` A ``list`` of :class:`.ImageData` that belong to
the widget.
``kind`` The widget kind (always ``'custom'``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'`` and
``'headerColor'``.
``stylesheetUrl`` A link to the widget's stylesheet.
    ``subreddit``           The :class:`.Subreddit` the widget belongs
to.
``text`` The text contents, as Markdown.
``textHtml`` The text contents, as HTML.
======================= ===================================================
"""
def __init__(self, reddit, _data):
"""Initialize the class."""
_data["imageData"] = [
ImageData(reddit, data) for data in _data.pop("imageData")
]
super(CustomWidget, self).__init__(reddit, _data=_data)
class IDCard(Widget):
"""Class to represent an ID card widget.
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
id_card = widgets.id_card
print(id_card.subscribersText)
Update one (requires proper moderator permissions):
.. code-block:: python
widgets.id_card.mod.update(currentlyViewingText='Bots')
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
=========================== ===============================================
Attribute Description
=========================== ===============================================
``currentlyViewingCount`` The number of Redditors viewing the subreddit.
``currentlyViewingText`` The text displayed next to the view count. For
example, "users online".
``description`` The subreddit description.
``id`` The widget ID.
``kind`` The widget kind (always ``'id-card'``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'``
and ``'headerColor'``.
    ``subreddit``               The :class:`.Subreddit` the widget
belongs to.
``subscribersCount`` The number of subscribers to the subreddit.
``subscribersText`` The text displayed next to the subscriber
count. For example, "users subscribed".
=========================== ===============================================
"""
class ImageWidget(Widget, BaseList):
r"""Class to represent an image widget.
Find an existing one:
.. code-block:: python
image_widget = None
widgets = reddit.subreddit('redditdev').widgets
for widget in widgets.sidebar:
if isinstance(widget, praw.models.ImageWidget):
image_widget = widget
break
for image in image_widget:
print(image.url)
Create one (requires proper moderator permissions):
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
image_paths = ['/path/to/image1.jpg', '/path/to/image2.png']
image_dicts = [{'width': 600, 'height': 450, 'linkUrl': '',
'url': widgets.mod.upload_image(img_path)}
for img_path in image_paths]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
image_widget = widgets.mod.add_image_widget('My cool pictures',
image_dicts, styles)
For more information on creation, see :meth:`.add_image_widget`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
image_widget = image_widget.mod.update(shortName='My fav images',
styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
image_widget.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``data`` A list of the :class:`.Image`\ s in this widget.
Can be iterated over by iterating over the
:class:`.ImageWidget` (e.g. ``for img in
image_widget``).
``id`` The widget ID.
``kind`` The widget kind (always ``'image'``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'`` and
``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the widget belongs
to.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "data"
class Menu(Widget, BaseList):
r"""Class to represent the top menu widget of a subreddit.
Menus can generally be found as the first item in a subreddit's top bar.
.. code-block:: python
topbar = reddit.subreddit('redditdev').widgets.topbar
if len(topbar) > 0:
probably_menu = topbar[0]
assert isinstance(probably_menu, praw.models.Menu)
for item in probably_menu:
if isinstance(item, praw.models.Submenu):
print(item.text)
for child in item:
print('\t', child.text, child.url)
else: # MenuLink
print(item.text, item.url)
Create one (requires proper moderator permissions):
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
menu_contents = [
{'text': 'My homepage', 'url': 'https://example.com'},
{'text': 'Python packages',
'children': [
{'text': 'PRAW', 'url': 'https://praw.readthedocs.io/'},
{'text': 'requests', 'url': 'http://python-requests.org'}
]},
{'text': 'Reddit homepage', 'url': 'https://reddit.com'}
]
menu = widgets.mod.add_menu(menu_contents)
For more information on creation, see :meth:`.add_menu`.
Update one (requires proper moderator permissions):
.. code-block:: python
menu_items = list(menu)
menu_items.reverse()
menu = menu.mod.update(data=menu_items)
Delete one (requires proper moderator permissions):
.. code-block:: python
menu.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``data`` A list of the :class:`.MenuLink`\ s and
:class:`.Submenu`\ s in this widget.
Can be iterated over by iterating over the
:class:`.Menu` (e.g. ``for item in menu``).
``id`` The widget ID.
``kind`` The widget kind (always ``'menu'``).
    ``subreddit``           The :class:`.Subreddit` the widget belongs
to.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "data"
class ModeratorsWidget(Widget, BaseList):
r"""Class to represent a moderators widget.
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
print(widgets.moderators_widget)
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
widgets.moderators_widget.mod.update(styles=new_styles)
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``id`` The widget ID.
``kind`` The widget kind (always ``'moderators'``).
``mods`` A list of the :class:`.Redditor`\ s that moderate
the subreddit. Can be iterated over by iterating
over the :class:`.ModeratorsWidget` (e.g. ``for
mod in widgets.moderators_widget``).
``styles`` A ``dict`` with the keys ``'backgroundColor'``
and ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the widget
belongs to.
``totalMods`` The total number of moderators in the subreddit.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "mods"
def __init__(self, reddit, _data):
"""Initialize the moderators widget."""
if self.CHILD_ATTRIBUTE not in _data:
# .mod.update() sometimes returns payload without 'mods' field
_data[self.CHILD_ATTRIBUTE] = []
super(ModeratorsWidget, self).__init__(reddit, _data=_data)
class PostFlairWidget(Widget, BaseList):
r"""Class to represent a post flair widget.
Find an existing one:
.. code-block:: python
post_flair_widget = None
widgets = reddit.subreddit('redditdev').widgets
for widget in widgets.sidebar:
if isinstance(widget, praw.models.PostFlairWidget):
post_flair_widget = widget
break
for flair in post_flair_widget:
print(flair)
print(post_flair_widget.templates[flair])
Create one (requires proper moderator permissions):
.. code-block:: python
subreddit = reddit.subreddit('redditdev')
widgets = subreddit.widgets
flairs = [f['id'] for f in subreddit.flair.link_templates]
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
post_flair = widgets.mod.add_post_flair_widget('Some flairs', 'list',
flairs, styles)
For more information on creation, see :meth:`.add_post_flair_widget`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
post_flair = post_flair.mod.update(shortName='My fav flairs',
styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
post_flair.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``display`` The display style of the widget, either ``'cloud'``
or ``'list'``.
``id`` The widget ID.
``kind`` The widget kind (always ``'post-flair'``).
``order`` A list of the flair IDs in this widget.
Can be iterated over by iterating over the
:class:`.PostFlairWidget` (e.g. ``for flair_id in
post_flair``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'`` and
``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the widget belongs
to.
``templates`` A ``dict`` that maps flair IDs to ``dict``\ s that
describe flairs.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "order"
class RulesWidget(Widget, BaseList):
"""Class to represent a rules widget.
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
rules_widget = None
for widget in widgets.sidebar:
if isinstance(widget, praw.models.RulesWidget):
rules_widget = widget
break
from pprint import pprint; pprint(rules_widget.data)
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
rules_widget.mod.update(display='compact', shortName='The LAWS',
styles=new_styles)
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``data`` A list of the subreddit rules.
Can be iterated over by iterating over the
:class:`.RulesWidget` (e.g. ``for rule in
rules_widget``).
``display`` The display style of the widget, either ``'full'``
or ``'compact'``.
``id`` The widget ID.
``kind`` The widget kind (always ``'subreddit-rules'``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'``
and ``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the widget
belongs to.
======================= ===================================================
"""
CHILD_ATTRIBUTE = "data"
def __init__(self, reddit, _data):
"""Initialize the rules widget."""
if self.CHILD_ATTRIBUTE not in _data:
# .mod.update() sometimes returns payload without 'data' field
_data[self.CHILD_ATTRIBUTE] = []
super(RulesWidget, self).__init__(reddit, _data=_data)
class TextArea(Widget):
"""Class to represent a text area widget.
Find a text area in a subreddit:
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
text_area = None
for widget in widgets.sidebar:
if isinstance(widget, praw.models.TextArea):
text_area = widget
break
print(text_area.text)
Create one (requires proper moderator permissions):
.. code-block:: python
widgets = reddit.subreddit('redditdev').widgets
styles = {'backgroundColor': '#FFFF66', 'headerColor': '#3333EE'}
text_area = widgets.mod.add_text_area('My cool title',
'*Hello* **world**!',
styles)
For more information on creation, see :meth:`.add_text_area`.
Update one (requires proper moderator permissions):
.. code-block:: python
new_styles = {'backgroundColor': '#FFFFFF', 'headerColor': '#FF9900'}
text_area = text_area.mod.update(shortName='My fav text',
styles=new_styles)
Delete one (requires proper moderator permissions):
.. code-block:: python
text_area.mod.delete()
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
comprehensive in any way.
======================= ===================================================
Attribute Description
======================= ===================================================
``id`` The widget ID.
``kind`` The widget kind (always ``'textarea'``).
``shortName`` The short name of the widget.
``styles`` A ``dict`` with the keys ``'backgroundColor'`` and
``'headerColor'``.
    ``subreddit``           The :class:`.Subreddit` the widget belongs
to.
``text`` The widget's text, as Markdown.
``textHtml`` The widget's text, as HTML.
======================= ===================================================
"""
class WidgetEncoder(JSONEncoder):
"""Class to encode widget-related objects."""
def default(self, o): # pylint: disable=E0202
"""Serialize ``PRAWBase`` objects."""
if isinstance(o, self._subreddit_class):
return str(o)
elif isinstance(o, PRAWBase):
return {
key: val
for key, val in vars(o).items()
if not key.startswith("_")
}
return JSONEncoder.default(self, o)
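# Usage sketch (hypothetical payload values; ``_subreddit_class`` is assigned
# outside this module): the encoder is handed to ``json.dumps`` so that
# PRAWBase objects and Subreddit instances inside a widget payload serialize
# cleanly, mirroring ``_create_widget`` and ``WidgetModeration.update`` below.
#
#     styles = {"backgroundColor": "#FFFF66", "headerColor": "#3333EE"}
#     payload = {"kind": "textarea", "shortName": "My widget",
#                "text": "*Hello*", "styles": styles}
#     serialized = dumps(payload, cls=WidgetEncoder)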
class WidgetModeration:
"""Class for moderating a particular widget.
Example usage:
.. code-block:: python
widget = reddit.subreddit('my_sub').widgets.sidebar[0]
widget.mod.update(shortName='My new title')
widget.mod.delete()
"""
def __init__(self, widget, subreddit, reddit):
"""Initialize the widget moderation object."""
self.widget = widget
self._reddit = reddit
self._subreddit = subreddit
def delete(self):
"""Delete the widget.
Example usage:
.. code-block:: python
widget.mod.delete()
"""
path = API_PATH["widget_modify"].format(
widget_id=self.widget.id, subreddit=self._subreddit
)
self._reddit.request("DELETE", path)
def update(self, **kwargs):
"""Update the widget. Returns the updated widget.
Parameters differ based on the type of widget. See
`Reddit documentation
<https://www.reddit.com/dev/api#PUT_api_widget_{widget_id}>`_ or the
        documentation for the particular type of widget.
For example, update a text widget like so:
.. code-block:: python
text_widget.mod.update(shortName='New text area', text='Hello!')
.. note::
Most parameters follow the ``lowerCamelCase`` convention. When in
doubt, check the Reddit documentation linked above.
"""
path = API_PATH["widget_modify"].format(
widget_id=self.widget.id, subreddit=self._subreddit
)
payload = {
key: value
for key, value in vars(self.widget).items()
if not key.startswith("_")
}
del payload["subreddit"] # not JSON serializable
payload.update(kwargs)
widget = self._reddit.put(
path, data={"json": dumps(payload, cls=WidgetEncoder)}
)
widget.subreddit = self._subreddit
return widget
| 37.444198
| 79
| 0.52225
|
8346d2286e247ca62de75f4e92657dc7efd58851
| 1,990
|
py
|
Python
|
var/spack/repos/builtin/packages/pkgconf/package.py
|
robertsawko/spack
|
135cf4835f5b646c4aaa0e2eb5552c80fc3a5ce8
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1
|
2019-11-28T10:14:14.000Z
|
2019-11-28T10:14:14.000Z
|
var/spack/repos/builtin/packages/pkgconf/package.py
|
robertsawko/spack
|
135cf4835f5b646c4aaa0e2eb5552c80fc3a5ce8
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/pkgconf/package.py
|
robertsawko/spack
|
135cf4835f5b646c4aaa0e2eb5552c80fc3a5ce8
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1
|
2017-01-21T17:19:32.000Z
|
2017-01-21T17:19:32.000Z
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Pkgconf(AutotoolsPackage):
"""pkgconf is a program which helps to configure compiler and linker
flags for development frameworks. It is similar to pkg-config from
freedesktop.org, providing additional functionality while also
maintaining compatibility."""
homepage = "http://pkgconf.org/"
# URL must remain http:// so Spack can bootstrap curl
url = "http://distfiles.dereferenced.org/pkgconf/pkgconf-1.6.3.tar.xz"
version('1.6.3', sha256='61f0b31b0d5ea0e862b454a80c170f57bad47879c0c42bd8de89200ff62ea210')
version('1.6.1', sha256='22b9ee38438901f9d60f180e5182821180854fa738fd071f593ea26a81da208c')
version('1.6.0', sha256='6135a3abb576672ba54a899860442ba185063f0f90dae5892f64f7bae8e1ece5')
version('1.5.4', sha256='9c5864a4e08428ef52f05a41c948529555458dec6d283b50f8b7d32463c54664')
version('1.4.2', sha256='bab39371d4ab972be1d539a8b10b6cc21f8eafc97f617102e667e82bd32eb234')
version('1.4.0', sha256='1d112ff35dad516ffbfbdb013df3a035140618fe7632ec44ffa894a9c713301b')
version('1.3.10', sha256='62577d265fa9415a57a77a59dede5526b7ece3ef59a750434b281b262f0c1da9')
version('1.3.8', sha256='fc06f058e6905435481f649865ca51000192c91808f307b1053ca5e859cb1488')
provides('pkgconfig')
# TODO: Add a package for the kyua testing framework
# depends_on('kyua', type='test')
def setup_dependent_build_environment(self, env, dependent_spec):
"""Adds the ACLOCAL path for autotools."""
env.append_path('ACLOCAL_PATH', self.prefix.share.aclocal)
@run_after('install')
def link_pkg_config(self):
symlink('pkgconf', '{0}/pkg-config'.format(self.prefix.bin))
symlink('pkgconf.1',
'{0}/pkg-config.1'.format(self.prefix.share.man.man1))
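    # Usage sketch (an assumption about a dependent package, not part of this
    # file): because of the ``provides('pkgconfig')`` directive above, another
    # Spack package can request any pkg-config provider generically with:
    #
    #     depends_on('pkgconfig', type='build')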
| 47.380952
| 96
| 0.749749
|
12cdc56d417303eefb74628de55631746cab8289
| 5,558
|
py
|
Python
|
contrib/seeds/makeseeds.py
|
L00119483/TechSquad.io
|
3ebafca95c5b125f3dbe52d9d4cde29c61a48975
|
[
"MIT"
] | 4
|
2018-06-16T20:08:19.000Z
|
2018-08-22T15:44:58.000Z
|
contrib/seeds/makeseeds.py
|
L00119483/TechSquad.io
|
3ebafca95c5b125f3dbe52d9d4cde29c61a48975
|
[
"MIT"
] | null | null | null |
contrib/seeds/makeseeds.py
|
L00119483/TechSquad.io
|
3ebafca95c5b125f3dbe52d9d4cde29c61a48975
|
[
"MIT"
] | 7
|
2018-06-06T18:51:07.000Z
|
2018-09-08T15:17:04.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2013-2018 The Bitcoin Core developers
# Copyright (c) 2018 The Wagerr developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 615801
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
""
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/WagerrCore:2.2.(0|1|99)/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
    if int(sline[1]) == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
if len(sline) > 11:
agent = sline[11][1:] + sline[12][:-1]
else:
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def filtermultiport(ips):
    '''Filter out IPs that host more than one node (same IP on multiple ports).'''
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
# Sift out ips by type
ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv4 by ASN
result = []
asn_count = {}
for ip in ips_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
        except Exception:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(ips_ipv6)
result.extend(ips_onion)
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
    # Skip entries without a valid address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
ips = [ip for ip in ips if ip['uptime'] > 50]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(re.sub(' ', '-', ip['agent']))]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple wagerr ports; these are likely abusive.
ips = filtermultiport(ips)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
| 32.127168
| 186
| 0.56855
|
3505090dc6828f9ad3d779efed94c093bdd8366b
| 1,943
|
py
|
Python
|
MA333/Project1/logistic_regression.py
|
iydon/homework
|
253d4746528ef62d33eba1de0b90dcb17ec587ed
|
[
"MIT"
] | 8
|
2019-10-20T08:18:54.000Z
|
2021-07-11T12:14:56.000Z
|
MA333/Project1/logistic_regression.py
|
AllenYZB/homework
|
65bd3372df197bec5e152a37cdc1f6f5432b7f3e
|
[
"MIT"
] | 2
|
2022-01-13T03:04:10.000Z
|
2022-03-12T00:49:10.000Z
|
MA333/Project1/logistic_regression.py
|
AllenYZB/homework
|
65bd3372df197bec5e152a37cdc1f6f5432b7f3e
|
[
"MIT"
] | 2
|
2019-11-02T05:46:01.000Z
|
2020-03-12T23:11:28.000Z
|
import numpy as np
from numpy import array, diag, divide, exp, matrix, power, zeros
from numpy.linalg import inv, norm
class logistic_regression_IRLS(object):
"""
Maximum likelihood approach for logistic regression.
"""
def __init__(self):
self.w = None
def fit(self, X, y, w0=None, erf=None, maxit=None, display=None):
self.__newton_raphson(X, y, w0, erf, maxit, display)
def predict(self, X):
y = X * self.w
return array((y>0).astype(int).T)[0]
def __newton_raphson(self, X, y, w0, erf, maxit, display):
"""
Args:
X: nxd matrix.
y: nx1 column vector.
w0: dx1 column vector.
erf: error function with two parameters.
                erf = lambda old,new: ...
            maxit: maximum number of Newton-Raphson iterations (defaults to 64).
            display: if True, print a message when the iteration converges.
        """
# Pre-process
X = matrix(X)
n,d = X.shape
y = matrix(y).reshape(n, 1)
        if w0 is None:  # use `is None`: `==` against a NumPy array is elementwise and breaks `if`
            w0 = zeros((d, 1))
        if erf is None:
            erf = lambda old,new: norm(old-new,2)<1e-6
        if maxit is None:
            maxit = 64
        if display is None:
display = False
w1 = zeros((d, 1))
ex = zeros((n, 1))
# Iterations
for i in range(maxit):
ex = exp(X*w0).T
D = diag(array(ex/power(1+ex,2))[0])
w1 = w0 + inv(X.T*D*X)*X.T*(y-(ex/(1+ex)).T)
if erf(w0, w1):
if display:
print("Convergence @ the %d iteration(s)."%i)
break
w0 = w1
self.w = w1
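# Editorial note on the loop above: each pass performs the IRLS / Newton-Raphson update
#   w_new = w_old + (X^T D X)^(-1) X^T (y - mu),
# with mu_i = exp(x_i^T w) / (1 + exp(x_i^T w)) and D = diag(mu_i * (1 - mu_i)),
# which is exactly what `ex`, `D` and `w1` compute in __newton_raphson.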
"""
from sklearn.linear_model import LogisticRegression as Regression
X = np.random.rand(20, 2)-0.5
y = (np.sum(X,1)>0).astype(int)
model_1 = logistic_regression_IRLS()
model_1.fit(X, y)
y_1 = model_1.predict(X)
model_2 = Regression(solver="lbfgs")
model_2.fit(X, y)
y_2 = model_2.predict(X)
dy_1 = norm(y_1-y, 2)
dy_2 = norm(y_2-y, 2)
print(dy_1, dy_2)
"""
| 25.233766
| 69
| 0.523932
|
f11f5b0501de5378da84aa992367d19cb64313f5
| 1,786
|
py
|
Python
|
backend/server.py
|
SeonminKim1/tomato-disease-detector
|
265f8266880f7cbda4ac601552bb14d6acee9357
|
[
"MIT"
] | 4
|
2021-04-18T06:26:50.000Z
|
2022-03-18T08:12:43.000Z
|
backend/server.py
|
SeonminKim1/tomato-disease-detector
|
265f8266880f7cbda4ac601552bb14d6acee9357
|
[
"MIT"
] | null | null | null |
backend/server.py
|
SeonminKim1/tomato-disease-detector
|
265f8266880f7cbda4ac601552bb14d6acee9357
|
[
"MIT"
] | 2
|
2021-04-17T14:10:37.000Z
|
2021-11-02T13:51:03.000Z
|
from fastapi import FastAPI, File, BackgroundTasks
from fastapi.logger import logger
from fastapi.responses import Response
import cv2
import asyncio
from detection import get_model as get_det_model
import io
import numpy as np
from PIL import Image
detector_model = get_det_model("./weights/yolov5s_tomato_7classes.pt")
app = FastAPI(
title="Tomato Disease Detector",
description="Plant Disease Detector using DL Models",
version="0.1.0",
)
tasks = {}
@app.post("/detection/image")
def post_predict_disease_detector_image(file: bytes = File(...)):
logger.info("get image")
image = Image.open(io.BytesIO(file)) # .convert("RGB")
open_cv_image = np.array(image)
open_cv_image = open_cv_image[:, :, ::-1].copy()
converted_img = detector_model.detect(open_cv_image, image_size=416)
converted_img = Image.fromarray(converted_img)
bytes_io = io.BytesIO()
converted_img.save(bytes_io, format="PNG")
return Response(bytes_io.getvalue(), media_type="image/png")
@app.post("/detection/video")
async def post_predict_disease_detector_video(background_tasks: BackgroundTasks):
logger.info(f"Post Success Video")
name = f"/var/lib/assets/detect1.mp4"
logger.info(f"file: {name}")
video_path = "/var/lib/assets/video1.mp4"
cap = cv2.VideoCapture(video_path)
background_tasks.add_task(
detector_model.detect, cap, image_size=416, video=True, save_path=name
)
# asyncio.create_task(
# detector_model.detect(cap, image_size=416, video=True, save_path=name)
# )
@app.get("/detection/video/status")
async def get_predict_disease_detector_video():
status, progress, save_path = detector_model.get_status()
return {"status": status, "progress": progress, "save_path": save_path}
| 28.349206
| 81
| 0.727324
|
f25a26e0c93d2806a4db8913632e3ebd6c2b0959
| 1,627
|
py
|
Python
|
tests/resources/mlflow-test-plugin/setup.py
|
rbrown-kayak/mlflow
|
f3af81f229b9dc4ec6f97eb4996d260df20db824
|
[
"Apache-2.0"
] | 2
|
2020-09-15T14:19:27.000Z
|
2021-05-21T09:59:02.000Z
|
tests/resources/mlflow-test-plugin/setup.py
|
rbrown-kayak/mlflow
|
f3af81f229b9dc4ec6f97eb4996d260df20db824
|
[
"Apache-2.0"
] | 36
|
2019-04-11T21:45:45.000Z
|
2022-03-11T05:05:16.000Z
|
tests/resources/mlflow-test-plugin/setup.py
|
rbrown-kayak/mlflow
|
f3af81f229b9dc4ec6f97eb4996d260df20db824
|
[
"Apache-2.0"
] | 2
|
2020-06-28T01:39:36.000Z
|
2022-01-06T17:26:50.000Z
|
from setuptools import setup, find_packages
setup(
name="mlflow-test-plugin",
version="0.0.1",
description="Test plugin for MLflow",
packages=find_packages(),
# Require MLflow as a dependency of the plugin, so that plugin users can simply install
# the plugin & then immediately use it with MLflow
install_requires=["mlflow"],
entry_points={
# Define a Tracking Store plugin for tracking URIs with scheme 'file-plugin'
"mlflow.tracking_store": "file-plugin=mlflow_test_plugin.file_store:PluginFileStore",
# Define a ArtifactRepository plugin for artifact URIs with scheme 'file-plugin'
"mlflow.artifact_repository": "file-plugin=mlflow_test_plugin.local_artifact:PluginLocalArtifactRepository", # noqa
# Define a RunContextProvider plugin. The entry point name for run context providers
# is not used, and so is set to the string "unused" here
"mlflow.run_context_provider": "unused=mlflow_test_plugin.run_context_provider:PluginRunContextProvider", # noqa
# Define a Model Registry Store plugin for tracking URIs with scheme 'file-plugin'
"mlflow.model_registry_store": "file-plugin=mlflow_test_plugin.sqlalchemy_store:PluginRegistrySqlAlchemyStore", # noqa
# Define a MLflow Project Backend plugin called 'dummy-backend'
"mlflow.project_backend": "dummy-backend=mlflow_test_plugin.dummy_backend:PluginDummyProjectBackend", # noqa
# Define a MLflow model deployment plugin for target 'faketarget'
"mlflow.deployments": "faketarget=mlflow_test_plugin.fake_deployment_plugin",
},
)
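# Editorial note: once this package is installed next to MLflow, the entry points above are
# discovered automatically, so a tracking URI using the plugin scheme routes to the plugin
# store. Illustrative usage (run from a separate script, not from setup.py; path is made up):
#   import mlflow
#   mlflow.set_tracking_uri("file-plugin:///tmp/mlruns")
#   with mlflow.start_run():
#       mlflow.log_param("p", 1)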
| 58.107143
| 127
| 0.742471
|
508d112499ac12ac87ba4cf9124683ffecfdeb6d
| 5,624
|
py
|
Python
|
SSnet_in_Action/grad.py
|
ekraka/SSnet
|
6a28140b2e54e5415553609a612fcae92f9103f0
|
[
"MIT"
] | 20
|
2020-01-23T07:29:27.000Z
|
2022-03-22T12:38:33.000Z
|
SSnet_in_Action/grad.py
|
ekraka/SSnet
|
6a28140b2e54e5415553609a612fcae92f9103f0
|
[
"MIT"
] | 3
|
2020-05-19T18:43:19.000Z
|
2021-07-30T16:13:48.000Z
|
SSnet_in_Action/grad.py
|
ekraka/SSnet
|
6a28140b2e54e5415553609a612fcae92f9103f0
|
[
"MIT"
] | 5
|
2020-02-07T18:55:23.000Z
|
2021-07-15T01:43:47.000Z
|
from math import log10, floor
from keras.models import Sequential,Model
import tensorflow as tf
from keras.layers import Dense, Activation, Input,RepeatVector,Embedding, Flatten, Concatenate,Dropout
from keras.models import Model
from keras.utils.vis_utils import model_to_dot
#from sklearn import metrics as mt
from keras import metrics
#from sklearn.metrics import confusion_matrix
#from sklearn.metrics import accuracy_score
#import matplotlib.pyplot as plt
#from sklearn.model_selection import train_test_split
from keras.layers import average, concatenate,RepeatVector,Lambda,add,subtract
from keras import backend as K
from keras.layers import Conv2D, MaxPooling2D,Conv1D,GlobalMaxPooling1D,MaxPooling1D,Reshape,Add
from keras.layers import Conv1D, GlobalAveragePooling1D, MaxPooling1D,AveragePooling1D
from keras.regularizers import l2
import numpy as np
def get_heatm(conv, x, model, ref = 0):
predicted_class = int(round(model.predict(x)[0][0]))
predicted_class_output = model.output#[:, predicted_class]
last_conv_layer = model.get_layer(conv)
# This is the gradient of the predicted class with regard to
# the output feature map of `block5_conv3`
grads = K.gradients(predicted_class_output, last_conv_layer.output)[0]
#print (grads.shape)
a, s = grads.shape[-2:]
#print(model.predict(x))
# This is a vector of shape (512,), where each entry
# is the mean intensity of the gradient over a specific feature map channel
##pooled_grads = grads
pooled_grads = K.mean(grads, axis=(0,1))
#print (pooled_grads.shape)
# This function allows us to access the values of the quantities we just defined:
# `pooled_grads` and the output feature map of `block5_conv3`,
# given a sample image
#print (last_conv_layer.output.shape)
iterate = K.function([model.input[0], model.input[1]], [pooled_grads, last_conv_layer.output[0]])
# These are the values of these two quantities, as Numpy arrays,
# given our sample image
#print (iterate(x))
pooled_grads_value, conv_layer_output_value = iterate(x)
# We multiply each channel in the feature map array
# by "how important this channel is" with regard to the predicted class
for i in range(s):
conv_layer_output_value[:,i] *= pooled_grads_value[i]
# The channel-wise mean of the resulting feature map
# is our heatmap of class activation
#print (conv_layer_output_value.shape)
heatmap = np.mean(conv_layer_output_value, axis=(-1,))
heatmap = np.maximum(heatmap, 0)
if np.max(heatmap) > 0:
heatmap /= np.max(heatmap)
return heatmap
def shape_heatmap(heatmap):
f = int(9000 / heatmap.shape[0])
print ('Factor:', f)
l = []
for i in heatmap:
l += [i]*f
if len(l) < 9000:
l += [0]*(9000 - len(l))
return np.array(l).reshape((6, 1500))
def round_sig(x, sig=3):
if x < 1e-2:
return 0.0
return round(x, sig-int(floor(log10(abs(x))))-1)
def create_pdb(pdb, heatmap):
gami, gama = np.amin(heatmap), np.amax(heatmap)
for i in range (len(heatmap)):
a = heatmap[i]
#gami, gama = a.min(), a.max()
heatmap[i] = np.interp(a, (gami, gama), (0.0, 100.0))
#print (np.max(heatmap))
f = open(pdb, 'r')
lines = f.readlines()
f.close()
st, ref = [], []
mod = 0 # model
res = [] # residue
for line in lines:
k = line.strip().split()
if len(k) < 1:
continue
if line.strip().split()[0] in ['MODEL', 'ENDMDL']:
st.append(line.strip())
ref.append(None)
if 'TER' in line[:3]:
st.append(line.strip())
ref.append(None)
mod += 1
res = []
if 'ATOM' in line[:4]:
nres = int(line[22:26])
if nres not in res:
res.append(nres)
st.append(line[:61])
ref.append(round_sig(heatmap[mod][len(res) - 1]))
print ('Number of chains:', mod)
k_ref = []
for i in ref:
if i is not None:
k_ref.append(i)
else:
#print (ref)
k_ref.append(0)#ref[0])
k_ref = np.array(k_ref)
k_ref = np.interp(k_ref, (k_ref.min(), k_ref.max()), (0.0, 100.0))
fst = ''
for i in range (len(st)):
if ref[i] is not None:
ans = str(round_sig(k_ref[i]))
#print (ans)
fst += st[i][:-1]+' '*(6-len(ans))+ans + '\n'
#print (fst)
#stop()
else:
fst += st[i]+ '\n'
#print (ref[i])
'''
clusters = m_box.job(fst, 1)
count = i
for i in clusters:
print (i)
l = ['HETATM', str(count), 'Zn', 'Zn', 'A', str(count), round(i[0]), round(i[1]), round(i[2]), '1.00', '0.00', 'Zn']
fst += "{:>6}{:>5}{:>4} {:>4}{:>2}{:>4}{:>12}{:>8}{:>8}{:>6}{:>6}{:>12}".format(*l)+'\n'
count += 1
'''
g = open(pdb[:-4] + '_GRAD.pdb', 'w')
#print (fst)
g.write(fst)
g.close()
print ('File written as', pdb[:-4] + '_GRAD.pdb')
def job(proteins, X, pdb, model):
l = ['conv1d_3', 'conv1d_6', 'conv1d_9', 'conv1d_12']
heatmap = None
x = [proteins.reshape((-1, 9000, 2)), X.reshape((-1, 512))]
for i in l:
if heatmap is None:
heatmap = get_heatm(i, x, model, 1)/len(l)
else:
heatmap += get_heatm(i, x, model)/len(l)
heatmap = shape_heatmap(heatmap)
create_pdb(pdb, heatmap)
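# Editorial sketch: an illustrative driver for job(). File names are hypothetical and the
# real feature extraction (protein grid and ligand vector) lives elsewhere in SSnet;
# job() only assumes inputs reshapeable to (-1, 9000, 2) and (-1, 512).
#   from keras.models import load_model
#   model = load_model('ssnet_weights.h5')          # hypothetical weights file
#   proteins = np.load('protein_features.npy')      # reshaped internally to (-1, 9000, 2)
#   ligand = np.load('ligand_fingerprint.npy')      # reshaped internally to (-1, 512)
#   job(proteins, ligand, 'target.pdb', model)      # writes target_GRAD.pdb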
| 32.137143
| 124
| 0.592639
|
04724626f79b5b1e346ff9ddccc8a621d0ea122c
| 14,575
|
py
|
Python
|
platonpm/package.py
|
shinnng/platon.py
|
3197fac3839896290210da04dd0d45f0bdc731ce
|
[
"MIT"
] | null | null | null |
platonpm/package.py
|
shinnng/platon.py
|
3197fac3839896290210da04dd0d45f0bdc731ce
|
[
"MIT"
] | null | null | null |
platonpm/package.py
|
shinnng/platon.py
|
3197fac3839896290210da04dd0d45f0bdc731ce
|
[
"MIT"
] | null | null | null |
import json
from pathlib import (
Path,
)
from typing import (
TYPE_CHECKING,
Any,
Dict,
Generator,
Iterable,
List,
Optional,
Tuple,
Type,
Union,
cast,
)
from platon_typing import (
URI,
Address,
ContractName,
Manifest,
)
from platon_utils import (
to_canonical_address,
to_dict,
to_text,
to_tuple,
)
from platonpm._utils.cache import (
cached_property,
)
from platonpm._utils.contract import (
generate_contract_factory_kwargs,
)
from platonpm._utils.deployments import (
get_linked_deployments,
normalize_linked_references,
validate_deployments_tx_receipt,
validate_linked_references,
)
from platonpm.contract import (
LinkableContract,
)
from platonpm.dependencies import (
Dependencies,
)
from platonpm.deployments import (
DeploymentData,
Deployments,
)
from platonpm.exceptions import (
BytecodeLinkingError,
PlatonPMValidationError,
FailureToFetchIPFSAssetsError,
InsufficientAssetsError,
PyPlatonPMError,
)
from platonpm.uri import (
resolve_uri_contents,
)
from platonpm.validation.manifest import (
check_for_deployments,
validate_build_dependencies_are_present,
validate_manifest_against_schema,
validate_manifest_deployments,
validate_raw_manifest_format,
)
from platonpm.validation.misc import (
validate_w3_instance,
)
from platonpm.validation.package import (
validate_build_dependency,
validate_contract_name,
validate_minimal_contract_factory_data,
)
from platonpm.validation.uri import (
validate_single_matching_uri,
)
from platon._utils.validation import (
validate_address,
)
from platon.platon import (
Contract,
)
if TYPE_CHECKING:
from platon import Web3
class Package(object):
def __init__(
self, manifest: Dict[str, Any], w3: "Web3", uri: Optional[str] = None
) -> None:
"""
A package should be created using one of the available
classmethods and a valid w3 instance.
"""
if not isinstance(manifest, dict):
raise TypeError(
"Package object must be initialized with a dictionary. "
f"Got {type(manifest)}"
)
if "manifest" not in manifest or manifest["manifest"] != "platonpm/3":
raise PlatonPMValidationError(
"Py-Platonpm currently only supports v3 platonpm manifests. "
"Please use the CLI to update or re-generate a v3 manifest. "
)
validate_manifest_against_schema(manifest)
validate_manifest_deployments(manifest)
validate_w3_instance(w3)
self.w3 = w3
self.w3.platon.defaultContractFactory = cast(Type[Contract], LinkableContract)
self.manifest = manifest
self._uri = uri
def update_w3(self, w3: "Web3") -> "Package":
"""
Returns a new instance of `Package` containing the same manifest,
but connected to a different platon instance.
.. doctest::
>>> new_w3 = Web3(Web3.PlatonTesterProvider())
>>> NewPackage = OwnedPackage.update_w3(new_w3)
>>> assert NewPackage.w3 == new_w3
>>> assert OwnedPackage.manifest == NewPackage.manifest
"""
validate_w3_instance(w3)
return Package(self.manifest, w3, self.uri)
def __repr__(self) -> str:
"""
String readable representation of the Package.
.. doctest::
>>> OwnedPackage.__repr__()
'<Package owned==1.0.0>'
"""
name = self.name
version = self.version
return f"<Package {name}=={version}>"
@property
def name(self) -> str:
"""
The name of this ``Package``.
.. doctest::
>>> OwnedPackage.name
'owned'
"""
return self.manifest["name"]
@property
def version(self) -> str:
"""
The package version of a ``Package``.
.. doctest::
>>> OwnedPackage.version
'1.0.0'
"""
return self.manifest["version"]
@property
def manifest_version(self) -> str:
"""
The manifest version of a ``Package``.
.. doctest::
>>> OwnedPackage.manifest_version
'platonpm/3'
"""
return self.manifest["manifest"]
@property
def uri(self) -> Optional[str]:
"""
The uri (local file_path / content-addressed URI) of a ``Package``'s manifest.
"""
return self._uri
@property
def contract_types(self) -> List[str]:
"""
All contract types included in this package.
"""
if 'contractTypes' in self.manifest:
return sorted(self.manifest['contractTypes'].keys())
else:
raise ValueError("No contract types found in manifest; {self.__repr__()}.")
@classmethod
def from_file(cls, file_path: Path, w3: "Web3") -> "Package":
"""
Returns a ``Package`` instantiated by a manifest located at the provided Path.
``file_path`` arg must be a ``pathlib.Path`` instance.
A valid ``Web3`` instance is required to instantiate a ``Package``.
"""
if isinstance(file_path, Path):
raw_manifest = file_path.read_text()
validate_raw_manifest_format(raw_manifest)
manifest = json.loads(raw_manifest)
else:
raise TypeError(
"The Package.from_file method expects a pathlib.Path instance."
f"Got {type(file_path)} instead."
)
return cls(manifest, w3, file_path.as_uri())
@classmethod
def from_uri(cls, uri: URI, w3: "Web3") -> "Package":
"""
Returns a Package object instantiated by a manifest located at a content-addressed URI.
A valid ``Web3`` instance is also required.
URI schemes supported:
- IPFS: `ipfs://Qm...`
- HTTP: `https://api.github.com/repos/:owner/:repo/git/blobs/:file_sha`
- Registry: `erc1319://registry.platon:1/greeter?version=1.0.0`
.. code:: python
OwnedPackage = Package.from_uri('ipfs://QmbeVyFLSuEUxiXKwSsEjef7icpdTdA4kGG9BcrJXKNKUW', w3)
"""
contents = to_text(resolve_uri_contents(uri))
validate_raw_manifest_format(contents)
manifest = json.loads(contents)
return cls(manifest, w3, uri)
#
# Contracts
#
def get_contract_factory(self, name: ContractName) -> LinkableContract:
"""
        Return the contract factory for a given contract type, generated from the data available
in ``Package.manifest``. Contract factories are accessible from the package class.
.. code:: python
Owned = OwnedPackage.get_contract_factory('owned')
In cases where a contract uses a library, the contract factory will have
unlinked bytecode. The ``platonpm`` package ships with its own subclass of
``platon.contract.Contract``, ``platonpm.contract.LinkableContract`` with a few extra
methods and properties related to bytecode linking.
.. code:: python
>>> math = owned_package.contract_factories.math
>>> math.needs_bytecode_linking
True
>>> linked_math = math.link_bytecode({'MathLib': '0x1234...'})
>>> linked_math.needs_bytecode_linking
False
"""
validate_contract_name(name)
if "contractTypes" not in self.manifest:
raise InsufficientAssetsError(
"This package does not contain any contract type data."
)
try:
contract_data = self.manifest["contractTypes"][name]
except KeyError:
raise InsufficientAssetsError(
"This package does not contain any package data to generate "
f"a contract factory for contract type: {name}. Available contract types include: "
f"{self.contract_types}."
)
validate_minimal_contract_factory_data(contract_data)
contract_kwargs = generate_contract_factory_kwargs(contract_data)
contract_factory = self.w3.platon.contract(**contract_kwargs)
return contract_factory
def get_contract_instance(self, name: ContractName, address: Address) -> Contract:
"""
Will return a ``Web3.contract`` instance generated from the contract type data available
in ``Package.manifest`` and the provided ``address``. The provided ``address`` must be
valid on the connected chain available through ``Package.w3``.
"""
validate_address(address)
validate_contract_name(name)
try:
self.manifest["contractTypes"][name]["abi"]
except KeyError:
raise InsufficientAssetsError(
"Package does not have the ABI required to generate a contract instance "
f"for contract: {name} at address: {address!r}."
)
contract_kwargs = generate_contract_factory_kwargs(
self.manifest["contractTypes"][name]
)
contract_instance = self.w3.platon.contract(
address=address, **contract_kwargs
)
return contract_instance
#
# Build Dependencies
#
@cached_property
def build_dependencies(self) -> "Dependencies":
"""
Return `Dependencies` instance containing the build dependencies available on this Package.
The ``Package`` class should provide access to the full dependency tree.
.. code:: python
>>> owned_package.build_dependencies['zeppelin']
<ZeppelinPackage>
"""
validate_build_dependencies_are_present(self.manifest)
dependencies = self.manifest["buildDependencies"]
dependency_packages = {}
for name, uri in dependencies.items():
try:
validate_build_dependency(name, uri)
dependency_package = Package.from_uri(uri, self.w3)
except PyPlatonPMError as e:
raise FailureToFetchIPFSAssetsError(
f"Failed to retrieve build dependency: {name} from URI: {uri}.\n"
f"Got error: {e}."
)
else:
dependency_packages[name] = dependency_package
return Dependencies(dependency_packages)
#
# Deployments
#
@cached_property
def deployments(self) -> Union["Deployments", Dict[None, None]]:
"""
Returns a ``Deployments`` object containing all the deployment data and contract
instances of a ``Package``'s `contract_types`. Automatically filters deployments
to only expose those available on the current ``Package.w3`` instance.
.. code:: python
package.deployments.get_instance("ContractType")
"""
if not check_for_deployments(self.manifest):
return {}
all_blockchain_uris = self.manifest["deployments"].keys()
matching_uri = validate_single_matching_uri(all_blockchain_uris, self.w3)
deployments = self.manifest["deployments"][matching_uri]
all_contract_instances = self._get_all_contract_instances(deployments)
validate_deployments_tx_receipt(deployments, self.w3, allow_missing_data=True)
linked_deployments = get_linked_deployments(deployments)
if linked_deployments:
for deployment_data in linked_deployments.values():
on_chain_bytecode = self.w3.platon.get_code(
deployment_data["address"]
)
unresolved_linked_refs = normalize_linked_references(
deployment_data["runtimeBytecode"]["linkDependencies"]
)
resolved_linked_refs = tuple(
self._resolve_linked_references(link_ref, deployments)
for link_ref in unresolved_linked_refs
)
for linked_ref in resolved_linked_refs:
validate_linked_references(linked_ref, on_chain_bytecode)
return Deployments(deployments, all_contract_instances)
@to_dict
def _get_all_contract_instances(
self, deployments: Dict[str, DeploymentData]
) -> Iterable[Tuple[str, Contract]]:
for deployment_name, deployment_data in deployments.items():
if deployment_data['contractType'] not in self.contract_types:
raise PlatonPMValidationError(
f"Contract type: {deployment_data['contractType']} for alias: "
f"{deployment_name} not found. Available contract types include: "
f"{self.contract_types}."
)
contract_instance = self.get_contract_instance(
ContractName(deployment_data['contractType']),
deployment_data['address'],
)
yield deployment_name, contract_instance
@to_tuple
def _resolve_linked_references(
self, link_ref: Tuple[int, str, str], deployments: Dict[str, Any]
) -> Generator[Tuple[int, bytes], None, None]:
# No nested deployment: i.e. 'Owned'
offset, link_type, value = link_ref
if link_type == "literal":
yield offset, to_canonical_address(value)
elif value in deployments:
yield offset, to_canonical_address(deployments[value]["address"])
# No nested deployment, but invalid ref
elif ":" not in value:
raise BytecodeLinkingError(
f"Contract instance reference: {value} not found in package's deployment data."
)
# Expects child pkg in build_dependencies
elif value.split(":")[0] not in self.build_dependencies:
raise BytecodeLinkingError(
f"Expected build dependency: {value.split(':')[0]} not found "
"in package's build dependencies."
)
# Find and return resolved, nested ref
else:
unresolved_linked_ref = value.split(":", 1)[-1]
build_dependency = self.build_dependencies[value.split(":")[0]]
yield build_dependency._resolve_link_dependencies(unresolved_linked_ref)
def format_manifest(manifest: Manifest, *, prettify: Optional[bool] = None) -> str:
if prettify:
return json.dumps(manifest, sort_keys=True, indent=4)
return json.dumps(manifest, sort_keys=True, separators=(",", ":"))
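# Editorial note: format_manifest() only controls serialization. For example (illustrative),
#   format_manifest({"manifest": "platonpm/3", "name": "owned", "version": "1.0.0"})
# returns the compact form '{"manifest":"platonpm/3","name":"owned","version":"1.0.0"}'
# used for content addressing, while prettify=True returns an indented, human-readable document.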
| 33.276256
| 103
| 0.623877
|
f7695f0d187810f59681c2b0fd80cfab8515725c
| 27,825
|
py
|
Python
|
objectpath/core/interpreter.py
|
SREnity/ObjectPath
|
25958956ef482009be911a8b0a8e679b9f636fc9
|
[
"MIT"
] | 327
|
2015-01-02T13:20:39.000Z
|
2022-03-28T11:30:25.000Z
|
objectpath/core/interpreter.py
|
SREnity/ObjectPath
|
25958956ef482009be911a8b0a8e679b9f636fc9
|
[
"MIT"
] | 71
|
2015-02-03T08:22:58.000Z
|
2021-06-20T07:01:51.000Z
|
objectpath/core/interpreter.py
|
SREnity/ObjectPath
|
25958956ef482009be911a8b0a8e679b9f636fc9
|
[
"MIT"
] | 89
|
2015-02-10T01:02:42.000Z
|
2021-08-09T07:17:33.000Z
|
#!/usr/bin/env python
# This file is part of ObjectPath released under MIT license.
# Copyright (C) 2010-2014 Adrian Kalbarczyk
import sys, re
from .parser import parse
from objectpath.core import *
import objectpath.utils.colorify as color # pylint: disable=W0614
from objectpath.utils import flatten, filter_dict, timeutils, skip
from objectpath.utils.json_ext import py2JSON
from objectpath.core import ITER_TYPES, generator, chain
from objectpath.utils.debugger import Debugger
EPSILON = 0.0000000000000001 #this is used in float comparison
EXPR_CACHE = {}
RE_TYPE = type(re.compile(''))
# setting external modules to 0, thus enabling lazy loading. 0 ensures that Pythonic types are never matched.
# this way is efficient because if statement is fast and once loaded these variables are pointing to libraries.
ObjectId = generateID = calendar = escape = escapeDict = unescape = unescapeDict = 0
class Tree(Debugger):
_REGISTERED_FUNCTIONS = {}
@classmethod
def register_function(cls, name, func):
"""
This method is used to add custom functions not catered for by default
:param str name: The name by which the function will be referred to in the expression
:param callable func: The function
:return:
"""
cls._REGISTERED_FUNCTIONS[name] = func
def __init__(self, obj, cfg=None):
if not cfg:
cfg = {}
self.D = cfg.get("debug", False)
self.setObjectGetter(cfg.get("object_getter", None))
self.setData(obj)
self.current = self.node = None
if self.D: super(Tree, self).__init__()
def setData(self, obj):
if type(obj) in ITER_TYPES + [dict]:
self.data = obj
def setObjectGetter(self, object_getter_cb):
if callable(object_getter_cb):
self.object_getter = object_getter_cb
else:
def default_getter(obj, attr):
try:
return obj.__getattribute__(attr)
except AttributeError:
if self.D:
self.end(color.op(".") + " returning '%s'", color.bold(obj))
return obj
self.object_getter = default_getter
def compile(self, expr):
if expr in EXPR_CACHE:
return EXPR_CACHE[expr]
ret = EXPR_CACHE[expr] = parse(expr, self.D)
return ret
def execute(self, expr):
D = self.D
if D: self.start("Tree.execute")
TYPES = [str, int, float, bool, generator, chain]
try:
TYPES += [long]
except NameError:
pass
# TODO change to yield?
def exe(node):
"""
node[0] - operator name
node[1:] - params
"""
types = [
str, timeutils.datetime.time, timeutils.datetime.date,
timeutils.datetime.datetime
]
try:
types += [unicode]
except:
pass
if D: self.start("executing node %s", color.bold(self.cleanOutput(node)))
type_node = type(node)
if node is None or type_node in TYPES:
return node
elif type_node in types:
return node
elif type_node is list:
return (exe(n) for n in node)
elif type_node is dict:
ret = {}
for i in node.items():
ret[exe(i[0])] = exe(i[1])
return ret
op = node[0]
if op == "or":
if D: self.debug("%s or %s", node[1], node[2])
return exe(node[1]) or exe(node[2])
elif op == "and":
if D: self.debug("%s and %s", node[1], node[2])
return exe(node[1]) and exe(node[2])
elif op == "+":
if len(node) > 2:
fst = exe(node[1])
snd = exe(node[2])
if None in (fst, snd):
return fst or snd
typefst = type(fst)
typesnd = type(snd)
if typefst is dict:
try:
fst.update(snd)
except Exception:
if type(snd) is not dict:
raise ProgrammingError(
"Can't add value of type %s to %s" % (
color.bold(
PY_TYPES_MAP.
get(type(snd).__name__,
type(snd).__name__)
), color.bold("object")
)
)
return fst
if typefst is list and typesnd is list:
if D: self.debug("both sides are lists, returning '%s'", fst + snd)
return fst + snd
if typefst in ITER_TYPES or typesnd in ITER_TYPES:
if typefst not in ITER_TYPES:
fst = [fst]
elif typesnd not in ITER_TYPES:
snd = [snd]
if D: self.debug("at least one side is a generator and the other is an iterable, returning chain")
return chain(fst, snd)
if typefst in NUM_TYPES:
try:
return fst + snd
except Exception:
return fst + float(snd)
if typefst in STR_TYPES or typesnd in STR_TYPES:
if D: self.info("doing string comparison '%s' is '%s'", fst, snd)
if sys.version_info[0] < 3:
if typefst is unicode:
fst = fst.encode("utf-8")
if typesnd is unicode:
snd = snd.encode("utf-8")
return str(fst) + str(snd)
try:
timeType = timeutils.datetime.time
if typefst is timeType and typesnd is timeType:
return timeutils.addTimes(fst, snd)
except Exception:
pass
if D: self.debug("standard addition, returning '%s'", fst + snd)
return fst + snd
else:
return exe(node[1])
elif op == "-":
if len(node) > 2:
fst = exe(node[1])
snd = exe(node[2])
try:
return fst - snd
except Exception:
typefst = type(fst)
typesnd = type(snd)
timeType = timeutils.datetime.time
if typefst is timeType and typesnd is timeType:
return timeutils.subTimes(fst, snd)
else:
return -exe(node[1])
elif op == "*":
return exe(node[1])*exe(node[2])
elif op == "%":
return exe(node[1]) % exe(node[2])
elif op == "/":
return exe(node[1])/float(exe(node[2]))
elif op == ">":
if D: self.debug("%s > %s, %s", node[1], node[2], node[1] > node[2])
return exe(node[1]) > exe(node[2])
elif op == "<":
return exe(node[1]) < exe(node[2])
elif op == ">=":
return exe(node[1]) >= exe(node[2])
elif op == "<=":
return exe(node[1]) <= exe(node[2])
# TODO this algorithm produces 3 for 1<2<3 and should be true
# elif op in "<=>=":
# fst=exe(node[1])
# snd=exe(node[2])
# if op==">":
# return fst > snd and snd or False
# elif op=="<":
# return fst < snd and snd or False
# elif op==">=":
# return fst >= snd and snd or False
# elif op=="<=":
# return fst <= snd and snd or False
elif op == "not":
fst = exe(node[1])
if D: self.debug("doing not '%s'", fst)
return not fst
elif op == "in":
fst = exe(node[1])
snd = exe(node[2])
if D: self.debug("doing '%s' in '%s'", node[1], node[2])
if type(fst) in ITER_TYPES and type(snd) in ITER_TYPES:
return any(
x in max(fst, snd, key=len) for x in min(fst, snd, key=len)
)
return exe(node[1]) in exe(node[2])
elif op == "not in":
fst = exe(node[1])
snd = exe(node[2])
if D: self.debug("doing '%s' not in '%s'", node[1], node[2])
if type(fst) in ITER_TYPES and type(snd) in ITER_TYPES:
return not any(
x in max(fst, snd, key=len) for x in min(fst, snd, key=len)
)
return exe(node[1]) not in exe(node[2])
elif op in ("is", "is not"):
if D: self.debug("found operator '%s'", op)
# try:
fst = exe(node[1])
# except Exception as e:
# if D: self.debug("NOT ERROR! Can't execute node[1] '%s', error: '%s'. Falling back to orginal value.",node[1],str(e))
# fst=node[1]
# try:
snd = exe(node[2])
# except Exception as e:
# if D: self.debug("NOT ERROR! Can't execute node[2] '%s', error: '%s'. Falling back to orginal value.",node[2],str(e))
# snd=node[2]
if op == "is" and fst == snd:
return True
# this doesn't work for 3 is not '3'
# if op == "is not" and fst != snd:
# return True
typefst = type(fst)
typesnd = type(snd)
if D: self.debug("type fst: '%s', type snd: '%s'", typefst, typesnd)
if typefst in STR_TYPES:
if D: self.info("doing string comparison '\"%s\" is \"%s\"'", fst, snd)
ret = str(fst) == str(snd)
elif typefst is float or typesnd is float:
if D: self.info("doing float comparison '%s is %s'", fst, snd)
try:
ret = abs(float(fst) - float(snd)) < EPSILON
except:
ret = False
elif typefst is int or typesnd is int:
if D: self.info("doing integer comparison '%s is %s'", fst, snd)
try:
ret = int(fst) == int(snd)
except:
ret = False
elif typefst is list and typesnd is list:
if D: self.info("doing array comparison '%s' is '%s'", fst, snd)
ret = fst == snd
elif typefst is dict and typesnd is dict:
if D: self.info("doing object comparison '%s' is '%s'", fst, snd)
ret = fst == snd
elif fst is None or snd is None:
if fst is None and snd is None:
# this executes only for "is not"
ret = True
else:
ret = (fst or snd) is None
if D: self.info(
"doing None comparison %s is %s = %s", color.bold(fst), color.bold(snd),
color.bold(not not (fst or snd))
)
else:
if D: self.info("can't compare %s and %s. Returning False", self.cleanOutput(fst), self.cleanOutput(snd))
ret = False
# else:
# try:
# global ObjectId
# if not ObjectId:
# from bson.objectid import ObjectId
# if typefst is ObjectId or typesnd is ObjectId:
# if D: self.info("doing MongoDB objectID comparison '%s' is '%s'",fst,snd)
# ret=str(fst)==str(snd)
# else:
# if D: self.info("doing standard comparison '%s' is '%s'",fst,snd)
# ret=fst is snd
# except Exception:
# pass
if op == "is not":
if D: self.info("'is not' found. Returning %s", not ret)
return not ret
else:
if D: self.info("returning %s is %s => %s", color.bold(self.cleanOutput(fst)), color.bold(self.cleanOutput(snd)), color.bold(ret))
return ret
elif op == "re":
return re.compile(exe(node[1]))
elif op == "matches":
fst = exe(node[1])
snd = exe(node[2])
if type(fst) not in STR_TYPES+[RE_TYPE]:
raise Exception("operator " + color.bold("matches") + " expects regexp on the left. Example: 'a.*d' matches 'abcd'")
if type(snd) in ITER_TYPES:
for i in snd:
if not not re.match(fst, i):
return True
return False
else:
# regex matches string
return not not re.match(fst, snd)
# elif op=="(literal)":
# fstLetter=node[1][0]
# if fstLetter is "'":
# return node[1][1:-1]
# elif fstLetter.isdigit:
# return int(node[1])
elif op == "(root)": # this is $
return self.data
# elif op=="(node)":# this is !
# if D: self.debug("returning node %s",self.node)
# return self.node
elif op == "(current)": # this is @
if D: self.debug("returning current node: \n %s", color.bold(self.current))
return self.current
elif op == "name":
return node[1]
elif op == ".":
fst = node[1]
if type(fst) is tuple:
fst = exe(fst)
typefst = type(fst)
if D: self.debug(color.op(".") + " left is '%s'", color.bold(self.cleanOutput(fst)))
# try:
if node[2][0] == "*":
if D:
self.end(
color.op(".") + " returning '%s'",
color.bold(typefst in ITER_TYPES and fst or [fst])
)
return fst # typefst in ITER_TYPES and fst or [fst]
# except:
# pass
snd = exe(node[2])
if D: self.debug(color.op(".") + " right is '%s'", color.bold(snd))
if typefst in ITER_TYPES:
if D: self.debug(
color.op(".") + " filtering %s by %s", color.bold(self.cleanOutput(fst)),
color.bold(snd)
)
if type(snd) in ITER_TYPES:
return filter_dict(fst, list(snd))
else:
# if D: self.debug(list(fst))
return (e[snd] for e in fst if type(e) is dict and snd in e)
try:
if D: self.end(color.op(".") + " returning '%s'", fst.get(snd))
return fst.get(snd)
except Exception:
if isinstance(fst, object):
return self.object_getter(fst, snd)
if D: self.end(color.op(".") + " returning '%s'", color.bold(fst))
return fst
elif op == "..":
fst = flatten(exe(node[1]))
if node[2][0] == "*":
if D: self.debug(color.op("..") + " returning '%s'", color.bold(fst))
return fst
# reduce objects to selected attributes
snd = exe(node[2])
if D: self.debug(
color.op("..") + " finding all %s in %s", color.bold(snd),
color.bold(self.cleanOutput(fst))
)
if type(snd) in ITER_TYPES:
ret = filter_dict(fst, list(snd))
if D: self.debug(color.op("..") + " returning %s", color.bold(ret))
return ret
else:
ret = chain.from_iterable(
type(x) in ITER_TYPES and x or [x]
for x in (e[snd] for e in fst if snd in e)
)
# print list(chain(*(type(x) in ITER_TYPES and x or [x] for x in (e[snd] for e in fst if snd in e))))
if D: self.debug(color.op("..") + " returning %s", color.bold(self.cleanOutput(ret)))
return ret
elif op == "[":
len_node = len(node)
# TODO move it to tree generation phase
if len_node == 1: # empty list
if D: self.debug("returning an empty list")
return []
if len_node == 2: # list - preserved to catch possible event of leaving it as '[' operator
if D: self.debug("doing list mapping")
return [exe(x) for x in node[1]]
if len_node == 3: # selector used []
fst = exe(node[1])
# check against None
if not fst:
return fst
selector = node[2]
if D:
self.debug(
"\n found selector '%s'.\n executing on %s", color.bold(selector),
color.bold(fst)
)
selectorIsTuple = type(selector) is tuple
if selectorIsTuple and selector[0] == "[":
nodeList = []
nodeList_append = nodeList.append
for i in fst:
if D: self.debug("setting self.current to %s", color.bold(i))
self.current = i
nodeList_append(
exe((selector[0], exe(selector[1]), exe(selector[2])))
)
if D: self.debug(
"returning %s objects: %s", color.bold(len(nodeList)),
color.bold(nodeList)
)
return nodeList
if selectorIsTuple and selector[0] == "(current)":
if D:
self.warning(
color.bold("$.*[@]") + " is eqivalent to " +
color.bold("$.*") + "!"
)
return fst
if selectorIsTuple and selector[0] in SELECTOR_OPS:
if D: self.debug(
"found %s operator in selector, %s", color.bold(selector[0]),
color.bold(selector)
)
if type(fst) is dict:
fst = [fst]
# TODO move it to tree building phase
if type(selector[1]) is tuple and selector[1][0] == "name":
selector = (selector[0], selector[1][1], selector[2])
selector0 = selector[0]
selector1 = selector[1]
selector2 = selector[2]
def exeSelector(fst):
for i in fst:
if D:
self.debug("setting self.current to %s", color.bold(i))
self.debug(" s0: %s\n s1: %s\n s2: %s\n Current: %s", selector0, selector1, selector2, i)
self.current = i
if selector0 == "fn":
yield exe(selector)
# elif type(selector1) in STR_TYPES and False:
# if D: self.debug("found string %s", type(i))
# try:
# if exe((selector0,i[selector1],selector2)):
# yield i
# if D: self.debug("appended")
# if D: self.debug("discarded")
# except Exception as e:
# if D: self.debug("discarded, Exception: %s",color.bold(e))
else:
try:
# TODO optimize an event when @ is not used. exe(selector1) can be cached
if exe((selector0, exe(selector1), exe(selector2))):
yield i
if D: self.debug("appended %s", i)
elif D: self.debug("discarded")
except Exception:
if D: self.debug("discarded")
# if D and nodeList: self.debug("returning '%s' objects: '%s'", color.bold(len(nodeList)), color.bold(nodeList))
return exeSelector(fst)
self.current = fst
snd = exe(node[2])
typefst = type(fst)
if typefst in [tuple] + ITER_TYPES + STR_TYPES:
typesnd = type(snd)
# nodes[N]
if typesnd in NUM_TYPES or typesnd is str and snd.isdigit():
n = int(snd)
if D:
self.info(
"getting %sth element from '%s'", color.bold(n),
color.bold(fst)
)
if typefst in (generator, chain):
if n > 0:
return skip(fst, n)
elif n == 0:
return next(fst)
else:
fst = list(fst)
else:
try:
return fst[n]
except (IndexError, TypeError):
return None
# $.*['string']==$.string
if type(snd) in STR_TYPES:
return exe((".", fst, snd))
else:
# $.*[@.string] - bad syntax, but allowed
return snd
else:
try:
if D: self.debug("returning %s", color.bold(fst[snd]))
return fst[snd]
except KeyError:
# CHECK - is it ok to do that or should it be ProgrammingError?
if D: self.debug("returning an empty list")
return []
raise ProgrammingError(
"Wrong usage of " + color.bold("[") + " operator"
)
elif op == "fn":
# Built-in functions
fnName = node[1]
args = None
try:
args = [exe(x) for x in node[2:]]
except IndexError:
if D:
self.debug("NOT ERROR: can't map '%s' with '%s'", node[2:], exe)
# arithmetic
if fnName == "sum":
args = args[0]
if type(args) in NUM_TYPES:
return args
return sum((x for x in args if type(x) in NUM_TYPES))
elif fnName == "max":
args = args[0]
if type(args) in NUM_TYPES:
return args
return max((x for x in args if type(x) in NUM_TYPES))
elif fnName == "min":
args = args[0]
if type(args) in NUM_TYPES:
return args
return min((x for x in args if type(x) in NUM_TYPES))
elif fnName == "avg":
args = args[0]
if type(args) in NUM_TYPES:
return args
if type(args) not in ITER_TYPES:
raise Exception("Argument for avg() is not an array")
else:
args = list(args)
try:
return sum(args)/float(len(args))
except TypeError:
args = [x for x in args if type(x) in NUM_TYPES]
self.warning("Some items in array were ommited")
return sum(args)/float(len(args))
elif fnName == "round":
return round(*args)
# casting
elif fnName == "int":
return int(args[0])
elif fnName == "float":
return float(args[0])
elif fnName == "str":
return str(py2JSON(args[0]))
elif fnName in ("list", "array"):
try:
a = args[0]
except IndexError:
return []
targs = type(a)
if targs is timeutils.datetime.datetime:
return timeutils.date2list(a) + timeutils.time2list(a)
if targs is timeutils.datetime.date:
return timeutils.date2list(a)
if targs is timeutils.datetime.time:
return timeutils.time2list(a)
return list(a)
# string
elif fnName == "upper":
return args[0].upper()
elif fnName == "lower":
return args[0].lower()
elif fnName == "capitalize":
return args[0].capitalize()
elif fnName == "title":
return args[0].title()
elif fnName == "split":
return args[0].split(*args[1:])
elif fnName == "slice":
if args and type(args[1]) not in ITER_TYPES:
raise ExecutionError(
"Wrong usage of slice(STRING, ARRAY). Second argument is not an array but %s."
% color.bold(type(args[1]).__name__)
)
try:
pos = list(args[1])
if type(pos[0]) in ITER_TYPES:
if D: self.debug("run slice() for a list of slicers")
return (args[0][x[0]:x[1]] for x in pos)
return args[0][pos[0]:pos[1]]
except IndexError:
if len(args) != 2:
raise ProgrammingError(
"Wrong usage of slice(STRING, ARRAY). Provided %s argument, should be exactly 2."
% len(args)
)
elif fnName == "escape":
global escape, escapeDict
if not escape:
from objectpath.utils import escape, escapeDict
return escape(args[0], escapeDict)
elif fnName == "unescape":
global unescape, unescapeDict
if not unescape:
from objectpath.utils import unescape, unescapeDict
return unescape(args[0], unescapeDict)
elif fnName == "replace":
if sys.version_info[0] < 3 and type(args[0]) is unicode:
args[0] = args[0].encode("utf8")
return str.replace(args[0], args[1], args[2])
# TODO this should be supported by /regex/
# elif fnName=="REsub":
# return re.sub(args[1],args[2],args[0])
elif fnName == "sort":
if len(args) > 1:
key = args[1]
a = {"key": lambda x: x.get(key, 0)}
else:
a = {}
args = args[0]
if D: self.debug("doing sort on '%s'", args)
try:
return sorted(args, **a)
except TypeError:
return args
elif fnName == "reverse":
args = args[0]
try:
args.reverse()
return args
except TypeError:
return args
elif fnName == "unique":
try:
return list(set(args[0]))
except TypeError:
return args[0]
elif fnName == "map":
return chain.from_iterable(map(lambda x: exe(("fn", args[0], x)), args[1]))
elif fnName in ("count", "len"):
args = args[0]
if args in (True, False, None):
return args
if type(args) in ITER_TYPES:
return len(list(args))
return len(args)
elif fnName == "join":
try:
joiner = args[1]
except Exception:
joiner = ""
try:
return joiner.join(args[0])
except TypeError:
try:
return joiner.join(map(str, args[0]))
except Exception:
return args[0]
# time
elif fnName in ("now", "age", "time", "date", "dateTime"):
if fnName == "now":
return timeutils.now()
if fnName == "date":
return timeutils.date(args)
if fnName == "time":
return timeutils.time(args)
if fnName == "dateTime":
return timeutils.dateTime(args)
# TODO move lang to localize() entirely!
if fnName == "age":
a = {}
if len(args) > 1:
a["reference"] = args[1]
if len(args) > 2:
a["lang"] = args[2]
return list(timeutils.age(args[0], **a))
elif fnName == "toMillis":
args = args[0]
if args.utcoffset() is not None:
args = args - args.utcoffset() # pylint: disable=E1103
global calendar
if not calendar:
import calendar
return int(
calendar.timegm(args.timetuple())*1000 + args.microsecond/1000
)
elif fnName == "localize":
if type(args[0]) is timeutils.datetime.datetime:
return timeutils.UTC2local(*args)
# polygons
elif fnName == "area":
def segments(p):
p = list(map(lambda x: x[0:2], p))
return zip(p, p[1:] + [p[0]])
return 0.5*abs(
sum(x0*y1 - x1*y0 for ((x0, y0), (x1, y1)) in segments(args[0]))
)
# misc
elif fnName == "keys":
try:
return list(args[0].keys())
except AttributeError:
raise ExecutionError(
"Argument is not " + color.bold("object") +
" but %s in keys()" % color.bold(type(args[0]).__name__)
)
elif fnName == "values":
try:
return list(args[0].values())
except AttributeError:
raise ExecutionError(
"Argument is not " + color.bold("object") +
" but %s in values()" % color.bold(type(args[0]).__name__)
)
elif fnName == "type":
ret = type(args[0])
if ret in ITER_TYPES:
return "array"
if ret is dict:
return "object"
return ret.__name__
elif fnName in self._REGISTERED_FUNCTIONS:
return self._REGISTERED_FUNCTIONS[fnName](*args)
else:
raise ProgrammingError(
"Function " + color.bold(fnName) + " does not exist."
)
else:
return node
D = self.D
if type(expr) in STR_TYPES:
tree = self.compile(expr)
elif type(expr) not in (tuple, list, dict):
      return expr
    else:
      tree = expr  # expr is already a parsed tree (tuple/list/dict); execute it directly
    ret = exe(tree)
if D: self.end("Tree.execute with: %s", color.bold(self.cleanOutput(ret)))
return ret
def __str__(self):
return "TreeObject()"
def __repr__(self):
return self.__str__()
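# Editorial sketch: illustrative use of Tree and of register_function() documented above.
#   tree = Tree({"numbers": [1, 2, 3, 4]})
#   tree.execute("sum($.numbers)")              # -> 10
#   Tree.register_function("double", lambda x: 2 * x)
#   tree.execute("double(21)")                  # -> 42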
| 36.372549
| 140
| 0.503073
|
dcf9f076dc99b145d0b8b275cd0619e22249f8ad
| 1,398
|
py
|
Python
|
release/stubs.min/System/Windows/Forms/__init___parts/ListViewHitTestLocations.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/Windows/Forms/__init___parts/ListViewHitTestLocations.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
release/stubs.min/System/Windows/Forms/__init___parts/ListViewHitTestLocations.py
|
tranconbv/ironpython-stubs
|
a601759e6c6819beff8e6b639d18a24b7e351851
|
[
"MIT"
] | null | null | null |
class ListViewHitTestLocations(Enum,IComparable,IFormattable,IConvertible):
"""
Defines constants that represent areas in a System.Windows.Forms.ListView or System.Windows.Forms.ListViewItem.
enum (flags) ListViewHitTestLocations,values: AboveClientArea (256),BelowClientArea (16),Image (2),Label (4),LeftOfClientArea (64),None (1),RightOfClientArea (32),StateImage (512)
"""
def Instance(self):
""" This function has been arbitrarily put into the stubs"""
return ListViewHitTestLocations()
def __eq__(self,*args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self,*args):
pass
def __gt__(self,*args):
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self,*args):
pass
def __lt__(self,*args):
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
AboveClientArea=None
BelowClientArea=None
Image=None
Label=None
LeftOfClientArea=None
None_ =None
RightOfClientArea=None
StateImage=None
value__=None
| 31.772727
| 215
| 0.703147
|
8a60b898faad0732a55747e9b8bdbfb388882ddc
| 8,528
|
py
|
Python
|
data_loader.py
|
wanghaiyangMIT/rhythm-detection
|
dc6f8522d9bd404b3447f5e1724b9474385bb38c
|
[
"CC-BY-4.0"
] | 2
|
2019-05-21T14:10:18.000Z
|
2021-11-06T18:59:08.000Z
|
data_loader.py
|
wanghaiyangMIT/rhythm-detection-opticalflowpure
|
dc6f8522d9bd404b3447f5e1724b9474385bb38c
|
[
"CC-BY-4.0"
] | null | null | null |
data_loader.py
|
wanghaiyangMIT/rhythm-detection-opticalflowpure
|
dc6f8522d9bd404b3447f5e1724b9474385bb38c
|
[
"CC-BY-4.0"
] | null | null | null |
import os
import json
import sys
import cv2
import time
import pickle
import argparse
import random
from tqdm import tqdm
import numpy as np
import torch
from torch.autograd import Variable
import torch.utils.data as data
import torchvision.transforms as transforms
from utils import DataError,letterbox_image224,prep_image_to_tensor,is_peak
from pre_process import letterbox_image
normalize = transforms.Normalize(mean=[.5], std=[.5])
transform = transforms.Compose([transforms.ToTensor(), normalize])
def get_dataset(args, train_ratio = 0.9):
video_list = os.listdir(args.video_dir)
size = len(video_list)
i = int(train_ratio * size) if not args.debug else size - 10
train_set = MyDataLoader(args, video_list[0:i])
test_set = MyDataLoader(args, video_list[i:], is_test=True)
return train_set, test_set
class MyDataLoader(data.Dataset):
def __init__ (self, args, video_list, is_test=False):
super(MyDataLoader,self).__init__()
self.audio_dir = args.audio_dir
self.video_dir = args.video_dir
self.video_list = video_list
self.epoch_size = args.epoch_size
self.is_test = is_test
        self.num_sample = args.num_sample if not is_test else 30 # only test on the first 30 segments
self.fps = args.fps
self.delta = int(20 / args.fps) # merge how many frames into one
self.theta = args.theta # onset threshold
self.use_label = args.use_label
self.segment_length = args.segment_length
self.dim_video = args.dim_video
self.current_ptr = 0
self.current_sample = self.num_sample
self.current_complete_video = None
self.current_complete_label = None
self.current_total_length = None
def load_file(self):
if self.current_sample == self.num_sample:
self.current_sample = 0 # fetch a new video
else: # use the previous one
return
if self.current_ptr == len(self.video_list):
self.current_ptr = 0
random.shuffle(self.video_list)
while True:
try:
video_name = self.video_list[self.current_ptr]
self.current_ptr += 1
#f=open('videoname_used.txt','a')
#print('1')
#f.write(video_name)
#print('2')
#f.close()
print(video_name)
identi = video_name.split('.')[0].split('_')[-1]
# video (T, H, W, 3)
# video_name = 'frames_' + str(identi) + '.pkl'
'''
with open(os.path.join(self.video_dir, video_name), 'rb') as f:
u = pickle._Unpickler(f)
u.encoding = 'iso-8859-1'
video = u.load().float() # (T, H, W, c)
'''
#video = torch.tensor([])
video =[]
videostream = cv2.VideoCapture(os.path.join(self.video_dir,video_name))
orilen = int(videostream.get(cv2.CAP_PROP_FRAME_COUNT))
for i in range(orilen):
(grabbed,frame) = videostream.read()
'''
frame = transform(frame).permute(1,2,0)
frame = frame.view(-1,224,224,3)
if i == 0:
video = frame
else:
video = torch.cat((video,frame),0)
'''
video.append(frame)
video = torch.tensor(video).float()
videostream.release()
#speed ++
# label (T, 1)
#audio_name = 'feature_3_' + str(identi) + '.pkl'
audio_name = '4_' + str(identi) + '.pkl'
with open (os.path.join(self.audio_dir, audio_name), 'rb') as f:
u = pickle._Unpickler(f)
u.encoding = 'latin1'
'''
##peak
strength = u.load()[:,0]
print(strength.nonzero(),strength.nonzero()[0].shape,strength.shape[0])
peak = np.zeros((strength.shape[0])).astype(bool)
for i in range(strength.shape[0]):
peak[i] = is_peak(i,strength)
strength = strength*peak
print(strength.nonzero(),strength.nonzero()[0].shape,strength.shape[0])
strength = torch.tensor(strength).float()
##
'''
strength = torch.tensor(u.load()).float()[:,0] # (T,)
strength = strength / torch.max(strength[100:])
T = strength.shape[0]
r = T % self.delta
split = list(strength[0:T-r].split(self.delta))
for j in range(len(split)):
split[j] = torch.max(split[j])
strength = torch.stack(split).view(-1, 1) # (T, 1)
#label = strength.ge(self.theta) if self.use_label else strength
label = strength
#print(label.numpy().nonzero()[0].shape)
with open ('test01.json','w') as f:
a = json.dumps(label.numpy().nonzero()[0].tolist())
f.write(a)
self.current_complete_video = video
self.current_complete_label = label
self.current_total_length = min(video.shape[0], label.shape[0])
break
except FileNotFoundError as e:
print(e)
continue
# raise DataError('MyDataLoader:load_file: ' + str(e))
def __getitem__(self, index):
try:
# sample segment
self.load_file() # fetch a new video or use the previous one
beg = self.segment_length * self.current_sample if self.is_test else \
random.randint(0, self.current_total_length - self.segment_length)
self.current_sample += 1
# print(self.current_ptr, self.current_sample)
if beg >= self.current_total_length:
raise DataError('MyDataLoader:__getitem__: exceed total length')
end = beg + self.segment_length
video = self.current_complete_video[beg:end]
label = self.current_complete_label[beg:end]
#print(label)
#f = open('test_label.txt','a')
#f.write('strength'+str(label))
#print(label)
label = label/torch.max(label)
#f.write('normal'+str(label))
#print(label)
#label = label.ge(self.theta) if self.use_label else strength
#ratio = 1. * torch.sum(label).item() / label.shape[0]
#print(ratio)
#if ratio < 0.4 or ratio > 0.8:
#raise DataError('MyDataLoader:__getitem__: too many or too few onsets')
##peak
label = label.numpy()
peak = np.zeros((label.shape[0],label.shape[1])).astype(bool)
for i in range(label.shape[0]):
peak[i] = is_peak(i,label)
label = label*peak
label = torch.tensor(label).float()
#f.write('peak'+str(label))
#print(label)
#ratio = 1. * torch.sum(label).item() / label.shape[0]
#print(ratio)
            label = label.ge(self.theta) if self.use_label else label  # note: `strength` is not in scope here
#f.write('theta'+str(label))
#print(label)
            # cut bad segments
ratio = 1. * torch.sum(label).item() / label.shape[0]
#if ratio < 0.4 or ratio > 0.8:
#f.write('ratio'+str(ratio))
#f.close()
if ratio < 0.2:
                raise DataError('MyDataLoader:__getitem__: too few onsets')
#print(label)
# resize to (1, T, ...)
video = video.unsqueeze(dim=0)
label = label.unsqueeze(dim=0)
#print(label)
# print(self.current_ptr, self.current_sample)
return video, label
#except DataError as e:
except:
#print(e)
zero = torch.tensor([0])
return zero, zero
def __len__(self):
return self.epoch_size
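# Editorial sketch: illustrative wiring of the datasets above into PyTorch loaders; `args` is
# whatever argparse namespace the training script builds (video_dir, audio_dir, fps, theta, ...).
#   train_set, test_set = get_dataset(args)
#   train_loader = data.DataLoader(train_set, batch_size=1, shuffle=False)
#   for video, label in train_loader:
#       if video.numel() == 1:   # rejected segments come back as tensor([0]) placeholders
#           continue
#       ...                      # forward / backward pass on the (1, 1, T, H, W, 3) video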
| 41.198068
| 109
| 0.517824
|
2a356b9daa3542de031ed14a8bd2b7034fa134d8
| 1,688
|
py
|
Python
|
chat/Message.py
|
oinqu/network-protocol-design-course
|
3f190c1a5423a472e98af94e00da08a7c4718028
|
[
"MIT"
] | null | null | null |
chat/Message.py
|
oinqu/network-protocol-design-course
|
3f190c1a5423a472e98af94e00da08a7c4718028
|
[
"MIT"
] | null | null | null |
chat/Message.py
|
oinqu/network-protocol-design-course
|
3f190c1a5423a472e98af94e00da08a7c4718028
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class Message:
"""
Message object.
Author: Stanislav Grebennik
This class represents an incoming or outgoing message. Each message is stored in memory
as a separate object.
Main purpose of this class is to be a cache for message segments. The cache can be used in case
of outgoing messages for waiting acks from destination, or in case of incoming messages
for waiting until the message has been fully transmitted. Then the object is deleted.
"""
def __init__(self, msg_id):
self.msg_id = msg_id
self.retries = 0
self.cache = dict()
def is_full(self):
"""
Check whether the messages cache contains all segments.
Used for checking incoming messages.
"""
total_segments = int(next(iter(self.cache)).split('/')[1])
if len(self.cache) == total_segments:
return True
return False
def is_empty(self):
"""
Check whether the messages cache is empty.
Used for checking outgoing messages.
The outgoing messages segment gets deleted after receiving an ack from destination.
So an empty cache is seen as a successfully sent message.
"""
if self.cache == {}:
return True
return False
def get_msg(self):
"""Compile a message from segments stored in cache."""
out = []
total_segments = int(next(iter(self.cache)).split('/')[1])
for current_segment in range(total_segments):
segment_str = f"{current_segment + 1}/{total_segments}"
out.append(self.cache[segment_str])
return ''.join(out)
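# Editorial sketch: illustrative flow for an incoming two-segment message, using the
# "current/total" keys the cache expects.
#   msg = Message(msg_id=7)
#   msg.cache["1/2"] = "Hel"
#   msg.cache["2/2"] = "lo"
#   msg.is_full()    # -> True
#   msg.get_msg()    # -> "Hello"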
| 32.461538
| 99
| 0.627962
|
2c9bd4aa5bf888a508fefdac3543c948823734b8
| 2,914
|
py
|
Python
|
learning_logs/views.py
|
lincoco/learning_log
|
980c4ae41cd4e34d3208057a77e7d232c389dec3
|
[
"MIT"
] | 1
|
2019-06-03T03:41:26.000Z
|
2019-06-03T03:41:26.000Z
|
learning_logs/views.py
|
lincoco/learning_log
|
980c4ae41cd4e34d3208057a77e7d232c389dec3
|
[
"MIT"
] | null | null | null |
learning_logs/views.py
|
lincoco/learning_log
|
980c4ae41cd4e34d3208057a77e7d232c389dec3
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, HttpResponse
from .models import Topic, Entry
from django.http import HttpResponseRedirect, Http404
from django.urls import reverse
from .forms import TopicForm, EntryForm
from django.contrib.auth.decorators import login_required
# Create your views here.
# print(Topic.objects.get(id=1))
def index(request):
"""学习笔记的主页"""
return render(request, 'learning_logs/index.html')
@login_required
def topics(request):
"""显示所有的主题"""
topics = Topic.objects.filter(owner=request.user).order_by('data_added')
# topics = Topic.objects.order_by('data_added')
context = {'topics': topics}
return render(request, 'learning_logs/topics.html', context)
@login_required
def topic(request, topic_id):
"""显示单个主题及其所有的条目"""
topic = Topic.objects.get(id=topic_id)
    # Make sure the requested topic belongs to the current user.
if topic.owner != request.user:
raise Http404
    entries = topic.entry_set.order_by('-data_added')  # The minus sign sorts in descending order.
context = {'topic': topic, 'entries': entries}
return render(request, 'learning_logs/topic.html', context)
@login_required
def new_topic(request):
"""添加新主题"""
if request.method != 'POST':
        # No data submitted; create a blank form.
form = TopicForm()
else:
form = TopicForm(request.POST)
if form.is_valid():
new_topic = form.save(commit=False)
new_topic.owner = request.user
new_topic.save()
# form.save()
return HttpResponseRedirect(reverse('learning_logs:topics'))
context = {'form': form}
return render(request, 'learning_logs/new_topic.html', context)
@login_required
def new_entry(request, topic_id):
"""在特定的主题中添加新目录"""
topic = Topic.objects.get(id=topic_id)
if request.method != 'POST':
form = EntryForm()
else:
form = EntryForm(request.POST)
if form.is_valid():
new_entry = form.save(commit=False)
new_entry.topic = topic
new_entry.save()
return HttpResponseRedirect(reverse('learning_logs:topic',
args=[topic_id]))
context = {'topic': topic, 'form': form}
return render(request, 'learning_logs/new_entry.html', context)
@login_required
def edit_entry(request, entry_id):
"""编辑既有条目"""
entry = Entry.objects.get(id=entry_id)
topic = entry.topic
if topic.owner != request.user:
raise Http404
if request.method != 'POST':
        # Initial request; pre-fill the form with the current entry.
form = EntryForm(instance=entry)
else:
form = EntryForm(instance=entry, data=request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse('learning_logs:topic',
args=[topic.id]))
context = {'entry': entry, 'topic': topic, 'form': form}
return render(request, 'learning_logs/edit_entry.html', context)
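# Editorial sketch: the reverse() calls above assume URL names like the following
# (illustrative learning_logs/urls.py; the project's actual routing may differ).
#   app_name = 'learning_logs'
#   urlpatterns = [
#       path('', views.index, name='index'),
#       path('topics/', views.topics, name='topics'),
#       path('topics/<int:topic_id>/', views.topic, name='topic'),
#       path('new_topic/', views.new_topic, name='new_topic'),
#       path('new_entry/<int:topic_id>/', views.new_entry, name='new_entry'),
#       path('edit_entry/<int:entry_id>/', views.edit_entry, name='edit_entry'),
#   ]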
| 33.113636
| 76
| 0.639671
|
8d448c062862147b9a18fa904b33463cd1c65391
| 193
|
py
|
Python
|
python-HR/practice/pracOne.py
|
pouyapanahandeh/python3-ref
|
b8865ebb6da8065754a4df341cedfccdf3b3d2c5
|
[
"MIT"
] | null | null | null |
python-HR/practice/pracOne.py
|
pouyapanahandeh/python3-ref
|
b8865ebb6da8065754a4df341cedfccdf3b3d2c5
|
[
"MIT"
] | null | null | null |
python-HR/practice/pracOne.py
|
pouyapanahandeh/python3-ref
|
b8865ebb6da8065754a4df341cedfccdf3b3d2c5
|
[
"MIT"
] | null | null | null |
# write program to remove the duplicates in a list.
numbers = [2,4,6,2,4,5,7,6,7,8,9,8,9]
uniques = []
for index in numbers:
if index not in uniques:
uniques.append(index)
print(uniques)
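# Order-preserving alternative (illustrative): dict keys are unique and keep
# insertion order in Python 3.7+, so list(dict.fromkeys(numbers)) also yields
# [2, 4, 6, 5, 7, 8, 9].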
| 21.444444
| 51
| 0.689119
|
08f7f32298a1481af6c836b7af8def45f7956c94
| 6,768
|
py
|
Python
|
dac/gail.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-13T21:48:52.000Z
|
2022-03-13T21:48:52.000Z
|
dac/gail.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | null | null | null |
dac/gail.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-30T07:20:29.000Z
|
2022-03-30T07:20:29.000Z
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An implementation of GAIL with WGAN discriminator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.contrib import summary as contrib_summary
from tensorflow.contrib.eager.python import tfe as contrib_eager_python_tfe
from tensorflow.contrib.gan.python.losses.python import losses_impl as contrib_gan_python_losses_python_losses_impl
class Discriminator(tf.keras.Model):
"""Implementation of a discriminator network."""
def __init__(self, input_dim):
"""Initializes a discriminator.
Args:
input_dim: size of the input space.
"""
super(Discriminator, self).__init__()
kernel_init = tf.keras.initializers.Orthogonal(gain=1.0)
self.main = tf.keras.Sequential([
tf.layers.Dense(
units=256,
input_shape=(input_dim,),
activation='tanh',
kernel_initializer=kernel_init),
tf.layers.Dense(
units=256, activation='tanh', kernel_initializer=kernel_init),
tf.layers.Dense(units=1, kernel_initializer=kernel_init)
])
def call(self, inputs):
"""Performs a forward pass given the inputs.
Args:
inputs: a batch of observations (tfe.Variable).
Returns:
Values of observations.
"""
return self.main(inputs)
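# Shape note (illustrative, not from the original source): for a batch of
# concatenated [observation, action] vectors of shape [batch_size, input_dim],
# Discriminator(input_dim)(batch) returns unnormalized scores of shape
# [batch_size, 1].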
class GAIL(object):
"""Implementation of GAIL (https://arxiv.org/abs/1606.03476).
Instead of the original GAN, it uses WGAN (https://arxiv.org/pdf/1704.00028).
"""
def __init__(self, input_dim, subsampling_rate, lambd=10.0, gail_loss='airl'):
"""Initializes actor, critic, target networks and optimizers.
Args:
input_dim: size of the observation space.
subsampling_rate: subsampling rate that was used for expert trajectories.
lambd: gradient penalty coefficient for wgan.
gail_loss: gail loss to use.
"""
self.subsampling_rate = subsampling_rate
self.lambd = lambd
self.gail_loss = gail_loss
with tf.variable_scope('discriminator'):
self.disc_step = contrib_eager_python_tfe.Variable(
0, dtype=tf.int64, name='step')
self.discriminator = Discriminator(input_dim)
self.discriminator_optimizer = tf.train.AdamOptimizer()
self.discriminator_optimizer._create_slots(self.discriminator.variables) # pylint: disable=protected-access
def update(self, batch, expert_batch):
"""Updates the WGAN potential function or GAN discriminator.
Args:
batch: A batch from training policy.
expert_batch: A batch from the expert.
"""
obs = contrib_eager_python_tfe.Variable(
np.stack(batch.obs).astype('float32'))
expert_obs = contrib_eager_python_tfe.Variable(
np.stack(expert_batch.obs).astype('float32'))
expert_mask = contrib_eager_python_tfe.Variable(
np.stack(expert_batch.mask).astype('float32'))
    # Expert trajectories were subsampled, but absorbing states were not, so
    # the state statistics change and the weights must be adjusted accordingly.
expert_mask = tf.maximum(0, -expert_mask)
expert_weight = expert_mask / self.subsampling_rate + (1 - expert_mask)
action = contrib_eager_python_tfe.Variable(
np.stack(batch.action).astype('float32'))
expert_action = contrib_eager_python_tfe.Variable(
np.stack(expert_batch.action).astype('float32'))
inputs = tf.concat([obs, action], -1)
expert_inputs = tf.concat([expert_obs, expert_action], -1)
# Avoid using tensorflow random functions since it's impossible to get
# the state of the random number generator used by TensorFlow.
alpha = np.random.uniform(size=(inputs.get_shape()[0], 1))
alpha = contrib_eager_python_tfe.Variable(alpha.astype('float32'))
inter = alpha * inputs + (1 - alpha) * expert_inputs
with tf.GradientTape() as tape:
output = self.discriminator(inputs)
expert_output = self.discriminator(expert_inputs)
with contrib_summary.record_summaries_every_n_global_steps(
100, self.disc_step):
gan_loss = contrib_gan_python_losses_python_losses_impl.modified_discriminator_loss(
expert_output,
output,
label_smoothing=0.0,
real_weights=expert_weight)
contrib_summary.scalar(
'discriminator/expert_output',
tf.reduce_mean(expert_output),
step=self.disc_step)
contrib_summary.scalar(
'discriminator/policy_output',
tf.reduce_mean(output),
step=self.disc_step)
with tf.GradientTape() as tape2:
tape2.watch(inter)
output = self.discriminator(inter)
grad = tape2.gradient(output, [inter])[0]
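    # WGAN-GP style penalty: push the gradient norm of the discriminator at
    # points interpolated between policy and expert samples towards 1.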
grad_penalty = tf.reduce_mean(tf.pow(tf.norm(grad, axis=-1) - 1, 2))
loss = gan_loss + self.lambd * grad_penalty
with contrib_summary.record_summaries_every_n_global_steps(
100, self.disc_step):
contrib_summary.scalar(
'discriminator/grad_penalty', grad_penalty, step=self.disc_step)
with contrib_summary.record_summaries_every_n_global_steps(
100, self.disc_step):
contrib_summary.scalar(
'discriminator/loss', gan_loss, step=self.disc_step)
grads = tape.gradient(loss, self.discriminator.variables)
self.discriminator_optimizer.apply_gradients(
zip(grads, self.discriminator.variables), global_step=self.disc_step)
def get_reward(self, obs, action, next_obs): # pylint: disable=unused-argument
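    # With the 'airl' loss the raw discriminator output is used as the reward;
    # otherwise the classic GAIL reward -log(1 - sigmoid(D(s, a))) is returned.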
if self.gail_loss == 'airl':
inputs = tf.concat([obs, action], -1)
return self.discriminator(inputs)
else:
inputs = tf.concat([obs, action], -1)
return -tf.log(1 - tf.nn.sigmoid(self.discriminator(inputs)) + 1e-8)
@property
def variables(self):
"""Returns all variables including optimizer variables.
Returns:
A dictionary of all variables that are defined in the model.
variables.
"""
disc_vars = (
self.discriminator.variables + self.discriminator_optimizer.variables()
+ [self.disc_step])
return disc_vars
| 35.621053
| 115
| 0.701684
|
d6f31e0dff86b81e78b998ab299e55cde979f69f
| 5,503
|
py
|
Python
|
launch/all.launch.py
|
Valts-M/ros2_com
|
647bfb815595309ef469cbf81a1f7f1b9bb1fc3c
|
[
"Apache-2.0"
] | null | null | null |
launch/all.launch.py
|
Valts-M/ros2_com
|
647bfb815595309ef469cbf81a1f7f1b9bb1fc3c
|
[
"Apache-2.0"
] | null | null | null |
launch/all.launch.py
|
Valts-M/ros2_com
|
647bfb815595309ef469cbf81a1f7f1b9bb1fc3c
|
[
"Apache-2.0"
] | null | null | null |
import launch
from launch.substitutions import Command, LaunchConfiguration
from launch_ros.actions import LifecycleNode
from launch.actions import EmitEvent
from launch.actions import RegisterEventHandler
from launch_ros.events.lifecycle import ChangeState
from launch_ros.events.lifecycle import matches_node_name
from launch_ros.event_handlers import OnStateTransition
from launch.actions import LogInfo
from launch.events import matches_action
from launch.event_handlers.on_shutdown import OnShutdown
import launch_ros
import lifecycle_msgs.msg
import os
def generate_launch_description():
pkg_share = launch_ros.substitutions.FindPackageShare(package='ros2_com').find('ros2_com')
default_model_path = os.path.join(pkg_share, 'descriptions/columbus_description.urdf')
use_sim_time = LaunchConfiguration('use_sim_time')
robot_state_publisher_node = launch_ros.actions.Node(
package='robot_state_publisher',
executable='robot_state_publisher',
parameters=[{'robot_description': Command(['xacro ', LaunchConfiguration('model')]),
'use_sim_time': use_sim_time}]
)
joint_state_publisher_node = launch_ros.actions.Node(
package='joint_state_publisher',
executable='joint_state_publisher',
name='joint_state_publisher',
parameters=[{'use_sim_time': use_sim_time}],
)
clock_server = launch_ros.actions.Node(
package='ros2_com',
executable='clock_server',
name='clock_server'
)
map_saver_server = launch_ros.actions.Node(
package='ros2_com',
executable='map_saver',
name='map_saver_server'
)
robot_localization_node = launch_ros.actions.Node(
package='robot_localization',
executable='ekf_node',
name='ekf_filter_node',
output='screen',
parameters=[os.path.join(pkg_share, 'config/ekf.yaml'),
{'use_sim_time': use_sim_time}]
)
slam_toolbox_node = launch_ros.actions.Node(
package='slam_toolbox',
executable='async_slam_toolbox_node',
name='slam_toolbox',
parameters=[os.path.join(pkg_share, 'config/mapper_params_online_async.yaml'),
{'use_sim_time': use_sim_time}]
)
localization_node = launch_ros.actions.Node(
package='slam_toolbox',
executable='localization_slam_toolbox_node',
name='slam_toolbox',
output='screen',
parameters=[os.path.join(pkg_share, 'config/localization_params.yaml'),
{"use_sim_time" : use_sim_time}],
)
odom_publisher_node = launch_ros.actions.Node(
package='ros2_com',
executable='odom_publisher',
name='odom_publisher',
output='screen',
parameters=[{'use_sim_time': use_sim_time}],
)
ouster_node = LifecycleNode(package='ros2_ouster',
executable='ouster_driver',
name="ouster_driver",
output='screen',
emulate_tty=True,
parameters=[os.path.join(pkg_share, 'config/ouster_config.yaml'), {'use_sim_time': use_sim_time}],
arguments=['--ros-args', '--log-level', 'INFO'],
namespace='/',
)
configure_event = EmitEvent(
event=ChangeState(
lifecycle_node_matcher=matches_action(ouster_node),
transition_id=lifecycle_msgs.msg.Transition.TRANSITION_CONFIGURE,
)
)
activate_event = RegisterEventHandler(
OnStateTransition(
target_lifecycle_node=ouster_node, goal_state='inactive',
entities=[
LogInfo(
msg="[LifecycleLaunch] Ouster driver node is activating."),
EmitEvent(event=ChangeState(
lifecycle_node_matcher=matches_action(ouster_node),
transition_id=lifecycle_msgs.msg.Transition.TRANSITION_ACTIVATE,
)),
],
)
)
# TODO make lifecycle transition to shutdown before SIGINT
shutdown_event = RegisterEventHandler(
OnShutdown(
on_shutdown=[
EmitEvent(event=ChangeState(
lifecycle_node_matcher=matches_node_name(node_name='ouster_driver'),
transition_id=lifecycle_msgs.msg.Transition.TRANSITION_ACTIVE_SHUTDOWN,
)),
LogInfo(
msg="[LifecycleLaunch] Ouster driver node is exiting."),
],
)
)
pose_listener_node = launch_ros.actions.Node(
package='ros2_com',
executable='pose_listener',
name='pose_listener',
output='screen'
)
return launch.LaunchDescription([
launch.actions.DeclareLaunchArgument(name='model', default_value=default_model_path,
description='Absolute path to robot urdf file'),
launch.actions.DeclareLaunchArgument(name='use_sim_time', default_value='false',
description='Flag to enable use_sim_time'),
# map_saver_server,
# clock_server,
robot_state_publisher_node,
#slam_toolbox_node,
# localization_node,
pose_listener_node,
odom_publisher_node,
# ouster_node,
# activate_event,
# configure_event,
# shutdown_event
])
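# Illustrative invocation (assumes the workspace is built and sourced so the
# launch file is discoverable):
#   ros2 launch ros2_com all.launch.py use_sim_time:=false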
| 37.182432
| 130
| 0.631837
|
43b15c0118531b7cfffb8cd7efd0ed832365da63
| 233
|
py
|
Python
|
clash/shortest/short-clash11.py
|
a93-git/codingame-solutions
|
25df0f7824844646ceb0b6128274ff92f1084c8c
|
[
"MIT"
] | null | null | null |
clash/shortest/short-clash11.py
|
a93-git/codingame-solutions
|
25df0f7824844646ceb0b6128274ff92f1084c8c
|
[
"MIT"
] | null | null | null |
clash/shortest/short-clash11.py
|
a93-git/codingame-solutions
|
25df0f7824844646ceb0b6128274ff92f1084c8c
|
[
"MIT"
] | null | null | null |
""" Find the difference between sum of digits at odd and even pos"""
s=str
n,r,l,i,w = s(input()),range,len,int,sum
o = r(0, l(s(n)), 2)
e = r(1, l(s(n)), 2)
p = w([i(s(n)[x]) for x in o])
q = w([i(s(n)[x]) for x in e])
print(p - q)
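# Readable equivalent of the golfed code above (illustrative, same logic):
#   digits = input()
#   odd_pos = sum(int(d) for d in digits[0::2])   # 1st, 3rd, 5th, ... digit
#   even_pos = sum(int(d) for d in digits[1::2])  # 2nd, 4th, 6th, ... digit
#   print(odd_pos - even_pos)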
| 25.888889
| 68
| 0.540773
|
84a8ea727281d4b20222a47c47ec09c2b58a0705
| 881
|
py
|
Python
|
setup.py
|
GlenRice-NOAA/Sounding-Selection
|
165d3d3922cef4e216188bbb40b498e32dac9509
|
[
"CC0-1.0"
] | null | null | null |
setup.py
|
GlenRice-NOAA/Sounding-Selection
|
165d3d3922cef4e216188bbb40b498e32dac9509
|
[
"CC0-1.0"
] | null | null | null |
setup.py
|
GlenRice-NOAA/Sounding-Selection
|
165d3d3922cef4e216188bbb40b498e32dac9509
|
[
"CC0-1.0"
] | 1
|
2022-01-20T13:07:54.000Z
|
2022-01-20T13:07:54.000Z
|
from setuptools import setup
import pathlib
cwd = pathlib.Path(__file__).parent.resolve()
long_description = (cwd / 'README.md').read_text(encoding='utf-8')
setup(name='sounding_selection',
version='1.0.0',
description='Label-Based Method for Hydrographic Sounding Selection',
license='MIT',
long_description=long_description,
author='Noel Dyer',
package_dir={'': 'src'},
packages=['sounding_selection'],
install_requires=['triangle',
'numpy==1.21.5',
'shapely==1.8.0'],
python_requires='>=3.6, <4',
url='https://github.com/NoelDyer/Sounding-Selection',
long_description_content_type='text/markdown',
zip_safe=True,
entry_points={'console_scripts':
['sounding_selection=sounding_selection.main:main']}
)
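# Note (illustrative): installing this package (for example with `pip install .`)
# exposes the `sounding_selection` console script declared in entry_points above.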
| 36.708333
| 76
| 0.61294
|
2b6f5c99e3594baf1e7a6c1589e09ea3643e074c
| 133
|
py
|
Python
|
src/__init__.py
|
ardieb/TrumpTweetClassifier
|
6b0af5b1e7d213fe1acdc401c088be20e3225f5b
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
ardieb/TrumpTweetClassifier
|
6b0af5b1e7d213fe1acdc401c088be20e3225f5b
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
ardieb/TrumpTweetClassifier
|
6b0af5b1e7d213fe1acdc401c088be20e3225f5b
|
[
"MIT"
] | null | null | null |
__title__ = 'tweetokenize'
__version__ = '1.0'
__author__ = 'Arthur Burke'
__license__ = 'MIT'
from src.Vectorize import Vectorize
| 16.625
| 35
| 0.75188
|
5a30c6595f4ffad13978b40e08ce26f45bc21f38
| 2,384
|
py
|
Python
|
billing/integration.py
|
litchfield/merchant
|
e4fba8a88a326bbde39c26e937c17d5283817320
|
[
"BSD-3-Clause"
] | null | null | null |
billing/integration.py
|
litchfield/merchant
|
e4fba8a88a326bbde39c26e937c17d5283817320
|
[
"BSD-3-Clause"
] | null | null | null |
billing/integration.py
|
litchfield/merchant
|
e4fba8a88a326bbde39c26e937c17d5283817320
|
[
"BSD-3-Clause"
] | 1
|
2021-09-19T03:08:42.000Z
|
2021-09-19T03:08:42.000Z
|
from django.utils.importlib import import_module
from django.conf import settings
from django.conf.urls import patterns
class IntegrationModuleNotFound(Exception):
pass
class IntegrationNotConfigured(Exception):
pass
integration_cache = {}
class Integration(object):
"""Base Integration class that needs to be subclassed by
implementations"""
# The mode of the gateway. Looks into the settings else
# defaults to True
test_mode = getattr(settings, "MERCHANT_TEST_MODE", True)
# Name of the integration.
display_name = 'Base Integration'
# Template rendered by the templatetag 'billing'
template = ''
def __init__(self, options=None):
if not options:
options = {}
# The form fields that will be rendered in the template
self.fields = {}
self.fields.update(options)
def add_field(self, key, value):
self.fields[key] = value
def add_fields(self, params):
for (key, val) in params.iteritems():
self.add_field(key, val)
@property
def service_url(self):
# Modified by subclasses
raise NotImplementedError
def get_urls(self):
# Method must be subclassed
urlpatterns = patterns('')
return urlpatterns
@property
def urls(self):
return self.get_urls()
def get_integration(integration, *args, **kwargs):
"""Return a integration instance specified by `integration` name"""
klass = integration_cache.get(integration, None)
if not klass:
integration_filename = "%s_integration" % integration
integration_module = None
for app in settings.INSTALLED_APPS:
try:
integration_module = import_module(".integrations.%s" % integration_filename, package=app)
except ImportError:
pass
if not integration_module:
raise IntegrationModuleNotFound("Missing integration: %s" % (integration))
integration_class_name = "".join(integration_filename.title().split("_"))
try:
klass = getattr(integration_module, integration_class_name)
except AttributeError:
raise IntegrationNotConfigured("Missing %s class in the integration module." % integration_class_name)
integration_cache[integration] = klass
return klass(*args, **kwargs)
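# Illustrative usage (hypothetical integration name): an installed app shipping
# `integrations/stripe_example_integration.py` with a `StripeExampleIntegration`
# class would be resolved and instantiated via get_integration("stripe_example").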
| 29.8
| 114
| 0.665268
|
c716b170d08626ab5def7089005d9642aafac4c1
| 43,515
|
py
|
Python
|
bloop/models.py
|
monoflo/bloop
|
c476298e5a40decf9fdf2ed50df74be8f91fdffd
|
[
"MIT"
] | null | null | null |
bloop/models.py
|
monoflo/bloop
|
c476298e5a40decf9fdf2ed50df74be8f91fdffd
|
[
"MIT"
] | null | null | null |
bloop/models.py
|
monoflo/bloop
|
c476298e5a40decf9fdf2ed50df74be8f91fdffd
|
[
"MIT"
] | null | null | null |
import collections
import collections.abc
import inspect
import logging
from copy import copy as copyfn
from typing import Callable, Dict, Optional, Set
from . import util
from .conditions import ComparisonMixin
from .exceptions import InvalidModel, InvalidStream
from .signals import model_created, object_modified
from .types import DateTime, Number, Type
__all__ = ["BaseModel", "Column", "GlobalSecondaryIndex", "LocalSecondaryIndex"]
logger = logging.getLogger("bloop.models")
missing = util.missing
class IMeta:
"""This class exists to provide autocomplete hints for computed variables on a model's Meta object.
Subclassing IMeta is **OPTIONAL** and rarely necessary; it is primarily available for users writing generic code
    over a class of models, e.g. transforms on all columns of a model or a Marshmallow adapter.
.. code-block:: python
import bloop.models
class User(BaseModel):
id = Column(String, hash_key=True)
email = Column(String, dynamo_name="e")
class Meta(bloop.models.IMeta):
read_units = 500
User.Meta.co # Pycharm renders:
# +---------------------------+
# | User.Meta.columns |
# | User.Meta.columns_by_name |
# +---------------------------+
"""
abstract: bool
table_name: str
read_units: Optional[int]
write_units: Optional[int]
stream: Optional[Dict]
ttl: Optional[Dict]
encryption: Optional[Dict]
backups: Optional[Dict]
model: "BaseModel"
hash_key: Optional["Column"]
range_key: Optional["Column"]
keys: Set["Column"]
columns: Set["Column"]
columns_by_name: Dict[str, "Column"]
indexes: Set["Index"]
gsis: Set["GlobalSecondaryIndex"]
lsis: Set["LocalSecondaryIndex"]
init: Callable[[], "BaseModel"]
projection: Dict
class BaseModel:
"""Abstract base that all models derive from.
Provides a basic ``__init__`` method that takes ``**kwargs`` whose
    keys are column names:
.. code-block:: python
class URL(BaseModel):
id = Column(UUID, hash_key=True)
ip = Column(IPv6)
name = Column(String)
url = URL(id=uuid.uuid4(), name="google")
By default, the ``__init__`` method is not called when new instances are
required, for example when iterating results from Query, Scan or a Stream.
"""
class Meta(IMeta):
abstract = True
def __init__(self, **attrs):
# Only set values from **attrs if there's a
# corresponding `name` for a column in the model
for column in self.Meta.columns:
value = attrs.get(column.name, missing)
if value is missing:
value = column.default()
if value is not missing:
setattr(self, column.name, value)
def __init_subclass__(cls: type, **kwargs):
ensure_hash(cls)
meta = initialize_meta(cls)
# before we start binding, we should ensure that no combination of parent classes
# will cause conflicts. For example:
# class C(A, B) where
# A has a column named "foo" and dynamo_name "ddb"
# B has a column named "bar" and dynamo_name "ddb"
# both A and B are valid mixins, but C must fail because there isn't a 1:1 binding to the "ddb" column.
#
# TODO | for now, we'll assume that the class being defined is special, and can replace columns with the
# TODO | same dynamo_name. In the example above, that would mean C has a column named "baz" and dynamo_name
# TODO | "ddb" which would prevent the parent columns "foo" and "bar" from binding to the child class.
modeled_attrs = set((attr for (_, attr) in inspect.getmembers(cls, lambda x: isinstance(x, (Column, Index)))))
local_attrs = {
name: value
for name, value in cls.__dict__.items()
if isinstance(value, (Column, Index))
}
derived_attrs = modeled_attrs - set(local_attrs.values())
# 0.0 Pre-validation for collisions in derived columns/indexes
dynamo_names = [x.dynamo_name for x in derived_attrs]
collisions = [name for name, count in collections.Counter(dynamo_names).items() if count > 1]
if collisions:
collisions.sort()
raise InvalidModel(
f"The model {cls.__name__} subclasses one or more models with conflicting "
f"column or index definitions for the following values of dynamo_name: {collisions}")
derived_hash_keys = set((x.name for x in derived_attrs if isinstance(x, Column) and x.hash_key))
if len(derived_hash_keys) > 1:
derived_hash_keys = sorted(derived_hash_keys)
raise InvalidModel(
f"The model {cls.__name__} subclasses one or more models that declare multiple "
f"columns as the hash key: {derived_hash_keys}")
derived_range_keys = set((x.name for x in derived_attrs if isinstance(x, Column) and x.range_key))
if len(derived_range_keys) > 1:
derived_range_keys = sorted(derived_range_keys)
raise InvalidModel(
f"The model {cls.__name__} subclasses one or more models that declare multiple "
f"columns as the range key: {derived_range_keys}")
# 0.1 Pre-validation for collisions in local columns/indexes
dynamo_names = [x.dynamo_name for x in local_attrs.values()]
collisions = [name for name, count in collections.Counter(dynamo_names).items() if count > 1]
if collisions:
collisions.sort()
raise InvalidModel(
f"The model {cls.__name__} contains conflicting column or index definitions for the "
f"following values of dynamo_name: {collisions}")
local_hash_keys = [x.name for x in local_attrs.values() if isinstance(x, Column) and x.hash_key]
if len(local_hash_keys) > 1:
local_hash_keys = sorted(local_hash_keys)
raise InvalidModel(
f"The model {cls.__name__} defines multiple columns as hash columns: {local_hash_keys}")
local_range_keys = [x.name for x in local_attrs.values() if isinstance(x, Column) and x.range_key]
if len(local_range_keys) > 1:
local_range_keys = sorted(local_range_keys)
raise InvalidModel(
f"The model {cls.__name__} defines multiple columns as range columns: {local_range_keys}")
# 1.0 Bind derived columns so they can be referenced by derived indexes
for attr in derived_attrs:
if isinstance(attr, Column):
bind_column(cls, attr.name, attr, copy=True)
# 1.1 Bind derived indexes
for attr in derived_attrs:
if isinstance(attr, Index):
bind_index(cls, attr.name, attr, copy=True)
# 1.2 Bind local columns, allowing them to overwrite existing columns
for name, attr in local_attrs.items():
if isinstance(attr, Column):
bind_column(cls, name, attr, force=True)
# 1.3 Bind local indexes, allowing them to overwrite existing indexes
for name, attr in local_attrs.items():
if isinstance(attr, Index):
bind_index(cls, name, attr, force=True)
# 2.0 Ensure concrete models are valid
# Currently, this just checks that a hash key is defined
if not meta.abstract and not meta.hash_key:
raise InvalidModel(f"{meta.model.__name__!r} has no hash key.")
validate_stream(meta)
validate_ttl(meta)
validate_encryption(meta)
validate_backups(meta)
# 3.0 Fire model_created for customizing the class after creation
model_created.send(None, model=cls)
@classmethod
def _load(cls, attrs, *, context, **kwargs):
""" dict (dynamo name) -> obj """
return unpack_from_dynamodb(
model=cls,
attrs=attrs or {},
expected=cls.Meta.columns,
context=context, **kwargs)
@classmethod
def _dump(cls, obj, *, context, **kwargs):
""" obj -> dict """
if obj is None:
return None
dump = context["engine"]._dump
filtered = filter(
lambda item: item[1] is not None,
((
column.dynamo_name,
dump(column.typedef, getattr(obj, column.name, None), context=context, **kwargs)
) for column in cls.Meta.columns))
return dict(filtered) or None
def __repr__(self):
attrs = ", ".join("{}={!r}".format(*item) for item in loaded_columns(self))
return f"{self.__class__.__name__}({attrs})"
class Index:
"""Abstract base class for GSIs and LSIs.
An index must be bound to a model by calling :func:`bind_index(meta, model) <bloop.models.bind_index>`,
which lets the index compute projected columns, validate hash and range keys, etc.
.. seealso::
:class:`~bloop.models.GlobalSecondaryIndex` and :class:`~bloop.models.LocalSecondaryIndex`
:param projection: Either "keys", "all", or a list of column name or objects.
Included columns will be projected into the index. Key columns are always included.
:param hash_key: The column that the index can be queried against. Always the table hash_key for LSIs.
:param range_key: The column that the index can be sorted on. Always required for an LSI. Default is None.
    :param str dynamo_name: *(Optional)* The index's name in DynamoDB. Defaults to the index’s name in the model.
"""
def __init__(self, *, projection, hash_key=None, range_key=None, dynamo_name=None, **kwargs):
self.model = None
if not isinstance(hash_key, (str, Column, type(None))):
raise InvalidModel(f"Index hash_key must be a str or Column, but was {type(hash_key)!r}")
if not isinstance(range_key, (str, Column, type(None))):
raise InvalidModel(f"Index range_key must be a str or Column, but was {type(range_key)!r}")
self._hash_key = hash_key
self._range_key = range_key
self._name = None
self._dynamo_name = dynamo_name
self.projection = validate_projection(projection)
def __copy__(self):
"""
Create a shallow copy of this Index. Primarily used when initializing models that subclass other abstract
models or mixins (baseless classes that contain Columns and Indexes). You can override this method to
change how derived models are created:
.. code-block:: python
import copy
class MyIndex(Index):
def __copy__(self):
new = super().__copy__()
new.derived = True
return new
index = MyIndex(projection="keys", hash_key="some_column")
same = copy.copy(index)
assert same.derived # True
:return: A shallow copy of this Index, with the ``model`` and ``_name`` attributes unset, and the
computed projection invalidated.
"""
cls = self.__class__
obj = cls.__new__(cls)
obj.__dict__.update(self.__dict__)
obj.model = None
obj._name = None
obj.projection = {
"mode": self.projection["mode"],
"included": None,
"available": None,
"strict": self.projection["strict"]
}
return obj
def __set_name__(self, owner, name):
self._name = name
def __repr__(self):
if isinstance(self, LocalSecondaryIndex):
cls_name = "LSI"
elif isinstance(self, GlobalSecondaryIndex):
cls_name = "GSI"
else:
cls_name = self.__class__.__name__
# <GSI[User.by_email=all]>
# <GSI[User.by_email=keys]>
# <LSI[User.by_email=include]>
return f"<{cls_name}[{self.model.__name__}.{self.name}={self.projection['mode']}]>"
@property
def name(self):
"""Name of the model's attr that references self"""
return self._name
@property
def dynamo_name(self):
if self._dynamo_name is None:
return self.name
return self._dynamo_name
@property
def hash_key(self):
if isinstance(self._hash_key, Column):
# replacement is late-binding to handle direct references in models
# before BaseModel.__init_subclass__ can name each column
self._hash_key = self._hash_key.name
return self.model.Meta.columns_by_name[self._hash_key]
@property
def range_key(self):
if self._range_key is None:
return None
if isinstance(self._range_key, Column):
# replacement is late-binding to handle direct references in models
# before BaseModel.__init_subclass__ can name each column
self._range_key = self._range_key.name
return self.model.Meta.columns_by_name[self._range_key]
@property
def keys(self):
keys = {self.hash_key}
if self.range_key:
keys.add(self.range_key)
return keys
def __set__(self, obj, value):
raise AttributeError(f"{self.model.__name__}.{self.name} is a {self.__class__.__name__}")
def __get__(self, obj, type=None):
if obj is None:
return self
raise AttributeError(f"{self.model.__name__}.{self.name} is a {self.__class__.__name__}")
def __delete__(self, obj):
raise AttributeError(f"{self.model.__name__}.{self.name} is a {self.__class__.__name__}")
class GlobalSecondaryIndex(Index):
"""See `GlobalSecondaryIndex`_ in the DynamoDB Developer Guide for details.
:param projection: Either "keys", "all", or a list of column name or objects.
Included columns will be projected into the index. Key columns are always included.
:param hash_key: The column that the index can be queried against.
:param range_key: *(Optional)* The column that the index can be sorted on. Default is None.
:param int read_units: *(Optional)* Provisioned read units for the index. Default is None.
When no value is provided and the index does not exist, it will be created with 1 read unit. If the index
already exists, it will use the actual index's read units.
:param int write_units: *(Optional)* Provisioned write units for the index. Default is None.
When no value is provided and the index does not exist, it will be created with 1 write unit. If the index
already exists, it will use the actual index's write units.
    :param str dynamo_name: *(Optional)* The index's name in DynamoDB. Defaults to the index’s name in the model.
.. _GlobalSecondaryIndex: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GSI.html
"""
def __init__(
self, *, projection,
hash_key, range_key=None,
read_units=None, write_units=None,
dynamo_name=None, **kwargs):
super().__init__(
hash_key=hash_key, range_key=range_key,
dynamo_name=dynamo_name, projection=projection, **kwargs)
self.write_units = write_units
self.read_units = read_units
class LocalSecondaryIndex(Index):
"""See `LocalSecondaryIndex`_ in the DynamoDB Developer Guide for details.
Unlike :class:`~bloop.models.GlobalSecondaryIndex` each LSI shares its throughput with the table
and their hash key is always the table hash key.
:param projection: Either "keys", "all", or a list of column name or objects.
Included columns will be projected into the index. Key columns are always included.
:param range_key: The column that the index can be sorted against.
    :param str dynamo_name: *(Optional)* The index's name in DynamoDB. Defaults to the index’s name in the model.
:param bool strict: *(Optional)* Restricts queries and scans on the LSI to columns in the projection.
When False, DynamoDB may silently incur additional reads to load results. You should not disable this
unless you have an explicit need. Default is True.
.. _LocalSecondaryIndex: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LSI.html
"""
def __init__(self, *, projection, range_key, dynamo_name=None, strict=True, **kwargs):
# Hash key MUST be the table hash; do not specify
if "hash_key" in kwargs:
raise InvalidModel("An LSI shares its hash key with the Model.")
if ("write_units" in kwargs) or ("read_units" in kwargs):
raise InvalidModel("An LSI shares its provisioned throughput with the Model.")
super().__init__(range_key=range_key, dynamo_name=dynamo_name, projection=projection, **kwargs)
self.projection["strict"] = strict
@property
def hash_key(self):
return self.model.Meta.hash_key
@property
def read_units(self):
return self.model.Meta.read_units
@read_units.setter
def read_units(self, value):
self.model.Meta.read_units = value
@property
def write_units(self):
return self.model.Meta.write_units
@write_units.setter
def write_units(self, value):
self.model.Meta.write_units = value
class Column(ComparisonMixin):
model: BaseModel
"""Represents a single attribute in DynamoDB.
:param typedef: The type of this attribute. Can be either a :class:`~bloop.types.Type` or
an instance thereof. If a type class is provided, the column will call the constructor without arguments
to create an instance. For example, ``Column(Integer)`` and ``Column(Integer())`` are equivalent.
:param bool hash_key: *(Optional)* True if this is the model's hash key.
A model must have exactly one Column with ``hash_key=True``. Default is False.
:param bool range_key: *(Optional)* True if this is the model's range key.
A model can have at most one Column with
``range_key=True``. Default is False.
    :param str dynamo_name: *(Optional)* The column's name in DynamoDB. Defaults to the column’s name in the model.
"""
def __init__(self, typedef, hash_key=False, range_key=False, dynamo_name=None, default=missing):
self.hash_key: bool = hash_key
self.range_key: bool = range_key
self._name: str = None
self._dynamo_name: str = dynamo_name
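        # Non-callable defaults are wrapped in a zero-argument lambda so that
        # BaseModel.__init__ can always call column.default() to obtain a value.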
if not callable(default):
self.default = lambda: default
else:
self.default = default
if subclassof(typedef, Type):
typedef = typedef()
if instanceof(typedef, Type):
self.typedef = typedef
else:
raise TypeError(f"Expected {typedef} to be instance or subclass of Type")
super().__init__()
def __copy__(self):
"""
Create a shallow copy of this Column. Primarily used when initializing models that subclass other abstract
models or mixins (baseless classes that contain Columns and Indexes). You can override this method to
change how derived models are created:
.. code-block:: python
import copy
class MyColumn(Column):
def __copy__(self):
new = super().__copy__()
new.derived = True
return new
column = MyColumn(Integer)
same = copy.copy(column)
assert same.derived # True
:return: A shallow copy of this Column, with the ``model`` and ``_name`` attributes unset.
"""
cls = self.__class__
obj = cls.__new__(cls)
obj.__dict__.update(self.__dict__)
obj.model = None
obj._name = None
return obj
def __set_name__(self, owner, name):
self._name = name
__hash__ = object.__hash__
def __set__(self, obj, value):
if self._name is None:
raise AttributeError("Can't set field without binding to model")
obj.__dict__[self._name] = value
# Notify the tracking engine that this value was intentionally mutated
object_modified.send(self, obj=obj, column=self, value=value)
def __get__(self, obj, type=None):
if obj is None:
return self
if self._name is None:
raise AttributeError("Can't get field without binding to model")
try:
return obj.__dict__[self._name]
except KeyError:
raise AttributeError(f"'{obj.__class__}' has no attribute '{self._name}'")
def __delete__(self, obj):
try:
if self._name is None:
raise AttributeError("Can't delete field without binding to model")
try:
del obj.__dict__[self._name]
except KeyError:
raise AttributeError(f"'{obj.__class__}' has no attribute '{self._name}'")
finally:
# Unlike set, we always want to mark on delete. If we didn't, and the column wasn't loaded
# (say from a query) then the intention "ensure this doesn't have a value" wouldn't be captured.
object_modified.send(self, obj=obj, column=self, value=None)
def __repr__(self):
if self.hash_key:
extra = "=hash"
elif self.range_key:
extra = "=range"
else:
extra = ""
# <Column[Pin.url]>
# <Column[User.id=hash]>
# <Column[File.fragment=range]>
return f"<{self.__class__.__name__}[{self.model.__name__}.{self.name}{extra}]>"
@property
def name(self):
"""Name of the model's attr that references self"""
return self._name
@property
def dynamo_name(self):
if self._dynamo_name is None:
return self.name
return self._dynamo_name
def subclassof(obj, classinfo):
"""Wrap issubclass to only return True/False"""
try:
return issubclass(obj, classinfo)
except TypeError:
return False
def instanceof(obj, classinfo):
"""Wrap isinstance to only return True/False"""
try:
return isinstance(obj, classinfo)
except TypeError: # pragma: no cover
# No coverage since we never call this without a class,
# type, or tuple of classes, types, or such tuples.
return False
def loaded_columns(obj: BaseModel):
"""Yields each (name, value) tuple for all columns in an object that aren't missing"""
for column in sorted(obj.Meta.columns, key=lambda c: c.name):
value = getattr(obj, column.name, missing)
if value is not missing:
yield column.name, value
def unpack_from_dynamodb(*, attrs, expected, model=None, obj=None, engine=None, context=None, **kwargs):
"""Push values by dynamo_name into an object"""
context = context or {"engine": engine}
engine = engine or context.get("engine", None)
if not engine:
raise ValueError("You must provide engine or a context with an engine.")
if model is None and obj is None:
raise ValueError("You must provide a model or obj to unpack.")
if model is not None and obj is not None:
raise ValueError("Only specify model or obj.")
if model:
obj = model.Meta.init()
for column in expected:
value = attrs.get(column.dynamo_name, None)
value = engine._load(column.typedef, value, context=context, **kwargs)
setattr(obj, column.name, value)
return obj
def validate_projection(projection):
validated_projection = {
"mode": None,
"included": None,
"available": None,
"strict": True
}
# String check first since it is also an Iterable.
# Without this, the following will make "unknown" a list
if isinstance(projection, str):
if projection not in ("keys", "all"):
raise InvalidModel(f"{projection!r} is not a valid Index projection.")
validated_projection["mode"] = projection
elif isinstance(projection, collections.abc.Iterable):
projection = list(projection)
# These checks aren't done together; that would allow a mix
# of column instances and column names. There aren't any cases
# where a mix is required, over picking a style. Much more likely,
# the user is trying to do something odd and doesn't understand what
# the index projection means.
if (
all(isinstance(p, str) for p in projection) or
all(isinstance(p, Column) for p in projection)):
validated_projection["mode"] = "include"
validated_projection["included"] = projection
else:
raise InvalidModel("Index projection must be a list of strings or Columns to select specific Columns.")
else:
raise InvalidModel("Index projection must be 'all', 'keys', or a list of Columns or Column names.")
return validated_projection
def validate_stream(meta):
stream = meta.stream
if stream is None:
return
if not isinstance(stream, collections.abc.MutableMapping):
raise InvalidStream("Stream must be None or a dict.")
if "include" not in stream:
raise InvalidStream("Specify what the stream will return with the 'include' key.")
include = stream["include"] = set(stream["include"])
# []
if not include:
raise InvalidStream("Must include at least one of 'keys', 'old', or 'new'.")
# ["what is this", "keys"]
for value in include:
if value not in {"new", "keys", "old"}:
raise InvalidStream("Streams can only contain 'keys', 'old', and/or 'new'.")
# ["keys", "old"]
if include == {"new", "keys"} or include == {"old", "keys"}:
raise InvalidStream("The option 'keys' cannot be used with either 'old' or 'new'.")
stream.setdefault("arn", None)
def validate_encryption(meta):
encryption = meta.encryption
if encryption is None:
return
if not isinstance(encryption, collections.abc.MutableMapping):
raise InvalidModel("Encryption must be None or a dict.")
if "enabled" not in encryption:
raise InvalidModel("Encryption must specify whether it is enabled with the 'enabled' key.")
def validate_backups(meta):
backups = meta.backups
if backups is None:
return
if not isinstance(backups, collections.abc.MutableMapping):
raise InvalidModel("Backups must be None or a dict.")
if "enabled" not in backups:
raise InvalidModel("Backups must specify whether it is enabled with the 'enabled' key.")
def validate_ttl(meta):
ttl = meta.ttl
if ttl is None:
return
if not isinstance(ttl, collections.abc.MutableMapping):
raise InvalidModel("TTL must be None or a dict.")
if "column" not in ttl:
raise InvalidModel("TTL must specify the column to use with the 'column' key.")
ttl_column = ttl["column"]
if isinstance(ttl_column, Column):
# late-bind to column by name in case it was re-bound since declaration
ttl["column"] = meta.columns_by_name[ttl_column.name]
elif isinstance(ttl_column, str):
ttl["column"] = meta.columns_by_name[ttl_column]
else:
raise InvalidModel("TTL column must be a column name or column instance.")
typedef = ttl["column"].typedef
if typedef.backing_type != Number.backing_type:
# special case this check for common confusion between DateTime and Timestamp
if isinstance(typedef, DateTime):
raise InvalidModel(
"TTL column must be a unix timestamp but was a bloop.DateTime instead. "
"Did you mean to use bloop.Timestamp?")
else:
raise InvalidModel(
"TTL column must be a unix timestamp with backing_type 'N' but was "
f"{typedef.backing_type!r} instead.")
ttl.setdefault("enabled", "disabled")
def unbound_repr(obj):
class UNBOUND:
pass
original_model = getattr(obj, "model", missing)
obj.model = UNBOUND
r = repr(obj)
if original_model is missing:
delattr(obj, "model")
else:
setattr(obj, "model", original_model)
return r
def setdefault(obj, field, default):
"""Set an object's field to default if it doesn't have a value"""
setattr(obj, field, getattr(obj, field, default))
def ensure_hash(cls) -> None:
if getattr(cls, "__hash__", None) is not None:
return
logger.info(f"searching for nearest __hash__ impl in {cls.__name__}.__mro__")
hash_fn = object.__hash__
for base in cls.__mro__: # pragma: no branch (because __mro__ will never be an empty list)
hash_fn = getattr(base, "__hash__")
if hash_fn:
break
cls.__hash__ = hash_fn
def initialize_meta(cls: type):
meta = getattr(cls, "Meta", missing)
for base in cls.__mro__:
if base is cls:
continue
parent_meta = getattr(base, "Meta", None)
if meta is parent_meta:
meta = missing
break
if meta is missing:
class Meta(IMeta):
pass
meta = cls.Meta = Meta
meta.model = cls
setdefault(meta, "init", lambda: cls.__new__(cls))
setdefault(meta, "abstract", False)
setdefault(meta, "table_name", cls.__name__)
setdefault(meta, "write_units", None)
setdefault(meta, "read_units", None)
setdefault(meta, "stream", None)
setdefault(meta, "ttl", None)
setdefault(meta, "encryption", None)
setdefault(meta, "backups", None)
setdefault(meta, "hash_key", None)
setdefault(meta, "range_key", None)
setdefault(meta, "keys", set())
setdefault(meta, "columns", set())
setdefault(meta, "columns_by_name", dict())
setdefault(meta, "indexes", set())
setdefault(meta, "gsis", set())
setdefault(meta, "lsis", set())
# API consistency with an Index, so (index or model.Meta) can be
# used interchangeably to get the available columns from that
# object.
setdefault(meta, "projection", {
"mode": "all",
"included": meta.columns,
"available": meta.columns,
"strict": True
})
return meta
def bind_column(model, name, column, force=False, recursive=False, copy=False) -> Column:
"""Bind a column to the model with the given name.
This method is primarily used during BaseModel.__init_subclass__, although it can be used to easily
attach a new column to an existing model:
.. code-block:: python
import bloop.models
class User(BaseModel):
id = Column(String, hash_key=True)
email = Column(String, dynamo_name="e")
bound = bloop.models.bind_column(User, "email", email)
assert bound is email
# rebind with force, and use a copy
bound = bloop.models.bind_column(User, "email", email, force=True, copy=True)
assert bound is not email
If an existing index refers to this column, it will be updated to point to the new column
using :meth:`~bloop.models.refresh_index`, including recalculating the index projection.
Meta attributes including ``Meta.columns``, ``Meta.hash_key``, etc. will be updated if necessary.
If ``name`` or the column's ``dynamo_name`` conflicts with an existing column or index on the model, raises
:exc:`~bloop.exceptions.InvalidModel` unless ``force`` is True. If ``recursive`` is ``True`` and there are
existing subclasses of ``model``, a copy of the column will attempt to bind to each subclass. The recursive
calls will not force the bind, and will always use a new copy. If ``copy`` is ``True`` then a copy of the
provided column is used. This uses a shallow copy via :meth:`~bloop.models.Column.__copy__`.
:param model:
The model to bind the column to.
:param name:
The name to bind the column as. In effect, used for ``setattr(model, name, column)``
:param column:
The column to bind to the model.
:param force:
Unbind existing columns or indexes with the same name or dynamo_name. Default is False.
:param recursive:
Bind to each subclass of this model. Default is False.
:param copy:
Use a copy of the column instead of the column directly. Default is False.
:return:
The bound column. This is a new column when ``copy`` is True, otherwise the input column.
"""
if not subclassof(model, BaseModel):
raise InvalidModel(f"{model} is not a subclass of BaseModel")
meta = model.Meta
if copy:
column = copyfn(column)
# TODO elif column.model is not None: logger.warning(f"Trying to rebind column bound to {column.model}")
column._name = name
safe_repr = unbound_repr(column)
# Guard against name, dynamo_name collisions; if force=True, unbind any matches
same_dynamo_name = (
util.index(meta.columns, "dynamo_name").get(column.dynamo_name) or
util.index(meta.indexes, "dynamo_name").get(column.dynamo_name)
)
same_name = (
meta.columns_by_name.get(column.name) or
util.index(meta.indexes, "name").get(column.name)
)
if column.hash_key and column.range_key:
raise InvalidModel(f"Tried to bind {safe_repr} as both a hash and range key.")
if force:
if same_name:
unbind(meta, name=column.name)
if same_dynamo_name:
unbind(meta, dynamo_name=column.dynamo_name)
else:
if same_name:
raise InvalidModel(
f"The column {safe_repr} has the same name as an existing column "
f"or index {same_name}. Did you mean to bind with force=True?")
if same_dynamo_name:
raise InvalidModel(
f"The column {safe_repr} has the same dynamo_name as an existing "
f"column or index {same_name}. Did you mean to bind with force=True?")
if column.hash_key and meta.hash_key:
raise InvalidModel(
f"Tried to bind {safe_repr} but {meta.model} "
f"already has a different hash_key: {meta.hash_key}")
if column.range_key and meta.range_key:
raise InvalidModel(
f"Tried to bind {safe_repr} but {meta.model} "
f"already has a different range_key: {meta.range_key}")
# success!
# --------------------------------
column.model = meta.model
meta.columns.add(column)
meta.columns_by_name[name] = column
setattr(meta.model, name, column)
if column.hash_key:
meta.hash_key = column
meta.keys.add(column)
if column.range_key:
meta.range_key = column
meta.keys.add(column)
try:
for index in meta.indexes:
refresh_index(meta, index)
except KeyError as e:
raise InvalidModel(
f"Binding column {column} removed a required column for index {unbound_repr(index)}") from e
if recursive:
for subclass in util.walk_subclasses(meta.model):
try:
bind_column(subclass, name, column, force=False, recursive=False, copy=True)
except InvalidModel:
pass
return column
def bind_index(model, name, index, force=False, recursive=True, copy=False) -> Index:
"""Bind an index to the model with the given name.
This method is primarily used during BaseModel.__init_subclass__, although it can be used to easily
attach a new index to an existing model:
.. code-block:: python
import bloop.models
class User(BaseModel):
id = Column(String, hash_key=True)
email = Column(String, dynamo_name="e")
by_email = GlobalSecondaryIndex(projection="keys", hash_key="email")
bound = bloop.models.bind_index(User, "by_email", by_email)
assert bound is by_email
# rebind with force, and use a copy
bound = bloop.models.bind_index(User, "by_email", by_email, force=True, copy=True)
assert bound is not by_email
If ``name`` or the index's ``dynamo_name`` conflicts with an existing column or index on the model, raises
:exc:`~bloop.exceptions.InvalidModel` unless ``force`` is True. If ``recursive`` is ``True`` and there are
existing subclasses of ``model``, a copy of the index will attempt to bind to each subclass. The recursive
calls will not force the bind, and will always use a new copy. If ``copy`` is ``True`` then a copy of the
provided index is used. This uses a shallow copy via :meth:`~bloop.models.Index.__copy__`.
:param model:
The model to bind the index to.
:param name:
The name to bind the index as. In effect, used for ``setattr(model, name, index)``
:param index:
The index to bind to the model.
:param force:
Unbind existing columns or indexes with the same name or dynamo_name. Default is False.
:param recursive:
Bind to each subclass of this model. Default is False.
:param copy:
Use a copy of the index instead of the index directly. Default is False.
:return:
The bound index. This is a new column when ``copy`` is True, otherwise the input index.
"""
if not subclassof(model, BaseModel):
raise InvalidModel(f"{model} is not a subclass of BaseModel")
meta = model.Meta
if copy:
index = copyfn(index)
# TODO elif index.model is not None: logger.warning(f"Trying to rebind index bound to {index.model}")
index._name = name
safe_repr = unbound_repr(index)
# Guard against name, dynamo_name collisions; if force=True, unbind any matches
same_dynamo_name = (
util.index(meta.columns, "dynamo_name").get(index.dynamo_name) or
util.index(meta.indexes, "dynamo_name").get(index.dynamo_name)
)
same_name = (
meta.columns_by_name.get(index.name) or
util.index(meta.indexes, "name").get(index.name)
)
if isinstance(index, LocalSecondaryIndex) and not meta.range_key:
raise InvalidModel("An LSI requires the Model to have a range key.")
if force:
if same_name:
unbind(meta, name=index.name)
if same_dynamo_name:
unbind(meta, dynamo_name=index.dynamo_name)
else:
if same_name:
raise InvalidModel(
f"The index {safe_repr} has the same name as an existing index "
f"or column {same_name}. Did you mean to bind with force=True?")
if same_dynamo_name:
raise InvalidModel(
f"The index {safe_repr} has the same dynamo_name as an existing "
f"index or column {same_name}. Did you mean to bind with force=True?")
# success!
# --------------------------------
index.model = meta.model
meta.indexes.add(index)
setattr(meta.model, name, index)
if isinstance(index, LocalSecondaryIndex):
meta.lsis.add(index)
if isinstance(index, GlobalSecondaryIndex):
meta.gsis.add(index)
try:
refresh_index(meta, index)
except KeyError as e:
raise InvalidModel("Index expected a hash or range key that does not exist") from e
if recursive:
for subclass in util.walk_subclasses(meta.model):
try:
bind_index(subclass, name, index, force=False, recursive=False, copy=True)
except InvalidModel:
pass
return index
def refresh_index(meta, index) -> None:
"""Recalculate the projection, hash_key, and range_key for the given index.
:param meta: model.Meta to find columns by name
:param index: The index to refresh
"""
# All projections include model + index keys
projection_keys = set.union(meta.keys, index.keys)
proj = index.projection
mode = proj["mode"]
if mode == "keys":
proj["included"] = projection_keys
elif mode == "all":
proj["included"] = meta.columns
elif mode == "include": # pragma: no branch
if all(isinstance(p, str) for p in proj["included"]):
proj["included"] = set(meta.columns_by_name[n] for n in proj["included"])
else:
proj["included"] = set(proj["included"])
proj["included"].update(projection_keys)
if proj["strict"]:
proj["available"] = proj["included"]
else:
proj["available"] = meta.columns
def unbind(meta, name=None, dynamo_name=None) -> None:
"""Unconditionally remove any columns or indexes bound to the given name or dynamo_name.
.. code-block:: python
import bloop.models
class User(BaseModel):
id = Column(String, hash_key=True)
email = Column(String, dynamo_name="e")
by_email = GlobalSecondaryIndex(projection="keys", hash_key=email)
for dynamo_name in ("id", "e", "by_email"):
bloop.models.unbind(User.Meta, dynamo_name=dynamo_name)
assert not User.Meta.columns
assert not User.Meta.indexes
assert not User.Meta.keys
.. warning::
This method does not pre- or post- validate the model with the requested changes. You are responsible
for ensuring the model still has a hash key, that required columns exist for each index, etc.
:param meta: model.Meta to remove the columns or indexes from
:param name: column or index name to unbind by. Default is None.
:param dynamo_name: column or index name to unbind by. Default is None.
"""
if name is not None:
columns = {x for x in meta.columns if x.name == name}
indexes = {x for x in meta.indexes if x.name == name}
elif dynamo_name is not None:
columns = {x for x in meta.columns if x.dynamo_name == dynamo_name}
indexes = {x for x in meta.indexes if x.dynamo_name == dynamo_name}
else:
raise RuntimeError("Must provide name= or dynamo_name= to unbind from meta")
# Nothing in bloop should allow name or dynamo_name
# collisions to exist, so this is either a bug or
# the user manually hacked up meta.
assert len(columns) <= 1
assert len(indexes) <= 1
assert not (columns and indexes)
if columns:
[column] = columns
meta.columns.remove(column)
# If these don't line up, there's likely a bug in bloop
# or the user manually hacked up columns_by_name
expect_same = meta.columns_by_name[column.name]
assert expect_same is column
meta.columns_by_name.pop(column.name)
if column in meta.keys:
meta.keys.remove(column)
if meta.hash_key is column:
meta.hash_key = None
if meta.range_key is column:
meta.range_key = None
delattr(meta.model, column.name)
if indexes:
[index] = indexes
meta.indexes.remove(index)
if index in meta.gsis:
meta.gsis.remove(index)
if index in meta.lsis:
meta.lsis.remove(index)
delattr(meta.model, index.name)
# required to bootstrap BaseModel.__init_subclass__
initialize_meta(BaseModel)
| 38.305458
| 118
| 0.634862
|
bff5f945a6f407e41da9b8e3d824685f935054e4
| 10,019
|
py
|
Python
|
tests/test_params.py
|
pguermo/pytest-ansible
|
c2891a80ce570e66fec43082e66eb99cae15fd18
|
[
"MIT"
] | null | null | null |
tests/test_params.py
|
pguermo/pytest-ansible
|
c2891a80ce570e66fec43082e66eb99cae15fd18
|
[
"MIT"
] | null | null | null |
tests/test_params.py
|
pguermo/pytest-ansible
|
c2891a80ce570e66fec43082e66eb99cae15fd18
|
[
"MIT"
] | null | null | null |
import sys
import pytest
import ansible
from pkg_resources import parse_version
from pytest_ansible.has_version import has_ansible_v28
try:
import mock
except ImportError:
from unittest import mock
import re
try:
from _pytest.main import EXIT_OK, EXIT_TESTSFAILED, EXIT_USAGEERROR, EXIT_NOTESTSCOLLECTED, EXIT_INTERRUPTED # NOQA
except ImportError:
from _pytest.main import ExitCode
EXIT_OK = ExitCode.OK
EXIT_TESTSFAILED = ExitCode.TESTS_FAILED
EXIT_USAGEERROR = ExitCode.USAGE_ERROR
EXIT_INTERRUPTED = ExitCode.INTERRUPTED
EXIT_NOTESTSCOLLECTED = ExitCode.NO_TESTS_COLLECTED
if sys.version_info[0] == 2:
import __builtin__ as builtins # NOQA
else:
import builtins # NOQA
def test_plugin_help(testdir):
"""Verifies expected output from of py.test --help"""
result = testdir.runpytest('--help')
result.stdout.fnmatch_lines([
# Check for the github args section header
'pytest-ansible:',
# Check for the specific args
' --inventory=ANSIBLE_INVENTORY, --ansible-inventory=ANSIBLE_INVENTORY',
' --host-pattern=ANSIBLE_HOST_PATTERN, --ansible-host-pattern=ANSIBLE_HOST_PATTERN',
' --connection=ANSIBLE_CONNECTION, --ansible-connection=ANSIBLE_CONNECTION',
' --user=ANSIBLE_USER, --ansible-user=ANSIBLE_USER',
' --check, --ansible-check',
' --module-path=ANSIBLE_MODULE_PATH, --ansible-module-path=ANSIBLE_MODULE_PATH',
' --become, --ansible-become',
' --become-method=ANSIBLE_BECOME_METHOD, --ansible-become-method=ANSIBLE_BECOME_METHOD',
' --become-user=ANSIBLE_BECOME_USER, --ansible-become-user=ANSIBLE_BECOME_USER',
' --ask-become-pass=ANSIBLE_ASK_BECOME_PASS, --ansible-ask-become-pass=ANSIBLE_ASK_BECOME_PASS',
# Check for the marker in --help
' ansible (args)*Ansible integration',
])
def test_plugin_markers(testdir):
"""Verifies expected output from of py.test --markers"""
result = testdir.runpytest('--markers')
result.stdout.fnmatch_lines([
'@pytest.mark.ansible(*args): Ansible integration',
])
def test_report_header(testdir, option):
"""Verify the expected ansible version in the pytest report header.
"""
result = testdir.runpytest(*option.args)
assert result.ret == EXIT_NOTESTSCOLLECTED
result.stdout.fnmatch_lines([
'ansible: %s' % ansible.__version__,
])
def test_params_not_required_when_not_using_fixture(testdir, option):
"""Verify the ansible parameters are not required if the fixture is not used.
"""
src = """
import pytest
def test_func():
assert True
"""
testdir.makepyfile(src)
result = testdir.runpytest(*option.args)
assert result.ret == EXIT_OK
@pytest.mark.parametrize(
"fixture_name",
[
'ansible_adhoc',
'ansible_module',
'ansible_facts',
],
)
def test_params_required_when_using_fixture(testdir, option, fixture_name):
"""Verify the ansible parameters are required if the fixture is used.
"""
src = """
import pytest
def test_func({0}):
{0}
""".format(fixture_name)
testdir.makepyfile(src)
result = testdir.runpytest(*option.args)
assert result.ret == EXIT_USAGEERROR
result.stderr.fnmatch_lines([
'ERROR: Missing required parameter --ansible-host-pattern/--host-pattern',
])
@pytest.mark.parametrize(
"fixture_name",
[
'ansible_host',
'ansible_group',
],
)
def test_params_required_when_using_generator(testdir, option, fixture_name):
"""Verify the ansible parameters are required when using a fixture generator.
"""
src = """
import pytest
def test_func({0}):
assert True
""".format(fixture_name)
testdir.makepyfile(src)
result = testdir.runpytest(*option.args)
assert result.ret == EXIT_INTERRUPTED
result.stdout.fnmatch_lines([
'collected 0 items / 1 errors',
'*UsageError: Missing required parameter --ansible-host-pattern/--host-pattern',
])
@pytest.mark.parametrize(
"required_value_parameter",
[
'--ansible-inventory', '--inventory',
'--ansible-host-pattern', '--host-pattern',
'--ansible-connection', '--connection',
'--ansible-user', '--user',
'--ansible-become-method', '--become-method',
'--ansible-become-user', '--become-user',
'--ansible-module-path', '--module-path',
],
)
def test_param_requires_value(testdir, required_value_parameter):
"""Verifies failure when not providing a value to a parameter that requires a value"""
result = testdir.runpytest(*[required_value_parameter])
assert result.ret == EXIT_USAGEERROR
result.stderr.fnmatch_lines([
'*: error: argument *%s*: expected one argument' % required_value_parameter,
])
def test_params_required_with_inventory_without_host_pattern(testdir, option):
'''Verify that a host pattern is required when an inventory is supplied.'''
src = """
import pytest
def test_func(ansible_module):
assert True
"""
testdir.makepyfile(src)
result = testdir.runpytest(*option.args + ['--ansible-inventory', 'local,'])
assert result.ret == EXIT_USAGEERROR
result.stderr.fnmatch_lines(
[
'ERROR: Missing required parameter --ansible-host-pattern/--host-pattern',
]
)
@pytest.mark.requires_ansible_v1
def test_params_required_with_bogus_inventory_v1(testdir, option):
src = """
import pytest
def test_func(ansible_module):
assert True
"""
testdir.makepyfile(src)
with mock.patch('os.path.exists', return_value=False) as mock_exists:
result = testdir.runpytest(*['--ansible-inventory', 'bogus', '--ansible-host-pattern', 'all'])
# Assert py.test exit code
assert result.ret == EXIT_TESTSFAILED
# Assert expected error output
result.stdout.fnmatch_lines([
'*Unable to find an inventory file, specify one with -i ?',
])
# Assert mock open called on provided file
mock_exists.assert_any_call('bogus')
@pytest.mark.skipif(
parse_version(ansible.__version__) < parse_version('2.0.0') or
parse_version(ansible.__version__) >= parse_version('2.4.0'),
reason="requires ansible >= 2.0 and < 2.4"
)
def test_params_required_with_bogus_inventory_v2(testdir, option, recwarn):
src = """
import pytest
def test_func(ansible_module):
with pytest.warns(UserWarning, match="provided hosts list is empty, only localhost is available"):
ansible_module.ping()
"""
testdir.makepyfile(src)
with mock.patch('ansible.parsing.dataloader.DataLoader.path_exists', return_value=False) as mock_exists:
result = testdir.runpytest(*['-vvvvvs', '--ansible-inventory', 'bogus', '--ansible-host-pattern', 'all'])
# Assert py.test exit code
assert result.ret == EXIT_OK
# Assert mock open called on provided file
mock_exists.assert_any_call('bogus')
@pytest.mark.requires_ansible_v24
@pytest.mark.skipif(
has_ansible_v28,
reason="requires ansible < 2.8"
)
def test_params_required_with_bogus_inventory_v24(testdir, option, recwarn):
src = """
import pytest
def test_func(ansible_module):
with pytest.warns(UserWarning) as record:
ansible_module.ping()
# Ensure at least one warning in the queue
assert len(record) >= 1
        # Ensure the first warning is the ansible localhost warning
assert record[0].message.args[0] == "provided hosts list is empty, only localhost is available"
"""
testdir.makepyfile(src)
result = testdir.runpytest(*['-vvvvvs', '--ansible-inventory', 'bogus', '--ansible-host-pattern', 'all'])
# Assert py.test exit code
assert result.ret == EXIT_OK
# There appear to be '\n' newline characters within the output. Using the join on errlines flattens the string for
# easier comparison.
assert re.search(r'Unable to parse .*/bogus as an inventory source', ' '.join(result.errlines))
@pytest.mark.requires_ansible_v1
def test_params_required_without_inventory_with_host_pattern_v1(testdir, option):
src = """
import pytest
def test_func(ansible_module):
assert True
"""
testdir.makepyfile(src)
result = testdir.runpytest(*option.args + ['--ansible-host-pattern', 'all'])
assert result.ret == EXIT_TESTSFAILED
result.stdout.fnmatch_lines(
[
'*Unable to find an inventory file, specify one with -i ?',
]
)
@pytest.mark.requires_ansible_v2
def test_params_required_without_inventory_with_host_pattern_v2(testdir, option):
src = """
import pytest
def test_func(ansible_module):
assert True
"""
testdir.makepyfile(src)
result = testdir.runpytest(*option.args + ['--ansible-host-pattern', 'all'])
assert result.ret == EXIT_OK
# TODO - validate the following warning message
# [WARNING]: provided hosts list is empty, only localhost is available
if False:
result.stderr.fnmatch_lines(
[
"*[WARNING]: Host file not found: /etc/ansible/hosts*",
"*provided hosts list is empty, only localhost is available",
]
)
def test_param_override_with_marker(testdir):
src = """
import pytest
@pytest.mark.ansible(inventory='local,', connection='local', host_pattern='all')
def test_func(ansible_module):
ansible_module.ping()
"""
testdir.makepyfile(src)
result = testdir.runpytest(*['-vvvvvs', '--tb', 'native', '--ansible-inventory', 'garbage,', '--ansible-host-pattern',
'garbage', '--ansible-connection', 'garbage'])
assert result.ret == EXIT_OK
# Mock assert the correct variables are set
| 33.285714 | 122 | 0.664438 |
70ab2496035418b23f375f145fe639eda4dad474 | 242 | py | Python | tests/basics/builtin_dir.py | peterson79/pycom-micropython-sigfox | 3f93fc2c02567c96f18cff4af9125db8fd7a6fb4 | ["MIT"] | 37 | 2017-12-07T15:49:29.000Z | 2022-03-16T16:01:38.000Z | tests/basics/builtin_dir.py | peterson79/pycom-micropython-sigfox | 3f93fc2c02567c96f18cff4af9125db8fd7a6fb4 | ["MIT"] | 27 | 2015-01-02T16:17:37.000Z | 2015-09-07T19:21:26.000Z | tests/basics/builtin_dir.py | peterson79/pycom-micropython-sigfox | 3f93fc2c02567c96f18cff4af9125db8fd7a6fb4 | ["MIT"] | 22 | 2016-08-01T01:35:30.000Z | 2022-03-22T18:12:23.000Z |
# test builtin dir
# dir of locals
print('__name__' in dir())
# dir of module
import sys
print('platform' in dir(sys))
class Foo:
def __init__(self):
self.x = 1
foo = Foo()
print('__init__' in dir(foo))
print('x' in dir(foo))
| 14.235294 | 29 | 0.636364 |
b2f999b623a86006a08f423c147dd79910d4aa2b | 19,350 | py | Python | seisflows/solver/base.py | weiliu620/seisflows | 42a576ceda8c3ac5d38cedfacd7d4bde98d471d8 | ["BSD-2-Clause"] | null | null | null | seisflows/solver/base.py | weiliu620/seisflows | 42a576ceda8c3ac5d38cedfacd7d4bde98d471d8 | ["BSD-2-Clause"] | null | null | null | seisflows/solver/base.py | weiliu620/seisflows | 42a576ceda8c3ac5d38cedfacd7d4bde98d471d8 | ["BSD-2-Clause"] | 1 | 2020-08-26T16:40:03.000Z | 2020-08-26T16:40:03.000Z |
import subprocess
import sys
import numpy as np
from functools import partial
from glob import glob
from importlib import import_module
from os.path import basename, join
from seisflows.config import ParameterError, custom_import
from seisflows.plugins import solver_io
from seisflows.tools import msg, unix
from seisflows.tools.seismic import Container, call_solver
from seisflows.tools.tools import Struct, diff, exists
PAR = sys.modules['seisflows_parameters']
PATH = sys.modules['seisflows_paths']
system = sys.modules['seisflows_system']
preprocess = sys.modules['seisflows_preprocess']
class base(object):
""" Provides an interface through which solver simulations can be set up
and run and a parent class for SPECFEM2D, SPECFEM3D and SPECFEM3D_GLOBE
subclasses
This class supports only acoustic and isotropic elastic inversions.
For additional options, see github.com/rmodrak/seisflows-multiparameter
eval_func, eval_grad, apply_hess
These methods deal with evaluation of the misfit function or its
derivatives. Together, they provide the primary interface through which
SeisFlows interacts with SPECFEM2D/3D
forward, adjoint
These methods allow direct access to low-level SPECFEM2D/3D components,
providing an alternative interface through which to interact with the
solver
      setup, generate_data, generate_mesh
        One-time operations performed at the beginning of an inversion or
        migration
initialize_solver_directories, initialize_adjoint_traces
SPECFEM2D/3D requires a particular directory structure in which to run and
particular file formats for models, data, and parameter files. These
methods help put in place all these prerequisites
load, save
For reading and writing SPECFEM2D/3D models and kernels. On the disk,
models and kernels are stored as binary files, and in memory, as
dictionaries with different keys corresponding to different material
parameters
split, merge
Within the solver routines, it is natural to store models as
dictionaries. Within the optimization routines, it is natural to store
models as vectors. Two methods, 'split' and 'merge', are used to convert
back and forth between these two representations
combine, smooth
Utilities for combining and smoothing kernels
"""
assert 'MATERIALS' in PAR
assert 'DENSITY' in PAR
parameters = []
if PAR.MATERIALS == 'Elastic':
parameters += ['vp']
parameters += ['vs']
elif PAR.MATERIALS == 'Acoustic':
parameters += ['vp']
if PAR.DENSITY == 'Variable':
parameters += ['rho']
def check(self):
""" Checks parameters and paths
"""
# number of processors per simulation
if 'NPROC' not in PAR:
raise ParameterError(PAR, 'NPROC')
# format used by SPECFEM for reading and writing models
# (currently, SPECFEM offers both 'fortran_binary' and 'adios')
if 'SOLVERIO' not in PAR:
setattr(PAR, 'SOLVERIO', 'fortran_binary')
# solver scratch paths
if 'SCRATCH' not in PATH:
raise ParameterError(PATH, 'SCRATCH')
if 'LOCAL' not in PATH:
setattr(PATH, 'LOCAL', None)
if 'SOLVER' not in PATH:
if PATH.LOCAL:
setattr(PATH, 'SOLVER', join(PATH.LOCAL, 'solver'))
else:
setattr(PATH, 'SOLVER', join(PATH.SCRATCH, 'solver'))
# solver input paths
if 'SPECFEM_BIN' not in PATH:
raise ParameterError(PATH, 'SPECFEM_BIN')
if 'SPECFEM_DATA' not in PATH:
raise ParameterError(PATH, 'SPECFEM_DATA')
# assertions
assert self.parameters != []
assert hasattr(solver_io, PAR.SOLVERIO)
# assert hasattr(self.io, 'read_slice')
# assert hasattr(self.io, 'write_slice')
assert hasattr(solver_io, 'read_slice')
assert hasattr(solver_io, 'write_slice')
def setup(self):
"""
Prepares solver for inversion or migration
Sets up directory structure expected by SPECFEM and copies or
generates seismic data to be inverted or migrated
"""
# clean up for new inversion
unix.rm(self.cwd)
# As input for an inversion or migration, users can choose between
# providing data, or providing a target model from which data are
# generated on the fly. In the former case, a value for PATH.DATA must
# be supplied; in the latter case, a value for PATH.MODEL_TRUE must be
# provided
if PATH.DATA:
# copy user supplied data
self.initialize_solver_directories()
src = glob(PATH.DATA +'/'+ self.source_name +'/'+ '*')
dst = 'traces/obs/'
unix.cp(src, dst)
else:
# generate data on the fly
self.generate_data(
model_path=PATH.MODEL_TRUE,
model_name='model_true',
model_type='gll')
# prepare initial model
self.generate_mesh(
model_path=PATH.MODEL_INIT,
model_name='model_init',
model_type='gll')
self.initialize_adjoint_traces()
def clean(self):
unix.cd(self.cwd)
unix.rm('OUTPUT_FILES')
unix.mkdir('OUTPUT_FILES')
def generate_data(self, *args, **kwargs):
""" Generates data
"""
# must be implemented by subclass
raise NotImplementedError
def generate_mesh(self, *args, **kwargs):
""" Performs meshing and database generation
"""
# must be implemented by subclass
raise NotImplementedError
### high-level solver interface
def eval_func(self, path='', export_traces=False, write_residuals=True):
"""
Performs forward simulations needed for misfit function evaluation
:input path :: directory from which model is imported
:input export_traces :: save or discard traces?
"""
unix.cd(self.cwd)
self.import_model(path)
self.forward()
if write_residuals:
preprocess.prepare_eval_grad(self.cwd)
self.export_residuals(path)
def eval_grad(self, path='', export_traces=False):
"""
Evaluates gradient by carrying out adjoint simulations.
(A function evaluation must already have been carried out.)
:input path :: directory from which model is imported
:input export_traces :: save or discard traces?
"""
unix.cd(self.cwd)
self.adjoint()
self.export_kernels(path)
if export_traces:
self.export_traces(path+'/'+'traces/syn', prefix='traces/syn')
self.export_traces(path+'/'+'traces/adj', prefix='traces/adj')
def apply_hess(self, path=''):
"""
Computes action of Hessian on a given model vector.
(A gradient evaluation must have already been carried out.)
:input path :: directory to which output files are exported
"""
unix.cd(self.cwd)
self.import_model(path)
unix.mkdir('traces/lcg')
self.forward('traces/lcg')
preprocess.prepare_apply_hess(self.cwd)
self.adjoint()
self.export_kernels(path)
### low-level solver interface
def forward(self):
""" Calls forward solver
"""
# must be implemented by subclass
raise NotImplementedError
def adjoint(self):
""" Calls adjoint solver
"""
# must be implemented by subclass
raise NotImplementedError
### model input/output
@property
def io(self):
""" Solver IO module
"""
return getattr(solver_io, PAR.SOLVERIO)
def load(self, path, parameters=[], prefix='', suffix=''):
"""
Loads SPECFEM2D/3D models or kernels
:input path :: directory from which model is read
:input parameters :: list of material parameters to be read
(if empty, defaults to self.parameters)
:input prefix :: optional filename prefix
:input suffix :: optional filename suffix, eg '_kernel'
:output dict :: model or kernels indexed by material parameter
and processor rank, ie dict[parameter][iproc]
"""
dict = Container()
for iproc in range(self.mesh_properties.nproc):
for key in parameters or self.parameters:
dict[key] += self.io.read_slice(
path, prefix+key+suffix, iproc)
return dict
def save(self, dict, path, parameters=['vp','vs','rho'],
prefix='', suffix=''):
"""
Saves SPECFEM2D/3D models or kernels
:input dict :: model stored as a dictionary or Container
:input path :: directory to which model is written
:input parameters :: list of material parameters to be written
:input prefix :: optional filename prefix
:input suffix :: optional filename suffix, eg '_kernel'
"""
unix.mkdir(path)
# fill in any missing parameters
missing_keys = diff(parameters, dict.keys())
for iproc in range(self.mesh_properties.nproc):
for key in missing_keys:
dict[key] += self.io.read_slice(
PATH.MODEL_INIT, prefix+key+suffix, iproc)
# write slices to disk
for iproc in range(self.mesh_properties.nproc):
for key in parameters:
self.io.write_slice(
dict[key][iproc], path, prefix+key+suffix, iproc)
def merge(self, model, parameters=[]):
""" Converts model from dictionary to vector representation
"""
m = np.array([])
for key in parameters or self.parameters:
for iproc in range(self.mesh_properties.nproc):
m = np.append(m, model[key][iproc])
return m
def split(self, m, parameters=[]):
""" Converts model from vector to dictionary representation
"""
nproc = self.mesh_properties.nproc
ngll = self.mesh_properties.ngll
model = Container()
for idim, key in enumerate(parameters or self.parameters):
model[key] = []
for iproc in range(nproc):
imin = sum(ngll)*idim + sum(ngll[:iproc])
imax = sum(ngll)*idim + sum(ngll[:iproc+1])
model[key] += [m[imin:imax]]
return model
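    # Sketch of the dict <-> vector round trip provided by merge/split above
    # (keys follow self.parameters, e.g. ['vp', 'vs'] for an elastic setup):
    #   m = solver.merge(model)    # dict[key][iproc] arrays -> one 1D vector
    #   model = solver.split(m)    # 1D vector -> dict[key][iproc] arrays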
### postprocessing wrappers
def combine(self, input_path='', output_path='', parameters=[]):
""" Sums individual source contributions. Wrapper over xcombine_sem
utility.
"""
if not exists(input_path):
raise Exception
if not exists(output_path):
unix.mkdir(output_path)
unix.cd(self.cwd)
with open('kernel_paths', 'w') as file:
file.writelines([join(input_path, name+'\n')
for name in self.source_names])
for name in parameters or self.parameters:
call_solver(
system.mpiexec(),
PATH.SPECFEM_BIN +'/'+ 'xcombine_sem '
+ name + '_kernel' + ' '
+ 'kernel_paths' + ' '
+ output_path)
def smooth(self, input_path='', output_path='', parameters=[], span=0.):
""" Smooths kernels by convolving them with a Gaussian. Wrapper over
xsmooth_sem utility.
"""
if not exists(input_path):
raise Exception
if not exists(output_path):
unix.mkdir(output_path)
# apply smoothing operator
unix.cd(self.cwd)
for name in parameters or self.parameters:
            print(' smoothing', name)
call_solver(
system.mpiexec(),
PATH.SPECFEM_BIN +'/'+ 'xsmooth_sem '
+ str(span) + ' '
+ str(span) + ' '
+ name + '_kernel' + ' '
+ input_path + '/ '
+ output_path + '/ ',
output='/dev/null')
print('')
# rename output files
files = glob(output_path+'/*')
unix.rename('_smooth', '', files)
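    # Hedged example of the postprocessing wrappers above; the directory names
    # are assumptions, any layout with per-source kernel folders listed in
    # 'kernel_paths' works:
    #   solver.combine(input_path='kernels', output_path='kernels/sum')
    #   solver.smooth(input_path='kernels/sum', output_path='kernels/sum_smooth', span=1000.)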
### file transfer utilities
def import_model(self, path):
model = self.load(path+'/'+'model')
self.save(model, self.model_databases)
def import_traces(self, path):
src = glob(join(path, 'traces', self.source_name, '*'))
dst = join(self.cwd, 'traces/obs')
unix.cp(src, dst)
def export_model(self, path, parameters=['rho', 'vp', 'vs']):
if self.taskid == 0:
unix.mkdir(path)
for key in parameters:
files = glob(join(self.model_databases, '*'+key+'.bin'))
unix.cp(files, path)
def export_kernels(self, path):
unix.cd(self.kernel_databases)
# work around conflicting name conventions
self.rename_kernels()
src = glob('*_kernel.bin')
dst = join(path, 'kernels', self.source_name)
unix.mkdir(dst)
unix.mv(src, dst)
def export_residuals(self, path):
unix.mkdir(join(path, 'residuals'))
src = join(self.cwd, 'residuals')
dst = join(path, 'residuals', self.source_name)
unix.mv(src, dst)
def export_traces(self, path, prefix='traces/obs'):
unix.mkdir(join(path))
src = join(self.cwd, prefix)
dst = join(path, self.source_name)
unix.cp(src, dst)
def rename_kernels(self):
""" Works around conflicting kernel filename conventions
"""
files = []
files += glob('*proc??????_alpha_kernel.bin')
files += glob('*proc??????_alpha[hv]_kernel.bin')
files += glob('*proc??????_reg1_alpha_kernel.bin')
files += glob('*proc??????_reg1_alpha[hv]_kernel.bin')
unix.rename('alpha', 'vp', files)
files = []
files += glob('*proc??????_beta_kernel.bin')
files += glob('*proc??????_beta[hv]_kernel.bin')
files += glob('*proc??????_reg1_beta_kernel.bin')
files += glob('*proc??????_reg1_beta[hv]_kernel.bin')
unix.rename('beta', 'vs', files)
def rename_data(self, path):
""" Works around conflicting data filename conventions
"""
pass
### setup utilities
def initialize_solver_directories(self):
""" Creates directory structure expected by SPECFEM3D, copies
executables, and prepares input files. Executables must be supplied
by user as there is currently no mechanism for automatically
compiling from source.
"""
unix.mkdir(self.cwd)
unix.cd(self.cwd)
# create directory structure
unix.mkdir('bin')
unix.mkdir('DATA')
unix.mkdir('OUTPUT_FILES')
unix.mkdir('traces/obs')
unix.mkdir('traces/syn')
unix.mkdir('traces/adj')
unix.mkdir(self.model_databases)
unix.mkdir(self.kernel_databases)
        # copy executables
src = glob(PATH.SPECFEM_BIN +'/'+ '*')
dst = 'bin/'
unix.cp(src, dst)
# copy input files
src = glob(PATH.SPECFEM_DATA +'/'+ '*')
dst = 'DATA/'
unix.cp(src, dst)
src = 'DATA/' + self.source_prefix +'_'+ self.source_name
dst = 'DATA/' + self.source_prefix
unix.cp(src, dst)
self.check_solver_parameter_files()
def initialize_adjoint_traces(self):
""" Puts in place "adjoint traces" expected by SPECFEM
"""
for filename in self.data_filenames:
# read traces
d = preprocess.reader(self.cwd +'/'+ 'traces/obs', filename)
# Adjoint traces are initialized by writing zeros for all channels.
# Channels actually in use during an inversion or migration will be
# overwritten with nonzero values later on.
for t in d:
t.data[:] = 0.
# write traces
preprocess.writer(d, self.cwd +'/'+ 'traces/adj', filename)
def check_mesh_properties(self, path=None):
if not path:
path = PATH.MODEL_INIT
if not exists(path):
raise Exception
# count slices and grid points
key = self.parameters[0]
iproc = 0
ngll = []
while True:
dummy = self.io.read_slice(path, key, iproc)[0]
ngll += [len(dummy)]
iproc += 1
if not exists('%s/proc%06d_%s.bin' % (path, iproc, key)):
break
nproc = iproc
# create coordinate pointers
coords = Struct()
for key in ['x', 'y', 'z']:
coords[key] = partial(self.io.read_slice, self, path, key)
self._mesh_properties = Struct([
['nproc', nproc],
['ngll', ngll],
['path', path],
['coords', coords]])
def check_source_names(self):
""" Determines names of sources by applying wildcard rule to user-
supplied input files
"""
path = PATH.SPECFEM_DATA
if not exists(path):
raise Exception
# apply wildcard rule
wildcard = self.source_prefix+'_*'
globstar = sorted(glob(path +'/'+ wildcard))
if not globstar:
print(msg.SourceError_SPECFEM % (path, wildcard))
sys.exit(-1)
names = []
for path in globstar:
names += [basename(path).split('_')[-1]]
self._source_names = names[:PAR.NTASK]
def check_solver_parameter_files(self):
# optional method, can be implemented by subclass
pass
### additional solver attributes
@property
def taskid(self):
# because it is sometimes useful to overload system.taskid
return system.taskid()
@property
def source_name(self):
# returns name of source currently under consideration
return self.source_names[self.taskid]
@property
def cwd(self):
# returns working directory currently in use
return join(PATH.SOLVER, self.source_name)
@property
def source_names(self):
if not hasattr(self, '_source_names'):
self.check_source_names()
return self._source_names
@property
def mesh_properties(self):
if not hasattr(self, '_mesh_properties'):
self.check_mesh_properties()
return self._mesh_properties
@property
def data_filenames(self):
# required method, must be implemented by subclass
return NotImplementedError
@property
def model_databases(self):
# required method, must be implemented by subclass
return NotImplementedError
@property
def kernel_databases(self):
# required method, must be implemented by subclass
return NotImplementedError
@property
def source_prefix(self):
# required method, must be implemented by subclass
return NotImplementedError
| 31.009615 | 82 | 0.592817 |
294fdf81f9660203101cfa47ffab35f792573e61 | 653 | py | Python | project/urls.py | yrkv/remote-robot-server | ff308acb79883b76c832420a045f5036b9605cab | ["CC0-1.0"] | null | null | null | project/urls.py | yrkv/remote-robot-server | ff308acb79883b76c832420a045f5036b9605cab | ["CC0-1.0"] | null | null | null | project/urls.py | yrkv/remote-robot-server | ff308acb79883b76c832420a045f5036b9605cab | ["CC0-1.0"] | null | null | null |
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from welcome.views import index, health
from welcome.robot_control import get_state, set_state
urlpatterns = [
# Examples:
# url(r'^$', 'project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^get_state$', get_state),
url(r'^set_state$', set_state),
url(r'^$', index),
url(r'^health$', health),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
| 25.115385 | 57 | 0.661562 |
ae074b729161a8a016438e6cda4b4b4c4c15281c | 6,045 | py | Python | tapestry/gallery.py | msgre/tapestry | 42ac142a8134593a3810fe4cf9bc770f323606d2 | ["Apache-1.1"] | null | null | null | tapestry/gallery.py | msgre/tapestry | 42ac142a8134593a3810fe4cf9bc770f323606d2 | ["Apache-1.1"] | null | null | null | tapestry/gallery.py | msgre/tapestry | 42ac142a8134593a3810fe4cf9bc770f323606d2 | ["Apache-1.1"] | null | null | null |
import math
import os
import pygame
import pygame.locals
import re
import time
from .config import GalleryConfig, TitlesConfig
from .detective import get_entries, check_file
from .states import get_state, STATE_ACTION, STATE_BACK, STATE_MISSING_SOURCE, STATE_NEXT, STATE_PREV, STATE_QUIT
from .view import View
class Gallery(View):
filename_re = re.compile(r'^[^.].+\.(jpg|jpeg|gif|png|bmp)$', re.IGNORECASE)
def load_config(self):
gc = GalleryConfig(self.dirpath)
status, config = gc.process()
self.autoplay = config['autoplay']
self.x_spacing = config['margin'] * 2
self.y_spacing = config['margin'] * 3 + config['fontsize']
self.font = self.display.load_font(config['fontsize'])
if not status:
print('No configuration file found.', flush=True)
config_msg = []
for k, v in config.items():
config_msg.append('{k}={v}'.format(k=k, v=v))
print("Configuration: {}".format(', '.join(config_msg)), flush=True)
return config
def load_titles(self):
tc = TitlesConfig(self.dirpath)
status, config = tc.process()
if not status:
print('No titles file found.', flush=True)
return config
def get_optimal_size(self, img_width, img_height):
"""
        Calculate the optimal on-screen size for an image of the given dimensions.
"""
display_width, display_height = self.display.size
display_width -= self.x_spacing
display_height -= self.y_spacing
if display_width < img_width:
width = display_width
height = display_width / img_width * img_height
elif display_height < img_height:
height = display_height
width = display_height / img_height * img_width
elif display_width > img_width and display_height > img_height:
width = display_width
height = display_width / img_width * img_height
else:
width = display_width
height = display_height
if height > display_height:
height = display_height
width = display_height / img_height * img_width
return (int(width), int(height))
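    # Worked example of the scaling logic above (display size and image size
    # are assumptions): with a usable area of 1280x720 and a 640x480 image,
    # the enlargement branch gives width 1280, height 1280/640*480 = 960;
    # 960 exceeds 720, so the result is clamped to height 720 and
    # width 720/480*640 = 960, i.e. (960, 720).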
def get_state(self):
"""
Return current state according to events on keyboard (and timer).
"""
state = get_state()
if state == STATE_ACTION:
self.autoplay = not self.autoplay
if state is not None:
if state == STATE_ACTION and not self.autoplay:
pygame.time.set_timer(pygame.USEREVENT, 0)
else:
self.autoplay = True
pygame.time.set_timer(pygame.USEREVENT, 0)
pygame.time.set_timer(pygame.USEREVENT, self.config['delay'])
return state
def render_screen(self, imgpath, title):
"""
Render provided image on screen.
"""
display_width, display_height = self.display.size
# open and resize source image
print(imgpath, flush=True)
img = pygame.image.load(imgpath)
img_size = img.get_rect().size
optimal_size = self.get_optimal_size(img_size[0], img_size[1])
img = pygame.transform.smoothscale(img, optimal_size)
# put image on screen (centered, bg color)
self.display.screen.fill(self.config['screen_background'])
x = math.ceil((display_width - optimal_size[0]) / 2)
y = math.ceil(self.config['margin'] + (display_height - self.y_spacing - optimal_size[1]) / 2)
self.display.screen.blit(img, (x, y))
# pause symbol on screen
if not self.autoplay:
pause = self.display.pause
self.display.screen.blit(pause['img'], (pause['x'], pause['y']))
# optional title
if title:
text = self.font.render(title, True, self.config['screen_color'])
text_size = text.get_rect().size
text_x = display_width / 2 - text_size[0] / 2
text_y = display_height - self.y_spacing + self.config['margin'] + round(self.config['fontsize'] * 0.1)
self.display.screen.blit(text, (text_x, text_y))
# self.display.screen.blit(text, (1000, 0))
pygame.display.update()
def run(self):
# get images
files = get_entries(self.dirpath, condition=check_file)
files = self.filter_files(files)
if not files:
return STATE_MISSING_SOURCE
# load configuration files
self.config = self.load_config()
titles = self.load_titles()
# initialize main loop
clock = pygame.time.Clock()
idx = 0
old_filepath = None
old_autoplay = None
files_len = len(files)
state = None
if self.autoplay:
# USEREVENT is same as STATE_NEXT state
pygame.time.set_timer(pygame.USEREVENT, self.config['delay'])
# main loop
while state not in [STATE_QUIT, STATE_MISSING_SOURCE, STATE_BACK]:
clock.tick(self.fps)
state = self.get_state()
if state == STATE_NEXT:
idx += 1
if idx > files_len - 1:
idx = files_len - 1
elif state == STATE_PREV:
idx -= 1
if idx < 0:
idx = 0
if state or old_filepath is None:
filepath = os.path.join(self.dirpath, files[idx])
if os.path.exists(filepath):
if old_filepath != filepath or self.autoplay != old_autoplay:
self.render_screen(filepath, titles.get(files[idx]))
old_filepath = filepath
old_autoplay = self.autoplay
else:
state = STATE_MISSING_SOURCE
self.display.screen.fill(self.config['screen_background'])
pygame.time.set_timer(pygame.USEREVENT, 0)
# pygame.display.quit() # TODO: ???
return state
| 34.346591 | 115 | 0.586435 |
744f1364b7f198691fe6af115e14cd8d0d24cb74 | 144,532 | py | Python | lib/isl/interface/isl.py | ShreyasKhandekar/IEGenLib | 93bce46a3fe0b829c3cb957746a53b7cfc4373b5 | ["BSD-2-Clause"] | 5 | 2019-05-20T03:35:41.000Z | 2021-09-16T22:22:13.000Z | lib/isl/interface/isl.py | ShreyasKhandekar/IEGenLib | 93bce46a3fe0b829c3cb957746a53b7cfc4373b5 | ["BSD-2-Clause"] | 160 | 2021-01-05T18:34:10.000Z | 2022-03-03T01:27:49.000Z | lib/isl/interface/isl.py | ShreyasKhandekar/IEGenLib | 93bce46a3fe0b829c3cb957746a53b7cfc4373b5 | ["BSD-2-Clause"] | 10 | 2015-11-04T17:45:04.000Z | 2022-01-15T08:43:18.000Z |
from ctypes import *
isl = cdll.LoadLibrary("libisl.so")
libc = cdll.LoadLibrary("libc.so.6")
class Error(Exception):
pass
class Context:
defaultInstance = None
def __init__(self):
ptr = isl.isl_ctx_alloc()
self.ptr = ptr
def __del__(self):
isl.isl_ctx_free(self)
def from_param(self):
return self.ptr
@staticmethod
def getDefaultInstance():
if Context.defaultInstance == None:
Context.defaultInstance = Context()
return Context.defaultInstance
isl.isl_ctx_alloc.restype = c_void_p
isl.isl_ctx_free.argtypes = [Context]
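# Minimal usage sketch of these ctypes bindings (the exact isl string syntax is
# an assumption; the aff class used here is defined further down in this file):
#   a = aff("{ [x] -> [(x + 1)] }")
#   b = aff("{ [x] -> [(2x)] }")
#   print(a.add(b))   # dispatches to isl_aff_add on the underlying pointers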
class union_pw_multi_aff:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is union_pw_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_pw_multi_aff_from_union_pw_aff(isl.isl_union_pw_aff_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is pw_multi_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_pw_multi_aff_from_pw_multi_aff(isl.isl_pw_multi_aff_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_pw_multi_aff_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_union_pw_multi_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_union_pw_multi_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.union_pw_multi_aff("""%s""")' % s
else:
return 'isl.union_pw_multi_aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is union_pw_multi_aff:
arg0 = union_pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is union_pw_multi_aff:
arg1 = union_pw_multi_aff(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_pw_multi_aff_add(isl.isl_union_pw_multi_aff_copy(arg0.ptr), isl.isl_union_pw_multi_aff_copy(arg1.ptr))
return union_pw_multi_aff(ctx=ctx, ptr=res)
def flat_range_product(arg0, arg1):
try:
if not arg0.__class__ is union_pw_multi_aff:
arg0 = union_pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is union_pw_multi_aff:
arg1 = union_pw_multi_aff(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_pw_multi_aff_flat_range_product(isl.isl_union_pw_multi_aff_copy(arg0.ptr), isl.isl_union_pw_multi_aff_copy(arg1.ptr))
return union_pw_multi_aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is union_pw_multi_aff:
res = isl.isl_union_pw_multi_aff_pullback_union_pw_multi_aff(isl.isl_union_pw_multi_aff_copy(arg0.ptr), isl.isl_union_pw_multi_aff_copy(arg1.ptr))
return union_pw_multi_aff(ctx=arg0.ctx, ptr=res)
def union_add(arg0, arg1):
try:
if not arg0.__class__ is union_pw_multi_aff:
arg0 = union_pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is union_pw_multi_aff:
arg1 = union_pw_multi_aff(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_pw_multi_aff_union_add(isl.isl_union_pw_multi_aff_copy(arg0.ptr), isl.isl_union_pw_multi_aff_copy(arg1.ptr))
return union_pw_multi_aff(ctx=ctx, ptr=res)
isl.isl_union_pw_multi_aff_from_union_pw_aff.restype = c_void_p
isl.isl_union_pw_multi_aff_from_union_pw_aff.argtypes = [c_void_p]
isl.isl_union_pw_multi_aff_from_pw_multi_aff.restype = c_void_p
isl.isl_union_pw_multi_aff_from_pw_multi_aff.argtypes = [c_void_p]
isl.isl_union_pw_multi_aff_read_from_str.restype = c_void_p
isl.isl_union_pw_multi_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_union_pw_multi_aff_add.restype = c_void_p
isl.isl_union_pw_multi_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_union_pw_multi_aff_flat_range_product.restype = c_void_p
isl.isl_union_pw_multi_aff_flat_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_union_pw_multi_aff_pullback_union_pw_multi_aff.restype = c_void_p
isl.isl_union_pw_multi_aff_pullback_union_pw_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_union_pw_multi_aff_union_add.restype = c_void_p
isl.isl_union_pw_multi_aff_union_add.argtypes = [c_void_p, c_void_p]
isl.isl_union_pw_multi_aff_free.argtypes = [c_void_p]
isl.isl_union_pw_multi_aff_to_str.argtypes = [c_void_p]
isl.isl_union_pw_multi_aff_to_str.restype = POINTER(c_char)
class multi_union_pw_aff:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is union_pw_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_union_pw_aff_from_union_pw_aff(isl.isl_union_pw_aff_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is multi_pw_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_union_pw_aff_from_multi_pw_aff(isl.isl_multi_pw_aff_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_union_pw_aff_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_multi_union_pw_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_multi_union_pw_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.multi_union_pw_aff("""%s""")' % s
else:
return 'isl.multi_union_pw_aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is multi_union_pw_aff:
arg0 = multi_union_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_union_pw_aff:
arg1 = multi_union_pw_aff(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_union_pw_aff_add(isl.isl_multi_union_pw_aff_copy(arg0.ptr), isl.isl_multi_union_pw_aff_copy(arg1.ptr))
return multi_union_pw_aff(ctx=ctx, ptr=res)
def flat_range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_union_pw_aff:
arg0 = multi_union_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_union_pw_aff:
arg1 = multi_union_pw_aff(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_union_pw_aff_flat_range_product(isl.isl_multi_union_pw_aff_copy(arg0.ptr), isl.isl_multi_union_pw_aff_copy(arg1.ptr))
return multi_union_pw_aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is union_pw_multi_aff:
res = isl.isl_multi_union_pw_aff_pullback_union_pw_multi_aff(isl.isl_multi_union_pw_aff_copy(arg0.ptr), isl.isl_union_pw_multi_aff_copy(arg1.ptr))
return multi_union_pw_aff(ctx=arg0.ctx, ptr=res)
def range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_union_pw_aff:
arg0 = multi_union_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_union_pw_aff:
arg1 = multi_union_pw_aff(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_union_pw_aff_range_product(isl.isl_multi_union_pw_aff_copy(arg0.ptr), isl.isl_multi_union_pw_aff_copy(arg1.ptr))
return multi_union_pw_aff(ctx=ctx, ptr=res)
def union_add(arg0, arg1):
try:
if not arg0.__class__ is multi_union_pw_aff:
arg0 = multi_union_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_union_pw_aff:
arg1 = multi_union_pw_aff(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_union_pw_aff_union_add(isl.isl_multi_union_pw_aff_copy(arg0.ptr), isl.isl_multi_union_pw_aff_copy(arg1.ptr))
return multi_union_pw_aff(ctx=ctx, ptr=res)
isl.isl_multi_union_pw_aff_from_union_pw_aff.restype = c_void_p
isl.isl_multi_union_pw_aff_from_union_pw_aff.argtypes = [c_void_p]
isl.isl_multi_union_pw_aff_from_multi_pw_aff.restype = c_void_p
isl.isl_multi_union_pw_aff_from_multi_pw_aff.argtypes = [c_void_p]
isl.isl_multi_union_pw_aff_read_from_str.restype = c_void_p
isl.isl_multi_union_pw_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_multi_union_pw_aff_add.restype = c_void_p
isl.isl_multi_union_pw_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_multi_union_pw_aff_flat_range_product.restype = c_void_p
isl.isl_multi_union_pw_aff_flat_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_union_pw_aff_pullback_union_pw_multi_aff.restype = c_void_p
isl.isl_multi_union_pw_aff_pullback_union_pw_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_multi_union_pw_aff_range_product.restype = c_void_p
isl.isl_multi_union_pw_aff_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_union_pw_aff_union_add.restype = c_void_p
isl.isl_multi_union_pw_aff_union_add.argtypes = [c_void_p, c_void_p]
isl.isl_multi_union_pw_aff_free.argtypes = [c_void_p]
isl.isl_multi_union_pw_aff_to_str.argtypes = [c_void_p]
isl.isl_multi_union_pw_aff_to_str.restype = POINTER(c_char)
class union_pw_aff(union_pw_multi_aff, multi_union_pw_aff):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is pw_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_pw_aff_from_pw_aff(isl.isl_pw_aff_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_pw_aff_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_union_pw_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_union_pw_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.union_pw_aff("""%s""")' % s
else:
return 'isl.union_pw_aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is union_pw_aff:
arg0 = union_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is union_pw_aff:
arg1 = union_pw_aff(arg1)
except:
return union_pw_multi_aff(arg0).add(arg1)
ctx = arg0.ctx
res = isl.isl_union_pw_aff_add(isl.isl_union_pw_aff_copy(arg0.ptr), isl.isl_union_pw_aff_copy(arg1.ptr))
return union_pw_aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is union_pw_multi_aff:
res = isl.isl_union_pw_aff_pullback_union_pw_multi_aff(isl.isl_union_pw_aff_copy(arg0.ptr), isl.isl_union_pw_multi_aff_copy(arg1.ptr))
return union_pw_aff(ctx=arg0.ctx, ptr=res)
def union_add(arg0, arg1):
try:
if not arg0.__class__ is union_pw_aff:
arg0 = union_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is union_pw_aff:
arg1 = union_pw_aff(arg1)
except:
return union_pw_multi_aff(arg0).union_add(arg1)
ctx = arg0.ctx
res = isl.isl_union_pw_aff_union_add(isl.isl_union_pw_aff_copy(arg0.ptr), isl.isl_union_pw_aff_copy(arg1.ptr))
return union_pw_aff(ctx=ctx, ptr=res)
isl.isl_union_pw_aff_from_pw_aff.restype = c_void_p
isl.isl_union_pw_aff_from_pw_aff.argtypes = [c_void_p]
isl.isl_union_pw_aff_read_from_str.restype = c_void_p
isl.isl_union_pw_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_union_pw_aff_add.restype = c_void_p
isl.isl_union_pw_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_union_pw_aff_pullback_union_pw_multi_aff.restype = c_void_p
isl.isl_union_pw_aff_pullback_union_pw_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_union_pw_aff_union_add.restype = c_void_p
isl.isl_union_pw_aff_union_add.argtypes = [c_void_p, c_void_p]
isl.isl_union_pw_aff_free.argtypes = [c_void_p]
isl.isl_union_pw_aff_to_str.argtypes = [c_void_p]
isl.isl_union_pw_aff_to_str.restype = POINTER(c_char)
class multi_pw_aff(multi_union_pw_aff):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is pw_multi_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_pw_aff_from_pw_multi_aff(isl.isl_pw_multi_aff_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_pw_aff_read_from_str(self.ctx, args[0])
return
if len(args) == 1 and args[0].__class__ is multi_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_pw_aff_from_multi_aff(isl.isl_multi_aff_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is pw_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_pw_aff_from_pw_aff(isl.isl_pw_aff_copy(args[0].ptr))
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_multi_pw_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_multi_pw_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.multi_pw_aff("""%s""")' % s
else:
return 'isl.multi_pw_aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is multi_pw_aff:
arg0 = multi_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_pw_aff:
arg1 = multi_pw_aff(arg1)
except:
return multi_union_pw_aff(arg0).add(arg1)
ctx = arg0.ctx
res = isl.isl_multi_pw_aff_add(isl.isl_multi_pw_aff_copy(arg0.ptr), isl.isl_multi_pw_aff_copy(arg1.ptr))
return multi_pw_aff(ctx=ctx, ptr=res)
def flat_range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_pw_aff:
arg0 = multi_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_pw_aff:
arg1 = multi_pw_aff(arg1)
except:
return multi_union_pw_aff(arg0).flat_range_product(arg1)
ctx = arg0.ctx
res = isl.isl_multi_pw_aff_flat_range_product(isl.isl_multi_pw_aff_copy(arg0.ptr), isl.isl_multi_pw_aff_copy(arg1.ptr))
return multi_pw_aff(ctx=ctx, ptr=res)
def product(arg0, arg1):
try:
if not arg0.__class__ is multi_pw_aff:
arg0 = multi_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_pw_aff:
arg1 = multi_pw_aff(arg1)
except:
return multi_union_pw_aff(arg0).product(arg1)
ctx = arg0.ctx
res = isl.isl_multi_pw_aff_product(isl.isl_multi_pw_aff_copy(arg0.ptr), isl.isl_multi_pw_aff_copy(arg1.ptr))
return multi_pw_aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is multi_aff:
res = isl.isl_multi_pw_aff_pullback_multi_aff(isl.isl_multi_pw_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return multi_pw_aff(ctx=arg0.ctx, ptr=res)
if arg1.__class__ is pw_multi_aff:
res = isl.isl_multi_pw_aff_pullback_pw_multi_aff(isl.isl_multi_pw_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return multi_pw_aff(ctx=arg0.ctx, ptr=res)
if arg1.__class__ is multi_pw_aff:
res = isl.isl_multi_pw_aff_pullback_multi_pw_aff(isl.isl_multi_pw_aff_copy(arg0.ptr), isl.isl_multi_pw_aff_copy(arg1.ptr))
return multi_pw_aff(ctx=arg0.ctx, ptr=res)
def range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_pw_aff:
arg0 = multi_pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_pw_aff:
arg1 = multi_pw_aff(arg1)
except:
return multi_union_pw_aff(arg0).range_product(arg1)
ctx = arg0.ctx
res = isl.isl_multi_pw_aff_range_product(isl.isl_multi_pw_aff_copy(arg0.ptr), isl.isl_multi_pw_aff_copy(arg1.ptr))
return multi_pw_aff(ctx=ctx, ptr=res)
isl.isl_multi_pw_aff_from_pw_multi_aff.restype = c_void_p
isl.isl_multi_pw_aff_from_pw_multi_aff.argtypes = [c_void_p]
isl.isl_multi_pw_aff_read_from_str.restype = c_void_p
isl.isl_multi_pw_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_multi_pw_aff_from_multi_aff.restype = c_void_p
isl.isl_multi_pw_aff_from_multi_aff.argtypes = [c_void_p]
isl.isl_multi_pw_aff_from_pw_aff.restype = c_void_p
isl.isl_multi_pw_aff_from_pw_aff.argtypes = [c_void_p]
isl.isl_multi_pw_aff_add.restype = c_void_p
isl.isl_multi_pw_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_multi_pw_aff_flat_range_product.restype = c_void_p
isl.isl_multi_pw_aff_flat_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_pw_aff_product.restype = c_void_p
isl.isl_multi_pw_aff_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_pw_aff_pullback_multi_aff.restype = c_void_p
isl.isl_multi_pw_aff_pullback_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_multi_pw_aff_pullback_pw_multi_aff.restype = c_void_p
isl.isl_multi_pw_aff_pullback_pw_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_multi_pw_aff_pullback_multi_pw_aff.restype = c_void_p
isl.isl_multi_pw_aff_pullback_multi_pw_aff.argtypes = [c_void_p, c_void_p]
isl.isl_multi_pw_aff_range_product.restype = c_void_p
isl.isl_multi_pw_aff_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_pw_aff_free.argtypes = [c_void_p]
isl.isl_multi_pw_aff_to_str.argtypes = [c_void_p]
isl.isl_multi_pw_aff_to_str.restype = POINTER(c_char)
class pw_multi_aff(multi_pw_aff, union_pw_multi_aff):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is multi_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_pw_multi_aff_from_multi_aff(isl.isl_multi_aff_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is pw_aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_pw_multi_aff_from_pw_aff(isl.isl_pw_aff_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_pw_multi_aff_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_pw_multi_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_pw_multi_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.pw_multi_aff("""%s""")' % s
else:
return 'isl.pw_multi_aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is pw_multi_aff:
arg0 = pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is pw_multi_aff:
arg1 = pw_multi_aff(arg1)
except:
return multi_pw_aff(arg0).add(arg1)
ctx = arg0.ctx
res = isl.isl_pw_multi_aff_add(isl.isl_pw_multi_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return pw_multi_aff(ctx=ctx, ptr=res)
def flat_range_product(arg0, arg1):
try:
if not arg0.__class__ is pw_multi_aff:
arg0 = pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is pw_multi_aff:
arg1 = pw_multi_aff(arg1)
except:
return multi_pw_aff(arg0).flat_range_product(arg1)
ctx = arg0.ctx
res = isl.isl_pw_multi_aff_flat_range_product(isl.isl_pw_multi_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return pw_multi_aff(ctx=ctx, ptr=res)
def product(arg0, arg1):
try:
if not arg0.__class__ is pw_multi_aff:
arg0 = pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is pw_multi_aff:
arg1 = pw_multi_aff(arg1)
except:
return multi_pw_aff(arg0).product(arg1)
ctx = arg0.ctx
res = isl.isl_pw_multi_aff_product(isl.isl_pw_multi_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return pw_multi_aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is multi_aff:
res = isl.isl_pw_multi_aff_pullback_multi_aff(isl.isl_pw_multi_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return pw_multi_aff(ctx=arg0.ctx, ptr=res)
if arg1.__class__ is pw_multi_aff:
res = isl.isl_pw_multi_aff_pullback_pw_multi_aff(isl.isl_pw_multi_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return pw_multi_aff(ctx=arg0.ctx, ptr=res)
def range_product(arg0, arg1):
try:
if not arg0.__class__ is pw_multi_aff:
arg0 = pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is pw_multi_aff:
arg1 = pw_multi_aff(arg1)
except:
return multi_pw_aff(arg0).range_product(arg1)
ctx = arg0.ctx
res = isl.isl_pw_multi_aff_range_product(isl.isl_pw_multi_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return pw_multi_aff(ctx=ctx, ptr=res)
def union_add(arg0, arg1):
try:
if not arg0.__class__ is pw_multi_aff:
arg0 = pw_multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is pw_multi_aff:
arg1 = pw_multi_aff(arg1)
except:
return multi_pw_aff(arg0).union_add(arg1)
ctx = arg0.ctx
res = isl.isl_pw_multi_aff_union_add(isl.isl_pw_multi_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return pw_multi_aff(ctx=ctx, ptr=res)
isl.isl_pw_multi_aff_from_multi_aff.restype = c_void_p
isl.isl_pw_multi_aff_from_multi_aff.argtypes = [c_void_p]
isl.isl_pw_multi_aff_from_pw_aff.restype = c_void_p
isl.isl_pw_multi_aff_from_pw_aff.argtypes = [c_void_p]
isl.isl_pw_multi_aff_read_from_str.restype = c_void_p
isl.isl_pw_multi_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_pw_multi_aff_add.restype = c_void_p
isl.isl_pw_multi_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_pw_multi_aff_flat_range_product.restype = c_void_p
isl.isl_pw_multi_aff_flat_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_pw_multi_aff_product.restype = c_void_p
isl.isl_pw_multi_aff_product.argtypes = [c_void_p, c_void_p]
isl.isl_pw_multi_aff_pullback_multi_aff.restype = c_void_p
isl.isl_pw_multi_aff_pullback_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_pw_multi_aff_pullback_pw_multi_aff.restype = c_void_p
isl.isl_pw_multi_aff_pullback_pw_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_pw_multi_aff_range_product.restype = c_void_p
isl.isl_pw_multi_aff_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_pw_multi_aff_union_add.restype = c_void_p
isl.isl_pw_multi_aff_union_add.argtypes = [c_void_p, c_void_p]
isl.isl_pw_multi_aff_free.argtypes = [c_void_p]
isl.isl_pw_multi_aff_to_str.argtypes = [c_void_p]
isl.isl_pw_multi_aff_to_str.restype = POINTER(c_char)
class pw_aff(union_pw_aff, multi_pw_aff, pw_multi_aff):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_pw_aff_from_aff(isl.isl_aff_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_pw_aff_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_pw_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_pw_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.pw_aff("""%s""")' % s
else:
return 'isl.pw_aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is pw_aff:
arg0 = pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is pw_aff:
arg1 = pw_aff(arg1)
except:
return union_pw_aff(arg0).add(arg1)
ctx = arg0.ctx
res = isl.isl_pw_aff_add(isl.isl_pw_aff_copy(arg0.ptr), isl.isl_pw_aff_copy(arg1.ptr))
return pw_aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is multi_aff:
res = isl.isl_pw_aff_pullback_multi_aff(isl.isl_pw_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return pw_aff(ctx=arg0.ctx, ptr=res)
if arg1.__class__ is pw_multi_aff:
res = isl.isl_pw_aff_pullback_pw_multi_aff(isl.isl_pw_aff_copy(arg0.ptr), isl.isl_pw_multi_aff_copy(arg1.ptr))
return pw_aff(ctx=arg0.ctx, ptr=res)
if arg1.__class__ is multi_pw_aff:
res = isl.isl_pw_aff_pullback_multi_pw_aff(isl.isl_pw_aff_copy(arg0.ptr), isl.isl_multi_pw_aff_copy(arg1.ptr))
return pw_aff(ctx=arg0.ctx, ptr=res)
def union_add(arg0, arg1):
try:
if not arg0.__class__ is pw_aff:
arg0 = pw_aff(arg0)
except:
raise
try:
if not arg1.__class__ is pw_aff:
arg1 = pw_aff(arg1)
except:
return union_pw_aff(arg0).union_add(arg1)
ctx = arg0.ctx
res = isl.isl_pw_aff_union_add(isl.isl_pw_aff_copy(arg0.ptr), isl.isl_pw_aff_copy(arg1.ptr))
return pw_aff(ctx=ctx, ptr=res)
isl.isl_pw_aff_from_aff.restype = c_void_p
isl.isl_pw_aff_from_aff.argtypes = [c_void_p]
isl.isl_pw_aff_read_from_str.restype = c_void_p
isl.isl_pw_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_pw_aff_add.restype = c_void_p
isl.isl_pw_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_pw_aff_pullback_multi_aff.restype = c_void_p
isl.isl_pw_aff_pullback_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_pw_aff_pullback_pw_multi_aff.restype = c_void_p
isl.isl_pw_aff_pullback_pw_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_pw_aff_pullback_multi_pw_aff.restype = c_void_p
isl.isl_pw_aff_pullback_multi_pw_aff.argtypes = [c_void_p, c_void_p]
isl.isl_pw_aff_union_add.restype = c_void_p
isl.isl_pw_aff_union_add.argtypes = [c_void_p, c_void_p]
isl.isl_pw_aff_free.argtypes = [c_void_p]
isl.isl_pw_aff_to_str.argtypes = [c_void_p]
isl.isl_pw_aff_to_str.restype = POINTER(c_char)
class multi_aff(multi_pw_aff, pw_multi_aff):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is aff:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_aff_from_aff(isl.isl_aff_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_multi_aff_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_multi_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_multi_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.multi_aff("""%s""")' % s
else:
return 'isl.multi_aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is multi_aff:
arg0 = multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_aff:
arg1 = multi_aff(arg1)
except:
return multi_pw_aff(arg0).add(arg1)
ctx = arg0.ctx
res = isl.isl_multi_aff_add(isl.isl_multi_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return multi_aff(ctx=ctx, ptr=res)
def flat_range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_aff:
arg0 = multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_aff:
arg1 = multi_aff(arg1)
except:
return multi_pw_aff(arg0).flat_range_product(arg1)
ctx = arg0.ctx
res = isl.isl_multi_aff_flat_range_product(isl.isl_multi_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return multi_aff(ctx=ctx, ptr=res)
def product(arg0, arg1):
try:
if not arg0.__class__ is multi_aff:
arg0 = multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_aff:
arg1 = multi_aff(arg1)
except:
return multi_pw_aff(arg0).product(arg1)
ctx = arg0.ctx
res = isl.isl_multi_aff_product(isl.isl_multi_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return multi_aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is multi_aff:
res = isl.isl_multi_aff_pullback_multi_aff(isl.isl_multi_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return multi_aff(ctx=arg0.ctx, ptr=res)
def range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_aff:
arg0 = multi_aff(arg0)
except:
raise
try:
if not arg1.__class__ is multi_aff:
arg1 = multi_aff(arg1)
except:
return multi_pw_aff(arg0).range_product(arg1)
ctx = arg0.ctx
res = isl.isl_multi_aff_range_product(isl.isl_multi_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return multi_aff(ctx=ctx, ptr=res)
isl.isl_multi_aff_from_aff.restype = c_void_p
isl.isl_multi_aff_from_aff.argtypes = [c_void_p]
isl.isl_multi_aff_read_from_str.restype = c_void_p
isl.isl_multi_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_multi_aff_add.restype = c_void_p
isl.isl_multi_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_multi_aff_flat_range_product.restype = c_void_p
isl.isl_multi_aff_flat_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_aff_product.restype = c_void_p
isl.isl_multi_aff_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_aff_pullback_multi_aff.restype = c_void_p
isl.isl_multi_aff_pullback_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_multi_aff_range_product.restype = c_void_p
isl.isl_multi_aff_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_aff_free.argtypes = [c_void_p]
isl.isl_multi_aff_to_str.argtypes = [c_void_p]
isl.isl_multi_aff_to_str.restype = POINTER(c_char)
class aff(pw_aff, multi_aff):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_aff_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_aff_free(self.ptr)
def __str__(self):
ptr = isl.isl_aff_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.aff("""%s""")' % s
else:
return 'isl.aff("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is aff:
arg0 = aff(arg0)
except:
raise
try:
if not arg1.__class__ is aff:
arg1 = aff(arg1)
except:
return pw_aff(arg0).add(arg1)
ctx = arg0.ctx
res = isl.isl_aff_add(isl.isl_aff_copy(arg0.ptr), isl.isl_aff_copy(arg1.ptr))
return aff(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is multi_aff:
res = isl.isl_aff_pullback_multi_aff(isl.isl_aff_copy(arg0.ptr), isl.isl_multi_aff_copy(arg1.ptr))
return aff(ctx=arg0.ctx, ptr=res)
isl.isl_aff_read_from_str.restype = c_void_p
isl.isl_aff_read_from_str.argtypes = [Context, c_char_p]
isl.isl_aff_add.restype = c_void_p
isl.isl_aff_add.argtypes = [c_void_p, c_void_p]
isl.isl_aff_pullback_multi_aff.restype = c_void_p
isl.isl_aff_pullback_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_aff_free.argtypes = [c_void_p]
isl.isl_aff_to_str.argtypes = [c_void_p]
isl.isl_aff_to_str.restype = POINTER(c_char)
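# "union_map" wraps isl_union_map, a union of relations (maps) that may live
# in different spaces. Instances are built from a basic_map, a map, or an
# isl string. Illustrative use only (not part of the generated file):
#   r = union_map("{ S[i] -> A[i + 1] : 0 <= i < 10 }")
#   r.reverse().domain()
# Note that every method copies its isl arguments (isl_*_copy) because the
# underlying C functions consume the references that are passed to them.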
class union_map:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is basic_map:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_map_from_basic_map(isl.isl_basic_map_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is map:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_map_from_map(isl.isl_map_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_map_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_union_map_free(self.ptr)
def __str__(self):
ptr = isl.isl_union_map_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.union_map("""%s""")' % s
else:
return 'isl.union_map("%s")' % s
def affine_hull(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_affine_hull(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def apply_domain(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_apply_domain(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def apply_range(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_apply_range(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def coalesce(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_coalesce(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def compute_divs(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_compute_divs(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def deltas(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_deltas(isl.isl_union_map_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def detect_equalities(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_detect_equalities(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def domain(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_domain(isl.isl_union_map_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def domain_factor_domain(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_domain_factor_domain(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def domain_factor_range(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_domain_factor_range(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def domain_map(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_domain_map(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def domain_map_union_pw_multi_aff(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_domain_map_union_pw_multi_aff(isl.isl_union_map_copy(arg0.ptr))
return union_pw_multi_aff(ctx=ctx, ptr=res)
def domain_product(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_domain_product(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def factor_domain(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_factor_domain(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def factor_range(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_factor_range(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def fixed_power(arg0, arg1):
if arg1.__class__ is val:
res = isl.isl_union_map_fixed_power_val(isl.isl_union_map_copy(arg0.ptr), isl.isl_val_copy(arg1.ptr))
return union_map(ctx=arg0.ctx, ptr=res)
    def foreach_map(arg0, arg1):
        try:
            if not arg0.__class__ is union_map:
                arg0 = union_map(arg0)
        except:
            raise
        exc_info = [None]
        # Wrap the Python callable in a ctypes callback: every isl_map handle
        # delivered by isl is re-wrapped as a Python map object before the
        # user callback arg1 is invoked.
        fn = CFUNCTYPE(c_int, c_void_p, c_void_p)
        def cb_func(cb_arg0, cb_arg1):
            cb_arg0 = map(ctx=arg0.ctx, ptr=cb_arg0)
            try:
                arg1(cb_arg0)
            except:
                # Remember the exception and tell isl to abort the iteration.
                import sys
                exc_info[0] = sys.exc_info()
                return -1
            return 0
        cb = fn(cb_func)
        ctx = arg0.ctx
        res = isl.isl_union_map_foreach_map(arg0.ptr, cb, None)
        # Re-raise any exception captured inside the callback, preserving the
        # original traceback (Python 2 raise syntax).
        if exc_info[0] != None:
            raise exc_info[0][0], exc_info[0][1], exc_info[0][2]
        return res
@staticmethod
def convert_from(arg0):
if arg0.__class__ is union_pw_multi_aff:
res = isl.isl_union_map_from_union_pw_multi_aff(isl.isl_union_pw_multi_aff_copy(arg0.ptr))
return union_map(ctx=arg0.ctx, ptr=res)
if arg0.__class__ is multi_union_pw_aff:
res = isl.isl_union_map_from_multi_union_pw_aff(isl.isl_multi_union_pw_aff_copy(arg0.ptr))
return union_map(ctx=arg0.ctx, ptr=res)
@staticmethod
def from_domain_and_range(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_from_domain_and_range(isl.isl_union_set_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def gist(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_gist(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def gist_domain(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_gist_domain(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def gist_params(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_gist_params(isl.isl_union_map_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def gist_range(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_gist_range(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def intersect(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_intersect(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def intersect_domain(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_intersect_domain(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def intersect_params(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_intersect_params(isl.isl_union_map_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def intersect_range(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_intersect_range(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def is_bijective(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_is_bijective(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_empty(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_is_empty(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_equal(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_is_equal(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_injective(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_is_injective(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_single_valued(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_is_single_valued(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_strict_subset(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_is_strict_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_subset(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_is_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def lexmax(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_lexmax(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def lexmin(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_lexmin(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def polyhedral_hull(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_polyhedral_hull(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def product(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_product(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def range(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_range(isl.isl_union_map_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def range_factor_domain(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_range_factor_domain(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def range_factor_range(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_range_factor_range(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def range_map(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_range_map(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def range_product(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_range_product(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def reverse(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_reverse(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def subtract(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_subtract(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def subtract_domain(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_subtract_domain(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def subtract_range(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_subtract_range(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def union(arg0, arg1):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_union(isl.isl_union_map_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_map(ctx=ctx, ptr=res)
def wrap(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_wrap(isl.isl_union_map_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def zip(arg0):
try:
if not arg0.__class__ is union_map:
arg0 = union_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_map_zip(isl.isl_union_map_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
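# The restype/argtypes assignments below declare the C signatures of the
# isl_union_map_* entry points to ctypes: isl objects travel as opaque
# c_void_p handles, while the is_* predicates return c_bool.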
isl.isl_union_map_from_basic_map.restype = c_void_p
isl.isl_union_map_from_basic_map.argtypes = [c_void_p]
isl.isl_union_map_from_map.restype = c_void_p
isl.isl_union_map_from_map.argtypes = [c_void_p]
isl.isl_union_map_read_from_str.restype = c_void_p
isl.isl_union_map_read_from_str.argtypes = [Context, c_char_p]
isl.isl_union_map_affine_hull.restype = c_void_p
isl.isl_union_map_affine_hull.argtypes = [c_void_p]
isl.isl_union_map_apply_domain.restype = c_void_p
isl.isl_union_map_apply_domain.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_apply_range.restype = c_void_p
isl.isl_union_map_apply_range.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_coalesce.restype = c_void_p
isl.isl_union_map_coalesce.argtypes = [c_void_p]
isl.isl_union_map_compute_divs.restype = c_void_p
isl.isl_union_map_compute_divs.argtypes = [c_void_p]
isl.isl_union_map_deltas.restype = c_void_p
isl.isl_union_map_deltas.argtypes = [c_void_p]
isl.isl_union_map_detect_equalities.restype = c_void_p
isl.isl_union_map_detect_equalities.argtypes = [c_void_p]
isl.isl_union_map_domain.restype = c_void_p
isl.isl_union_map_domain.argtypes = [c_void_p]
isl.isl_union_map_domain_factor_domain.restype = c_void_p
isl.isl_union_map_domain_factor_domain.argtypes = [c_void_p]
isl.isl_union_map_domain_factor_range.restype = c_void_p
isl.isl_union_map_domain_factor_range.argtypes = [c_void_p]
isl.isl_union_map_domain_map.restype = c_void_p
isl.isl_union_map_domain_map.argtypes = [c_void_p]
isl.isl_union_map_domain_map_union_pw_multi_aff.restype = c_void_p
isl.isl_union_map_domain_map_union_pw_multi_aff.argtypes = [c_void_p]
isl.isl_union_map_domain_product.restype = c_void_p
isl.isl_union_map_domain_product.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_factor_domain.restype = c_void_p
isl.isl_union_map_factor_domain.argtypes = [c_void_p]
isl.isl_union_map_factor_range.restype = c_void_p
isl.isl_union_map_factor_range.argtypes = [c_void_p]
isl.isl_union_map_fixed_power_val.restype = c_void_p
isl.isl_union_map_fixed_power_val.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_foreach_map.argtypes = [c_void_p, c_void_p, c_void_p]
isl.isl_union_map_from_union_pw_multi_aff.restype = c_void_p
isl.isl_union_map_from_union_pw_multi_aff.argtypes = [c_void_p]
isl.isl_union_map_from_multi_union_pw_aff.restype = c_void_p
isl.isl_union_map_from_multi_union_pw_aff.argtypes = [c_void_p]
isl.isl_union_map_from_domain_and_range.restype = c_void_p
isl.isl_union_map_from_domain_and_range.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_gist.restype = c_void_p
isl.isl_union_map_gist.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_gist_domain.restype = c_void_p
isl.isl_union_map_gist_domain.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_gist_params.restype = c_void_p
isl.isl_union_map_gist_params.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_gist_range.restype = c_void_p
isl.isl_union_map_gist_range.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_intersect.restype = c_void_p
isl.isl_union_map_intersect.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_intersect_domain.restype = c_void_p
isl.isl_union_map_intersect_domain.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_intersect_params.restype = c_void_p
isl.isl_union_map_intersect_params.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_intersect_range.restype = c_void_p
isl.isl_union_map_intersect_range.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_is_bijective.restype = c_bool
isl.isl_union_map_is_bijective.argtypes = [c_void_p]
isl.isl_union_map_is_empty.restype = c_bool
isl.isl_union_map_is_empty.argtypes = [c_void_p]
isl.isl_union_map_is_equal.restype = c_bool
isl.isl_union_map_is_equal.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_is_injective.restype = c_bool
isl.isl_union_map_is_injective.argtypes = [c_void_p]
isl.isl_union_map_is_single_valued.restype = c_bool
isl.isl_union_map_is_single_valued.argtypes = [c_void_p]
isl.isl_union_map_is_strict_subset.restype = c_bool
isl.isl_union_map_is_strict_subset.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_is_subset.restype = c_bool
isl.isl_union_map_is_subset.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_lexmax.restype = c_void_p
isl.isl_union_map_lexmax.argtypes = [c_void_p]
isl.isl_union_map_lexmin.restype = c_void_p
isl.isl_union_map_lexmin.argtypes = [c_void_p]
isl.isl_union_map_polyhedral_hull.restype = c_void_p
isl.isl_union_map_polyhedral_hull.argtypes = [c_void_p]
isl.isl_union_map_product.restype = c_void_p
isl.isl_union_map_product.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_range.restype = c_void_p
isl.isl_union_map_range.argtypes = [c_void_p]
isl.isl_union_map_range_factor_domain.restype = c_void_p
isl.isl_union_map_range_factor_domain.argtypes = [c_void_p]
isl.isl_union_map_range_factor_range.restype = c_void_p
isl.isl_union_map_range_factor_range.argtypes = [c_void_p]
isl.isl_union_map_range_map.restype = c_void_p
isl.isl_union_map_range_map.argtypes = [c_void_p]
isl.isl_union_map_range_product.restype = c_void_p
isl.isl_union_map_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_reverse.restype = c_void_p
isl.isl_union_map_reverse.argtypes = [c_void_p]
isl.isl_union_map_subtract.restype = c_void_p
isl.isl_union_map_subtract.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_subtract_domain.restype = c_void_p
isl.isl_union_map_subtract_domain.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_subtract_range.restype = c_void_p
isl.isl_union_map_subtract_range.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_union.restype = c_void_p
isl.isl_union_map_union.argtypes = [c_void_p, c_void_p]
isl.isl_union_map_wrap.restype = c_void_p
isl.isl_union_map_wrap.argtypes = [c_void_p]
isl.isl_union_map_zip.restype = c_void_p
isl.isl_union_map_zip.argtypes = [c_void_p]
isl.isl_union_map_free.argtypes = [c_void_p]
isl.isl_union_map_to_str.argtypes = [c_void_p]
isl.isl_union_map_to_str.restype = POINTER(c_char)
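# "map" wraps isl_map, a binary relation between integer tuples in a single
# pair of spaces, described by affine constraints. It is a subclass of
# union_map; when the second operand of a binary operation cannot be coerced
# to map, the union_map implementation is used instead.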
class map(union_map):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is basic_map:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_map_from_basic_map(isl.isl_basic_map_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_map_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_map_free(self.ptr)
def __str__(self):
ptr = isl.isl_map_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.map("""%s""")' % s
else:
return 'isl.map("%s")' % s
def affine_hull(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_affine_hull(isl.isl_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def apply_domain(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).apply_domain(arg1)
ctx = arg0.ctx
res = isl.isl_map_apply_domain(isl.isl_map_copy(arg0.ptr), isl.isl_map_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def apply_range(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).apply_range(arg1)
ctx = arg0.ctx
res = isl.isl_map_apply_range(isl.isl_map_copy(arg0.ptr), isl.isl_map_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def coalesce(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_coalesce(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def complement(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_complement(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def deltas(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_deltas(isl.isl_map_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def detect_equalities(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_detect_equalities(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def flatten(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_flatten(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def flatten_domain(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_flatten_domain(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def flatten_range(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_flatten_range(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def foreach_basic_map(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
exc_info = [None]
fn = CFUNCTYPE(c_int, c_void_p, c_void_p)
def cb_func(cb_arg0, cb_arg1):
cb_arg0 = basic_map(ctx=arg0.ctx, ptr=cb_arg0)
try:
arg1(cb_arg0)
except:
import sys
exc_info[0] = sys.exc_info()
return -1
return 0
cb = fn(cb_func)
ctx = arg0.ctx
res = isl.isl_map_foreach_basic_map(arg0.ptr, cb, None)
if exc_info[0] != None:
raise exc_info[0][0], exc_info[0][1], exc_info[0][2]
return res
def gist(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).gist(arg1)
ctx = arg0.ctx
res = isl.isl_map_gist(isl.isl_map_copy(arg0.ptr), isl.isl_map_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def gist_domain(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_map(arg0).gist_domain(arg1)
ctx = arg0.ctx
res = isl.isl_map_gist_domain(isl.isl_map_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def intersect(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).intersect(arg1)
ctx = arg0.ctx
res = isl.isl_map_intersect(isl.isl_map_copy(arg0.ptr), isl.isl_map_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def intersect_domain(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_map(arg0).intersect_domain(arg1)
ctx = arg0.ctx
res = isl.isl_map_intersect_domain(isl.isl_map_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def intersect_params(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_map(arg0).intersect_params(arg1)
ctx = arg0.ctx
res = isl.isl_map_intersect_params(isl.isl_map_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def intersect_range(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_map(arg0).intersect_range(arg1)
ctx = arg0.ctx
res = isl.isl_map_intersect_range(isl.isl_map_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def is_bijective(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_is_bijective(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_disjoint(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).is_disjoint(arg1)
ctx = arg0.ctx
res = isl.isl_map_is_disjoint(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_empty(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_is_empty(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_equal(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).is_equal(arg1)
ctx = arg0.ctx
res = isl.isl_map_is_equal(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_injective(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_is_injective(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_single_valued(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_is_single_valued(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_strict_subset(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).is_strict_subset(arg1)
ctx = arg0.ctx
res = isl.isl_map_is_strict_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_subset(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).is_subset(arg1)
ctx = arg0.ctx
res = isl.isl_map_is_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def lexmax(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_lexmax(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def lexmin(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_lexmin(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def polyhedral_hull(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_polyhedral_hull(isl.isl_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def reverse(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_reverse(isl.isl_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def sample(arg0):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_map_sample(isl.isl_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def subtract(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).subtract(arg1)
ctx = arg0.ctx
res = isl.isl_map_subtract(isl.isl_map_copy(arg0.ptr), isl.isl_map_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
def union(arg0, arg1):
try:
if not arg0.__class__ is map:
arg0 = map(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_map(arg0).union(arg1)
ctx = arg0.ctx
res = isl.isl_map_union(isl.isl_map_copy(arg0.ptr), isl.isl_map_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
isl.isl_map_from_basic_map.restype = c_void_p
isl.isl_map_from_basic_map.argtypes = [c_void_p]
isl.isl_map_read_from_str.restype = c_void_p
isl.isl_map_read_from_str.argtypes = [Context, c_char_p]
isl.isl_map_affine_hull.restype = c_void_p
isl.isl_map_affine_hull.argtypes = [c_void_p]
isl.isl_map_apply_domain.restype = c_void_p
isl.isl_map_apply_domain.argtypes = [c_void_p, c_void_p]
isl.isl_map_apply_range.restype = c_void_p
isl.isl_map_apply_range.argtypes = [c_void_p, c_void_p]
isl.isl_map_coalesce.restype = c_void_p
isl.isl_map_coalesce.argtypes = [c_void_p]
isl.isl_map_complement.restype = c_void_p
isl.isl_map_complement.argtypes = [c_void_p]
isl.isl_map_deltas.restype = c_void_p
isl.isl_map_deltas.argtypes = [c_void_p]
isl.isl_map_detect_equalities.restype = c_void_p
isl.isl_map_detect_equalities.argtypes = [c_void_p]
isl.isl_map_flatten.restype = c_void_p
isl.isl_map_flatten.argtypes = [c_void_p]
isl.isl_map_flatten_domain.restype = c_void_p
isl.isl_map_flatten_domain.argtypes = [c_void_p]
isl.isl_map_flatten_range.restype = c_void_p
isl.isl_map_flatten_range.argtypes = [c_void_p]
isl.isl_map_foreach_basic_map.argtypes = [c_void_p, c_void_p, c_void_p]
isl.isl_map_gist.restype = c_void_p
isl.isl_map_gist.argtypes = [c_void_p, c_void_p]
isl.isl_map_gist_domain.restype = c_void_p
isl.isl_map_gist_domain.argtypes = [c_void_p, c_void_p]
isl.isl_map_intersect.restype = c_void_p
isl.isl_map_intersect.argtypes = [c_void_p, c_void_p]
isl.isl_map_intersect_domain.restype = c_void_p
isl.isl_map_intersect_domain.argtypes = [c_void_p, c_void_p]
isl.isl_map_intersect_params.restype = c_void_p
isl.isl_map_intersect_params.argtypes = [c_void_p, c_void_p]
isl.isl_map_intersect_range.restype = c_void_p
isl.isl_map_intersect_range.argtypes = [c_void_p, c_void_p]
isl.isl_map_is_bijective.restype = c_bool
isl.isl_map_is_bijective.argtypes = [c_void_p]
isl.isl_map_is_disjoint.restype = c_bool
isl.isl_map_is_disjoint.argtypes = [c_void_p, c_void_p]
isl.isl_map_is_empty.restype = c_bool
isl.isl_map_is_empty.argtypes = [c_void_p]
isl.isl_map_is_equal.restype = c_bool
isl.isl_map_is_equal.argtypes = [c_void_p, c_void_p]
isl.isl_map_is_injective.restype = c_bool
isl.isl_map_is_injective.argtypes = [c_void_p]
isl.isl_map_is_single_valued.restype = c_bool
isl.isl_map_is_single_valued.argtypes = [c_void_p]
isl.isl_map_is_strict_subset.restype = c_bool
isl.isl_map_is_strict_subset.argtypes = [c_void_p, c_void_p]
isl.isl_map_is_subset.restype = c_bool
isl.isl_map_is_subset.argtypes = [c_void_p, c_void_p]
isl.isl_map_lexmax.restype = c_void_p
isl.isl_map_lexmax.argtypes = [c_void_p]
isl.isl_map_lexmin.restype = c_void_p
isl.isl_map_lexmin.argtypes = [c_void_p]
isl.isl_map_polyhedral_hull.restype = c_void_p
isl.isl_map_polyhedral_hull.argtypes = [c_void_p]
isl.isl_map_reverse.restype = c_void_p
isl.isl_map_reverse.argtypes = [c_void_p]
isl.isl_map_sample.restype = c_void_p
isl.isl_map_sample.argtypes = [c_void_p]
isl.isl_map_subtract.restype = c_void_p
isl.isl_map_subtract.argtypes = [c_void_p, c_void_p]
isl.isl_map_union.restype = c_void_p
isl.isl_map_union.argtypes = [c_void_p, c_void_p]
isl.isl_map_free.argtypes = [c_void_p]
isl.isl_map_to_str.argtypes = [c_void_p]
isl.isl_map_to_str.restype = POINTER(c_char)
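# "basic_map" wraps isl_basic_map, a relation described by one conjunction of
# affine constraints; an isl_map is a finite union of such basic maps.
# Operations fall back to the map implementation when coercion fails.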
class basic_map(map):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_basic_map_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_basic_map_free(self.ptr)
def __str__(self):
ptr = isl.isl_basic_map_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.basic_map("""%s""")' % s
else:
return 'isl.basic_map("%s")' % s
def affine_hull(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_affine_hull(isl.isl_basic_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def apply_domain(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return map(arg0).apply_domain(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_apply_domain(isl.isl_basic_map_copy(arg0.ptr), isl.isl_basic_map_copy(arg1.ptr))
return basic_map(ctx=ctx, ptr=res)
def apply_range(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return map(arg0).apply_range(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_apply_range(isl.isl_basic_map_copy(arg0.ptr), isl.isl_basic_map_copy(arg1.ptr))
return basic_map(ctx=ctx, ptr=res)
def deltas(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_deltas(isl.isl_basic_map_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def detect_equalities(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_detect_equalities(isl.isl_basic_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def flatten(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_flatten(isl.isl_basic_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def flatten_domain(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_flatten_domain(isl.isl_basic_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def flatten_range(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_flatten_range(isl.isl_basic_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def gist(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return map(arg0).gist(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_gist(isl.isl_basic_map_copy(arg0.ptr), isl.isl_basic_map_copy(arg1.ptr))
return basic_map(ctx=ctx, ptr=res)
def intersect(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return map(arg0).intersect(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_intersect(isl.isl_basic_map_copy(arg0.ptr), isl.isl_basic_map_copy(arg1.ptr))
return basic_map(ctx=ctx, ptr=res)
def intersect_domain(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return map(arg0).intersect_domain(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_intersect_domain(isl.isl_basic_map_copy(arg0.ptr), isl.isl_basic_set_copy(arg1.ptr))
return basic_map(ctx=ctx, ptr=res)
def intersect_range(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return map(arg0).intersect_range(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_intersect_range(isl.isl_basic_map_copy(arg0.ptr), isl.isl_basic_set_copy(arg1.ptr))
return basic_map(ctx=ctx, ptr=res)
def is_empty(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_is_empty(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_equal(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return map(arg0).is_equal(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_is_equal(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_subset(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return map(arg0).is_subset(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_is_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def lexmax(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_lexmax(isl.isl_basic_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def lexmin(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_lexmin(isl.isl_basic_map_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def reverse(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_reverse(isl.isl_basic_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def sample(arg0):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_map_sample(isl.isl_basic_map_copy(arg0.ptr))
return basic_map(ctx=ctx, ptr=res)
def union(arg0, arg1):
try:
if not arg0.__class__ is basic_map:
arg0 = basic_map(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return map(arg0).union(arg1)
ctx = arg0.ctx
res = isl.isl_basic_map_union(isl.isl_basic_map_copy(arg0.ptr), isl.isl_basic_map_copy(arg1.ptr))
return map(ctx=ctx, ptr=res)
isl.isl_basic_map_read_from_str.restype = c_void_p
isl.isl_basic_map_read_from_str.argtypes = [Context, c_char_p]
isl.isl_basic_map_affine_hull.restype = c_void_p
isl.isl_basic_map_affine_hull.argtypes = [c_void_p]
isl.isl_basic_map_apply_domain.restype = c_void_p
isl.isl_basic_map_apply_domain.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_apply_range.restype = c_void_p
isl.isl_basic_map_apply_range.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_deltas.restype = c_void_p
isl.isl_basic_map_deltas.argtypes = [c_void_p]
isl.isl_basic_map_detect_equalities.restype = c_void_p
isl.isl_basic_map_detect_equalities.argtypes = [c_void_p]
isl.isl_basic_map_flatten.restype = c_void_p
isl.isl_basic_map_flatten.argtypes = [c_void_p]
isl.isl_basic_map_flatten_domain.restype = c_void_p
isl.isl_basic_map_flatten_domain.argtypes = [c_void_p]
isl.isl_basic_map_flatten_range.restype = c_void_p
isl.isl_basic_map_flatten_range.argtypes = [c_void_p]
isl.isl_basic_map_gist.restype = c_void_p
isl.isl_basic_map_gist.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_intersect.restype = c_void_p
isl.isl_basic_map_intersect.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_intersect_domain.restype = c_void_p
isl.isl_basic_map_intersect_domain.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_intersect_range.restype = c_void_p
isl.isl_basic_map_intersect_range.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_is_empty.restype = c_bool
isl.isl_basic_map_is_empty.argtypes = [c_void_p]
isl.isl_basic_map_is_equal.restype = c_bool
isl.isl_basic_map_is_equal.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_is_subset.restype = c_bool
isl.isl_basic_map_is_subset.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_lexmax.restype = c_void_p
isl.isl_basic_map_lexmax.argtypes = [c_void_p]
isl.isl_basic_map_lexmin.restype = c_void_p
isl.isl_basic_map_lexmin.argtypes = [c_void_p]
isl.isl_basic_map_reverse.restype = c_void_p
isl.isl_basic_map_reverse.argtypes = [c_void_p]
isl.isl_basic_map_sample.restype = c_void_p
isl.isl_basic_map_sample.argtypes = [c_void_p]
isl.isl_basic_map_union.restype = c_void_p
isl.isl_basic_map_union.argtypes = [c_void_p, c_void_p]
isl.isl_basic_map_free.argtypes = [c_void_p]
isl.isl_basic_map_to_str.argtypes = [c_void_p]
isl.isl_basic_map_to_str.restype = POINTER(c_char)
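# "union_set" wraps isl_union_set, a union of sets that may live in different
# spaces. Mirroring union_map, instances are built from a basic_set, a set,
# a point, or an isl string.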
class union_set:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is basic_set:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_set_from_basic_set(isl.isl_basic_set_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is set:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_set_from_set(isl.isl_set_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is point:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_set_from_point(isl.isl_point_copy(args[0].ptr))
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_set_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_union_set_free(self.ptr)
def __str__(self):
ptr = isl.isl_union_set_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.union_set("""%s""")' % s
else:
return 'isl.union_set("%s")' % s
def affine_hull(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_affine_hull(isl.isl_union_set_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def apply(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_apply(isl.isl_union_set_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_set(ctx=ctx, ptr=res)
def coalesce(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_coalesce(isl.isl_union_set_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def compute_divs(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_compute_divs(isl.isl_union_set_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def detect_equalities(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_detect_equalities(isl.isl_union_set_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def foreach_point(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
exc_info = [None]
fn = CFUNCTYPE(c_int, c_void_p, c_void_p)
def cb_func(cb_arg0, cb_arg1):
cb_arg0 = point(ctx=arg0.ctx, ptr=cb_arg0)
try:
arg1(cb_arg0)
except:
import sys
exc_info[0] = sys.exc_info()
return -1
return 0
cb = fn(cb_func)
ctx = arg0.ctx
res = isl.isl_union_set_foreach_point(arg0.ptr, cb, None)
if exc_info[0] != None:
raise exc_info[0][0], exc_info[0][1], exc_info[0][2]
return res
def foreach_set(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
exc_info = [None]
fn = CFUNCTYPE(c_int, c_void_p, c_void_p)
def cb_func(cb_arg0, cb_arg1):
cb_arg0 = set(ctx=arg0.ctx, ptr=cb_arg0)
try:
arg1(cb_arg0)
except:
import sys
exc_info[0] = sys.exc_info()
return -1
return 0
cb = fn(cb_func)
ctx = arg0.ctx
res = isl.isl_union_set_foreach_set(arg0.ptr, cb, None)
if exc_info[0] != None:
raise exc_info[0][0], exc_info[0][1], exc_info[0][2]
return res
def gist(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_gist(isl.isl_union_set_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_set(ctx=ctx, ptr=res)
def gist_params(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_gist_params(isl.isl_union_set_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return union_set(ctx=ctx, ptr=res)
def identity(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_identity(isl.isl_union_set_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
def intersect(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_intersect(isl.isl_union_set_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_set(ctx=ctx, ptr=res)
def intersect_params(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_intersect_params(isl.isl_union_set_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return union_set(ctx=ctx, ptr=res)
def is_empty(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_is_empty(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_equal(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_is_equal(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_strict_subset(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_is_strict_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_subset(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_is_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def lexmax(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_lexmax(isl.isl_union_set_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def lexmin(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_lexmin(isl.isl_union_set_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def polyhedral_hull(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_polyhedral_hull(isl.isl_union_set_copy(arg0.ptr))
return union_set(ctx=ctx, ptr=res)
def sample_point(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_sample_point(isl.isl_union_set_copy(arg0.ptr))
return point(ctx=ctx, ptr=res)
def subtract(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_subtract(isl.isl_union_set_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_set(ctx=ctx, ptr=res)
def union(arg0, arg1):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
try:
if not arg1.__class__ is union_set:
arg1 = union_set(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_union(isl.isl_union_set_copy(arg0.ptr), isl.isl_union_set_copy(arg1.ptr))
return union_set(ctx=ctx, ptr=res)
def unwrap(arg0):
try:
if not arg0.__class__ is union_set:
arg0 = union_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_set_unwrap(isl.isl_union_set_copy(arg0.ptr))
return union_map(ctx=ctx, ptr=res)
isl.isl_union_set_from_basic_set.restype = c_void_p
isl.isl_union_set_from_basic_set.argtypes = [c_void_p]
isl.isl_union_set_from_set.restype = c_void_p
isl.isl_union_set_from_set.argtypes = [c_void_p]
isl.isl_union_set_from_point.restype = c_void_p
isl.isl_union_set_from_point.argtypes = [c_void_p]
isl.isl_union_set_read_from_str.restype = c_void_p
isl.isl_union_set_read_from_str.argtypes = [Context, c_char_p]
isl.isl_union_set_affine_hull.restype = c_void_p
isl.isl_union_set_affine_hull.argtypes = [c_void_p]
isl.isl_union_set_apply.restype = c_void_p
isl.isl_union_set_apply.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_coalesce.restype = c_void_p
isl.isl_union_set_coalesce.argtypes = [c_void_p]
isl.isl_union_set_compute_divs.restype = c_void_p
isl.isl_union_set_compute_divs.argtypes = [c_void_p]
isl.isl_union_set_detect_equalities.restype = c_void_p
isl.isl_union_set_detect_equalities.argtypes = [c_void_p]
isl.isl_union_set_foreach_point.argtypes = [c_void_p, c_void_p, c_void_p]
isl.isl_union_set_foreach_set.argtypes = [c_void_p, c_void_p, c_void_p]
isl.isl_union_set_gist.restype = c_void_p
isl.isl_union_set_gist.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_gist_params.restype = c_void_p
isl.isl_union_set_gist_params.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_identity.restype = c_void_p
isl.isl_union_set_identity.argtypes = [c_void_p]
isl.isl_union_set_intersect.restype = c_void_p
isl.isl_union_set_intersect.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_intersect_params.restype = c_void_p
isl.isl_union_set_intersect_params.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_is_empty.restype = c_bool
isl.isl_union_set_is_empty.argtypes = [c_void_p]
isl.isl_union_set_is_equal.restype = c_bool
isl.isl_union_set_is_equal.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_is_strict_subset.restype = c_bool
isl.isl_union_set_is_strict_subset.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_is_subset.restype = c_bool
isl.isl_union_set_is_subset.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_lexmax.restype = c_void_p
isl.isl_union_set_lexmax.argtypes = [c_void_p]
isl.isl_union_set_lexmin.restype = c_void_p
isl.isl_union_set_lexmin.argtypes = [c_void_p]
isl.isl_union_set_polyhedral_hull.restype = c_void_p
isl.isl_union_set_polyhedral_hull.argtypes = [c_void_p]
isl.isl_union_set_sample_point.restype = c_void_p
isl.isl_union_set_sample_point.argtypes = [c_void_p]
isl.isl_union_set_subtract.restype = c_void_p
isl.isl_union_set_subtract.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_union.restype = c_void_p
isl.isl_union_set_union.argtypes = [c_void_p, c_void_p]
isl.isl_union_set_unwrap.restype = c_void_p
isl.isl_union_set_unwrap.argtypes = [c_void_p]
isl.isl_union_set_free.argtypes = [c_void_p]
isl.isl_union_set_to_str.argtypes = [c_void_p]
isl.isl_union_set_to_str.restype = POINTER(c_char)
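# "set" wraps isl_set, a set of integer tuples in a single space represented
# as a finite union of basic sets. Illustrative use only (not part of the
# generated file):
#   s = set("{ [i] : 0 <= i < 100 }")
#   s.lexmax()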
class set(union_set):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_set_read_from_str(self.ctx, args[0])
return
if len(args) == 1 and args[0].__class__ is basic_set:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_set_from_basic_set(isl.isl_basic_set_copy(args[0].ptr))
return
if len(args) == 1 and args[0].__class__ is point:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_set_from_point(isl.isl_point_copy(args[0].ptr))
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_set_free(self.ptr)
def __str__(self):
ptr = isl.isl_set_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.set("""%s""")' % s
else:
return 'isl.set("%s")' % s
def affine_hull(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_affine_hull(isl.isl_set_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def apply(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is map:
arg1 = map(arg1)
except:
return union_set(arg0).apply(arg1)
ctx = arg0.ctx
res = isl.isl_set_apply(isl.isl_set_copy(arg0.ptr), isl.isl_map_copy(arg1.ptr))
return set(ctx=ctx, ptr=res)
def coalesce(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_coalesce(isl.isl_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def complement(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_complement(isl.isl_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def detect_equalities(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_detect_equalities(isl.isl_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def flatten(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_flatten(isl.isl_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def foreach_basic_set(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
exc_info = [None]
fn = CFUNCTYPE(c_int, c_void_p, c_void_p)
def cb_func(cb_arg0, cb_arg1):
cb_arg0 = basic_set(ctx=arg0.ctx, ptr=cb_arg0)
try:
arg1(cb_arg0)
except:
import sys
exc_info[0] = sys.exc_info()
return -1
return 0
cb = fn(cb_func)
ctx = arg0.ctx
res = isl.isl_set_foreach_basic_set(arg0.ptr, cb, None)
if exc_info[0] != None:
raise exc_info[0][0], exc_info[0][1], exc_info[0][2]
return res
def gist(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).gist(arg1)
ctx = arg0.ctx
res = isl.isl_set_gist(isl.isl_set_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return set(ctx=ctx, ptr=res)
def identity(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_identity(isl.isl_set_copy(arg0.ptr))
return map(ctx=ctx, ptr=res)
def intersect(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).intersect(arg1)
ctx = arg0.ctx
res = isl.isl_set_intersect(isl.isl_set_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return set(ctx=ctx, ptr=res)
def intersect_params(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).intersect_params(arg1)
ctx = arg0.ctx
res = isl.isl_set_intersect_params(isl.isl_set_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return set(ctx=ctx, ptr=res)
def is_disjoint(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).is_disjoint(arg1)
ctx = arg0.ctx
res = isl.isl_set_is_disjoint(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_empty(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_is_empty(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_equal(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).is_equal(arg1)
ctx = arg0.ctx
res = isl.isl_set_is_equal(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_strict_subset(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).is_strict_subset(arg1)
ctx = arg0.ctx
res = isl.isl_set_is_strict_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_subset(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).is_subset(arg1)
ctx = arg0.ctx
res = isl.isl_set_is_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_wrapping(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_is_wrapping(arg0.ptr)
if res < 0:
raise
return bool(res)
def lexmax(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_lexmax(isl.isl_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def lexmin(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_lexmin(isl.isl_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def polyhedral_hull(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_polyhedral_hull(isl.isl_set_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def sample(arg0):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_set_sample(isl.isl_set_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def subtract(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).subtract(arg1)
ctx = arg0.ctx
res = isl.isl_set_subtract(isl.isl_set_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return set(ctx=ctx, ptr=res)
def union(arg0, arg1):
try:
if not arg0.__class__ is set:
arg0 = set(arg0)
except:
raise
try:
if not arg1.__class__ is set:
arg1 = set(arg1)
except:
return union_set(arg0).union(arg1)
ctx = arg0.ctx
res = isl.isl_set_union(isl.isl_set_copy(arg0.ptr), isl.isl_set_copy(arg1.ptr))
return set(ctx=ctx, ptr=res)
isl.isl_set_read_from_str.restype = c_void_p
isl.isl_set_read_from_str.argtypes = [Context, c_char_p]
isl.isl_set_from_basic_set.restype = c_void_p
isl.isl_set_from_basic_set.argtypes = [c_void_p]
isl.isl_set_from_point.restype = c_void_p
isl.isl_set_from_point.argtypes = [c_void_p]
isl.isl_set_affine_hull.restype = c_void_p
isl.isl_set_affine_hull.argtypes = [c_void_p]
isl.isl_set_apply.restype = c_void_p
isl.isl_set_apply.argtypes = [c_void_p, c_void_p]
isl.isl_set_coalesce.restype = c_void_p
isl.isl_set_coalesce.argtypes = [c_void_p]
isl.isl_set_complement.restype = c_void_p
isl.isl_set_complement.argtypes = [c_void_p]
isl.isl_set_detect_equalities.restype = c_void_p
isl.isl_set_detect_equalities.argtypes = [c_void_p]
isl.isl_set_flatten.restype = c_void_p
isl.isl_set_flatten.argtypes = [c_void_p]
isl.isl_set_foreach_basic_set.argtypes = [c_void_p, c_void_p, c_void_p]
isl.isl_set_gist.restype = c_void_p
isl.isl_set_gist.argtypes = [c_void_p, c_void_p]
isl.isl_set_identity.restype = c_void_p
isl.isl_set_identity.argtypes = [c_void_p]
isl.isl_set_intersect.restype = c_void_p
isl.isl_set_intersect.argtypes = [c_void_p, c_void_p]
isl.isl_set_intersect_params.restype = c_void_p
isl.isl_set_intersect_params.argtypes = [c_void_p, c_void_p]
isl.isl_set_is_disjoint.restype = c_bool
isl.isl_set_is_disjoint.argtypes = [c_void_p, c_void_p]
isl.isl_set_is_empty.restype = c_bool
isl.isl_set_is_empty.argtypes = [c_void_p]
isl.isl_set_is_equal.restype = c_bool
isl.isl_set_is_equal.argtypes = [c_void_p, c_void_p]
isl.isl_set_is_strict_subset.restype = c_bool
isl.isl_set_is_strict_subset.argtypes = [c_void_p, c_void_p]
isl.isl_set_is_subset.restype = c_bool
isl.isl_set_is_subset.argtypes = [c_void_p, c_void_p]
isl.isl_set_is_wrapping.restype = c_bool
isl.isl_set_is_wrapping.argtypes = [c_void_p]
isl.isl_set_lexmax.restype = c_void_p
isl.isl_set_lexmax.argtypes = [c_void_p]
isl.isl_set_lexmin.restype = c_void_p
isl.isl_set_lexmin.argtypes = [c_void_p]
isl.isl_set_polyhedral_hull.restype = c_void_p
isl.isl_set_polyhedral_hull.argtypes = [c_void_p]
isl.isl_set_sample.restype = c_void_p
isl.isl_set_sample.argtypes = [c_void_p]
isl.isl_set_subtract.restype = c_void_p
isl.isl_set_subtract.argtypes = [c_void_p, c_void_p]
isl.isl_set_union.restype = c_void_p
isl.isl_set_union.argtypes = [c_void_p, c_void_p]
isl.isl_set_free.argtypes = [c_void_p]
isl.isl_set_to_str.argtypes = [c_void_p]
isl.isl_set_to_str.restype = POINTER(c_char)
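# Illustrative usage sketch (not part of the generated bindings).  Assumes the
# import above found libisl; it uses only methods of the 'set' wrapper defined
# above, with set descriptions in the usual isl string notation.
def _example_set_usage():
    s1 = set("{ [i] : 0 <= i < 10 }")
    s2 = set("{ [i] : 5 <= i < 20 }")
    both = s1.intersect(s2)             # { [i] : 5 <= i <= 9 }
    assert both.is_subset(s1)
    assert not s1.union(s2).is_empty()
    # Visit the convex pieces of a (possibly non-convex) set.
    s1.subtract(s2).foreach_basic_set(lambda bs: None)
    return both.coalesce()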
class basic_set(set):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_basic_set_read_from_str(self.ctx, args[0])
return
if len(args) == 1 and args[0].__class__ is point:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_basic_set_from_point(isl.isl_point_copy(args[0].ptr))
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_basic_set_free(self.ptr)
def __str__(self):
ptr = isl.isl_basic_set_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.basic_set("""%s""")' % s
else:
return 'isl.basic_set("%s")' % s
def affine_hull(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_affine_hull(isl.isl_basic_set_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def apply(arg0, arg1):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
try:
if not arg1.__class__ is basic_map:
arg1 = basic_map(arg1)
except:
return set(arg0).apply(arg1)
ctx = arg0.ctx
res = isl.isl_basic_set_apply(isl.isl_basic_set_copy(arg0.ptr), isl.isl_basic_map_copy(arg1.ptr))
return basic_set(ctx=ctx, ptr=res)
def detect_equalities(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_detect_equalities(isl.isl_basic_set_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def flatten(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_flatten(isl.isl_basic_set_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def gist(arg0, arg1):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return set(arg0).gist(arg1)
ctx = arg0.ctx
res = isl.isl_basic_set_gist(isl.isl_basic_set_copy(arg0.ptr), isl.isl_basic_set_copy(arg1.ptr))
return basic_set(ctx=ctx, ptr=res)
def intersect(arg0, arg1):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return set(arg0).intersect(arg1)
ctx = arg0.ctx
res = isl.isl_basic_set_intersect(isl.isl_basic_set_copy(arg0.ptr), isl.isl_basic_set_copy(arg1.ptr))
return basic_set(ctx=ctx, ptr=res)
def intersect_params(arg0, arg1):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return set(arg0).intersect_params(arg1)
ctx = arg0.ctx
res = isl.isl_basic_set_intersect_params(isl.isl_basic_set_copy(arg0.ptr), isl.isl_basic_set_copy(arg1.ptr))
return basic_set(ctx=ctx, ptr=res)
def is_empty(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_is_empty(arg0.ptr)
if res < 0:
raise
return bool(res)
def is_equal(arg0, arg1):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return set(arg0).is_equal(arg1)
ctx = arg0.ctx
res = isl.isl_basic_set_is_equal(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_subset(arg0, arg1):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return set(arg0).is_subset(arg1)
ctx = arg0.ctx
res = isl.isl_basic_set_is_subset(arg0.ptr, arg1.ptr)
if res < 0:
raise
return bool(res)
def is_wrapping(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_is_wrapping(arg0.ptr)
if res < 0:
raise
return bool(res)
def lexmax(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_lexmax(isl.isl_basic_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def lexmin(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_lexmin(isl.isl_basic_set_copy(arg0.ptr))
return set(ctx=ctx, ptr=res)
def sample(arg0):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_basic_set_sample(isl.isl_basic_set_copy(arg0.ptr))
return basic_set(ctx=ctx, ptr=res)
def union(arg0, arg1):
try:
if not arg0.__class__ is basic_set:
arg0 = basic_set(arg0)
except:
raise
try:
if not arg1.__class__ is basic_set:
arg1 = basic_set(arg1)
except:
return set(arg0).union(arg1)
ctx = arg0.ctx
res = isl.isl_basic_set_union(isl.isl_basic_set_copy(arg0.ptr), isl.isl_basic_set_copy(arg1.ptr))
return set(ctx=ctx, ptr=res)
isl.isl_basic_set_read_from_str.restype = c_void_p
isl.isl_basic_set_read_from_str.argtypes = [Context, c_char_p]
isl.isl_basic_set_from_point.restype = c_void_p
isl.isl_basic_set_from_point.argtypes = [c_void_p]
isl.isl_basic_set_affine_hull.restype = c_void_p
isl.isl_basic_set_affine_hull.argtypes = [c_void_p]
isl.isl_basic_set_apply.restype = c_void_p
isl.isl_basic_set_apply.argtypes = [c_void_p, c_void_p]
isl.isl_basic_set_detect_equalities.restype = c_void_p
isl.isl_basic_set_detect_equalities.argtypes = [c_void_p]
isl.isl_basic_set_flatten.restype = c_void_p
isl.isl_basic_set_flatten.argtypes = [c_void_p]
isl.isl_basic_set_gist.restype = c_void_p
isl.isl_basic_set_gist.argtypes = [c_void_p, c_void_p]
isl.isl_basic_set_intersect.restype = c_void_p
isl.isl_basic_set_intersect.argtypes = [c_void_p, c_void_p]
isl.isl_basic_set_intersect_params.restype = c_void_p
isl.isl_basic_set_intersect_params.argtypes = [c_void_p, c_void_p]
isl.isl_basic_set_is_empty.restype = c_bool
isl.isl_basic_set_is_empty.argtypes = [c_void_p]
isl.isl_basic_set_is_equal.restype = c_bool
isl.isl_basic_set_is_equal.argtypes = [c_void_p, c_void_p]
isl.isl_basic_set_is_subset.restype = c_bool
isl.isl_basic_set_is_subset.argtypes = [c_void_p, c_void_p]
isl.isl_basic_set_is_wrapping.restype = c_bool
isl.isl_basic_set_is_wrapping.argtypes = [c_void_p]
isl.isl_basic_set_lexmax.restype = c_void_p
isl.isl_basic_set_lexmax.argtypes = [c_void_p]
isl.isl_basic_set_lexmin.restype = c_void_p
isl.isl_basic_set_lexmin.argtypes = [c_void_p]
isl.isl_basic_set_sample.restype = c_void_p
isl.isl_basic_set_sample.argtypes = [c_void_p]
isl.isl_basic_set_union.restype = c_void_p
isl.isl_basic_set_union.argtypes = [c_void_p, c_void_p]
isl.isl_basic_set_free.argtypes = [c_void_p]
isl.isl_basic_set_to_str.argtypes = [c_void_p]
isl.isl_basic_set_to_str.restype = POINTER(c_char)
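# Illustrative sketch only: 'basic_set' is the convex special case of 'set';
# operations that may leave the convex fragment (e.g. union) return a plain
# 'set'.  Assumes libisl was loaded by the import above.
def _example_basic_set_usage():
    b1 = basic_set("{ [i, j] : 0 <= i and i <= j and j <= 100 }")
    b2 = basic_set("{ [i, j] : i >= 10 }")
    inter = b1.intersect(b2)            # intersection of convex sets stays convex
    assert inter.is_subset(b1)
    united = b1.union(b2)               # may be non-convex, hence a plain 'set'
    return united.coalesce()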
class multi_val:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_multi_val_free(self.ptr)
def __str__(self):
ptr = isl.isl_multi_val_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.multi_val("""%s""")' % s
else:
return 'isl.multi_val("%s")' % s
def add(arg0, arg1):
try:
if not arg0.__class__ is multi_val:
arg0 = multi_val(arg0)
except:
raise
try:
if not arg1.__class__ is multi_val:
arg1 = multi_val(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_val_add(isl.isl_multi_val_copy(arg0.ptr), isl.isl_multi_val_copy(arg1.ptr))
return multi_val(ctx=ctx, ptr=res)
def flat_range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_val:
arg0 = multi_val(arg0)
except:
raise
try:
if not arg1.__class__ is multi_val:
arg1 = multi_val(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_val_flat_range_product(isl.isl_multi_val_copy(arg0.ptr), isl.isl_multi_val_copy(arg1.ptr))
return multi_val(ctx=ctx, ptr=res)
def product(arg0, arg1):
try:
if not arg0.__class__ is multi_val:
arg0 = multi_val(arg0)
except:
raise
try:
if not arg1.__class__ is multi_val:
arg1 = multi_val(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_val_product(isl.isl_multi_val_copy(arg0.ptr), isl.isl_multi_val_copy(arg1.ptr))
return multi_val(ctx=ctx, ptr=res)
def range_product(arg0, arg1):
try:
if not arg0.__class__ is multi_val:
arg0 = multi_val(arg0)
except:
raise
try:
if not arg1.__class__ is multi_val:
arg1 = multi_val(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_multi_val_range_product(isl.isl_multi_val_copy(arg0.ptr), isl.isl_multi_val_copy(arg1.ptr))
return multi_val(ctx=ctx, ptr=res)
isl.isl_multi_val_add.restype = c_void_p
isl.isl_multi_val_add.argtypes = [c_void_p, c_void_p]
isl.isl_multi_val_flat_range_product.restype = c_void_p
isl.isl_multi_val_flat_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_val_product.restype = c_void_p
isl.isl_multi_val_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_val_range_product.restype = c_void_p
isl.isl_multi_val_range_product.argtypes = [c_void_p, c_void_p]
isl.isl_multi_val_free.argtypes = [c_void_p]
isl.isl_multi_val_to_str.argtypes = [c_void_p]
isl.isl_multi_val_to_str.restype = POINTER(c_char)
class point(basic_set):
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_point_free(self.ptr)
def __str__(self):
ptr = isl.isl_point_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.point("""%s""")' % s
else:
return 'isl.point("%s")' % s
isl.isl_point_free.argtypes = [c_void_p]
isl.isl_point_to_str.argtypes = [c_void_p]
isl.isl_point_to_str.restype = POINTER(c_char)
class schedule:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_schedule_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_schedule_free(self.ptr)
def __str__(self):
ptr = isl.isl_schedule_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.schedule("""%s""")' % s
else:
return 'isl.schedule("%s")' % s
def get_map(arg0):
try:
if not arg0.__class__ is schedule:
arg0 = schedule(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_get_map(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
def get_root(arg0):
try:
if not arg0.__class__ is schedule:
arg0 = schedule(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_get_root(arg0.ptr)
return schedule_node(ctx=ctx, ptr=res)
def pullback(arg0, arg1):
if arg1.__class__ is union_pw_multi_aff:
res = isl.isl_schedule_pullback_union_pw_multi_aff(isl.isl_schedule_copy(arg0.ptr), isl.isl_union_pw_multi_aff_copy(arg1.ptr))
return schedule(ctx=arg0.ctx, ptr=res)
isl.isl_schedule_read_from_str.restype = c_void_p
isl.isl_schedule_read_from_str.argtypes = [Context, c_char_p]
isl.isl_schedule_get_map.restype = c_void_p
isl.isl_schedule_get_map.argtypes = [c_void_p]
isl.isl_schedule_get_root.restype = c_void_p
isl.isl_schedule_get_root.argtypes = [c_void_p]
isl.isl_schedule_pullback_union_pw_multi_aff.restype = c_void_p
isl.isl_schedule_pullback_union_pw_multi_aff.argtypes = [c_void_p, c_void_p]
isl.isl_schedule_free.argtypes = [c_void_p]
isl.isl_schedule_to_str.argtypes = [c_void_p]
isl.isl_schedule_to_str.restype = POINTER(c_char)
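# Illustrative sketch only: a schedule is parsed from its string representation
# and can be queried for the flattened schedule map or for the root of its
# schedule tree.  The schedule string is left as a parameter here rather than
# invented.
def _example_schedule_usage(schedule_str):
    s = schedule(schedule_str)
    flat = s.get_map()                  # union_map view of the whole schedule
    root = s.get_root()                 # schedule_node at the root of the tree
    return flat, root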
class schedule_node:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_schedule_node_free(self.ptr)
def __str__(self):
ptr = isl.isl_schedule_node_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.schedule_node("""%s""")' % s
else:
return 'isl.schedule_node("%s")' % s
def band_member_get_coincident(arg0, arg1):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_band_member_get_coincident(arg0.ptr, arg1)
if res < 0:
raise
return bool(res)
def band_member_set_coincident(arg0, arg1, arg2):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_band_member_set_coincident(isl.isl_schedule_node_copy(arg0.ptr), arg1, arg2)
return schedule_node(ctx=ctx, ptr=res)
def child(arg0, arg1):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_child(isl.isl_schedule_node_copy(arg0.ptr), arg1)
return schedule_node(ctx=ctx, ptr=res)
def get_prefix_schedule_multi_union_pw_aff(arg0):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_get_prefix_schedule_multi_union_pw_aff(arg0.ptr)
return multi_union_pw_aff(ctx=ctx, ptr=res)
def get_prefix_schedule_union_map(arg0):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_get_prefix_schedule_union_map(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
def get_prefix_schedule_union_pw_multi_aff(arg0):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_get_prefix_schedule_union_pw_multi_aff(arg0.ptr)
return union_pw_multi_aff(ctx=ctx, ptr=res)
def get_schedule(arg0):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_get_schedule(arg0.ptr)
return schedule(ctx=ctx, ptr=res)
def parent(arg0):
try:
if not arg0.__class__ is schedule_node:
arg0 = schedule_node(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_schedule_node_parent(isl.isl_schedule_node_copy(arg0.ptr))
return schedule_node(ctx=ctx, ptr=res)
isl.isl_schedule_node_band_member_get_coincident.restype = c_bool
isl.isl_schedule_node_band_member_get_coincident.argtypes = [c_void_p, c_int]
isl.isl_schedule_node_band_member_set_coincident.restype = c_void_p
isl.isl_schedule_node_band_member_set_coincident.argtypes = [c_void_p, c_int, c_int]
isl.isl_schedule_node_child.restype = c_void_p
isl.isl_schedule_node_child.argtypes = [c_void_p, c_int]
isl.isl_schedule_node_get_prefix_schedule_multi_union_pw_aff.restype = c_void_p
isl.isl_schedule_node_get_prefix_schedule_multi_union_pw_aff.argtypes = [c_void_p]
isl.isl_schedule_node_get_prefix_schedule_union_map.restype = c_void_p
isl.isl_schedule_node_get_prefix_schedule_union_map.argtypes = [c_void_p]
isl.isl_schedule_node_get_prefix_schedule_union_pw_multi_aff.restype = c_void_p
isl.isl_schedule_node_get_prefix_schedule_union_pw_multi_aff.argtypes = [c_void_p]
isl.isl_schedule_node_get_schedule.restype = c_void_p
isl.isl_schedule_node_get_schedule.argtypes = [c_void_p]
isl.isl_schedule_node_parent.restype = c_void_p
isl.isl_schedule_node_parent.argtypes = [c_void_p]
isl.isl_schedule_node_free.argtypes = [c_void_p]
isl.isl_schedule_node_to_str.argtypes = [c_void_p]
isl.isl_schedule_node_to_str.restype = POINTER(c_char)
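# Illustrative sketch only: schedule_node objects are reached from a schedule
# rather than constructed directly; child()/parent() navigate the tree and the
# prefix schedule of a node can be read back as a union_map.
def _example_schedule_node_usage(s):
    node = s.get_root().child(0)        # first child of the root node
    prefix = node.get_prefix_schedule_union_map()
    return prefix, node.parent().get_schedule()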
class union_access_info:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and args[0].__class__ is union_map:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_union_access_info_from_sink(isl.isl_union_map_copy(args[0].ptr))
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_union_access_info_free(self.ptr)
def __str__(self):
ptr = isl.isl_union_access_info_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.union_access_info("""%s""")' % s
else:
return 'isl.union_access_info("%s")' % s
def compute_flow(arg0):
try:
if not arg0.__class__ is union_access_info:
arg0 = union_access_info(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_access_info_compute_flow(isl.isl_union_access_info_copy(arg0.ptr))
return union_flow(ctx=ctx, ptr=res)
def set_may_source(arg0, arg1):
try:
if not arg0.__class__ is union_access_info:
arg0 = union_access_info(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_access_info_set_may_source(isl.isl_union_access_info_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_access_info(ctx=ctx, ptr=res)
def set_must_source(arg0, arg1):
try:
if not arg0.__class__ is union_access_info:
arg0 = union_access_info(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_access_info_set_must_source(isl.isl_union_access_info_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_access_info(ctx=ctx, ptr=res)
def set_schedule(arg0, arg1):
try:
if not arg0.__class__ is union_access_info:
arg0 = union_access_info(arg0)
except:
raise
try:
if not arg1.__class__ is schedule:
arg1 = schedule(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_access_info_set_schedule(isl.isl_union_access_info_copy(arg0.ptr), isl.isl_schedule_copy(arg1.ptr))
return union_access_info(ctx=ctx, ptr=res)
def set_schedule_map(arg0, arg1):
try:
if not arg0.__class__ is union_access_info:
arg0 = union_access_info(arg0)
except:
raise
try:
if not arg1.__class__ is union_map:
arg1 = union_map(arg1)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_access_info_set_schedule_map(isl.isl_union_access_info_copy(arg0.ptr), isl.isl_union_map_copy(arg1.ptr))
return union_access_info(ctx=ctx, ptr=res)
isl.isl_union_access_info_from_sink.restype = c_void_p
isl.isl_union_access_info_from_sink.argtypes = [c_void_p]
isl.isl_union_access_info_compute_flow.restype = c_void_p
isl.isl_union_access_info_compute_flow.argtypes = [c_void_p]
isl.isl_union_access_info_set_may_source.restype = c_void_p
isl.isl_union_access_info_set_may_source.argtypes = [c_void_p, c_void_p]
isl.isl_union_access_info_set_must_source.restype = c_void_p
isl.isl_union_access_info_set_must_source.argtypes = [c_void_p, c_void_p]
isl.isl_union_access_info_set_schedule.restype = c_void_p
isl.isl_union_access_info_set_schedule.argtypes = [c_void_p, c_void_p]
isl.isl_union_access_info_set_schedule_map.restype = c_void_p
isl.isl_union_access_info_set_schedule_map.argtypes = [c_void_p, c_void_p]
isl.isl_union_access_info_free.argtypes = [c_void_p]
isl.isl_union_access_info_to_str.argtypes = [c_void_p]
isl.isl_union_access_info_to_str.restype = POINTER(c_char)
class union_flow:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_union_flow_free(self.ptr)
def __str__(self):
ptr = isl.isl_union_flow_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.union_flow("""%s""")' % s
else:
return 'isl.union_flow("%s")' % s
def get_full_may_dependence(arg0):
try:
if not arg0.__class__ is union_flow:
arg0 = union_flow(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_flow_get_full_may_dependence(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
def get_full_must_dependence(arg0):
try:
if not arg0.__class__ is union_flow:
arg0 = union_flow(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_flow_get_full_must_dependence(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
def get_may_dependence(arg0):
try:
if not arg0.__class__ is union_flow:
arg0 = union_flow(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_flow_get_may_dependence(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
def get_may_no_source(arg0):
try:
if not arg0.__class__ is union_flow:
arg0 = union_flow(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_flow_get_may_no_source(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
def get_must_dependence(arg0):
try:
if not arg0.__class__ is union_flow:
arg0 = union_flow(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_flow_get_must_dependence(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
def get_must_no_source(arg0):
try:
if not arg0.__class__ is union_flow:
arg0 = union_flow(arg0)
except:
raise
ctx = arg0.ctx
res = isl.isl_union_flow_get_must_no_source(arg0.ptr)
return union_map(ctx=ctx, ptr=res)
isl.isl_union_flow_get_full_may_dependence.restype = c_void_p
isl.isl_union_flow_get_full_may_dependence.argtypes = [c_void_p]
isl.isl_union_flow_get_full_must_dependence.restype = c_void_p
isl.isl_union_flow_get_full_must_dependence.argtypes = [c_void_p]
isl.isl_union_flow_get_may_dependence.restype = c_void_p
isl.isl_union_flow_get_may_dependence.argtypes = [c_void_p]
isl.isl_union_flow_get_may_no_source.restype = c_void_p
isl.isl_union_flow_get_may_no_source.argtypes = [c_void_p]
isl.isl_union_flow_get_must_dependence.restype = c_void_p
isl.isl_union_flow_get_must_dependence.argtypes = [c_void_p]
isl.isl_union_flow_get_must_no_source.restype = c_void_p
isl.isl_union_flow_get_must_no_source.argtypes = [c_void_p]
isl.isl_union_flow_free.argtypes = [c_void_p]
isl.isl_union_flow_to_str.argtypes = [c_void_p]
isl.isl_union_flow_to_str.restype = POINTER(c_char)
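# Illustrative sketch only: the usual isl dataflow recipe.  A union_access_info
# is seeded with the sink accesses, given source accesses and a schedule, and
# compute_flow() produces a union_flow holding the dependence relations.  The
# union_map/schedule arguments are assumed to be built elsewhere (the union_map
# class is defined earlier in this module).
def _example_dataflow(sink, must_source, sched):
    info = union_access_info(sink)      # sink: a union_map of read accesses
    info = info.set_must_source(must_source)
    info = info.set_schedule(sched)     # sched: an isl schedule
    flow = info.compute_flow()
    return flow.get_must_dependence(), flow.get_may_no_source()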
class val:
def __init__(self, *args, **keywords):
if "ptr" in keywords:
self.ctx = keywords["ctx"]
self.ptr = keywords["ptr"]
return
if len(args) == 1 and type(args[0]) == int:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_val_int_from_si(self.ctx, args[0])
return
if len(args) == 1 and type(args[0]) == str:
self.ctx = Context.getDefaultInstance()
self.ptr = isl.isl_val_read_from_str(self.ctx, args[0])
return
raise Error
def __del__(self):
if hasattr(self, 'ptr'):
isl.isl_val_free(self.ptr)
def __str__(self):
ptr = isl.isl_val_to_str(self.ptr)
res = str(cast(ptr, c_char_p).value)
libc.free(ptr)
return res
def __repr__(self):
s = str(self)
if '"' in s:
return 'isl.val("""%s""")' % s
else:
return 'isl.val("%s")' % s
@staticmethod
def infty():
ctx = Context.getDefaultInstance()
res = isl.isl_val_infty(ctx)
return val(ctx=ctx, ptr=res)
@staticmethod
def nan():
ctx = Context.getDefaultInstance()
res = isl.isl_val_nan(ctx)
return val(ctx=ctx, ptr=res)
@staticmethod
def neginfty():
ctx = Context.getDefaultInstance()
res = isl.isl_val_neginfty(ctx)
return val(ctx=ctx, ptr=res)
@staticmethod
def negone():
ctx = Context.getDefaultInstance()
res = isl.isl_val_negone(ctx)
return val(ctx=ctx, ptr=res)
@staticmethod
def one():
ctx = Context.getDefaultInstance()
res = isl.isl_val_one(ctx)
return val(ctx=ctx, ptr=res)
@staticmethod
def zero():
ctx = Context.getDefaultInstance()
res = isl.isl_val_zero(ctx)
return val(ctx=ctx, ptr=res)
isl.isl_val_int_from_si.restype = c_void_p
isl.isl_val_int_from_si.argtypes = [Context, c_int]
isl.isl_val_read_from_str.restype = c_void_p
isl.isl_val_read_from_str.argtypes = [Context, c_char_p]
isl.isl_val_infty.restype = c_void_p
isl.isl_val_infty.argtypes = [Context]
isl.isl_val_nan.restype = c_void_p
isl.isl_val_nan.argtypes = [Context]
isl.isl_val_neginfty.restype = c_void_p
isl.isl_val_neginfty.argtypes = [Context]
isl.isl_val_negone.restype = c_void_p
isl.isl_val_negone.argtypes = [Context]
isl.isl_val_one.restype = c_void_p
isl.isl_val_one.argtypes = [Context]
isl.isl_val_zero.restype = c_void_p
isl.isl_val_zero.argtypes = [Context]
isl.isl_val_free.argtypes = [c_void_p]
isl.isl_val_to_str.argtypes = [c_void_p]
isl.isl_val_to_str.restype = POINTER(c_char)
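# Illustrative sketch only: values are built from Python integers, from isl's
# string notation (including exact rationals), or via the static constructors
# above.
def _example_val_usage():
    three = val(3)                      # isl_val_int_from_si
    half = val("1/2")                   # isl_val_read_from_str
    return three, half, val.zero(), (val.neginfty(), val.infty())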
| 36.851606
| 158
| 0.622139
|
2fa9b118e1045c8d2fc982de1e7200257ff83a6d
| 64
|
py
|
Python
|
retina/model/anchors/registry.py
|
mike112223/retina
|
cdad3af8240471619f42e9edd1caf68a0241bea6
|
[
"Apache-2.0"
] | null | null | null |
retina/model/anchors/registry.py
|
mike112223/retina
|
cdad3af8240471619f42e9edd1caf68a0241bea6
|
[
"Apache-2.0"
] | null | null | null |
retina/model/anchors/registry.py
|
mike112223/retina
|
cdad3af8240471619f42e9edd1caf68a0241bea6
|
[
"Apache-2.0"
] | null | null | null |
from retina.utils import Registry
ANCHORS = Registry('anchor')
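# Illustrative sketch only (assumption -- not verified against retina.utils):
# registries of this style are typically used as decorators that record a class
# under its name so it can later be instantiated from a config dict, roughly:
#
#   @ANCHORS.register_module
#   class AnchorGenerator(object):
#       ...
#
# The exact decorator/builder API depends on retina.utils.Registry.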
| 16
| 33
| 0.78125
|
bd1016cc112fad9959012aa016b2de2358b8b4f3
| 62
|
py
|
Python
|
packages/pytea/pylib/torch/nn/__init__.py
|
lego0901/pytea
|
8ede650def2e68f4610ba816451d8b9e28f09f76
|
[
"MIT"
] | null | null | null |
packages/pytea/pylib/torch/nn/__init__.py
|
lego0901/pytea
|
8ede650def2e68f4610ba816451d8b9e28f09f76
|
[
"MIT"
] | null | null | null |
packages/pytea/pylib/torch/nn/__init__.py
|
lego0901/pytea
|
8ede650def2e68f4610ba816451d8b9e28f09f76
|
[
"MIT"
] | null | null | null |
from .modules import *
from . import functional as functional
| 20.666667
| 38
| 0.790323
|
dabee702aa5d24282ce7625b6101221d7e53604a
| 617
|
py
|
Python
|
apps/sucursales/migrations/0001_initial.py
|
Kingarturs/mercedez-crm
|
882c0bb4c011a41e82c203ebd1e0b36dc075dfd7
|
[
"Apache-2.0"
] | null | null | null |
apps/sucursales/migrations/0001_initial.py
|
Kingarturs/mercedez-crm
|
882c0bb4c011a41e82c203ebd1e0b36dc075dfd7
|
[
"Apache-2.0"
] | null | null | null |
apps/sucursales/migrations/0001_initial.py
|
Kingarturs/mercedez-crm
|
882c0bb4c011a41e82c203ebd1e0b36dc075dfd7
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-05-04 04:32
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Sucursal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=80)),
('direccion', models.CharField(max_length=120)),
('telefono', models.CharField(max_length=20)),
],
),
]
| 25.708333
| 114
| 0.573744
|
5a8006462c89f1d2fe5ec2c8c7438e38fa9fbbc5
| 3,397
|
py
|
Python
|
satflow/baseline/optical_flow.py
|
j0shmillar/satflow
|
a0ebe53565845da5b3eac51713f32eede70b42ba
|
[
"MIT"
] | null | null | null |
satflow/baseline/optical_flow.py
|
j0shmillar/satflow
|
a0ebe53565845da5b3eac51713f32eede70b42ba
|
[
"MIT"
] | null | null | null |
satflow/baseline/optical_flow.py
|
j0shmillar/satflow
|
a0ebe53565845da5b3eac51713f32eede70b42ba
|
[
"MIT"
] | null | null | null |
import cv2
import numpy as np
import torch.nn.functional as F
import webdataset as wds
import yaml
from satflow.data.datasets import OpticalFlowDataset, SatFlowDataset
def load_config(config_file):
with open(config_file, "r") as cfg:
return yaml.load(cfg, Loader=yaml.FullLoader)["config"]
config = load_config("/satflow/configs/datamodule/optical_flow.yaml")
dset = wds.WebDataset("/run/media/jacob/data/satflow-flow-144-tiled-{00001..00149}.tar")
dataset = SatFlowDataset([dset], config=config)
import matplotlib.pyplot as plt
import torch
def warp_flow(img, flow):
    # Negate the flow and add the pixel grid so the per-pixel offsets become
    # the absolute coordinates that cv2.remap() samples from.
h, w = flow.shape[:2]
flow = -flow
flow[:, :, 0] += np.arange(w)
flow[:, :, 1] += np.arange(h)[:, np.newaxis]
res = cv2.remap(img, flow, None, cv2.INTER_LINEAR)
return res
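def _example_flow_roundtrip():
    # Illustrative sketch only (not used by the baseline below): estimate dense
    # Farneback flow between two synthetic frames and warp one of them with it,
    # mirroring what the loop further down does per channel.
    prev = (np.random.rand(64, 64) * 255).astype(np.uint8)
    curr = (np.random.rand(64, 64) * 255).astype(np.uint8)
    flow = cv2.calcOpticalFlowFarneback(prev, curr, None, 0.5, 3, 15, 3, 5, 1.2, 0)
    return warp_flow(curr.astype(np.float32), flow)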
debug = False
total_losses = np.array([0.0 for _ in range(48)]) # Want to break down loss by future timestep
channel_total_losses = np.array([total_losses for _ in range(12)])
count = 0
baseline_losses = np.array([0.0 for _ in range(48)]) # Want to break down loss by future timestep
channel_baseline_losses = np.array([baseline_losses for _ in range(12)])
for data in dataset:
tmp_loss = 0
tmp_base = 0
count += 1
past_frames, next_frames = data
prev_frame = past_frames[1]
curr_frame = past_frames[0]
# Do it for each of the 12 channels
for ch in range(12):
# prev_frame = np.moveaxis(prev_frame, [0], [2])
# curr_frame = np.moveaxis(curr_frame, [0], [2])
flow = cv2.calcOpticalFlowFarneback(
past_frames[1][ch], past_frames[0][ch], None, 0.5, 3, 15, 3, 5, 1.2, 0
)
warped_frame = warp_flow(curr_frame[ch].astype(np.float32), flow)
warped_frame = np.expand_dims(warped_frame, axis=-1)
loss = F.mse_loss(
torch.from_numpy(warped_frame),
torch.from_numpy(np.expand_dims(next_frames[0][ch], axis=-1)),
)
channel_total_losses[ch][0] += loss.item()
loss = F.mse_loss(
torch.from_numpy(curr_frame[ch].astype(np.float32)),
torch.from_numpy(next_frames[0][ch]),
)
channel_baseline_losses[ch][0] += loss.item()
for i in range(1, 48):
warped_frame = warp_flow(warped_frame.astype(np.float32), flow)
warped_frame = np.expand_dims(warped_frame, axis=-1)
loss = F.mse_loss(
torch.from_numpy(warped_frame),
torch.from_numpy(np.expand_dims(next_frames[i][ch], axis=-1)),
)
channel_total_losses[ch][i] += loss.item()
tmp_loss += loss.item()
loss = F.mse_loss(
torch.from_numpy(curr_frame[ch].astype(np.float32)),
torch.from_numpy(next_frames[i][ch]),
)
channel_baseline_losses[ch][i] += loss.item()
print(
f"Avg Total Loss: {np.mean(channel_total_losses) / count} Avg Baseline Loss: {np.mean(channel_baseline_losses) / count}"
)
if count % 100 == 0:
np.save("optical_flow_mse_loss_channels_reverse.npy", channel_total_losses / count)
np.save(
"baseline_current_image_mse_loss_channels_reverse.npy", channel_baseline_losses / count
)
np.save("optical_flow_mse_loss_reverse.npy", channel_total_losses / count)
np.save("baseline_current_image_mse_loss_reverse.npy", channel_baseline_losses / count)
| 37.32967
| 128
| 0.648808
|
8417b3d8d2a7109b51c7fc502106fc2cfb6af430
| 142
|
py
|
Python
|
sudoku/main.py
|
Lancea12/sudoku_solver
|
160fbd0f12c3ee52c31ef07249a6811000e03843
|
[
"MIT"
] | null | null | null |
sudoku/main.py
|
Lancea12/sudoku_solver
|
160fbd0f12c3ee52c31ef07249a6811000e03843
|
[
"MIT"
] | null | null | null |
sudoku/main.py
|
Lancea12/sudoku_solver
|
160fbd0f12c3ee52c31ef07249a6811000e03843
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask import render_template
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
db = SQLAlchemy(app)
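# Illustrative note (assumption -- no configuration is shown in this snippet):
# Flask-SQLAlchemy expects a database URI in the app config before the first
# query, e.g.
#   app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///sudoku.db'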
| 20.285714
| 39
| 0.823944
|
a2a91b4c03f883adacd33efd7ea274b1fb5b1b69
| 29
|
py
|
Python
|
tests/common/snappi/__init__.py
|
lolyu/sonic-mgmt
|
ed888fd1ce26e7f44fd7f70af00c43ace4882668
|
[
"Apache-2.0"
] | 132
|
2016-10-19T12:34:44.000Z
|
2022-03-16T09:00:39.000Z
|
tests/common/snappi/__init__.py
|
lolyu/sonic-mgmt
|
ed888fd1ce26e7f44fd7f70af00c43ace4882668
|
[
"Apache-2.0"
] | 3,152
|
2016-09-21T23:05:58.000Z
|
2022-03-31T23:29:08.000Z
|
tests/common/snappi/__init__.py
|
lolyu/sonic-mgmt
|
ed888fd1ce26e7f44fd7f70af00c43ace4882668
|
[
"Apache-2.0"
] | 563
|
2016-09-20T01:00:15.000Z
|
2022-03-31T22:43:54.000Z
|
# Place for snappi fixtures.
| 14.5
| 28
| 0.758621
|
f6f06fc41ad6185bf1ca182042ac67fcf8b12088
| 185
|
py
|
Python
|
config.py
|
SebastjanLeskovar/iron_gate
|
5543de193f3ba98433362b0c9a33d3782b046447
|
[
"MIT"
] | null | null | null |
config.py
|
SebastjanLeskovar/iron_gate
|
5543de193f3ba98433362b0c9a33d3782b046447
|
[
"MIT"
] | null | null | null |
config.py
|
SebastjanLeskovar/iron_gate
|
5543de193f3ba98433362b0c9a33d3782b046447
|
[
"MIT"
] | null | null | null |
''' Configure threading in port scanner '''
# Number of allowed threads (default: 100)
number_threads = 100
# Number of jobs assigned (default: 1024, total: 65535)
number_jobs = 1024
| 23.125
| 55
| 0.735135
|
ef9831e99a0cd308c05fa95ea2597abaf24fd0fe
| 9,520
|
py
|
Python
|
tests/test_loadgen.py
|
corvustristis/apiritif
|
b80c64d1a696e970fc437075a7a63435ba465dca
|
[
"Apache-2.0"
] | null | null | null |
tests/test_loadgen.py
|
corvustristis/apiritif
|
b80c64d1a696e970fc437075a7a63435ba465dca
|
[
"Apache-2.0"
] | null | null | null |
tests/test_loadgen.py
|
corvustristis/apiritif
|
b80c64d1a696e970fc437075a7a63435ba465dca
|
[
"Apache-2.0"
] | null | null | null |
import copy
import logging
import os
import tempfile
import time
from unittest import TestCase
from multiprocessing.pool import CLOSE
import apiritif
from apiritif import store, thread
from apiritif.loadgen import Worker, Params, Supervisor, JTLSampleWriter
dummy_tests = [os.path.join(os.path.dirname(__file__), "resources", "test_dummy.py")]
logging.basicConfig(level=logging.DEBUG)
class DummyWriter(JTLSampleWriter):
def __init__(self, output_file, workers_log):
super(DummyWriter, self).__init__(output_file)
with open(workers_log, 'a') as log:
log.write("%s\n" % os.getpid())
class TestLoadGen(TestCase):
def setUp(self):
self.required_method_called = False
def get_required_method(self, method):
def required_method(*args, **kwargs):
self.required_method_called = True
method(*args, **kwargs)
return required_method
def test_thread(self):
outfile = tempfile.NamedTemporaryFile()
print(outfile.name)
params = Params()
params.concurrency = 2
params.iterations = 10
params.report = outfile.name
params.tests = dummy_tests
worker = Worker(params)
worker.run_nose(params)
def test_setup_errors(self):
error_tests = [os.path.join(os.path.dirname(__file__), "resources", "test_setup_errors.py")]
outfile = tempfile.NamedTemporaryFile()
print(outfile.name)
params = Params()
params.concurrency = 1
params.iterations = 1
params.report = outfile.name
params.tests = error_tests
params.verbose = True
worker = Worker(params)
self.assertRaises(RuntimeError, worker.run_nose, params)
with open(outfile.name, 'rt') as _file:
_file.read()
def test_worker(self):
outfile = tempfile.NamedTemporaryFile()
print(outfile.name)
params = Params()
params.concurrency = 2
params.iterations = 10
params.report = outfile.name
params.tests = dummy_tests
worker = Worker(params)
worker.start()
worker.join()
def test_empty_worker(self):
outfile = tempfile.NamedTemporaryFile()
print(outfile.name)
params = Params()
params.concurrency = 2
params.iterations = 10
params.report = outfile.name
params.tests = []
worker = Worker(params)
worker.close = self.get_required_method(worker.close) # check whether close has been called
try:
worker.start()
except: # assertRaises doesn't catch it
pass
self.assertTrue(self.required_method_called)
def test_supervisor(self):
outfile = tempfile.NamedTemporaryFile()
params = Params()
params.tests = dummy_tests
params.report = outfile.name + "%s"
params.concurrency = 9
params.iterations = 5
sup = Supervisor(params)
sup.start()
while sup.isAlive():
time.sleep(1)
def test_empty_supervisor(self):
outfile = tempfile.NamedTemporaryFile()
params = Params()
params.tests = []
params.report = outfile.name + "%s"
params.concurrency = 9
params.iterations = 5
sup = Supervisor(params)
sup.start()
while sup.isAlive():
time.sleep(1)
self.assertEqual(CLOSE, sup.workers._state)
def test_writers_x3(self):
# writers must:
# 1. be the same for threads of one process
# 2. be set up only once
# 3. be different for different processes
def dummy_worker_init(self, params):
"""
:type params: Params
"""
super(Worker, self).__init__(params.concurrency)
self.params = params
store.writer = DummyWriter(self.params.report, self.params.workers_log)
outfile = tempfile.NamedTemporaryFile()
outfile.close()
params = Params()
# use this log to spy on writers
workers_log = outfile.name + '-workers.log'
params.workers_log = workers_log
params.tests = [os.path.join(os.path.dirname(__file__), "resources", "test_smart_transactions.py")]
params.report = outfile.name + "%s"
        # this results in 2 worker processes and 3 threads in total
params.concurrency = 3
params.worker_count = 2
params.iterations = 2
saved_worker_init = Worker.__init__
Worker.__init__ = dummy_worker_init
try:
sup = Supervisor(params)
sup.start()
while sup.isAlive():
time.sleep(1)
with open(workers_log) as log:
writers = log.readlines()
self.assertEqual(2, len(writers))
self.assertNotEqual(writers[0], writers[1])
finally:
Worker.__init__ = saved_worker_init
os.remove(workers_log)
for i in range(params.worker_count):
os.remove(params.report % i)
def test_handlers(self):
# handlers must:
# 1. be unique for thread
# 2. be set up every launch of test suite
def log_line(line):
with open(thread.handlers_log, 'a') as log:
log.write("%s\n" % line)
def mock_get_handlers():
transaction_handlers = thread.get_from_thread_store('transaction_handlers')
if not transaction_handlers:
transaction_handlers = {'enter': [], 'exit': []}
length = "%s/%s" % (len(transaction_handlers['enter']), len(transaction_handlers['exit']))
log_line("get: {pid: %s, idx: %s, iteration: %s, len: %s}" %
(os.getpid(), thread.get_index(), thread.get_iteration(), length))
return transaction_handlers
def mock_set_handlers(handlers):
log_line("set: {pid: %s, idx: %s, iteration: %s, handlers: %s}," %
(os.getpid(), thread.get_index(), thread.get_iteration(), handlers))
thread.put_into_thread_store(transaction_handlers=handlers)
outfile = tempfile.NamedTemporaryFile()
outfile.close()
params = Params()
# use this log to spy on writers
handlers_log = outfile.name + '-handlers.log'
thread.handlers_log = handlers_log
params.tests = [os.path.join(os.path.dirname(__file__), "resources", "test_smart_transactions.py")]
params.report = outfile.name + "%s"
        # this results in 2 worker processes and 3 threads in total
params.concurrency = 3
params.worker_count = 2
params.iterations = 2
saved_get_handlers = apiritif.get_transaction_handlers
saved_set_handlers = apiritif.set_transaction_handlers
apiritif.get_transaction_handlers = mock_get_handlers
apiritif.set_transaction_handlers = mock_set_handlers
try:
sup = Supervisor(params)
sup.start()
while sup.isAlive():
time.sleep(1)
with open(handlers_log) as log:
handlers = log.readlines()
self.assertEqual(36, len(handlers))
self.assertEqual(6, len([handler for handler in handlers if handler.startswith('set')]))
self.assertEqual(0, len([handler for handler in handlers if handler.endswith('2/2}')]))
finally:
apiritif.get_transaction_handlers = saved_get_handlers
apiritif.set_transaction_handlers = saved_set_handlers
os.remove(handlers_log)
for i in range(params.worker_count):
os.remove(params.report % i)
def test_ramp_up1(self):
outfile = tempfile.NamedTemporaryFile()
print(outfile.name)
params1 = Params()
params1.concurrency = 50
params1.report = outfile.name
params1.tests = dummy_tests
params1.ramp_up = 60
params1.steps = 5
params1.worker_count = 2
params1.worker_index = 0
worker1 = Worker(params1)
res1 = [x.delay for x in worker1._get_thread_params()]
print(res1)
self.assertEquals(params1.concurrency, len(res1))
params2 = copy.deepcopy(params1)
params2.worker_index = 1
worker2 = Worker(params2)
res2 = [x.delay for x in worker2._get_thread_params()]
print(res2)
self.assertEquals(params2.concurrency, len(res2))
print(sorted(res1 + res2))
def test_ramp_up2(self):
outfile = tempfile.NamedTemporaryFile()
print(outfile.name)
params1 = Params()
params1.concurrency = 50
params1.report = outfile.name
params1.tests = dummy_tests
params1.ramp_up = 60
params1.worker_count = 1
params1.worker_index = 0
worker1 = Worker(params1)
res1 = [x.delay for x in worker1._get_thread_params()]
print(res1)
self.assertEquals(params1.concurrency, len(res1))
def test_unicode_ldjson(self):
outfile = tempfile.NamedTemporaryFile(suffix=".ldjson")
print(outfile.name)
params = Params()
params.concurrency = 2
params.iterations = 1
params.report = outfile.name
params.tests = dummy_tests
worker = Worker(params)
worker.start()
worker.join()
with open(outfile.name) as fds:
print(fds.read())
| 32.162162
| 107
| 0.607563
|
b394231d928b81c88d2ded8f174ad03bf85c6514
| 9,763
|
py
|
Python
|
polls/views.py
|
30Meridian/RozumneMistoSnapshot
|
67a83b3908674d01992561dfb37596e395b4d482
|
[
"BSD-3-Clause"
] | null | null | null |
polls/views.py
|
30Meridian/RozumneMistoSnapshot
|
67a83b3908674d01992561dfb37596e395b4d482
|
[
"BSD-3-Clause"
] | null | null | null |
polls/views.py
|
30Meridian/RozumneMistoSnapshot
|
67a83b3908674d01992561dfb37596e395b4d482
|
[
"BSD-3-Clause"
] | null | null | null |
import datetime
from django.views.generic import DetailView, ListView, RedirectView, CreateView, DeleteView, UpdateView
from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponse, HttpResponseRedirect
from django.core.exceptions import PermissionDenied
from django.core.exceptions import ValidationError
from weunion.settings import CRON_SECRET
from weunion.models import Town
from polls.models import Choice, Poll, Vote
from .forms import PollForm, ChoiceFormSet
class PollListView(ListView):
def get_queryset(self):
if 'town' in self.request.session:
return Poll.objects.filter(active=1, town=self.request.session['town']).order_by('id','archive')
else:
#return reverse_lazy('regions')
return Poll.objects.filter(active=3)
def get_context_data(self, **kwargs):
context = super(PollListView, self).get_context_data(**kwargs)
context['slug'] = get_object_or_404(Town, id=self.request.session['town']).slug
if self.request.user.is_authenticated() and self.request.user.is_active \
and self.request.user.isAllowedToModerate(self.request.session["town"]):
context['allowed'] = True
else:
context['allowed'] = False
return context
class PollDetailView(DetailView):
model = Poll
def get_context_data(self, **kwargs):
context = super(PollDetailView, self).get_context_data(**kwargs)
if not(self.request.session.has_key('town')):
self.request.session['town'] = self.object.town.id
self.request.session['town_name'] = self.object.town.name
if(self.request.user.is_authenticated() and self.request.user.is_active):
context['poll'].votable = self.object.can_vote(self.request.user)
if self.request.user.isAllowedToModerate(self.request.session["town"]):
context['allowed'] = True
else:
context['allowed'] = False
else:
context['poll'].votable = False
return context
class PollVoteView(RedirectView):
def post(self, request, *args, **kwargs):
poll = Poll.objects.get(id=kwargs['pk'])
user = request.user
choice = Choice.objects.get(id=request.POST['choice_pk'])
Vote.objects.create(poll=poll, user=user, choice=choice)
messages.success(request,"Дякуємо за Ваш голос")
return redirect('../../', args=kwargs['pk'])
#return super(PollVoteView, self).post(request, *args, **kwargs)
def get_redirect_url(self, **kwargs):
return redirect('../polls', args=[kwargs['pk']])
class PollCreateView(CreateView):
model = Poll
form_class = PollForm
def get(self, request, *args, **kwargs):
self.object = None
form_class = self.get_form_class()
form = self.get_form(form_class)
choice_form = ChoiceFormSet()
if self.request.user.is_authenticated() and self.request.user.is_active \
and self.request.user.isAllowedToModerate(self.request.session["town"]):
return self.render_to_response(self.get_context_data(form=form,
choice_form=choice_form,))
else:
raise PermissionDenied('Доступ заборнено!')
def post(self, request, *args, **kwargs):
self.object = None
form_class = self.get_form_class()
form = self.get_form(form_class)
choice_form = ChoiceFormSet(request.POST, request.FILES)
if (form.is_valid() and choice_form.is_valid()):
image_list = []
choice_list = []
for field in choice_form:
if field.cleaned_data:
if field.cleaned_data['image']:
image_list.append(field.cleaned_data['image'])
if field.cleaned_data['choice']:
choice_list.append(field.cleaned_data['choice'])
if not (choice_list or image_list):
return self.form_invalid(form, choice_form, 'Додайте зображення або текстовий варіант відповіді.')
if choice_list and image_list:
return self.form_invalid(form, choice_form, 'Додайте лише або зображення , або текстовий варіант відповіді!')
return self.form_valid(form, choice_form)
else:
return self.form_invalid(form, choice_form, 'Дані не коректні, або не заповлені поля!')
def form_valid(self, form, choice_form):
town = Town.objects.get(pk=self.request.session["town"])
self.object = form.save(commit=False)
self.object.town = town
self.object.save()
choice_form.instance = self.object
choice_form.save()
messages.success(self.request, "Опитування успішно додано.")
return HttpResponseRedirect(self.get_success_url())
def form_invalid(self, form, choice_form, message):
messages.warning(self.request, message)
return self.render_to_response(
self.get_context_data(form=form, choice_form=choice_form,))
def get_success_url(self):
slug = get_object_or_404(Town, id=self.request.session['town']).slug
return reverse_lazy('polls:list', kwargs={'townslug': slug},)
class PollDeleteView(DeleteView):
model = Poll
def get_success_url(self):
slug = get_object_or_404(Town, id=self.request.session['town']).slug
messages.success(self.request, "Опитування успішно видалене")
return reverse_lazy('polls:list', kwargs={'townslug': slug},)
def get(self, request, *args, **kwargs):
if self.request.user.is_authenticated() and self.request.user.is_active \
and self.request.user.isAllowedToModerate(self.request.session["town"]):
return super(PollDeleteView, self).get(request, *args, **kwargs)
else:
raise PermissionDenied('Доступ заборнено!')
class PollUpdateView(UpdateView):
model = Poll
form_class = PollForm
def get(self, request, *args, **kwargs):
if self.request.user.is_authenticated() and self.request.user.is_active \
and self.request.user.isAllowedToModerate(self.request.session["town"]):
return super(PollUpdateView, self).get(request, *args, **kwargs)
else:
raise PermissionDenied('Доступ заборнено!')
def get_context_data(self, **kwargs):
context = super(PollUpdateView, self).get_context_data(**kwargs)
if self.request.POST:
context['form'] = PollForm(self.request.POST, instance=self.object)
context['choice_form'] = ChoiceFormSet(self.request.POST, self.request.FILES, instance=self.object)
else:
context['form'] = PollForm(instance=self.object)
context['choice_form'] = ChoiceFormSet(instance=self.object)
return context
def post(self, request, *args, **kwargs):
self.object = self.get_object()
form_class = self.get_form_class()
form = self.get_form(form_class)
choice_form = ChoiceFormSet(request.POST, request.FILES, instance=self.object)
if (form.is_valid() and choice_form.is_valid()):
image_list = []
choice_list = []
for field in choice_form:
if field.cleaned_data:
if field.cleaned_data['image']:
image_list.append(field.cleaned_data['image'])
if field.cleaned_data['choice']:
choice_list.append(field.cleaned_data['choice'])
if not (choice_list or image_list):
return self.form_invalid(form, choice_form, 'Додайте зображення або текстовий варіант відповіді.')
if choice_list and image_list:
return self.form_invalid(form, choice_form, 'Додайте лише або зображення , або текстовий варіант відповіді!')
return self.form_valid(form, choice_form)
else:
return self.form_invalid(form, choice_form, 'Дані не коректні, або не заповлені поля!')
def form_valid(self, form, choice_form):
town = Town.objects.get(pk=self.request.session["town"])
self.object = form.save(commit=False)
self.object.town = town
self.object.save()
choice_form.instance = self.object
choice_form.save()
messages.success(self.request, "Опитування успішно змінено.")
return HttpResponseRedirect(self.get_success_url())
def form_invalid(self, form, choice_form, message):
messages.warning(self.request, message)
return self.render_to_response(
self.get_context_data(form=form, choice_form=choice_form,))
def get_success_url(self):
slug = get_object_or_404(Town, id=self.request.session['town']).slug
return reverse_lazy('polls:detail', kwargs={'townslug': slug,'pk': self.object.pk },)
# Cron-driven check: find polls whose end date has passed and move them to the archive
def checktimeout(request, secret, townslug):
if(secret == CRON_SECRET):
polls = Poll.objects.filter(active=1, archive=0)
count = 0
for poll in polls:
if(poll.date_end <= datetime.date.today() ):
                poll.archive = 1  # mark as archived: the collection period has run out and the required number of votes was not reached
poll.save()
count +=1
if(count):
return HttpResponse('Done! Find: '+str(count)+' poll(-s)')
else:
return HttpResponse("Not found any polls that mutch enddate!")
else:
raise PermissionDenied('Досуп заборонено.')
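# Illustrative sketch only (assumption -- the project's urls.py is not part of
# this file): the reverse_lazy() calls above expect URL names 'polls:list' and
# 'polls:detail' keyed by 'townslug' (plus 'pk' for the detail view), roughly:
#
#   urlpatterns = [
#       url(r'^(?P<townslug>[-\w]+)/polls/$', PollListView.as_view(), name='list'),
#       url(r'^(?P<townslug>[-\w]+)/polls/(?P<pk>\d+)/$', PollDetailView.as_view(), name='detail'),
#   ]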
| 42.447826
| 138
| 0.648981
|
f3184f4c071788d5262808fdf3a7bcfeea8c7694
| 14,159
|
py
|
Python
|
postprocessing/scr_run_network_det.py
|
geoHeil/FAST
|
96e41c8eb1705c7715065b7aed69570dd85752d1
|
[
"Apache-2.0"
] | null | null | null |
postprocessing/scr_run_network_det.py
|
geoHeil/FAST
|
96e41c8eb1705c7715065b7aed69570dd85752d1
|
[
"Apache-2.0"
] | null | null | null |
postprocessing/scr_run_network_det.py
|
geoHeil/FAST
|
96e41c8eb1705c7715065b7aed69570dd85752d1
|
[
"Apache-2.0"
] | 1
|
2021-06-25T13:09:29.000Z
|
2021-06-25T13:09:29.000Z
|
#########################################################################
## NETWORK DETECTION ##
#########################################################################
#########################################################################
## loads necessary libraries ##
#########################################################################
import cPickle as pickle
import sys
import gc
import time
import numpy as np
from collections import defaultdict
from itertools import count, islice
from operator import itemgetter
import multiprocessing
import os
from pseudo_association import *
from event_resolution import *
from util import *
from os.path import isfile, join, getsize
# Dimension of the event dictionary entry in numpy format:
# [dt, bbox * 4, station_id, diagonalKey, networkEventID, event_stats * 3]
M = 1 + 4 + 1 + 1 + 1 + 3
#########################################################################
## Event-pair detection functions ##
#########################################################################
def partition(fname):
print ' Partitioning %s...' % fname
load_file = data_folder + fname
file_size = getsize(load_file)
PARTITION_GAP = param["network"]["max_width"]
if param["performance"]["num_cores"] == 1:
# No parallelization
print ' %s: %d partition' % (fname, 1)
return [0]
PARTITION_SIZE = min(param["performance"]["partition_size"],
file_size / param["performance"]["num_cores"] / 2)
# Jump ahead size: around 100 lines
JUMP_SIZE = 400
with open(load_file, 'rb') as f:
byte_positions = [0]
line_start = 0
while file_size - line_start > PARTITION_SIZE:
f.seek(PARTITION_SIZE, 1) # jump PARTITION_SIZE bytes from current file position
f.readline() # read a line to make sure we're now at the beginning of a new line (if we end up in the middle of a line, now we're at the start of the following line)
tmp = f.readline().strip().split()
dt = int(tmp[0])
prev_dt = dt
end_reached = False
while line_start - byte_positions[-1] < 2 * PARTITION_SIZE:
while dt - prev_dt < PARTITION_GAP and line_start - byte_positions[-1] < 2 * PARTITION_SIZE:
line_start = f.tell()
f.seek(JUMP_SIZE, 1)
f.readline() # read a line to make sure we're now at the beginning of a new line (if we end up in the middle of a line, now we're at the start of the following line)
tmp = f.readline().strip().split()
dt = int(tmp[0])
f.seek(line_start, 0)
line = f.readline()
line_start = f.tell()
if line == '':
end_reached = True
break
tmp = line.strip().split()
dt = int(tmp[0])
if dt - prev_dt > PARTITION_GAP:
break
prev_dt = dt
# this means the previous while loop ended either because we found a dt more
# than PARTITION_GAP away from prev_dt, or we read 2x PARTITION_SIZE
# in which case we just split here
if not end_reached and line_start > 0:
byte_positions.append(line_start)
print ' %s: %d partitions' % (fname, len(byte_positions))
return byte_positions
def dict_to_numpy(d):
num_entries = sum(len(d[k]) for k in d)
arr = np.empty([num_entries, M], dtype=np.int64)
idx = 0
for k in d:
for event in d[k]:
arr[idx, :] = [k, event[0][0], event[0][1], event[0][2],
event[0][3], event[1], hash(event[2]), -1,
event[4][0], event[4][1], event[4][2]]
idx += 1
return arr
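# Each row written above follows the entry layout documented for M:
#   col 0      dt (the dictionary key)
#   cols 1-4   bounding box (bbox * 4)
#   col 5      station_id
#   col 6      hash of the diagonal key
#   col 7      network event id, initialised to -1
#   cols 8-10  event statistics (event_stats * 3)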
def event_dict_to_numpy(d):
num_entries = sum(len(d[k]) for k in d)
arr = np.empty([num_entries, 4], dtype=np.int64)
idx = 0
for eid in d:
for k, t1, sim in d[eid]:
arr[idx, :] = [hash(eid), k, t1, sim]
idx += 1
return arr
def detection_init():
global process_counter
process_counter = multiprocessing.Value('i', 0)
def detection(args):
byte_pos = args[0]
bytes_to_read = args[1]
cidx = args[2]
fname = data_folder + detdata_filenames[cidx]
start = time.time()
pid = os.getpid()
associator = NetworkAssociator()
clouds = EventCloudExtractor(dL = param["network"]["dgapL"],
dW = param["network"]["dgapW"])
global process_counter
with process_counter.get_lock():
process_counter.value += 1
# get events - create hash table
pid_prefix = str(pid + process_counter.value * 1000)
diags = clouds.p_triplet_to_diags(fname, byte_pos,
bytes_to_read, pid_prefix = pid_prefix,
ivals_thresh = param["network"]["ivals_thresh"])
#/ extract event-pair clouds
curr_event_dict = clouds.diags_to_event_list(diags,
npass = param['network']["num_pass"])
del diags
#/ prune event-pairs
min_sum = get_min_sum(param)
prune_events(curr_event_dict, param["network"]["min_dets"],
min_sum, param["network"]["max_width"])
print ' Time taken for %s (byte %d):' % (detdata_filenames[cidx], byte_pos), time.time() - start
#/ Save event-pairs for the single station case
if nstations == 1:
arr = event_dict_to_numpy(curr_event_dict)
np.save('%s%s_byte_%d_event_pairs.npy' % (data_folder,
detdata_filenames[cidx], byte_pos), arr)
del curr_event_dict, arr
return '%s%s_byte_%d_event_pairs.npy' % (data_folder,
detdata_filenames[cidx], byte_pos)
#/ get bounding boxes
diags_dict = associator.clouds_to_network_diags_one_channel(
curr_event_dict, cidx)
del curr_event_dict
print " Saving diags_dict to %s_byte_%d.npy" % (detdata_filenames[cidx], byte_pos)
arr = dict_to_numpy(diags_dict)
np.save('%s%s_byte_%d.npy' % (data_folder, detdata_filenames[cidx], byte_pos), arr)
del diags_dict, arr
return '%s%s_byte_%d.npy' % (data_folder, detdata_filenames[cidx], byte_pos)
def process(cidx):
print ' Extracting event-pairs for %s...' % detdata_filenames[cidx]
t0 = time.time()
byte_positions = byte_positions_list[cidx]
args = []
    # Fill args with tuples of the form (byte_pos, bytes_to_read, cidx).
    # bytes_to_read is -1 for the last byte_pos, in which case read() will read
    # until EOF.
    for idx in range(len(byte_positions)):
if idx == len(byte_positions) - 1:
args.append((byte_positions[idx], -1, cidx))
else:
args.append((byte_positions[idx], byte_positions[idx + 1] - byte_positions[idx], cidx))
pool = multiprocessing.Pool(param["performance"]["num_cores"],
initializer=detection_init)
output_files = pool.map(detection, args)
pool.terminate()
print ' [TIMING] %s:' % (detdata_filenames[cidx]), time.time() - t0
return output_files
if __name__ == '__main__':
grand_start_time = time.time()
param = parse_json(sys.argv[1])
nstations = len(param["io"]["channel_vars"])
data_folder = get_data_folder(param)
out_folder = get_output_folder(param)
out_fname = get_network_fname(param)
if not os.path.exists(out_folder):
os.makedirs(out_folder)
########################################################################
# Partition ##
########################################################################
print "1. Partition"
detdata_filenames = get_pairs_filenames(param)
p = multiprocessing.Pool(nstations)
# list of lists of byte positions,
# each list corresponding to one of detdata_filenames
byte_positions_list = p.map(partition, detdata_filenames)
with open('%s/byte_positions_list.dat' % out_folder, 'wb') as f:
pickle.dump(byte_positions_list, f, protocol=pickle.HIGHEST_PROTOCOL)
#byte_positions_list = pickle.load(open('byte_positions_list.dat', 'rb'))
print '[TIMING] partition:', time.time() - grand_start_time
########################################################################
# Event-pair detection ##
########################################################################
print
print "2. Extract event-pairs"
process_start_time = time.time()
fnames = []
for i in xrange(nstations):
fnames.extend(process(i))
    print '[TIMING] event-pair extraction:', time.time() - process_start_time
#########################################################################
## Network detection ##
#########################################################################
gc.collect()
print
print '3. Extract network events...'
network_start_time = time.time()
#/ Single station network detection
if nstations == 1:
# get byte_positions corresponding to the single station
event_dict = defaultdict(list)
for fname in fnames:
event_pairs = np.load(fname)
for event in event_pairs:
event_dict[event[0]].append(event[1:])
event_start, event_dt, event_stats, pair_list = event_resolution_single(
event_dict, param["network"]["max_fp"])
# TODO: Save to prettier formats
events = {'event_start': event_start, 'event_dt': event_dt,
'event_stats': event_stats}
print " Outputting results to %s*" % out_fname
f = open('%s_%s_events.txt' % (out_fname,
param["io"]["channel_vars"][0]), 'w')
f.write('event_start, event_dt, event_stats[0],'
+ ' event_stats[1], event_stats[2]\n')
for i in range(len(event_start)):
f.write('%d,%d,%d,%d,%d\n' % (event_start[i], event_dt[i],
event_stats[i][0], event_stats[i][1], event_stats[i][2]))
f.close()
if pair_list is not None:
with open('%s_%s_pairs_list.dat' % (out_fname,
param["io"]["channel_vars"][0]), "wb") as f:
pickle.dump(pair_list, f, protocol=pickle.HIGHEST_PROTOCOL)
        print '[TIMING] single-station resolution:', time.time() - network_start_time
exit(1)
#/ map events to diagonals
t4 = time.time()
all_arrs = []
for file in fnames:
print " %s" % file
arr = np.load(file)
all_arrs.append(arr)
all_diags = np.concatenate(all_arrs)
#/ sort event pairs by diagonal and initial time t1 in bounding box
inds = np.lexsort([all_diags[:,3], all_diags[:,0]])
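    # Note: np.lexsort treats the *last* key in the sequence as the primary sort
    # key, so rows end up ordered by column 0 first and by column 3 within ties.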
all_diags = all_diags[inds, ...]
print " Saving all_diags_dict to all_diags_dict.npy"
np.save("%s/all_diags_dict.npy" % out_folder, all_diags)
print '[TIMING] build network index:', time.time() - t4
#########################################################################
## pseudo-association ##
#########################################################################
associator = NetworkAssociator()
#/ pseudo-association
t5 = time.time()
print
print '4. Network pseudo-association'
icount, network_events = associator.associate_network_diags(
all_diags, nstations = nstations, offset = param["network"]["input_offset"])
del all_diags
print " Saving network event to network_event.dat"
with open('%s/network_event.dat' % out_folder, "wb") as f:
pickle.dump(network_events, f, protocol=pickle.HIGHEST_PROTOCOL)
print '[TIMING] pseudo-association:', time.time() - t5
########################################################################
# EVENT RESOLUTION - detections ##
########################################################################
# network_events = pickle.load(open('%s/network_event.dat' % out_folder, 'rb'))
# print "Loaded network event"
gc.collect()
#/ Get network events
network_events_final = get_network_events_final_by_station(
network_events, param["network"]["max_fp"],
nstations, param["network"]["nsta_thresh"])
# add all events to list and dedup
final_eventlist, network_eventlist, nfinal = event_to_list_and_dedup(
network_events_final, nstations)
#/ get statistics for each event
final_eventstats = np.zeros((nfinal, 8))
for idx, netevent in enumerate(network_eventlist):
flatten(netevent)
tot_dets = 0
max_dets = 0
tot_vol = 0
max_vol = 0
tot_nsta = 0
max_dL = 0
max_peaksum = 0
for nid in netevent:
ndets, vol, nsta, dL, peaksum = get_event_stats(network_events[nid])
tot_dets += ndets
tot_vol += vol
tot_nsta += nsta
max_dets = max(max_dets, ndets)
max_vol = max(max_vol, vol)
max_dL = max(max_dL, dL)
max_peaksum = max(max_peaksum, peaksum)
        final_eventstats[idx] = np.asarray([max_dL, len(netevent), tot_nsta, tot_dets, max_dets, tot_vol, max_vol, max_peaksum], dtype=int)
#/ store in array - total stats and highest stat for any event pair containing event
final_eventstats_str = output_results(final_eventlist,
final_eventstats, out_fname, param["io"]["channel_vars"])
#/ pickle output:
mdict = dict()
mdict['final_eventlist'] = final_eventlist #ok
mdict['network_eventlist'] = network_eventlist #ok
mdict['final_eventstats'] = final_eventstats
mdict['final_eventstats_str'] = final_eventstats_str
with open(out_fname + '.dat', "wb") as f:
pickle.dump(mdict, f, protocol=pickle.HIGHEST_PROTOCOL)
print ("[OUTPUT] Results saved to: %s" % (out_folder))
print '[TIMING] Total time: ', time.time() - grand_start_time
| 41.521994
| 195
| 0.557878
|
c9f299eda98086c0aafeb172601f59e3e7c97283
| 12,002
|
py
|
Python
|
tools/project.py
|
ecoromka/mbed-os
|
757b9e250d9cae73d79f302db03550dec3bfb82b
|
[
"Apache-2.0"
] | 3
|
2019-04-26T05:38:10.000Z
|
2021-08-11T21:00:48.000Z
|
tools/project.py
|
engali94/mbed-os
|
d030c04a6039c832bfe1610efb8162e0807678d1
|
[
"Apache-2.0"
] | 7
|
2018-12-07T15:59:10.000Z
|
2019-04-17T19:13:53.000Z
|
tools/project.py
|
engali94/mbed-os
|
d030c04a6039c832bfe1610efb8162e0807678d1
|
[
"Apache-2.0"
] | 7
|
2019-01-18T07:20:14.000Z
|
2019-03-07T09:09:14.000Z
|
"""
Copyright (c) 2016-2019 ARM Limited. All rights reserved.
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function, absolute_import
from builtins import str
import sys
from os.path import (join, abspath, dirname, exists, basename, normpath,
                     realpath, relpath)
from os import remove
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from shutil import move, rmtree
from argparse import ArgumentParser
from tools.paths import EXPORT_DIR, MBED_HAL, MBED_LIBRARIES, MBED_TARGETS_PATH
from tools.settings import BUILD_DIR
from tools.export import (
EXPORTERS,
mcu_ide_matrix,
mcu_ide_list,
export_project,
get_exporter_toolchain,
)
from tools.tests import TESTS, TEST_MAP
from tools.tests import test_known, test_name_known, Test
from tools.targets import TARGET_NAMES
from tools.utils import (
argparse_filestring_type,
argparse_profile_filestring_type,
argparse_many,
args_error,
)
from tools.utils import argparse_force_lowercase_type
from tools.utils import argparse_force_uppercase_type
from tools.utils import print_large_string
from tools.utils import NotSupportedException
from tools.options import extract_profile, list_profiles, extract_mcus
from tools.notifier.term import TerminalNotifier
""" The CLI entry point for exporting projects from the mbed tools to any of the
supported IDEs or project structures.
"""
EXPORTER_ALIASES = {
u'gcc_arm': u'make_gcc_arm',
u'uvision': u'uvision5',
}
def resolve_exporter_alias(ide):
if ide in EXPORTER_ALIASES:
return EXPORTER_ALIASES[ide]
else:
return ide
def setup_project(
ide,
target,
zip,
program,
source_dir,
build,
export_path,
):
"""Generate a name, if not provided, and find dependencies
Positional arguments:
ide - IDE or project structure that will soon be exported to
target - MCU that the project will build for
Keyword arguments:
program - the index of a test program
source_dir - the directory, or directories that contain all of the sources
build - a directory that will contain the result of the export
"""
    # Some libraries have extra macros (referred to as exporter symbols) that we
    # need to pass along, to keep the compilation macros consistent between the
    # compiled library and any header files we might use with it
if source_dir:
# --source is used to generate IDE files to toolchain directly
# in the source tree and doesn't generate zip file
project_dir = export_path or source_dir[0]
if program:
project_name = TESTS[program]
else:
project_name = basename(normpath(realpath(source_dir[0])))
if zip:
src_paths = {path.strip(".\\/"): [path] for path in source_dir}
else:
src_paths = {relpath(path, project_dir): [path] for path in source_dir}
lib_paths = None
else:
test = Test(program)
if not build:
# Substitute the mbed library builds with their sources
if MBED_LIBRARIES in test.dependencies:
test.dependencies.remove(MBED_LIBRARIES)
test.dependencies.append(MBED_HAL)
test.dependencies.append(MBED_TARGETS_PATH)
src_paths = [test.source_dir]
lib_paths = test.dependencies
project_name = "_".join([test.id, ide, target])
project_dir = join(EXPORT_DIR, project_name)
return project_dir, project_name, src_paths, lib_paths
def export(target, ide, build=None, src=None, macros=None, project_id=None,
zip_proj=False, build_profile=None, export_path=None, notify=None,
app_config=None, ignore=None):
"""Do an export of a project.
Positional arguments:
target - MCU that the project will compile for
ide - the IDE or project structure to export to
Keyword arguments:
build - to use the compiled mbed libraries or not
src - directory or directories that contain the source to export
macros - extra macros to add to the project
project_id - the name of the project
    zip_proj - create a zip file or not
    build_profile - the build profile to apply to the exported project
    export_path - directory to export into, overriding the default location
    app_config - path to a custom app configuration file
    ignore - list of paths to add to mbedignore
Returns an object of type Exporter (tools/exports/exporters.py)
"""
project_dir, name, src, lib = setup_project(
ide,
target,
bool(zip_proj),
program=project_id,
source_dir=src,
build=build,
export_path=export_path,
)
zip_name = name+".zip" if zip_proj else None
return export_project(
src,
project_dir,
target,
ide,
name=name,
macros=macros,
libraries_paths=lib,
zip_proj=zip_name,
build_profile=build_profile,
notify=TerminalNotifier(),
app_config=app_config,
ignore=ignore
)
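# For illustration, a hypothetical call to export() (target and IDE values are
# placeholders, not taken from this file):
#   export("K64F", "make_gcc_arm", src=["."], zip_proj=True)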
def clean(source_dir):
if exists(EXPORT_DIR):
rmtree(EXPORT_DIR)
for cls in EXPORTERS.values():
try:
cls.clean(basename(abspath(source_dir[0])))
except (NotImplementedError, IOError, OSError):
pass
for f in list(EXPORTERS.values())[0].CLEAN_FILES:
try:
remove(f)
except (IOError, OSError):
pass
def get_args(argv):
parser = ArgumentParser()
targetnames = TARGET_NAMES
targetnames.sort()
toolchainlist = list(EXPORTERS.keys()) + list(EXPORTER_ALIASES.keys())
toolchainlist.sort()
parser.add_argument(
"-m", "--mcu",
metavar="MCU",
help="generate project for the given MCU ({})".format(
', '.join(targetnames))
)
parser.add_argument(
"-i",
dest="ide",
type=argparse_force_lowercase_type(
toolchainlist, "toolchain"),
help="The target IDE: %s" % str(toolchainlist)
)
parser.add_argument(
"-c", "--clean",
action="store_true",
default=False,
help="clean the export directory"
)
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument(
"-p",
type=test_known,
dest="program",
help="The index of the desired test program: [0-%s]" % (len(TESTS) - 1)
)
group.add_argument(
"-n",
type=test_name_known,
dest="program",
help="The name of the desired test program"
)
parser.add_argument(
"-b",
dest="build",
default=False,
action="store_true",
help="use the mbed library build, instead of the sources"
)
group.add_argument(
"-L", "--list-tests",
action="store_true",
dest="list_tests",
default=False,
help="list available programs in order and exit"
)
group.add_argument(
"-S", "--list-matrix",
dest="supported_ides",
default=False,
const="matrix",
choices=["matrix", "ides"],
nargs="?",
help="displays supported matrix of MCUs and IDEs"
)
group.add_argument(
"--update-packs",
dest="update_packs",
action="store_true",
default=False
)
parser.add_argument(
"-E",
action="store_true",
dest="supported_ides_html",
default=False,
help="Generate a markdown version of the results of -S in README.md"
)
parser.add_argument(
"--build",
type=argparse_filestring_type,
dest="build_dir",
default=None,
help="Directory for the exported project files"
)
parser.add_argument(
"--source",
action="append",
type=argparse_filestring_type,
dest="source_dir",
default=[],
help="The source (input) directory"
)
parser.add_argument(
"--custom-targets",
action="append",
type=argparse_filestring_type,
dest="custom_targets_directory",
default=[],
help="Specify directory containing custom_targets.json"
)
parser.add_argument(
"-D",
action="append",
dest="macros",
help="Add a macro definition"
)
parser.add_argument(
"--profile",
dest="profile",
action="append",
type=argparse_profile_filestring_type,
help=("Build profile to use. Can be either path to json"
"file or one of the default one ({})".format(
", ".join(list_profiles()))),
default=[]
)
parser.add_argument(
"--app-config",
dest="app_config",
default=None
)
parser.add_argument(
"-z",
action="store_true",
default=None,
dest="zip",
)
parser.add_argument(
"--ignore",
dest="ignore",
type=argparse_many(str),
default=None,
help=("Comma separated list of patterns to add to mbedignore "
"(eg. ./main.cpp)")
)
return parser.parse_args(argv), parser
def main():
"""Entry point"""
# Parse Options
options, parser = get_args(sys.argv[1:])
# Print available tests in order and exit
if options.list_tests:
print('\n'.join(str(test) for test in sorted(TEST_MAP.values())))
elif options.supported_ides:
if options.supported_ides == "matrix":
print_large_string(mcu_ide_matrix())
elif options.supported_ides == "ides":
print(mcu_ide_list())
elif options.supported_ides_html:
html = mcu_ide_matrix(verbose_html=True)
with open("README.md", "w") as readme:
readme.write("Exporter IDE/Platform Support\n")
readme.write("-----------------------------------\n")
readme.write("\n")
readme.write(html)
elif options.update_packs:
from tools.arm_pack_manager import Cache
cache = Cache(True, True)
cache.cache_everything()
else:
# Check required arguments
if not options.mcu:
args_error(parser, "argument -m/--mcu is required")
if not options.ide:
args_error(parser, "argument -i is required")
if (options.program is None) and (not options.source_dir):
args_error(parser, "one of -p, -n, or --source is required")
if options.clean:
clean(options.source_dir)
ide = resolve_exporter_alias(options.ide)
exporter, toolchain_name = get_exporter_toolchain(ide)
profile = extract_profile(parser, options, toolchain_name, fallback="debug")
mcu = extract_mcus(parser, options)[0]
if not exporter.is_target_supported(mcu):
args_error(parser, "%s not supported by %s" % (mcu, ide))
try:
export(
mcu,
ide,
build=options.build,
src=options.source_dir,
macros=options.macros,
project_id=options.program,
zip_proj=not bool(options.source_dir) or options.zip,
build_profile=profile,
app_config=options.app_config,
export_path=options.build_dir,
ignore=options.ignore
)
except NotSupportedException as exc:
print("[Not Supported] %s" % str(exc))
exit(1)
exit(0)
if __name__ == "__main__":
main()
| 29.416667
| 84
| 0.623229
|
dad46dcab869276bfc56b02e67b04fe514a72755
| 3,191
|
py
|
Python
|
i3pystatus/whosonlocation.py
|
crwood/i3pystatus
|
0e7e04f075afb73be1a71d3ad2fc5b0d33bb9214
|
[
"MIT"
] | null | null | null |
i3pystatus/whosonlocation.py
|
crwood/i3pystatus
|
0e7e04f075afb73be1a71d3ad2fc5b0d33bb9214
|
[
"MIT"
] | null | null | null |
i3pystatus/whosonlocation.py
|
crwood/i3pystatus
|
0e7e04f075afb73be1a71d3ad2fc5b0d33bb9214
|
[
"MIT"
] | 1
|
2019-08-15T10:35:54.000Z
|
2019-08-15T10:35:54.000Z
|
from i3pystatus import IntervalModule
import requests
from collections import OrderedDict
from bs4 import BeautifulSoup
class WhosOnLocation():
email = None
password = None
session = None
def __init__(self, email, password):
self.email = email
self.password = password
self.session = requests.Session()
def login(self):
login_details = {'email_input': self.email,
'password_input': self.password,
'_redirect_url': '',
'continue_submit': 'Login'}
r = self.session.post('https://login.whosonlocation.com/login', data=login_details)
return r.url == 'https://au.whosonlocation.com/home?justloggedin=true'
def get_status(self):
r = self.session.get('https://au.whosonlocation.com/home?justloggedin=true')
html = BeautifulSoup(r.content)
status = html.body.find("span", {"class": "my-status-name"})
if status:
return status.text
def on_site(self):
return self.__change_status('onsite')
def off_site(self):
return self.__change_status('offsite')
def __change_status(self, status):
r = self.session.post('https://au.whosonlocation.com/ajax/changestatus', data={'status': status})
return r.json()
# _type can be org or location
def search(self, keyword, _type='location'):
payload = {'keyword': keyword, 'type': _type}
r = self.session.get('https://au.whosonlocation.com/home/search', params=payload)
return self.__parse_results(BeautifulSoup(r.content))
@staticmethod
def __parse_results(page):
titles = ['Name', 'Title', 'Department', 'Current Location', 'Home Location']
table = page.body.find_all("tr", {"class": "dataRow"})
results = []
for row in table:
values = [v.string for v in row.findAll('td', {'class': 'truncate'})]
results.append(OrderedDict(zip(titles, values)))
return results
class WOL(IntervalModule):
"""
Change your whosonlocation.com status.
    Requires the PyPI module `beautifulsoup4`
"""
location = None
email = None
password = None
settings = (
'email',
'password'
)
color_on_site = '#00FF00'
color_off_site = '#ff0000'
format = 'Status: {status}'
status = None
on_leftclick = 'change_status'
def init(self):
self.location = WhosOnLocation(self.email, self.password)
if not self.location.login():
raise Exception("Failed to login")
def change_status(self):
if self.status == 'On-Site':
self.location.off_site()
elif self.status == 'Off-Site':
self.location.on_site()
def run(self):
self.status = self.location.get_status()
color = None
if self.status == 'Off-Site':
color = self.color_off_site
elif self.status == 'On-Site':
color = self.color_on_site
self.output = {
"full_text": self.format.format(
status=self.status
),
"color": color
}
| 29.546296
| 105
| 0.596365
|
1bb17e3dd77230a5a7b5e2d7cbcfa1769eef8d21
| 3,283
|
py
|
Python
|
arcade/examples/background_music.py
|
JFincher42/arcade
|
f9eebfc4c6989e0e99d7b6dfe0409f248bfd5a44
|
[
"MIT"
] | 1
|
2021-03-04T14:02:29.000Z
|
2021-03-04T14:02:29.000Z
|
arcade/examples/background_music.py
|
3w36zj6/arcade
|
2429205243f3b8c247a67163caea2e694f8e2ba4
|
[
"MIT"
] | null | null | null |
arcade/examples/background_music.py
|
3w36zj6/arcade
|
2429205243f3b8c247a67163caea2e694f8e2ba4
|
[
"MIT"
] | null | null | null |
"""
Background Music Example
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.background_music
"""
import arcade
import time
SCREEN_WIDTH = 600
SCREEN_HEIGHT = 300
SCREEN_TITLE = "Starting Template Simple"
MUSIC_VOLUME = 0.5
class MyGame(arcade.Window):
""" Main application class. """
def __init__(self, width, height, title):
super().__init__(width, height, title)
arcade.set_background_color(arcade.color.WHITE)
# Variables used to manage our music. See setup() for giving them
# values.
self.music_list = []
self.current_song_index = 0
self.current_player = None
self.music = None
def advance_song(self):
""" Advance our pointer to the next song. This does NOT start the song. """
self.current_song_index += 1
if self.current_song_index >= len(self.music_list):
self.current_song_index = 0
print(f"Advancing song to {self.current_song_index}.")
def play_song(self):
""" Play the song. """
# Stop what is currently playing.
if self.music:
self.music.stop()
# Play the next song
print(f"Playing {self.music_list[self.current_song_index]}")
self.music = arcade.Sound(self.music_list[self.current_song_index], streaming=True)
self.current_player = self.music.play(MUSIC_VOLUME)
# This is a quick delay. If we don't do this, our elapsed time is 0.0
# and on_update will think the music is over and advance us to the next
# song before starting this one.
time.sleep(0.03)
def setup(self):
""" Set up the game here. Call this function to restart the game. """
# List of music
self.music_list = [":resources:music/funkyrobot.mp3", ":resources:music/1918.mp3"]
# Array index of what to play
self.current_song_index = 0
# Play the song
self.play_song()
def on_draw(self):
""" Render the screen. """
arcade.start_render()
position = self.music.get_stream_position(self.current_player)
length = self.music.get_length()
size = 20
margin = size * .5
# Print time elapsed and total
y = SCREEN_HEIGHT - (size + margin)
text = f"{int(position) // 60}:{int(position) % 60:02} of {int(length) // 60}:{int(length) % 60:02}"
arcade.draw_text(text, 0, y, arcade.csscolor.BLACK, size)
# Print current song
y -= size + margin
text = f"Currently playing: {self.music_list[self.current_song_index]}"
arcade.draw_text(text, 0, y, arcade.csscolor.BLACK, size)
def on_update(self, dt):
position = self.music.get_stream_position(self.current_player)
# The position pointer is reset to 0 right after we finish the song.
# This makes it very difficult to figure out if we just started playing
        # or if we are done playing.
if position == 0.0:
self.advance_song()
self.play_song()
def main():
""" Main method """
window = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
window.setup()
arcade.run()
if __name__ == "__main__":
main()
| 31.266667
| 108
| 0.632348
|
5bf0b4083067d9622331cbef5802b756210f1c37
| 3,650
|
py
|
Python
|
scico/test/test_flax.py
|
lanl/scico
|
976c9e5833f8f67eed2eaa43460d89fb09bb9f78
|
[
"BSD-3-Clause"
] | 18
|
2021-09-21T18:55:11.000Z
|
2022-03-21T20:13:05.000Z
|
scico/test/test_flax.py
|
lanl/scico
|
976c9e5833f8f67eed2eaa43460d89fb09bb9f78
|
[
"BSD-3-Clause"
] | 218
|
2021-09-21T21:45:08.000Z
|
2022-03-30T18:45:27.000Z
|
scico/test/test_flax.py
|
lanl/scico
|
976c9e5833f8f67eed2eaa43460d89fb09bb9f78
|
[
"BSD-3-Clause"
] | 2
|
2021-09-23T22:44:47.000Z
|
2021-12-18T16:01:43.000Z
|
from functools import partial
import numpy as np
import pytest
from flax import linen as nn
from scico import _flax as sflax
from scico import random
from scico._flax import FlaxMap
class TestSet:
def test_convnblock_default(self):
nflt = 16 # number of filters
conv = partial(nn.Conv, dtype=np.float32)
norm = partial(nn.BatchNorm, dtype=np.float32)
flxm = sflax.ConvBNBlock(
num_filters=nflt,
conv=conv,
norm=norm,
act=nn.relu,
)
assert flxm.kernel_size == (3, 3) # size of kernel
assert flxm.strides == (1, 1) # stride of convolution
def test_convnblock_args(self):
nflt = 16 # number of filters
ksz = (5, 5) # size of kernel
strd = (2, 2) # stride of convolution
conv = partial(nn.Conv, dtype=np.float32)
norm = partial(nn.BatchNorm, dtype=np.float32)
flxm = sflax.ConvBNBlock(
num_filters=nflt,
conv=conv,
norm=norm,
act=nn.leaky_relu,
kernel_size=ksz,
strides=strd,
)
assert flxm.act == nn.leaky_relu
assert flxm.kernel_size == ksz # size of kernel
assert flxm.strides == strd # stride of convolution
class DnCNNNetTest:
def __init__(self):
depth = 3 # depth of model
chn = 1 # number of channels
num_filters = 16 # number of filters per layer
N = 128 # image size
self.x, key = random.randn((10, N, N, chn), seed=1234)
self.dncnn = sflax.DnCNNNet(
depth=depth,
channels=chn,
num_filters=num_filters,
)
self.variables = self.dncnn.init(key, self.x)
@pytest.fixture(scope="module")
def testobj():
yield DnCNNNetTest()
def test_DnCNN_call(testobj):
# Test for the construction / forward pass.
dnx = testobj.dncnn.apply(testobj.variables, testobj.x, train=False, mutable=False)
assert testobj.x.dtype == dnx.dtype
def test_DnCNN_train(testobj):
# Test effect of training flag.
bn0bias_before = testobj.variables["params"]["ConvBNBlock_0"]["BatchNorm_0"]["bias"]
bn0mean_before = testobj.variables["batch_stats"]["ConvBNBlock_0"]["BatchNorm_0"]["mean"]
dnx, new_state = testobj.dncnn.apply(
testobj.variables, testobj.x, train=True, mutable=["batch_stats"]
)
bn0mean_new = new_state["batch_stats"]["ConvBNBlock_0"]["BatchNorm_0"]["mean"]
bn0bias_after = testobj.variables["params"]["ConvBNBlock_0"]["BatchNorm_0"]["bias"]
bn0mean_after = testobj.variables["batch_stats"]["ConvBNBlock_0"]["BatchNorm_0"]["mean"]
try:
np.testing.assert_allclose(bn0bias_before, bn0bias_after, rtol=1e-5)
np.testing.assert_allclose(
bn0mean_new - bn0mean_before, bn0mean_new + bn0mean_after, rtol=1e-5
)
except Exception as e:
print(e)
assert 0
def test_DnCNN_test(testobj):
# Test effect of training flag.
bn0var_before = testobj.variables["batch_stats"]["ConvBNBlock_0"]["BatchNorm_0"]["var"]
dnx, new_state = testobj.dncnn.apply(
testobj.variables, testobj.x, train=False, mutable=["batch_stats"]
)
bn0var_after = new_state["batch_stats"]["ConvBNBlock_0"]["BatchNorm_0"]["var"]
np.testing.assert_allclose(bn0var_before, bn0var_after, rtol=1e-5)
def test_FlaxMap_call(testobj):
# Test for the usage of flax model as a map.
fmap = FlaxMap(testobj.dncnn, testobj.variables)
N = 128 # image size
x, key = random.randn((N, N))
out = fmap(x)
assert x.dtype == out.dtype
assert x.ndim == out.ndim
| 33.181818
| 93
| 0.637808
|
77594cd3fa5fd0a43d0cb26c12a0c36be2447a4f
| 909
|
py
|
Python
|
kubernetes/test/test_v1_host_alias.py
|
L3T/python
|
b6e4ae81a2afb49f668a142eb7d1c6e2571ef478
|
[
"Apache-2.0"
] | 2
|
2020-06-21T08:03:18.000Z
|
2020-06-21T09:53:29.000Z
|
kubernetes/test/test_v1_host_alias.py
|
L3T/python
|
b6e4ae81a2afb49f668a142eb7d1c6e2571ef478
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_v1_host_alias.py
|
L3T/python
|
b6e4ae81a2afb49f668a142eb7d1c6e2571ef478
|
[
"Apache-2.0"
] | 1
|
2020-12-10T07:28:08.000Z
|
2020-12-10T07:28:08.000Z
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: release-1.16
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import kubernetes.client
from kubernetes.client.models.v1_host_alias import V1HostAlias # noqa: E501
from kubernetes.client.rest import ApiException
class TestV1HostAlias(unittest.TestCase):
"""V1HostAlias unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1HostAlias(self):
"""Test V1HostAlias"""
# FIXME: construct object with mandatory attributes with example values
# model = kubernetes.client.models.v1_host_alias.V1HostAlias() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 22.725
| 124
| 0.708471
|
f8e6695364f143f7139c92c7e53ae977c2ebfdf5
| 3,040
|
py
|
Python
|
PyFlow/Packages/PyFlowOpenCv/__init__.py
|
bobosky/PyFlowOpenCv
|
1c02fcbee6f3a1998f81e81d9673beab2fecae6c
|
[
"Apache-2.0"
] | 2
|
2020-07-17T20:05:50.000Z
|
2022-02-13T23:35:04.000Z
|
PyFlow/Packages/PyFlowOpenCv/__init__.py
|
bobosky/PyFlowOpenCv
|
1c02fcbee6f3a1998f81e81d9673beab2fecae6c
|
[
"Apache-2.0"
] | null | null | null |
PyFlow/Packages/PyFlowOpenCv/__init__.py
|
bobosky/PyFlowOpenCv
|
1c02fcbee6f3a1998f81e81d9673beab2fecae6c
|
[
"Apache-2.0"
] | 1
|
2021-01-15T07:01:53.000Z
|
2021-01-15T07:01:53.000Z
|
PACKAGE_NAME = 'PyFlowOpenCv'
from collections import OrderedDict
from PyFlow.UI.UIInterfaces import IPackage
# Pins
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import ImagePin
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import VideoPin
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import GraphElementPin
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import KeyPointsPin
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import BackgroundSubtractorPin
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import DescriptorPin
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import FeatureMatchPin
from PyFlow.Packages.PyFlowOpenCv.Pins.ImagePin import NumpyDataPin
# Function based nodes
from PyFlow.Packages.PyFlowOpenCv.FunctionLibraries.OpenCvLib import OpenCvLib
from PyFlow.Packages.PyFlowOpenCv.FunctionLibraries.OpenCvLib import LK_optical_flow_Lib
from PyFlow.Packages.PyFlowOpenCv.FunctionLibraries.OpenCvLib import Dense_optical_flow_Lib
from PyFlow.Packages.PyFlowOpenCv.FunctionLibraries.ImageFilteringLib import ImageFilteringLib
from PyFlow.Packages.PyFlowOpenCv.FunctionLibraries.GeometricImageTransformationsLib import GeometricImageTransformationsLib
# Class based nodes
from PyFlow.Packages.PyFlowOpenCv.Nodes.ViewerNode import ViewerNode
# Tools
from PyFlow.Packages.PyFlowOpenCv.Tools.ImageViewerTool import ImageViewerTool
# Factories
from PyFlow.Packages.PyFlowOpenCv.Factories.PinInputWidgetFactory import getInputWidget
from PyFlow.Packages.PyFlowOpenCv.Factories.UINodeFactory import createUINode
_FOO_LIBS = {OpenCvLib.__name__: OpenCvLib(PACKAGE_NAME),
LK_optical_flow_Lib.__name__: LK_optical_flow_Lib(PACKAGE_NAME),
Dense_optical_flow_Lib.__name__:Dense_optical_flow_Lib(PACKAGE_NAME),
ImageFilteringLib.__name__: ImageFilteringLib(PACKAGE_NAME),
GeometricImageTransformationsLib.__name__: GeometricImageTransformationsLib(PACKAGE_NAME),
}
_NODES = {}
_PINS = {}
_TOOLS = OrderedDict()
_PREFS_WIDGETS = OrderedDict()
_EXPORTERS = OrderedDict()
_NODES[ViewerNode.__name__] = ViewerNode
_PINS[ImagePin.__name__] = ImagePin
_PINS[VideoPin.__name__] = VideoPin
_PINS[GraphElementPin.__name__] = GraphElementPin
_PINS[KeyPointsPin.__name__] = KeyPointsPin
_PINS[BackgroundSubtractorPin.__name__] = BackgroundSubtractorPin
_PINS[DescriptorPin.__name__] = DescriptorPin
_PINS[FeatureMatchPin.__name__] = FeatureMatchPin
_PINS[NumpyDataPin.__name__] = NumpyDataPin
_TOOLS[ImageViewerTool.__name__] = ImageViewerTool
class PyFlowOpenCv(IPackage):
def __init__(self):
super(PyFlowOpenCv, self).__init__()
@staticmethod
def GetExporters():
return _EXPORTERS
@staticmethod
def GetFunctionLibraries():
return _FOO_LIBS
@staticmethod
def GetNodeClasses():
return _NODES
@staticmethod
def GetPinClasses():
return _PINS
@staticmethod
def GetToolClasses():
return _TOOLS
#@staticmethod
#def UIPinsFactory():
# return createUIPin
@staticmethod
def UINodesFactory():
return createUINode
@staticmethod
def PinsInputWidgetFactory():
return getInputWidget
| 31.666667
| 124
| 0.843421
|
3d1ac3506bee05d8d26f84aa3a307db2a74eceb8
| 10,500
|
py
|
Python
|
networkx/algorithms/community/modularity_max.py
|
LamprosYfantis/networkx
|
4f957ad8abef63f0933dcc198468897fbcdabce2
|
[
"BSD-3-Clause"
] | null | null | null |
networkx/algorithms/community/modularity_max.py
|
LamprosYfantis/networkx
|
4f957ad8abef63f0933dcc198468897fbcdabce2
|
[
"BSD-3-Clause"
] | null | null | null |
networkx/algorithms/community/modularity_max.py
|
LamprosYfantis/networkx
|
4f957ad8abef63f0933dcc198468897fbcdabce2
|
[
"BSD-3-Clause"
] | null | null | null |
# modularity_max.py - functions for finding communities based on modularity
#
# Copyright 2018 Edward L. Platt
#
# This file is part of NetworkX
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#
# Authors:
# Edward L. Platt <ed@elplatt.com>
#
# TODO:
# - Alter equations for weighted case
# - Write tests for weighted case
"""Functions for detecting communities based on modularity.
"""
from __future__ import division
import networkx as nx
from networkx.algorithms.community.quality import modularity
from networkx.utils.mapped_queue import MappedQueue
__all__ = [
'greedy_modularity_communities',
'_naive_greedy_modularity_communities']
def greedy_modularity_communities(G, weight=None):
"""Find communities in graph using Clauset-Newman-Moore greedy modularity
maximization. This method currently supports the Graph class and does not
consider edge weights.
Greedy modularity maximization begins with each node in its own community
and joins the pair of communities that most increases modularity until no
such pair exists.
Parameters
----------
G : NetworkX graph
Returns
-------
Yields sets of nodes, one for each community.
Examples
--------
    >>> from networkx.algorithms.community import greedy_modularity_communities
>>> G = nx.karate_club_graph()
>>> c = list(greedy_modularity_communities(G))
>>> sorted(c[0])
[8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
References
----------
.. [1] M. E. J Newman 'Networks: An Introduction', page 224
Oxford University Press 2011.
.. [2] Clauset, A., Newman, M. E., & Moore, C.
"Finding community structure in very large networks."
Physical Review E 70(6), 2004.
"""
# Count nodes and edges
N = len(G.nodes())
m = sum([d.get('weight', 1) for u, v, d in G.edges(data=True)])
q0 = 1.0 / (2.0*m)
# Map node labels to contiguous integers
label_for_node = dict((i, v) for i, v in enumerate(G.nodes()))
node_for_label = dict((label_for_node[i], i) for i in range(N))
# Calculate degrees
k_for_label = G.degree(G.nodes(), weight=weight)
k = [k_for_label[label_for_node[i]] for i in range(N)]
# Initialize community and merge lists
communities = dict((i, frozenset([i])) for i in range(N))
merges = []
# Initial modularity
partition = [[label_for_node[x] for x in c] for c in communities.values()]
q_cnm = modularity(G, partition)
# Initialize data structures
    # CNM Eq 8-9 (Eq 8 was missing a factor of 2 (from A_ij + A_ji))
# a[i]: fraction of edges within community i
# dq_dict[i][j]: dQ for merging community i, j
    # dq_heap[i][n] : (-dq, i, j) for community i's nth largest dQ
# H[n]: (-dq, i, j) for community with nth largest max_j(dQ_ij)
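    # For two singleton communities i and j joined by an edge, the initialisation
    # below works out to dQ_ij = 2 * (1/(2m) - k_i*k_j/(2m)^2), i.e. twice the
    # one-directional CNM value, matching the factor-of-2 note above.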
a = [k[i]*q0 for i in range(N)]
dq_dict = dict(
(i, dict(
(j, 2*q0 - 2*k[i]*k[j]*q0*q0)
for j in [
node_for_label[u]
for u in G.neighbors(label_for_node[i])]
if j != i))
for i in range(N))
dq_heap = [
MappedQueue([
(-dq, i, j)
for j, dq in dq_dict[i].items()])
for i in range(N)]
H = MappedQueue([
dq_heap[i].h[0]
for i in range(N)
if len(dq_heap[i]) > 0])
# Merge communities until we can't improve modularity
while len(H) > 1:
# Find best merge
# Remove from heap of row maxes
# Ties will be broken by choosing the pair with lowest min community id
try:
dq, i, j = H.pop()
except IndexError:
break
dq = -dq
# Remove best merge from row i heap
dq_heap[i].pop()
# Push new row max onto H
if len(dq_heap[i]) > 0:
H.push(dq_heap[i].h[0])
# If this element was also at the root of row j, we need to remove the
# duplicate entry from H
if dq_heap[j].h[0] == (-dq, j, i):
H.remove((-dq, j, i))
# Remove best merge from row j heap
dq_heap[j].remove((-dq, j, i))
# Push new row max onto H
if len(dq_heap[j]) > 0:
H.push(dq_heap[j].h[0])
else:
# Duplicate wasn't in H, just remove from row j heap
dq_heap[j].remove((-dq, j, i))
# Stop when change is non-positive
if dq <= 0:
break
# Perform merge
communities[j] = frozenset(communities[i] | communities[j])
del communities[i]
merges.append((i, j, dq))
# New modularity
q_cnm += dq
# Get list of communities connected to merged communities
i_set = set(dq_dict[i].keys())
j_set = set(dq_dict[j].keys())
all_set = (i_set | j_set) - set([i, j])
both_set = i_set & j_set
# Merge i into j and update dQ
for k in all_set:
# Calculate new dq value
if k in both_set:
dq_jk = dq_dict[j][k] + dq_dict[i][k]
elif k in j_set:
dq_jk = dq_dict[j][k] - 2.0*a[i]*a[k]
else:
# k in i_set
dq_jk = dq_dict[i][k] - 2.0*a[j]*a[k]
# Update rows j and k
for row, col in [(j, k), (k, j)]:
# Save old value for finding heap index
if k in j_set:
d_old = (-dq_dict[row][col], row, col)
else:
d_old = None
# Update dict for j,k only (i is removed below)
dq_dict[row][col] = dq_jk
# Save old max of per-row heap
if len(dq_heap[row]) > 0:
d_oldmax = dq_heap[row].h[0]
else:
d_oldmax = None
# Add/update heaps
d = (-dq_jk, row, col)
if d_old is None:
# We're creating a new nonzero element, add to heap
dq_heap[row].push(d)
else:
# Update existing element in per-row heap
dq_heap[row].update(d_old, d)
# Update heap of row maxes if necessary
if d_oldmax is None:
# No entries previously in this row, push new max
H.push(d)
else:
# We've updated an entry in this row, has the max changed?
if dq_heap[row].h[0] != d_oldmax:
H.update(d_oldmax, dq_heap[row].h[0])
# Remove row/col i from matrix
i_neighbors = dq_dict[i].keys()
for k in i_neighbors:
# Remove from dict
dq_old = dq_dict[k][i]
del dq_dict[k][i]
# Remove from heaps if we haven't already
if k != j:
# Remove both row and column
for row, col in [(k, i), (i, k)]:
# Check if replaced dq is row max
d_old = (-dq_old, row, col)
if dq_heap[row].h[0] == d_old:
# Update per-row heap and heap of row maxes
dq_heap[row].remove(d_old)
H.remove(d_old)
# Update row max
if len(dq_heap[row]) > 0:
H.push(dq_heap[row].h[0])
else:
# Only update per-row heap
dq_heap[row].remove(d_old)
del dq_dict[i]
# Mark row i as deleted, but keep placeholder
dq_heap[i] = MappedQueue()
# Merge i into j and update a
a[j] += a[i]
a[i] = 0
communities = [
frozenset([label_for_node[i] for i in c])
for c in communities.values()]
return sorted(communities, key=len, reverse=True)
def _naive_greedy_modularity_communities(G):
"""Find communities in graph using the greedy modularity maximization.
This implementation is O(n^4), much slower than alternatives, but it is
provided as an easy-to-understand reference implementation.
"""
# First create one community for each node
communities = list([frozenset([u]) for u in G.nodes()])
# Track merges
merges = []
# Greedily merge communities until no improvement is possible
old_modularity = None
new_modularity = modularity(G, communities)
while old_modularity is None or new_modularity > old_modularity:
# Save modularity for comparison
old_modularity = new_modularity
# Find best pair to merge
trial_communities = list(communities)
to_merge = None
for i, u in enumerate(communities):
for j, v in enumerate(communities):
# Skip i=j and empty communities
if j <= i or len(u) == 0 or len(v) == 0:
continue
# Merge communities u and v
trial_communities[j] = u | v
trial_communities[i] = frozenset([])
trial_modularity = modularity(G, trial_communities)
if trial_modularity >= new_modularity:
# Check if strictly better or tie
if trial_modularity > new_modularity:
# Found new best, save modularity and group indexes
new_modularity = trial_modularity
to_merge = (i, j, new_modularity - old_modularity)
elif (
to_merge and
min(i, j) < min(to_merge[0], to_merge[1])
):
# Break ties by choosing pair with lowest min id
new_modularity = trial_modularity
to_merge = (i, j, new_modularity - old_modularity)
# Un-merge
trial_communities[i] = u
trial_communities[j] = v
if to_merge is not None:
# If the best merge improves modularity, use it
merges.append(to_merge)
i, j, dq = to_merge
u, v = communities[i], communities[j]
communities[j] = u | v
communities[i] = frozenset([])
# Remove empty communities and sort
communities = [c for c in communities if len(c) > 0]
for com in sorted(communities, key=lambda x: len(x), reverse=True):
yield com
| 37.102473
| 79
| 0.541905
|
306ec436257c0d39ab9e5f00b62ab6c5b2c8cfb0
| 11,485
|
py
|
Python
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/applyactions_f3190fab64661538563653e07df155a5.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 20
|
2019-05-07T01:59:14.000Z
|
2022-02-11T05:24:47.000Z
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/applyactions_f3190fab64661538563653e07df155a5.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 60
|
2019-04-03T18:59:35.000Z
|
2022-02-22T12:05:05.000Z
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/applyactions_f3190fab64661538563653e07df155a5.py
|
OpenIxia/ixnetwork_restpy
|
f628db450573a104f327cf3c737ca25586e067ae
|
[
"MIT"
] | 13
|
2019-05-20T10:48:31.000Z
|
2021-10-06T07:45:44.000Z
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class ApplyActions(Base):
"""Select the type of apply action instructions that the table flow entry will support.
The ApplyActions class encapsulates a required applyActions resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'applyActions'
_SDM_ATT_MAP = {
'CopyTtlIn': 'copyTtlIn',
'CopyTtlOut': 'copyTtlOut',
'DecrementMplsTtl': 'decrementMplsTtl',
'DecrementNetworkTtl': 'decrementNetworkTtl',
'Experimenter': 'experimenter',
'Group': 'group',
'Output': 'output',
'PopMpls': 'popMpls',
'PopPbb': 'popPbb',
'PopVlan': 'popVlan',
'PushMpls': 'pushMpls',
'PushPbb': 'pushPbb',
'PushVlan': 'pushVlan',
'SetField': 'setField',
'SetMplsTtl': 'setMplsTtl',
'SetNetworkTtl': 'setNetworkTtl',
'SetQueue': 'setQueue',
}
_SDM_ENUM_MAP = {
}
def __init__(self, parent, list_op=False):
super(ApplyActions, self).__init__(parent, list_op)
@property
def CopyTtlIn(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Copy TTL In Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['CopyTtlIn'])
@CopyTtlIn.setter
def CopyTtlIn(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['CopyTtlIn'], value)
@property
def CopyTtlOut(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Copy TTL Out Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['CopyTtlOut'])
@CopyTtlOut.setter
def CopyTtlOut(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['CopyTtlOut'], value)
@property
def DecrementMplsTtl(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Decrement MPLS TTL Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['DecrementMplsTtl'])
@DecrementMplsTtl.setter
def DecrementMplsTtl(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['DecrementMplsTtl'], value)
@property
def DecrementNetworkTtl(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Decrement Network TTL Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['DecrementNetworkTtl'])
@DecrementNetworkTtl.setter
def DecrementNetworkTtl(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['DecrementNetworkTtl'], value)
@property
def Experimenter(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Experimenter Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['Experimenter'])
@Experimenter.setter
def Experimenter(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['Experimenter'], value)
@property
def Group(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Group Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['Group'])
@Group.setter
def Group(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['Group'], value)
@property
def Output(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Output Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['Output'])
@Output.setter
def Output(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['Output'], value)
@property
def PopMpls(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Pop MPLS Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['PopMpls'])
@PopMpls.setter
def PopMpls(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['PopMpls'], value)
@property
def PopPbb(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Pop PBB Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['PopPbb'])
@PopPbb.setter
def PopPbb(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['PopPbb'], value)
@property
def PopVlan(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Pop VLAN Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['PopVlan'])
@PopVlan.setter
def PopVlan(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['PopVlan'], value)
@property
def PushMpls(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Push MPLS Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['PushMpls'])
@PushMpls.setter
def PushMpls(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['PushMpls'], value)
@property
def PushPbb(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Push PBB Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['PushPbb'])
@PushPbb.setter
def PushPbb(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['PushPbb'], value)
@property
def PushVlan(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Push VLAN Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['PushVlan'])
@PushVlan.setter
def PushVlan(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['PushVlan'], value)
@property
def SetField(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Set Field Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['SetField'])
@SetField.setter
def SetField(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['SetField'], value)
@property
def SetMplsTtl(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Set MPLS TTL Apply Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['SetMplsTtl'])
@SetMplsTtl.setter
def SetMplsTtl(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['SetMplsTtl'], value)
@property
def SetNetworkTtl(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Set Network TTL Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['SetNetworkTtl'])
@SetNetworkTtl.setter
def SetNetworkTtl(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['SetNetworkTtl'], value)
@property
def SetQueue(self):
# type: () -> bool
"""
Returns
-------
- bool: If selected, table supports Set Queue Write Actions.
"""
return self._get_attribute(self._SDM_ATT_MAP['SetQueue'])
@SetQueue.setter
def SetQueue(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['SetQueue'], value)
def update(self, CopyTtlIn=None, CopyTtlOut=None, DecrementMplsTtl=None, DecrementNetworkTtl=None, Experimenter=None, Group=None, Output=None, PopMpls=None, PopPbb=None, PopVlan=None, PushMpls=None, PushPbb=None, PushVlan=None, SetField=None, SetMplsTtl=None, SetNetworkTtl=None, SetQueue=None):
# type: (bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool) -> ApplyActions
"""Updates applyActions resource on the server.
Args
----
- CopyTtlIn (bool): If selected, table supports Copy TTL In Apply Actions.
- CopyTtlOut (bool): If selected, table supports Copy TTL Out Apply Actions.
- DecrementMplsTtl (bool): If selected, table supports Decrement MPLS TTL Apply Actions.
- DecrementNetworkTtl (bool): If selected, table supports Decrement Network TTL Write Actions.
- Experimenter (bool): If selected, table supports Experimenter Write Actions.
- Group (bool): If selected, table supports Group Write Actions.
- Output (bool): If selected, table supports Output Apply Actions.
- PopMpls (bool): If selected, table supports Pop MPLS Apply Actions.
- PopPbb (bool): If selected, table supports Pop PBB Write Actions.
- PopVlan (bool): If selected, table supports Pop VLAN Apply Actions.
- PushMpls (bool): If selected, table supports Push MPLS Apply Actions.
- PushPbb (bool): If selected, table supports Push PBB Write Actions.
- PushVlan (bool): If selected, table supports Push VLAN Apply Actions.
- SetField (bool): If selected, table supports Set Field Write Actions.
- SetMplsTtl (bool): If selected, table supports Set MPLS TTL Apply Actions.
- SetNetworkTtl (bool): If selected, table supports Set Network TTL Write Actions.
- SetQueue (bool): If selected, table supports Set Queue Write Actions.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
| 35.230061
| 299
| 0.616282
|
7754d47fbaab55136d7c64559c60cc5709df8016
| 3,928
|
py
|
Python
|
fdia_simulation/tests/benchmarks/test_noise_finder_2radars.py
|
QDucasse/FDIA_simulation
|
bdd0cb072f07b9a96fd82df581c9c7493ae66cbc
|
[
"MIT"
] | 7
|
2020-12-11T16:20:59.000Z
|
2022-01-11T21:18:25.000Z
|
fdia_simulation/tests/benchmarks/test_noise_finder_2radars.py
|
QDucasse/FDIA_simulation
|
bdd0cb072f07b9a96fd82df581c9c7493ae66cbc
|
[
"MIT"
] | 2
|
2020-09-25T06:56:56.000Z
|
2021-06-25T15:40:38.000Z
|
fdia_simulation/tests/benchmarks/test_noise_finder_2radars.py
|
QDucasse/FDIA_simulation
|
bdd0cb072f07b9a96fd82df581c9c7493ae66cbc
|
[
"MIT"
] | 5
|
2019-08-27T11:13:31.000Z
|
2021-11-26T12:52:19.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 29 15:49:35 2019
@author: qde
"""
import unittest
import numpy as np
from abc import ABC, abstractmethod
from filterpy.kalman import IMMEstimator
from fdia_simulation.models import Radar
from fdia_simulation.filters import MultipleRadarsFilterCA,MultipleRadarsFilterCV,MultipleRadarsFilterCT,MultipleRadarsFilterTA
from fdia_simulation.benchmarks import Benchmark, NoiseFinderMultipleRadars
from fdia_simulation.tests.benchmarks import NoiseFinder1RadarTestEnv
class NoiseFinderMultipleRadarsTestEnv(NoiseFinder1RadarTestEnv):
def setUp_radars_states(self):
# Radars definitions
self.radar1 = Radar(x=2000,y=2000)
self.radar2 = Radar(x=1000,y=1000)
self.radars = [self.radar1, self.radar2]
# States definition
self.states = np.array([[i,i/2,i/10]*3 for i in range(3000)])
def test_initialization_noise_finder(self):
self.assertEqual(self.process_noise_finder.radars, [Radar(x=2000,y=2000),Radar(x=1000,y=1000)])
self.assertTrue(np.array_equal(self.process_noise_finder.states,np.array([[i,i/2,i/10]*3 for i in range(3000)])))
class NoiseFinderMultipleRadarsCATestCase(NoiseFinderMultipleRadarsTestEnv,unittest.TestCase):
def setUp(self):
# Radar and states generation
self.setUp_radars_states()
# Filter definition
self.filter = MultipleRadarsFilterCA
# Process noise finder definition
self.process_noise_finder = NoiseFinderMultipleRadars(radars = self.radars,
states = self.states,
filter = self.filter)
# Reduction of the actual list for testing purposes
self.process_noise_finder.TO_TEST = [1.,2.,3.,4.,5.]
class NoiseFinderMultipleRadarsCVTestCase(NoiseFinderMultipleRadarsTestEnv,unittest.TestCase):
def setUp(self):
# Radar and states generation
self.setUp_radars_states()
# Filter definition
self.filter = MultipleRadarsFilterCV
# Process noise finder definition
self.process_noise_finder = NoiseFinderMultipleRadars(radars = self.radars,
states = self.states,
filter = self.filter)
# Reduction of the actual list for testing purposes
self.process_noise_finder.TO_TEST = [1.,2.,3.,4.,5.]
class NoiseFinderMultipleRadarsCTTestCase(NoiseFinderMultipleRadarsTestEnv,unittest.TestCase):
def setUp(self):
# Radar and states generation
self.setUp_radars_states()
# Filter definition
self.filter = MultipleRadarsFilterCT
# Process noise finder definition
self.process_noise_finder = NoiseFinderMultipleRadars(radars = self.radars,
states = self.states,
filter = self.filter)
# Reduction of the actual list for testing purposes
self.process_noise_finder.TO_TEST = [1.,2.,3.,4.,5.]
class NoiseFinderMultipleRadarsTATestCase(NoiseFinderMultipleRadarsTestEnv,unittest.TestCase):
def setUp(self):
# Radar and states generation
self.setUp_radars_states()
# Filter definition
self.filter = MultipleRadarsFilterTA
# Process noise finder definition
self.process_noise_finder = NoiseFinderMultipleRadars(radars = self.radars,
states = self.states,
filter = self.filter)
# Reduction of the actual list for testing purposes
self.process_noise_finder.TO_TEST = [1.,2.,3.,4.,5.]
if __name__ == "__main__":
unittest.main()
| 44.636364
| 136
| 0.634165
|
hexsha: 5ae2e2436f066389421279e500acf712408a3acd
size: 1,188
ext: py
lang: Python
max_stars_repo_path: backup/source_reading/tester-in-hardway/thoughts-in-python/intermedia/generator_iterator.py
max_stars_repo_name: qdriven/qdriven-read-write
max_stars_repo_head_hexsha: 7e49ee430470b9338392066d4bd5245e0a98aa1e
max_stars_repo_licenses: ["MIT"]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2021-04-26T05:21:09.000Z
max_stars_repo_stars_event_max_datetime: 2021-04-26T05:21:09.000Z
max_issues_repo_path: backup/source_reading/tester-in-hardway/thoughts-in-python/intermedia/generator_iterator.py
max_issues_repo_name: qdriven/qdriven-read-write
max_issues_repo_head_hexsha: 7e49ee430470b9338392066d4bd5245e0a98aa1e
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: backup/source_reading/tester-in-hardway/thoughts-in-python/intermedia/generator_iterator.py
max_forks_repo_name: qdriven/qdriven-read-write
max_forks_repo_head_hexsha: 7e49ee430470b9338392066d4bd5245e0a98aa1e
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
# encoding: utf-8
"""
# Generators
- iterator: enables traversal over a container
    * iterable
    * iterator
    * iteration
- generator
## Iterable/Iterator/Iteration
***Iterable***
- implements __iter__ (or __getitem__)
- returns an iterator
- any object that can provide an iterator
***Iterator***
- implements __next__
***Iteration***
The process of taking items from a container one at a time.
## Generator
Generators are iterators, but you can only iterate over them once: a generator does not
store all of its values in memory.
- in most cases a generator is written just like a function
- it yields its values instead of returning them
The example below shows how a generator is used. Each time the generator yields, the
outer print runs once. next() can be called on any iterator, because every iterator
has a __next__() method.
```python
def simple_generator_func():
    for index in range(10):
        yield index
g = simple_generator_func()
print(next(g))
print(next(g))
for item in simple_generator_func():
    print(item)
```
next() on an iterator:
```python
print(next(iter("thisis")))
```
"""
def simple_generator_func():
for index in range(10):
yield index
g = simple_generator_func()
print(next(g))
print(next(g))
for item in simple_generator_func():
print(item)
def fibon(n):
a = b = 1
for i in range(n):
yield a
a, b = b, b + a
for x in fibon(10000):
print(x)
print(next(iter("thisis")))
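# --- Editor's illustrative addition (not part of the original file) ---
# A minimal sketch of the __iter__/__next__ protocol described in the docstring above:
# a hand-rolled iterator rather than a generator. The class name Countdown and the
# start value are made up for this example.
class Countdown:
    def __init__(self, start):
        self.current = start
    def __iter__(self):
        # an iterator simply returns itself from __iter__
        return self
    def __next__(self):
        if self.current <= 0:
            # exhausted: signal the end of iteration the way built-in iterators do
            raise StopIteration
        value = self.current
        self.current -= 1
        return value
for n in Countdown(3):
    print(n)  # prints 3, 2, 1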
avg_line_length: 15.230769
max_line_length: 75
alphanum_fraction: 0.68771
hexsha: f956732c9e3414a1208891001d371c6aa2ca650e
size: 7,900
ext: py
lang: Python
max_stars_repo_path: test/new_tests/test_list_remove_range.py
max_stars_repo_name: jensengrey/aerospike-client-python
max_stars_repo_head_hexsha: 2242408f789c2f1521a1434bb39678c5e4034982
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: test/new_tests/test_list_remove_range.py
max_issues_repo_name: jensengrey/aerospike-client-python
max_issues_repo_head_hexsha: 2242408f789c2f1521a1434bb39678c5e4034982
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: test/new_tests/test_list_remove_range.py
max_forks_repo_name: jensengrey/aerospike-client-python
max_forks_repo_head_hexsha: 2242408f789c2f1521a1434bb39678c5e4034982
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2021-08-02T22:30:21.000Z
max_forks_repo_forks_event_max_datetime: 2021-08-02T22:30:21.000Z
content:
# -*- coding: utf-8 -*-
import pytest
import sys
import random
from .test_base_class import TestBaseClass
from aerospike import exception as e
aerospike = pytest.importorskip("aerospike")
try:
import aerospike
except:
print("Please install aerospike python client.")
sys.exit(1)
class TestListRemoveRange(object):
@pytest.fixture(autouse=True)
def setup(self, request, as_connection):
keys = []
for i in range(5):
key = ('test', 'demo', i)
rec = {'name': 'name%s' % (str(i)),
'contact_no': [i, i + 1, i + 2, i + 3,
i + 4, i + 5],
'city': ['Pune', 'Dehli', 'Mumbai']}
self.as_connection.put(key, rec)
keys.append(key)
key = ('test', 'demo', 1)
self.as_connection.list_append(key, "contact_no", [45, 50, 80])
keys.append(key)
def teardown():
"""
Teardown method.
"""
for key in keys:
try:
as_connection.remove(key)
except e.RecordNotFound:
pass
request.addfinalizer(teardown)
def test_pos_list_remove_range_with_correct_paramters(self):
"""
Invoke list_remove_range() removes elements from list with correct
parameters
"""
key = ('test', 'demo', 1)
status = self.as_connection.list_remove_range(
key, "contact_no", 3, 3)
assert status == 0
(key, _, bins) = self.as_connection.get(key)
assert bins == {'city': ['Pune', 'Dehli', 'Mumbai'], 'contact_no': [
1, 2, 3, [45, 50, 80]], 'name': 'name1'}
def test_pos_list_remove_range_with_correct_policy(self):
"""
Invoke list_remove_range() removes elements from list with correct
policy
"""
key = ('test', 'demo', 2)
policy = {
'timeout': 1000,
'retry': aerospike.POLICY_RETRY_ONCE,
'commit_level': aerospike.POLICY_COMMIT_LEVEL_MASTER
}
status = self.as_connection.list_remove_range(
key, 'contact_no', 0, 3, {}, policy)
assert status == 0
(key, _, bins) = self.as_connection.get(key)
assert bins == {
'city': ['Pune', 'Dehli', 'Mumbai'], 'contact_no': [5, 6, 7],
'name': 'name2'}
# Negative Tests
def test_neg_list_remove_range_with_no_parameters(self):
"""
Invoke list_remove_range() without any mandatory parameters.
"""
with pytest.raises(TypeError) as typeError:
self.as_connection.list_remove_range()
assert "Required argument 'key' (pos 1) not found" in str(
typeError.value)
def test_neg_list_remove_range_with_incorrect_policy(self):
"""
Invoke list_remove_range() with incorrect policy
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 0.5
}
try:
self.as_connection.list_remove_range(
key, "contact_no", 0, 2, {}, policy)
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "timeout is invalid"
def test_neg_list_remove_range_with_nonexistent_key(self):
"""
Invoke list_remove_range() with non-existent key
"""
if self.server_version < [3, 15, 2]:
pytest.skip("Change of error beginning in 3.15")
charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
minLength = 5
maxLength = 30
length = random.randint(minLength, maxLength)
key = ('test', 'demo', ''.join(map(lambda unused:
random.choice(charSet),
range(length))) + ".com")
with pytest.raises(e.RecordNotFound):
self.as_connection.list_remove_range(key, "abc", 0, 1)
def test_neg_list_remove_range_with_nonexistent_bin(self):
"""
Invoke list_remove_range() with non-existent bin
"""
key = ('test', 'demo', 1)
charSet = 'abcdefghijklmnopqrstuvwxyz1234567890'
minLength = 5
maxLength = 10
length = random.randint(minLength, maxLength)
bin = ''.join(map(lambda unused:
random.choice(charSet), range(length))) + ".com"
try:
self.as_connection.list_remove_range(key, bin, 0, 1)
except e.BinIncompatibleType as exception:
assert exception.code == 12
def test_neg_list_remove_range_with_extra_parameter(self):
"""
Invoke list_remove_range() with extra parameter.
"""
key = ('test', 'demo', 1)
policy = {'timeout': 1000}
with pytest.raises(TypeError) as typeError:
self.as_connection.list_remove_range(
key, "contact_no", 1, 1, {}, policy, "")
assert "list_remove_range() takes at most 6 arguments (7 given)" \
in str(typeError.value)
def test_neg_list_remove_range_policy_is_string(self):
"""
Invoke list_remove_range() with policy is string
"""
key = ('test', 'demo', 1)
try:
self.as_connection.list_remove_range(
key, "contact_no", 0, 1, {}, "")
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "policy must be a dict"
def test_neg_list_remove_range_key_is_none(self):
"""
Invoke list_remove_range() with key is none
"""
try:
self.as_connection.list_remove_range(
None, "contact_no", 0, 2)
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "key is invalid"
def test_neg_list_remove_range_bin_is_none(self):
"""
Invoke list_remove_range() with bin is none
"""
key = ('test', 'demo', 1)
try:
self.as_connection.list_remove_range(key, None, 1, 3)
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "Bin name should be of type string"
def test_neg_list_remove_range_with_negative_index(self):
"""
Invoke list_remove_range() with negative index
"""
key = ('test', 'demo', 1)
try:
self.as_connection.list_remove_range(
key, "contact_no", -56, 5)
except e.InvalidRequest as exception:
assert exception.code == 4
def test_neg_list_remove_range_with_negative_length(self):
"""
Invoke list_remove_range() with negative count
"""
key = ('test', 'demo', 1)
try:
self.as_connection.list_remove_range(
key, "contact_no", 0, -59)
except e.InvalidRequest as exception:
assert exception.code == 4
def test_neg_list_remove_range_meta_type_integer(self):
"""
Invoke list_remove_range() with metadata input is of type integer
"""
key = ('test', 'demo', 1)
try:
self.as_connection.list_remove_range(
key, "contact_no", 0, 2, 888)
except e.ParamError as exception:
assert exception.code == -2
assert exception.msg == "Metadata should be of type dictionary"
def test_neg_list_remove_range_index_type_string(self):
"""
Invoke list_remove_range() with index is of type string
"""
key = ('test', 'demo', 1)
with pytest.raises(TypeError) as typeError:
self.as_connection.list_remove_range(
key, "contact_no", "Fifth", 2)
assert "an integer is required" in str(typeError.value)
avg_line_length: 33.617021
max_line_length: 76
alphanum_fraction: 0.563291