content stringlengths 5 1.05M |
|---|
import random

# The classic Magic 8-Ball answers (a nine-entry subset of the canonical set).
messages = ['It is certain',
            'It is decidedly so',
            'Yes definitely',
            'Reply hazy try again',
            'Ask again later',
            'Concentrate and ask again',
            'My reply is no',
            'Outlook not so good',
            'Very doubtful']

# random.choice picks a uniformly random element directly — no need for
# manual index arithmetic with randint(0, len - 1).
print(random.choice(messages))
|
import os
import re
import subprocess
import xml.etree.ElementTree as ET
from toolsws.utils import wait_for
from toolsws.wstypes import GenericWebService
from toolsws.wstypes import JSWebService
from toolsws.wstypes import LighttpdPlainWebService
from toolsws.wstypes import LighttpdWebService
from toolsws.wstypes import PythonWebService
from toolsws.wstypes import TomcatWebService
from toolsws.wstypes import UwsgiWebService
from .backend import Backend
class GridEngineBackend(Backend):
    """
    A gridengine job that starts / stops a HTTP serving process (webservice)
    """

    # Specify config for each type that this backend accepts
    # Key is name of type passed in by commandline
    # cls is the Webservice class to instantiate
    # queue is an optional key that specifies which queue to run this on.
    #   options are: webgrid-lighttpd, webgrid-generic
    #   defaults to 'webgrid-generic'
    # release is an optional key that specifies which release to run this on.
    #   options are: stretch, buster
    #   defaults to stretch
    CONFIG = {
        "lighttpd": {"cls": LighttpdWebService, "queue": "webgrid-lighttpd"},
        "lighttpd-plain": {
            "cls": LighttpdPlainWebService,
            "queue": "webgrid-lighttpd",
        },
        "uwsgi-python": {"cls": PythonWebService},
        "uwsgi-plain": {"cls": UwsgiWebService},
        "nodejs": {"cls": JSWebService},
        "tomcat": {"cls": TomcatWebService},
        "generic": {"cls": GenericWebService},
    }

    def __init__(self, tool, wstype, release, extra_args=None):
        """
        :param tool: tool object this webservice belongs to (must have .name)
        :param wstype: webservice type key; must be a key of CONFIG
        :param release: default OS release; a per-type 'release' entry in
            CONFIG takes precedence when present
        :param extra_args: extra command line args forwarded to
            webservice-runner
        """
        super(GridEngineBackend, self).__init__(
            tool, wstype, extra_args=extra_args
        )
        cfg = GridEngineBackend.CONFIG[self.wstype]
        self.webservice = cfg["cls"](tool, extra_args)
        self.release = cfg.get("release", release)
        self.queue = cfg.get("queue", "webgrid-generic")
        # Grid job name; also used to find this job in qstat output.
        self.name = "{wstype}-{toolname}".format(
            wstype=self.wstype, toolname=tool.name
        )
        # Per-tool memory limit override; fall back to the 4G default when
        # the tool has no override file (or it is unreadable).
        try:
            memlimit = "/data/project/.system/config/{}.web-memlimit".format(
                self.tool.name
            )
            with open(memlimit) as f:
                self.memlimit = f.read().strip()
        except IOError:
            self.memlimit = "4G"

    def _get_job_xml(self):
        """
        Gets job status xml of this job

        :return: ET xml object if the job is found, None otherwise
        """
        output = subprocess.check_output(["qstat", "-xml"])
        # Fix XML.
        output = re.sub("JATASK:[^>]*", "jatask", output.decode("utf-8"))
        # GE is stupid.
        # Returns output like:
        # <><ST_name>blah</ST_name></>
        # If the job is not found.
        if "<unknown_jobs" in output and "<>" in output:
            return None
        xml = ET.fromstring(output)
        job_name_node = xml.find('.//job_list[JB_name="%s"]' % self.name)
        return job_name_node

    def request_start(self):
        """Submit the grid job that runs this webservice via qsub."""
        self.webservice.check(self.wstype)
        cmd = [
            "qsub",
            "-e",
            os.path.expanduser("~/error.log"),
            "-o",
            os.path.expanduser("~/error.log"),
            "-i",
            "/dev/null",
            "-q",
            self.queue,
            "-l",
            "h_vmem=%s,release=%s" % (self.memlimit, self.release),
            "-b",
            "y",
            "-N",
            self.name,
            "/usr/bin/webservice-runner",
            "--register-proxy",
            "--type",
            self.webservice.name,
        ]
        if self.extra_args:
            cmd.extend(self.extra_args)
        # subprocess.DEVNULL instead of open(os.devnull, "wb"): the latter
        # leaks an open file handle, since the file object was never closed.
        subprocess.check_call(cmd, stdout=subprocess.DEVNULL)

    def request_stop(self):
        """Delete the grid job backing this webservice via qdel."""
        cmd = ["/usr/bin/qdel", self.name]
        subprocess.check_call(cmd, stdout=subprocess.DEVNULL)

    def request_restart(self):
        """Stop the job, wait until it is gone, then start it again."""
        # On the grid, it is important to take down the service before starting
        # it so it runs portreleaser, etc.
        self.request_stop()
        wait_for(lambda: self.get_state() == Backend.STATE_STOPPED, "")
        self.request_start()
        wait_for(
            lambda: self.get_state() == Backend.STATE_RUNNING, "Restarting..."
        )

    def get_state(self):
        """
        :return: one of Backend.STATE_RUNNING / STATE_PENDING / STATE_STOPPED
        """
        job = self._get_job_xml()
        if job is not None:
            # qstat state strings containing 'r' mean the job is running;
            # anything else (queued, waiting, ...) is treated as pending.
            state = job.findtext(".//state").lower()
            if "r" in state:
                return Backend.STATE_RUNNING
            else:
                return Backend.STATE_PENDING
        return Backend.STATE_STOPPED
|
"""
Test the Input block.
"""
# pylint: disable=missing-docstring, no-self-use, protected-access
# pylint: disable=invalid-name, redefined-outer-name, unused-argument, unused-variable
# pylint: disable=wildcard-import, unused-wildcard-import
import pytest
import edzed
from .utils import *
def test_noinit(circuit):
    """Test missing init."""
    # An Input created without ``initdef`` has no initial value, so the
    # circuit must refuse to finish initialization.
    edzed.Input('no_init')
    with pytest.raises(edzed.EdzedCircuitError, match='not initialized'):
        init(circuit)
def test_init(circuit):
    """Initial value is assigned on init."""
    DEFAULT = 'default_value'
    inp = edzed.Input('input', initdef=DEFAULT)
    # Before initialization the block has no output at all.
    assert inp.output is edzed.UNDEF
    init(circuit)
    assert inp.output == DEFAULT
    inp.event('put', value=3.14)
    assert inp.output == 3.14
    # Sending the stored default value resets the input to its initial state.
    inp.event('put', value=inp.initdef)
    assert inp.output == DEFAULT
def test_events(circuit):
    """Inputs support only the put event."""
    gate = edzed.Input('input', initdef=None)
    init(circuit)
    gate.event('put', value=1)
    assert gate.output == 1
    # Unknown keyword data must not interfere with the value.
    gate.event('put', value=2, junk=-1)
    assert gate.output == 2
    # .put(X) is shorthand for .event('put', value=X).
    gate.put(3)
    assert gate.output == 3
    gate.put(4, junk=-1)
    assert gate.output == 4
    # A put without a value is a usage error ...
    with pytest.raises(TypeError):
        gate.event('put')
    # ... and any event other than put is rejected.
    with pytest.raises(edzed.EdzedUnknownEvent):
        gate.event('sleep')
def test_schema(circuit):
    """schema validator test."""
    plus_100 = lambda x: int(x) + 100
    number_input = edzed.Input('input', schema=plus_100, initdef=23)
    init(circuit)
    # The schema transformation is applied to the default value as well.
    assert number_input.output == 123
    # A value the schema cannot convert is rejected; output is unchanged.
    assert number_input.event('put', value='string') is False
    assert number_input.output == 123
    # A convertible value is accepted and transformed.
    assert number_input.event('put', value='68') is True
    assert number_input.output == 168
def test_check(circuit):
    """check validator test."""
    multiple_of_5 = lambda x: x % 5 == 0
    checked = edzed.Input('input', check=multiple_of_5, initdef=5)
    init(circuit)
    assert checked.output == 5
    # Values passing the predicate are stored ...
    assert checked.put(25) is True
    assert checked.output == 25
    # ... values failing it are dropped and the output stays put.
    assert checked.put(68) is False
    assert checked.output == 25
def test_check_initdef(circuit):
    """initdef value is checked immediately."""
    multiple_of_5 = lambda x: x % 5 == 0
    with pytest.raises(ValueError, match="rejected"):
        # default of 23 does not pass the modulo 5 check
        edzed.Input('input', check=multiple_of_5, initdef=23)
def test_allowed(circuit):
    """allowed validator test."""
    ALLOWED = (False, 'YES', 2.5)
    NOT_ALLOWED = (True, None, '', 'hello', 99)
    inp = edzed.Input('input', allowed=ALLOWED, initdef=False)
    init(circuit)
    # Every whitelisted value is accepted and becomes the output.
    for accepted in ALLOWED:
        assert inp.event('put', value=accepted) is True
        assert inp.output == accepted
    # Rejected values leave the last accepted value in place.
    last_good = inp.output
    for rejected in NOT_ALLOWED:
        assert inp.event('put', value=rejected) is False
        assert inp.output == last_good
def test_validators(circuit):
    """Test multiple validators."""
    # Validators are applied in the order: check, allowed, schema.
    inputs = [
        edzed.Input('input1', allowed=[False, True], initdef=False),
        edzed.Input(
            'input2',
            check=lambda x: isinstance(x, bool), allowed=[False, True], initdef=False),
        edzed.Input('input3', schema=bool, allowed=[False, True], initdef=False),
        edzed.Input('input4', schema=bool, initdef=False),
    ]
    init(circuit)
    VALUES = (None, 1, 99)
    ACCEPTED = [
        (False, True, False), # because 1 == True, i.e. 1 is in allowed
        (False, False, False), # no value passes the strict type checking
        (False, True, False), # similar to input1
        (True, True, True), # each value will be converted to bool
    ]
    OUTPUT = [
        (False, 1, 1),
        (False, False, False),
        (False, True, True),
        (False, True, True),
    ]
    for block, accepted_row, output_row in zip(inputs, ACCEPTED, OUTPUT):
        # Start each block from its default before feeding the test values.
        block.event('put', value=block.initdef)
        for value, want_accept, want_output in zip(VALUES, accepted_row, output_row):
            assert block.put(value) is want_accept
            assert block.output == want_output
|
# -*- coding: utf-8 -*-
import json
import os
from datetime import timedelta
from openprocurement.api.models import get_now
import openprocurement.relocation.api.tests.base as base_test
from copy import deepcopy
from openprocurement.api.tests.base import (
PrefixedRequestClass, test_tender_data, test_organization
)
from openprocurement.relocation.api.tests.base import OwnershipWebTest, test_transfer_data, OpenEUOwnershipWebTest, test_eu_tender_data, test_ua_bid_data
from openprocurement.contracting.api.tests.base import test_contract_data, test_tender_token
from openprocurement.tender.competitivedialogue.tests.base import (BaseCompetitiveDialogWebTest,
test_tender_stage2_data_ua,
test_access_token_stage1)
from openprocurement.tender.openeu.models import TENDERING_DURATION
from openprocurement.api.models import get_now
from webtest import TestApp
class DumpsTestAppwebtest(TestApp):
    # A webtest TestApp that, while ``self.file_obj`` is an open file,
    # records every request and response it makes into that file.
    # The docs test cases point file_obj at docs/source/tutorial/*.http
    # fixtures so the tutorial examples are generated from real traffic.
    # NOTE: this is Python 2 style code (str/bytes mixing, .encode('utf8')).

    def do_request(self, req, status=None, expect_errors=None):
        """Perform the request, dumping request and response to file_obj."""
        # Rewrite the Host header so the dumped examples show the
        # public sandbox hostname instead of the test host.
        req.headers.environ["HTTP_HOST"] = "api-sandbox.openprocurement.org"
        if hasattr(self, 'file_obj') and not self.file_obj.closed:
            self.file_obj.write(req.as_bytes(True))
            self.file_obj.write("\n")
            if req.body:
                try:
                    # Pretty-print the JSON body; best effort only —
                    # non-JSON bodies are silently skipped.
                    self.file_obj.write(
                        'DATA:\n' + json.dumps(json.loads(req.body), indent=2, ensure_ascii=False).encode('utf8'))
                    self.file_obj.write("\n")
                except:
                    pass
                self.file_obj.write("\n")
        resp = super(DumpsTestAppwebtest, self).do_request(req, status=status, expect_errors=expect_errors)
        if hasattr(self, 'file_obj') and not self.file_obj.closed:
            # Dump response headers, sorted, minus content-length
            # (it would churn the fixtures on every formatting change).
            headers = [(n.title(), v)
                       for n, v in resp.headerlist
                       if n.lower() != 'content-length']
            headers.sort()
            self.file_obj.write(str('Response: %s\n%s\n') % (
                resp.status,
                str('\n').join([str('%s: %s') % (n, v) for n, v in headers]),
            ))
            if resp.testbody:
                try:
                    # Best-effort pretty-print of the response body.
                    self.file_obj.write(json.dumps(json.loads(resp.testbody), indent=2, ensure_ascii=False).encode('utf8'))
                except:
                    pass
            self.file_obj.write("\n\n")
        return resp
class TransferDocsTest(OwnershipWebTest):
    """Generate the ownership-transfer tutorial fixtures from live requests.

    Each ``with open(...) as self.app.file_obj`` block records one request
    and its response into a docs/source/tutorial/*.http file; the statement
    order IS the tutorial, so do not reorder the calls.
    """

    def setUp(self):
        # Use the dumping TestApp so requests are written to file_obj.
        self.app = DumpsTestAppwebtest(
            "config:tests.ini", relative_to=os.path.dirname(base_test.__file__))
        self.app.RequestClass = PrefixedRequestClass
        self.app.authorization = ('Basic', ('broker', ''))
        self.couchdb_server = self.app.app.registry.couchdb_server
        self.db = self.app.app.registry.db

    def test_docs(self):
        """Walk tender, bid, complaint and contract ownership transfers."""
        data = deepcopy(test_tender_data)
        # broker creates a tender and keeps its transfer token.
        self.app.authorization = ('Basic', ('broker', ''))
        with open('docs/source/tutorial/create-tender.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders?opt_pretty=1', {"data": data})
            self.assertEqual(response.status, '201 Created')
        tender = response.json['data']
        self.tender_id = tender['id']
        owner_token = response.json['access']['token']
        orig_tender_transfer_token = response.json['access']['transfer']
        # broker1 creates a Transfer object, then uses the original tender
        # transfer token to take ownership of the tender.
        self.app.authorization = ('Basic', ('broker1', ''))
        with open('docs/source/tutorial/create-transfer.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": {}})
            self.assertEqual(response.status, '201 Created')
            self.assertEqual(response.content_type, 'application/json')
        transfer = response.json['data']
        new_access_token = response.json['access']['token']
        new_transfer_token = response.json['access']['transfer']
        with open('docs/source/tutorial/get-transfer.http', 'w') as self.app.file_obj:
            response = self.app.get('/transfers/{}'.format(transfer['id']))
        with open('docs/source/tutorial/change-tender-ownership.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/ownership'.format(self.tender_id),
                                          {"data": {"id": transfer['id'], 'transfer': orig_tender_transfer_token}})
            self.assertEqual(response.status, '200 OK')
            # Tokens must never be echoed back in the response body.
            self.assertNotIn('transfer', response.json['data'])
            self.assertNotIn('transfer_token', response.json['data'])
            self.assertEqual('broker1', response.json['data']['owner'])
        with open('docs/source/tutorial/get-used-transfer.http', 'w') as self.app.file_obj:
            response = self.app.get('/transfers/{}'.format(transfer['id']))
        # The new owner can modify the tender with the new access token.
        with open('docs/source/tutorial/modify-tender.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/tenders/{}?acc_token={}'.format(self.tender_id, new_access_token),
                                           {"data": {"description": "broker1 now can change the tender"}})
            self.assertEqual(response.status, '200 OK')
            self.assertEqual(response.json['data']['description'], 'broker1 now can change the tender')
        #################
        # Bid ownership #
        #################
        self.set_tendering_status()
        # broker submits a bid ...
        self.app.authorization = ('Basic', ('broker', ''))
        with open('docs/source/tutorial/create-bid.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/bids'.format(
                self.tender_id), {'data': {'tenderers': [test_organization], "value": {"amount": 500}}})
            self.assertEqual(response.status, '201 Created')
            self.assertEqual(response.content_type, 'application/json')
        bid = response.json['data']
        bid_tokens = response.json['access']
        # ... and broker2 takes it over via a new Transfer.
        self.app.authorization = ('Basic', ('broker2', ''))
        with open('docs/source/tutorial/create-bid-transfer.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": {}})
            self.assertEqual(response.status, '201 Created')
        transfer = response.json['data']
        transfer_tokens = response.json['access']
        with open('docs/source/tutorial/change-bid-ownership.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/bids/{}/ownership'.format(self.tender_id, bid['id']),
                                          {"data": {"id": transfer['id'], 'transfer': bid_tokens['transfer']}})
            self.assertEqual(response.status, '200 OK')
        with open('docs/source/tutorial/modify-bid.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/tenders/{}/bids/{}?acc_token={}'.format(self.tender_id, bid['id'], transfer_tokens['token']), {"data": {'value': {"amount": 450}}})
            self.assertEqual(response.status, '200 OK')
        with open('docs/source/tutorial/get-used-bid-transfer.http', 'w') as self.app.file_obj:
            response = self.app.get('/transfers/{}'.format(transfer['id']))
        #######################
        # Complaint ownership #
        #######################
        # broker2 files a complaint; broker takes it over via a Transfer.
        self.app.authorization = ('Basic', ('broker2', ''))
        with open('docs/source/tutorial/create-complaint.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
            self.assertEqual(response.status, '201 Created')
        complaint = response.json['data']
        complaint_token = response.json['access']['token']
        complaint_transfer = response.json['access']['transfer']
        self.app.authorization = ('Basic', ('broker', ''))
        with open('docs/source/tutorial/create-complaint-transfer.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": {}})
            self.assertEqual(response.status, '201 Created')
        transfer = response.json['data']
        transfer_tokens = response.json['access']
        with open('docs/source/tutorial/change-complaint-ownership.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/complaints/{}/ownership'.format(self.tender_id, complaint['id']),
                                          {"data": {"id": transfer['id'], 'transfer': complaint_transfer}})
            self.assertEqual(response.status, '200 OK')
        complaint_transfer = transfer_tokens['transfer']
        with open('docs/source/tutorial/modify-complaint.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], transfer_tokens['token']), {"data": {'status': 'cancelled', 'cancellationReason': 'Important reason'}})
            self.assertEqual(response.status, '200 OK')
        with open('docs/source/tutorial/get-used-complaint-transfer.http', 'w') as self.app.file_obj:
            response = self.app.get('/transfers/{}'.format(transfer['id']))
        #############################
        # Award complaint ownership #
        #############################
        # Prepare: broker2 bids, tender is qualified, an award is created.
        self.app.authorization = ('Basic', ('broker2', ''))
        response = self.app.post_json('/tenders/{}/bids'.format(
            self.tender_id), {'data': {'tenderers': [test_organization], "value": {"amount": 350}}})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        bid = response.json['data']
        bid_tokens = response.json['access']
        self.set_qualification_status()
        self.app.authorization = ('Basic', ('token', ''))
        response = self.app.post_json('/tenders/{}/awards'.format(
            self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': bid['id']}})
        award = response.json['data']
        self.award_id = award['id']
        # broker2 complains about the award; broker takes the complaint over.
        self.app.authorization = ('Basic', ('broker2', ''))
        with open('docs/source/tutorial/create-award-complaint.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format(self.tender_id, self.award_id, bid_tokens['token']), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
            self.assertEqual(response.status, '201 Created')
        complaint = response.json['data']
        complaint_token = response.json['access']['token']
        complaint_transfer = response.json['access']['transfer']
        self.app.authorization = ('Basic', ('broker', ''))
        with open('docs/source/tutorial/create-award-complaint-transfer.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": {}})
            self.assertEqual(response.status, '201 Created')
        transfer = response.json['data']
        transfer_tokens = response.json['access']
        with open('docs/source/tutorial/change-award-complaint-ownership.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/awards/{}/complaints/{}/ownership'.format(self.tender_id, self.award_id, complaint['id']),
                                          {"data": {"id": transfer['id'], 'transfer': complaint_transfer}})
            self.assertEqual(response.status, '200 OK')
        complaint_transfer = transfer_tokens['transfer']
        with open('docs/source/tutorial/modify-award-complaint.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/tenders/{}/awards/{}/complaints/{}?acc_token={}'.format(self.tender_id, self.award_id, complaint['id'], transfer_tokens['token']), {"data": {'status': 'cancelled', 'cancellationReason': 'Important reason'}})
            self.assertEqual(response.status, '200 OK')
        with open('docs/source/tutorial/get-used-award-complaint-transfer.http', 'w') as self.app.file_obj:
            response = self.app.get('/transfers/{}'.format(transfer['id']))
        ########################
        # Contracting transfer #
        ########################
        # A contract is created by the contracting service on broker's
        # behalf; broker obtains credentials, then broker3 takes ownership.
        data = deepcopy(test_contract_data)
        tender_token = data['tender_token']
        self.app.authorization = ('Basic', ('contracting', ''))
        response = self.app.post_json('/contracts', {'data': data})
        self.assertEqual(response.status, '201 Created')
        self.contract = response.json['data']
        self.assertEqual('broker', response.json['data']['owner'])
        self.contract_id = self.contract['id']
        self.app.authorization = ('Basic', ('broker', ''))
        with open('docs/source/tutorial/get-contract-transfer.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/contracts/{}/credentials?acc_token={}'.format(self.contract_id, tender_token),
                                           {'data': ''})
            self.assertEqual(response.status, '200 OK')
        token = response.json['access']['token']
        self.contract_transfer = response.json['access']['transfer']
        self.app.authorization = ('Basic', ('broker3', ''))
        with open('docs/source/tutorial/create-contract-transfer.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": test_transfer_data})
            self.assertEqual(response.status, '201 Created')
        transfer = response.json['data']
        self.assertIn('date', transfer)
        transfer_creation_date = transfer['date']
        new_access_token = response.json['access']['token']
        new_transfer_token = response.json['access']['transfer']
        with open('docs/source/tutorial/change-contract-ownership.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/contracts/{}/ownership'.format(self.contract_id),
                                          {"data": {"id": transfer['id'], 'transfer': self.contract_transfer}})
            self.assertEqual(response.status, '200 OK')
            self.assertNotIn('transfer', response.json['data'])
            self.assertNotIn('transfer_token', response.json['data'])
            self.assertEqual('broker3', response.json['data']['owner'])
        with open('docs/source/tutorial/modify-contract.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/contracts/{}?acc_token={}'.format(self.contract_id, new_access_token),
                                           {"data": {"description": "broker3 now can change the contract"}})
            self.assertEqual(response.status, '200 OK')
            self.assertEqual(response.json['data']['description'], 'broker3 now can change the contract')
        with open('docs/source/tutorial/get-used-contract-transfer.http', 'w') as self.app.file_obj:
            response = self.app.get('/transfers/{}'.format(transfer['id']))
        # Create Transfer
        with open('docs/source/tutorial/create-contract-transfer-credentials.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": {}})
            self.assertEqual(response.status, '201 Created')
            self.assertEqual(response.content_type, 'application/json')
        transfer = response.json['data']
        contract_token = response.json['access']['token']
        new_transfer_token = response.json['access']['transfer']
        # Getting access
        with open('docs/source/tutorial/change-contract-credentials.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/contracts/{}/ownership'.format(self.contract_id),
                                          {"data": {"id": transfer['id'], 'tender_token': test_tender_token}})
            self.assertEqual(response.status, '200 OK')
            self.assertNotIn('transfer', response.json['data'])
            self.assertNotIn('transfer_token', response.json['data'])
            self.assertEqual('broker3', response.json['data']['owner'])
        # Check Transfer is used
        with open('docs/source/tutorial/get-used-contract-credentials-transfer.http', 'w') as self.app.file_obj:
            response = self.app.get('/transfers/{}'.format(transfer['id']))
        # Modify contract with new credentials
        with open('docs/source/tutorial/modify-contract-credentials.http', 'w') as self.app.file_obj:
            response = self.app.patch_json('/contracts/{}?acc_token={}'.format(self.contract_id, contract_token),
                                           {"data": {"description": "new credentials works"}})
            self.assertEqual(response.status, '200 OK')
            self.assertEqual(response.json['data']['description'], 'new credentials works')
class EuTransferDocsTest(OpenEUOwnershipWebTest):
    """Generate qualification-complaint transfer tutorial fixtures (EU procedure)."""

    def setUp(self):
        # Use the dumping TestApp so requests are written to file_obj.
        self.app = DumpsTestAppwebtest(
            "config:tests.ini", relative_to=os.path.dirname(base_test.__file__))
        self.app.RequestClass = PrefixedRequestClass
        self.app.authorization = ('Basic', ('broker', ''))
        self.couchdb_server = self.app.app.registry.couchdb_server
        self.db = self.app.app.registry.db

    def test_eu_procedure(self):
        """Qualification complaint ownership change in an openEU tender."""
        ##############################
        # Qualification owner change #
        ##############################
        self.app.authorization = ('Basic', ('broker', ''))
        data = deepcopy(test_eu_tender_data)
        with open('docs/source/tutorial/create-tender-for-qualification.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders?opt_pretty=1', {"data": data})
            self.assertEqual(response.status, '201 Created')
        tender = response.json['data']
        self.tender_token = response.json['access']['token']
        self.tender_id = tender['id']
        self.set_tendering_status()
        # broker (tender owner) creates a bid
        with open('docs/source/tutorial/create-first-bid-for-qualification.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/bids'.format(self.tender_id), test_ua_bid_data)
            self.assertEqual(response.status, '201 Created')
        bid1_token = response.json['access']['token']
        # broker4 creates a second bid (pre-qualification needs two bids)
        auth = self.app.authorization
        self.app.authorization = ('Basic', ('broker4', ''))
        response = self.app.post_json('/tenders/{}/bids'.format(self.tender_id), test_ua_bid_data)
        self.assertEqual(response.status, '201 Created')
        bid2_id = response.json['data']['id']
        bid2_token = response.json['access']['token']
        # broker changes status to pre-qualification (chronograph tick)
        self.set_pre_qualification_status()
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
        self.app.authorization = auth
        # approve all generated qualifications
        response = self.app.get('/tenders/{}/qualifications'.format(self.tender_id))
        self.assertEqual(response.status, "200 OK")
        qualifications = response.json['data']
        for qualification in qualifications:
            response = self.app.patch_json('/tenders/{}/qualifications/{}?acc_token={}'.format(self.tender_id, qualification['id'], self.tender_token),
                                           {"data": {"status": "active", "qualified": True, "eligible": True}})
            self.assertEqual(response.status, "200 OK")
        # active.pre-qualification.stand-still
        response = self.app.patch_json('/tenders/{}?acc_token={}'.format(self.tender_id, self.tender_token),
                                       {"data": {"status": "active.pre-qualification.stand-still"}})
        self.assertEqual(response.status, "200 OK")
        self.assertEqual(response.json['data']['status'], "active.pre-qualification.stand-still")
        qualification_id = qualifications[0]['id']
        # broker4 creates a complaint against the first qualification
        self.app.authorization = ('Basic', ('broker4', ''))
        with open('docs/source/tutorial/create-qualification-complaint.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/qualifications/{}/complaints?acc_token={}'.format(self.tender_id, qualification_id, bid2_token),
                                          {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization, 'status': 'claim'}})
            self.assertEqual(response.status, '201 Created')
        complaint_id = response.json["data"]["id"]
        complaint_transfer = response.json['access']['transfer']
        # broker4 creates a Transfer and changes the complaint owner
        self.app.authorization = ('Basic', ('broker4', ''))
        with open('docs/source/tutorial/create-qualification-complaint-transfer.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": test_transfer_data})
            self.assertEqual(response.status, '201 Created')
        transfer = response.json['data']
        self.assertIn('date', transfer)
        transfer = response.json['data']
        transfer_tokens = response.json['access']
        with open('docs/source/tutorial/change-qualification-complaint-owner.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/qualifications/{}/complaints/{}/ownership'.format(self.tender_id, qualification_id, complaint_id),
                                          {"data": {"id": transfer['id'], 'transfer': complaint_transfer}})
            self.assertEqual(response.status, '200 OK')
class CompetitiveDialogueStage2TransferDocsTest(BaseCompetitiveDialogWebTest):
    """Generate stage-2 tender transfer tutorial fixtures (competitive dialogue)."""

    def setUp(self):
        # Use the dumping TestApp so requests are written to file_obj.
        self.app = DumpsTestAppwebtest(
            "config:tests.ini", relative_to=os.path.dirname(base_test.__file__))
        self.app.RequestClass = PrefixedRequestClass
        self.app.authorization = ('Basic', ('broker', ''))
        self.couchdb_server = self.app.app.registry.couchdb_server
        self.db = self.app.app.registry.db

    def test_stage2(self):
        """Ownership change of a stage-2 tender created by the bridge."""
        # create tender with bridge (competitive_dialogue service user)
        self.app.authorization = ('Basic', ('competitive_dialogue', ''))
        response = self.app.post_json('/tenders?opt_pretty=1', {"data": test_tender_stage2_data_ua})
        self.assertEqual(response.status, '201 Created')
        self.tender_id = response.json['data']['id']
        tender = response.json['data']
        # get credentials of tender using the stage-1 access token
        self.app.authorization = ('Basic', ('broker', ''))
        self.set_status('draft.stage2')
        response = self.app.patch_json('/tenders/{}/credentials?acc_token={}&opt_pretty=1'.format(self.tender_id, test_access_token_stage1))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']['status'], 'draft.stage2')
        tender_transfer_token = response.json['access']['transfer']
        # change tender owner: broker3 creates a Transfer and claims the tender
        self.app.authorization = ('Basic', ('broker3', ''))
        with open('docs/source/tutorial/create-transfer-stage2.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/transfers', {"data": test_transfer_data})
            self.assertEqual(response.status, '201 Created')
        transfer = response.json['data']
        self.assertIn('date', transfer)
        new_access_token = response.json['access']['token']
        new_transfer_token = response.json['access']['transfer']
        with open('docs/source/tutorial/change-tender-ownership-stage2.http', 'w') as self.app.file_obj:
            response = self.app.post_json('/tenders/{}/ownership'.format(self.tender_id),
                                          {"data": {"id": transfer['id'], 'transfer': tender_transfer_token}})
            self.assertEqual(response.status, '200 OK')
            self.assertNotIn('transfer', response.json['data'])
            self.assertNotIn('transfer_token', response.json['data'])
            self.assertEqual('broker3', response.json['data']['owner'])
        # broker3 can change the tender
        with open('docs/source/tutorial/modify-tender-stage2.http', 'w') as self.app.file_obj:
            now = get_now()
            response = self.app.patch_json('/tenders/{}?acc_token={}'.format(self.tender_id, new_access_token),
                                           {"data": {"tenderPeriod": {"endDate": (now + TENDERING_DURATION).isoformat()}}})
            self.assertEqual(response.status, '200 OK')
            self.assertNotIn('transfer', response.json['data'])
            self.assertNotIn('transfer_token', response.json['data'])
            self.assertIn('owner', response.json['data'])
            self.assertEqual(response.json['data']['owner'], 'broker3')
            self.assertEqual(response.json['data']["tenderPeriod"]['endDate'], (now + TENDERING_DURATION).isoformat())
from django.shortcuts import render
from django.http import HttpResponseRedirect
from core.models import Post
from django.contrib.admin.models import LogEntry
def theory_page(request):
    """
    Theory page with blog items.

    :param request: HttpRequest
    :return: rendered ``main.html`` response with type=1 posts
    """
    try:
        query = Post.objects.filter(type=1).order_by('order_id')
    except ValueError:
        query = []
    num = len(query)
    posts = []
    for obj in query:
        # Teaser: first ten words. join() preserves the leading space the
        # original incremental concatenation produced.
        text = "".join(" " + word for word in obj.text.split(' ')[:10])
        # NOTE(review): this uses ``== 0`` while practice_page uses ``!= 0``
        # for the same attachment check — looks inverted; confirm intent.
        isFile = len(obj.doc.name) == 0
        try:
            size = round(obj.doc.size / 1024, 1)
        except (ValueError, OSError):
            # FieldFile.size raises ValueError when no file is attached and
            # OSError when the file is missing from storage; previously a
            # bare except hid every failure here.
            size = 0
            isFile = False
        posts.append({'title': obj.title,
                      'short': text + "...",
                      'date': str(obj.published).split('+')[0],
                      'isFile': isFile,
                      'file': obj.doc,
                      'name_s': len(obj.doc.name),
                      'size': size,
                      'text': obj.text})
    # page — section id. If 0 —> theory page, 1 —> practice page, 2 —> info page
    return render(request, 'main.html', {'posts': posts,
                                         'len': num,
                                         'page': 0,
                                         'actions': get_recent_actions()})
def get_recent_actions():
    """
    Return human-readable descriptions of all recent admin log actions.

    :return: list of strings (Russian UI text)
    """
    descriptions = []
    # LogEntry stringifies to e.g. "Added "<object repr>""; slice off the
    # surrounding quote characters when extracting the object description.
    for entry in (str(item) for item in LogEntry.objects.all()):
        if "Added" in entry:
            descriptions.append("Добавлена " + entry.split(sep='Added')[1][2:-2])
        if "Deleted" in entry:
            descriptions.append("Удалена " + entry.split(sep='Deleted')[1][2:-2])
    return descriptions
def practice_page(request):
    """
    Practice page with blog items.

    :param request: HttpRequest
    :return: rendered ``main.html`` response with type=0 posts
    """
    try:
        query = Post.objects.filter(type=0).order_by('order_id')
    except ValueError:
        query = []
    num = len(query)
    posts = []
    for obj in query:
        # Teaser: first ten words. join() preserves the leading space the
        # original incremental concatenation produced.
        text = "".join(" " + word for word in obj.text.split(' ')[:10])
        # True when the post has an attached document (non-empty file name).
        isFile = len(obj.doc.name) != 0
        try:
            size = round(obj.doc.size / 1024, 1)
        except (ValueError, OSError):
            # FieldFile.size raises ValueError when no file is attached and
            # OSError when the file is missing from storage; previously a
            # bare except hid every failure here.
            size = 0
            isFile = False
        posts.append({'title': obj.title,
                      'short': text + "...",
                      'date': str(obj.published).split('+')[0],
                      'isFile': isFile,
                      'file': obj.doc,
                      'name_s': len(obj.doc.name),
                      'size': size,
                      'text': obj.text})
    # page=1 selects the practice section in the shared template.
    return render(request, 'main.html', {'posts': posts,
                                         'len': num,
                                         'page': 1,
                                         'actions': get_recent_actions()})
def info_page(request):
    """
    Info page with blog items.

    :param request: HttpRequest
    :return: rendered ``main.html`` response with type=2 posts
    """
    try:
        query = Post.objects.filter(type=2).order_by('published')
    except ValueError:
        query = []
    num = len(query)
    posts = []
    for obj in query:
        # Teaser: first ten words. join() preserves the leading space the
        # original incremental concatenation produced.
        text = "".join(" " + word for word in obj.text.split(' ')[:10])
        # NOTE(review): this uses ``== 0`` while practice_page uses ``!= 0``
        # for the same attachment check — looks inverted; confirm intent.
        isFile = len(obj.doc.name) == 0
        try:
            size = round(obj.doc.size / 1024, 1)
        except (ValueError, OSError):
            # FieldFile.size raises ValueError when no file is attached and
            # OSError when the file is missing from storage; previously a
            # bare except hid every failure here.
            size = 0
            isFile = False
        posts.append({'title': obj.title,
                      'short': text + "...",
                      'date': str(obj.published).split('+')[0],
                      'isFile': isFile,
                      'file': obj.doc,
                      'name_s': len(obj.doc.name),
                      'size': size,
                      'text': obj.text})
    # page=2 selects the info section in the shared template.
    return render(request, 'main.html', {'posts': posts,
                                         'len': num,
                                         'page': 2,
                                         'actions': get_recent_actions()})
|
import json
import pickle
import time
from queue import Queue, Empty
from threading import Thread, Event
import numpy as np
import os
import sys
import cv2
# Multithreaded script to run the evaluation for the Orig2D ablation experiment.
# Online version displays the result.
# Offline version first saves all detections and then tracks them separately.
# When run directly (not as a package module), put the repository root on
# sys.path so the dataset_utils / keras_retinanet imports below resolve.
if __name__ == "__main__" and __package__ is None:
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
    # import keras_retinanet.bin  # noqa: F401
    # __package__ = "keras_retinanet.bin"
    print(sys.path)
from dataset_utils.simple_tracker import SimpleTracker
from dataset_utils.writer import Writer
from keras_retinanet.utils.image import preprocess_image
import keras_retinanet.models
def test_video(model, video_path, json_path, im_w, im_h, batch, name, out_path=None, online=True):
    """
    Run detection over one video using a three-stage threaded pipeline:
    reader -> GPU inference -> postprocessing.

    :param model: keras-retinanet model; `predict_on_batch` is called on image batches
    :param video_path: path to the video file; its directory may contain 'video_mask.png'
    :param json_path: calibration/result JSON handed to SimpleTracker / Writer
    :param im_w: network input width (frames are resized to im_w x im_h)
    :param im_h: network input height
    :param batch: number of frames read and inferred per pipeline step
    :param name: result name handed to the tracker/writer
    :param out_path: optional path for an annotated output video (used in online mode)
    :param online: True -> track and display results while running;
        False -> only save raw detections via Writer for separate offline tracking
    """
    # with open(json_path, 'r+') as file:
    # with open(os.path.join(os.path.dirname(json_path), 'system_retinanet_first.json'), 'r+') as file:
    cap = cv2.VideoCapture(video_path)
    video_path = os.path.dirname(video_path)  # from here on: the video's directory
    # Optional binary mask restricting detection to a region; default is the
    # full frame (assumes 1080x1920 input video — TODO confirm).
    if os.path.exists(os.path.join(video_path, 'video_mask.png')):
        mask = cv2.imread(os.path.join(video_path, 'video_mask.png'), 0)
    else:
        mask = 255 * np.ones([1080, 1920], dtype=np.uint8)
    # Read one frame up front so the output writer can be sized from it.
    ret, frame = cap.read()
    if out_path is not None:
        fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
        out = cv2.VideoWriter(out_path, fourcc, 25.0, (frame.shape[1], frame.shape[0]))
    # Bounded queues give back-pressure between pipeline stages.
    q_frames = Queue(10)   # original frames (online mode, for drawing)
    q_images = Queue(10)   # preprocessed network inputs
    q_predict = Queue(10)  # raw model predictions
    e_stop = Event()       # cooperative stop flag shared by all threads
    vid_name = os.path.basename(os.path.normpath(video_path))

    def read():
        # Producer (online): emits batches of original frames plus preprocessed images.
        while (cap.isOpened() and not e_stop.isSet()):
            # read_time = time.time()
            images = []
            frames = []
            for _ in range(batch):
                ret, frame = cap.read()
                if not ret:
                    # End of stream: release capture; remaining iterations
                    # are skipped, yielding a short final batch.
                    cap.release()
                    continue
                frames.append(frame)
                # Apply the region mask, then resize to the network input size.
                image = cv2.bitwise_and(frame, frame, mask=mask)
                t_image = cv2.resize(image, (im_w, im_h))
                # cv2.imshow('transform', t_image)
                # if cv2.waitKey(1) & 0xFF == ord('q'):
                #     e_stop.set()
                # t_image = t_image[:, :, ::-1]
                t_image = preprocess_image(t_image)
                images.append(t_image)
            # print("Read FPS: {}".format(batch / (time.time() - read_time)))
            q_images.put(images)
            q_frames.put(frames)

    def read_offline():
        # Producer (offline): same as read() but does not keep original frames.
        while (cap.isOpened() and not e_stop.isSet()):
            images = []
            for _ in range(batch):
                ret, frame = cap.read()
                if not ret:
                    cap.release()
                    continue
                image = cv2.bitwise_and(frame, frame, mask=mask)
                t_image = cv2.resize(image, (im_w, im_h))
                t_image = preprocess_image(t_image)
                images.append(t_image)
            q_images.put(images)

    def inference():
        # GPU stage: pull a preprocessed batch, run the network, forward predictions.
        while (not e_stop.isSet()):
            try:
                images = q_images.get(timeout=100)
            except Empty:
                # Producer has stopped feeding the queue; shut this stage down.
                break
            gpu_time = time.time()
            y_pred = model.predict_on_batch(np.array(images))
            q_predict.put(y_pred)
            print("GPU FPS: {}".format(batch / (time.time() - gpu_time)))

    def postprocess():
        # Consumer (online): track detections and display/write annotated frames.
        tracker = SimpleTracker(json_path, im_w, im_h, name, threshold=0.2)
        total_time = time.time()
        while not e_stop.isSet():
            try:
                y_pred = q_predict.get(timeout=100)
                frames = q_frames.get(timeout=100)
            except Empty:
                # Pipeline drained: flush tracker results before exiting.
                tracker.write()
                break
            # post_time = time.time()
            for i in range(len(frames)):
                # Join per-frame scores (y_pred[1]) with boxes (y_pred[0])
                # into one (score, box...) array for the tracker.
                boxes = np.concatenate([y_pred[1][i, :, None], y_pred[0][i, :, :]], 1)
                image_b = tracker.process(boxes, frames[i])
                # cv2.imwrite('frame_a_{}.png'.format(i), image_b)
                if out_path is not None:
                    out.write(image_b)
                cv2.imshow('frame', image_b)
                # if cv2.waitKey(1) & 0xFF == ord('q'):
                #     e_stop.set()
                #     break
            # print("Post FPS: {}".format(batch / (time.time() - post_time)))
            # print("Total FPS: {}".format(batch / (time.time() - total_time)))
            # total_time = time.time()

    def postprocess_offline():
        # Consumer (offline): dump raw detections to JSON via Writer; no tracking.
        writer = Writer(json_path, name)
        total_time = time.time()
        frame_cnt = 1
        while not e_stop.isSet():
            try:
                y_pred = q_predict.get(timeout=100)
            except Empty:
                writer.write()
                break
            for i in range(y_pred[0].shape[0]):
                boxes = np.concatenate([y_pred[1][i, :, None], y_pred[0][i, :, :]], 1)
                writer.process(boxes)
                frame_cnt += 1
            # print("Total FPS: {}".format(batch / (time.time() - total_time)))
            print("Video: {} at frame: {}, FPS: {}".format(vid_name, frame_cnt, frame_cnt / (time.time() - total_time)))
            # total_time = time.time()

    # Wire up the pipeline for the requested mode and run it to completion.
    inferencer = Thread(target=inference)
    if online:
        reader = Thread(target=read)
        postprocesser = Thread(target=postprocess)
    else:
        reader = Thread(target=read_offline)
        postprocesser = Thread(target=postprocess_offline)
    reader.start()
    inferencer.start()
    postprocesser.start()
    reader.join()
    inferencer.join()
    postprocesser.join()
    if out_path is not None:
        out.release()
    # cv2.destroyAllWindows()
def track_detections(json_path, im_w, im_h, name, threshold, keep=5):
    """
    Run offline tracking over previously saved detections.

    Builds a SimpleTracker for the given result JSON and lets it read
    (and track) the stored detections.

    :param json_path: calibration/result JSON file
    :param im_w: detection image width the detections were produced at
    :param im_h: detection image height
    :param name: result name the detections were saved under
    :param threshold: detection score threshold for tracking
    :param keep: tracker 'keep' parameter (default 5)
    """
    SimpleTracker(json_path, im_w, im_h, name, threshold=threshold, keep=keep).read()
if __name__ == "__main__":
    # Dataset/result locations differ between the Windows dev machine and the cluster.
    if os.name == 'nt':
        vid_path = 'D:/Skola/PhD/data/2016-ITS-BrnoCompSpeed/dataset'
        results_path = 'D:/Skola/PhD/data/2016-ITS-BrnoCompSpeed/results/'
    else:
        vid_path = '/home/k/kocur15/data/2016-ITS-BrnoCompSpeed/dataset/'
        results_path = '/home/k/kocur15/data/2016-ITS-BrnoCompSpeed/results/'
    # BrnoCompSpeed sessions 4-6, three camera views each.
    vid_list = []
    calib_list = []
    for i in range(4, 7):
        dir_list = ['session{}_center'.format(i), 'session{}_left'.format(i), 'session{}_right'.format(i)]
        vid_list.extend([os.path.join(vid_path, d, 'video.avi') for d in dir_list])
        calib_list.extend([os.path.join(results_path, d, 'system_SochorCVIU_Edgelets_BBScale_Reg.json') for d in dir_list])
    # Must be set before the model is loaded so the backend picks GPU 0.
    os.environ['CUDA_VISIBLE_DEVICES'] = '0'
    name = '640_360_ablation'
    if os.name == 'nt':
        model = keras_retinanet.models.load_model(
            'D:/Skola/PhD/code/keras-retinanet/models/resnet50_ablation_640_360.h5',
            backbone_name='resnet50', convert=False)
    else:
        model = keras_retinanet.models.load_model(
            '/home/k/kocur15/code/keras-retinanet/snapshots/{}/resnet50_{}_at30.h5'.format(name, name),
            backbone_name='resnet50', convert=False)
    # NOTE(review): prints the bound method object, not the summary text —
    # probably meant model.summary(). Left as-is (documentation-only change).
    print(model.summary)
    model._make_predict_function()
    name = 'ablation_640_360'
    # for vid, calib in zip(vid_list, calib_list):
    #     test_video(model, vid, calib, 640, 360, 16, name, online=False)
    # Detections were already saved; only the offline tracking step runs here.
    for calib in calib_list:
        track_detections(calib, 640, 360, name, threshold=0.5, keep=10)
|
#!/usr/bin/env python
"""
CloudGenix Python SDK - POST
**Author:** CloudGenix
**Copyright:** (c) 2017-2021 CloudGenix, Inc
**License:** MIT
"""
import logging
__author__ = "CloudGenix Developer Support <developers@cloudgenix.com>"
__email__ = "developers@cloudgenix.com"
__copyright__ = "Copyright (c) 2017-2021 CloudGenix, Inc"
__license__ = """
MIT License
Copyright (c) 2017-2021 CloudGenix, Inc
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Set logging to function name
api_logger = logging.getLogger(__name__)
"""logging.getlogger object to enable debug printing via `cloudgenix.API.set_debug`"""
class Post(object):
"""
CloudGenix API - POST requests
Object to handle making Post requests via shared Requests Session.
"""
# placeholder for parent class namespace
_parent_class = None
def access_elementusers(self, elementuser_id, data, tenant_id=None, api_version="v2.1"):
"""
Grant Specific role to Element user on specific element
**Parameters:**:
- **elementuser_id**: Element User ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **element_id:** Type: string
- **role:** Type: string
- **tenant_id:** Type: string
- **user_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elementusers/{}/access".format(api_version,
tenant_id,
elementuser_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def anynetlinks_correlationevents_query(self, data, tenant_id=None, api_version="v2.1"):
"""
POST Anynetlinks_Correlationevents_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/anynetlinks/correlationevents/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def apnprofiles(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Apnprofiles API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/apnprofiles".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def apnprofiles_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Apnprofiles_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/apnprofiles/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def appdefs(self, data, tenant_id=None, api_version="v2.3"):
"""
Create a application definition
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.3)
**Payload Attributes:**
- **abbreviation:** Type: string
- **aggregate_flows:** Type: boolean
- **app_type:** Type: string
- **app_unreachability_detection:** Type: boolean
- **category:** Type: string
- **conn_idle_timeout:** Type: integer
- **description:** Type: string
- **display_name:** Type: string
- **domains:** [Type: string]
- **ingress_traffic_pct:** Type: integer
- **ip_rules:** [Type: object]
- **is_deprecated:** Type: boolean
- **network_scan_application:** Type: boolean
- **order_number:** Type: integer
- **overrides_allowed:** Type: boolean
- **parent_id:** Type: string
- **path_affinity:** Type: string
- **session_timeout:** Type: integer
- **system_app_overridden:** Type: boolean
- **tags:** [Type: string]
- **tcp_rules:** [Type: string]
- **transfer_type:** Type: string
- **udp_rules:** [Type: object]
- **use_parentapp_network_policy:** Type: boolean
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/appdefs".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def appdefs_overrides(self, appdef_id, data, tenant_id=None, api_version="v2.2"):
"""
Create a application definition overrides for system appdef
**Parameters:**:
- **appdef_id**: Application Definition ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.2)
**Payload Attributes:**
- **aggregate_flows:** Type: boolean
- **app_unreachability_detection:** Type: boolean
- **category:** Type: string
- **conn_idle_timeout:** Type: integer
- **description:** Type: string
- **domains:** [Type: string]
- **ingress_traffic_pct:** Type: integer
- **ip_rules:**
- **dest_filters:** [Type: string]
- **dest_prefixes:** [Type: string]
- **dscp:**
- **value:** Type: integer
- **protocol:** Type: string
- **src_filters:** [Type: string]
- **override_default_ip_rules:** Type: boolean
- **override_default_tcp_rules:** Type: boolean
- **override_default_udp_rules:** Type: boolean
- **override_domains:** Type: boolean
- **overrides_disable:** Type: boolean
- **path_affinity:** Type: string
- **session_timeout:** Type: integer
- **tags:** [Type: string]
- **tcp_rules:**
- **client_filters:** [Type: string]
- **client_port:**
- **end:** Type: string
- **start:** Type: string
- **dscp:**
- **value:** Type: integer
- **server_filters:** [Type: string]
- **server_port:**
- **end:** Type: string
- **start:** Type: string
- **server_prefixes:** [Type: string]
- **transfer_type:** Type: string
- **udp_rules:**
- **dest_prefixes:** [Type: string]
- **dscp:**
- **value:** Type: integer
- **udp_filters:** [Type: string]
- **udp_port:**
- **end:** Type: string
- **start:** Type: string
- **use_parentapp_network_policy:** Type: boolean
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/appdefs/{}/overrides".format(api_version,
tenant_id,
appdef_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def appdefs_query(self, data, tenant_id=None, api_version="v2.3"):
"""
Queries db for limit number of app defs that match query params.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.3)
**Payload Attributes:**
- **abbreviation:** Type: string
- **aggregate_flows:** Type: boolean
- **app_type:** Type: string
- **app_unreachability_detection:** Type: boolean
- **category:** Type: string
- **conn_idle_timeout:** Type: integer
- **description:** Type: string
- **display_name:** Type: string
- **domains:** [Type: string]
- **ingress_traffic_pct:** Type: integer
- **ip_rules:** [Type: object]
- **is_deprecated:** Type: boolean
- **network_scan_application:** Type: boolean
- **order_number:** Type: integer
- **overrides_allowed:** Type: boolean
- **parent_id:** Type: string
- **path_affinity:** Type: string
- **session_timeout:** Type: integer
- **system_app_overridden:** Type: boolean
- **tags:** [Type: string]
- **tcp_rules:** [Type: string]
- **transfer_type:** Type: string
- **udp_rules:** [Type: object]
- **use_parentapp_network_policy:** Type: boolean
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/appdefs/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def auditlog_query(self, data, tenant_id=None, api_version="v2.1"):
"""
POST Auditlog_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/auditlog/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def authtokens(self, operator_id, data, tenant_id=None, api_version="v2.1"):
"""
Create an auth token
**Parameters:**:
- **operator_id**: Operator ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **custom_roles:**
- **custom_permissions:**
- **allowed_after_ms:** Type: integer
- **allowed_before_ms:** Type: integer
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **disallow_permission:** Type: boolean
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **region:** Type: string
- **tenant_id:** Type: string
- **value:** Type: string
- **disabled:** Type: boolean
- **disallow_permissions:**
- **value:** Type: string
- **id:** Type: string
- **inactive:** Type: boolean
- **name:** Type: string
- **permissions:**
- **value:** Type: string
- **roles:**
- **name:** Type: string
- **expires_utc_ms:** Type: integer
- **is_system_owned:** Type: boolean
- **roles:**
- **name:** Type: string
- **session_key_c:** Type: string
- **x_auth_token:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/operators/{}/authtokens".format(api_version,
tenant_id,
operator_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def bgppeers(self, site_id, element_id, data, tenant_id=None, api_version="v2.2"):
"""
Create BGP peer config
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.2)
**Payload Attributes:**
- **bgp_config:**
- **adv_interval:** Type: integer
- **hold_time:** Type: integer
- **keepalive_time:** Type: integer
- **local_as_num:** Type: string
- **md5_secret:** Type: string
- **multi_hop_limit:** Type: integer
- **peer_auth_type:** Type: string
- **peer_retry_time:** Type: integer
- **description:** Type: string
- **name:** Type: string
- **peer_ip:** Type: string
- **peer_type:** Type: string
- **remote_as_num:** Type: string
- **route_map_in_id:** Type: string
- **route_map_out_id:** Type: string
- **scope:** Type: string
- **shutdown:** Type: boolean
- **tags:** [Type: string]
- **update_source:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/bgppeers".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def bgppeers_operations(self, site_id, element_id, bgppeer_id, data, tenant_id=None, api_version="v2.0"):
"""
Reset BGP peer config
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **bgppeer_id**: BGP Peer ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **value:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/bgppeers/{}/operations".format(api_version,
tenant_id,
site_id,
element_id,
bgppeer_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def bgppeers_query(self, site_id, element_id, data, tenant_id=None, api_version="v2.2"):
"""
Queries db for limit number of BGP peers that match query params.
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.2)
**Payload Attributes:**
- **bgp_config:**
- **adv_interval:** Type: integer
- **hold_time:** Type: integer
- **keepalive_time:** Type: integer
- **local_as_num:** Type: string
- **md5_secret:** Type: string
- **multi_hop_limit:** Type: integer
- **peer_auth_type:** Type: string
- **peer_retry_time:** Type: integer
- **description:** Type: string
- **name:** Type: string
- **peer_ip:** Type: string
- **peer_type:** Type: string
- **remote_as_num:** Type: string
- **route_map_in_id:** Type: string
- **route_map_out_id:** Type: string
- **scope:** Type: string
- **shutdown:** Type: boolean
- **tags:** [Type: string]
- **update_source:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/bgppeers/query".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def cellular_module_firmware_status_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Cellular_Module_Firmware_Status_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/cellular_module_firmware/status/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def cellular_modules_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Cellular_Modules_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/cellular_modules/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def certificate_operations(self, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Start CIC renewal process for an element device
**Parameters:**:
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **action:** Type: string
- **parameters:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/{}/certificate_operations".format(api_version,
tenant_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def clients_login(self, client_id, data, tenant_id=None, api_version="v2.0"):
"""
Login api for esp client
**Parameters:**:
- **client_id**: ESP/MSP Client ID (typically their tenant_id)
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **email:** Type: string
- **logout_others:** Type: boolean
- **password:** Type: string
- **requestId:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/clients/{}/login".format(api_version,
tenant_id,
client_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data, sensitive=True)
def clients_logout(self, data, tenant_id=None, api_version="v2.0"):
"""
Logout api for esp client. Reverts back to esp session
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **custom_roles:**
- **custom_permissions:**
- **allowed_after_ms:** Type: integer
- **allowed_before_ms:** Type: integer
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **disallow_permission:** Type: boolean
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **region:** Type: string
- **tenant_id:** Type: string
- **value:** Type: string
- **disabled:** Type: boolean
- **disallow_permissions:**
- **value:** Type: string
- **id:** Type: string
- **inactive:** Type: boolean
- **name:** Type: string
- **permissions:**
- **value:** Type: string
- **roles:**
- **name:** Type: string
- **disallowed_permissions:** Type: object
- **operator_id:** Type: string
- **permissions:** Type: object
- **redirect_region:** Type: string
- **redirect_urlpath:** Type: string
- **redirect_x_auth_token:** Type: string
- **resource_role_map:** [Type: object]
- **resource_uri_map:** Type: object
- **resource_version_map:** Type: object
- **tenant_id:** Type: string
- **version_exceptions_map:** [Type: object]
- **x_auth_token:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/logout".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def clients_machines_query(self, client_id, data, tenant_id=None, api_version="v2.1"):
"""
Query and get all machines allocated by ESP to a client tenant
**Parameters:**:
- **client_id**: ESP/MSP Client ID (typically their tenant_id)
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **count:** Type: integer
- **deleted_count:** Type: integer
- **deleted_ids:** [Type: string]
- **next_query:** Type: object
- **tenant_id:** Type: string
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/clients/{}/machines/query".format(api_version,
tenant_id,
client_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def clients_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Get esp tenant clients details for tenant id
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **canonical_name:** Type: string
- **clients:** [Type: string]
- **is_esp:** Type: boolean
- **name:** Type: string
- **tenant_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/clients/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def clients_reallocate(self, client_id, machine_id, data, tenant_id=None, api_version="v2.1"):
"""
Reallocate a specific machine from one client tenant to another, both client tenants are clients of the same ESP.
**Parameters:**:
- **client_id**: ESP/MSP Client ID (typically their tenant_id)
- **machine_id**: Machine ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **connected:** Type: boolean
- **console_conf_passphrase:** Type: string
- **em_element_id:** Type: string
- **esp_tenant_id:** Type: string
- **hw_id:** Type: string
- **image_version:** Type: string
- **inventory_op:** Type: string
- **machine_state:** Type: string
- **manufacture_id:** Type: string
- **model_name:** Type: string
- **ordering_info:** Type: string
- **pki_op:** - **renew_state:** Type: string
- **ship_state:** Type: string
- **sl_no:** Type: string
- **suspend_state:** Type: string
- **tenant_id:** Type: string
- **token:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/clients/{}/machines/{}/reallocate".format(api_version,
tenant_id,
client_id,
machine_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def demstatus_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Demstatus_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/demstatus/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def dhcpservers(self, site_id, data, tenant_id=None, api_version="v2.1"):
"""
Create a new dhcp server configuration for a subnet
**Parameters:**:
- **site_id**: Site ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **broadcast_address:** Type: string
- **custom_options:**
- **option_definition:** Type: string
- **option_value:** Type: string
- **vendor_class_identifier:** Type: string
- **default_lease_time:** Type: integer
- **description:** Type: string
- **disabled:** Type: boolean
- **dns_servers:** [Type: string]
- **domain_name:** Type: string
- **gateway:** Type: string
- **ip_ranges:**
- **end_ip:** Type: string
- **start_ip:** Type: string
- **max_lease_time:** Type: integer
- **network_context_id:** Type: string
- **static_mappings:**
- **ip_address:** Type: string
- **mac:** Type: string
- **name:** Type: string
- **subnet:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/dhcpservers".format(api_version,
tenant_id,
site_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def dnsserviceprofiles(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new DNS service profile
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **authoritative_config:**
- **caa_records:**
- **flags:** Type: string
- **name:** Type: string
- **tag:** Type: string
- **value:** Type: string
- **cname_records:**
- **name:** [Type: string]
- **target:** Type: string
- **ttl:** Type: integer
- **dns_resource_records:**
- **hex_data:** Type: string
- **name:** Type: string
- **rr_number:** Type: integer
- **host_records:**
- **domain_names:** [Type: string]
- **ipv4_address:** Type: string
- **ipv6_address:** Type: string
- **ttl:** Type: integer
- **mx_host_records:**
- **hostname:** Type: string
- **mx_name:** Type: string
- **preference:** Type: integer
- **naptr_records:**
- **flags:** Type: string
- **name:** Type: string
- **order:** Type: integer
- **preference:** Type: integer
- **regexp:** Type: string
- **replacement:** Type: string
- **service:** Type: string
- **peers:** [Type: string]
- **ptr_records:**
- **name:** Type: string
- **target:** Type: string
- **secondary_servers:** [Type: string]
- **servers:**
- **dnsservicerole_id:** Type: string
- **domain_name:** Type: string
- **soa:**
- **expiry:** Type: integer
- **host_master:** Type: string
- **refresh:** Type: integer
- **retry:** Type: integer
- **serial_number:** Type: integer
- **srv_hosts:**
- **domain_name:** Type: string
- **port:** Type: integer
- **priority:** Type: integer
- **protocol:** Type: string
- **service:** Type: string
- **target:** Type: integer
- **weight:** Type: integer
- **synth_domains:**
- **domain:** Type: string
- **end_ipaddress:** Type: string
- **ipaddress_prefix:** Type: string
- **prefix:** Type: string
- **start_ipaddress:** Type: string
- **ttl:** Type: integer
- **txt_records:**
- **domain_name:** Type: string
- **texts:** [Type: string]
- **zones:**
- **domain_name:** Type: string
- **exclude_prefix:** [Type: string]
- **include_prefix:** [Type: string]
- **cache_config:**
- **cache_size:** Type: integer
- **disable_negative_caching:** Type: boolean
- **max_cache_ttl:** Type: integer
- **min_cache_ttl:** Type: integer
- **negative_cache_ttl:** Type: integer
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **dns_forward_config:**
- **dns_servers:**
- **dnsserver_ip:** Type: string
- **dnsserver_port:** Type: integer
- **domain_names:** [Type: string]
- **forward_dnsservicerole_id:** Type: string
- **ip_prefix:** Type: string
- **source_port:** Type: integer
- **max_source_port:** Type: integer
- **min_source_port:** Type: integer
- **send_to_all_dns_servers:** Type: boolean
- **dns_queries_metadata:**
- **add_client_mac:**
- **mac_encoding_format:** Type: string
- **add_customer_premises_equipment:**
- **identifier_text:** Type: string
- **type:** Type: string
- **add_subnets:**
- **ipv4_address:** Type: string
- **ipv4_prefix_length:** Type: integer
- **ipv6_address:** Type: string
- **ipv6_prefix_length:** Type: integer
- **dns_rebind_config:**
- **enable_localhost_rebind:** Type: boolean
- **rebind_domains:** [Type: string]
- **stop_dns_rebind_privateip:** Type: boolean
- **dns_response_overrides:**
- **aliases:**
- **mask:** Type: integer
- **original_end_ip:** Type: string
- **original_ip:** Type: string
- **original_start_ip:** Type: string
- **replace_ip:** Type: string
- **bogus_nx_domains:** [Type: string]
- **disable_private_ip_lookups:** Type: boolean
- **ignore_ip_addresses:** [Type: string]
- **local_ttl:** Type: integer
- **max_ttl:** Type: integer
- **dnssec_config:**
- **disable_dnssec_timecheck:** Type: boolean
- **dns_check_unsigned:** Type: boolean
- **enabled:** Type: boolean
- **trust_anchors:**
- **class:** Type: string
- **domain:** Type: string
- **key_digest:**
- **algorithm:** Type: integer
- **digest:** Type: string
- **digest_type:** Type: integer
- **key_tag:** Type: integer
- **domains_to_addresses:**
- **domain_names:** [Type: string]
- **ipv4_address:** Type: string
- **ipv6_address:** Type: string
- **edns_packet_max:** Type: integer
- **enable_dns_loop_detection:** Type: boolean
- **enable_dnssec_proxy:** Type: boolean
- **enable_strict_domain_name:** Type: boolean
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **listen_dnsservicerole_id:** Type: string
- **listen_port:** Type: integer
- **name:** Type: string
- **region:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/dnsserviceprofiles".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def dnsserviceprofiles_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query DNS service profile based on parameters
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/dnsserviceprofiles/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def dnsserviceroles(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new DNS service role
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/dnsserviceroles".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def dnsserviceroles_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query DNS service role based on parameters
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/dnsserviceroles/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def dnsservices(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Create a new DNS service config
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **cache_config:**
- **cache_size:** Type: integer
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **dns_queries_metadata:**
- **add_customer_premises_equipment:**
- **identifier_text:** Type: string
- **type:** Type: string
- **add_subnets:**
- **ipv4_address:** Type: string
- **ipv4_prefix_length:** Type: integer
- **ipv6_address:** Type: string
- **ipv6_prefix_length:** Type: integer
- **dnsservice_profile_id:** Type: string
- **dnsservicerole_bindings:**
- **dnsservicerole_id:** Type: string
- **interfaces:**
- **admin_state_changed:** Type: boolean
- **admin_up:** Type: boolean
- **attached_lan_networks:**
- **lan_network_id:** Type: string
- **vlan_id:** Type: integer
- **bound_interfaces:**
- **interface_id:** Type: string
- **type:** Type: string
- **cellular_config:**
- **apn_config:**
- **apn:** Type: string
- **authentication:** Type: string
- **clear_password:** Type: boolean
- **password:** Type: string
- **password_encrypted:** Type: string
- **user_name:** Type: string
- **apn_etag:** Type: integer
- **apnprofile_id:** Type: string
- **auto_apn:** Type: boolean
- **parent_module_id:** Type: string
- **parent_sim_slot_number:** Type: integer
- **config_state:** Type: string
- **description:** Type: string
- **devicemgmt_policysetstack_id:** Type: string
- **dhcp_relay:**
- **enabled:** Type: boolean
- **option_82:**
- **circuit_id:** Type: string
- **enabled:** Type: boolean
- **reforwarding_policy:** Type: string
- **remote_id:** Type: string
- **server_ips:** [Type: string]
- **source_interface:** Type: string
- **directed_broadcast:** Type: boolean
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **element_etag:** Type: integer
- **element_id:** Type: string
- **element_port_admin_up:** Type: boolean
- **ethernet_port:**
- **full_duplex:** Type: boolean
- **speed:** Type: integer
- **id:** Type: string
- **ifType:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **ipfixcollectorcontext_id:** Type: string
- **ipfixfiltercontext_id:** Type: string
- **ipv4_config:**
- **dhcp_config:**
- **client_id:** Type: string
- **hostname:** Type: string
- **dns_v4_config:**
- **name_servers:** [Type: string]
- **search:** [Type: string]
- **pppoe_config:**
- **chap_passwd:** Type: string
- **chap_user:** Type: string
- **set_route:** Type: boolean
- **routes:**
- **destination:** Type: string
- **via:** Type: string
- **static_config:**
- **address:** Type: string
- **type:** Type: string
- **ipv6_config:**
- **dhcpv6:** Type: boolean
- **dns_v6_config:**
- **name_servers:** [Type: string]
- **search:** [Type: string]
- **prefixes:** [Type: string]
- **type:** Type: string
- **is2kFlag:** Type: boolean
- **is_parent:** Type: boolean
- **is_service_link_parent:** Type: boolean
- **lan_state_propagation:** Type: boolean
- **mac_address:** Type: string
- **mtu:** Type: integer
- **multicast_config:**
- **igmp_version:** Type: string
- **multicast_enabled:** Type: boolean
- **name:** Type: string
- **nat_address:** Type: string
- **nat_pools:**
- **ipv4_ranges:**
- **end:** Type: string
- **start:** Type: string
- **nat_pool_id:** Type: string
- **nat_port:** Type: integer
- **nat_zone_id:** Type: string
- **network_context_id:** Type: string
- **parent:** Type: string
- **pppoe_config:**
- **host_uniq:** Type: string
- **parent:** Type: string
- **password:** Type: string
- **reconnection_delay:** Type: integer
- **service_name:** Type: string
- **username:** Type: string
- **propagation_state_changed:** Type: boolean
- **region:** Type: string
- **relay_changed:** Type: boolean
- **sb_api_version:** Type: string
- **scope:** Type: string
- **secondary_ip_configs:**
- **ipv4_address:** Type: string
- **scope:** Type: string
- **service_link_config:**
- **gre_config:**
- **csum:** Type: boolean
- **keepalive_enable:** Type: boolean
- **keepalive_fail_count:** Type: integer
- **keepalive_interval:** Type: integer
- **ipsec_config:**
- **authentication:**
- **certificate:** Type: string
- **ikev1_params:**
- **xauth_id:** Type: string
- **xauth_secret:** Type: string
- **xauth_secret_encrypted:** Type: string
- **xauth_secret_hash:** Type: string
- **xauth_type:** Type: string
- **local_ca_certificate:** Type: string
- **local_id:** Type: string
- **local_id_custom:** Type: string
- **passphrase:** Type: string
- **passphrase_encrypted:** Type: string
- **private_key:** Type: string
- **private_key_encrypted:** Type: string
- **remote_ca_certificate:** Type: string
- **remote_id:** Type: string
- **secret:** Type: string
- **secret_encrypted:** Type: string
- **secret_hash:** Type: string
- **type:** Type: string
- **x509Objects:**
- **certHolder:** Type: object
- **certificate:** Type: string
- **is_local_ca_cert_set:** Type: boolean
- **is_remote_ca_cert_set:** Type: boolean
- **keyPair:** Type: object
- **local_ca_certificate:** Type: string
- **local_ca_certs_set:** [Type: object]
- **passphrase:** Type: string
- **private_key:** Type: string
- **remote_ca_certificate:** Type: string
- **remote_ca_certs_set:** [Type: object]
- **ipsec_profile_id:** Type: string
- **last_parent:** Type: string
- **parent:** Type: string
- **peer:**
- **hostname:** Type: string
- **ip_addresses:** [Type: string]
- **service_endpoint_id:** Type: string
- **type:** Type: string
- **site_id:** Type: string
- **site_wan_interface_ids:** [Type: string]
- **state_id:**
- **cellular_state:**
- **active:** Type: boolean
- **apn_info:**
- **apn:** Type: string
- **authentication:** Type: string
- **device:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **dns_v4_config:**
- **name_servers:** [Type: string]
- **search:** [Type: string]
- **dns_v6_config:**
- **name_servers:** [Type: string]
- **search:** [Type: string]
- **element_id:** Type: string
- **extended_state:** Type: string
- **id:** Type: string
- **ike_algo:** Type: string
- **ike_last_rekeyed:** Type: integer
- **ike_next_rekey:** Type: integer
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **index:** Type: integer
- **ipsec_algo:** Type: string
- **ipsec_last_rekeyed:** Type: integer
- **ipsec_next_rekey:** Type: integer
- **ipv4_addresses:** [Type: string]
- **ipv4_addresses_changed:** Type: boolean
- **ipv6_addresses:** [Type: string]
- **last_state_change:** Type: integer
- **local_tunnel_v4_addr:** Type: string
- **mac_address:** Type: string
- **name:** Type: string
- **negotiated_mtu:** Type: integer
- **operational_state:** Type: string
- **operational_state_changed:** Type: boolean
- **port:**
- **end:** Type: string
- **start:** Type: string
- **region:** Type: string
- **remote_host_name:** Type: string
- **remote_v4_addr:** Type: string
- **routes:**
- **destination:** Type: string
- **via:** Type: string
- **secondary_ipv4_addresses:** [Type: string]
- **state:** Type: boolean
- **tenant_id:** Type: string
- **static_arp_configs:**
- **ipv4_address:** Type: string
- **mac_address:** Type: string
- **sub_interface:**
- **vlan_id:** Type: integer
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **tmpPortType:** Type: string
- **type:** Type: string
- **use_relay:** Type: boolean
- **used_for:** Type: string
- **vlan_ids:** [Type: integer]
- **wan_network_id:** Type: string
- **domains_to_addresses:**
- **domain_names:** [Type: string]
- **ipv4_address:** Type: string
- **ipv6_address:** Type: string
- **domains_to_interfaces:**
- **domain_names:** [Type: string]
- **interface_id:** Type: string
- **element_id:** Type: string
- **enabled:** Type: boolean
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **max_concurrent_dns_queries:** Type: integer
- **name:** Type: string
- **region:** Type: string
- **site_id:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **upperCaseName:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/dnsservices".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def dnsservices_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query DNS service config based on parameters
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/dnsservices/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def element_bulk_config_state_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Get element config/state info for queried elements from NB
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **count:** Type: integer
- **items:** [Type: object]
- **tenant_id:** Type: string
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/bulk_config_state/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def element_correlationevents_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Element_Correlationevents_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/correlationevents/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def element_extensions(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Create element level extension configuration
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **conf:** Type: object
- **disabled:** Type: boolean
- **name:** Type: string
- **namespace:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/extensions".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def element_extensions_query(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Query element level extensions that match query params
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/extensions/query".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def element_query(self, data, tenant_id=None, api_version="v2.5"):
"""
Queries db for limit number of elements that match query params.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.5)
**Payload Attributes:**
- **admin_action:** Type: string
- **admin_renew_state:** Type: string
- **admin_suspend_state:** Type: string
- **allowed_roles:** [Type: string]
- **cluster_insertion_mode:** Type: string
- **cluster_member_id:** Type: string
- **connected:** Type: boolean
- **deployment_op:** Type: string
- **description:** Type: string
- **fips_mode:** Type: string
- **fips_mode_change_start_time:** Type: integer
- **hw_id:** Type: string
- **l3_direct_private_wan_forwarding:** Type: boolean
- **l3_lan_forwarding:** Type: boolean
- **model_name:** Type: string
- **name:** Type: string
- **nat_policysetstack_id:** Type: string
- **network_policysetstack_id:** Type: string
- **priority_policysetstack_id:** Type: string
- **role:** Type: string
- **serial_number:** Type: string
- **site_id:** Type: string
- **software_version:** Type: string
- **spoke_ha_config:**
- **cluster_id:** Type: string
- **enable:** Type: boolean
- **priority:** Type: integer
- **source_interface:** Type: string
- **track:**
- **interfaces:**
- **interface_id:** Type: string
- **reduce_priority:** Type: integer
- **waninterfaces:**
- **reduce_priority:** Type: integer
- **wan_interface_id:** Type: string
- **state:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **vpn_to_vpn_forwarding:** Type: boolean
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def elementaccessconfigs(self, element_id, data, tenant_id=None, api_version="v2.2"):
"""
POST Elementaccessconfigs API Function
**Parameters:**:
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.2)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/{}/elementaccessconfigs".format(api_version,
tenant_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def elementsecurityzones(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Create an association between element and security zone.
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **interface_ids:** [Type: string]
- **lannetwork_ids:** [Type: string]
- **site_id:** Type: string
- **tenant_id:** Type: string
- **waninterface_ids:** [Type: string]
- **wanoverlay_ids:** [Type: string]
- **zone_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/securityzones".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def elementsecurityzones_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query element security zones.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **element_id:** Type: string
- **interface_ids:** [Type: string]
- **lannetwork_ids:** [Type: string]
- **site_id:** Type: string
- **tenant_id:** Type: string
- **waninterface_ids:** [Type: string]
- **wanoverlay_ids:** [Type: string]
- **zone_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elementsecurityzones/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def elementusers(self, data, tenant_id=None, api_version="v2.1"):
    """
    Create an Element User.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.1)

    **Payload Attributes:**
      - **is_tenant_level:** Type: boolean
      - **login_id:** Type: string
      - **role:** Type: string
      - **tenant_id:** Type: string
      - **username:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/elementusers".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def entitlements(self, operator_id, session_id, data, tenant_id=None, api_version="v2.0"):
    """
    POST Entitlements API Function.

      - **operator_id**: Operator ID
      - **session_id**: User Session ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/operators/{}/sessions/{}/actionservice/appportal/api/v1/entitlements".format(
        self._parent_class.controller, api_version, tenant_id, operator_id, session_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def eventcorrelationpolicyrules(self, eventcorrelationpolicyset_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create an event correlation policyrule configuration.

      - **eventcorrelationpolicyset_id**: Event Correlation Policy Set ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **enabled:** Type: boolean
      - **end_time:** Type: integer
      - **escalation_rules:**
      - **flap_rule:**
      - **flap_duration:** Type: integer
      - **flap_rate:** Type: integer
      - **standing_rule:**
      - **priority:** Type: string
      - **standing_for:** Type: integer
      - **event_codes:** [Type: string]
      - **name:** Type: string
      - **priority:** Type: string
      - **resource_ids:** [Type: string]
      - **resource_type:** Type: string
      - **start_time:** Type: integer
      - **sub_resource_type:** Type: string
      - **suppress:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/eventcorrelationpolicysets/{}/eventcorrelationpolicyrules".format(
        self._parent_class.controller, api_version, tenant_id, eventcorrelationpolicyset_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def eventcorrelationpolicyrules_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of event correlation policyrules that match query params.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **enabled:** Type: boolean
      - **end_time:** Type: integer
      - **escalation_rules:**
      - **flap_rule:**
      - **flap_duration:** Type: integer
      - **flap_rate:** Type: integer
      - **standing_rule:**
      - **priority:** Type: string
      - **standing_for:** Type: integer
      - **event_codes:** [Type: string]
      - **name:** Type: string
      - **policyset_id:** Type: string
      - **priority:** Type: string
      - **resource_ids:** [Type: string]
      - **resource_type:** Type: string
      - **start_time:** Type: integer
      - **sub_resource_type:** Type: string
      - **suppress:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/eventcorrelationpolicyrules/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def eventcorrelationpolicysets(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create an event correlation policyset configuration.

    NOTE: the original docstring said "Queries db for ... policysets", but this
    method POSTs to the create endpoint (/eventcorrelationpolicysets), not the
    /query endpoint; see eventcorrelationpolicysets_query for querying.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **active_policyset:** Type: boolean
      - **clone_from:** Type: string
      - **description:** Type: string
      - **name:** Type: string
      - **policyrule_order:** [Type: string]
      - **severity_priority_mapping:**
      - **priority:** Type: string
      - **severity:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None and self._parent_class.tenant_id:
        # Pull tenant_id from parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    elif not tenant_id:
        # No value for tenant_id.
        raise TypeError("tenant_id is required but not set or cached.")
    cur_ctlr = self._parent_class.controller

    url = str(cur_ctlr) + "/{}/api/tenants/{}/eventcorrelationpolicysets".format(api_version,
                                                                                tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def eventcorrelationpolicysets_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of event correlation policysets that match query params.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **active_policyset:** Type: boolean
      - **clone_from:** Type: string
      - **description:** Type: string
      - **name:** Type: string
      - **policyrule_order:** [Type: string]
      - **severity_priority_mapping:**
      - **priority:** Type: string
      - **severity:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/eventcorrelationpolicysets/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def events_query(self, data, tenant_id=None, api_version="v3.4"):
    """
    POST Events_Query API Function.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v3.4)

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/events/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def externalcaconfigs(self, data, tenant_id=None, api_version="v2.0"):
    """
    POST Externalcaconfigs API Function.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/externalcaconfigs".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def fips_mode_change_operations(self, element_id, data, tenant_id=None, api_version="v2.1"):
    """
    POST Fips_Mode_Change_Operations API Function.

      - **element_id**: Element (Device) ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.1)

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/elements/{}/fips_mode_change_operations".format(
        self._parent_class.controller, api_version, tenant_id, element_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def globalprefixfilters(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create a new global prefix filter.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **filters:**
      - **type:** Type: string
      - **name:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/globalprefixfilters".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def globalprefixfilters_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query DB for the list of params.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **ipv4_prefixes:** [Type: string]
      - **name:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/globalprefixfilters/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def hubclustermembers(self, site_id, hubcluster_id, data, tenant_id=None, api_version="v3.0"):
    """
    Creates a new hub cluster member.

      - **site_id**: Site ID
      - **hubcluster_id**: Hub (DC) Cluster ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v3.0)

    **Payload Attributes:**
      - **headend1_site_ids:** [Type: string]
      - **headend2_site_ids:** [Type: string]
      - **hub_element_id:** Type: string
      - **load_factors:**
      - **alarm_threshold:** Type: integer
      - **allocated:** Type: integer
      - **subscription_factor:** Type: number
      - **threshold:**
      - **critical_alarm:** Type: integer
      - **major_alarm:** Type: integer
      - **subscription_factor:** Type: number
      - **type:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/sites/{}/hubclusters/{}/hubclustermembers".format(
        self._parent_class.controller, api_version, tenant_id, site_id, hubcluster_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def hubclusters(self, site_id, data, tenant_id=None, api_version="v3.0"):
    """
    Creates a new hub cluster.

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v3.0)

    **Payload Attributes:**
      - **admin_up:** Type: boolean
      - **load_alarm_threshold:** Type: integer
      - **name:** Type: string
      - **subscription_factor:** Type: number

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/sites/{}/hubclusters".format(
        self._parent_class.controller, api_version, tenant_id, site_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def interfaces(self, site_id, element_id, data, tenant_id=None, api_version="v4.11"):
    """
    Create an Interface.

      - **site_id**: Site ID
      - **element_id**: Element (Device) ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v4.11)

    **Payload Attributes:**
      - **admin_up:** Type: boolean
      - **attached_lan_networks:**
      - **lan_network_id:** Type: string
      - **vlan_id:** Type: integer
      - **bound_interfaces:** [Type: string]
      - **bypass_pair:**
      - **lan:** Type: string
      - **lan_state_propagation:** Type: boolean
      - **use_relay:** Type: boolean
      - **wan:** Type: string
      - **cellular_config:**
      - **apn_config:**
      - **apn:** Type: string
      - **authentication:** Type: string
      - **clear_password:** Type: boolean
      - **password:** Type: string
      - **password_encrypted:** Type: string
      - **user_name:** Type: string
      - **apn_etag:** Type: integer
      - **apnprofile_id:** Type: string
      - **auto_apn:** Type: boolean
      - **parent_module_id:** Type: string
      - **parent_sim_slot_number:** Type: integer
      - **description:** Type: string
      - **devicemgmt_policysetstack_id:** Type: string
      - **dhcp_relay:**
      - **enabled:** Type: boolean
      - **option_82:**
      - **circuit_id:** Type: string
      - **enabled:** Type: boolean
      - **reforwarding_policy:** Type: string
      - **remote_id:** Type: string
      - **server_ips:** [Type: string]
      - **source_interface:** Type: string
      - **directed_broadcast:** Type: boolean
      - **ethernet_port:**
      - **full_duplex:** Type: boolean
      - **speed:** Type: integer
      - **ipfixcollectorcontext_id:** Type: string
      - **ipfixfiltercontext_id:** Type: string
      - **ipv4_config:**
      - **dhcp_config:**
      - **client_id:** Type: string
      - **hostname:** Type: string
      - **dns_v4_config:**
      - **name_servers:** [Type: string]
      - **search:** [Type: string]
      - **pppoe_config:**
      - **chap_passwd:** Type: string
      - **chap_user:** Type: string
      - **set_route:** Type: boolean
      - **routes:**
      - **destination:** Type: string
      - **via:** Type: string
      - **static_config:**
      - **address:** Type: string
      - **type:** Type: string
      - **mac_address:** Type: string
      - **mtu:** Type: integer
      - **multicast_config:**
      - **igmp_version:** Type: string
      - **multicast_enabled:** Type: boolean
      - **name:** Type: string
      - **nat_address:** Type: string
      - **nat_pools:**
      - **ipv4_ranges:**
      - **end:** Type: string
      - **start:** Type: string
      - **nat_pool_id:** Type: string
      - **nat_port:** Type: integer
      - **nat_zone_id:** Type: string
      - **network_context_id:** Type: string
      - **parent:** Type: string
      - **pppoe_config:**
      - **host_uniq:** Type: string
      - **parent:** Type: string
      - **password:** Type: string
      - **reconnection_delay:** Type: integer
      - **service_name:** Type: string
      - **username:** Type: string
      - **scope:** Type: string
      - **secondary_ip_configs:**
      - **ipv4_address:** Type: string
      - **scope:** Type: string
      - **service_link_config:**
      - **gre_config:**
      - **csum:** Type: boolean
      - **keepalive_enable:** Type: boolean
      - **keepalive_fail_count:** Type: integer
      - **keepalive_interval:** Type: integer
      - **ipsec_config:**
      - **authentication:**
      - **certificate:** Type: string
      - **ikev1_params:**
      - **xauth_id:** Type: string
      - **xauth_secret:** Type: string
      - **xauth_secret_encrypted:** Type: string
      - **xauth_secret_hash:** Type: string
      - **xauth_type:** Type: string
      - **local_ca_certificate:** Type: string
      - **local_id:** Type: string
      - **local_id_custom:** Type: string
      - **passphrase:** Type: string
      - **passphrase_encrypted:** Type: string
      - **private_key:** Type: string
      - **private_key_encrypted:** Type: string
      - **remote_ca_certificate:** Type: string
      - **remote_id:** Type: string
      - **secret:** Type: string
      - **secret_encrypted:** Type: string
      - **secret_hash:** Type: string
      - **type:** Type: string
      - **x509Objects:**
      - **certHolder:** Type: object
      - **certificate:** Type: string
      - **is_local_ca_cert_set:** Type: boolean
      - **is_remote_ca_cert_set:** Type: boolean
      - **keyPair:** Type: object
      - **local_ca_certificate:** Type: string
      - **local_ca_certs_set:** [Type: object]
      - **passphrase:** Type: string
      - **private_key:** Type: string
      - **remote_ca_certificate:** Type: string
      - **remote_ca_certs_set:** [Type: object]
      - **ipsec_profile_id:** Type: string
      - **last_parent:** Type: string
      - **parent:** Type: string
      - **peer:**
      - **hostname:** Type: string
      - **ip_addresses:** [Type: string]
      - **service_endpoint_id:** Type: string
      - **type:** Type: string
      - **site_wan_interface_ids:** [Type: string]
      - **static_arp_configs:**
      - **ipv4_address:** Type: string
      - **mac_address:** Type: string
      - **sub_interface:**
      - **vlan_id:** Type: integer
      - **tags:** [Type: string]
      - **type:** Type: string
      - **used_for:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/sites/{}/elements/{}/interfaces".format(
        self._parent_class.controller, api_version, tenant_id, site_id, element_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def interfaces_correlationevents_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    POST Interfaces_Correlationevents_Query API Function.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/interfaces/correlationevents/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def interfaces_query(self, data, tenant_id=None, api_version="v4.11"):
    """
    Queries db for limit number of interfaces that match query params.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v4.11)

    **Payload Attributes:**
      - **dest_page:** Type: integer
      - **getDeleted:** Type: boolean
      - **last_query_ts:** Type: integer
      - **limit:** Type: integer
      - **next_query:** Type: object
      - **query_params:** Type: object
      - **retrieved_fields:** [Type: string]
      - **retrieved_fields_mask:** Type: boolean
      - **sort_params:** Type: object
      - **total_count:** Type: integer

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/interfaces/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfix(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create an IPFix Config.

      - **site_id**: Site ID
      - **element_id**: Element (Device) ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **collector_config:**
      - **host:** Type: string
      - **host_port:** Type: integer
      - **ipfixcollectorcontext_id:** Type: string
      - **max_message_size:** Type: integer
      - **protocol:** Type: string
      - **description:** Type: string
      - **export_cache_timeout:** Type: integer
      - **filters:**
      - **app_def_ids:** [Type: string]
      - **dst_ports:**
      - **end:** Type: string
      - **start:** Type: string
      - **dst_prefixes_id:** Type: string
      - **ipfixfiltercontext_ids:** [Type: string]
      - **priority_traffic_types:** [Type: string]
      - **protocols:** [Type: string]
      - **rtp_transport_type:** Type: string
      - **src_ports:**
      - **end:** Type: string
      - **start:** Type: string
      - **src_prefixes_id:** Type: string
      - **wan_path_direction:** Type: string
      - **ipfixprofile_id:** Type: string
      - **ipfixtemplate_id:** Type: string
      - **name:** Type: string
      - **sampler:**
      - **algorithm:** Type: string
      - **time_interval:** Type: integer
      - **time_spacing:** Type: integer
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/sites/{}/elements/{}/ipfix".format(
        self._parent_class.controller, api_version, tenant_id, site_id, element_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfix_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of ipfix configs that match query params.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **collector_config:**
      - **host:** Type: string
      - **host_port:** Type: integer
      - **ipfixcollectorcontext_id:** Type: string
      - **max_message_size:** Type: integer
      - **protocol:** Type: string
      - **description:** Type: string
      - **element_id:** Type: string
      - **export_cache_timeout:** Type: integer
      - **filters:**
      - **app_def_ids:** [Type: string]
      - **dst_ports:**
      - **end:** Type: string
      - **start:** Type: string
      - **dst_prefixes_id:** Type: string
      - **ipfixfiltercontext_ids:** [Type: string]
      - **priority_traffic_types:** [Type: string]
      - **protocols:** [Type: string]
      - **rtp_transport_type:** Type: string
      - **src_ports:**
      - **end:** Type: string
      - **start:** Type: string
      - **src_prefixes_id:** Type: string
      - **wan_path_direction:** Type: string
      - **ipfixprofile_id:** Type: string
      - **ipfixtemplate_id:** Type: string
      - **name:** Type: string
      - **sampler:**
      - **algorithm:** Type: string
      - **time_interval:** Type: integer
      - **time_spacing:** Type: integer
      - **site_id:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfix/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixcollectorcontexts(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create an IPFix Collector context.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **name:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfixcollectorcontexts".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixcollectorcontexts_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of ipfix collector context that match query params.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **name:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfixcollectorcontexts/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixfiltercontexts(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create an IPFix Filter context.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **name:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfixfiltercontexts".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixfiltercontexts_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of ipfix filter context that match query params.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **name:** Type: string

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfixfiltercontexts/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixglobalprefixes(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create an IPFix Global prefix.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **description:** Type: string
      - **ipv4_prefixes:** [Type: string]
      - **name:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfixglobalprefixes".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixglobalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    POST Ipfixglobalprefixes_Query API Function.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfixglobalprefixes/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixlocalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of ipfix site prefix association that match query.

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID; read from the parent session cache when omitted.
      - **api_version**: API version to use (default v2.0)

    **Payload Attributes:**
      - **ipv4_prefixes:** [Type: string]
      - **prefix_id:** Type: string
      - **site_id:** Type: string
      - **tags:** [Type: string]

    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id; fall back to the value cached on the parent session.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")

    url = "{}/{}/api/tenants/{}/ipfixlocalprefixes/query".format(
        self._parent_class.controller, api_version, tenant_id)

    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ipfixprofiles(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create an IPFix Profile (POST).
    **Parameters:**:
    - **data**: Dictionary containing the profile to POST as JSON (collector_config,
      filters, sampler, ipfixtemplate_id, name, description, tags, ...)
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/ipfixprofiles".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def ipfixprofiles_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query ipfix profiles matching the query params (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/ipfixprofiles/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def ipfixtemplates(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create an IPFix template (POST).
    **Parameters:**:
    - **data**: Dictionary containing the template to POST as JSON (name, description,
      flow_fields, options, generate_biflow, export timeouts, tags, ...)
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/ipfixtemplates".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def ipfixtemplates_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query ipfix templates matching the query params (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/ipfixtemplates/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def ipsecprofiles(self, data, tenant_id=None, api_version="v2.1"):
    """
    Create a new IPSEC Profile (POST).
    **Parameters:**:
    - **data**: Dictionary containing the profile to POST as JSON (authentication,
      esp_group, ike_group, dpd settings, name, description, tags, ...)
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.1)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/ipsecprofiles".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def ipsecprofiles_query(self, data, tenant_id=None, api_version="v2.1"):
    """
    Query tenant-level ipsec profiles matching the query params (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.1)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/ipsecprofiles/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def lannetworks(self, site_id, data, tenant_id=None, api_version="v3.1"):
    """
    Create a new LAN network under a site (POST).
    **Parameters:**:
    - **site_id**: Site ID
    - **data**: Dictionary containing the LAN network to POST as JSON (name,
      ipv4_config, network_context_id, scope, tags, ...)
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v3.1)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/sites/{}/lannetworks".format(str(parent.controller), api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def lannetworks_query(self, data, tenant_id=None, api_version="v3.1"):
    """
    Query LAN networks that match the query params (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v3.1)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/lannetworks/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def localprefixfilters(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create a new local prefix filter (POST).
    **Parameters:**:
    - **data**: Dictionary containing the filter to POST as JSON (name, description)
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/localprefixfilters".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def localprefixfilters_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query local prefix filters matching the query params (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/localprefixfilters/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def login(self, data, api_version="v2.0"):
    """
    Login API (POST). No tenant_id is required for this endpoint.
    **Parameters:**:
    - **data**: Dictionary containing login credentials to POST as JSON
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    url = "{}/{}/api/login".format(str(parent.controller), api_version)
    api_logger.debug("URL = %s", url)
    # sensitive=True keeps credentials out of debug output on the parent session.
    return parent.rest_call(url, "post", data=data, sensitive=True)
def machine_upgrade_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query Machine Upgrade Config (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON (query_params, limit,
      dest_page, sort_params, retrieved_fields, ...)
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/machine_upgrade/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def machines_query(self, data, tenant_id=None, api_version="v2.1"):
    """
    Query and get machines of a tenant (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON (machine_state, hw_id,
      sl_no, model_name, image_version, ship_state, ...)
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.1)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/machines/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_aggregates(self, data, tenant_id=None, api_version="v3.0"):
    """
    Monitor Aggregates API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v3.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/aggregates".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_bulk_metrics(self, data, tenant_id=None, api_version="v2.0"):
    """
    Monitor Bulk Metrics API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/bulk_metrics".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_cellular_metrics(self, data, tenant_id=None, api_version="v2.0"):
    """
    Monitor Cellular Metrics API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/cellular_metrics".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_cellular_metrics_topn(self, data, tenant_id=None, api_version="v2.0"):
    """
    Monitor Cellular Metrics Top-N API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/cellular_metrics/topn".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_flows(self, data, tenant_id=None, api_version="v3.6"):
    """
    Monitor Flows API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v3.6)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/flows".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_insights(self, data, tenant_id=None, api_version="v2.0"):
    """
    Monitor Insights API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/insights".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_insightslist(self, data, tenant_id=None, api_version="v2.0"):
    """
    Monitor Insights List API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/insightslist".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_lqm_point_metrics(self, data, tenant_id=None, api_version="v2.0"):
    """
    LQM Point Metrics Monitor API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/lqm_point_metrics".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_metrics(self, data, tenant_id=None, api_version="v2.2"):
    """
    Monitor Metrics API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.2)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/metrics".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_network_point_metrics(self, data, tenant_id=None, api_version="v2.0"):
    """
    Monitor Network Point Metrics API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/network_point_metrics".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_object_stats(self, data, tenant_id=None, api_version="v2.2"):
    """
    Monitor Object Stats API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.2)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/object_stats".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_sys_metrics(self, data, tenant_id=None, api_version="v2.1"):
    """
    Monitor Sys Metrics API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.1)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/sys_metrics".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_sys_metrics_topn(self, data, tenant_id=None, api_version="v2.0"):
    """
    Top-N Sys Metrics Monitor API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/sys_metrics/topn".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_sys_point_metrics(self, data, tenant_id=None, api_version="v2.0"):
    """
    Monitor Sys Point Metrics API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/sys_point_metrics".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def monitor_topn(self, data, tenant_id=None, api_version="v3.1"):
    """
    Monitor Top-N API (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v3.1)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/monitor/topn".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def multicastroutes_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query multicast routes (POST).
    **Parameters:**:
    - **data**: Dictionary containing data to POST as JSON
    - **tenant_id**: Tenant ID (pulled from the cached session when omitted)
    - **api_version**: API version to use (default v2.0)
    **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    parent = self._parent_class
    if tenant_id is None and parent.tenant_id:
        # Fall back to the tenant_id cached on the parent session.
        tenant_id = parent.tenant_id
    elif not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    url = "{}/{}/api/tenants/{}/multicastroutes/query".format(str(parent.controller), api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return parent.rest_call(url, "post", data=data)
def multicastrpconfigs_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Multicastrpconfigs_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/multicastrpconfigs/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def multicastrps(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
POST Multicastrps API Function
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/multicastrps".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def multicaststatus_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Multicaststatus_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/multicaststatus/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natglobalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NAT global prefix.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natglobalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natglobalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Global Prefixes.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natglobalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natlocalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NAT local prefix.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natlocalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natlocalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query site local prefixes.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natlocalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicypools(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NATPolicy Pool.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicypools".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicypools_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query NAT policy pools.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicypools/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicyrules(self, natpolicyset_id, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NAT Policy Rule
**Parameters:**:
- **natpolicyset_id**: NAT Policy Set ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **actions:**
- **nat_pool_id:** Type: string
- **port:** Type: integer
- **protocols:** [Type: string]
- **type:** Type: string
- **description:** Type: string
- **destination_ports:**
- **from:** Type: integer
- **to:** Type: integer
- **destination_prefixes:**
- **description:** Type: string
- **id:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
- **destination_prefixes_id:** Type: string
- **destination_zone:**
- **default_for_public_interfaces:** Type: boolean
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **region:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **destination_zone_id:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **enabled:** Type: boolean
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **natpolicypools:**
- **description:** Type: string
- **id:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
- **policyset_id:** Type: string
- **protocol:** Type: integer
- **region:** Type: string
- **source_ports:**
- **from:** Type: integer
- **to:** Type: integer
- **source_prefixes:**
- **description:** Type: string
- **id:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
- **source_prefixes_id:** Type: string
- **source_zone:**
- **default_for_public_interfaces:** Type: boolean
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **region:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **source_zone_id:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicysets/{}/natpolicyrules".format(api_version,
tenant_id,
natpolicyset_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicyrules_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query NAT policy rules.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **actions:**
- **nat_pool_id:** Type: string
- **port:** Type: integer
- **protocols:** [Type: string]
- **type:** Type: string
- **description:** Type: string
- **destination_ports:**
- **from:** Type: integer
- **to:** Type: integer
- **destination_prefixes_id:** Type: string
- **destination_zone_id:** Type: string
- **enabled:** Type: boolean
- **name:** Type: string
- **policyset_id:** Type: string
- **protocol:** Type: integer
- **source_ports:**
- **from:** Type: integer
- **to:** Type: integer
- **source_prefixes_id:** Type: string
- **source_zone_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicyrules/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicysets(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NAT Policy Set
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **clone_from:** Type: string
- **description:** Type: string
- **destination_zone_policyrule_order:** [Type: string]
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **policy_req_version:** Type: string
- **policy_rules:**
- **actions:**
- **nat_pool_id:** Type: string
- **port:** Type: integer
- **protocols:** [Type: string]
- **type:** Type: string
- **description:** Type: string
- **destination_ports:**
- **from:** Type: integer
- **to:** Type: integer
- **destination_prefixes:**
- **description:** Type: string
- **id:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
- **destination_prefixes_id:** Type: string
- **destination_zone:**
- **default_for_public_interfaces:** Type: boolean
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **region:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **destination_zone_id:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **enabled:** Type: boolean
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **natpolicypools:**
- **description:** Type: string
- **id:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
- **policyset_id:** Type: string
- **protocol:** Type: integer
- **region:** Type: string
- **source_ports:**
- **from:** Type: integer
- **to:** Type: integer
- **source_prefixes:**
- **description:** Type: string
- **id:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
- **source_prefixes_id:** Type: string
- **source_zone:**
- **default_for_public_interfaces:** Type: boolean
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **region:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **source_zone_id:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **region:** Type: string
- **send_to_element:** Type: boolean
- **source_zone_policyrule_order:** [Type: string]
- **tags:** [Type: string]
- **tenant_id:** Type: string
- **update_order:** Type: boolean
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicysets".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicysets_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query policy sets.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **clone_from:** Type: string
- **description:** Type: string
- **destination_zone_policyrule_order:** [Type: string]
- **name:** Type: string
- **source_zone_policyrule_order:** [Type: string]
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicysets/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicysetstacks(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NATPolicySet Stack
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_policysetstack:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **policyset_ids:** [Type: string]
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicysetstacks".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natpolicysetstacks_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query policyset stacks.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_policysetstack:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **policyset_ids:** [Type: string]
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natpolicysetstacks/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natzones(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a Nat Policy Zone.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_for_public_interfaces:** Type: boolean
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **region:** Type: string
- **tags:** [Type: string]
- **tenant_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natzones".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def natzones_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query NAT policy zones.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_for_public_interfaces:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/natzones/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkcontexts(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new LAN segment
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkcontexts".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkcontexts_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Queries db for limit number of network contexts that match query params.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkcontexts/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkpolicyglobalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new global prefix.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicyglobalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkpolicyglobalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Network Global Prefixes.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicyglobalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkpolicylocalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query site network prefix association.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **ipv4_prefixes:** [Type: string]
- **prefix_id:** Type: string
- **site_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicylocalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkpolicyrules(self, networkpolicyset_id, data, tenant_id=None, api_version="v2.1"):
"""
Create a new NetworkPolicyRule
**Parameters:**:
- **networkpolicyset_id**: Network Policy Set ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **app_def_ids:** [Type: string]
- **description:** Type: string
- **destination_prefixes_id:** Type: string
- **enabled:** Type: boolean
- **name:** Type: string
- **network_context_id:** Type: string
- **order_number:** Type: integer
- **paths_allowed:**
- **active_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **backup_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **l3_failure_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **service_context:**
- **active_service_label_id:** Type: string
- **active_service_label_type:** Type: string
- **backup_service_label_id:** Type: string
- **backup_service_label_type:** Type: string
- **type:** Type: string
- **source_prefixes_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicysets/{}/networkpolicyrules".format(api_version,
tenant_id,
networkpolicyset_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkpolicyrules_query(self, data, tenant_id=None, api_version="v2.1"):
"""
Query Network policy rules.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **app_def_ids:** [Type: string]
- **description:** Type: string
- **destination_prefixes_id:** Type: string
- **enabled:** Type: boolean
- **name:** Type: string
- **network_context_id:** Type: string
- **order_number:** Type: integer
- **paths_allowed:**
- **active_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **backup_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **l3_failure_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **policyset_id:** Type: string
- **service_context:**
- **active_service_label_id:** Type: string
- **active_service_label_type:** Type: string
- **backup_service_label_id:** Type: string
- **backup_service_label_type:** Type: string
- **type:** Type: string
- **source_prefixes_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicyrules/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkpolicysets(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NetworkPolicySet
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **clone_from:** Type: string
- **defaultrule_policyset:** Type: boolean
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **name:** Type: string
- **policy_req_version:** Type: string
- **policy_rules:**
- **app_def_ids:** [Type: string]
- **description:** Type: string
- **destination_prefixes_id:** Type: string
- **enabled:** Type: boolean
- **id:** Type: string
- **name:** Type: string
- **network_context_id:** Type: string
- **order_number:** Type: integer
- **paths_allowed:**
- **active_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **backup_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **l3_failure_paths:**
- **label:** Type: string
- **path_type:** Type: string
- **service_context:**
- **active_service_label_id:** Type: string
- **active_service_label_type:** Type: string
- **backup_service_label_id:** Type: string
- **backup_service_label_type:** Type: string
- **type:** Type: string
- **source_prefixes_id:** Type: string
- **tags:** [Type: string]
- **region:** Type: string
- **send_to_element:** Type: boolean
- **tags:** [Type: string]
- **tenant_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicysets".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networkpolicysets_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Network policy sets.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **clone_from:** Type: string
- **defaultrule_policyset:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicysets/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
    def networkpolicysetstacks(self, data, tenant_id=None, api_version="v2.0"):
        """
        Create a new NetworkPolicySetStack
        **Parameters:**:
        - **data**: Dictionary containing data to POST as JSON
        - **tenant_id**: Tenant ID
        - **api_version**: API version to use (default v2.0)
        **Payload Attributes:**
        - **default_policysetstack:** Type: boolean
        - **defaultrule_policyset:**
           - **clone_from:** Type: string
           - **defaultrule_policyset:** Type: boolean
           - **description:** Type: string
           - **disabled:** Type: boolean
           - **disabled_reason:** Type: string
           - **id:** Type: string
           - **inactive:** Type: boolean
           - **inactive_reason:** Type: string
           - **name:** Type: string
           - **policy_req_version:** Type: string
           - **policy_rules:**
               - **app_def_ids:** [Type: string]
               - **description:** Type: string
               - **destination_prefixes_id:** Type: string
               - **enabled:** Type: boolean
               - **id:** Type: string
               - **name:** Type: string
               - **network_context_id:** Type: string
               - **order_number:** Type: integer
               - **paths_allowed:**
                   - **active_paths:**
                       - **label:** Type: string
                       - **path_type:** Type: string
                   - **backup_paths:**
                       - **label:** Type: string
                       - **path_type:** Type: string
                   - **l3_failure_paths:**
                       - **label:** Type: string
                       - **path_type:** Type: string
               - **service_context:**
                   - **active_service_label_id:** Type: string
                   - **active_service_label_type:** Type: string
                   - **backup_service_label_id:** Type: string
                   - **backup_service_label_type:** Type: string
                   - **type:** Type: string
               - **source_prefixes_id:** Type: string
               - **tags:** [Type: string]
           - **region:** Type: string
           - **send_to_element:** Type: boolean
           - **tags:** [Type: string]
           - **tenant_id:** Type: string
        - **defaultrule_policyset_id:** Type: string
        - **description:** Type: string
        - **disabled:** Type: boolean
        - **disabled_reason:** Type: string
        - **inactive:** Type: boolean
        - **inactive_reason:** Type: string
        - **legacy_policystack:** Type: boolean
        - **name:** Type: string
        - **policyset_ids:** [Type: string]
        - **policysets:**
           - **clone_from:** Type: string
           - **description:** Type: string
           - **disabled:** Type: boolean
           - **disabled_reason:** Type: string
           - **id:** Type: string
           - **inactive:** Type: boolean
           - **inactive_reason:** Type: string
           - **name:** Type: string
           - **policy_rules:**
               - **description:** Type: string
               - **disabled:** Type: boolean
               - **disabled_reason:** Type: string
               - **id:** Type: string
               - **inactive:** Type: boolean
               - **inactive_reason:** Type: string
               - **name:** Type: string
               - **policyset_id:** Type: string
               - **region:** Type: string
               - **tags:** [Type: string]
               - **tenant_id:** Type: string
           - **region:** Type: string
           - **send_to_element:** Type: boolean
           - **tags:** [Type: string]
           - **tenant_id:** Type: string
        - **region:** Type: string
        - **tags:** [Type: string]
        - **tenant_id:** Type: string
        **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
        """
        if tenant_id is None and self._parent_class.tenant_id:
            # Pull tenant_id from parent namespace cache.
            tenant_id = self._parent_class.tenant_id
        elif not tenant_id:
            # No value for tenant_id - neither supplied nor cached.
            raise TypeError("tenant_id is required but not set or cached.")
        cur_ctlr = self._parent_class.controller
        # Endpoint: <controller>/<api_version>/api/tenants/<tenant_id>/networkpolicysetstacks
        url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicysetstacks".format(api_version,
                                                                                 tenant_id)
        api_logger.debug("URL = %s", url)
        return self._parent_class.rest_call(url, "post", data=data)
def networkpolicysetstacks_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query network policyset stacks.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_policysetstack:** Type: boolean
- **defaultrule_policyset_id:** Type: string
- **description:** Type: string
- **name:** Type: string
- **policyset_ids:** [Type: string]
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicysetstacks/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def networks_bulk_config_state_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Get all config/state info for given network from NB
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networks/bulk_config_state/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicyglobalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicyglobalprefixes API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicyglobalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicyglobalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicyglobalprefixes_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicyglobalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicylocalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicylocalprefixes API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicylocalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicylocalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicylocalprefixes_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicylocalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicyrules(self, ngfwsecuritypolicyset_id, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicyrules API Function
**Parameters:**:
- **ngfwsecuritypolicyset_id**: NGFW Security Policy Set ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicysets/{}/ngfwsecuritypolicyrules".format(api_version,
tenant_id,
ngfwsecuritypolicyset_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicyrules_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicyrules_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicyrules/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicysets(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicysets API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicysets".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicysets_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicysets_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicysets/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicysetstacks(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicysetstacks API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicysetstacks".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ngfwsecuritypolicysetstacks_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Ngfwsecuritypolicysetstacks_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ngfwsecuritypolicysetstacks/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def ops_vpnlinks(self, vpnlink_id, data, tenant_id=None, api_version="v2.0"):
"""
Perform an operation on a VPN link
**Parameters:**:
- **vpnlink_id**: VPN Link ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **action:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/vpnlinks/{}/operations".format(api_version,
tenant_id,
vpnlink_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def otpaccess(self, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Verify Challenge phrase and generate response phrase
**Parameters:**:
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **challenge_phrase:** Type: string
- **response_phrase:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/{}/otpaccess".format(api_version,
tenant_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def password_change(self, data, tenant_id=None, api_version="v2.0"):
"""
Allows one to change password
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **oldPassword:** Type: string
- **password:** Type: string
- **repeatPassword:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/accounts/password/change".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def pathgroups(self, data, tenant_id=None, api_version="v2.1"):
"""
Create a Path Group for a tenant.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **paths:**
- **label:** Type: string
- **path_type:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/pathgroups".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def pathgroups_query(self, data, tenant_id=None, api_version="v2.1"):
"""
Queries db for limit number of network contexts that match query params.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **paths:**
- **label:** Type: string
- **path_type:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/pathgroups/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
    def policyrules(self, policyset_id, data, tenant_id=None, api_version="v3.1"):
        """
        Create a new Policy
        **Parameters:**:
        - **policyset_id**: Policy Set ID
        - **data**: Dictionary containing data to POST as JSON
        - **tenant_id**: Tenant ID
        - **api_version**: API version to use (default v3.1)
        **Payload Attributes:**
        - **app_def_id:** Type: string
        - **app_def_name:** Type: string
        - **default_rule:** Type: boolean
        - **description:** Type: string
        - **disabled:** Type: boolean
        - **disabled_reason:** Type: string
        - **inactive:** Type: boolean
        - **inactive_reason:** Type: string
        - **lan_network_ids:** [Type: string]
        - **name:** Type: string
        - **network_context_id:** Type: string
        - **paths_allowed:**
           - **active_paths:**
               - **label:** Type: string
               - **path_type:** Type: string
           - **backup_paths:**
               - **label:** Type: string
               - **path_type:** Type: string
           - **l3_failure_paths:**
               - **label:** Type: string
               - **path_type:** Type: string
        - **policy_set_id:** Type: string
        - **priority_num:** Type: integer
        - **region:** Type: string
        - **service_context:**
           - **active_service_label_id:** Type: string
           - **active_service_label_type:** Type: string
           - **backup_service_label_id:** Type: string
           - **backup_service_label_type:** Type: string
           - **type:** Type: string
        - **site_paths_allowed:**
           - **wn_name:** Type: string
           - **wp_type:** Type: string
        - **tenant_id:** Type: string
        **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
        """
        if tenant_id is None and self._parent_class.tenant_id:
            # Pull tenant_id from parent namespace cache.
            tenant_id = self._parent_class.tenant_id
        elif not tenant_id:
            # No value for tenant_id - neither supplied nor cached.
            raise TypeError("tenant_id is required but not set or cached.")
        cur_ctlr = self._parent_class.controller
        # Endpoint: <controller>/<api_version>/api/tenants/<tenant_id>/policysets/<policyset_id>/policyrules
        url = str(cur_ctlr) + "/{}/api/tenants/{}/policysets/{}/policyrules".format(api_version,
                                                                                    tenant_id,
                                                                                    policyset_id)
        api_logger.debug("URL = %s", url)
        return self._parent_class.rest_call(url, "post", data=data)
    def policyrules_query(self, data, tenant_id=None, api_version="v3.1"):
        """
        Queries db for policyrules that match query params.
        **Parameters:**:
        - **data**: Dictionary containing data to POST as JSON
        - **tenant_id**: Tenant ID
        - **api_version**: API version to use (default v3.1)
        **Payload Attributes:**
        - **app_def_id:** Type: string
        - **description:** Type: string
        - **name:** Type: string
        - **network_context_id:** Type: string
        - **paths_allowed:**
           - **active_paths:**
               - **label:** Type: string
               - **path_type:** Type: string
           - **backup_paths:**
               - **label:** Type: string
               - **path_type:** Type: string
           - **l3_failure_paths:**
               - **label:** Type: string
               - **path_type:** Type: string
        - **policy_set_id:** Type: string
        - **priority_num:** Type: integer
        - **service_context:**
           - **active_service_label_id:** Type: string
           - **active_service_label_type:** Type: string
           - **backup_service_label_id:** Type: string
           - **backup_service_label_type:** Type: string
           - **type:** Type: string
        **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
        """
        if tenant_id is None and self._parent_class.tenant_id:
            # Pull tenant_id from parent namespace cache.
            tenant_id = self._parent_class.tenant_id
        elif not tenant_id:
            # No value for tenant_id - neither supplied nor cached.
            raise TypeError("tenant_id is required but not set or cached.")
        cur_ctlr = self._parent_class.controller
        # Endpoint: <controller>/<api_version>/api/tenants/<tenant_id>/policyrules/query
        url = str(cur_ctlr) + "/{}/api/tenants/{}/policyrules/query".format(api_version,
                                                                            tenant_id)
        api_logger.debug("URL = %s", url)
        return self._parent_class.rest_call(url, "post", data=data)
    def policysets(self, data, tenant_id=None, api_version="v3.0"):
        """
        Create a new Policy Set
        **Parameters:**:
        - **data**: Dictionary containing data to POST as JSON
        - **tenant_id**: Tenant ID
        - **api_version**: API version to use (default v3.0)
        **Payload Attributes:**
        - **bandwidth_allocation_schemes:**
           - **bandwidth_range:**
               - **high:** Type: number
               - **low:** Type: number
           - **business_priorities:**
               - **bandwidth_allocation:** Type: number
               - **bandwidth_split_per_type:**
                   - **bulk:** Type: number
                   - **rt_audio:** Type: number
                   - **rt_video:** Type: number
                   - **transactional:** Type: number
               - **priority_num:** Type: integer
        - **business_priority_names:**
           - **priority_name:** Type: string
           - **priority_num:** Type: integer
        - **default_policy:** Type: boolean
        - **description:** Type: string
        - **name:** Type: string
        **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
        """
        if tenant_id is None and self._parent_class.tenant_id:
            # Pull tenant_id from parent namespace cache.
            tenant_id = self._parent_class.tenant_id
        elif not tenant_id:
            # No value for tenant_id - neither supplied nor cached.
            raise TypeError("tenant_id is required but not set or cached.")
        cur_ctlr = self._parent_class.controller
        # Endpoint: <controller>/<api_version>/api/tenants/<tenant_id>/policysets
        url = str(cur_ctlr) + "/{}/api/tenants/{}/policysets".format(api_version,
                                                                     tenant_id)
        api_logger.debug("URL = %s", url)
        return self._parent_class.rest_call(url, "post", data=data)
def policysets_bulk_config_state_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Get all config/state info across all policysets from NB
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/policysets/bulk_config_state/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
    def policysets_query(self, data, tenant_id=None, api_version="v3.0"):
        """
        Queries db for policysets that match query params.
        **Parameters:**:
        - **data**: Dictionary containing data to POST as JSON
        - **tenant_id**: Tenant ID
        - **api_version**: API version to use (default v3.0)
        **Payload Attributes:**
        - **bandwidth_allocation_schemes:**
           - **bandwidth_range:**
               - **high:** Type: number
               - **low:** Type: number
           - **business_priorities:**
               - **bandwidth_allocation:** Type: number
               - **bandwidth_split_per_type:**
                   - **bulk:** Type: number
                   - **rt_audio:** Type: number
                   - **rt_video:** Type: number
                   - **transactional:** Type: number
               - **priority_num:** Type: integer
        - **business_priority_names:**
           - **priority_name:** Type: string
           - **priority_num:** Type: integer
        - **default_policy:** Type: boolean
        - **description:** Type: string
        - **name:** Type: string
        **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
        """
        if tenant_id is None and self._parent_class.tenant_id:
            # Pull tenant_id from parent namespace cache.
            tenant_id = self._parent_class.tenant_id
        elif not tenant_id:
            # No value for tenant_id - neither supplied nor cached.
            raise TypeError("tenant_id is required but not set or cached.")
        cur_ctlr = self._parent_class.controller
        # Endpoint: <controller>/<api_version>/api/tenants/<tenant_id>/policysets/query
        url = str(cur_ctlr) + "/{}/api/tenants/{}/policysets/query".format(api_version,
                                                                           tenant_id)
        api_logger.debug("URL = %s", url)
        return self._parent_class.rest_call(url, "post", data=data)
def prefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Prefixes_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prefixfilters(self, site_id, data, tenant_id=None, api_version="v2.0"):
"""
Create an association between site and security prefix filter.
**Parameters:**:
- **site_id**: Site ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **filters:**
- **type:** Type: string
- **prefix_filter_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/prefixfilters".format(api_version,
tenant_id,
site_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prefixfilters_query(self, site_id, data, tenant_id=None, api_version="v2.0"):
"""
Query security prefix filter for NB API.
**Parameters:**:
- **site_id**: Site ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **query_params:**
- **zone_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/prefixfilters/query".format(api_version,
tenant_id,
site_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicyglobalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new global prefix.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicyglobalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicyglobalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Priority Global Prefixes.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **ipv4_prefixes:** [Type: string]
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicyglobalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicylocalprefixes_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query site priority prefix association.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **ipv4_prefixes:** [Type: string]
- **prefix_id:** Type: string
- **site_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicylocalprefixes/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicyrules(self, prioritypolicyset_id, data, tenant_id=None, api_version="v2.0"):
"""
Create a new PriorityPolicyRule
**Parameters:**:
- **prioritypolicyset_id**: Priority Policy Set ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **app_def_ids:** [Type: string]
- **description:** Type: string
- **destination_prefixes_id:** Type: string
- **dscp:**
- **value:** Type: integer
- **enabled:** Type: boolean
- **name:** Type: string
- **network_context_id:** Type: string
- **order_number:** Type: integer
- **priority_number:** Type: integer
- **source_prefixes_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicysets/{}/prioritypolicyrules".format(api_version,
tenant_id,
prioritypolicyset_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicyrules_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Priority policy rules.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **app_def_ids:** [Type: string]
- **description:** Type: string
- **destination_prefixes_id:** Type: string
- **dscp:**
- **value:** Type: integer
- **enabled:** Type: boolean
- **name:** Type: string
- **network_context_id:** Type: string
- **order_number:** Type: integer
- **policyset_id:** Type: string
- **priority_number:** Type: integer
- **source_prefixes_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicyrules/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicysets(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new PriorityPolicySet
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **bandwidth_allocation_schemes:**
- **bandwidth_range:**
- **high:** Type: number
- **low:** Type: number
- **business_priorities:**
- **bandwidth_allocation:** Type: number
- **bandwidth_split_per_type:**
- **bulk:** Type: number
- **rt_audio:** Type: number
- **rt_video:** Type: number
- **transactional:** Type: number
- **priority_number:** Type: integer
- **business_priority_names:**
- **priority_name:** Type: string
- **priority_num:** Type: integer
- **clone_from:** Type: string
- **default_rule_dscp_mappings:**
- **dscp:** [Type: integer]
- **priority_number:** Type: integer
- **transfer_type:** Type: string
- **defaultrule_policyset:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
- **template:** Type: boolean
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicysets".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicysets_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Priority policy sets.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **bandwidth_allocation_schemes:**
- **bandwidth_range:**
- **high:** Type: number
- **low:** Type: number
- **business_priorities:**
- **bandwidth_allocation:** Type: number
- **bandwidth_split_per_type:**
- **bulk:** Type: number
- **rt_audio:** Type: number
- **rt_video:** Type: number
- **transactional:** Type: number
- **priority_number:** Type: integer
- **business_priority_names:**
- **priority_name:** Type: string
- **priority_num:** Type: integer
- **clone_from:** Type: string
- **default_rule_dscp_mappings:**
- **dscp:** [Type: integer]
- **priority_number:** Type: integer
- **transfer_type:** Type: string
- **defaultrule_policyset:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
- **template:** Type: boolean
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicysets/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicysetstacks(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new PriorityPolicySetStack
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_policysetstack:** Type: boolean
- **defaultrule_policyset_id:** Type: string
- **description:** Type: string
- **name:** Type: string
- **policyset_ids:** [Type: string]
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicysetstacks".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prioritypolicysetstacks_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query priority policyset stacks.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_policysetstack:** Type: boolean
- **defaultrule_policyset_id:** Type: string
- **description:** Type: string
- **name:** Type: string
- **policyset_ids:** [Type: string]
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicysetstacks/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prismaaccess_configs(self, site_id, data, tenant_id=None, api_version="v2.0"):
"""
POST Prismaaccess_Configs API Function
**Parameters:**:
- **site_id**: Site ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/prismaaccess_configs".format(api_version,
tenant_id,
site_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def prismaaccess_configs_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Prismaaccess_Configs_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prismaaccess_configs/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def recovery_tokens(self, machine_id, data, tenant_id=None, api_version="v2.1"):
"""
POST Recovery_Tokens API Function
**Parameters:**:
- **machine_id**: Machine ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/machines/{}/recovery_tokens".format(api_version,
tenant_id,
machine_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def reports_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Reports_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/reports/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def reportsdir_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Reportsdir_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/reportsdir/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def roles(self, data, tenant_id=None, api_version="v2.1"):
"""
Add a custom role
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **custom_permissions:**
- **allowed_after_ms:** Type: integer
- **allowed_before_ms:** Type: integer
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **disallow_permission:** Type: boolean
- **id:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **region:** Type: string
- **tenant_id:** Type: string
- **value:** Type: string
- **description:** Type: string
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **disallow_permissions:**
- **value:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **is_system_owned:** Type: boolean
- **name:** Type: string
- **permissions:**
- **value:** Type: string
- **region:** Type: string
- **roles:**
- **name:** Type: string
- **tenant_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/roles".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_aspathaccesslists(self, site_id, element_id, data, tenant_id=None, api_version="v2.1"):
"""
Create AS-Path Access List
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **as_path_regex_list:**
- **as_path_regex:** Type: string
- **order:** Type: integer
- **permit:** Type: boolean
- **auto_generated:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_aspathaccesslists".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_aspathaccesslists_query(self, site_id, element_id, data, tenant_id=None, api_version="v2.1"):
"""
Queries db for limit number of access lists that match query params.
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **as_path_regex_list:**
- **as_path_regex:** Type: string
- **order:** Type: integer
- **permit:** Type: boolean
- **auto_generated:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_aspathaccesslists/query".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_ipcommunitylists(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Create IP Community List
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **auto_generated:** Type: boolean
- **community_list:**
- **community_str:** Type: string
- **permit:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_ipcommunitylists".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_ipcommunitylists_query(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Queries db for limit number of community lists that match query params.
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **auto_generated:** Type: boolean
- **community_list:**
- **community_str:** Type: string
- **permit:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_ipcommunitylists/query".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_prefixlists(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Create IP Prefix List
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **auto_generated:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **prefix_filter_list:**
- **ge:** Type: integer
- **le:** Type: integer
- **order:** Type: integer
- **permit:** Type: boolean
- **prefix:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_prefixlists".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_prefixlists_query(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Queries db for limit number of prefix lists that match query params.
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **auto_generated:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **prefix_filter_list:**
- **ge:** Type: integer
- **le:** Type: integer
- **order:** Type: integer
- **permit:** Type: boolean
- **prefix:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_prefixlists/query".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_routemaps(self, site_id, element_id, data, tenant_id=None, api_version="v2.1"):
"""
Create Route Map
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **auto_generated:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **route_map_entries:**
- **continue_entry:** Type: string
- **match:**
- **as_path_id:** Type: string
- **community_list_id:** Type: string
- **ip_next_hop_id:** Type: string
- **ip_prefix_list_id:** Type: string
- **tag:** Type: integer
- **order:** Type: integer
- **permit:** Type: boolean
- **set:**
- **as_path_prepend:** Type: string
- **community:** Type: string
- **ip_next_hop:** Type: string
- **local_preference:** Type: integer
- **tag:** Type: integer
- **weight:** Type: integer
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_routemaps".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def routing_routemaps_query(self, site_id, element_id, data, tenant_id=None, api_version="v2.1"):
"""
Queries db for limit number of route maps that match query params.
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **auto_generated:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **route_map_entries:**
- **continue_entry:** Type: string
- **match:**
- **as_path_id:** Type: string
- **community_list_id:** Type: string
- **ip_next_hop_id:** Type: string
- **ip_prefix_list_id:** Type: string
- **tag:** Type: integer
- **order:** Type: integer
- **permit:** Type: boolean
- **set:**
- **as_path_prepend:** Type: string
- **community:** Type: string
- **ip_next_hop:** Type: string
- **local_preference:** Type: integer
- **tag:** Type: integer
- **weight:** Type: integer
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/routing_routemaps/query".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def rquery(self, data, tenant_id=None, api_version="v3.0"):
"""
Query and get ESP machines across regions
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v3.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/machines/rquery".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def sdwanapps_configs(self, sdwanapp_id, data, tenant_id=None, api_version="v2.0"):
"""
POST Sdwanapps_Configs API Function
**Parameters:**:
- **sdwanapp_id**: SDWAN Application ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sdwanapps/{}/configs".format(api_version,
tenant_id,
sdwanapp_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def securitypolicyruleorder(self, securitypolicyset_id, data, tenant_id=None, api_version="v2.0"):
"""
Update a tenant security policy set.
**Parameters:**:
- **securitypolicyset_id**: Security Policy Set ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **policyrule_order:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/securitypolicysets/{}/firewallpolicyruleorder".format(api_version,
tenant_id,
securitypolicyset_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def securitypolicyrules(self, securitypolicyset_id, data, tenant_id=None, api_version="v2.0"):
"""
Create a new tenant security policy rule.
**Parameters:**:
- **securitypolicyset_id**: Security Policy Set ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **action:** Type: string
- **application_ids:** [Type: string]
- **description:** Type: string
- **destination_filter_ids:** [Type: string]
- **destination_zone_ids:** [Type: string]
- **disabled_flag:** Type: boolean
- **name:** Type: string
- **source_filter_ids:** [Type: string]
- **source_zone_ids:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/securitypolicysets/{}/securitypolicyrules".format(api_version,
tenant_id,
securitypolicyset_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def securitypolicyrules_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Queries db for limit number of LAN networks that match query params.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **action:** Type: string
- **application_ids:** [Type: string]
- **description:** Type: string
- **destination_filter_ids:** [Type: string]
- **destination_zone_ids:** [Type: string]
- **disabled_flag:** Type: boolean
- **name:** Type: string
- **security_policyset_id:** Type: string
- **source_filter_ids:** [Type: string]
- **source_zone_ids:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/securitypolicyrules/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def securitypolicysets(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create a new tenant security policy set.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** description, name, policyrule_order

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/securitypolicysets".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def securitypolicysets_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of security policysets that match query params.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** description, name, policyrule_order

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/securitypolicysets/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def securityzones(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create a new security zone

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** description, name

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/securityzones".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def securityzones_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of security zones that match query params.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** description, name

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/securityzones/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def servicebindingmaps(self, data, tenant_id=None, api_version="v2.1"):
    """
    Create a new Service Binding Map

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.1)

      **Payload Attributes:** description, is_default, name,
      service_bindings (service_endpoint_ids, service_label_id), tags

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/servicebindingmaps".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def servicebindingmaps_query(self, data, tenant_id=None, api_version="v2.1"):
    """
    Queries db for limit number of service bindings that match query params.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.1)

      **Payload Attributes:** description, is_default, name,
      service_bindings (service_endpoint_ids, service_label_id), tags

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/servicebindingmaps/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def serviceendpoints(self, data, tenant_id=None, api_version="v2.2"):
    """
    Create a new Service Endpoint

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.2)

      **Payload Attributes:** address, admin_up, allow_enterprise_traffic,
      description, liveliness_probe (http, icmp_ping), ip_addresses,
      location, name, service_link_peers, site_id, tags, type

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/serviceendpoints".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def serviceendpoints_query(self, data, tenant_id=None, api_version="v2.2"):
    """
    Queries db for limit number of service endpoints that match query params.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.2)

      **Payload Attributes:**

      - **address:**
         - **city:** Type: string
         - **country:** Type: string
         - **post_code:** Type: string
         - **state:** Type: string
         - **street:** Type: string
         - **street2:** Type: string
      - **admin_up:** Type: boolean
      - **allow_enterprise_traffic:** Type: boolean
      - **description:** Type: string
      - **liveliness_probe:**
         - **http:** failure_count, http_status_codes, interval, url
         - **icmp_ping:** failure_count, interval, ip_addresses
      - **location:** description, latitude, longitude
      - **name:** Type: string
      - **service_link_peers:** hostnames, ip_addresses
      - **site_id:** Type: string
      - **tags:** [Type: string]
      - **type:** Type: string

      **Raises:** TypeError when no tenant_id is supplied and none is cached.

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id - fall back on the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Neither supplied nor cached; the URL cannot be built.
        raise TypeError("tenant_id is required but not set or cached.")
    cur_ctlr = self._parent_class.controller
    url = str(cur_ctlr) + "/{}/api/tenants/{}/serviceendpoints/query".format(api_version,
                                                                             tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def servicelabels(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create a new Service Label

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** description, name, tags, type

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/servicelabels".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def servicelabels_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Queries db for limit number of service labels that match query params.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** dest_page, getDeleted, last_query_ts, limit,
      next_query, query_params, retrieved_fields, retrieved_fields_mask,
      sort_params, total_count

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/servicelabels/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def signup(self, data, tenant_id=None, api_version="v2.0"):
    """
    Signup new operators

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** addresses, custom_roles (custom_permissions,
      disabled, disallow_permissions, id, inactive, name, permissions,
      roles), email, enable_session_ip_lock, first_name, ipv4_list,
      last_name, logout_others, name, password, phone_numbers,
      repeatPassword, requestId, roles, secondary_emails

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/signup".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_bulk_config_state_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Get site config/state info for queried site from NB

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** count, items, tenant_id, total_count

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/bulk_config_state/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_correlationevents_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    POST Site_Correlationevents_Query API Function

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/correlationevents/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_extensions(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create site level extension configuration

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** conf, disabled, name, namespace

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/extensions".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_extensions_query(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Query site level extensions that match query params

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** dest_page, getDeleted, last_query_ts, limit,
      next_query, query_params, retrieved_fields, retrieved_fields_mask,
      sort_params, total_count

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/extensions/query".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_ipfixlocalprefixes(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create a IPFix site prefix association

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** ipv4_prefixes, prefix_id, tags

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/ipfixlocalprefixes".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_lannetworks_query(self, site_id, data, tenant_id=None, api_version="v3.1"):
    """
    POST Site_Lannetworks_Query API Function

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v3.1)

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/lannetworks/query".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_natlocalprefixes(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create an association between site and NAT Prefix.

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** ipv4_prefixes, prefix_id, tags

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/natlocalprefixes".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_networkpolicylocalprefixes(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create an association between site and Network local Prefix.

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** ipv4_prefixes, prefix_id, tags

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/networkpolicylocalprefixes".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_ngfwsecuritypolicylocalprefixes(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    POST Site_Ngfwsecuritypolicylocalprefixes API Function

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/ngfwsecuritypolicylocalprefixes".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_prioritypolicylocalprefixes(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create an association between site and Priority local Prefix.

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** ipv4_prefixes, prefix_id, tags

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/prioritypolicylocalprefixes".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def site_query(self, data, tenant_id=None, api_version="v4.5"):
    """
    Queries db for limit number of sites that match query params.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v4.5)

      **Payload Attributes:** address, admin_state, description,
      element_cluster_role, extended_tags, location, name,
      nat_policysetstack_id, network_policysetstack_id, policy_set_id,
      priority_policysetstack_id, security_policyset_id, service_binding,
      tags

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def sites(self, data, tenant_id=None, api_version="v4.6"):
    """
    Create a new site.

      **Parameters:**:

      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v4.6)

      **Payload Attributes:**

      - **address:**
         - **city:** Type: string
         - **country:** Type: string
         - **post_code:** Type: string
         - **state:** Type: string
         - **street:** Type: string
         - **street2:** Type: string
      - **admin_state:** Type: string
      - **description:** Type: string
      - **element_cluster_role:** Type: string
      - **extended_tags:** key, value, value_type
      - **location:** description, latitude, longitude
      - **name:** Type: string
      - **nat_policysetstack_id:** Type: string
      - **network_policysetstack_id:** Type: string
      - **policy_set_id:** Type: string
      - **priority_policysetstack_id:** Type: string
      - **security_policyset_id:** Type: string
      - **security_policysetstack_id:** Type: string
      - **service_binding:** Type: string
      - **tags:** [Type: string]

      **Raises:** TypeError when no tenant_id is supplied and none is cached.

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # No explicit tenant_id - fall back on the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Neither supplied nor cached; the URL cannot be built.
        raise TypeError("tenant_id is required but not set or cached.")
    cur_ctlr = self._parent_class.controller
    url = str(cur_ctlr) + "/{}/api/tenants/{}/sites".format(api_version,
                                                            tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def sitesecurityzones(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create an association between site and security zone.

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** networks (network_id, network_type), zone_id

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/sitesecurityzones".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def sitesecurityzones_query(self, site_id, data, tenant_id=None, api_version="v2.0"):
    """
    Query security zone for NB API.

      **Parameters:**:

      - **site_id**: Site ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** networks (network_id, network_type), zone_id

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/sitesecurityzones/query".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def snmpagents(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create SNMP Agent

      **Parameters:**:

      - **site_id**: Site ID
      - **element_id**: Element (Device) ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** description, tags, v2_config (community,
      enabled), v3_config (user_access: auth_phrase, auth_type, enc_phrase,
      enc_type, engine_id, security_level, user_name)

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/elements/{}/snmpagents".format(api_version, tenant_id, site_id, element_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def snmptraps(self, site_id, element_id, data, tenant_id=None, api_version="v2.0"):
    """
    Create SNMP Trap

      **Parameters:**:

      - **site_id**: Site ID
      - **element_id**: Element (Device) ID
      - **data**: Dictionary containing data to POST as JSON
      - **tenant_id**: Tenant ID
      - **api_version**: API version to use (default v2.0)

      **Payload Attributes:** description, enabled, server_ip,
      source_interface, tags, v2_config (community, enabled), v3_config
      (user_access: auth_phrase, auth_type, enc_phrase, enc_type,
      engine_id, security_level, user_name), version

      **Returns:** requests.Response object extended with cgx_status and cgx_content properties.
    """
    if tenant_id is None:
        # Fall back on the tenant_id cached by the parent API object.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        # Still nothing usable - caller must supply one.
        raise TypeError("tenant_id is required but not set or cached.")
    url = str(self._parent_class.controller) + "/{}/api/tenants/{}/sites/{}/elements/{}/snmptraps".format(api_version, tenant_id, site_id, element_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def software_current_status_query(self, data, tenant_id=None, api_version="v2.1"):
"""
Get the current image status of all the element
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **active_image_id:** Type: string
- **active_version:** Type: string
- **download_interval:** Type: integer
- **download_percent:** Type: integer
- **element_id:** Type: string
- **failure_info:** Type: string
- **previous_image_id:** Type: string
- **rollback_version:** Type: string
- **scheduled_download:** Type: string
- **scheduled_upgrade:** Type: string
- **upgrade_image_id:** Type: string
- **upgrade_interval:** Type: integer
- **upgrade_state:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/software/current_status/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def software_status_query(self, data, tenant_id=None, api_version="v2.1"):
"""
Query the software upgrade status of all tenant elements
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **active_image_id:** Type: string
- **active_version:** Type: string
- **download_interval:** Type: integer
- **download_percent:** Type: integer
- **element_id:** Type: string
- **failure_info:** Type: string
- **previous_image_id:** Type: string
- **rollback_version:** Type: string
- **scheduled_download:** Type: string
- **scheduled_upgrade:** Type: string
- **upgrade_image_id:** Type: string
- **upgrade_interval:** Type: integer
- **upgrade_state:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/software/status/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def softwarehistory_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Queries db for all software download done by a tenant
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/softwarehistory/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def spokeclusters(self, site_id, data, tenant_id=None, api_version="v2.0"):
"""
Create Spoke Cluster
**Parameters:**:
- **site_id**: Site ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **advertisement_interval:** Type: number
- **description:** Type: string
- **name:** Type: string
- **preempt:** Type: boolean
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/spokeclusters".format(api_version,
tenant_id,
site_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def spokeclusters_ops(self, site_id, spokecluster_id, data, tenant_id=None, api_version="v2.0"):
"""
Handle operations on spokecluster.
**Parameters:**:
- **site_id**: Site ID
- **spokecluster_id**: Spoke Cluster ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **action:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/spokeclusters/{}/operations".format(api_version,
tenant_id,
site_id,
spokecluster_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def spokeclusters_query(self, site_id, data, tenant_id=None, api_version="v2.0"):
"""
Query Spoke Clusters.
**Parameters:**:
- **site_id**: Site ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **advertisement_interval:** Type: number
- **description:** Type: string
- **name:** Type: string
- **preempt:** Type: boolean
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/spokeclusters/query".format(api_version,
tenant_id,
site_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def staticroutes(self, site_id, element_id, data, tenant_id=None, api_version="v2.1"):
"""
Create static route
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **description:** Type: string
- **destination_prefix:** Type: string
- **name:** Type: string
- **network_context_id:** Type: string
- **nexthop_reachability_probe:** Type: boolean
- **nexthops:**
- **admin_distance:** Type: integer
- **nexthop_interface_id:** Type: string
- **nexthop_ip:** Type: string
- **self:** Type: boolean
- **scope:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/staticroutes".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def status_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Status_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/status/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def syslogserverprofiles(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Syslogserverprofiles API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/syslogserverprofiles".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def syslogservers(self, site_id, element_id, data, tenant_id=None, api_version="v2.2"):
"""
Create Syslog Server
**Parameters:**:
- **site_id**: Site ID
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.2)
**Payload Attributes:**
- **description:** Type: string
- **enable_flow_logging:** Type: boolean
- **enabled:** Type: boolean
- **name:** Type: string
- **protocol:** Type: string
- **remote_ca_certificate:** Type: string
- **server_fqdn:** Type: string
- **server_ip:** Type: string
- **server_port:** Type: integer
- **severity_level:** Type: string
- **source_interface:** Type: string
- **syslog_profile_id:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/sites/{}/elements/{}/syslogservers".format(api_version,
tenant_id,
site_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def templates_ntp(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new NTP Template
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **default_template:** Type: boolean
- **description:** Type: string
- **name:** Type: string
- **ntp_servers:**
- **host:** Type: string
- **max_poll:** Type: integer
- **min_poll:** Type: integer
- **version:** Type: integer
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/templates/ntp".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_anynetlinks(self, data, tenant_id=None, api_version="v3.2"):
"""
POST Tenant_Anynetlinks API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v3.2)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/anynetlinks".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_bgppeers_query(self, data, tenant_id=None, api_version="v2.2"):
"""
Queries db for BGP peers that match query params.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.2)
**Payload Attributes:**
- **bgp_config:**
- **adv_interval:** Type: integer
- **hold_time:** Type: integer
- **keepalive_time:** Type: integer
- **local_as_num:** Type: string
- **md5_secret:** Type: string
- **multi_hop_limit:** Type: integer
- **peer_auth_type:** Type: string
- **peer_retry_time:** Type: integer
- **description:** Type: string
- **name:** Type: string
- **peer_ip:** Type: string
- **peer_type:** Type: string
- **remote_as_num:** Type: string
- **route_map_in_id:** Type: string
- **route_map_out_id:** Type: string
- **scope:** Type: string
- **shutdown:** Type: boolean
- **tags:** [Type: string]
- **update_source:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/bgppeers/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_element_operations(self, element_id, data, tenant_id=None, api_version="v2.0"):
"""
Handle operations on element.
**Parameters:**:
- **element_id**: Element (Device) ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **action:** Type: string
- **parameters:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/elements/{}/operations".format(api_version,
tenant_id,
element_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_extensions_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Queries db for limit number of tenant extensions that match the query params.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **conf:** Type: object
- **disabled:** Type: boolean
- **name:** Type: string
- **namespace:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/extensions/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_forgot_password_login(self, data, tenant_id=None, api_version="v2.0"):
"""
Forgot password API
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **email:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/login/password/forgot".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data, sensitive=True)
def tenant_ipfixlocalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a IPFix local prefix
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/ipfixlocalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_machine_operations(self, machine_id, data, tenant_id=None, api_version="v2.1"):
"""
Update a specific machine of a tenant using operations
**Parameters:**:
- **machine_id**: Machine ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.1)
**Payload Attributes:**
- **connected:** Type: boolean
- **console_conf_passphrase:** Type: string
- **em_element_id:** Type: string
- **esp_tenant_id:** Type: string
- **hw_id:** Type: string
- **image_version:** Type: string
- **inventory_op:** Type: string
- **machine_state:** Type: string
- **manufacture_id:** Type: string
- **model_name:** Type: string
- **ordering_info:** Type: string
- **pki_op:** - **renew_state:** Type: string
- **ship_state:** Type: string
- **sl_no:** Type: string
- **suspend_state:** Type: string
- **tenant_id:** Type: string
- **token:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/machines/{}/operations".format(api_version,
tenant_id,
machine_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_networkpolicylocalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new Network Policy local prefix.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/networkpolicylocalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_permissions(self, data, tenant_id=None, api_version="v2.0"):
"""
Add a custom permission
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **allowed_after_ms:** Type: integer
- **allowed_before_ms:** Type: integer
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **disallow_permission:** Type: boolean
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **region:** Type: string
- **tenant_id:** Type: string
- **value:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/permissions".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_prefixfilters_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query security prefix filter for NB API.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **filters:**
- **type:** Type: string
- **prefix_filter_id:** Type: string
- **site_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prefixfilters/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_prioritypolicylocalprefixes(self, data, tenant_id=None, api_version="v2.0"):
"""
Create a new Priority Policy local prefix.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **description:** Type: string
- **name:** Type: string
- **tags:** [Type: string]
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/prioritypolicylocalprefixes".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def tenant_waninterfaces_query(self, data, tenant_id=None, api_version="v2.7"):
"""
Query db for Site WAN interfaces that match query parameters
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.7)
**Payload Attributes:**
- **bfd_mode:** Type: string
- **bw_config_mode:** Type: string
- **bwc_enabled:** Type: boolean
- **cost:** Type: integer
- **description:** Type: string
- **label_id:** Type: string
- **link_bw_down:** Type: number
- **link_bw_up:** Type: number
- **lqm_config:**
- **hub_site_ids:** [Type: string]
- **inter_packet_gap:** Type: integer
- **statistic:** Type: string
- **lqm_enabled:** Type: boolean
- **name:** Type: string
- **network_id:** Type: string
- **site_id:** Type: string
- **tags:** [Type: string]
- **type:** Type: string
- **use_for_application_reachability_probes:** Type: boolean
- **use_for_controller_connections:** Type: boolean
- **use_lqm_for_non_hub_paths:** Type: boolean
- **vpnlink_configuration:**
- **keep_alive_failure_count:** Type: integer
- **keep_alive_interval:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/waninterfaces/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def toolkitsessions_query(self, data, tenant_id=None, api_version="v2.0"):
"""
POST Toolkitsessions_Query API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/toolkitsessions/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def topology(self, data, tenant_id=None, api_version="v3.3"):
"""
POST Topology API Function
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v3.3)
**Payload Attributes:**
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/topology".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def upgrade_status_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Machine Upgrade Status
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **dest_page:** Type: integer
- **getDeleted:** Type: boolean
- **last_query_ts:** Type: integer
- **limit:** Type: integer
- **next_query:** Type: object
- **query_params:** Type: object
- **retrieved_fields:** [Type: string]
- **retrieved_fields_mask:** Type: boolean
- **sort_params:** Type: object
- **total_count:** Type: integer
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/upgrade_status/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def users(self, data, tenant_id=None, api_version="v2.0"):
"""
Create an user identity.
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **disabled:** Type: boolean
- **disabled_reason:** Type: string
- **first_name:** Type: string
- **inactive:** Type: boolean
- **inactive_reason:** Type: string
- **last_name:** Type: string
- **middle_name:** Type: string
- **region:** Type: string
- **tenant_id:** Type: string
- **user_dn:** Type: string
- **user_fqn:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/users".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def vff_token_query(self, data, tenant_id=None, api_version="v2.0"):
"""
Query Tenant Vff License Tokens
**Parameters:**:
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **ion_key:** Type: string
- **is_expired:** Type: boolean
- **is_multiuse:** Type: boolean
- **is_revoked:** Type: boolean
- **is_used:** Type: boolean
- **secret_key:** Type: string
- **valid_till_secs:** Type: integer
- **vfflicense_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/vfflicenses/tokens/query".format(api_version,
tenant_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def vfflicense_tokens(self, vfflicense_id, data, tenant_id=None, api_version="v2.0"):
"""
Create Tenant Vff License Token
**Parameters:**:
- **vfflicense_id**: Virtual Form Factor License ID
- **data**: Dictionary containing data to POST as JSON
- **tenant_id**: Tenant ID
- **api_version**: API version to use (default v2.0)
**Payload Attributes:**
- **ion_key:** Type: string
- **is_expired:** Type: boolean
- **is_multiuse:** Type: boolean
- **is_revoked:** Type: boolean
- **is_used:** Type: boolean
- **secret_key:** Type: string
- **valid_till_secs:** Type: integer
- **vfflicense_id:** Type: string
**Returns:** requests.Response object extended with cgx_status and cgx_content properties.
"""
if tenant_id is None and self._parent_class.tenant_id:
# Pull tenant_id from parent namespace cache.
tenant_id = self._parent_class.tenant_id
elif not tenant_id:
# No value for tenant_id.
raise TypeError("tenant_id is required but not set or cached.")
cur_ctlr = self._parent_class.controller
url = str(cur_ctlr) + "/{}/api/tenants/{}/vfflicenses/{}/tokens".format(api_version,
tenant_id,
vfflicense_id)
api_logger.debug("URL = %s", url)
return self._parent_class.rest_call(url, "post", data=data)
def vpnlinks_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query the database for VPNLinks matching the supplied parameters. (POST)

      **data**: Dictionary sent as the JSON request body. Standard query
        attributes include dest_page, getDeleted, last_query_ts, limit,
        next_query, query_params, retrieved_fields, retrieved_fields_mask,
        sort_params and total_count.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.0").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/vpnlinks/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def waninterfacelabels_query(self, data, tenant_id=None, api_version="v2.4"):
    """
    Query the database for site WAN interface labels matching the supplied
    parameters. (POST)

      **data**: Dictionary sent as the JSON request body. Recognized
        attributes include bwc_enabled, description, label, lqm_enabled,
        name, tags, use_for_application_reachability_probes,
        use_for_controller_connections, use_lqm_for_non_hub_paths and
        vpnlink_configuration (keep_alive_failure_count,
        keep_alive_interval).
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.4").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/waninterfacelabels/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def waninterfaces(self, site_id, data, tenant_id=None, api_version="v2.7"):
    """
    Create a new Site WAN interface. (POST)

      **site_id**: Site ID to create the WAN interface under.
      **data**: Dictionary sent as the JSON request body. Recognized
        attributes include bfd_mode, bw_config_mode, bwc_enabled, cost,
        description, label_id, link_bw_down, link_bw_up, lqm_config
        (hub_site_ids, inter_packet_gap, statistic), lqm_enabled, name,
        network_id, tags, type, use_for_application_reachability_probes,
        use_for_controller_connections, use_lqm_for_non_hub_paths and
        vpnlink_configuration (keep_alive_failure_count,
        keep_alive_interval).
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.7").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/sites/{}/waninterfaces".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def waninterfaces_correlationevents_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query correlation events for WAN interfaces. (POST)

      **data**: Dictionary of query parameters, sent as the JSON request body.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.0").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/waninterfaces/correlationevents/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def waninterfaces_query(self, site_id, data, tenant_id=None, api_version="v2.5"):
    """
    Query the database for Site WAN interfaces matching the supplied
    parameters. (POST)

      **site_id**: Site ID to scope the query to.
      **data**: Dictionary sent as the JSON request body. Standard query
        attributes include dest_page, getDeleted, last_query_ts, limit,
        next_query, query_params, retrieved_fields, retrieved_fields_mask,
        sort_params and total_count.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.5").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/sites/{}/waninterfaces/query".format(api_version, tenant_id, site_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def wannetworks(self, data, tenant_id=None, api_version="v2.1"):
    """
    Create a new WAN network. (POST)

      **data**: Dictionary sent as the JSON request body. Recognized
        attributes include description, name, provider_as_numbers, tags
        and type.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.1").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/wannetworks".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def wannetworks_query(self, data, tenant_id=None, api_version="v2.1"):
    """
    Query the database for WAN networks matching the supplied parameters.
    (POST)

      **data**: Dictionary sent as the JSON request body. Standard query
        attributes include dest_page, getDeleted, last_query_ts, limit,
        next_query, query_params, retrieved_fields, retrieved_fields_mask,
        sort_params and total_count.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.1").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/wannetworks/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def wanoverlays(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create a new app/WAN context (WAN overlay). (POST)

      **data**: Dictionary sent as the JSON request body. Recognized
        attributes include description, name and vni.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.0").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/wanoverlays".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ws_extensions(self, data, tenant_id=None, api_version="v2.0"):
    """
    Create a WebSocket extension. (POST)

      **data**: Dictionary of extension attributes, sent as the JSON request
        body.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.0").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/ws/extensions".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
def ws_extensions_query(self, data, tenant_id=None, api_version="v2.0"):
    """
    Query WebSocket extensions matching the supplied parameters. (POST)

      **data**: Dictionary of query parameters, sent as the JSON request body.
      **tenant_id**: Optional Tenant ID; when omitted, the value cached on the
        parent namespace is used.
      **api_version**: API version to use (default "v2.0").

    Returns a requests.Response object extended with cgx_status and
    cgx_content properties. Raises TypeError when no tenant_id is available
    either directly or from the parent cache.
    """
    if tenant_id is None:
        # No explicit tenant given - fall back to the parent namespace cache.
        tenant_id = self._parent_class.tenant_id
    if not tenant_id:
        raise TypeError("tenant_id is required but not set or cached.")
    controller = self._parent_class.controller
    url = str(controller) + "/{}/api/tenants/{}/ws/extensions/query".format(api_version, tenant_id)
    api_logger.debug("URL = %s", url)
    return self._parent_class.rest_call(url, "post", data=data)
# Public Digest compatibility maps below, mapping what is available via
# /v2.0/permissions API versus what is used in this SDK.
# Each assignment binds a legacy method name to its current implementation,
# so callers written against older SDK releases continue to work unchanged.
aggregates_monitor = monitor_aggregates
""" Backwards-compatibility alias of `aggregates_monitor` to `monitor_aggregates`"""
anynetlinks_t = tenant_anynetlinks
""" Backwards-compatibility alias of `anynetlinks_t` to `tenant_anynetlinks`"""
bulk_metrics_monitor = monitor_bulk_metrics
""" Backwards-compatibility alias of `bulk_metrics_monitor` to `monitor_bulk_metrics`"""
cellular_metrics_monitor = monitor_cellular_metrics
""" Backwards-compatibility alias of `cellular_metrics_monitor` to `monitor_cellular_metrics`"""
change_password = password_change
""" Backwards-compatibility alias of `change_password` to `password_change`"""
configs_sdwanapps = sdwanapps_configs
""" Backwards-compatibility alias of `configs_sdwanapps` to `sdwanapps_configs`"""
extensions_i = element_extensions
""" Backwards-compatibility alias of `extensions_i` to `element_extensions`"""
extensions_s = site_extensions
""" Backwards-compatibility alias of `extensions_s` to `site_extensions`"""
extensions_ws = ws_extensions
""" Backwards-compatibility alias of `extensions_ws` to `ws_extensions`"""
flows_monitor = monitor_flows
""" Backwards-compatibility alias of `flows_monitor` to `monitor_flows`"""
forgot_password_login_t = tenant_forgot_password_login
""" Backwards-compatibility alias of `forgot_password_login_t` to `tenant_forgot_password_login`"""
insights_monitor = monitor_insights
""" Backwards-compatibility alias of `insights_monitor` to `monitor_insights`"""
insightslist_monitor = monitor_insightslist
""" Backwards-compatibility alias of `insightslist_monitor` to `monitor_insightslist`"""
ipfixlocalprefixes_s = site_ipfixlocalprefixes
""" Backwards-compatibility alias of `ipfixlocalprefixes_s` to `site_ipfixlocalprefixes`"""
ipfixlocalprefixes_t = tenant_ipfixlocalprefixes
""" Backwards-compatibility alias of `ipfixlocalprefixes_t` to `tenant_ipfixlocalprefixes`"""
login_clients = clients_login
""" Backwards-compatibility alias of `login_clients` to `clients_login`"""
logout_clients = clients_logout
""" Backwards-compatibility alias of `logout_clients` to `clients_logout`"""
lqm_point_metrics_monitor = monitor_lqm_point_metrics
""" Backwards-compatibility alias of `lqm_point_metrics_monitor` to `monitor_lqm_point_metrics`"""
metrics_monitor = monitor_metrics
""" Backwards-compatibility alias of `metrics_monitor` to `monitor_metrics`"""
natlocalprefixes_s = site_natlocalprefixes
""" Backwards-compatibility alias of `natlocalprefixes_s` to `site_natlocalprefixes`"""
natlocalprefixes_t = natlocalprefixes
""" Backwards-compatibility alias of `natlocalprefixes_t` to `natlocalprefixes`"""
network_point_metrics_monitor = monitor_network_point_metrics
""" Backwards-compatibility alias of `network_point_metrics_monitor` to `monitor_network_point_metrics`"""
networkpolicylocalprefixes_s = site_networkpolicylocalprefixes
""" Backwards-compatibility alias of `networkpolicylocalprefixes_s` to `site_networkpolicylocalprefixes`"""
networkpolicylocalprefixes_t = tenant_networkpolicylocalprefixes
""" Backwards-compatibility alias of `networkpolicylocalprefixes_t` to `tenant_networkpolicylocalprefixes`"""
ngfwsecuritypolicylocalprefixes_s = site_ngfwsecuritypolicylocalprefixes
""" Backwards-compatibility alias of `ngfwsecuritypolicylocalprefixes_s` to `site_ngfwsecuritypolicylocalprefixes`"""
ngfwsecuritypolicylocalprefixes_t = ngfwsecuritypolicylocalprefixes
""" Backwards-compatibility alias of `ngfwsecuritypolicylocalprefixes_t` to `ngfwsecuritypolicylocalprefixes`"""
ntp_templates = templates_ntp
""" Backwards-compatibility alias of `ntp_templates` to `templates_ntp`"""
object_stats_monitor = monitor_object_stats
""" Backwards-compatibility alias of `object_stats_monitor` to `monitor_object_stats`"""
operations_e = tenant_element_operations
""" Backwards-compatibility alias of `operations_e` to `tenant_element_operations`"""
operations_t = tenant_machine_operations
""" Backwards-compatibility alias of `operations_t` to `tenant_machine_operations`"""
ops_bgppeers = bgppeers_operations
""" Backwards-compatibility alias of `ops_bgppeers` to `bgppeers_operations`"""
ops_spokeclusters = spokeclusters_ops
""" Backwards-compatibility alias of `ops_spokeclusters` to `spokeclusters_ops`"""
overrides_appdefs = appdefs_overrides
""" Backwards-compatibility alias of `overrides_appdefs` to `appdefs_overrides`"""
permissions_t = tenant_permissions
""" Backwards-compatibility alias of `permissions_t` to `tenant_permissions`"""
prioritypolicylocalprefixes_s = site_prioritypolicylocalprefixes
""" Backwards-compatibility alias of `prioritypolicylocalprefixes_s` to `site_prioritypolicylocalprefixes`"""
prioritypolicylocalprefixes_t = tenant_prioritypolicylocalprefixes
""" Backwards-compatibility alias of `prioritypolicylocalprefixes_t` to `tenant_prioritypolicylocalprefixes`"""
query_apnprofiles = apnprofiles_query
""" Backwards-compatibility alias of `query_apnprofiles` to `apnprofiles_query`"""
query_appdefs = appdefs_query
""" Backwards-compatibility alias of `query_appdefs` to `appdefs_query`"""
query_auditlog = auditlog_query
""" Backwards-compatibility alias of `query_auditlog` to `auditlog_query`"""
query_bgppeers = bgppeers_query
""" Backwards-compatibility alias of `query_bgppeers` to `bgppeers_query`"""
query_bgppeers_t = tenant_bgppeers_query
""" Backwards-compatibility alias of `query_bgppeers_t` to `tenant_bgppeers_query`"""
query_bulk_config_state_e = element_bulk_config_state_query
""" Backwards-compatibility alias of `query_bulk_config_state_e` to `element_bulk_config_state_query`"""
query_bulk_config_state_networks = networks_bulk_config_state_query
""" Backwards-compatibility alias of `query_bulk_config_state_networks` to `networks_bulk_config_state_query`"""
query_bulk_config_state_policysets = policysets_bulk_config_state_query
""" Backwards-compatibility alias of `query_bulk_config_state_policysets` to `policysets_bulk_config_state_query`"""
query_bulk_config_state_s = site_bulk_config_state_query
""" Backwards-compatibility alias of `query_bulk_config_state_s` to `site_bulk_config_state_query`"""
query_cellular_modules = cellular_modules_query
""" Backwards-compatibility alias of `query_cellular_modules` to `cellular_modules_query`"""
query_clients = clients_query
""" Backwards-compatibility alias of `query_clients` to `clients_query`"""
query_correlationevents_anynetlinks = anynetlinks_correlationevents_query
""" Backwards-compatibility alias of `query_correlationevents_anynetlinks` to `anynetlinks_correlationevents_query`"""
query_correlationevents_e = element_correlationevents_query
""" Backwards-compatibility alias of `query_correlationevents_e` to `element_correlationevents_query`"""
query_correlationevents_interfaces = interfaces_correlationevents_query
""" Backwards-compatibility alias of `query_correlationevents_interfaces` to `interfaces_correlationevents_query`"""
query_correlationevents_s = site_correlationevents_query
""" Backwards-compatibility alias of `query_correlationevents_s` to `site_correlationevents_query`"""
query_correlationevents_waninterfaces = waninterfaces_correlationevents_query
""" Backwards-compatibility alias of `query_correlationevents_waninterfaces` to `waninterfaces_correlationevents_query`"""
query_current_status_software = software_current_status_query
""" Backwards-compatibility alias of `query_current_status_software` to `software_current_status_query`"""
query_demstatus = demstatus_query
""" Backwards-compatibility alias of `query_demstatus` to `demstatus_query`"""
query_dnsserviceprofiles = dnsserviceprofiles_query
""" Backwards-compatibility alias of `query_dnsserviceprofiles` to `dnsserviceprofiles_query`"""
query_dnsserviceroles = dnsserviceroles_query
""" Backwards-compatibility alias of `query_dnsserviceroles` to `dnsserviceroles_query`"""
query_dnsservices = dnsservices_query
""" Backwards-compatibility alias of `query_dnsservices` to `dnsservices_query`"""
query_e = element_query
""" Backwards-compatibility alias of `query_e` to `element_query`"""
query_elementsecurityzones = elementsecurityzones_query
""" Backwards-compatibility alias of `query_elementsecurityzones` to `elementsecurityzones_query`"""
query_eventcorrelationpolicyrules = eventcorrelationpolicyrules_query
""" Backwards-compatibility alias of `query_eventcorrelationpolicyrules` to `eventcorrelationpolicyrules_query`"""
query_eventcorrelationpolicysets = eventcorrelationpolicysets_query
""" Backwards-compatibility alias of `query_eventcorrelationpolicysets` to `eventcorrelationpolicysets_query`"""
query_events = events_query
""" Backwards-compatibility alias of `query_events` to `events_query`"""
query_extensions_i = element_extensions_query
""" Backwards-compatibility alias of `query_extensions_i` to `element_extensions_query`"""
query_extensions_s = site_extensions_query
""" Backwards-compatibility alias of `query_extensions_s` to `site_extensions_query`"""
query_extensions_t = tenant_extensions_query
""" Backwards-compatibility alias of `query_extensions_t` to `tenant_extensions_query`"""
query_extensions_ws = ws_extensions_query
""" Backwards-compatibility alias of `query_extensions_ws` to `ws_extensions_query`"""
query_globalprefixfilters = globalprefixfilters_query
""" Backwards-compatibility alias of `query_globalprefixfilters` to `globalprefixfilters_query`"""
query_interfaces = interfaces_query
""" Backwards-compatibility alias of `query_interfaces` to `interfaces_query`"""
query_ipfix = ipfix_query
""" Backwards-compatibility alias of `query_ipfix` to `ipfix_query`"""
query_ipfixcollectorcontexts = ipfixcollectorcontexts_query
""" Backwards-compatibility alias of `query_ipfixcollectorcontexts` to `ipfixcollectorcontexts_query`"""
query_ipfixfiltercontexts = ipfixfiltercontexts_query
""" Backwards-compatibility alias of `query_ipfixfiltercontexts` to `ipfixfiltercontexts_query`"""
query_ipfixglobalprefixes = ipfixglobalprefixes_query
""" Backwards-compatibility alias of `query_ipfixglobalprefixes` to `ipfixglobalprefixes_query`"""
query_ipfixlocalprefixes = ipfixlocalprefixes_query
""" Backwards-compatibility alias of `query_ipfixlocalprefixes` to `ipfixlocalprefixes_query`"""
query_ipfixprofiles = ipfixprofiles_query
""" Backwards-compatibility alias of `query_ipfixprofiles` to `ipfixprofiles_query`"""
query_ipfixtemplates = ipfixtemplates_query
""" Backwards-compatibility alias of `query_ipfixtemplates` to `ipfixtemplates_query`"""
query_ipsecprofiles = ipsecprofiles_query
""" Backwards-compatibility alias of `query_ipsecprofiles` to `ipsecprofiles_query`"""
query_lannetworks = site_lannetworks_query
""" Backwards-compatibility alias of `query_lannetworks` to `site_lannetworks_query`"""
query_lannetworks_t = lannetworks_query
""" Backwards-compatibility alias of `query_lannetworks_t` to `lannetworks_query`"""
query_localprefixfilters = localprefixfilters_query
""" Backwards-compatibility alias of `query_localprefixfilters` to `localprefixfilters_query`"""
query_m = machines_query
""" Backwards-compatibility alias of `query_m` to `machines_query`"""
query_machine_upgrade = machine_upgrade_query
""" Backwards-compatibility alias of `query_machine_upgrade` to `machine_upgrade_query`"""
query_machines_c = clients_machines_query
""" Backwards-compatibility alias of `query_machines_c` to `clients_machines_query`"""
query_multicastroutes = multicastroutes_query
""" Backwards-compatibility alias of `query_multicastroutes` to `multicastroutes_query`"""
query_multicastrpconfigs = multicastrpconfigs_query
""" Backwards-compatibility alias of `query_multicastrpconfigs` to `multicastrpconfigs_query`"""
query_multicaststatus = multicaststatus_query
""" Backwards-compatibility alias of `query_multicaststatus` to `multicaststatus_query`"""
query_natglobalprefixes = natglobalprefixes_query
""" Backwards-compatibility alias of `query_natglobalprefixes` to `natglobalprefixes_query`"""
query_natlocalprefixes = natlocalprefixes_query
""" Backwards-compatibility alias of `query_natlocalprefixes` to `natlocalprefixes_query`"""
query_natpolicypools = natpolicypools_query
""" Backwards-compatibility alias of `query_natpolicypools` to `natpolicypools_query`"""
query_natpolicyrules = natpolicyrules_query
""" Backwards-compatibility alias of `query_natpolicyrules` to `natpolicyrules_query`"""
query_natpolicysets = natpolicysets_query
""" Backwards-compatibility alias of `query_natpolicysets` to `natpolicysets_query`"""
query_natpolicysetstacks = natpolicysetstacks_query
""" Backwards-compatibility alias of `query_natpolicysetstacks` to `natpolicysetstacks_query`"""
query_natzones = natzones_query
""" Backwards-compatibility alias of `query_natzones` to `natzones_query`"""
query_networkcontexts = networkcontexts_query
""" Backwards-compatibility alias of `query_networkcontexts` to `networkcontexts_query`"""
query_networkpolicyglobalprefixes = networkpolicyglobalprefixes_query
""" Backwards-compatibility alias of `query_networkpolicyglobalprefixes` to `networkpolicyglobalprefixes_query`"""
query_networkpolicylocalprefixes = networkpolicylocalprefixes_query
""" Backwards-compatibility alias of `query_networkpolicylocalprefixes` to `networkpolicylocalprefixes_query`"""
query_networkpolicyrules = networkpolicyrules_query
""" Backwards-compatibility alias of `query_networkpolicyrules` to `networkpolicyrules_query`"""
query_networkpolicysets = networkpolicysets_query
""" Backwards-compatibility alias of `query_networkpolicysets` to `networkpolicysets_query`"""
query_networkpolicysetstacks = networkpolicysetstacks_query
""" Backwards-compatibility alias of `query_networkpolicysetstacks` to `networkpolicysetstacks_query`"""
query_ngfwsecuritypolicyglobalprefixes = ngfwsecuritypolicyglobalprefixes_query
""" Backwards-compatibility alias of `query_ngfwsecuritypolicyglobalprefixes` to `ngfwsecuritypolicyglobalprefixes_query`"""
query_ngfwsecuritypolicylocalprefixes = ngfwsecuritypolicylocalprefixes_query
""" Backwards-compatibility alias of `query_ngfwsecuritypolicylocalprefixes` to `ngfwsecuritypolicylocalprefixes_query`"""
query_ngfwsecuritypolicyrules = ngfwsecuritypolicyrules_query
""" Backwards-compatibility alias of `query_ngfwsecuritypolicyrules` to `ngfwsecuritypolicyrules_query`"""
query_ngfwsecuritypolicysets = ngfwsecuritypolicysets_query
""" Backwards-compatibility alias of `query_ngfwsecuritypolicysets` to `ngfwsecuritypolicysets_query`"""
query_ngfwsecuritypolicysetstacks = ngfwsecuritypolicysetstacks_query
""" Backwards-compatibility alias of `query_ngfwsecuritypolicysetstacks` to `ngfwsecuritypolicysetstacks_query`"""
query_pathgroups = pathgroups_query
""" Backwards-compatibility alias of `query_pathgroups` to `pathgroups_query`"""
query_policyrules = policyrules_query
""" Backwards-compatibility alias of `query_policyrules` to `policyrules_query`"""
query_policysets = policysets_query
""" Backwards-compatibility alias of `query_policysets` to `policysets_query`"""
query_prefixes = prefixes_query
""" Backwards-compatibility alias of `query_prefixes` to `prefixes_query`"""
query_prefixfilters = prefixfilters_query
""" Backwards-compatibility alias of `query_prefixfilters` to `prefixfilters_query`"""
query_prefixfilters_t = tenant_prefixfilters_query
""" Backwards-compatibility alias of `query_prefixfilters_t` to `tenant_prefixfilters_query`"""
query_prioritypolicyglobalprefixes = prioritypolicyglobalprefixes_query
""" Backwards-compatibility alias of `query_prioritypolicyglobalprefixes` to `prioritypolicyglobalprefixes_query`"""
query_prioritypolicylocalprefixes = prioritypolicylocalprefixes_query
""" Backwards-compatibility alias of `query_prioritypolicylocalprefixes` to `prioritypolicylocalprefixes_query`"""
query_prioritypolicyrules = prioritypolicyrules_query
""" Backwards-compatibility alias of `query_prioritypolicyrules` to `prioritypolicyrules_query`"""
query_prioritypolicysets = prioritypolicysets_query
""" Backwards-compatibility alias of `query_prioritypolicysets` to `prioritypolicysets_query`"""
query_prioritypolicysetstacks = prioritypolicysetstacks_query
""" Backwards-compatibility alias of `query_prioritypolicysetstacks` to `prioritypolicysetstacks_query`"""
query_prismaaccess_configs = prismaaccess_configs_query
""" Backwards-compatibility alias of `query_prismaaccess_configs` to `prismaaccess_configs_query`"""
query_reports = reports_query
""" Backwards-compatibility alias of `query_reports` to `reports_query`"""
query_reportsdir = reportsdir_query
""" Backwards-compatibility alias of `query_reportsdir` to `reportsdir_query`"""
query_routing_aspathaccesslists = routing_aspathaccesslists_query
""" Backwards-compatibility alias of `query_routing_aspathaccesslists` to `routing_aspathaccesslists_query`"""
query_routing_ipcommunitylists = routing_ipcommunitylists_query
""" Backwards-compatibility alias of `query_routing_ipcommunitylists` to `routing_ipcommunitylists_query`"""
query_routing_prefixlists = routing_prefixlists_query
""" Backwards-compatibility alias of `query_routing_prefixlists` to `routing_prefixlists_query`"""
query_routing_routemaps = routing_routemaps_query
""" Backwards-compatibility alias of `query_routing_routemaps` to `routing_routemaps_query`"""
query_s = site_query
""" Backwards-compatibility alias of `query_s` to `site_query`"""
query_securitypolicyrules = securitypolicyrules_query
""" Backwards-compatibility alias of `query_securitypolicyrules` to `securitypolicyrules_query`"""
query_securitypolicysets = securitypolicysets_query
""" Backwards-compatibility alias of `query_securitypolicysets` to `securitypolicysets_query`"""
query_securityzones = securityzones_query
""" Backwards-compatibility alias of `query_securityzones` to `securityzones_query`"""
query_servicebindingmaps = servicebindingmaps_query
""" Backwards-compatibility alias of `query_servicebindingmaps` to `servicebindingmaps_query`"""
query_serviceendpoints = serviceendpoints_query
""" Backwards-compatibility alias of `query_serviceendpoints` to `serviceendpoints_query`"""
query_servicelabels = servicelabels_query
""" Backwards-compatibility alias of `query_servicelabels` to `servicelabels_query`"""
query_sitesecurityzones = sitesecurityzones_query
""" Backwards-compatibility alias of `query_sitesecurityzones` to `sitesecurityzones_query`"""
# NOTE(review): `query_sitesecurityzones_t` binds to the same target as
# `query_sitesecurityzones` above; a tenant-level variant may have been
# intended - confirm against the original SDK's permissions map.
query_sitesecurityzones_t = sitesecurityzones_query
""" Backwards-compatibility alias of `query_sitesecurityzones_t` to `sitesecurityzones_query`"""
query_softwarehistory = softwarehistory_query
""" Backwards-compatibility alias of `query_softwarehistory` to `softwarehistory_query`"""
query_spokeclusters = spokeclusters_query
""" Backwards-compatibility alias of `query_spokeclusters` to `spokeclusters_query`"""
# NOTE(review): `query_spokeclusters_t` also binds to `spokeclusters_query`;
# verify a tenant-level variant was not intended.
query_spokeclusters_t = spokeclusters_query
""" Backwards-compatibility alias of `query_spokeclusters_t` to `spokeclusters_query`"""
query_status = status_query
""" Backwards-compatibility alias of `query_status` to `status_query`"""
query_status_cellular_module_firmware = cellular_module_firmware_status_query
""" Backwards-compatibility alias of `query_status_cellular_module_firmware` to `cellular_module_firmware_status_query`"""
query_status_software = software_status_query
""" Backwards-compatibility alias of `query_status_software` to `software_status_query`"""
query_tokens_vfflicenses = vff_token_query
""" Backwards-compatibility alias of `query_tokens_vfflicenses` to `vff_token_query`"""
query_toolkitsessions = toolkitsessions_query
""" Backwards-compatibility alias of `query_toolkitsessions` to `toolkitsessions_query`"""
query_upgrade_status = upgrade_status_query
""" Backwards-compatibility alias of `query_upgrade_status` to `upgrade_status_query`"""
query_vpnlinks = vpnlinks_query
""" Backwards-compatibility alias of `query_vpnlinks` to `vpnlinks_query`"""
query_waninterfacelabels = waninterfacelabels_query
""" Backwards-compatibility alias of `query_waninterfacelabels` to `waninterfacelabels_query`"""
query_waninterfaces = waninterfaces_query
""" Backwards-compatibility alias of `query_waninterfaces` to `waninterfaces_query`"""
query_waninterfaces_t = tenant_waninterfaces_query
""" Backwards-compatibility alias of `query_waninterfaces_t` to `tenant_waninterfaces_query`"""
query_wannetworks = wannetworks_query
""" Backwards-compatibility alias of `query_wannetworks` to `wannetworks_query`"""
reallocate_clients = clients_reallocate
""" Backwards-compatibility alias of `reallocate_clients` to `clients_reallocate`"""
sys_metrics_monitor = monitor_sys_metrics
""" Backwards-compatibility alias of `sys_metrics_monitor` to `monitor_sys_metrics`"""
sys_point_metrics_monitor = monitor_sys_point_metrics
""" Backwards-compatibility alias of `sys_point_metrics_monitor` to `monitor_sys_point_metrics`"""
tokens_vfflicenses = vfflicense_tokens
""" Backwards-compatibility alias of `tokens_vfflicenses` to `vfflicense_tokens`"""
topn_cellular_metrics_monitor = monitor_cellular_metrics_topn
""" Backwards-compatibility alias of `topn_cellular_metrics_monitor` to `monitor_cellular_metrics_topn`"""
topn_monitor = monitor_topn
""" Backwards-compatibility alias of `topn_monitor` to `monitor_topn`"""
topn_sys_metrics_monitor = monitor_sys_metrics_topn
""" Backwards-compatibility alias of `topn_sys_metrics_monitor` to `monitor_sys_metrics_topn`"""
# Plural-form aliases for the singular element/site query helpers.
elements_bulk_config_state_query = element_bulk_config_state_query
""" Backwards-compatibility alias of `elements_bulk_config_state_query` to `element_bulk_config_state_query`"""
elements_query = element_query
""" Backwards-compatibility alias of `elements_query` to `element_query`"""
sites_bulk_config_state_query = site_bulk_config_state_query
""" Backwards-compatibility alias of `sites_bulk_config_state_query` to `site_bulk_config_state_query`"""
sites_query = site_query
""" Backwards-compatibility alias of `sites_query` to `site_query`"""
|
# coding: utf-8
'''
-----------------------------------------------------------------------------
Copyright 2016 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------------------------------
==================================================
RangeRingUtils.py
--------------------------------------------------
requirements: ArcGIS 10.3.1+, ArcGIS Pro 1.2+
author: ArcGIS Solutions
company: Esri
==================================================
description: Utilities to create range ring features
==================================================
history:
3/29/2016 - mf - design & original coding
==================================================
'''
import os
import sys
import traceback
import arcpy
acceptableDistanceUnits = ['METERS', 'KILOMETERS',
'MILES', 'NAUTICAL_MILES',
'FEET', 'US_SURVEY_FEET']
srDefault = arcpy.SpatialReference(54032) # World_Azimuthal_Equidistant
def rangeRingsFromList(centerFC, rangeList, distanceUnits, numRadials, outputRingFeatures, outputRadialFeatures, sr):
    ''' Make range ring features from a center, and list of distances.

    centerFC - point features marking the ring centers
    rangeList - list of ring distances, expressed in distanceUnits
    distanceUnits - linear unit name (see acceptableDistanceUnits)
    numRadials - number of radial lines per center (0 to skip radials)
    outputRingFeatures - output feature class for rings (required)
    outputRadialFeatures - output feature class for radials (may be None)
    sr - spatial reference; falls back to srDefault when not set

    Returns [ringFeatures, radialFeatures]; either element may be None.
    '''
    try:
        # `is None` instead of `== None` (fixed); empty rangeList is also invalid
        if (centerFC is None) or (rangeList is None) or (len(rangeList) == 0) \
                or (outputRingFeatures is None):
            arcpy.AddError("Bad parameters supplied to rangeRingsFromList")
            return [None, None]
        if not sr:
            msg = r"Using default spatial reference: " + str(srDefault.name)
            arcpy.AddWarning(msg)
            print(msg)
            sr = srDefault
        # FIX: warn about a negative radial count up front; previously this was
        # only checked after the radial step had already been skipped.
        if numRadials < 0:
            arcpy.AddWarning("Number of radials must be positive")
        rm = RingMaker(centerFC, rangeList, distanceUnits, sr)
        # Create Rings...
        numCenterPoints = arcpy.GetCount_management(centerFC).getOutput(0)
        if int(numCenterPoints) < 1:
            arcpy.AddError("At least one input center point is required")
            return [None, None]
        numRingsPerCenter = len(rangeList)
        totalNumRings = int(numCenterPoints) * int(numRingsPerCenter)
        totalNumRadials = int(numCenterPoints) * int(numRadials)
        arcpy.AddMessage("Making rings " + str(totalNumRings) + " (" + str(numRingsPerCenter) + " for " + str(numCenterPoints) + " centers)...")
        rm.makeRingsFromDistances()
        outRings = rm.saveRingsAsFeatures(outputRingFeatures)
        # Create Radials (only when an output was supplied and count is positive)...
        arcpy.AddMessage("Making radials " + str(totalNumRadials) + " (" + str(numRadials) + " for " + str(numCenterPoints) + " centers)...")
        if (outputRadialFeatures is not None) and (numRadials > 0):
            rm.makeRadials(numRadials)
            outRadials = rm.saveRadialsAsFeatures(outputRadialFeatures)
        else:
            outRadials = None
        return [outRings, outRadials]
    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)
    except:
        # Intentionally broad: report any Python error through arcpy so the
        # script tool surfaces it. Get the traceback object...
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + \
                "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)
def rangeRingsFromMinMax(centerFC, rangeMin, rangeMax, distanceUnits, numRadials, outputRingFeatures, outputRadialFeatures, sr):
    ''' Make range ring features from only two distances, a minimum and a maximum '''
    # Valid iff 0 <= rangeMin <= rangeMax and rangeMax > 0 (De Morgan of the
    # original rejection test).
    rangeIsValid = (rangeMin >= 0.0) and (rangeMax > 0.0) and (rangeMin <= rangeMax)
    if not rangeIsValid:
        arcpy.AddError("Range parameters are not valid")
        return [None, None]
    distances = [min(rangeMin, rangeMax), max(rangeMin, rangeMax)]
    return rangeRingsFromList(centerFC, distances, distanceUnits, numRadials, outputRingFeatures, outputRadialFeatures, sr)
def rangeRingsFromInterval(centerFC, numRings, distBetween, distanceUnits, numRadials, outputRingFeatures, outputRadialFeatures, sr):
    ''' Classic range rings from number of rings, and distance between rings '''
    if distBetween <= 0.0:
        arcpy.AddError("Distance between rings must be > 0")
        return [None, None]
    # Rings at 1*d, 2*d, ..., numRings*d
    distances = [distBetween * step for step in range(1, numRings + 1)]
    return rangeRingsFromList(centerFC, distances, distanceUnits, numRadials, outputRingFeatures, outputRadialFeatures, sr)
class RingMaker:
    '''
    Core class for making range rings and radials.

    Center points are projected into the working spatial reference on
    construction; rings are built via TableToEllipse and radials via
    BearingDistanceToLine, both as geodesic constructions.
    '''
    def __init__(self, center, inputRangeList, distanceUnits, sr):
        ''' initialize rings '''
        # intermediate datasets removed in __del__
        self.deleteme = []
        # project center to sr, and keep it as a list of PointGeometry objects
        originalGeom = arcpy.CopyFeatures_management(center, arcpy.Geometry())
        newGeom = []
        for g in originalGeom:
            newGeom.append(g.projectAs(sr))
        self.center = newGeom
        self.rangeList = self._sortList(inputRangeList)
        if distanceUnits in (None, "#", ""):
            # fall back to the linear unit of the spatial reference
            self.distanceUnits = sr.linearUnitName
        else:
            self.distanceUnits = distanceUnits
        # BUG FIX: the original test `not sr == None or not sr == "#" or not
        # sr == ""` is always true (or-of-negations), so the srDefault
        # fallback branch was unreachable.
        if sr not in (None, "#", ""):
            self.sr = sr
        else:
            self.sr = srDefault
        self.ringFeatures = None
        self.radialFeatures = None
        self.ringCount = len(self.rangeList)
        self.ringMin = min(self.rangeList)
        self.ringMax = max(self.rangeList)
    def __del__(self):
        ''' clean up intermediate datasets '''
        for i in self.deleteme:
            if arcpy.Exists(i):
                arcpy.Delete_management(i)
    def _sortList(self, listToSort):
        ''' sort list of distances; None for an empty list '''
        if len(listToSort) == 0:
            print("Empty distance list")
            return None
        return sorted(listToSort)
    def _addFieldsToTable(self, tab, fields):
        ''' add fields from dictionary: {'<fieldname>':'type'} '''
        for f in list(fields.keys()):
            arcpy.AddField_management(tab, f, fields[f])
        return tab
    def _makeTempTable(self, name, fields):
        ''' make a temporary, in_memory table '''
        tab = os.path.join("in_memory", name)
        arcpy.CreateTable_management(os.path.dirname(tab),
                                     os.path.basename(tab))
        self.deleteme.append(tab)
        if fields:
            newtab = self._addFieldsToTable(tab, fields)
        else:
            print("no fields to add")
            newtab = tab
        return newtab
    def makeRingsFromDistances(self):
        ''' make geodesic rings from distance list '''
        # make a table for TableToEllipse
        fields = {'x':'DOUBLE', 'y':'DOUBLE', 'Range':'DOUBLE'}
        inTable = self._makeTempTable("ringTable", fields)
        cursor = arcpy.da.InsertCursor(inTable, ['x', 'y', 'Range'])
        # self.center is a list of PointGeometry
        for i in self.center:
            pt = i.firstPoint
            for r in self.rangeList:
                # TableToEllipse takes full axis lengths, so double the radius
                cursor.insertRow([pt.X, pt.Y, r * 2])
        del cursor
        self.deleteme.append(inTable)
        outFeatures = os.path.join("in_memory", "outRings")
        arcpy.TableToEllipse_management(inTable, outFeatures,
                                        'x', 'y', 'Range', 'Range',
                                        self.distanceUnits,
                                        '#', '#', '#', self.sr)
        self.deleteme.append(outFeatures)
        self.ringFeatures = outFeatures
        # restore the Range attribute to the radius value for reporting
        arcpy.CalculateField_management(outFeatures, "Range", '!Range! / 2.0', 'PYTHON_9.3')
        return outFeatures
    def makeRadials(self, numRadials):
        ''' make geodesic radials from number of radials '''
        segmentAngle = 360.0 / float(numRadials)
        segmentAngleList = []
        a = 0.0
        while a < 360.0:
            segmentAngleList.append(a)
            a += segmentAngle
        fields = {'x':'DOUBLE', 'y':'DOUBLE', 'Bearing':'DOUBLE', 'Range':'DOUBLE'}
        tab = self._makeTempTable("radTable", fields)
        cursor = arcpy.da.InsertCursor(tab, ['x', 'y', 'Bearing', 'Range'])
        for i in self.center:
            pt = i.firstPoint
            for r in segmentAngleList:
                # each radial reaches out to the outermost ring
                cursor.insertRow([pt.X, pt.Y, r, self.ringMax])
        del cursor
        self.deleteme.append(tab)
        outRadialFeatures = os.path.join("in_memory", "outRadials")
        arcpy.BearingDistanceToLine_management(tab, outRadialFeatures, 'x', 'y',
                                               'Range', self.distanceUnits, 'Bearing', "DEGREES",
                                               "GEODESIC", "#", self.sr)
        self.deleteme.append(outRadialFeatures)
        self.radialFeatures = outRadialFeatures
        return outRadialFeatures
    def saveRingsAsFeatures(self, outputFeatureClass):
        ''' save rings to featureclass '''
        arcpy.CopyFeatures_management(self.ringFeatures, outputFeatureClass)
        return outputFeatureClass
    def saveRadialsAsFeatures(self, outputFeatureClass):
        ''' save radials to featureclass '''
        arcpy.CopyFeatures_management(self.radialFeatures, outputFeatureClass)
        return outputFeatureClass
|
cars = 100
space_in_a_car = 4.0
drivers = 30
passengers = 90
cars_not_driver = cars - drivers
cars_driven = drivers
carpool_capacity = cars_driven * space_in_a_car
averger_passergers_per_car = passengers / cars_driven
print('There are', cars, 'cars avaliable.')
|
from utils import load_data, data_to_series_features, get_data_loader, is_minimum, make_cuda
from algorithm import (initialize_weights, individual_to_key,
pop_to_weights, select, reconstruct_population)
from train import train, evaluate
from model import weightedLSTM
import argparse
import numpy as np
from copy import deepcopy
from sklearn.model_selection import train_test_split
def parse_arguments():
    """Build and parse the command-line options for the experiment.

    Returns the populated argparse.Namespace.
    """
    arg_parser = argparse.ArgumentParser(
        description="Specify Params for Experimental Setting")
    # training schedule
    arg_parser.add_argument('--iterations', type=int, default=20,
                            help="Specify the number of evolution iterations")
    arg_parser.add_argument('--batch_size', type=int, default=256,
                            help="Specify batch size")
    arg_parser.add_argument('--initial_epochs', type=int, default=300,
                            help="Specify the number of epochs for initial training")
    arg_parser.add_argument('--num_epochs', type=int, default=20,
                            help="Specify the number of epochs for competitive search")
    arg_parser.add_argument('--log_step', type=int, default=100,
                            help="Specify log step size for training")
    arg_parser.add_argument('--learning_rate', type=float, default=1e-3,
                            help="Learning rate")
    arg_parser.add_argument('--data', type=str, default='pollution.csv',
                            help="Path to the dataset")
    # competitive-random-search hyper-parameters
    arg_parser.add_argument('--pop_size', type=int, default=36)
    arg_parser.add_argument('--code_length', type=int, default=6)
    arg_parser.add_argument('--n_select', type=int, default=6)
    # model hyper-parameters
    arg_parser.add_argument('--time_steps', type=int, default=18)
    arg_parser.add_argument('--n_hidden', type=int, default=128)
    arg_parser.add_argument('--n_layers', type=int, default=2)
    arg_parser.add_argument('--n_output', type=int, default=1)
    arg_parser.add_argument('--bidirectional', action='store_true', default=False)
    arg_parser.add_argument('--max_grad_norm', type=float, default=1.0)
    return arg_parser.parse_args()
def main():
    """Train a weighted LSTM, refine input-step weights via competitive
    random search, and evaluate the best model on the test split.
    """
    args = parse_arguments()
    data, y_scaler = load_data(args.data)
    args.n_features = np.size(data, axis=-1)
    features = data_to_series_features(data, args.time_steps)
    # 70% train / 15% validation / 15% test
    train_features, features = train_test_split(features, test_size=0.3)
    valid_features, test_features = train_test_split(features, test_size=0.5)
    train_data_loader = get_data_loader(train_features, args.batch_size)
    valid_data_loader = get_data_loader(valid_features, args.batch_size)
    test_data_loader = get_data_loader(test_features, args.batch_size)
    best_model = weightedLSTM(args.n_features, args.n_hidden, args.n_layers,
                              args.n_output, [1.0] * args.time_steps, args.bidirectional)
    best_model = make_cuda(best_model)
    print("Initial training before competitive random search")
    best_model = train(args, best_model, train_data_loader, initial=True)
    print("\nInitial training is done. Start competitive random search.\n")
    pop, weights = initialize_weights(args.pop_size, args.time_steps, args.code_length)
    key_to_rmse = {}
    for iteration in range(args.iterations):
        # FIX: count individuals from 1 so progress reads [1/N]..[N/N],
        # consistent with the 1-based iteration counter.
        for enum, (indiv, weight) in enumerate(zip(pop, weights), start=1):
            print('iteration: [%d/%d] indiv_no: [%d/%d]'
                  % (iteration + 1, args.iterations, enum, args.pop_size))
            key = individual_to_key(indiv)
            if key not in key_to_rmse:
                model = weightedLSTM(args.n_features, args.n_hidden, args.n_layers,
                                     args.n_output, weight, args.bidirectional)
                model = make_cuda(model)
                # warm-start every candidate from the current best model
                model.load_state_dict(best_model.state_dict())
                model = train(args, model, train_data_loader)
                rmse, mae = evaluate(args, model, y_scaler, valid_data_loader)
                if is_minimum(rmse, key_to_rmse):
                    best_model = deepcopy(model)
                key_to_rmse[key] = rmse
        pop_selected, fitness_selected = select(pop, args.n_select, key_to_rmse)
        pop = reconstruct_population(pop_selected, args.pop_size)
        weights = pop_to_weights(pop, args.time_steps, args.code_length)
    print('test evaluation:')
    # BUG FIX: y_scaler was missing from this call; evaluate() takes
    # (args, model, y_scaler, loader) as used on the validation split above.
    evaluate(args, best_model, y_scaler, test_data_loader)
# Run the experiment only when executed as a script.
if __name__ == '__main__':
    main()
|
from descent.case import CaseUnapply1
class Tree:
    """Base class for parse-result trees."""
    def copy(self):
        # Default: trees are treated as immutable/shared, so no real copy is
        # made; subclasses holding mutable state should override this.
        return self
class Empty(Tree):
    """Neutral tree used as the initial parse result for a fresh parse."""
    pass
class Ignore(Tree):
    """Tree that discards everything fed to it (used for lookahead and
    ignored sub-parses)."""
    def consume(self, val):
        # swallow the value; the tree itself is the unchanged result
        return self
class Rule:
    """A named grammar rule whose body is bound after construction,
    which makes mutually recursive grammars possible."""
    def __init__(self, name):
        self.name = name
        self.body = None
    def define(self, body):
        """Bind the parser function that implements this rule."""
        self.body = body
    def __call__(self, stream, pos, tree):
        # delegate to the bound body; same (pos, tree) protocol
        outcome = self.body(stream, pos, tree)
        return outcome
    def parse(self, stream):
        """Parse *stream* from the beginning and return the resulting tree."""
        final_pos, final_tree = self(stream, 0, Empty())
        return final_tree
def sequence(*subparsers):
    """Combinator: run *subparsers* in order, threading position and tree.

    Fails (returning the original position and None) as soon as any
    sub-parser fails.
    """
    def _parser(stream, pos, tree):
        state = (pos, tree)
        for sub in subparsers:
            state = sub(stream, *state)
            if state[1] is None:
                return pos, None
        return state
    return _parser
def choice(*subparsers):
    """Combinator: try *subparsers* in order on a copy of the tree and
    return the first success; fail without consuming if none match."""
    def _parser(stream, pos, tree):
        for sub in subparsers:
            outcome = sub(stream, pos, tree.copy())
            if outcome[1] is not None:
                return outcome
        return pos, None
    return _parser
def repeat(parser):
    """Combinator: match zero or more repetitions of *parser*, returning
    the state reached just before the first failure."""
    def _parser(stream, pos, tree):
        result = (pos, tree)
        while True:
            attempt = parser(stream, *result)
            if attempt[1] is None:
                return result
            result = attempt
    return _parser
def repeat1(parser):
    """Combinator: match one or more repetitions of *parser*; fails if the
    first attempt fails, otherwise behaves like repeat()."""
    def _parser(stream, pos, tree):
        state = parser(stream, pos, tree)
        if state[1] is None:
            return state
        while True:
            attempt = parser(stream, *state)
            if attempt[1] is None:
                return state
            state = attempt
    return _parser
def optional(parser):
    """Combinator: match *parser* if possible; on failure succeed with the
    input state unchanged."""
    def _parser(stream, pos, tree):
        attempt = parser(stream, pos, tree)
        return (pos, tree) if attempt[1] is None else attempt
    return _parser
def not_follow(parser):
    """Negative lookahead: succeed without consuming input only when
    *parser* fails at the current position."""
    def _parser(stream, pos, tree):
        matched = parser(stream, pos, Ignore())[1]
        return (pos, None) if matched is not None else (pos, tree)
    return _parser
def follow(parser):
    """Positive lookahead: succeed without consuming input only when
    *parser* matches at the current position."""
    def _parser(stream, pos, tree):
        matched = parser(stream, pos, Ignore())[1]
        return (pos, None) if matched is None else (pos, tree)
    return _parser
def node(name, classes):
    """Parser producing a fresh AST node of the class registered under
    *name*; consumes no input and discards the incoming tree."""
    factory = classes[name]
    def _parser(stream, pos, tree):
        return pos, factory()
    return _parser
def append(parser, name):
    """Run *parser* on a fresh Empty tree and attach its result to the
    current tree through the tree's ``append_<name>`` method."""
    attach = "append_" + name
    def _parser(stream, pos, tree):
        new_pos, subtree = parser(stream, pos, Empty())
        if subtree is None:
            return pos, None
        if isinstance(tree, Ignore):
            # ignored context: keep position progress, drop the subtree
            return new_pos, tree
        appender = getattr(tree, attach)
        return new_pos, appender(subtree)
    return _parser
def top(parser, name):
    """Run *parser* on a fresh Empty tree and attach the *current* tree to
    the produced one via ``append_<name>`` (append with roles reversed)."""
    attach = "append_" + name
    def _parser(stream, pos, tree):
        new_pos, wrapper = parser(stream, pos, Empty())
        if wrapper is None:
            return pos, None
        if isinstance(tree, Ignore):
            # ignored context: keep position progress, drop the wrapper
            return new_pos, tree
        appender = getattr(wrapper, attach)
        return new_pos, appender(tree)
    return _parser
def splice(parser, converters):
    """Run *parser* on a fresh Empty tree and splice the produced subtree
    into the current tree via ``splice_to``."""
    def _parser(stream, pos, tree):
        outcome = parser(stream, pos, Empty())
        if outcome[1] is None:
            return pos, None
        return outcome[0], outcome[1].splice_to(tree, converters)
    return _parser
def top_splice(parser, converters):
    """Run *parser* on a fresh Empty tree and splice the *current* tree
    into the produced one (splice with roles reversed)."""
    def _parser(stream, pos, tree):
        outcome = parser(stream, pos, Empty())
        if outcome[1] is None:
            return pos, None
        return outcome[0], tree.splice_to(outcome[1], converters)
    return _parser
def ignore(parser):
    """Match *parser* (advancing the position) but discard whatever it
    produced, keeping the current tree."""
    def _parser(stream, pos, tree):
        advanced, matched = parser(stream, pos, Ignore())
        return (pos, None) if matched is None else (advanced, tree)
    return _parser
def replace(parser, value):
    """Match *parser* but feed the fixed *value* to the tree instead of
    whatever the parser consumed."""
    def _parser(stream, pos, tree):
        advanced, matched = parser(stream, pos, Ignore())
        if matched is None:
            return pos, None
        return advanced, tree.consume(value)
    return _parser
def char_sequence(val):
    """Match the literal string *val* at the current position and feed it
    to the tree."""
    width = len(val)
    def _parser(stream, pos, tree):
        fits = pos + width <= len(stream)
        if fits and stream.startswith(val, pos):
            return pos + width, tree.consume(val)
        return pos, None
    return _parser
def char_range(start, end):
    """Match one character in the inclusive range [start, end]."""
    def _parser(stream, pos, tree):
        if pos >= len(stream):
            return pos, None
        ch = stream[pos]
        if start <= ch <= end:
            return pos + 1, tree.consume(ch)
        return pos, None
    return _parser
def char_any(stream, pos, tree):
    """Match any single character; fail only at end of input."""
    if pos >= len(stream):
        return pos, None
    return pos + 1, tree.consume(stream[pos])
def fail(stream, pos, tree):
    """Parser that always fails without consuming input."""
    return pos, None
class Compiler(CaseUnapply1):
    """Translate a grammar AST into parser functions.

    Each method below handles one AST node kind and returns the matching
    combinator from this module. Dispatch (calling the instance selects the
    method named after the node) is provided by the CaseUnapply1 base class.
    NOTE(review): `self.rules`, `self.classes` and `self.converters` are
    presumably bound from the keyword arguments passed at construction time
    (see compile_parser) -- confirm against descent.case.CaseUnapply1.
    """
    def char_any(self, val):
        return char_any
    def string(self, val):
        return char_sequence(val)
    def char(self, val):
        # single characters compile the same way as strings
        return char_sequence(val)
    def char_range(self, val):
        return char_range(str(val.start), str(val.end))
    def sequence(self, val):
        return sequence(*(self(v) for v in val))
    def choice(self, val):
        return choice(*(self(v) for v in val))
    def repeat(self, val):
        return repeat(self(val))
    def repeat1(self, val):
        return repeat1(self(val))
    def optional(self, val):
        return optional(self(val))
    def not_follow(self, val):
        return not_follow(self(val))
    def follow(self, val):
        return follow(self(val))
    def reference(self, val):
        # references resolve to Rule objects, allowing recursion
        return self.rules[val]
    def node(self, val):
        return node(val, self.classes)
    def append(self, val):
        return append(self(val.expr), str(val.name))
    def top(self, val):
        return top(self(val.expr), str(val.name))
    def splice(self, val):
        return splice(self(val), self.converters)
    def top_splice(self, val):
        return top_splice(self(val), self.converters)
    def ignore(self, val):
        return ignore(self(val))
    def replace(self, val):
        return replace(self(val.expr), str(val.value))
    def fail(self, val):
        return fail
def compile_parser(gram, classes, converters=None):
    """Compile a grammar mapping {rule_name: body_ast} into parsers.

    Rule objects are created first so rule bodies can reference each other;
    returns the Rule for the first rule in *gram* (the entry point).
    """
    rules = {name: Rule(name) for name in gram}
    compiler = Compiler(
        rules=rules,
        classes=classes,
        converters=converters or {},
    )
    for name, body_ast in gram.items():
        rules[name].define(compiler(body_ast))
    entry = list(gram)[0]
    return rules[entry]
|
# Solve the Assignment problem using the Hungarian method.
# input A - square cost matrix
# return - the optimal assignment
import numpy as np
from scipy.optimize import linear_sum_assignment
def Hungarian(A):
    """Solve the assignment problem for square cost matrix *A*.

    Returns the optimal column index for each row (rows are 0..n-1 in
    order). The total cost is A[row_ind, col_ind].sum().
    """
    assignment = linear_sum_assignment(A)
    return assignment[1]
# Library-style module: nothing to run when executed directly.
if __name__ == "__main__":
    pass
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import sys
import os
import msrest
from azure.iot.hub import DigitalTwinClient
# Read connection and invocation settings from the environment.
iothub_connection_str = os.getenv("IOTHUB_CONNECTION_STRING")
device_id = os.getenv("IOTHUB_DEVICE_ID")
component_name = os.getenv(
    "IOTHUB_COMPONENT_NAME"
)  # for the TemperatureController, try thermostat1
command_name = os.getenv("IOTHUB_COMMAND_NAME")  # for the thermostat you can try getMaxMinReport
payload = os.getenv("IOTHUB_COMMAND_PAYLOAD")  # it really doesn't matter, any string will do.
# Optional parameters
connect_timeout_in_seconds = 3
response_timeout_in_seconds = 7  # Must be within 5-300
try:
    # Create DigitalTwinClient
    digital_twin_client = DigitalTwinClient.from_connection_string(iothub_connection_str)
    # Invoke component command
    invoke_component_command_result = digital_twin_client.invoke_component_command(
        device_id,
        component_name,
        command_name,
        payload,
        connect_timeout_in_seconds,
        response_timeout_in_seconds,
    )
    if invoke_component_command_result:
        print(invoke_component_command_result)
    else:
        print("No invoke_component_command_result found")
except msrest.exceptions.HttpOperationError as ex:
    # Service-side rejection (bad device/component/command, device offline, ...)
    print("HttpOperationError error {0}".format(ex.response.text))
except Exception as exc:
    print("Unexpected error {0}".format(exc))
except KeyboardInterrupt:
    # Reachable despite the broad handler above: KeyboardInterrupt derives
    # from BaseException, not Exception.
    print("{} stopped".format(__file__))
finally:
    print("{} finished".format(__file__))
|
# NOTE(review): this file is an illustrative listing of the `curses` module
# API, not a runnable program. Many calls reference undefined placeholder
# names (visibility, ms, file, ch, tenths, r, g, b, pair_number, fg, bg,
# nlines, ncols, k, flag, interval, mousemask, capname, fd, id, x, y, z,
# bstate, func, ...), and most curses functions also require initscr() to
# have been called on a real terminal first -- executing it raises
# NameError / curses.error almost immediately.
import locale
import curses
print(curses.baudrate())
print(curses.beep())
print(curses.can_change_color())
print(curses.cbreak())
color_number = 1000
print(curses.color_content(color_number))
print(curses.color_pair(color_number))
print(curses.curs_set(visibility))
print(curses.def_prog_mode())
print(curses.def_shell_mode())
print(curses.delay_output(ms))
print(curses.doupdate())
print(curses.echo())
print(curses.endwin())
print(curses.erasechar())
print(curses.filter())
print(curses.flash())
print(curses.flushinp())
print(curses.getmouse())
print(curses.getsyx())
print(curses.getwin(file))
print(curses.has_colors())
print(curses.has_ic())
print(curses.has_il())
print(curses.has_key(ch))
print(curses.halfdelay(tenths))
print(curses.init_color(color_number, r, g, b))
print(curses.init_pair(pair_number, fg, bg))
print(curses.initscr())
print(curses.is_term_resized(nlines, ncols))
print(curses.isendwin())
print(curses.keyname(k))
print(curses.killchar())
print(curses.longname())
print(curses.meta(flag))
print(curses.mouseinterval(interval))
print(curses.mousemask(mousemask))
print(curses.napms(ms))
print(curses.newpad(nlines, ncols))
print(curses.newwin(nlines, ncols))
print(curses.nl())
print(curses.nocbreak())
print(curses.noecho())
print(curses.nonl())
print(curses.noqiflush())
print(curses.noraw())
print(curses.pair_content(pair_number))
print(curses.pair_number(attr))
print(curses.putp(str))
print(curses.qiflush([flag]))
print(curses.raw())
print(curses.reset_prog_mode())
print(curses.reset_shell_mode())
print(curses.resetty())
print(curses.resize_term(nlines, ncols))
print(curses.resizeterm(nlines, ncols))
print(curses.savetty())
print(curses.setsyx(y, x))
print(curses.setupterm(term=None, fd=-1))
print(curses.start_color())
print(curses.termattrs())
print(curses.termname())
print(curses.tigetflag(capname))
print(curses.tigetnum(capname))
print(curses.tigetstr(capname))
print(curses.tparm(curses.tigetstr('cup'), 5, 3))
print(curses.typeahead(fd))
print(curses.unctrl(ch))
print(curses.ungetch(ch))
print(curses.update_lines_cols())
print(curses.unget_wch(ch))
print(curses.ungetmouse(id, x, y, z, bstate))
print(curses.use_env(flag))
print(curses.use_default_colors())
print(curses.wrapper(func, ...))
|
from structure import *
allpassed = True
# Exercise check_wyckoff_position over every Wyckoff position of
# spacegroups 1..230 with randomized points.
for sg in range(1, 231):
    print("Calculating spacegroup " + str(sg))
    wyckoffs = get_wyckoffs(sg)
    for index, wp in enumerate(wyckoffs):
        # random fractional point, with a random sign flip per coordinate
        v = np.random.random(3)
        for i in range(3):
            if np.random.random() < 0.5:
                v[i] *= -1
        # v = SymmOp.from_rotation_and_translation(np.zeros([3,3]), v)
        # expand the point through every operation of the Wyckoff position
        points = []
        for p in wp:
            points.append(p.operate(v))
        # shift each coordinate by -1, 0, or +1 (each with probability 1/3);
        # presumably recognition should be invariant under lattice
        # translations -- TODO confirm against check_wyckoff_position
        for i, p in enumerate(points):
            for j in range(3):
                a = np.random.random()
                if a < 1 / 3:
                    points[i][j] += 1
                elif a < 2 / 3:
                    points[i][j] -= 1
        if check_wyckoff_position(points, sg) is not False:
            pass
        else:
            # record and report the failing spacegroup / position
            allpassed = False
            print("sg: " + str(sg) + ", index: " + str(index))
            print("points:")
            for p in points:
                print(p)
if allpassed is True:
    print("All spacegroups passed.")
|
# Allowed configuration keys and the mapping from symbolic value -> stored value.
_global_para = {
    "category": {
        "red": 0,
        "blue": 1
    },
    "DEBUG_SETTING": {
        True: True,
        False: False
    }
}
# Values registered via set_value(), keyed by name.
_global_dict = {}
def set_value(name, key, value):
    """Register *name* with the translated value _global_para[key][value]."""
    _global_dict[name] = _global_para[key][value]
def get_value(name, defValue=None):
    """Return the value registered under *name*.

    If *name* is unknown, return *defValue* when one was supplied
    (FIX: the parameter was previously accepted but ignored); otherwise
    raise KeyError. Only KeyError is caught now -- the original bare
    ``except`` also hid unrelated errors.
    """
    try:
        return _global_dict[name]
    except KeyError:
        if defValue is not None:
            return defValue
        raise KeyError('''
    Make sure the key is declared before referencing it.
    You can try launching the program from main.py''')
|
def normalize_cf(cf_json):
    """Recursively collapse CloudFormation ``{"Ref": name}`` dicts to *name*.

    Lists are normalized element-wise, dicts value-wise; every other value
    is returned unchanged.
    """
    if isinstance(cf_json, list):
        return [normalize_cf(item) for item in cf_json]
    if not isinstance(cf_json, dict):
        return cf_json
    ref = cf_json.get('Ref')
    if ref is not None:
        return ref
    return {key: normalize_cf(value) for key, value in cf_json.items()}
# _*_ coding:gbk _*_
'''
Author: Ruan Yang
Email: ruanyang_njut@163.com
Reference: https://keras-cn.readthedocs.io/en/latest/getting_started/sequential_model/
'''
import keras
from keras.models import Sequential
from keras.layers import Dense,Dropout,Activation
from keras.optimizers import SGD
# Generate dummy data
import numpy as np
# Training data: 1000 random 20-dim feature vectors, labels drawn uniformly
# from 10 classes and one-hot encoded.
x_train=np.random.random((1000,20))
y_train=keras.utils.to_categorical(np.random.randint(10,size=(1000,1)),\
num_classes=10)
# Held-out test data with the same conventions.
x_test=np.random.random((100,20))
y_test=keras.utils.to_categorical(np.random.randint(10,size=(100,1)),\
num_classes=10)
model=Sequential()
# Dense(64) is a fully-connected layer with 64 hidden units.
# in the first layer, you must specify the expected input data shape:
# here, 20-dimensional vectors.
model.add(Dense(64,activation="relu",input_dim=20))
model.add(Dropout(0.5))
model.add(Dense(64,activation="relu"))
model.add(Dropout(0.5))
# 10-way softmax output matching the one-hot labels
model.add(Dense(10,activation="softmax"))
# Defined the optimizer parameters
# NOTE(review): `lr`/`decay` are legacy Keras argument names; recent
# releases expect `learning_rate` -- confirm against the installed version.
sgd=SGD(lr=0.01,decay=1e-6,momentum=0.9,nesterov=True)
# compile the model
model.compile(loss="categorical_crossentropy",optimizer=sgd,\
metrics=["accuracy"])
# fit the model
model.fit(x_train,y_train,epochs=20,batch_size=128)
# get the results (evaluate returns [loss, accuracy] per compile metrics)
score=model.evaluate(x_test,y_test,batch_size=128)
print("#----------------------------------#")
print(score)
print("#----------------------------------#")
print("\n")
|
import time
from multiprocessing import Process
import sys
from p001 import P001
from p002 import P002
from p003 import P003
from p004 import P004
from p005 import P005
from p006 import P006
from p007 import P007
from p008 import P008
from p009 import P009
from p010 import P010
from p011 import P011
from p012 import P012
from p013 import P013
from p014 import P014
from p015 import P015
from p016 import P016
from p017 import P017
from p018 import P018
from p019 import P019
from p020 import P020
from p021 import P021
def main(argv):
    """Run all problems, timing the whole batch.

    Passing '-p' as the first argument selects parallel execution.
    """
    started = time.time()
    run_parallel = bool(argv) and argv[0] == '-p'
    if run_parallel:
        solve_parallel()
    else:
        solve_sequential()
    elapsed = time.time() - started
    print('Total time:', elapsed, 's')
def solve_sequential():
    """Run every problem one after another in this process."""
    problem_classes = (
        P001, P002, P003, P004, P005, P006, P007,
        P008, P009, P010, P011, P012, P013, P014,
        P015, P016, P017, P018, P019, P020, P021,
    )
    for problem_cls in problem_classes:
        problem_cls().run()
def solve_parallel():
    """Run every problem in its own process and wait for all of them.

    BUG FIX: the process for P021 was previously named 'p020' (copy-paste
    duplicate); names are now derived from the class list, so each process
    is named 'p001'..'p021'.
    """
    problem_classes = (
        P001, P002, P003, P004, P005, P006, P007,
        P008, P009, P010, P011, P012, P013, P014,
        P015, P016, P017, P018, P019, P020, P021,
    )
    processes = [
        Process(name='p{:03d}'.format(number), target=cls().run)
        for number, cls in enumerate(problem_classes, start=1)
    ]
    for proc in processes:
        proc.start()
    for proc in processes:
        proc.join()
# Entry point: forward the command-line flags (excluding the program name).
if __name__ == "__main__":
    main(sys.argv[1:])
from nltk.tokenize import word_tokenize, sent_tokenize
print(word_tokenize("This is the queen's castle. Yay!"))
# ['This', 'is', 'the', 'queen', "'s", 'castle', '.', 'Yay', '!']
# NOTE(review): `got` is never defined in this file -- the next line raises
# NameError unless `got` (presumably a Game of Thrones text, judging by the
# expected output below) is loaded elsewhere. TODO: load the text first.
print(sent_tokenize(got)[1:3])
# ['"The wildlings are \ndead."', '"Do the dead frighten you?"']
|
import FWCore.ParameterSet.Config as cms
source = cms.Source("EmptySource")
from GeneratorInterface.Hydjet2Interface.hydjet2DefaultParameters_cff import *
generator = cms.EDFilter("Hydjet2GeneratorFilter",
collisionParameters5020GeV,
qgpParameters,
hydjet2Parameters,
fNhsel = cms.int32(2), # Flag to include jet (J)/jet quenching (JQ) and hydro (H) state production, fNhsel (0 H on & J off, 1 H/J on & JQ off, 2 H/J/HQ on, 3 J on & H/JQ off, 4 H off & J/JQ on)
PythiaParameters = cms.PSet(PythiaDefaultBlock,
parameterSets = cms.vstring(
#'pythiaUESettings',
'ProQ2Otune',
'hydjet2PythiaDefault',
'pythiaJets',
'pythiaPromptPhotons',
'myParameters',
'pythiaZjets',
'pythiaBottomoniumNRQCD',
'pythiaCharmoniumNRQCD',
'pythiaQuarkoniaSettings',
'pythiaWeakBosons',
'TDB'
)
),
maxEventsToPrint = cms.untracked.int32(0),
pythiaPylistVerbosity = cms.untracked.int32(0),
fIfb = cms.int32(1), # Flag of type of centrality generation, fBfix (=0 is fixed by fBfix, >0 distributed [fBfmin, fBmax])
fBmin = cms.double(0.), # Minimum impact parameter, fBmin
fBmax = cms.double(30.), # Maximum impact parameter, fBmax
fBfix = cms.double(0.), # Fixed impact parameter, fBfix
)
'''
RA(Pb) ~= 6.813740957 fm
% cent b/RA
0 0
5 0.51
6 0.57
10 0.74
12 0.81
15 0.91
20 1.05
25 1.18
30 1.29
35 1.39
40 1.49
45 1.58
50 1.67
55 1.75
60 1.83
65 1.90
70 1.97
75 2.06
'''
|
"""
This file is part of the TheLMA (THe Laboratory Management Application) project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Worklist series experiment design rack table.
"""
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import Table
__docformat__ = "reStructuredText en"
__all__ = ['create_table']
def create_table(metadata, experiment_design_rack_tbl, worklist_series_tbl):
    """
    Table factory.

    Builds the ``worklist_series_experiment_design_rack`` association table
    linking experiment design racks to worklist series, with a composite
    primary key over both foreign-key columns.
    """
    # An experiment design rack may appear at most once (unique=True),
    # and both links follow their parent rows on update/delete.
    rack_column = Column(
        'experiment_design_rack_id', Integer,
        ForeignKey(experiment_design_rack_tbl.c.experiment_design_rack_id,
                   ondelete='CASCADE', onupdate='CASCADE'),
        nullable=False, unique=True)
    series_column = Column(
        'worklist_series_id', Integer,
        ForeignKey(worklist_series_tbl.c.worklist_series_id,
                   ondelete='CASCADE', onupdate='CASCADE'),
        nullable=False)
    tbl = Table('worklist_series_experiment_design_rack', metadata,
                rack_column, series_column)
    # Attach the composite primary key after creation; constructing the
    # constraint against the table's columns registers it on the table.
    PrimaryKeyConstraint(tbl.c.experiment_design_rack_id,
                         tbl.c.worklist_series_id)
    return tbl
|
r"""
Vector Space Morphisms (aka Linear Transformations)
AUTHOR:
- Rob Beezer: (2011-06-29)
A vector space morphism is a homomorphism between vector spaces, better known
as a linear transformation. These are a specialization of Sage's free module
homomorphisms. (A free module is like a vector space, but with scalars from a
ring that may not be a field.) So references to free modules in the
documentation or error messages should be understood as simply reflecting a
more general situation.
Creation
--------
The constructor :func:`linear_transformation` is designed to accept a
variety of inputs that can define a linear transformation. See the
documentation of the function for all the possibilities. Here we give two.
First a matrix representation. By default input matrices are understood
to act on vectors placed to left of the matrix. Optionally, an input
matrix can be described as acting on vectors placed to the right. ::
sage: A = matrix(QQ, [[-1, 2, 3], [4, 2, 0]])
sage: phi = linear_transformation(A)
sage: phi
Vector space morphism represented by the matrix:
[-1 2 3]
[ 4 2 0]
Domain: Vector space of dimension 2 over Rational Field
Codomain: Vector space of dimension 3 over Rational Field
sage: phi([2, -3])
(-14, -2, 6)
A symbolic function can be used to specify the "rule" for a
linear transformation, along with explicit descriptions of the
domain and codomain. ::
sage: F = Integers(13)
sage: D = F^3
sage: C = F^2
sage: x, y, z = var('x y z')
sage: f(x, y, z) = [2*x + 3*y + 5*z, x + z]
sage: rho = linear_transformation(D, C, f)
sage: f(1, 2, 3)
(23, 4)
sage: rho([1, 2, 3])
(10, 4)
A "vector space homspace" is the set of all linear transformations
between two vector spaces. Various input can be coerced into a
homspace to create a linear transformation. See
:mod:`sage.modules.vector_space_homspace` for more. ::
sage: D = QQ^4
sage: C = QQ^2
sage: hom_space = Hom(D, C)
sage: images = [[1, 3], [2, -1], [4, 0], [3, 7]]
sage: zeta = hom_space(images)
sage: zeta
Vector space morphism represented by the matrix:
[ 1 3]
[ 2 -1]
[ 4 0]
[ 3 7]
Domain: Vector space of dimension 4 over Rational Field
Codomain: Vector space of dimension 2 over Rational Field
A homomorphism may also be created via a method on the domain. ::
sage: F = QQ[sqrt(3)]
sage: a = F.gen(0)
sage: D = F^2
sage: C = F^2
sage: A = matrix(F, [[a, 1], [2*a, 2]])
sage: psi = D.hom(A, C)
sage: psi
Vector space morphism represented by the matrix:
[ sqrt3 1]
[2*sqrt3 2]
Domain: Vector space of dimension 2 over Number Field in sqrt3 with defining polynomial x^2 - 3 with sqrt3 = 1.732050807568878?
Codomain: Vector space of dimension 2 over Number Field in sqrt3 with defining polynomial x^2 - 3 with sqrt3 = 1.732050807568878?
sage: psi([1, 4])
(9*sqrt3, 9)
Properties
----------
Many natural properties of a linear transformation can be computed.
Some of these are more general methods of objects in the classes
:class:`sage.modules.free_module_morphism.FreeModuleMorphism` and
:class:`sage.modules.matrix_morphism.MatrixMorphism`.
Values are computed in a natural way, an inverse image of an
element can be computed with the ``lift()`` method, when the inverse
image actually exists. ::
sage: A = matrix(QQ, [[1,2], [2,4], [3,6]])
sage: phi = linear_transformation(A)
sage: phi([1,2,0])
(5, 10)
sage: phi.lift([10, 20])
(10, 0, 0)
sage: phi.lift([100, 100])
Traceback (most recent call last):
...
ValueError: element is not in the image
Images and pre-images can be computed as vector spaces. ::
sage: A = matrix(QQ, [[1,2], [2,4], [3,6]])
sage: phi = linear_transformation(A)
sage: phi.image()
Vector space of degree 2 and dimension 1 over Rational Field
Basis matrix:
[1 2]
sage: phi.inverse_image( (QQ^2).span([[1,2]]) )
Vector space of degree 3 and dimension 3 over Rational Field
Basis matrix:
[1 0 0]
[0 1 0]
[0 0 1]
sage: phi.inverse_image( (QQ^2).span([[1,1]]) )
Vector space of degree 3 and dimension 2 over Rational Field
Basis matrix:
[ 1 0 -1/3]
[ 0 1 -2/3]
Injectivity and surjectivity can be checked. ::
sage: A = matrix(QQ, [[1,2], [2,4], [3,6]])
sage: phi = linear_transformation(A)
sage: phi.is_injective()
False
sage: phi.is_surjective()
False
Restrictions and Representations
--------------------------------
It is possible to restrict the domain and codomain of a linear
transformation to make a new linear transformation. We will use
those commands to replace the domain and codomain by equal vector
spaces, but with alternate bases. The point here is that the
matrix representation used to represent linear transformations are
relative to the bases of both the domain and codomain. ::
sage: A = graphs.PetersenGraph().adjacency_matrix()
sage: V = QQ^10
sage: phi = linear_transformation(V, V, A)
sage: phi
Vector space morphism represented by the matrix:
[0 1 0 0 1 1 0 0 0 0]
[1 0 1 0 0 0 1 0 0 0]
[0 1 0 1 0 0 0 1 0 0]
[0 0 1 0 1 0 0 0 1 0]
[1 0 0 1 0 0 0 0 0 1]
[1 0 0 0 0 0 0 1 1 0]
[0 1 0 0 0 0 0 0 1 1]
[0 0 1 0 0 1 0 0 0 1]
[0 0 0 1 0 1 1 0 0 0]
[0 0 0 0 1 0 1 1 0 0]
Domain: Vector space of dimension 10 over Rational Field
Codomain: Vector space of dimension 10 over Rational Field
sage: B1 = [V.gen(i) + V.gen(i+1) for i in range(9)] + [V.gen(9)]
sage: B2 = [V.gen(0)] + [-V.gen(i-1) + V.gen(i) for i in range(1,10)]
sage: D = V.subspace_with_basis(B1)
sage: C = V.subspace_with_basis(B2)
sage: rho = phi.restrict_codomain(C)
sage: zeta = rho.restrict_domain(D)
sage: zeta
Vector space morphism represented by the matrix:
[6 5 4 3 3 2 1 0 0 0]
[6 5 4 3 2 2 2 1 0 0]
[6 6 5 4 3 2 2 2 1 0]
[6 5 5 4 3 2 2 2 2 1]
[6 4 4 4 3 3 3 3 2 1]
[6 5 4 4 4 4 4 4 3 1]
[6 6 5 4 4 4 3 3 3 2]
[6 6 6 5 4 4 2 1 1 1]
[6 6 6 6 5 4 3 1 0 0]
[3 3 3 3 3 2 2 1 0 0]
Domain: Vector space of degree 10 and dimension 10 over Rational Field
User basis matrix:
[1 1 0 0 0 0 0 0 0 0]
[0 1 1 0 0 0 0 0 0 0]
[0 0 1 1 0 0 0 0 0 0]
[0 0 0 1 1 0 0 0 0 0]
[0 0 0 0 1 1 0 0 0 0]
[0 0 0 0 0 1 1 0 0 0]
[0 0 0 0 0 0 1 1 0 0]
[0 0 0 0 0 0 0 1 1 0]
[0 0 0 0 0 0 0 0 1 1]
[0 0 0 0 0 0 0 0 0 1]
Codomain: Vector space of degree 10 and dimension 10 over Rational Field
User basis matrix:
[ 1 0 0 0 0 0 0 0 0 0]
[-1 1 0 0 0 0 0 0 0 0]
[ 0 -1 1 0 0 0 0 0 0 0]
[ 0 0 -1 1 0 0 0 0 0 0]
[ 0 0 0 -1 1 0 0 0 0 0]
[ 0 0 0 0 -1 1 0 0 0 0]
[ 0 0 0 0 0 -1 1 0 0 0]
[ 0 0 0 0 0 0 -1 1 0 0]
[ 0 0 0 0 0 0 0 -1 1 0]
[ 0 0 0 0 0 0 0 0 -1 1]
An endomorphism is a linear transformation with an equal domain and codomain,
and here each needs to have the same basis. We are using a
matrix that has well-behaved eigenvalues, as part of showing that these
do not change as the representation changes. ::
sage: A = graphs.PetersenGraph().adjacency_matrix()
sage: V = QQ^10
sage: phi = linear_transformation(V, V, A)
sage: phi.eigenvalues()
[3, -2, -2, -2, -2, 1, 1, 1, 1, 1]
sage: B1 = [V.gen(i) + V.gen(i+1) for i in range(9)] + [V.gen(9)]
sage: C = V.subspace_with_basis(B1)
sage: zeta = phi.restrict(C)
sage: zeta
Vector space morphism represented by the matrix:
[ 1 0 1 -1 2 -1 2 -2 2 -2]
[ 1 0 1 0 0 0 1 0 0 0]
[ 0 1 0 1 0 0 0 1 0 0]
[ 1 -1 2 -1 2 -2 2 -2 3 -2]
[ 2 -2 2 -1 1 -1 1 0 1 0]
[ 1 0 0 0 0 0 0 1 1 0]
[ 0 1 0 0 0 1 -1 1 0 2]
[ 0 0 1 0 0 2 -1 1 -1 2]
[ 0 0 0 1 0 1 1 0 0 0]
[ 0 0 0 0 1 -1 2 -1 1 -1]
Domain: Vector space of degree 10 and dimension 10 over Rational Field
User basis matrix:
[1 1 0 0 0 0 0 0 0 0]
[0 1 1 0 0 0 0 0 0 0]
[0 0 1 1 0 0 0 0 0 0]
[0 0 0 1 1 0 0 0 0 0]
[0 0 0 0 1 1 0 0 0 0]
[0 0 0 0 0 1 1 0 0 0]
[0 0 0 0 0 0 1 1 0 0]
[0 0 0 0 0 0 0 1 1 0]
[0 0 0 0 0 0 0 0 1 1]
[0 0 0 0 0 0 0 0 0 1]
Codomain: Vector space of degree 10 and dimension 10 over Rational Field
User basis matrix:
[1 1 0 0 0 0 0 0 0 0]
[0 1 1 0 0 0 0 0 0 0]
[0 0 1 1 0 0 0 0 0 0]
[0 0 0 1 1 0 0 0 0 0]
[0 0 0 0 1 1 0 0 0 0]
[0 0 0 0 0 1 1 0 0 0]
[0 0 0 0 0 0 1 1 0 0]
[0 0 0 0 0 0 0 1 1 0]
[0 0 0 0 0 0 0 0 1 1]
[0 0 0 0 0 0 0 0 0 1]
sage: zeta.eigenvalues()
[3, -2, -2, -2, -2, 1, 1, 1, 1, 1]
Equality
--------
Equality of linear transformations is a bit nuanced. The equality operator
``==`` tests if two linear transformations have equal matrix representations,
while we determine if two linear transformations are the same function with the
``.is_equal_function()`` method. Notice in this example that the function
never changes, just the representations. ::
sage: f = lambda x: vector(QQ, [x[1], x[0]+x[1], x[0]])
sage: H = Hom(QQ^2, QQ^3)
sage: phi = H(f)
sage: rho = linear_transformation(QQ^2, QQ^3, matrix(QQ,2, 3, [[0,1,1], [1,1,0]]))
sage: phi == rho
True
sage: U = (QQ^2).subspace_with_basis([[1, 2], [-3, 1]])
sage: V = (QQ^3).subspace_with_basis([[0, 1, 0], [2, 3, 1], [-1, 1, 6]])
sage: K = Hom(U, V)
sage: zeta = K(f)
sage: zeta == phi
False
sage: zeta.is_equal_function(phi)
True
sage: zeta.is_equal_function(rho)
True
TESTS::
sage: V = QQ^2
sage: H = Hom(V, V)
sage: f = H([V.1,-2*V.0])
sage: loads(dumps(f))
Vector space morphism represented by the matrix:
[ 0 1]
[-2 0]
Domain: Vector space of dimension 2 over Rational Field
Codomain: Vector space of dimension 2 over Rational Field
sage: loads(dumps(f)) == f
True
"""
####################################################################################
# Copyright (C) 2011 Rob Beezer <beezer@ups.edu>
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
####################################################################################
import sage.modules.matrix_morphism as matrix_morphism
import sage.modules.free_module_morphism as free_module_morphism
from . import vector_space_homspace
from sage.structure.element import is_Matrix
def linear_transformation(arg0, arg1=None, arg2=None, side='left'):
    r"""
    Create a linear transformation from a variety of possible inputs.

    FORMATS:

    In the following, ``D`` and ``C`` are vector spaces over
    the same field that are the domain and codomain
    (respectively) of the linear transformation.

    ``side`` is a keyword that is either 'left' or 'right'.
    When a matrix is used to specify a linear transformation,
    as in the first two call formats below, you may specify
    if the function is given by matrix multiplication with
    the vector on the left, or the vector on the right.
    The default is 'left'. The matrix
    representation may be obtained as either version, no matter
    how it is created.

    - ``linear_transformation(A, side='left')``

      Where ``A`` is a matrix.  The domain and codomain are inferred
      from the dimension of the matrix and the base ring of the matrix.
      The base ring must be a field, or have its fraction field implemented
      in Sage.

    - ``linear_transformation(D, C, A, side='left')``

      ``A`` is a matrix that behaves as above.  However, now the domain
      and codomain are given explicitly. The matrix is checked for
      compatibility with the domain and codomain.  Additionally, the
      domain and codomain may be supplied with alternate ("user") bases
      and the matrix is interpreted as being a representation relative
      to those bases.

    - ``linear_transformation(D, C, f)``

      ``f`` is any function that can be applied to the basis elements of the
      domain and that produces elements of the codomain.  The linear
      transformation returned is the unique linear transformation that
      extends this mapping on the basis elements.  ``f`` may come from a
      function defined by a Python ``def`` statement, or may be defined as a
      ``lambda`` function.

      Alternatively, ``f`` may be specified by a callable symbolic function,
      see the examples below for a demonstration.

    - ``linear_transformation(D, C, images)``

      ``images`` is a list, or tuple, of codomain elements, equal in number
      to the size of the basis of the domain.  Each basis element of the domain
      is mapped to the corresponding element of the ``images`` list, and the
      linear transformation returned is the unique linear transformation that
      extends this mapping.

    OUTPUT:

    A linear transformation described by the input.  This is a
    "vector space morphism", an object of the class
    :class:`sage.modules.vector_space_morphism`.

    EXAMPLES:

    We can define a linear transformation with just a matrix, understood to
    act on a vector placed on one side or the other.  The field for the
    vector spaces used as domain and codomain is obtained from the base
    ring of the matrix, possibly promoting to a fraction field. ::

        sage: A = matrix(ZZ, [[1, -1, 4], [2, 0, 5]])
        sage: phi = linear_transformation(A)
        sage: phi
        Vector space morphism represented by the matrix:
        [ 1 -1  4]
        [ 2  0  5]
        Domain: Vector space of dimension 2 over Rational Field
        Codomain: Vector space of dimension 3 over Rational Field
        sage: phi([1/2, 5])
        (21/2, -1/2, 27)

        sage: B = matrix(Integers(7), [[1, 2, 1], [3, 5, 6]])
        sage: rho = linear_transformation(B, side='right')
        sage: rho
        Vector space morphism represented by the matrix:
        [1 3]
        [2 5]
        [1 6]
        Domain: Vector space of dimension 3 over Ring of integers modulo 7
        Codomain: Vector space of dimension 2 over Ring of integers modulo 7
        sage: rho([2, 4, 6])
        (2, 6)

    We can define a linear transformation with a matrix, while explicitly
    giving the domain and codomain.  Matrix entries will be coerced into the
    common field of scalars for the vector spaces. ::

        sage: D = QQ^3
        sage: C = QQ^2
        sage: A = matrix([[1, 7], [2, -1], [0, 5]])
        sage: A.parent()
        Full MatrixSpace of 3 by 2 dense matrices over Integer Ring
        sage: zeta = linear_transformation(D, C, A)
        sage: zeta.matrix().parent()
        Full MatrixSpace of 3 by 2 dense matrices over Rational Field
        sage: zeta
        Vector space morphism represented by the matrix:
        [ 1  7]
        [ 2 -1]
        [ 0  5]
        Domain: Vector space of dimension 3 over Rational Field
        Codomain: Vector space of dimension 2 over Rational Field

    Matrix representations are relative to the bases for the domain
    and codomain. ::

        sage: u = vector(QQ, [1, -1])
        sage: v = vector(QQ, [2, 3])
        sage: D = (QQ^2).subspace_with_basis([u, v])
        sage: x = vector(QQ, [2, 1])
        sage: y = vector(QQ, [-1, 4])
        sage: C = (QQ^2).subspace_with_basis([x, y])
        sage: A = matrix(QQ, [[2, 5], [3, 7]])
        sage: psi = linear_transformation(D, C, A)
        sage: psi
        Vector space morphism represented by the matrix:
        [2 5]
        [3 7]
        Domain: Vector space of degree 2 and dimension 2 over Rational Field
        User basis matrix:
        [ 1 -1]
        [ 2  3]
        Codomain: Vector space of degree 2 and dimension 2 over Rational Field
        User basis matrix:
        [ 2  1]
        [-1  4]
        sage: psi(u) == 2*x + 5*y
        True
        sage: psi(v) == 3*x + 7*y
        True

    Functions that act on the domain may be used to compute images of
    the domain's basis elements, and this mapping can be extended to
    a unique linear transformation.  The function may be a Python
    function (via ``def`` or ``lambda``) or a Sage symbolic function. ::

        sage: def g(x):
        ....:     return vector(QQ, [2*x[0]+x[2], 5*x[1]])
        sage: phi = linear_transformation(QQ^3, QQ^2, g)
        sage: phi
        Vector space morphism represented by the matrix:
        [2 0]
        [0 5]
        [1 0]
        Domain: Vector space of dimension 3 over Rational Field
        Codomain: Vector space of dimension 2 over Rational Field

        sage: f = lambda x: vector(QQ, [2*x[0]+x[2], 5*x[1]])
        sage: rho = linear_transformation(QQ^3, QQ^2, f)
        sage: rho
        Vector space morphism represented by the matrix:
        [2 0]
        [0 5]
        [1 0]
        Domain: Vector space of dimension 3 over Rational Field
        Codomain: Vector space of dimension 2 over Rational Field

        sage: x, y, z = var('x y z')
        sage: h(x, y, z) = [2*x + z, 5*y]
        sage: zeta = linear_transformation(QQ^3, QQ^2, h)
        sage: zeta
        Vector space morphism represented by the matrix:
        [2 0]
        [0 5]
        [1 0]
        Domain: Vector space of dimension 3 over Rational Field
        Codomain: Vector space of dimension 2 over Rational Field

        sage: phi == rho
        True
        sage: rho == zeta
        True

    We create a linear transformation relative to non-standard bases,
    and capture its representation relative to standard bases.  With this, we
    can build functions that create the same linear transformation relative
    to the nonstandard bases. ::

        sage: u = vector(QQ, [1, -1])
        sage: v = vector(QQ, [2, 3])
        sage: D = (QQ^2).subspace_with_basis([u, v])
        sage: x = vector(QQ, [2, 1])
        sage: y = vector(QQ, [-1, 4])
        sage: C = (QQ^2).subspace_with_basis([x, y])
        sage: A = matrix(QQ, [[2, 5], [3, 7]])
        sage: psi = linear_transformation(D, C, A)
        sage: rho = psi.restrict_codomain(QQ^2).restrict_domain(QQ^2)
        sage: rho.matrix()
        [ -4/5  97/5]
        [  1/5 -13/5]

        sage: f = lambda x: vector(QQ, [(-4/5)*x[0] + (1/5)*x[1], (97/5)*x[0] + (-13/5)*x[1]])
        sage: psi = linear_transformation(D, C, f)
        sage: psi.matrix()
        [2 5]
        [3 7]

        sage: s, t = var('s t')
        sage: h(s, t) = [(-4/5)*s + (1/5)*t, (97/5)*s + (-13/5)*t]
        sage: zeta = linear_transformation(D, C, h)
        sage: zeta.matrix()
        [2 5]
        [3 7]

    Finally, we can give an explicit list of images for the basis
    elements of the domain. ::

        sage: x = polygen(QQ)
        sage: F.<a> = NumberField(x^3+x+1)
        sage: u = vector(F, [1, a, a^2])
        sage: v = vector(F, [a, a^2, 2])
        sage: w = u + v
        sage: D = F^3
        sage: C = F^3
        sage: rho = linear_transformation(D, C, [u, v, w])
        sage: rho.matrix()
        [      1       a     a^2]
        [      a     a^2       2]
        [  a + 1 a^2 + a a^2 + 2]
        sage: C = (F^3).subspace_with_basis([u, v])
        sage: D = (F^3).subspace_with_basis([u, v])
        sage: psi = linear_transformation(C, D, [u+v, u-v])
        sage: psi.matrix()
        [ 1  1]
        [ 1 -1]

    TESTS:

    We test some bad inputs.  First, the wrong things in the wrong places. ::

        sage: linear_transformation('junk')
        Traceback (most recent call last):
        ...
        TypeError: first argument must be a matrix or a vector space, not junk

        sage: linear_transformation(QQ^2, QQ^3, 'stuff')
        Traceback (most recent call last):
        ...
        TypeError: third argument must be a matrix, function, or list of images, not stuff

        sage: linear_transformation(QQ^2, 'garbage')
        Traceback (most recent call last):
        ...
        TypeError: if first argument is a vector space, then second argument must be a vector space, not garbage

        sage: linear_transformation(QQ^2, Integers(7)^2)
        Traceback (most recent call last):
        ...
        TypeError: vector spaces must have the same field of scalars, not Rational Field and Ring of integers modulo 7

    Matrices must be over a field (or a ring that can be promoted to a field),
    and of the right size. ::

        sage: linear_transformation(matrix(Integers(6), [[2, 3],[4, 5]]))
        Traceback (most recent call last):
        ...
        TypeError: matrix must have entries from a field, or a ring with a fraction field, not Ring of integers modulo 6

        sage: A = matrix(QQ, 3, 4, range(12))
        sage: linear_transformation(QQ^4, QQ^4, A)
        Traceback (most recent call last):
        ...
        TypeError: domain dimension is incompatible with matrix size

        sage: linear_transformation(QQ^3, QQ^3, A, side='right')
        Traceback (most recent call last):
        ...
        TypeError: domain dimension is incompatible with matrix size

        sage: linear_transformation(QQ^3, QQ^3, A)
        Traceback (most recent call last):
        ...
        TypeError: codomain dimension is incompatible with matrix size

        sage: linear_transformation(QQ^4, QQ^4, A, side='right')
        Traceback (most recent call last):
        ...
        TypeError: codomain dimension is incompatible with matrix size

    Lists of images can be of the wrong number, or not really
    elements of the codomain. ::

        sage: linear_transformation(QQ^3, QQ^2, [vector(QQ, [1,2])])
        Traceback (most recent call last):
        ...
        ValueError: number of images should equal the size of the domain's basis (=3), not 1

        sage: C = (QQ^2).subspace_with_basis([vector(QQ, [1,1])])
        sage: linear_transformation(QQ^1, C, [vector(QQ, [1,2])])
        Traceback (most recent call last):
        ...
        ArithmeticError: some proposed image is not in the codomain, because
        element [1, 2] is not in free module

    Functions may not apply properly to domain elements,
    or return values outside the codomain. ::

        sage: f = lambda x: vector(QQ, [x[0], x[4]])
        sage: linear_transformation(QQ^3, QQ^2, f)
        Traceback (most recent call last):
        ...
        ValueError: function cannot be applied properly to some basis element because
        vector index out of range

        sage: f = lambda x: vector(QQ, [x[0], x[1]])
        sage: C = (QQ^2).span([vector(QQ, [1, 1])])
        sage: linear_transformation(QQ^2, C, f)
        Traceback (most recent call last):
        ...
        ArithmeticError: some image of the function is not in the codomain, because
        element [1, 0] is not in free module

    A Sage symbolic function can come in a variety of forms that are
    not representative of a linear transformation. ::

        sage: x, y = var('x, y')
        sage: f(x, y) = [y, x, y]
        sage: linear_transformation(QQ^3, QQ^3, f)
        Traceback (most recent call last):
        ...
        ValueError: symbolic function has the wrong number of inputs for domain

        sage: linear_transformation(QQ^2, QQ^2, f)
        Traceback (most recent call last):
        ...
        ValueError: symbolic function has the wrong number of outputs for codomain

        sage: x, y = var('x y')
        sage: f(x, y) = [y, x*y]
        sage: linear_transformation(QQ^2, QQ^2, f)
        Traceback (most recent call last):
        ...
        ValueError: symbolic function must be linear in all the inputs:
        unable to convert y to a rational

        sage: x, y = var('x y')
        sage: f(x, y) = [x, 2*y]
        sage: C = (QQ^2).span([vector(QQ, [1, 1])])
        sage: linear_transformation(QQ^2, C, f)
        Traceback (most recent call last):
        ...
        ArithmeticError: some image of the function is not in the codomain, because
        element [1, 0] is not in free module
    """
    # Imports are local to keep module start-up light and to avoid
    # circular-import problems with the rest of sage.modules.
    from sage.matrix.constructor import matrix
    from sage.modules.module import is_VectorSpace
    from sage.modules.free_module import VectorSpace
    from sage.categories.homset import Hom
    from sage.symbolic.ring import SR
    from sage.modules.vector_callable_symbolic_dense import Vector_callable_symbolic_dense

    if side not in ('left', 'right'):
        raise ValueError("side must be 'left' or 'right', not {0}".format(side))
    if not (is_Matrix(arg0) or is_VectorSpace(arg0)):
        raise TypeError('first argument must be a matrix or a vector space, not {0}'.format(arg0))

    # Phase 1: normalize the arguments so that arg0 is the domain and
    # arg1 is the codomain, building them from a lone matrix if needed.
    if is_Matrix(arg0):
        R = arg0.base_ring()
        if not R.is_field():
            try:
                R = R.fraction_field()
            except (NotImplementedError, TypeError):
                msg = 'matrix must have entries from a field, or a ring with a fraction field, not {0}'
                raise TypeError(msg.format(R))
        if side == 'right':
            # Normalize to left action; the transpose represents the same map.
            arg0 = arg0.transpose()
            side = 'left'
        arg2 = arg0
        arg0 = VectorSpace(R, arg2.nrows())
        arg1 = VectorSpace(R, arg2.ncols())
    elif is_VectorSpace(arg0):
        if not is_VectorSpace(arg1):
            msg = 'if first argument is a vector space, then second argument must be a vector space, not {0}'
            raise TypeError(msg.format(arg1))
        if arg0.base_ring() != arg1.base_ring():
            msg = 'vector spaces must have the same field of scalars, not {0} and {1}'
            raise TypeError(msg.format(arg0.base_ring(), arg1.base_ring()))

    # Now arg0 = domain D, arg1 = codomain C, and
    # both are vector spaces with common field of scalars
    # use these to make a VectorSpaceHomSpace
    # arg2 might be a matrix that began in arg0
    D = arg0
    C = arg1
    H = Hom(D, C, category=None)

    # Phase 2: examine arg2 as the "rule" for the linear transformation.
    # Pass on matrices, Python functions and lists to the homspace call;
    # convert a symbolic function here, to a matrix.
    if is_Matrix(arg2):
        if side == 'right':
            arg2 = arg2.transpose()
    elif isinstance(arg2, (list, tuple)):
        pass
    elif isinstance(arg2, Vector_callable_symbolic_dense):
        args = arg2.parent().base_ring()._arguments
        exprs = arg2.change_ring(SR)
        m = len(args)
        n = len(exprs)
        if m != D.degree():
            raise ValueError('symbolic function has the wrong number of inputs for domain')
        if n != C.degree():
            raise ValueError('symbolic function has the wrong number of outputs for codomain')
        # Extract the coefficient of each input variable in each output
        # expression; a nonlinear function fails the matrix conversion below.
        arg2 = [[e.coefficient(a) for e in exprs] for a in args]
        try:
            arg2 = matrix(D.base_ring(), m, n, arg2)
        except TypeError as e:
            msg = 'symbolic function must be linear in all the inputs:\n' + e.args[0]
            raise ValueError(msg)
        # have matrix with respect to standard bases, now consider user bases
        images = [v*arg2 for v in D.basis()]
        try:
            arg2 = matrix([C.coordinates(C(a)) for a in images])
        except (ArithmeticError, TypeError) as e:
            msg = 'some image of the function is not in the codomain, because\n' + e.args[0]
            raise ArithmeticError(msg)
    elif callable(arg2):
        pass
    else:
        msg = 'third argument must be a matrix, function, or list of images, not {0}'
        raise TypeError(msg.format(arg2))

    # arg2 now compatible with homspace H call method
    # __init__ will check matrix sizes versus domain/codomain dimensions
    return H(arg2)
def is_VectorSpaceMorphism(x):
    r"""
    Return ``True`` when ``x`` is a vector space morphism, i.e. a linear
    transformation.

    INPUT:

    ``x`` - anything

    OUTPUT:

    ``True`` only if ``x`` is an instance of a vector space morphism,
    which are also known as linear transformations.

    EXAMPLES::

        sage: V = QQ^2; f = V.hom([V.1,-2*V.0])
        sage: sage.modules.vector_space_morphism.is_VectorSpaceMorphism(f)
        True
        sage: sage.modules.vector_space_morphism.is_VectorSpaceMorphism('junk')
        False
    """
    # Simple type test against the class defined in this module.
    return isinstance(x, VectorSpaceMorphism)
class VectorSpaceMorphism(free_module_morphism.FreeModuleMorphism):
    """
    A morphism between vector spaces (a linear transformation),
    specializing :class:`~sage.modules.free_module_morphism.FreeModuleMorphism`
    to the case where the base ring is a field.
    """

    def __init__(self, homspace, A, side="left"):
        r"""
        Create a linear transformation, a morphism between vector spaces.

        INPUT:

        - ``homspace`` - a homspace (of vector spaces) to serve
          as a parent for the linear transformation and a home for
          the domain and codomain of the morphism

        - ``A`` - a matrix representing the linear transformation,
          which will act on vectors placed to the left of the matrix

        EXAMPLES:

        Nominally, we require a homspace to hold the domain
        and codomain and a matrix representation of the morphism
        (linear transformation). ::

            sage: from sage.modules.vector_space_homspace import VectorSpaceHomspace
            sage: from sage.modules.vector_space_morphism import VectorSpaceMorphism
            sage: H = VectorSpaceHomspace(QQ^3, QQ^2)
            sage: A = matrix(QQ, 3, 2, range(6))
            sage: zeta = VectorSpaceMorphism(H, A)
            sage: zeta
            Vector space morphism represented by the matrix:
            [0 1]
            [2 3]
            [4 5]
            Domain: Vector space of dimension 3 over Rational Field
            Codomain: Vector space of dimension 2 over Rational Field

        See the constructor,
        :func:`sage.modules.vector_space_morphism.linear_transformation`
        for another way to create linear transformations.

        The ``.hom()`` method of a vector space will create a vector
        space morphism. ::

            sage: V = QQ^3; W = V.subspace_with_basis([[1,2,3], [-1,2,5/3], [0,1,-1]])
            sage: phi = V.hom(matrix(QQ, 3, range(9)), codomain=W) # indirect doctest
            sage: type(phi)
            <class 'sage.modules.vector_space_morphism.VectorSpaceMorphism'>

        A matrix may be coerced into a vector space homspace to
        create a vector space morphism. ::

            sage: from sage.modules.vector_space_homspace import VectorSpaceHomspace
            sage: H = VectorSpaceHomspace(QQ^3, QQ^2)
            sage: A = matrix(QQ, 3, 2, range(6))
            sage: rho = H(A) # indirect doctest
            sage: type(rho)
            <class 'sage.modules.vector_space_morphism.VectorSpaceMorphism'>
        """
        if not vector_space_homspace.is_VectorSpaceHomspace(homspace):
            raise TypeError('homspace must be a vector space hom space, not {0}'.format(homspace))
        if isinstance(A, matrix_morphism.MatrixMorphism):
            A = A.matrix()
        if not is_Matrix(A):
            msg = 'input must be a matrix representation or another matrix morphism, not {0}'
            raise TypeError(msg.format(A))
        # now have a vector space homspace, and a matrix, check compatibility
        if side == "left":
            # Vectors act on the left of A: rows match the domain,
            # columns match the codomain.
            if homspace.domain().dimension() != A.nrows():
                raise TypeError('domain dimension is incompatible with matrix size')
            if homspace.codomain().dimension() != A.ncols():
                raise TypeError('codomain dimension is incompatible with matrix size')
        if side == "right":
            # Vectors act on the right of A, so the roles swap: rows match
            # the codomain, columns match the domain.  (Previously the two
            # error messages here were interchanged, and one was
            # inconsistently capitalized.)
            if homspace.codomain().dimension() != A.nrows():
                raise TypeError('codomain dimension is incompatible with matrix size')
            if homspace.domain().dimension() != A.ncols():
                raise TypeError('domain dimension is incompatible with matrix size')
        # Coerce the matrix into the homspace's matrix space for the given
        # side, then defer to the free-module morphism initializer.
        A = homspace._matrix_space(side)(A)
        free_module_morphism.FreeModuleMorphism.__init__(self, homspace, A, side)

    def is_invertible(self):
        r"""
        Determines if the vector space morphism has an inverse.

        OUTPUT:

        ``True`` if the vector space morphism is invertible, otherwise
        ``False``.

        EXAMPLES:

        If the dimension of the domain does not match the dimension
        of the codomain, then the morphism cannot be invertible. ::

            sage: V = QQ^3
            sage: U = V.subspace_with_basis([V.0 + V.1, 2*V.1 + 3*V.2])
            sage: phi = V.hom([U.0, U.0 + U.1, U.0 - U.1], U)
            sage: phi.is_invertible()
            False

        An invertible linear transformation. ::

            sage: A = matrix(QQ, 3, [[-3, 5, -5], [4, -7, 7], [6, -8, 10]])
            sage: A.determinant()
            2
            sage: H = Hom(QQ^3, QQ^3)
            sage: rho = H(A)
            sage: rho.is_invertible()
            True

        A non-invertible linear transformation, an endomorphism of
        a vector space over a finite field. ::

            sage: F.<a> = GF(11^2)
            sage: A = matrix(F, [[6*a + 3, 8*a + 2, 10*a + 3],
            ....:                [2*a + 7, 4*a + 3, 2*a + 3],
            ....:                [9*a + 2, 10*a + 10, 3*a + 3]])
            sage: A.nullity()
            1
            sage: E = End(F^3)
            sage: zeta = E(A)
            sage: zeta.is_invertible()
            False
        """
        # endomorphism or not, this is equivalent to invertibility of
        # the matrix representation, so any test of this will suffice
        m = self.matrix()
        if not m.is_square():
            return False
        return m.rank() == m.ncols()

    def _latex_(self):
        r"""
        A LaTeX representation of this vector space morphism.

        EXAMPLES::

            sage: H = Hom(QQ^3, QQ^2)
            sage: f = H(matrix(3, 2, range(6)))
            sage: f._latex_().split(' ')
            ['\\text{vector', 'space', 'morphism', 'from',
            '}\n\\Bold{Q}^{3}\\text{', 'to', '}\n\\Bold{Q}^{2}\\text{',
            'represented', 'by', 'the', 'matrix',
            '}\n\\left(\\begin{array}{rr}\n0', '&', '1',
            '\\\\\n2', '&', '3', '\\\\\n4', '&', '5\n\\end{array}\\right)']
        """
        s = ('\\text{vector space morphism from }\n', self.domain()._latex_(),
             '\\text{ to }\n', self.codomain()._latex_(),
             '\\text{ represented by the matrix }\n', self.matrix()._latex_())
        return ''.join(s)

    def _repr_(self):
        r"""
        A text representation of this vector space morphism.

        EXAMPLES::

            sage: H = Hom(QQ^3, QQ^2)
            sage: f = H(matrix(3, 2, range(6)))
            sage: f._repr_().split(' ')
            ['Vector', 'space', 'morphism', 'represented', 'by',
            'the', 'matrix:\n[0', '1]\n[2', '3]\n[4', '5]\nDomain:',
            'Vector', 'space', 'of', 'dimension', '3', 'over',
            'Rational', 'Field\nCodomain:', 'Vector', 'space', 'of',
            'dimension', '2', 'over', 'Rational', 'Field']
        """
        m = self.matrix()
        # A right-acting matrix is displayed as the (left-acting) transpose,
        # so note the convention in the text.
        act = ""
        if self.side() == "right":
            act = "as left-multiplication "
        msg = ("Vector space morphism represented {}by the matrix:\n",
               "{!r}\n",
               "Domain: {}\n",
               "Codomain: {}")
        return ''.join(msg).format(act, m, self.domain(), self.codomain())
|
# Generated by Django 3.2.5 on 2021-07-26 20:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the Visit model: a timestamped free-text note attached to a
    # patient record.

    dependencies = [
        # Needs the Patient model created by the previous migration.
        ('clinic_app', '0002_patient'),
    ]

    operations = [
        migrations.CreateModel(
            name='Visit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Stamped once at row creation.
                ('date_visited', models.DateTimeField(auto_now_add=True)),
                # Refreshed automatically on every save.
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('note', models.TextField()),
                # Deleting a patient cascades to their visits.
                ('patient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='clinic_app.patient')),
            ],
        ),
    ]
|
'''
Python module for Mopidy Pummeluff volume tag.
'''
__all__ = (
'Volume',
'VolumeUp',
'VolumeDown',
)
from logging import getLogger
from .base import Action
LOGGER = getLogger(__name__)
class Volume(Action):
    '''
    Sets the volume to the percentage value retrieved from the tag's parameter.
    '''

    @classmethod
    def execute(cls, core, volume):  # pylint: disable=arguments-differ
        '''
        Set volume of the mixer.

        :param mopidy.core.Core core: The mopidy core instance
        :param volume: The new (percentage) volume
        :type volume: int|str
        '''
        LOGGER.info('Setting volume to %s', volume)

        try:
            core.mixer.set_volume(int(volume))
        except ValueError as ex:
            # Best-effort: a non-numeric volume is logged, not raised, so a
            # bad tag cannot crash the handler.
            LOGGER.error(str(ex))

    def validate(self):
        '''
        Validates if the parameter is an integer between 0 and 100.

        :param mixed parameter: The parameter
        :raises ValueError: When parameter is invalid
        '''
        super().validate()

        # Explicit checks instead of ``assert``: asserts are stripped when
        # Python runs with -O, which would silently disable validation.
        try:
            number = int(self.parameter)
        except (TypeError, ValueError):
            raise ValueError('Volume parameter has to be a number between 0 and 100')
        if not 0 <= number <= 100:
            raise ValueError('Volume parameter has to be a number between 0 and 100')
class VolumeUp(Action):
    '''
    Sets the volume up by 5%, capped at 100%.
    '''

    @classmethod
    def execute(cls, core):
        '''
        Increase the volume of the mixer by 5 percentage points, up to 100.

        :param mopidy.core.Core core: The mopidy core instance
        '''
        number = int(core.mixer.get_volume())
        if number < 95:
            number = number + 5
            LOGGER.info('Setting volume to %s', number)
            core.mixer.set_volume(number)
        else:
            # Bug fix: was ``stes_volume`` (typo), which raised
            # AttributeError whenever the volume reached the cap.
            core.mixer.set_volume(100)
            LOGGER.info('Setting volume to 100')
class VolumeDown(Action):
    '''
    Sets the volume down by 5%, floored at 0%.
    '''

    @classmethod
    def execute(cls, core):
        '''
        Decrease the volume of the mixer by 5 percentage points, down to 0.

        :param mopidy.core.Core core: The mopidy core instance
        '''
        number = int(core.mixer.get_volume())
        if number > 5:
            number = number - 5
            LOGGER.info('Setting volume to %s', number)
            core.mixer.set_volume(number)
        else:
            # Bug fix: was ``stes_volume`` (typo), which raised
            # AttributeError whenever the volume reached the floor.
            core.mixer.set_volume(0)
            LOGGER.info('Setting volume to 0')
|
from .target_marker import TargetMarker
|
import numpy as np
import torch
from torch import nn
import copy
from collections import defaultdict
from absl import logging
from musco.pytorch.compressor.decompositions.tucker2 import Tucker2DecomposedLayer
from musco.pytorch.compressor.decompositions.cp3 import CP3DecomposedLayer
from musco.pytorch.compressor.decompositions.cp4 import CP4DecomposedLayer
from musco.pytorch.compressor.decompositions.svd_layer import SVDDecomposedLayer, SVDDecomposedConvLayer
def get_compressed_model(model,
                         layer_names,
                         ranks,
                         decompositions,
                         layer_types,
                         rank_selection,
                         vbmf_weaken_factors=None,
                         param_reduction_rates=None,
                         pretrained=None,
                         return_ranks=False):
    '''Return a deep copy of ``model`` with selected layers replaced by
    low-rank decomposed equivalents.

    :param model: source model; it is deep-copied and left unmodified.
    :param layer_names: list of dotted attribute paths of layers to compress.
    :param ranks: defaultdict mapping layer name -> rank; ``None`` skips the layer.
    :param decompositions: defaultdict mapping layer name -> one of
        'tucker2', 'cp3', 'cp4', 'svd'.
    :param layer_types: defaultdict mapping layer name -> dict with a 'type'
        key (``nn.Conv2d`` or ``nn.Linear`` for 'svd').
    :param rank_selection: rank-selection strategy forwarded to the
        decomposed-layer constructors.
    :param vbmf_weaken_factors: optional defaultdict of per-layer VBMF
        weaken factors.
    :param param_reduction_rates: optional defaultdict of per-layer parameter
        reduction rates.
    :param pretrained: forwarded to the decomposed-layer constructors.
    :param return_ranks: when True, also return a defaultdict with the rank
        actually chosen for each decomposed layer.
    :raises ValueError: on an unknown decomposition name, or an 'svd' layer
        whose type is neither ``nn.Conv2d`` nor ``nn.Linear``.
    '''
    compressed_model = copy.deepcopy(model)
    new_ranks = defaultdict()
    model = None  # drop the reference so the original can be freed

    for lname in layer_names:
        rank = ranks[lname]
        if rank is None:
            logging.info('Skip layer {}'.format(lname))
            continue

        logging.info('Decompose layer {}'.format(lname))
        subm_names = lname.strip().split('.')

        # Resolve the dotted attribute path down to the layer being replaced.
        layer = compressed_model
        for attr in subm_names:
            layer = getattr(layer, attr)

        decomposition = decompositions[lname]
        layer_type = layer_types[lname]['type']

        vbmf_weaken_factor = (None if vbmf_weaken_factors is None
                              else vbmf_weaken_factors[lname])
        param_reduction_rate = (None if param_reduction_rates is None
                                else param_reduction_rates[lname])

        print(lname, decomposition)

        if decomposition == 'tucker2':
            decomposed_layer = Tucker2DecomposedLayer(
                layer,
                subm_names[-1],
                rank_selection,
                rank,
                pretrained=pretrained,
                vbmf_weaken_factor=vbmf_weaken_factor,
                param_reduction_rate=param_reduction_rate)
        elif decomposition == 'cp3':
            decomposed_layer = CP3DecomposedLayer(
                layer,
                subm_names[-1],
                rank_selection,
                rank,
                pretrained=pretrained,
                param_reduction_rate=param_reduction_rate)
        elif decomposition == 'cp4':
            decomposed_layer = CP4DecomposedLayer(
                layer,
                subm_names[-1],
                rank_selection,
                rank,
                pretrained=pretrained,
                param_reduction_rate=param_reduction_rate)
        elif decomposition == 'svd':
            if layer_type == nn.Conv2d:
                svd_cls = SVDDecomposedConvLayer
            elif layer_type == nn.Linear:
                svd_cls = SVDDecomposedLayer
            else:
                # Previously an unsupported type silently reused the
                # decomposed layer from a prior iteration (or raised
                # NameError on the first one); fail loudly instead.
                raise ValueError(
                    "'svd' decomposition supports nn.Conv2d or nn.Linear, "
                    'got {} for layer {}'.format(layer_type, lname))
            decomposed_layer = svd_cls(
                layer,
                subm_names[-1],
                rank_selection,
                rank,
                pretrained=pretrained,
                vbmf_weaken_factor=vbmf_weaken_factor,
                param_reduction_rate=param_reduction_rate)
        else:
            raise ValueError(
                'Unknown decomposition {!r} for layer {}'.format(
                    decomposition, lname))

        # Decomposed layers expose either ``.ranks`` or ``.rank``.
        try:
            new_ranks[lname] = decomposed_layer.ranks
        except AttributeError:  # narrowed from a bare ``except:``
            new_ranks[lname] = decomposed_layer.rank
        logging.info('\t new rank: {}'.format(new_ranks[lname]))

        # Splice the replacement back into its parent module.
        parent = compressed_model
        for attr in subm_names[:-1]:
            parent = getattr(parent, attr)
        setattr(parent, subm_names[-1], decomposed_layer.new_layers)

    if return_ranks:
        return compressed_model, new_ranks
    return compressed_model
|
import glob
import os
import numpy as np
import scipy.io.wavfile as wavfile
import librosa
import argparse
from scipy.io.wavfile import write
def parse_args():
    """Parse command-line options for the resampling script."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "--data_path",
        type=str,
        default='../datasets/vcc2018',
    )
    return arg_parser.parse_args()
if __name__ == "__main__":
    args = parse_args()
    # Without recursive=True, glob treats '**' like '*', so this matches wav
    # files exactly two directory levels below data_path.
    fnames = glob.glob(args.data_path + '/**/**/*.wav')
    for i, fname in enumerate(fnames):
        if i % 100 == 0:
            print('{}/{}'.format(i, len(fnames)))
        x, fs = librosa.core.load(fname)
        if fs != 16000:
            x = librosa.core.resample(x, fs, 16000)
        # Clamp to [-1, 1] before scaling, and scale by 32767 instead of
        # 2**15: a full-scale sample (x == 1.0) times 32768 overflows int16
        # and wraps to -32768, producing a loud click in the output.
        x = np.clip(x, -1.0, 1.0)
        write(fname, 16000, (x * 32767.0).astype(np.int16))
print('1533776805') |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: ondewo/nlu/session.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
from ondewo.nlu import context_pb2 as ondewo_dot_nlu_dot_context__pb2
from ondewo.nlu import intent_pb2 as ondewo_dot_nlu_dot_intent__pb2
from ondewo.nlu import entity_type_pb2 as ondewo_dot_nlu_dot_entity__type__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='ondewo/nlu/session.proto',
package='ondewo.nlu',
syntax='proto3',
serialized_options=b'\n\036com.google.cloud.dialogflow.v2B\014SessionProtoP\001ZDgoogle.golang.org/genproto/googleapis/cloud/dialogflow/v2;dialogflow\370\001\001\242\002\002DF\252\002\032Google.Cloud.Dialogflow.V2',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x18ondewo/nlu/session.proto\x12\nondewo.nlu\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x17google/rpc/status.proto\x1a\x18google/type/latlng.proto\x1a\x18ondewo/nlu/context.proto\x1a\x17ondewo/nlu/intent.proto\x1a\x1condewo/nlu/entity_type.proto\"\x9b\x01\n\x13\x44\x65tectIntentRequest\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x31\n\x0cquery_params\x18\x02 \x01(\x0b\x32\x1b.ondewo.nlu.QueryParameters\x12+\n\x0bquery_input\x18\x03 \x01(\x0b\x32\x16.ondewo.nlu.QueryInput\x12\x13\n\x0binput_audio\x18\x05 \x01(\x0c\"\x86\x01\n\x14\x44\x65tectIntentResponse\x12\x13\n\x0bresponse_id\x18\x01 \x01(\t\x12-\n\x0cquery_result\x18\x02 \x01(\x0b\x32\x17.ondewo.nlu.QueryResult\x12*\n\x0ewebhook_status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"\xb8\x01\n\x0fQueryParameters\x12\x11\n\ttime_zone\x18\x01 \x01(\t\x12)\n\x0cgeo_location\x18\x02 \x01(\x0b\x32\x13.google.type.LatLng\x12%\n\x08\x63ontexts\x18\x03 \x03(\x0b\x32\x13.ondewo.nlu.Context\x12\x16\n\x0ereset_contexts\x18\x04 \x01(\x08\x12(\n\x07payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.Struct\"\x9b\x01\n\nQueryInput\x12\x34\n\x0c\x61udio_config\x18\x01 \x01(\x0b\x32\x1c.ondewo.nlu.InputAudioConfigH\x00\x12%\n\x04text\x18\x02 \x01(\x0b\x32\x15.ondewo.nlu.TextInputH\x00\x12\'\n\x05\x65vent\x18\x03 \x01(\x0b\x32\x16.ondewo.nlu.EventInputH\x00\x42\x07\n\x05input\"\x88\x04\n\x0bQueryResult\x12\x12\n\nquery_text\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x0f \x01(\t\x12%\n\x1dspeech_recognition_confidence\x18\x02 \x01(\x02\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x12+\n\nparameters\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct\x12#\n\x1b\x61ll_required_params_present\x18\x05 \x01(\x08\x12\x18\n\x10\x66ulfillment_text\x18\x06 \x01(\t\x12\x38\n\x14\x66ulfillment_messages\x18\x07 \x03(\x0b\x32\x1a.ondewo.nlu.Intent.Message\x12\x16\n\x0ewebhook_source\x18\x08 \x01(\t\x12\x30\n\x0fwebhook_payload\x18\t 
\x01(\x0b\x32\x17.google.protobuf.Struct\x12,\n\x0foutput_contexts\x18\n \x03(\x0b\x32\x13.ondewo.nlu.Context\x12\"\n\x06intent\x18\x0b \x01(\x0b\x32\x12.ondewo.nlu.Intent\x12#\n\x1bintent_detection_confidence\x18\x0c \x01(\x02\x12\x30\n\x0f\x64iagnostic_info\x18\x0e \x01(\x0b\x32\x17.google.protobuf.Struct\"\xbe\x01\n\x1cStreamingDetectIntentRequest\x12\x0f\n\x07session\x18\x01 \x01(\t\x12\x31\n\x0cquery_params\x18\x02 \x01(\x0b\x32\x1b.ondewo.nlu.QueryParameters\x12+\n\x0bquery_input\x18\x03 \x01(\x0b\x32\x16.ondewo.nlu.QueryInput\x12\x18\n\x10single_utterance\x18\x04 \x01(\x08\x12\x13\n\x0binput_audio\x18\x06 \x01(\x0c\"\xd3\x01\n\x1dStreamingDetectIntentResponse\x12\x13\n\x0bresponse_id\x18\x01 \x01(\t\x12\x42\n\x12recognition_result\x18\x02 \x01(\x0b\x32&.ondewo.nlu.StreamingRecognitionResult\x12-\n\x0cquery_result\x18\x03 \x01(\x0b\x32\x17.ondewo.nlu.QueryResult\x12*\n\x0ewebhook_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.Status\"\xfa\x01\n\x1aStreamingRecognitionResult\x12H\n\x0cmessage_type\x18\x01 \x01(\x0e\x32\x32.ondewo.nlu.StreamingRecognitionResult.MessageType\x12\x12\n\ntranscript\x18\x02 \x01(\t\x12\x10\n\x08is_final\x18\x03 \x01(\x08\x12\x12\n\nconfidence\x18\x04 \x01(\x02\"X\n\x0bMessageType\x12\x1c\n\x18MESSAGE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nTRANSCRIPT\x10\x01\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x02\"\x8d\x01\n\x10InputAudioConfig\x12\x31\n\x0e\x61udio_encoding\x18\x01 \x01(\x0e\x32\x19.ondewo.nlu.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x15\n\rlanguage_code\x18\x03 \x01(\t\x12\x14\n\x0cphrase_hints\x18\x04 \x03(\t\"0\n\tTextInput\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\"^\n\nEventInput\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\nparameters\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x15\n\rlanguage_code\x18\x03 \x01(\t\"\xba\x01\n\x07Session\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12.\n\rsession_steps\x18\x02 
\x03(\x0b\x32\x17.ondewo.nlu.SessionStep\x12-\n\x0csession_info\x18\x03 \x01(\x0b\x32\x17.ondewo.nlu.SessionInfo\"<\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\r\n\tVIEW_FULL\x10\x01\x12\x0f\n\x0bVIEW_SPARSE\x10\x02\"\xb6\x01\n\x0bSessionStep\x12>\n\x15\x64\x65tect_intent_request\x18\x01 \x01(\x0b\x32\x1f.ondewo.nlu.DetectIntentRequest\x12@\n\x16\x64\x65tect_intent_response\x18\x02 \x01(\x0b\x32 .ondewo.nlu.DetectIntentResponse\x12%\n\x08\x63ontexts\x18\x03 \x03(\x0b\x32\x13.ondewo.nlu.Context\"\x8c\x01\n\x17TrackSessionStepRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12-\n\x0csession_step\x18\x02 \x01(\x0b\x32\x17.ondewo.nlu.SessionStep\x12.\n\x0csession_view\x18\x03 \x01(\x0e\x32\x18.ondewo.nlu.Session.View\"\x9c\x01\n\x13ListSessionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\x0csession_view\x18\x02 \x01(\x0e\x32\x18.ondewo.nlu.Session.View\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x31\n\x0esession_filter\x18\x05 \x01(\x0b\x32\x19.ondewo.nlu.SessionFilter\"\x9c\x03\n\rSessionFilter\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t\x12+\n\x0fmatched_intents\x18\x02 \x03(\x0b\x32\x12.ondewo.nlu.Intent\x12\x34\n\x14matched_entity_types\x18\x03 \x03(\x0b\x32\x16.ondewo.nlu.EntityType\x12\"\n\x1amin_intents_confidence_min\x18\x04 \x01(\x02\x12\"\n\x1amin_intents_confidence_max\x18\x05 \x01(\x02\x12\'\n\x1fmin_entity_types_confidence_min\x18\x06 \x01(\x02\x12\'\n\x1fmin_entity_types_confidence_max\x18\x07 \x01(\x02\x12\x10\n\x08\x65\x61rliest\x18\x08 \x01(\x02\x12\x0e\n\x06latest\x18\t \x01(\x02\x12\x18\n\x10min_number_turns\x18\n \x01(\x05\x12\x18\n\x10max_number_turns\x18\x0b \x01(\x05\x12\x0e\n\x06labels\x18\x0c \x03(\t\x12\x10\n\x08user_ids\x18\r \x03(\t\"\xa7\x02\n\x0bSessionInfo\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t\x12+\n\x0fmatched_intents\x18\x02 \x03(\x0b\x32\x12.ondewo.nlu.Intent\x12\x34\n\x14matched_entity_types\x18\x03 \x03(\x0b\x32\x16.ondewo.nlu.EntityType\x12\x1e\n\x16min_intents_confidence\x18\x04 
\x01(\x02\x12#\n\x1bmin_entity_types_confidence\x18\x05 \x01(\x02\x12\x10\n\x08\x65\x61rliest\x18\x06 \x01(\x02\x12\x0e\n\x06latest\x18\x07 \x01(\x02\x12\x14\n\x0cnumber_turns\x18\x08 \x01(\x05\x12\x0e\n\x06labels\x18\t \x03(\t\x12\x10\n\x08user_ids\x18\n \x03(\t\"V\n\x14ListSessionsResponse\x12%\n\x08sessions\x18\x01 \x03(\x0b\x32\x13.ondewo.nlu.Session\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"W\n\x11GetSessionRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12.\n\x0csession_view\x18\x02 \x01(\x0e\x32\x18.ondewo.nlu.Session.View\"<\n\x14\x43reateSessionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x14\n\x0csession_uuid\x18\x02 \x01(\t\"*\n\x14\x44\x65leteSessionRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\"\xba\x01\n\x1a\x43reateSessionReviewRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x18\n\x10parent_review_id\x18\x02 \x01(\t\x12\x31\n\x0esession_review\x18\x03 \x01(\x0b\x32\x19.ondewo.nlu.SessionReview\x12;\n\x13session_review_view\x18\x04 \x01(\x0e\x32\x1e.ondewo.nlu.SessionReview.View\"\xa5\x01\n\rSessionReview\x12\x19\n\x11session_review_id\x18\x01 \x01(\t\x12;\n\x14session_review_steps\x18\x02 \x03(\x0b\x32\x1d.ondewo.nlu.SessionReviewStep\"<\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\r\n\tVIEW_FULL\x10\x01\x12\x0f\n\x0bVIEW_SPARSE\x10\x02\"\xf1\x01\n\x11SessionReviewStep\x12=\n\x12\x61nnotated_usersays\x18\x01 \x01(\x0b\x32!.ondewo.nlu.Intent.TrainingPhrase\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12\x34\n\x10\x64\x65tected_intents\x18\x03 \x03(\x0b\x32\x1a.ondewo.nlu.DetectedIntent\x12%\n\x08\x63ontexts\x18\x04 \x03(\x0b\x32\x13.ondewo.nlu.Context\x12)\n\x0c\x63ontexts_out\x18\x05 \x03(\x0b\x32\x13.ondewo.nlu.Context\"\xb0\x01\n\x0e\x44\x65tectedIntent\x12\"\n\x06intent\x18\x01 \x01(\x0b\x32\x12.ondewo.nlu.Intent\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x11\n\talgorithm\x18\x03 \x01(\t\x12\x38\n\x14\x66ulfillment_messages\x18\x04 \x03(\x0b\x32\x1a.ondewo.nlu.Intent.Message\x12\x1e\n\x16required_param_missing\x18\x05 
\x01(\x08\"*\n\x18ListSessionLabelsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\"+\n\x19ListSessionLabelsResponse\x12\x0e\n\x06labels\x18\x01 \x03(\t\"m\n\x17\x41\x64\x64SessionLabelsRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x0e\n\x06labels\x18\x02 \x03(\t\x12.\n\x0csession_view\x18\x03 \x01(\x0e\x32\x18.ondewo.nlu.Session.View\"p\n\x1aRemoveSessionLabelsRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x0e\n\x06labels\x18\x02 \x03(\t\x12.\n\x0csession_view\x18\x03 \x01(\x0e\x32\x18.ondewo.nlu.Session.View\"\x80\x01\n\x19ListSessionReviewsRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12;\n\x13session_review_view\x18\x02 \x01(\x0e\x32\x1e.ondewo.nlu.SessionReview.View\x12\x12\n\npage_token\x18\x04 \x01(\t\"i\n\x1aListSessionReviewsResponse\x12\x32\n\x0fsession_reviews\x18\x01 \x03(\x0b\x32\x19.ondewo.nlu.SessionReview\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"q\n\x17GetSessionReviewRequest\x12\x19\n\x11session_review_id\x18\x01 \x01(\t\x12;\n\x13session_review_view\x18\x02 \x01(\x0e\x32\x1e.ondewo.nlu.SessionReview.View\"p\n\x1dGetLatestSessionReviewRequest\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12;\n\x13session_review_view\x18\x02 \x01(\x0e\x32\x1e.ondewo.nlu.SessionReview.View*\xfb\x01\n\rAudioEncoding\x12\x1e\n\x1a\x41UDIO_ENCODING_UNSPECIFIED\x10\x00\x12\x1c\n\x18\x41UDIO_ENCODING_LINEAR_16\x10\x01\x12\x17\n\x13\x41UDIO_ENCODING_FLAC\x10\x02\x12\x18\n\x14\x41UDIO_ENCODING_MULAW\x10\x03\x12\x16\n\x12\x41UDIO_ENCODING_AMR\x10\x04\x12\x19\n\x15\x41UDIO_ENCODING_AMR_WB\x10\x05\x12\x1b\n\x17\x41UDIO_ENCODING_OGG_OPUS\x10\x06\x12)\n%AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE\x10\x07\x32\x8b\x10\n\x08Sessions\x12\x94\x01\n\x0c\x44\x65tectIntent\x12\x1f.ondewo.nlu.DetectIntentRequest\x1a 
.ondewo.nlu.DetectIntentResponse\"A\x82\xd3\xe4\x93\x02;\"6/v2/{session=projects/*/agent/sessions/*}:detectIntent:\x01*\x12p\n\x15StreamingDetectIntent\x12(.ondewo.nlu.StreamingDetectIntentRequest\x1a).ondewo.nlu.StreamingDetectIntentResponse(\x01\x30\x01\x12\x81\x01\n\x0cListSessions\x12\x1f.ondewo.nlu.ListSessionsRequest\x1a .ondewo.nlu.ListSessionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v2/{parent=projects/*/agent}/sessions\x12v\n\nGetSession\x12\x1d.ondewo.nlu.GetSessionRequest\x1a\x13.ondewo.nlu.Session\"4\x82\xd3\xe4\x93\x02.\x12,/v2/{session_id=projects/*/agent/sessions/*}\x12y\n\rCreateSession\x12 .ondewo.nlu.CreateSessionRequest\x1a\x13.ondewo.nlu.Session\"1\x82\xd3\xe4\x93\x02+\"&/v2/{parent=projects/*/agent}/sessions:\x01*\x12\x96\x01\n\x10TrackSessionStep\x12#.ondewo.nlu.TrackSessionStepRequest\x1a\x13.ondewo.nlu.Session\"H\x82\xd3\xe4\x93\x02\x42\"=/v2/{session_id=projects/*/agent/sessions/*}:trackSessionStep:\x01*\x12\x7f\n\rDeleteSession\x12 .ondewo.nlu.DeleteSessionRequest\x1a\x16.google.protobuf.Empty\"4\x82\xd3\xe4\x93\x02.*,/v2/{session_id=projects/*/agent/sessions/*}\x12\x97\x01\n\x11ListSessionLabels\x12$.ondewo.nlu.ListSessionLabelsRequest\x1a%.ondewo.nlu.ListSessionLabelsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v2/{parent=projects/*/agent}/sessions/labels\x12\x90\x01\n\x10\x41\x64\x64SessionLabels\x12#.ondewo.nlu.AddSessionLabelsRequest\x1a\x13.ondewo.nlu.Session\"B\x82\xd3\xe4\x93\x02<\"7/v2/{session_id=projects/*/agent/sessions/*}/labels:add:\x01*\x12\x99\x01\n\x13RemoveSessionLabels\x12&.ondewo.nlu.RemoveSessionLabelsRequest\x1a\x13.ondewo.nlu.Session\"E\x82\xd3\xe4\x93\x02?\":/v2/{session_id=projects/*/agent/sessions/*}/labels:remove:\x01*\x12\xa1\x01\n\x12ListSessionReviews\x12%.ondewo.nlu.ListSessionReviewsRequest\x1a&.ondewo.nlu.ListSessionReviewsResponse\"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v2/{session_id=projects/*/agent/sessions/*}/reviews\x12\x99\x01\n\x10GetSessionReview\x12#.ondewo.nlu.GetSessionReviewRequest\x1a\x19.ondewo.nlu.SessionRe
view\"E\x82\xd3\xe4\x93\x02?\x12=/v2/{session_review_id=projects/*/agent/sessions/*/reviews/*}\x12\xb3\x01\n\x16GetLatestSessionReview\x12).ondewo.nlu.GetLatestSessionReviewRequest\x1a\x19.ondewo.nlu.SessionReview\"S\x82\xd3\xe4\x93\x02M\x12K/v2/{session_id=projects/*/agent/sessions/*}/reviews:getLatestSessionReview\x12\xa5\x01\n\x13\x43reateSessionReview\x12&.ondewo.nlu.CreateSessionReviewRequest\x1a\x19.ondewo.nlu.SessionReview\"K\x82\xd3\xe4\x93\x02\x45\"@/v2/{session_id=projects/*/agent/sessions/*}:createSessionReview:\x01*B\x9b\x01\n\x1e\x63om.google.cloud.dialogflow.v2B\x0cSessionProtoP\x01ZDgoogle.golang.org/genproto/googleapis/cloud/dialogflow/v2;dialogflow\xf8\x01\x01\xa2\x02\x02\x44\x46\xaa\x02\x1aGoogle.Cloud.Dialogflow.V2b\x06proto3'
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,ondewo_dot_nlu_dot_context__pb2.DESCRIPTOR,ondewo_dot_nlu_dot_intent__pb2.DESCRIPTOR,ondewo_dot_nlu_dot_entity__type__pb2.DESCRIPTOR,])
# ---------------------------------------------------------------------------
# Machine-generated descriptors (protoc output for ondewo/nlu/session.proto).
# Regenerate from the .proto file instead of editing by hand.
# ---------------------------------------------------------------------------

# EnumDescriptor for the top-level ``ondewo.nlu.AudioEncoding`` enum.
_AUDIOENCODING = _descriptor.EnumDescriptor(
  name='AudioEncoding',
  full_name='ondewo.nlu.AudioEncoding',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_LINEAR_16', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_FLAC', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_MULAW', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_AMR', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_AMR_WB', index=5, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_OGG_OPUS', index=6, number=6,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE', index=7, number=7,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=5608,
  serialized_end=5859,
)
_sym_db.RegisterEnumDescriptor(_AUDIOENCODING)

# Public enum wrapper plus module-level integer aliases for each value.
AudioEncoding = enum_type_wrapper.EnumTypeWrapper(_AUDIOENCODING)
AUDIO_ENCODING_UNSPECIFIED = 0
AUDIO_ENCODING_LINEAR_16 = 1
AUDIO_ENCODING_FLAC = 2
AUDIO_ENCODING_MULAW = 3
AUDIO_ENCODING_AMR = 4
AUDIO_ENCODING_AMR_WB = 5
AUDIO_ENCODING_OGG_OPUS = 6
AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE = 7
# Generated EnumDescriptor for the nested
# ``ondewo.nlu.StreamingRecognitionResult.MessageType`` enum.
_STREAMINGRECOGNITIONRESULT_MESSAGETYPE = _descriptor.EnumDescriptor(
  name='MessageType',
  full_name='ondewo.nlu.StreamingRecognitionResult.MessageType',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='MESSAGE_TYPE_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TRANSCRIPT', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='END_OF_SINGLE_UTTERANCE', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=1994,
  serialized_end=2082,
)
_sym_db.RegisterEnumDescriptor(_STREAMINGRECOGNITIONRESULT_MESSAGETYPE)

# Generated EnumDescriptor for the nested ``ondewo.nlu.Session.View`` enum.
_SESSION_VIEW = _descriptor.EnumDescriptor(
  name='View',
  full_name='ondewo.nlu.Session.View',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='VIEW_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='VIEW_FULL', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='VIEW_SPARSE', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=2501,
  serialized_end=2561,
)
_sym_db.RegisterEnumDescriptor(_SESSION_VIEW)

# Generated EnumDescriptor for the nested ``ondewo.nlu.SessionReview.View``
# enum.
# NOTE(review): carries the same serialized_start/serialized_end offsets as
# _SESSION_VIEW above — emitted this way by protoc; left untouched.
_SESSIONREVIEW_VIEW = _descriptor.EnumDescriptor(
  name='View',
  full_name='ondewo.nlu.SessionReview.View',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='VIEW_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='VIEW_FULL', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='VIEW_SPARSE', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=2501,
  serialized_end=2561,
)
_sym_db.RegisterEnumDescriptor(_SESSIONREVIEW_VIEW)
# Generated message Descriptor for ``ondewo.nlu.DetectIntentRequest``
# (fields: session, query_params, query_input, input_audio).
_DETECTINTENTREQUEST = _descriptor.Descriptor(
  name='DetectIntentRequest',
  full_name='ondewo.nlu.DetectIntentRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session', full_name='ondewo.nlu.DetectIntentRequest.session', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='query_params', full_name='ondewo.nlu.DetectIntentRequest.query_params', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='query_input', full_name='ondewo.nlu.DetectIntentRequest.query_input', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='input_audio', full_name='ondewo.nlu.DetectIntentRequest.input_audio', index=3,
      number=5, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=262,
  serialized_end=417,
)

# Generated message Descriptor for ``ondewo.nlu.DetectIntentResponse``
# (fields: response_id, query_result, webhook_status).
_DETECTINTENTRESPONSE = _descriptor.Descriptor(
  name='DetectIntentResponse',
  full_name='ondewo.nlu.DetectIntentResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='response_id', full_name='ondewo.nlu.DetectIntentResponse.response_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='query_result', full_name='ondewo.nlu.DetectIntentResponse.query_result', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='webhook_status', full_name='ondewo.nlu.DetectIntentResponse.webhook_status', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=420,
  serialized_end=554,
)
# Generated message Descriptor for ``ondewo.nlu.QueryParameters``
# (fields: time_zone, geo_location, contexts, reset_contexts, payload).
_QUERYPARAMETERS = _descriptor.Descriptor(
  name='QueryParameters',
  full_name='ondewo.nlu.QueryParameters',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='time_zone', full_name='ondewo.nlu.QueryParameters.time_zone', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='geo_location', full_name='ondewo.nlu.QueryParameters.geo_location', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='contexts', full_name='ondewo.nlu.QueryParameters.contexts', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='reset_contexts', full_name='ondewo.nlu.QueryParameters.reset_contexts', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='payload', full_name='ondewo.nlu.QueryParameters.payload', index=4,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=557,
  serialized_end=741,
)

# Generated message Descriptor for ``ondewo.nlu.QueryInput``: one of
# audio_config / text / event, grouped under the ``input`` oneof.
_QUERYINPUT = _descriptor.Descriptor(
  name='QueryInput',
  full_name='ondewo.nlu.QueryInput',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='audio_config', full_name='ondewo.nlu.QueryInput.audio_config', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='text', full_name='ondewo.nlu.QueryInput.text', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='event', full_name='ondewo.nlu.QueryInput.event', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='input', full_name='ondewo.nlu.QueryInput.input',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=744,
  serialized_end=899,
)
_QUERYRESULT = _descriptor.Descriptor(
name='QueryResult',
full_name='ondewo.nlu.QueryResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='query_text', full_name='ondewo.nlu.QueryResult.query_text', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language_code', full_name='ondewo.nlu.QueryResult.language_code', index=1,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='speech_recognition_confidence', full_name='ondewo.nlu.QueryResult.speech_recognition_confidence', index=2,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='action', full_name='ondewo.nlu.QueryResult.action', index=3,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parameters', full_name='ondewo.nlu.QueryResult.parameters', index=4,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='all_required_params_present', full_name='ondewo.nlu.QueryResult.all_required_params_present', index=5,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fulfillment_text', full_name='ondewo.nlu.QueryResult.fulfillment_text', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fulfillment_messages', full_name='ondewo.nlu.QueryResult.fulfillment_messages', index=7,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='webhook_source', full_name='ondewo.nlu.QueryResult.webhook_source', index=8,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='webhook_payload', full_name='ondewo.nlu.QueryResult.webhook_payload', index=9,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='output_contexts', full_name='ondewo.nlu.QueryResult.output_contexts', index=10,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='intent', full_name='ondewo.nlu.QueryResult.intent', index=11,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='intent_detection_confidence', full_name='ondewo.nlu.QueryResult.intent_detection_confidence', index=12,
number=12, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='diagnostic_info', full_name='ondewo.nlu.QueryResult.diagnostic_info', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=902,
serialized_end=1422,
)
_STREAMINGDETECTINTENTREQUEST = _descriptor.Descriptor(
name='StreamingDetectIntentRequest',
full_name='ondewo.nlu.StreamingDetectIntentRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='session', full_name='ondewo.nlu.StreamingDetectIntentRequest.session', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='query_params', full_name='ondewo.nlu.StreamingDetectIntentRequest.query_params', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='query_input', full_name='ondewo.nlu.StreamingDetectIntentRequest.query_input', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='single_utterance', full_name='ondewo.nlu.StreamingDetectIntentRequest.single_utterance', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='input_audio', full_name='ondewo.nlu.StreamingDetectIntentRequest.input_audio', index=4,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1425,
serialized_end=1615,
)
_STREAMINGDETECTINTENTRESPONSE = _descriptor.Descriptor(
name='StreamingDetectIntentResponse',
full_name='ondewo.nlu.StreamingDetectIntentResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='response_id', full_name='ondewo.nlu.StreamingDetectIntentResponse.response_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='recognition_result', full_name='ondewo.nlu.StreamingDetectIntentResponse.recognition_result', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='query_result', full_name='ondewo.nlu.StreamingDetectIntentResponse.query_result', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='webhook_status', full_name='ondewo.nlu.StreamingDetectIntentResponse.webhook_status', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1618,
serialized_end=1829,
)
_STREAMINGRECOGNITIONRESULT = _descriptor.Descriptor(
name='StreamingRecognitionResult',
full_name='ondewo.nlu.StreamingRecognitionResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='message_type', full_name='ondewo.nlu.StreamingRecognitionResult.message_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='transcript', full_name='ondewo.nlu.StreamingRecognitionResult.transcript', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_final', full_name='ondewo.nlu.StreamingRecognitionResult.is_final', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='confidence', full_name='ondewo.nlu.StreamingRecognitionResult.confidence', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_STREAMINGRECOGNITIONRESULT_MESSAGETYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1832,
serialized_end=2082,
)
_INPUTAUDIOCONFIG = _descriptor.Descriptor(
name='InputAudioConfig',
full_name='ondewo.nlu.InputAudioConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='audio_encoding', full_name='ondewo.nlu.InputAudioConfig.audio_encoding', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sample_rate_hertz', full_name='ondewo.nlu.InputAudioConfig.sample_rate_hertz', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language_code', full_name='ondewo.nlu.InputAudioConfig.language_code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='phrase_hints', full_name='ondewo.nlu.InputAudioConfig.phrase_hints', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2085,
serialized_end=2226,
)
_TEXTINPUT = _descriptor.Descriptor(
name='TextInput',
full_name='ondewo.nlu.TextInput',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='text', full_name='ondewo.nlu.TextInput.text', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language_code', full_name='ondewo.nlu.TextInput.language_code', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2228,
serialized_end=2276,
)
_EVENTINPUT = _descriptor.Descriptor(
name='EventInput',
full_name='ondewo.nlu.EventInput',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='ondewo.nlu.EventInput.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parameters', full_name='ondewo.nlu.EventInput.parameters', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language_code', full_name='ondewo.nlu.EventInput.language_code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2278,
serialized_end=2372,
)
_SESSION = _descriptor.Descriptor(
name='Session',
full_name='ondewo.nlu.Session',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='session_id', full_name='ondewo.nlu.Session.session_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='session_steps', full_name='ondewo.nlu.Session.session_steps', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='session_info', full_name='ondewo.nlu.Session.session_info', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_SESSION_VIEW,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2375,
serialized_end=2561,
)
_SESSIONSTEP = _descriptor.Descriptor(
name='SessionStep',
full_name='ondewo.nlu.SessionStep',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='detect_intent_request', full_name='ondewo.nlu.SessionStep.detect_intent_request', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='detect_intent_response', full_name='ondewo.nlu.SessionStep.detect_intent_response', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='contexts', full_name='ondewo.nlu.SessionStep.contexts', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2564,
serialized_end=2746,
)
_TRACKSESSIONSTEPREQUEST = _descriptor.Descriptor(
name='TrackSessionStepRequest',
full_name='ondewo.nlu.TrackSessionStepRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='session_id', full_name='ondewo.nlu.TrackSessionStepRequest.session_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='session_step', full_name='ondewo.nlu.TrackSessionStepRequest.session_step', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='session_view', full_name='ondewo.nlu.TrackSessionStepRequest.session_view', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2749,
serialized_end=2889,
)
_LISTSESSIONSREQUEST = _descriptor.Descriptor(
name='ListSessionsRequest',
full_name='ondewo.nlu.ListSessionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='parent', full_name='ondewo.nlu.ListSessionsRequest.parent', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='session_view', full_name='ondewo.nlu.ListSessionsRequest.session_view', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='page_token', full_name='ondewo.nlu.ListSessionsRequest.page_token', index=2,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='session_filter', full_name='ondewo.nlu.ListSessionsRequest.session_filter', index=3,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2892,
serialized_end=3048,
)
_SESSIONFILTER = _descriptor.Descriptor(
name='SessionFilter',
full_name='ondewo.nlu.SessionFilter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='language_codes', full_name='ondewo.nlu.SessionFilter.language_codes', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='matched_intents', full_name='ondewo.nlu.SessionFilter.matched_intents', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='matched_entity_types', full_name='ondewo.nlu.SessionFilter.matched_entity_types', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_intents_confidence_min', full_name='ondewo.nlu.SessionFilter.min_intents_confidence_min', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_intents_confidence_max', full_name='ondewo.nlu.SessionFilter.min_intents_confidence_max', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_entity_types_confidence_min', full_name='ondewo.nlu.SessionFilter.min_entity_types_confidence_min', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_entity_types_confidence_max', full_name='ondewo.nlu.SessionFilter.min_entity_types_confidence_max', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='earliest', full_name='ondewo.nlu.SessionFilter.earliest', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='latest', full_name='ondewo.nlu.SessionFilter.latest', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_number_turns', full_name='ondewo.nlu.SessionFilter.min_number_turns', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_number_turns', full_name='ondewo.nlu.SessionFilter.max_number_turns', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='ondewo.nlu.SessionFilter.labels', index=11,
number=12, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_ids', full_name='ondewo.nlu.SessionFilter.user_ids', index=12,
number=13, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3051,
serialized_end=3463,
)
_SESSIONINFO = _descriptor.Descriptor(
name='SessionInfo',
full_name='ondewo.nlu.SessionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='language_codes', full_name='ondewo.nlu.SessionInfo.language_codes', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='matched_intents', full_name='ondewo.nlu.SessionInfo.matched_intents', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='matched_entity_types', full_name='ondewo.nlu.SessionInfo.matched_entity_types', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_intents_confidence', full_name='ondewo.nlu.SessionInfo.min_intents_confidence', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_entity_types_confidence', full_name='ondewo.nlu.SessionInfo.min_entity_types_confidence', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='earliest', full_name='ondewo.nlu.SessionInfo.earliest', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='latest', full_name='ondewo.nlu.SessionInfo.latest', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='number_turns', full_name='ondewo.nlu.SessionInfo.number_turns', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='ondewo.nlu.SessionInfo.labels', index=8,
number=9, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user_ids', full_name='ondewo.nlu.SessionInfo.user_ids', index=9,
number=10, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3466,
serialized_end=3761,
)
# -- Generated protobuf descriptors (do not hand-edit; regenerate from the .proto). --
# Each Descriptor below mirrors one message in ondewo/nlu/session.proto.
# serialized_start/serialized_end are byte offsets into the file's serialized_pb
# and must stay in sync with the compiled proto.

# Descriptor for ListSessionsResponse: a page of sessions plus a pagination token.
_LISTSESSIONSRESPONSE = _descriptor.Descriptor(
  name='ListSessionsResponse',
  full_name='ondewo.nlu.ListSessionsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # repeated message field (label=3, type=11) — resolved to _SESSION in the wiring section.
    _descriptor.FieldDescriptor(
      name='sessions', full_name='ondewo.nlu.ListSessionsResponse.sessions', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='next_page_token', full_name='ondewo.nlu.ListSessionsResponse.next_page_token', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3763,
  serialized_end=3849,
)

# Descriptor for GetSessionRequest: session id plus a view enum selecting the
# level of detail returned (enum resolved to _SESSION_VIEW in the wiring section).
_GETSESSIONREQUEST = _descriptor.Descriptor(
  name='GetSessionRequest',
  full_name='ondewo.nlu.GetSessionRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_id', full_name='ondewo.nlu.GetSessionRequest.session_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # enum field (type=14); default 0 is the enum's first value.
    _descriptor.FieldDescriptor(
      name='session_view', full_name='ondewo.nlu.GetSessionRequest.session_view', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3851,
  serialized_end=3938,
)

# Descriptor for CreateSessionRequest: parent resource name and an optional
# caller-supplied session UUID.
_CREATESESSIONREQUEST = _descriptor.Descriptor(
  name='CreateSessionRequest',
  full_name='ondewo.nlu.CreateSessionRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='parent', full_name='ondewo.nlu.CreateSessionRequest.parent', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='session_uuid', full_name='ondewo.nlu.CreateSessionRequest.session_uuid', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3940,
  serialized_end=4000,
)

# Descriptor for DeleteSessionRequest: identifies the session to delete.
_DELETESESSIONREQUEST = _descriptor.Descriptor(
  name='DeleteSessionRequest',
  full_name='ondewo.nlu.DeleteSessionRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_id', full_name='ondewo.nlu.DeleteSessionRequest.session_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4002,
  serialized_end=4044,
)
# Descriptor for CreateSessionReviewRequest: attaches a new SessionReview to a
# session, optionally chained to a parent review; the message-typed
# 'session_review' field and the 'session_review_view' enum are resolved in the
# wiring section below.
_CREATESESSIONREVIEWREQUEST = _descriptor.Descriptor(
  name='CreateSessionReviewRequest',
  full_name='ondewo.nlu.CreateSessionReviewRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_id', full_name='ondewo.nlu.CreateSessionReviewRequest.session_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='parent_review_id', full_name='ondewo.nlu.CreateSessionReviewRequest.parent_review_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # singular message field (type=11) — resolved to _SESSIONREVIEW later.
    _descriptor.FieldDescriptor(
      name='session_review', full_name='ondewo.nlu.CreateSessionReviewRequest.session_review', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='session_review_view', full_name='ondewo.nlu.CreateSessionReviewRequest.session_review_view', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4047,
  serialized_end=4233,
)

# Descriptor for SessionReview: a review id plus its ordered review steps.
# Declares the nested View enum (_SESSIONREVIEW_VIEW, defined earlier in the file).
_SESSIONREVIEW = _descriptor.Descriptor(
  name='SessionReview',
  full_name='ondewo.nlu.SessionReview',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_review_id', full_name='ondewo.nlu.SessionReview.session_review_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # repeated message field — resolved to _SESSIONREVIEWSTEP later.
    _descriptor.FieldDescriptor(
      name='session_review_steps', full_name='ondewo.nlu.SessionReview.session_review_steps', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SESSIONREVIEW_VIEW,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4236,
  serialized_end=4401,
)

# Descriptor for SessionReviewStep: one reviewed turn — the annotated user
# utterance, its language, the detected intents, and the contexts before/after.
_SESSIONREVIEWSTEP = _descriptor.Descriptor(
  name='SessionReviewStep',
  full_name='ondewo.nlu.SessionReviewStep',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='annotated_usersays', full_name='ondewo.nlu.SessionReviewStep.annotated_usersays', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='language_code', full_name='ondewo.nlu.SessionReviewStep.language_code', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='detected_intents', full_name='ondewo.nlu.SessionReviewStep.detected_intents', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='contexts', full_name='ondewo.nlu.SessionReviewStep.contexts', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='contexts_out', full_name='ondewo.nlu.SessionReviewStep.contexts_out', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4404,
  serialized_end=4645,
)
# Descriptor for DetectedIntent: one intent hypothesis — the intent, its score
# (float, type=2), the producing algorithm, fulfillment messages, and whether a
# required parameter was missing (bool, type=8).
_DETECTEDINTENT = _descriptor.Descriptor(
  name='DetectedIntent',
  full_name='ondewo.nlu.DetectedIntent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='intent', full_name='ondewo.nlu.DetectedIntent.intent', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='score', full_name='ondewo.nlu.DetectedIntent.score', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='algorithm', full_name='ondewo.nlu.DetectedIntent.algorithm', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='fulfillment_messages', full_name='ondewo.nlu.DetectedIntent.fulfillment_messages', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='required_param_missing', full_name='ondewo.nlu.DetectedIntent.required_param_missing', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4648,
  serialized_end=4824,
)

# Descriptor for ListSessionLabelsRequest: names the parent whose labels to list.
_LISTSESSIONLABELSREQUEST = _descriptor.Descriptor(
  name='ListSessionLabelsRequest',
  full_name='ondewo.nlu.ListSessionLabelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='parent', full_name='ondewo.nlu.ListSessionLabelsRequest.parent', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4826,
  serialized_end=4868,
)

# Descriptor for ListSessionLabelsResponse: a repeated string list of labels.
_LISTSESSIONLABELSRESPONSE = _descriptor.Descriptor(
  name='ListSessionLabelsResponse',
  full_name='ondewo.nlu.ListSessionLabelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='labels', full_name='ondewo.nlu.ListSessionLabelsResponse.labels', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4870,
  serialized_end=4913,
)
# Descriptor for AddSessionLabelsRequest: session id, labels to add, and the
# session view to return.  Mirrors RemoveSessionLabelsRequest below.
_ADDSESSIONLABELSREQUEST = _descriptor.Descriptor(
  name='AddSessionLabelsRequest',
  full_name='ondewo.nlu.AddSessionLabelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_id', full_name='ondewo.nlu.AddSessionLabelsRequest.session_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='labels', full_name='ondewo.nlu.AddSessionLabelsRequest.labels', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='session_view', full_name='ondewo.nlu.AddSessionLabelsRequest.session_view', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4915,
  serialized_end=5024,
)

# Descriptor for RemoveSessionLabelsRequest: same shape as
# AddSessionLabelsRequest, removing labels instead of adding them.
_REMOVESESSIONLABELSREQUEST = _descriptor.Descriptor(
  name='RemoveSessionLabelsRequest',
  full_name='ondewo.nlu.RemoveSessionLabelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_id', full_name='ondewo.nlu.RemoveSessionLabelsRequest.session_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='labels', full_name='ondewo.nlu.RemoveSessionLabelsRequest.labels', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='session_view', full_name='ondewo.nlu.RemoveSessionLabelsRequest.session_view', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5026,
  serialized_end=5138,
)

# Descriptor for ListSessionReviewsRequest: session id, review view enum, and a
# pagination token.
_LISTSESSIONREVIEWSREQUEST = _descriptor.Descriptor(
  name='ListSessionReviewsRequest',
  full_name='ondewo.nlu.ListSessionReviewsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_id', full_name='ondewo.nlu.ListSessionReviewsRequest.session_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='session_review_view', full_name='ondewo.nlu.ListSessionReviewsRequest.session_review_view', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # NOTE: field number jumps from 2 to 4 — field number 3 is not used by this
    # message in the .proto (reason not visible here; presumably removed/reserved).
    _descriptor.FieldDescriptor(
      name='page_token', full_name='ondewo.nlu.ListSessionReviewsRequest.page_token', index=2,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5141,
  serialized_end=5269,
)
# Descriptor for ListSessionReviewsResponse: a page of session reviews plus a
# pagination token.
_LISTSESSIONREVIEWSRESPONSE = _descriptor.Descriptor(
  name='ListSessionReviewsResponse',
  full_name='ondewo.nlu.ListSessionReviewsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # repeated message field — resolved to _SESSIONREVIEW in the wiring section.
    _descriptor.FieldDescriptor(
      name='session_reviews', full_name='ondewo.nlu.ListSessionReviewsResponse.session_reviews', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='next_page_token', full_name='ondewo.nlu.ListSessionReviewsResponse.next_page_token', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5271,
  serialized_end=5376,
)

# Descriptor for GetSessionReviewRequest: review id plus the review view enum.
_GETSESSIONREVIEWREQUEST = _descriptor.Descriptor(
  name='GetSessionReviewRequest',
  full_name='ondewo.nlu.GetSessionReviewRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_review_id', full_name='ondewo.nlu.GetSessionReviewRequest.session_review_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='session_review_view', full_name='ondewo.nlu.GetSessionReviewRequest.session_review_view', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5378,
  serialized_end=5491,
)

# Descriptor for GetLatestSessionReviewRequest: like GetSessionReviewRequest but
# keyed by session id (fetches that session's most recent review).
_GETLATESTSESSIONREVIEWREQUEST = _descriptor.Descriptor(
  name='GetLatestSessionReviewRequest',
  full_name='ondewo.nlu.GetLatestSessionReviewRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='session_id', full_name='ondewo.nlu.GetLatestSessionReviewRequest.session_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='session_review_view', full_name='ondewo.nlu.GetLatestSessionReviewRequest.session_review_view', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5493,
  serialized_end=5605,
)
# ---------------------------------------------------------------------------
# Cross-reference resolution (generated): patch each message-/enum-typed field
# with the Descriptor it refers to, and wire oneof membership.  Order matters:
# this must run after all Descriptors above are constructed and before the
# file descriptor is registered.
# ---------------------------------------------------------------------------
_DETECTINTENTREQUEST.fields_by_name['query_params'].message_type = _QUERYPARAMETERS
_DETECTINTENTREQUEST.fields_by_name['query_input'].message_type = _QUERYINPUT
_DETECTINTENTRESPONSE.fields_by_name['query_result'].message_type = _QUERYRESULT
_DETECTINTENTRESPONSE.fields_by_name['webhook_status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_QUERYPARAMETERS.fields_by_name['geo_location'].message_type = google_dot_type_dot_latlng__pb2._LATLNG
_QUERYPARAMETERS.fields_by_name['contexts'].message_type = ondewo_dot_nlu_dot_context__pb2._CONTEXT
_QUERYPARAMETERS.fields_by_name['payload'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_QUERYINPUT.fields_by_name['audio_config'].message_type = _INPUTAUDIOCONFIG
_QUERYINPUT.fields_by_name['text'].message_type = _TEXTINPUT
_QUERYINPUT.fields_by_name['event'].message_type = _EVENTINPUT
# QueryInput.input is a oneof over audio_config | text | event: each member is
# appended to the oneof and back-linked via containing_oneof.
_QUERYINPUT.oneofs_by_name['input'].fields.append(
  _QUERYINPUT.fields_by_name['audio_config'])
_QUERYINPUT.fields_by_name['audio_config'].containing_oneof = _QUERYINPUT.oneofs_by_name['input']
_QUERYINPUT.oneofs_by_name['input'].fields.append(
  _QUERYINPUT.fields_by_name['text'])
_QUERYINPUT.fields_by_name['text'].containing_oneof = _QUERYINPUT.oneofs_by_name['input']
_QUERYINPUT.oneofs_by_name['input'].fields.append(
  _QUERYINPUT.fields_by_name['event'])
_QUERYINPUT.fields_by_name['event'].containing_oneof = _QUERYINPUT.oneofs_by_name['input']
_QUERYRESULT.fields_by_name['parameters'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_QUERYRESULT.fields_by_name['fulfillment_messages'].message_type = ondewo_dot_nlu_dot_intent__pb2._INTENT_MESSAGE
_QUERYRESULT.fields_by_name['webhook_payload'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_QUERYRESULT.fields_by_name['output_contexts'].message_type = ondewo_dot_nlu_dot_context__pb2._CONTEXT
_QUERYRESULT.fields_by_name['intent'].message_type = ondewo_dot_nlu_dot_intent__pb2._INTENT
_QUERYRESULT.fields_by_name['diagnostic_info'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_STREAMINGDETECTINTENTREQUEST.fields_by_name['query_params'].message_type = _QUERYPARAMETERS
_STREAMINGDETECTINTENTREQUEST.fields_by_name['query_input'].message_type = _QUERYINPUT
_STREAMINGDETECTINTENTRESPONSE.fields_by_name['recognition_result'].message_type = _STREAMINGRECOGNITIONRESULT
_STREAMINGDETECTINTENTRESPONSE.fields_by_name['query_result'].message_type = _QUERYRESULT
_STREAMINGDETECTINTENTRESPONSE.fields_by_name['webhook_status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_STREAMINGRECOGNITIONRESULT.fields_by_name['message_type'].enum_type = _STREAMINGRECOGNITIONRESULT_MESSAGETYPE
_STREAMINGRECOGNITIONRESULT_MESSAGETYPE.containing_type = _STREAMINGRECOGNITIONRESULT
_INPUTAUDIOCONFIG.fields_by_name['audio_encoding'].enum_type = _AUDIOENCODING
_EVENTINPUT.fields_by_name['parameters'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_SESSION.fields_by_name['session_steps'].message_type = _SESSIONSTEP
_SESSION.fields_by_name['session_info'].message_type = _SESSIONINFO
_SESSION_VIEW.containing_type = _SESSION
_SESSIONSTEP.fields_by_name['detect_intent_request'].message_type = _DETECTINTENTREQUEST
_SESSIONSTEP.fields_by_name['detect_intent_response'].message_type = _DETECTINTENTRESPONSE
_SESSIONSTEP.fields_by_name['contexts'].message_type = ondewo_dot_nlu_dot_context__pb2._CONTEXT
_TRACKSESSIONSTEPREQUEST.fields_by_name['session_step'].message_type = _SESSIONSTEP
_TRACKSESSIONSTEPREQUEST.fields_by_name['session_view'].enum_type = _SESSION_VIEW
_LISTSESSIONSREQUEST.fields_by_name['session_view'].enum_type = _SESSION_VIEW
_LISTSESSIONSREQUEST.fields_by_name['session_filter'].message_type = _SESSIONFILTER
_SESSIONFILTER.fields_by_name['matched_intents'].message_type = ondewo_dot_nlu_dot_intent__pb2._INTENT
_SESSIONFILTER.fields_by_name['matched_entity_types'].message_type = ondewo_dot_nlu_dot_entity__type__pb2._ENTITYTYPE
_SESSIONINFO.fields_by_name['matched_intents'].message_type = ondewo_dot_nlu_dot_intent__pb2._INTENT
_SESSIONINFO.fields_by_name['matched_entity_types'].message_type = ondewo_dot_nlu_dot_entity__type__pb2._ENTITYTYPE
_LISTSESSIONSRESPONSE.fields_by_name['sessions'].message_type = _SESSION
_GETSESSIONREQUEST.fields_by_name['session_view'].enum_type = _SESSION_VIEW
_CREATESESSIONREVIEWREQUEST.fields_by_name['session_review'].message_type = _SESSIONREVIEW
_CREATESESSIONREVIEWREQUEST.fields_by_name['session_review_view'].enum_type = _SESSIONREVIEW_VIEW
_SESSIONREVIEW.fields_by_name['session_review_steps'].message_type = _SESSIONREVIEWSTEP
_SESSIONREVIEW_VIEW.containing_type = _SESSIONREVIEW
_SESSIONREVIEWSTEP.fields_by_name['annotated_usersays'].message_type = ondewo_dot_nlu_dot_intent__pb2._INTENT_TRAININGPHRASE
_SESSIONREVIEWSTEP.fields_by_name['detected_intents'].message_type = _DETECTEDINTENT
_SESSIONREVIEWSTEP.fields_by_name['contexts'].message_type = ondewo_dot_nlu_dot_context__pb2._CONTEXT
_SESSIONREVIEWSTEP.fields_by_name['contexts_out'].message_type = ondewo_dot_nlu_dot_context__pb2._CONTEXT
_DETECTEDINTENT.fields_by_name['intent'].message_type = ondewo_dot_nlu_dot_intent__pb2._INTENT
_DETECTEDINTENT.fields_by_name['fulfillment_messages'].message_type = ondewo_dot_nlu_dot_intent__pb2._INTENT_MESSAGE
_ADDSESSIONLABELSREQUEST.fields_by_name['session_view'].enum_type = _SESSION_VIEW
_REMOVESESSIONLABELSREQUEST.fields_by_name['session_view'].enum_type = _SESSION_VIEW
_LISTSESSIONREVIEWSREQUEST.fields_by_name['session_review_view'].enum_type = _SESSIONREVIEW_VIEW
_LISTSESSIONREVIEWSRESPONSE.fields_by_name['session_reviews'].message_type = _SESSIONREVIEW
_GETSESSIONREVIEWREQUEST.fields_by_name['session_review_view'].enum_type = _SESSIONREVIEW_VIEW
_GETLATESTSESSIONREVIEWREQUEST.fields_by_name['session_review_view'].enum_type = _SESSIONREVIEW_VIEW
# Publish every top-level message (and the AudioEncoding enum) on the file
# descriptor, then register the file with the default symbol database so the
# messages are discoverable by full name.
DESCRIPTOR.message_types_by_name['DetectIntentRequest'] = _DETECTINTENTREQUEST
DESCRIPTOR.message_types_by_name['DetectIntentResponse'] = _DETECTINTENTRESPONSE
DESCRIPTOR.message_types_by_name['QueryParameters'] = _QUERYPARAMETERS
DESCRIPTOR.message_types_by_name['QueryInput'] = _QUERYINPUT
DESCRIPTOR.message_types_by_name['QueryResult'] = _QUERYRESULT
DESCRIPTOR.message_types_by_name['StreamingDetectIntentRequest'] = _STREAMINGDETECTINTENTREQUEST
DESCRIPTOR.message_types_by_name['StreamingDetectIntentResponse'] = _STREAMINGDETECTINTENTRESPONSE
DESCRIPTOR.message_types_by_name['StreamingRecognitionResult'] = _STREAMINGRECOGNITIONRESULT
DESCRIPTOR.message_types_by_name['InputAudioConfig'] = _INPUTAUDIOCONFIG
DESCRIPTOR.message_types_by_name['TextInput'] = _TEXTINPUT
DESCRIPTOR.message_types_by_name['EventInput'] = _EVENTINPUT
DESCRIPTOR.message_types_by_name['Session'] = _SESSION
DESCRIPTOR.message_types_by_name['SessionStep'] = _SESSIONSTEP
DESCRIPTOR.message_types_by_name['TrackSessionStepRequest'] = _TRACKSESSIONSTEPREQUEST
DESCRIPTOR.message_types_by_name['ListSessionsRequest'] = _LISTSESSIONSREQUEST
DESCRIPTOR.message_types_by_name['SessionFilter'] = _SESSIONFILTER
DESCRIPTOR.message_types_by_name['SessionInfo'] = _SESSIONINFO
DESCRIPTOR.message_types_by_name['ListSessionsResponse'] = _LISTSESSIONSRESPONSE
DESCRIPTOR.message_types_by_name['GetSessionRequest'] = _GETSESSIONREQUEST
DESCRIPTOR.message_types_by_name['CreateSessionRequest'] = _CREATESESSIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteSessionRequest'] = _DELETESESSIONREQUEST
DESCRIPTOR.message_types_by_name['CreateSessionReviewRequest'] = _CREATESESSIONREVIEWREQUEST
DESCRIPTOR.message_types_by_name['SessionReview'] = _SESSIONREVIEW
DESCRIPTOR.message_types_by_name['SessionReviewStep'] = _SESSIONREVIEWSTEP
DESCRIPTOR.message_types_by_name['DetectedIntent'] = _DETECTEDINTENT
DESCRIPTOR.message_types_by_name['ListSessionLabelsRequest'] = _LISTSESSIONLABELSREQUEST
DESCRIPTOR.message_types_by_name['ListSessionLabelsResponse'] = _LISTSESSIONLABELSRESPONSE
DESCRIPTOR.message_types_by_name['AddSessionLabelsRequest'] = _ADDSESSIONLABELSREQUEST
DESCRIPTOR.message_types_by_name['RemoveSessionLabelsRequest'] = _REMOVESESSIONLABELSREQUEST
DESCRIPTOR.message_types_by_name['ListSessionReviewsRequest'] = _LISTSESSIONREVIEWSREQUEST
DESCRIPTOR.message_types_by_name['ListSessionReviewsResponse'] = _LISTSESSIONREVIEWSRESPONSE
DESCRIPTOR.message_types_by_name['GetSessionReviewRequest'] = _GETSESSIONREVIEWREQUEST
DESCRIPTOR.message_types_by_name['GetLatestSessionReviewRequest'] = _GETLATESTSESSIONREVIEWREQUEST
DESCRIPTOR.enum_types_by_name['AudioEncoding'] = _AUDIOENCODING
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Concrete message classes: for each registered Descriptor, build a Python
# Message subclass via the reflection API and register it with the symbol
# database.  These class names are the module's public API.
# ---------------------------------------------------------------------------
DetectIntentRequest = _reflection.GeneratedProtocolMessageType('DetectIntentRequest', (_message.Message,), {
  'DESCRIPTOR' : _DETECTINTENTREQUEST,
  '__module__' : 'ondewo.nlu.session_pb2'
  # @@protoc_insertion_point(class_scope:ondewo.nlu.DetectIntentRequest)
  })
_sym_db.RegisterMessage(DetectIntentRequest)
DetectIntentResponse = _reflection.GeneratedProtocolMessageType('DetectIntentResponse', (_message.Message,), {
  'DESCRIPTOR' : _DETECTINTENTRESPONSE,
  '__module__' : 'ondewo.nlu.session_pb2'
  # @@protoc_insertion_point(class_scope:ondewo.nlu.DetectIntentResponse)
  })
_sym_db.RegisterMessage(DetectIntentResponse)
QueryParameters = _reflection.GeneratedProtocolMessageType('QueryParameters', (_message.Message,), {
  'DESCRIPTOR' : _QUERYPARAMETERS,
  '__module__' : 'ondewo.nlu.session_pb2'
  # @@protoc_insertion_point(class_scope:ondewo.nlu.QueryParameters)
  })
_sym_db.RegisterMessage(QueryParameters)
QueryInput = _reflection.GeneratedProtocolMessageType('QueryInput', (_message.Message,), {
  'DESCRIPTOR' : _QUERYINPUT,
  '__module__' : 'ondewo.nlu.session_pb2'
  # @@protoc_insertion_point(class_scope:ondewo.nlu.QueryInput)
  })
_sym_db.RegisterMessage(QueryInput)
QueryResult = _reflection.GeneratedProtocolMessageType('QueryResult', (_message.Message,), {
  'DESCRIPTOR' : _QUERYRESULT,
  '__module__' : 'ondewo.nlu.session_pb2'
  # @@protoc_insertion_point(class_scope:ondewo.nlu.QueryResult)
  })
_sym_db.RegisterMessage(QueryResult)
StreamingDetectIntentRequest = _reflection.GeneratedProtocolMessageType('StreamingDetectIntentRequest', (_message.Message,), {
  'DESCRIPTOR' : _STREAMINGDETECTINTENTREQUEST,
  '__module__' : 'ondewo.nlu.session_pb2'
  # @@protoc_insertion_point(class_scope:ondewo.nlu.StreamingDetectIntentRequest)
  })
_sym_db.RegisterMessage(StreamingDetectIntentRequest)
StreamingDetectIntentResponse = _reflection.GeneratedProtocolMessageType('StreamingDetectIntentResponse', (_message.Message,), {
  'DESCRIPTOR' : _STREAMINGDETECTINTENTRESPONSE,
  '__module__' : 'ondewo.nlu.session_pb2'
  # @@protoc_insertion_point(class_scope:ondewo.nlu.StreamingDetectIntentResponse)
  })
_sym_db.RegisterMessage(StreamingDetectIntentResponse)
StreamingRecognitionResult = _reflection.GeneratedProtocolMessageType('StreamingRecognitionResult', (_message.Message,), {
'DESCRIPTOR' : _STREAMINGRECOGNITIONRESULT,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.StreamingRecognitionResult)
})
_sym_db.RegisterMessage(StreamingRecognitionResult)
InputAudioConfig = _reflection.GeneratedProtocolMessageType('InputAudioConfig', (_message.Message,), {
'DESCRIPTOR' : _INPUTAUDIOCONFIG,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.InputAudioConfig)
})
_sym_db.RegisterMessage(InputAudioConfig)
TextInput = _reflection.GeneratedProtocolMessageType('TextInput', (_message.Message,), {
'DESCRIPTOR' : _TEXTINPUT,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.TextInput)
})
_sym_db.RegisterMessage(TextInput)
EventInput = _reflection.GeneratedProtocolMessageType('EventInput', (_message.Message,), {
'DESCRIPTOR' : _EVENTINPUT,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.EventInput)
})
_sym_db.RegisterMessage(EventInput)
Session = _reflection.GeneratedProtocolMessageType('Session', (_message.Message,), {
'DESCRIPTOR' : _SESSION,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.Session)
})
_sym_db.RegisterMessage(Session)
SessionStep = _reflection.GeneratedProtocolMessageType('SessionStep', (_message.Message,), {
'DESCRIPTOR' : _SESSIONSTEP,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.SessionStep)
})
_sym_db.RegisterMessage(SessionStep)
TrackSessionStepRequest = _reflection.GeneratedProtocolMessageType('TrackSessionStepRequest', (_message.Message,), {
'DESCRIPTOR' : _TRACKSESSIONSTEPREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.TrackSessionStepRequest)
})
_sym_db.RegisterMessage(TrackSessionStepRequest)
ListSessionsRequest = _reflection.GeneratedProtocolMessageType('ListSessionsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTSESSIONSREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.ListSessionsRequest)
})
_sym_db.RegisterMessage(ListSessionsRequest)
SessionFilter = _reflection.GeneratedProtocolMessageType('SessionFilter', (_message.Message,), {
'DESCRIPTOR' : _SESSIONFILTER,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.SessionFilter)
})
_sym_db.RegisterMessage(SessionFilter)
SessionInfo = _reflection.GeneratedProtocolMessageType('SessionInfo', (_message.Message,), {
'DESCRIPTOR' : _SESSIONINFO,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.SessionInfo)
})
_sym_db.RegisterMessage(SessionInfo)
ListSessionsResponse = _reflection.GeneratedProtocolMessageType('ListSessionsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTSESSIONSRESPONSE,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.ListSessionsResponse)
})
_sym_db.RegisterMessage(ListSessionsResponse)
GetSessionRequest = _reflection.GeneratedProtocolMessageType('GetSessionRequest', (_message.Message,), {
'DESCRIPTOR' : _GETSESSIONREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.GetSessionRequest)
})
_sym_db.RegisterMessage(GetSessionRequest)
CreateSessionRequest = _reflection.GeneratedProtocolMessageType('CreateSessionRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATESESSIONREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.CreateSessionRequest)
})
_sym_db.RegisterMessage(CreateSessionRequest)
DeleteSessionRequest = _reflection.GeneratedProtocolMessageType('DeleteSessionRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETESESSIONREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.DeleteSessionRequest)
})
_sym_db.RegisterMessage(DeleteSessionRequest)
CreateSessionReviewRequest = _reflection.GeneratedProtocolMessageType('CreateSessionReviewRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATESESSIONREVIEWREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.CreateSessionReviewRequest)
})
_sym_db.RegisterMessage(CreateSessionReviewRequest)
SessionReview = _reflection.GeneratedProtocolMessageType('SessionReview', (_message.Message,), {
'DESCRIPTOR' : _SESSIONREVIEW,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.SessionReview)
})
_sym_db.RegisterMessage(SessionReview)
SessionReviewStep = _reflection.GeneratedProtocolMessageType('SessionReviewStep', (_message.Message,), {
'DESCRIPTOR' : _SESSIONREVIEWSTEP,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.SessionReviewStep)
})
_sym_db.RegisterMessage(SessionReviewStep)
DetectedIntent = _reflection.GeneratedProtocolMessageType('DetectedIntent', (_message.Message,), {
'DESCRIPTOR' : _DETECTEDINTENT,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.DetectedIntent)
})
_sym_db.RegisterMessage(DetectedIntent)
ListSessionLabelsRequest = _reflection.GeneratedProtocolMessageType('ListSessionLabelsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTSESSIONLABELSREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.ListSessionLabelsRequest)
})
_sym_db.RegisterMessage(ListSessionLabelsRequest)
ListSessionLabelsResponse = _reflection.GeneratedProtocolMessageType('ListSessionLabelsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTSESSIONLABELSRESPONSE,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.ListSessionLabelsResponse)
})
_sym_db.RegisterMessage(ListSessionLabelsResponse)
AddSessionLabelsRequest = _reflection.GeneratedProtocolMessageType('AddSessionLabelsRequest', (_message.Message,), {
'DESCRIPTOR' : _ADDSESSIONLABELSREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.AddSessionLabelsRequest)
})
_sym_db.RegisterMessage(AddSessionLabelsRequest)
RemoveSessionLabelsRequest = _reflection.GeneratedProtocolMessageType('RemoveSessionLabelsRequest', (_message.Message,), {
'DESCRIPTOR' : _REMOVESESSIONLABELSREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.RemoveSessionLabelsRequest)
})
_sym_db.RegisterMessage(RemoveSessionLabelsRequest)
ListSessionReviewsRequest = _reflection.GeneratedProtocolMessageType('ListSessionReviewsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTSESSIONREVIEWSREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.ListSessionReviewsRequest)
})
_sym_db.RegisterMessage(ListSessionReviewsRequest)
ListSessionReviewsResponse = _reflection.GeneratedProtocolMessageType('ListSessionReviewsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTSESSIONREVIEWSRESPONSE,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.ListSessionReviewsResponse)
})
_sym_db.RegisterMessage(ListSessionReviewsResponse)
GetSessionReviewRequest = _reflection.GeneratedProtocolMessageType('GetSessionReviewRequest', (_message.Message,), {
'DESCRIPTOR' : _GETSESSIONREVIEWREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.GetSessionReviewRequest)
})
_sym_db.RegisterMessage(GetSessionReviewRequest)
GetLatestSessionReviewRequest = _reflection.GeneratedProtocolMessageType('GetLatestSessionReviewRequest', (_message.Message,), {
'DESCRIPTOR' : _GETLATESTSESSIONREVIEWREQUEST,
'__module__' : 'ondewo.nlu.session_pb2'
# @@protoc_insertion_point(class_scope:ondewo.nlu.GetLatestSessionReviewRequest)
})
_sym_db.RegisterMessage(GetLatestSessionReviewRequest)
DESCRIPTOR._options = None
_SESSIONS = _descriptor.ServiceDescriptor(
name='Sessions',
full_name='ondewo.nlu.Sessions',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=5862,
serialized_end=7921,
methods=[
_descriptor.MethodDescriptor(
name='DetectIntent',
full_name='ondewo.nlu.Sessions.DetectIntent',
index=0,
containing_service=None,
input_type=_DETECTINTENTREQUEST,
output_type=_DETECTINTENTRESPONSE,
serialized_options=b'\202\323\344\223\002;\"6/v2/{session=projects/*/agent/sessions/*}:detectIntent:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StreamingDetectIntent',
full_name='ondewo.nlu.Sessions.StreamingDetectIntent',
index=1,
containing_service=None,
input_type=_STREAMINGDETECTINTENTREQUEST,
output_type=_STREAMINGDETECTINTENTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListSessions',
full_name='ondewo.nlu.Sessions.ListSessions',
index=2,
containing_service=None,
input_type=_LISTSESSIONSREQUEST,
output_type=_LISTSESSIONSRESPONSE,
serialized_options=b'\202\323\344\223\002(\022&/v2/{parent=projects/*/agent}/sessions',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetSession',
full_name='ondewo.nlu.Sessions.GetSession',
index=3,
containing_service=None,
input_type=_GETSESSIONREQUEST,
output_type=_SESSION,
serialized_options=b'\202\323\344\223\002.\022,/v2/{session_id=projects/*/agent/sessions/*}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateSession',
full_name='ondewo.nlu.Sessions.CreateSession',
index=4,
containing_service=None,
input_type=_CREATESESSIONREQUEST,
output_type=_SESSION,
serialized_options=b'\202\323\344\223\002+\"&/v2/{parent=projects/*/agent}/sessions:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='TrackSessionStep',
full_name='ondewo.nlu.Sessions.TrackSessionStep',
index=5,
containing_service=None,
input_type=_TRACKSESSIONSTEPREQUEST,
output_type=_SESSION,
serialized_options=b'\202\323\344\223\002B\"=/v2/{session_id=projects/*/agent/sessions/*}:trackSessionStep:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteSession',
full_name='ondewo.nlu.Sessions.DeleteSession',
index=6,
containing_service=None,
input_type=_DELETESESSIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002.*,/v2/{session_id=projects/*/agent/sessions/*}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListSessionLabels',
full_name='ondewo.nlu.Sessions.ListSessionLabels',
index=7,
containing_service=None,
input_type=_LISTSESSIONLABELSREQUEST,
output_type=_LISTSESSIONLABELSRESPONSE,
serialized_options=b'\202\323\344\223\002/\022-/v2/{parent=projects/*/agent}/sessions/labels',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='AddSessionLabels',
full_name='ondewo.nlu.Sessions.AddSessionLabels',
index=8,
containing_service=None,
input_type=_ADDSESSIONLABELSREQUEST,
output_type=_SESSION,
serialized_options=b'\202\323\344\223\002<\"7/v2/{session_id=projects/*/agent/sessions/*}/labels:add:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='RemoveSessionLabels',
full_name='ondewo.nlu.Sessions.RemoveSessionLabels',
index=9,
containing_service=None,
input_type=_REMOVESESSIONLABELSREQUEST,
output_type=_SESSION,
serialized_options=b'\202\323\344\223\002?\":/v2/{session_id=projects/*/agent/sessions/*}/labels:remove:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListSessionReviews',
full_name='ondewo.nlu.Sessions.ListSessionReviews',
index=10,
containing_service=None,
input_type=_LISTSESSIONREVIEWSREQUEST,
output_type=_LISTSESSIONREVIEWSRESPONSE,
serialized_options=b'\202\323\344\223\0026\0224/v2/{session_id=projects/*/agent/sessions/*}/reviews',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetSessionReview',
full_name='ondewo.nlu.Sessions.GetSessionReview',
index=11,
containing_service=None,
input_type=_GETSESSIONREVIEWREQUEST,
output_type=_SESSIONREVIEW,
serialized_options=b'\202\323\344\223\002?\022=/v2/{session_review_id=projects/*/agent/sessions/*/reviews/*}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetLatestSessionReview',
full_name='ondewo.nlu.Sessions.GetLatestSessionReview',
index=12,
containing_service=None,
input_type=_GETLATESTSESSIONREVIEWREQUEST,
output_type=_SESSIONREVIEW,
serialized_options=b'\202\323\344\223\002M\022K/v2/{session_id=projects/*/agent/sessions/*}/reviews:getLatestSessionReview',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateSessionReview',
full_name='ondewo.nlu.Sessions.CreateSessionReview',
index=13,
containing_service=None,
input_type=_CREATESESSIONREVIEWREQUEST,
output_type=_SESSIONREVIEW,
serialized_options=b'\202\323\344\223\002E\"@/v2/{session_id=projects/*/agent/sessions/*}:createSessionReview:\001*',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_SESSIONS)
DESCRIPTOR.services_by_name['Sessions'] = _SESSIONS
# @@protoc_insertion_point(module_scope)
|
from picamera.array import PiRGBArray
from picamera import PiCamera
from time import sleep
import cv2 as cv

# Capture a single frame from the Raspberry Pi camera and display it
# in an OpenCV window until a key is pressed.
camera = PiCamera()
rawCapture = PiRGBArray(camera)
# Give the sensor a moment to settle (exposure / white balance).
sleep(0.1)
# "bgr" matches OpenCV's expected channel order.
camera.capture(rawCapture, format="bgr")
image = rawCapture.array
# BUGFIX: the module is imported as `cv`, not `cv2`, so the original
# raised NameError here; also the OpenCV API is `waitKey` (capital K),
# not `waitkey`.
cv.imshow("image", image)
cv.waitKey(0)
|
# coding=utf-8
import os

__all__ = ['is_running_under_teamcity']

# TeamCity exports this variable into every build agent's environment.
teamcity_presence_env_var = "TEAMCITY_VERSION"


def is_running_under_teamcity():
    """Return True when the process appears to be running in a TeamCity build."""
    return teamcity_presence_env_var in os.environ
|
import unittest
import json
import sys
sys.path.insert(0, '../')
import app.MessageParser
class UnitTests(unittest.TestCase):
    """Tests for MessageParser.highestProbabilityTagMeetingThreshold."""

    def test_HighestProbabilityTagMeetingThreshold(self):
        # Parser under test; the app.MessageParser module exposes a class
        # of the same name.
        MessageParser = app.MessageParser.MessageParser()
        # Neither 0.4 nor 0.3 reaches the 0.5 threshold -> 'none' sentinel.
        message1=json.loads("[{\"Tag\": \"banana\",\"Probability\": 0.4}, {\"Tag\": \"apple\",\"Probability\": 0.3}]")
        self.assertEqual(MessageParser.highestProbabilityTagMeetingThreshold(message1, 0.5), 'none')
        # 'apple' (0.5) outranks 'banana' (0.4); both meet the 0.3 threshold.
        message2=json.loads("[{\"Tag\": \"banana\",\"Probability\": 0.4}, {\"Tag\": \"apple\",\"Probability\": 0.5}]")
        self.assertEqual(MessageParser.highestProbabilityTagMeetingThreshold(message2, 0.3), 'apple')
        # Key order inside the JSON objects must not matter.
        message3=json.loads("[{\"Probability\": 0.038001421838998795, \"Tag\": \"apple\"}, {\"Probability\": 0.38567957282066345, \"Tag\": \"banana\"}]")
        self.assertEqual(MessageParser.highestProbabilityTagMeetingThreshold(message3, 0.3), 'banana')
if __name__ == '__main__':
unittest.main() |
class WrapperRobertaTokenizer():
    """Thin adapter around a RoBERTa-style tokenizer.

    Wraps a callable tokenizer object (returning ``{'input_ids': [...]}``)
    and exposes token/sentence encoding helpers that strip or add the
    special BOS (1) / EOS (2) ids explicitly.
    """

    def __init__(self, tokeniser):
        self.tokeniser = tokeniser

    def convertTokenToIds(self, token):
        """Encode one token, dropping the first and last (special) ids."""
        encoded = self.tokeniser(token)['input_ids']
        return encoded[1:-1]

    def convertSentToIds(self, sent):
        """Encode a whitespace-split sentence framed by BOS (1) and EOS (2)."""
        body = []
        for word in sent.split():
            body.extend(self.convertTokenToIds(word))
        return [1] + body + [2]

    def convertIdsToSent(self, ids):
        """Map a sequence of token ids back to their string tokens."""
        return self.tokeniser.convert_ids_to_tokens(ids)
|
"""
Font Style
Contains the style attributes of font.
"""
from dataclasses import dataclass
from .slant import FontSlant
from .weight import FontWeight
__all__ = [
"FontStyle",
]
@dataclass
class FontStyle:
    """Aggregate of the two independent style axes of a font."""
    # Slant of the glyphs (see FontSlant, e.g. roman vs. italic).
    slant: FontSlant
    # Stroke thickness (see FontWeight, e.g. normal vs. bold).
    weight: FontWeight
|
import re
from setuptools import setup
import subprocess
import sys

# Bootstrap: ensure pkg_utils >= 0.0.5 is installed before importing it below.
try:
    result = subprocess.run(
        [sys.executable, "-m", "pip", "show", "pkg_utils"],
        check=True, capture_output=True)
    match = re.search(r'\nVersion: (.*?)\n', result.stdout.decode(), re.DOTALL)
    # BUGFIX: compare numeric version components, not strings -- as strings,
    # ('0', '0', '10') < ('0', '0', '5'), so e.g. 0.0.10 would wrongly fail
    # the check. ValueError (non-numeric component) also triggers reinstall.
    assert match and tuple(int(part) for part in match.group(1).split('.')) >= (0, 0, 5)
except (subprocess.CalledProcessError, AssertionError, ValueError):
    subprocess.run(
        [sys.executable, "-m", "pip", "install", "-U", "pkg_utils"],
        check=True)

import os
import pkg_utils

name = 'nmrfamv2'
dirname = os.path.join(os.path.dirname(__file__))
package_data = {
    name: [],
}

# get package metadata
md = pkg_utils.get_package_metadata(dirname, name)

# install package
setup(
    name=name,
    version=md.version,
    description=(
        "Python package for NMRFAMV2"
    ),
    long_description='README.rst',
    url="",
    download_url='',
    author='NMFAMV2',
    author_email="marupilla@uchc.edu",
    license="MIT",
    keywords=['Metabolites'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
    ],
)
|
import unittest
import rpy3.robjects as robjects
rinterface = robjects.rinterface
import array


class REnvironmentTestCase(unittest.TestCase):
    """Tests for robjects.REnvironment construction and item access."""

    def testNew(self):
        env = robjects.REnvironment()
        # BUGFIX: assertEquals is a deprecated alias that was removed in
        # Python 3.12; use assertEqual.
        self.assertEqual(rinterface.ENVSXP, env.typeof)

    def testNewValueError(self):
        # A non-environment constructor argument must be rejected.
        self.assertRaises(ValueError, robjects.REnvironment, 'a')

    def testSetItem(self):
        env = robjects.REnvironment()
        env['a'] = 123
        self.assertTrue('a' in env)


def suite():
    """Return a TestSuite containing all REnvironment test cases."""
    suite = unittest.TestLoader().loadTestsFromTestCase(REnvironmentTestCase)
    return suite


if __name__ == '__main__':
    unittest.main()
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type

import logging

import qb.logging

# Find a Display if possible
# When running inside Ansible, a shared `display` instance is stashed on
# __main__; otherwise try to construct one directly. Outside Ansible
# entirely, fall back to None (DisplayHandler then drops records).
try:
    from __main__ import display
except ImportError:
    try:
        from ansible.utils.display import Display
    except ImportError:
        # Ansible is not importable at all.
        display = None
    else:
        display = Display()
class DisplayHandler(logging.Handler):
    '''
    A handler class that writes messages to Ansible's
    `ansible.utils.display.Display`, which then writes them to the user output.

    Includes static methods that let it act as a sort of a singleton, with
    a single instance created on-demand.
    '''

    # Singleton instance
    _instance = None

    @staticmethod
    def getDisplay():
        '''
        Get the display instance, if we were able to import or create one.

        :rtype: None
        :return: No display could be found or created.

        :rtype: ansible.util.display.Display
        :return: The display we're using.
        '''
        return display
    # .getDisplay

    @staticmethod
    def getInstance():
        '''
        :rtype: DisplayHandler
        :return: The singleton instance.
        '''
        if DisplayHandler._instance is None:
            DisplayHandler._instance = DisplayHandler()
        return DisplayHandler._instance
    # .getInstance

    @staticmethod
    def enable():
        '''
        Enable logging to Ansible's display by sending {.getInstance()} to
        {qb.logging.addHandler()}.

        :raises RuntimeError: When no display is available to write to.
        '''
        instance = DisplayHandler.getInstance()

        if instance.display is None:
            raise RuntimeError("No display available")

        return qb.logging.addHandler(instance)
    # .enable

    # BUGFIX: disable() and is_enabled() were plain functions missing the
    # @staticmethod decorator (unlike enable/getInstance); calling them on an
    # instance would have passed `self` as an unexpected argument.
    @staticmethod
    def disable():
        '''
        Disable logging to Ansible's display be sending {.getInstance()} to
        {qb.logging.removeHandler()}.
        '''
        return qb.logging.removeHandler(DisplayHandler.getInstance())
    # .disable

    @staticmethod
    def is_enabled():
        '''
        :rtype: bool
        :return: Whether the singleton handler is currently registered.
        '''
        return qb.logging.hasHandler(DisplayHandler.getInstance())
    # .is_enabled

    def __init__(self, display=None):
        logging.Handler.__init__(self)

        # Default to the module-level display found at import time.
        if display is None:
            display = DisplayHandler.getDisplay()

        self.display = display
    # #__init__

    def emit(self, record):
        '''
        Overridden to send log records to Ansible's display.
        '''
        if self.display is None:
            # Nothing we can do, drop it
            return

        try:
            # format() populates record.message from msg % args.
            self.format(record)

            if record.levelname == 'DEBUG':
                return self.display.verbose(record.message, caplevel=1)
            elif record.levelname == 'INFO':
                return self.display.verbose(record.message, caplevel=0)
            elif record.levelname == 'WARNING':
                self.display.warning(record.message)
            elif record.levelname == 'ERROR':
                self.display.error(record.message)
            elif record.levelname == 'CRITICAL':
                self.display.error("(CRITICAL) {}".format(record.message))
            else:
                pass
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # Deliberately re-raise everything else for now.
            raise
            # self.handleError(record)
    # #emit
"""Intialize Database file."""
import os
import random
import sys
import transaction
from pyramid.paster import (
get_appsettings,
setup_logging,
)
from pyramid.scripts.common import parse_vars
from ..models.meta import Base
from ..models import (
get_engine,
get_session_factory,
get_tm_session,
)
from mood_bot.models import User
from mood_bot.models.mymodel import Sentiments
# from faker import Faker
from passlib.apps import custom_app_context as context
# fake_data = Faker()
# FAKE_DATA = [{'body': fake_data.text(), 'negative_sentiment': fake_data.random.random(), 'positive_sentiment': fake_data.random.random(), 'user_id': random.randint(1, 3)} for i in range(20)]
# FAKE_USER =[{'username': 'turbo', 'password': context.hash('maple')},
# {'username': 'kitties', 'password': context.hash('fluff')},
# {'username': 'tree', 'password': context.hash('leafy')}]
def usage(argv):
    """Print usage for this console script and exit with status 1.

    Parameters:
        argv (list): Process argument vector; argv[0] is the script path.

    Raises:
        SystemExit: Always, with exit code 1.
    """
    cmd = os.path.basename(argv[0])
    # BUGFIX: typo "exMample" -> "example" in the user-facing message.
    print('usage: %s <config_uri> [var=value]\n'
          '(example: "%s development.ini")' % (cmd, cmd))
    sys.exit(1)
def main(argv=sys.argv):
    """Drop and re-create the database schema for the given config file."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])

    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    # The deployed database location always overrides the .ini value.
    settings["sqlalchemy.url"] = os.environ["DATABASE_URL"]

    engine = get_engine(settings)
    # Rebuild the schema from scratch.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    session_factory = get_session_factory(engine)
    # NOTE(review): seeding fake users/sentiments is currently disabled;
    # re-enable via transaction.manager + get_tm_session when needed.
|
import json
import os
import torch
from nltk import sent_tokenize, word_tokenize
import copy
import Constants
from Dict import Dict
import string
total_data = 0
def tokenize(st, sentence_split=None, option=False):
    """Tokenize and lower-case a passage or option string.

    Cleans known artifact markers, sentence- and word-tokenizes with nltk,
    re-attaches English clitics to the previous token, and returns a single
    space-joined lower-cased string. When `sentence_split` is given it is
    inserted between sentences. `option=True` enables extra cleanup of
    answer-option strings (stray leading/trailing punctuation).
    """
    #TODO: The tokenizer's performance is suboptimal
    # BUGFIX: guard against empty strings before indexing st[-1].
    if option and len(st) > 0 and (st[-1] in string.punctuation or st[0] == "$"):  # to deal with options
        st = st[:-1]
    if len(st) > 0:
        if option and (st[0] in string.punctuation or st[0] == "$"):  # to deal with options
            # BUGFIX: st[0:] was a no-op copy; drop the leading character.
            st = st[1:]
    # Strip known OCR/source artifacts.
    st = st.replace("<IMG>", "")
    st = st.replace("[KS5UKS5U]", "")
    st = st.replace("[:Z|xx|k.Com]", "")
    st = st.replace("(;)", "")
    ans = []
    for sent in sent_tokenize(st):
        if sentence_split is not None and len(ans) > 0:
            ans += [sentence_split]
        for w in word_tokenize(sent):
            # Re-attach clitics ('re, n't, 's, 'm, ') to the previous token,
            # but never to the cloze placeholder "_".
            if len(ans) > 0 and (w == "'re" or w == "n't" or w == "'s" or w == "'m" or w == "'" and len(ans[-1]) > 0) and ans[-1] != "_":
                ans[-1] += w
            else:
                ans += [w]
    ans = " ".join(ans).lower()
    if option and ans.find(" ") != -1:
        # Flag multi-word options for manual inspection.
        print(ans)
    return ans
def tokenize_data(dir, data_set, difficulty_set):
    """Load and tokenize every passage JSON under dir/<data_set>/<difficulty>.

    Parameters:
        dir: Root data directory.
        data_set: Split name, e.g. "train".
        difficulty_set: Iterable of difficulty sub-folders, e.g. ["middle"].

    Returns:
        list: One dict per passage with tokenized "article" and "options".
    """
    data = []
    for d in difficulty_set:
        new_path = os.path.join(dir, data_set, d)
        for inf in os.listdir(new_path):
            inf_path = os.path.join(new_path, inf)
            # BUGFIX: close the file handle instead of leaking it.
            with open(inf_path, "r") as fh:
                obj = json.load(fh)
            obj["article"] = tokenize(obj["article"])
            for k in range(len(obj['options'])):
                # Each question has exactly 4 answer options.
                for i in range(4):
                    obj['options'][k][i] = tokenize(obj['options'][k][i], option=True)
            data += [obj]
    return data
def get_data(dir, data_set, difficulty_set, vocab=None):
    """Load passages and convert them into model-ready samples.

    For every passage JSON under dir/<data_set>/<difficulty>: tokenizes the
    article and options, converts answers 'A'..'D' to 0..3, records the
    positions of the cloze placeholder "_", and (when `vocab` is given)
    maps tokens to vocabulary ids. Also counts processed files into the
    module-level `total_data`.
    """
    global total_data
    data = []
    total_data = 0
    print data_set, difficulty_set
    for d in difficulty_set:
        new_path = os.path.join(dir, data_set, d)
        for inf in os.listdir(new_path):
            total_data += 1
            inf_path = os.path.join(new_path, inf)
            obj = json.load(open(inf_path, "r"))
            # Force "." to be a separate token before tokenizing.
            obj["article"] = obj["article"].replace(".", " . ")
            obj["article"] = tokenize(obj["article"])
            # Split into sentences using "////" as a marker, then collect
            # one copy of each sentence per "_" placeholder it contains.
            article_sentence_split = tokenize(obj["article"], "////").split("////")
            place_holder_sentences = []
            for st in article_sentence_split:
                for k in range(st.count("_")):
                    place_holder_sentences += [st.strip()]
            words = obj["article"].split()
            if vocab:
                obj["article"] = vocab.convertToIdx(words, Constants.UNK_WORD, Constants.EOS_WORD, Constants.BOS_WORD)
                # Re-derive words (now including BOS/EOS) so placeholder
                # positions below match the id sequence.
                words = vocab.convertToLabels(obj["article"], )
            options = copy.deepcopy(obj['options'])
            # Answers arrive as letters 'A'..'D'; store them as 0..3.
            for k in range(len(obj['answers'])):
                obj["answers"][k] = ord(str(obj["answers"][k])) - ord('A')
            for k in range(len(obj['options'])):
                for i in range(4):
                    options[k][i] = tokenize(options[k][i], option=True)
                    # Collapse multi-word options into a single token.
                    if options[k][i].find(" ") != -1:
                        options[k][i] = options[k][i].replace(" ", "")
                    if vocab:
                        if options[k][i] == "": #convert empty option to unk
                            options[k][i] = "qwer"
                        obj["options"][k][i] = vocab.convertToIdx(options[k][i].split(), Constants.UNK_WORD)
            # Index of every "_" placeholder in the (possibly id-mapped) article.
            obj["place_holder_pos"] = []
            for i in range(len(words)):
                if words[i] == "_":
                    obj["place_holder_pos"].append(i)
            # One placeholder per question per option set, or the sample is malformed.
            assert len(obj["place_holder_pos"]) == len(obj['answers']) == len(obj['options'])
            obj["place_holder_pos"] = torch.LongTensor(obj["place_holder_pos"])
            # Question-type labels; left empty here.
            obj["types"] = []
            for i in range(len(obj["answers"])):
                obj["types"] += [""]
            data += [obj]
    return data
def makeVocabulary(sentences, size=1000000, min_freq=None):
    """Build a Dict over all whitespace tokens, pruned by size or frequency."""
    vocab = Dict([Constants.PAD_WORD, Constants.UNK_WORD, Constants.EOS_WORD, Constants.BOS_WORD])
    for sentence in sentences:
        for token in sentence.split():
            vocab.add(token)
    originalSize = vocab.size()
    # Frequency-based pruning takes precedence over size-based pruning.
    if min_freq is None:
        vocab = vocab.prune(size)
    else:
        vocab = vocab.prune_by_freq(min_freq)
    print('Created dictionary of size %d (pruned from %d)' %
          (vocab.size(), originalSize))
    return vocab
def get_sentences(data):
    """Flatten a dataset into a single list of article and option strings."""
    sentences = []
    for sample in data:
        sentences.append(sample["article"])
        for opts in sample["options"]:
            # Each question carries exactly 4 answer options.
            for i in range(4):
                sentences.append(opts[i])
    return sentences
if __name__ == "__main__":
    # Preprocess the CLOTH cloze dataset into a single torch .pt file,
    # reusing the vocabulary/unigram counts of an existing train.pt.
    difficulty_set = ["middle", "high"]
    data_dir = "../../data/CLOTH/"
    output_dir = "../../data/"
    min_freq = 2
    #set vocabulary
    #vocab = makeVocabulary(get_sentences(tokenize_data(data_dir, "train", difficulty_set)), min_freq=min_freq)
    #vocab.writeFile(os.path.join(output_dir, "dict.txt"))
    dataset = torch.load('../../data/train.pt') #the tokenizer uses random functions, we fix the vocab here
    vocab = dataset["vocab"]
    unigram_dis = dataset["unigram_dis"]
    # Zero out punctuation and the cloze placeholder so they cannot be
    # sampled from the unigram distribution.
    for i in string.punctuation:
        idx = vocab.lookup(i)
        if idx is not None:
            unigram_dis[idx] = 0
    unigram_dis[vocab.lookup("_")] = 0
    normalized_unigram = unigram_dis.float() * 1. / unigram_dis.sum()
    data = {}
    data_sets = ["train", "valid", "test"]
    for data_set in data_sets:
        if data_set != "test":
            data[data_set] = get_data(data_dir, data_set, difficulty_set, vocab)
        else:
            # The test split is additionally broken out per difficulty.
            data["test"] = {}
            for d in difficulty_set:
                data["test"][d] = get_data(data_dir, data_set, [d], vocab)
            data["test"]["whole"] = get_data(data_dir, data_set, difficulty_set, vocab)
    save_data = {'vocab': vocab,
                 'data': data,
                 'unigram_dis': unigram_dis}
    save_name = "train_debug.pt"
    torch.save(save_data, os.path.join(output_dir, save_name))
|
import torch

import distdl.nn.padnd as padnd
from distdl.utilities.misc import DummyContext

# Smoke-test PadNdFunction: pad a 3x4 tensor of ones, then unpad it back.
t = torch.ones(3, 4)
print(f't =\n{t}')

# DummyContext stands in for torch.autograd's ctx object.
ctx = DummyContext()
# (before, after) padding widths per dimension: rows then columns.
pad_width = [(1, 2), (3, 4)]
t_padded = padnd.PadNdFunction.forward(ctx, t, pad_width, value=0)
print(f't_padded =\n{t_padded}')

# backward returns a tuple; its first entry is the unpadded tensor.
t_unpadded = padnd.PadNdFunction.backward(ctx, t_padded)[0]
print(f't_unpadded =\n{t_unpadded}')
|
import argparse

from processing.hdf5creator import create_hdf_database, set_createdb_parser
from processing.core import set_geometry_parser, process, set_charges_parser, set_rism_parser

# Maps the sub-command name (read back from args.action) to its implementation.
functions = {"createdb":create_hdf_database,"geometry":process,"charges":process,"rism":process}

parser = argparse.ArgumentParser(description='Clever -- a program for calculation 3D molecular fields')
actions_parsers = parser.add_subparsers(title="Actions",
                                        description="actions available")
# Each set_*_parser registers one sub-command on the shared sub-parser group.
set_createdb_parser(actions_parsers)
set_geometry_parser(actions_parsers)
set_charges_parser(actions_parsers)
set_rism_parser(actions_parsers)

args = parser.parse_args()
# Dispatch, forwarding every parsed argument as keyword arguments.
# NOTE(review): assumes each sub-parser sets an `action` default; invoking
# with no sub-command would fail here -- confirm against set_*_parser.
functions[args.action](**vars(args))
|
#!/usr/bin/env python3
import apx
import sys, os

# Report the total packed byte size of all require ("in") and
# provide ("out") ports of the APX node definition given as argv[1].
if __name__ == "__main__":
    with open(sys.argv[1]) as fp:
        node = apx.Parser().load(fp)
    # packLen() is the serialized byte length of one port's data signature.
    inDataSize = sum([port.dsg.packLen() for port in node.requirePorts])
    outDataSize = sum([port.dsg.packLen() for port in node.providePorts])
    print("in:\t{:d}\tout:\t{:d}".format(inDataSize, outDataSize))
"""Different models to describe the vertical relative humidity distribution."""
import abc
import numpy as np
from scipy.interpolate import interp1d
from konrad.component import Component
from konrad.physics import vmr2relative_humidity
from konrad.utils import gaussian
class RelativeHumidityModel(Component, metaclass=abc.ABCMeta):
    """Base class for vertical relative humidity distribution models."""

    def __call__(self, atmosphere, **kwargs):
        """Return the vertical distribution of relative humidity.

        Parameters:
            atmosphere (konrad.atmosphere.Atmosphere): Atmosphere component.
            **kwargs: Arbitrary number of additional arguments,
                depending on the actual implementation.
        Returns:
            ndarray: Relative humidity profile.
        """
        ...
class CacheFromAtmosphere(RelativeHumidityModel):
    """Calculate and cache relative humidity from initial atmosphere."""

    def __init__(self):
        # Computed lazily on the first call, then reused forever.
        self._rh_profile = None

    def __call__(self, atmosphere, **kwargs):
        if self._rh_profile is not None:
            return self._rh_profile
        # Derive RH from the newest (last) H2O and temperature profiles.
        self._rh_profile = vmr2relative_humidity(
            vmr=atmosphere['H2O'][-1],
            pressure=atmosphere['plev'],
            temperature=atmosphere['T'][-1],
        )
        return self._rh_profile
class HeightConstant(RelativeHumidityModel):
    """Fix the relative humidity to a single value throughout the atmosphere."""

    def __init__(self, rh_surface=0.8):
        """
        Parameters:
            rh_surface (float): Relative humidity at first
                pressure level (surface)
        """
        self.rh_surface = rh_surface
        self._rh_cache = None

    def __call__(self, atmosphere, **kwargs):
        # Broadcast the surface value onto the full pressure grid once.
        if self._rh_cache is None:
            plev = atmosphere['plev']
            self._rh_cache = np.ones_like(plev) * self.rh_surface
        return self._rh_cache
class VerticallyUniform(RelativeHumidityModel):
    """Use a single value of relative humidity up to the convective top and
    then a linearly decreasing value towards the cold point."""

    def __init__(self, rh_surface=0.5, rh_tropopause=0.3):
        """
        Parameters:
            rh_surface (float): relative humidity from the first pressure
                level (surface) up to the convective top
            rh_tropopause (float): relative humidity at the tropopause
        """
        self.rh_surface = rh_surface
        self.rh_tropopause = rh_tropopause
        # Initial levels [Pa]; overwritten on every call.
        self.convective_top = 300e2
        self.cold_point = 100e2

    def __call__(self, atmosphere, convection, **kwargs):
        plev = atmosphere['plev']
        self.convective_top = convection.get('convective_top_plev')[0]
        self.cold_point = atmosphere.get_cold_point_plev()

        # Linear ramp in pressure: rh_surface at the convective top down to
        # rh_tropopause at the cold point.
        slope = (self.rh_tropopause - self.rh_surface) / (
            self.cold_point - self.convective_top)
        rh = slope * (plev - self.convective_top) + self.rh_surface
        # Below the convective top (higher pressure) keep the surface value.
        rh[plev > self.convective_top] = self.rh_surface

        return rh
class ConstantFreezingLevel(RelativeHumidityModel):
    """Constant relative humidity up to the freezing level and then
    decreasing."""

    def __init__(self, rh_surface=0.77):
        """
        Parameters:
            rh_surface (float): Relative humidity at first
                pressure level (surface)
        """
        self.rh_surface = rh_surface

    def __call__(self, atmosphere, **kwargs):
        plev = atmosphere['plev']
        freezing_level = atmosphere.get_triple_point_index()
        rh_profile = self.rh_surface * np.ones_like(plev)
        # Above the freezing level, RH falls off as a power law in pressure.
        rh_profile[freezing_level:] = (
            self.rh_surface
            * (plev[freezing_level:] / plev[freezing_level]) ** 1.3
        )
        return rh_profile
class FixedUTH(RelativeHumidityModel):
    """Idealised model of a fixed C-shaped relative humidity distribution."""

    def __init__(self, rh_surface=0.77, uth=0.75, uth_plev=170e2,
                 uth_offset=0):
        """Couple the upper-tropospheric humidity peak to the convective top.

        Parameters:
            rh_surface (float): Relative humidity at first
                pressure level (surface).
            uth (float): Relative humidity at the upper-tropospheric peak.
            uth_plev (float): Pressure level of second humidity maximum [Pa].
            uth_offset (float): Offset between UTH peak and convective top.
        """
        self.rh_surface = rh_surface
        self.uth = uth
        self.uth_plev = uth_plev
        self.uth_offset = uth_offset
        # Background profile is computed once and then cached.
        self._rh_base_profile = None

    def get_relative_humidity_profile(self, atmosphere):
        plev = atmosphere['plev']

        # Use Manabe (1967) relative humidity model as base/background.
        if self._rh_base_profile is None:
            self._rh_base_profile = Manabe67(
                rh_surface=self.rh_surface)(atmosphere)

        # Gaussian upper-tropospheric UTH peak in ln(p) coordinates.
        log_ratio = np.log(plev / (self.uth_plev + self.uth_offset))
        uth_peak = self.uth * np.exp(-np.pi * log_ratio ** 2)

        # Follow whichever of background and peak is moister.
        return np.maximum(self._rh_base_profile, uth_peak)

    def __call__(self, atmosphere, **kwargs):
        return self.get_relative_humidity_profile(atmosphere)
class CoupledUTH(FixedUTH):
    """Idealised model of a coupled C-shaped relative humidity distribution.

    Works like ``FixedUTH`` except that ``uth_plev`` is updated automatically
    to track the convective top.
    """

    def __call__(self, atmosphere, convection, **kwargs):
        # Re-anchor the UTH peak at the current convective top.
        self.uth_plev = convection.get('convective_top_plev')[0]
        return self.get_relative_humidity_profile(atmosphere)
class CshapeConstant(RelativeHumidityModel):
    """Idealized model of a C-shaped RH profile using a quadratic equation."""

    def __init__(self, uth_plev=200e2, rh_min=0.3, uth=0.8):
        self.uth_plev = uth_plev
        self.rh_min = rh_min
        self.uth = uth
        self.rh_surface = uth

    def __call__(self, atmosphere, convection, **kwargs):
        # Anchor the upper end of the "C" at the convective top.
        self.uth_plev = convection.get('convective_top_plev')[0]

        log_p = np.log10(atmosphere['plev'])
        log_top = np.log10(self.uth_plev)
        log_surface = log_p[0]

        # Parabola in log10(p): minimum rh_min halfway between surface and
        # convective top, reaching uth at both ends.
        curvature = 4 * (self.uth - self.rh_min) / (log_top - log_surface) ** 2
        center = (log_top + log_surface) / 2

        rh = curvature * (log_p - center) ** 2 + self.rh_min
        return np.clip(rh, a_min=0, a_max=1)
class CshapeDecrease(RelativeHumidityModel):
    """Idealized model of a C-shaped RH profile using a quadratic equation,
    with RH decreasing above the convective top."""

    def __init__(self, uth_plev=200e2, rh_min=0.3, uth=0.8):
        self.uth_plev = uth_plev
        self.rh_min = rh_min
        self.uth = uth
        self.rh_surface = uth

    def __call__(self, atmosphere, convection, **kwargs):
        # Anchor the upper end of the "C" at the convective top.
        self.uth_plev = convection.get('convective_top_plev')[0]

        log_p = np.log10(atmosphere['plev'])
        log_top = np.log10(self.uth_plev)
        log_surface = log_p[0]

        # Parabola in log10(p) between surface and convective top.
        curvature = 4 * (self.uth - self.rh_min) / (log_top - log_surface) ** 2
        center = (log_top + log_surface) / 2
        rh = np.clip(curvature * (log_p - center) ** 2 + self.rh_min,
                     a_min=0, a_max=1)

        # Above the convective top, scale RH down proportionally to pressure.
        above = log_p < log_top
        rh[above] *= (10 ** log_p / 10 ** log_top)[above]

        return rh
class Manabe67(RelativeHumidityModel):
    """Relative humidity model following Manabe and Wetherald (1967)."""

    def __init__(self, rh_surface=0.77):
        """Initialize a humidity model.

        Parameters:
            rh_surface (float): Relative humidity at the surface.
        """
        self.rh_surface = rh_surface

    def __call__(self, atmosphere, **kwargs):
        pressure = atmosphere['plev']
        # RH decreases linearly with normalized pressure (sigma coordinate).
        sigma = pressure / pressure[0]
        return self.rh_surface * (sigma - 0.02) / (1 - 0.02)
class Cess76(RelativeHumidityModel):
    """Relative humidity model following Cess (1976).

    The relative humidity profile depends on the surface temperature.
    This results in moister atmospheres at warmer temperatures.
    """

    def __init__(self, rh_surface=0.8, T_surface=288):
        """Initialize a humidity model.

        Parameters:
            rh_surface (float): Relative humidity at the surface.
            T_surface (float): Surface temperature [K].
        """
        self.rh_surface = rh_surface
        self.T_surface = T_surface

    @property
    def omega(self):
        """Temperature dependent scaling factor for the RH profile."""
        return 1.0 - 0.03 * (self.T_surface - 288)

    def __call__(self, atmosphere, surface, **kwargs):
        pressure = atmosphere['plev']
        # Track the current surface temperature; ``omega`` follows it.
        self.T_surface = surface['temperature'][-1]
        sigma = pressure / pressure[0]
        return self.rh_surface * ((sigma - 0.02) / (1 - 0.02)) ** self.omega
class Romps14(RelativeHumidityModel):
    """Relative humidity following an invariant RH-T relation."""

    def __init__(self):
        self._rh_func = None  # built lazily on first call

    def __call__(self, atmosphere, **kwargs):
        if self._rh_func is None:
            # Values read from Fig. 6 in Romps (2014).
            temperatures = np.array([300, 240, 200, 190, 188, 186])
            humidities = np.array([0.8, 0.6, 0.7, 1.0, 0.5, 0.1])
            self._rh_func = interp1d(
                x=temperatures,
                y=humidities,
                kind='linear',
                fill_value='extrapolate',
            )
        # Map the current temperature profile onto the fixed RH(T) relation.
        return self._rh_func(atmosphere['T'][-1, :])
class PolynomialCshapedRH(RelativeHumidityModel):
    """C-shaped RH profile built from linear and quadratic segments.

    The RH increases linearly in the boundary layer and decreases linearly
    above the upper-tropospheric peak; the level where RH is half that peak
    is coupled to the cold point.  Between the two peaks a quadratic is fit
    through the peaks and one mid-tropospheric point.  Default values come
    from RCEMIP large-domain experiment statistics.
    """

    def __init__(self, top_slope=7.5e-5, top_peak_rh=0.75, mid_p=500e2,
                 mid_rh=0.4, low_peak_p=940e2, low_peak_rh=0.85,
                 bl_slope=-2e-5):
        """
        Parameters:
            top_slope (float): slope of the linear branch above the
                upper-tropospheric peak.
            top_peak_rh (float in [0;1]): RH at the upper-tropospheric peak.
            mid_p (float): pressure of the mid-tropospheric point.
            mid_rh (float in [0;1]): RH at the mid-tropospheric point.
            low_peak_p (float): pressure of the low-tropospheric peak.
            low_peak_rh (float in [0;1]): RH at the low-tropospheric peak.
            bl_slope (float): RH slope within the boundary layer.
        """
        # Accept percentages and convert them to fractions.
        if top_peak_rh > 1:
            top_peak_rh /= 100
        if mid_rh > 1:
            mid_rh /= 100
        if low_peak_rh > 1:
            low_peak_rh /= 100

        self.top_slope = top_slope
        self.top_peak_rh = top_peak_rh
        self.mid_p = mid_p
        self.mid_rh = mid_rh
        self.low_peak_p = low_peak_p
        self.low_peak_rh = low_peak_rh
        self.bl_slope = bl_slope

    def __call__(self, atmosphere, **kwargs):
        """
        Parameters:
            atmosphere (konrad.atmosphere.Atmosphere): The atmosphere component.

        Returns:
            ndarray: The relative humidity profile.
        """
        plev = atmosphere["plev"]

        # Linear branch above the upper-tropospheric peak, anchored so that
        # RH equals half the peak value at the cold point.
        cold_point_p = atmosphere.get_cold_point_plev()

        def top_layer(p):
            return self.top_peak_rh / 2 + self.top_slope * (p - cold_point_p)

        top_peak_p = cold_point_p + self.top_peak_rh / (2 * self.top_slope)

        # Quadratic through the two peaks and the mid-tropospheric point.
        coeffs = np.polyfit(
            [top_peak_p, self.mid_p, self.low_peak_p],
            [self.top_peak_rh, self.mid_rh, self.low_peak_rh],
            deg=2,
        )

        def mid_layer(p):
            return np.polyval(coeffs, p)

        # Linear branch within the boundary layer.
        def boundary_layer(p):
            return self.low_peak_rh + self.bl_slope * (p - self.low_peak_p)

        # Later conditions take precedence in np.piecewise, so the boundary
        # layer overrides the quadratic at high pressures.
        rh_profile = np.piecewise(
            plev,
            [plev <= top_peak_p, plev > top_peak_p, plev > self.low_peak_p],
            [top_layer, mid_layer, boundary_layer],
        )
        rh_profile[rh_profile < 0] = 0

        return rh_profile
class PerturbProfile(RelativeHumidityModel):
    """Wrapper to add a perturbation to a relative humidity profile."""

    def __init__(self, base_profile=None, shape="square", center_plev=500e2,
                 width=50e2, intensity=0.1, fixed_T=False):
        """
        Parameters:
            base_profile (konrad.relative_humidity model): initial profile on
                which the perturbation is added.  Defaults to a fresh
                ``HeightConstant()``.
            shape (str): name of the shape of the perturbation.
                Implemented: "square", "gaussian".  For a Dirac use a square
                with width 0.
            center_plev (float): Pressure of the center of the perturbation [Pa].
            width (float): width of the perturbation [Pa].
            intensity (float): Change in RH where the profile is perturbed,
                positive or negative.  Values > 1 are interpreted as percent.
            fixed_T (bool): If True, the temperature at ``center_plev`` at the
                first step is kept as the central point of the perturbation
                throughout the simulation (the central pressure then varies).
        """
        # BUG FIX: instantiate the default here instead of using a mutable
        # default argument (``base_profile=HeightConstant()``), which would be
        # created once at import time and shared (incl. its RH cache) by
        # every PerturbProfile instance.
        self._base_profile = (
            HeightConstant() if base_profile is None else base_profile
        )
        self._shape = shape
        self.center_plev = center_plev
        self.width = width
        self.fixed_T = fixed_T
        self.center_T = None
        if intensity > 1:  # intensity given in percent
            intensity /= 100
        self.intensity = float(intensity)

    def __call__(self, atmosphere, **kwargs):
        """
        Parameters:
            atmosphere (konrad.atmosphere.Atmosphere): The atmosphere component.

        Returns:
            ndarray: The relative humidity profile.
        """
        plev = atmosphere["plev"]
        T = atmosphere["T"][-1]

        # BUG FIX: use ``is None`` rather than ``== None`` (identity check;
        # also avoids elementwise comparison once center_T is a numpy scalar).
        if self.center_T is None:
            # Remember the temperature at the perturbation center (first step).
            idx_center = np.abs(plev - self.center_plev).argmin()
            self.center_T = T[idx_center]

        if self.fixed_T:
            # Re-center the perturbation on the level matching center_T.
            idx_center = np.abs(T - self.center_T).argmin()
            self.center_plev = plev[idx_center]

        rh_profile = self._base_profile(atmosphere).copy()

        if self._shape == "square":
            idx_low = np.abs(plev - (self.center_plev + self.width / 2)).argmin()
            idx_high = np.abs(plev - (self.center_plev - self.width / 2)).argmin()
            if idx_low != idx_high:
                rh_profile[idx_low:idx_high] += self.intensity
            else:
                # Zero-width (Dirac-like) perturbation on a single level.
                rh_profile[idx_low] += self.intensity

        if self._shape == "gaussian":
            G = gaussian(plev, self.center_plev, self.width / 2)
            # Perturb only within 1.5 widths of the center.
            idx_low = np.abs(plev - (self.center_plev + 1.5 * self.width)).argmin()
            idx_high = np.abs(plev - (self.center_plev - 1.5 * self.width)).argmin()
            if idx_low != idx_high:
                rh_profile[idx_low:idx_high] += (
                    G[idx_low:idx_high] / np.max(G) * self.intensity
                )
            else:
                rh_profile[idx_low] += self.intensity

        return rh_profile
class ProfileFromData(RelativeHumidityModel):
    """Relative humidity profile interpolated from user-supplied data."""

    def __init__(self, p_data, rh_data):
        """
        Parameters:
            p_data (np.ndarray): pressure coordinates corresponding to
                rh_data, in Pa
            rh_data (np.ndarray): the rh profile on p_data, in unit of RH
        """
        self._rh_func = interp1d(p_data, rh_data, fill_value="extrapolate")

    def __call__(self, atmosphere, **kwargs):
        """Return the vertical distribution of relative humidity.

        Parameters:
            atmosphere (konrad.atmosphere.Atmosphere): Atmosphere component.

        Returns:
            ndarray: Relative humidity profile.
        """
        # Interpolate the stored profile onto the model pressure grid.
        return self._rh_func(atmosphere["plev"])
|
#
# This file is part of Python Client Library for STAC.
# Copyright (C) 2019 INPE.
#
# Python Client Library for STAC is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
#
"""STAC Item module."""
import json
import shutil
import requests
from urllib.parse import urlparse
from pkg_resources import resource_string
from .common import Link, Provider
from .utils import Utils
class Asset(dict):
    """Asset object."""

    def __init__(self, data):
        """Initialize instance with dictionary data.

        :param data: Dict with Asset metadata.
        """
        super(Asset, self).__init__(data or {})

    @property
    def href(self):
        """:return: the Asset href."""
        return self['href']

    @property
    def title(self):
        """:return: the Asset title."""
        return self['title']

    @property
    def type(self):
        """:return: the Asset type."""
        return self['type']

    def download(self, folder_path=None):  # pragma: no cover
        """
        Download the asset to an indicated folder.

        :param folder_path: Folder path to download the asset, if left None,
                            the asset will be downloaded to the current
                            working directory.
        :return: path to downloaded file.
        """
        import os

        local_filename = urlparse(self['href'])[2].split('/')[-1]
        if folder_path is not None:
            # BUG FIX: the target path was previously built by plain string
            # concatenation and then ignored — the file was always written
            # to the current working directory.  Join properly and use it.
            local_filename = os.path.join(folder_path, local_filename)
        with requests.get(self['href'], stream=True) as r:
            with open(local_filename, 'wb') as f:
                shutil.copyfileobj(r.raw, f)
        return local_filename
class Geometry(dict):
    """The Geometry Object."""

    def __init__(self, data):
        """Initialize instance with dictionary data.

        :param data: Dict with Geometry metadata.
        """
        super().__init__(data or {})

    @property
    def type(self):
        """:return: the Geometry type."""
        return self['type']

    @property
    def coordinates(self):
        """:return: the Geometry coordinates."""
        return self['coordinates']
class Properties(dict):
    """The Properties Object."""

    def __init__(self, data):
        """Initialize instance with dictionary data.

        :param data: Dict with Properties metadata.
        """
        super().__init__(data or {})

    @property
    def datetime(self):
        """:return: the datetime property."""
        return self['datetime']

    @property
    def license(self):
        """:return: the license property."""
        return self['license']

    @property
    def providers(self):
        """:return: the providers property, wrapped as Provider objects."""
        return [Provider(entry) for entry in self['providers']]

    @property
    def title(self):
        """:return: the title property."""
        return self['title']

    @property
    def created(self):
        """:return: the created property."""
        return self['created']

    @property
    def updated(self):
        """:return: the updated property."""
        return self['updated']
class Item(dict):
    """The GeoJSON Feature of a STAC Item."""

    def __init__(self, data, validate=False):
        """Initialize instance with dictionary data.

        :param data: Dict with Item metadata.
        :param validate: true if the Item should be validate using its
                         jsonschema. Default is False.
        """
        self._validate = validate
        super().__init__(data or {})
        if self._validate:
            Utils.validate(self)

    @property
    def stac_version(self):
        """:return: the STAC version ('0.7.0' when not present)."""
        return self.get('stac_version', '0.7.0')

    @property
    def id(self):
        """:return: the Item identifier."""
        return self['id']

    @property
    def type(self):
        """:return: the Item type."""
        return self['type']

    @property
    def bbox(self):
        """:return: the Item Bounding Box."""
        return self['bbox']

    @property
    def collection(self):
        """:return: the Item Collection."""
        return self['collection']

    @property
    def geometry(self):
        """:return: the Item Geometry."""
        return Geometry(self['geometry'])

    @property
    def properties(self):
        """:return: the Item properties."""
        return Properties(self['properties'])

    @property
    def links(self):
        """:return: the Item related links."""
        return [Link(link) for link in self['links']]

    @property
    def assets(self):
        """:return: the Item related assets."""
        return {name: Asset(asset) for name, asset in self['assets'].items()}

    @property
    def _schema(self):
        """:return: the Item jsonschema."""
        path = f'jsonschemas/{self.stac_version}/item.json'
        return json.loads(resource_string(__name__, path))
class ItemCollection(dict):
    """The GeoJSON Feature Collection of STAC Items."""

    def __init__(self, data, validate=False):
        """Initialize instance with dictionary data.

        :param data: Dict with Item Collection metadata.
        :param validate: true if the Item Collection should be validate using
                         its jsonschema. Default is False.
        """
        self._validate = validate
        super().__init__(data or {})

    @property
    def type(self):
        """:return: the Item Collection type."""
        return self['type']

    @property
    def features(self):
        """:return: the Item Collection list of GeoJSON Features."""
        return [Item(feature, self._validate) for feature in self['features']]

    @property
    def links(self):
        """:return: the Item Collection related links."""
        return [Link(link) for link in self['links']]
|
#!/usr/bin/env python
# -*-mode: python -*- -*- coding: utf-8 -*-
"""
Implements the (W3C WG Note) RIF-in-RDF mapping.
By Sandro Hawke, 16 June 2010.
Revised 3 October 2010 to match current version of RIF_in_RDF.
Revised 12 May 2011 to fix a few bugs.
Copyright © 2010,2011 World Wide Web Consortium, (Massachusetts
Institute of Technology, European Research Consortium for Informatics
and Mathematics, Keio University). All Rights Reserved. This work is
distributed under the W3C® Software License [1] in the hope that it
will be useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
[1] http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231
TODO:
-- improve use of etree, using len and subscripting
-- extract, driven by simplified schema?
-- run through test cases
"""
import sys
import urllib2
from urlparse import urljoin
# http://docs.python.org/library/xml.etree.elementtree.html
import xml.etree.cElementTree as etree
# XML namespace URIs used throughout the translation.
rifns = "http://www.w3.org/2007/rif#"
rdfns = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns = "http://www.w3.org/XML/1998/namespace"
xsdns = "http://www.w3.org/2001/XMLSchema#"

# Table 3 of the RIF-in-RDF mapping: (class, child element) pairs that get a
# renamed RDF property and a processing mode (2 = optional/repeatable child
# gathered into a list, 3 = <slot> transformation).
table_3 = {
    ("Document", "directive") : ("directives", 2),
    ("Group", "sentence") : ("sentences", 2),
    ("Forall", "declare") : ("vars", 2),
    ("Exists", "declare") : ("vars", 2),
    ("And", "formula") : ("formulas", 2),
    ("Or", "formula") : ("formulas", 2),
    ("Frame", "slot") : ("slots", 3),
    ("Atom", "slot") : ("namedargs", 3),
    ("Expr", "slot") : ("namedargs", 3),
    }
#
# Classes used to represent RDF. We could use rdflib or something,
# but we want this to be readable even if you don't know rdflib.
#
class BlankNode(object):
    """An RDF blank node with an auto-generated or supplied label."""

    counter = 0  # class-wide counter used for generated labels

    def __init__(self, label=None):
        if label:
            self.label = label
        else:
            # Generate a fresh label: "n0", "n1", ...
            self.label = "n%d" % BlankNode.counter
            BlankNode.counter += 1

    def as_turtle(self):
        """Return the Turtle serialization, e.g. "_:n0"."""
        return "_:" + self.label
        # can we use [...] notation somehow?  That's hard.
class LabeledNode(object):
    """An RDF node identified by an IRI."""

    def __init__(self, iri):
        self.iri = iri

    def as_turtle(self):
        """Return the Turtle serialization, e.g. "<http://...>"."""
        return "<%s>" % self.iri
class PlainLiteral(object):
    """An RDF plain literal, optionally carrying a language tag."""

    def __init__(self, text, lang=None):
        self.text = text
        self.lang = lang

    def as_turtle(self):
        """Return the Turtle serialization, with "@lang" when tagged."""
        quoted = '"' + turtle_escape(self.text) + '"'
        if self.lang:
            return quoted + "@" + self.lang
        return quoted
class TypedLiteral(object):
    """An RDF typed literal: a lexical representation plus a datatype IRI."""

    def __init__(self, lexrep, datatype):
        self.lexrep = lexrep
        self.datatype = datatype

    def as_turtle(self):
        """Return the Turtle serialization, e.g. '"5"^^<...#int>'."""
        return '"%s"^^<%s>' % (turtle_escape(self.lexrep), self.datatype)
class RDFList(object):
    """An RDF collection, serialized with Turtle's ( ... ) notation."""

    def __init__(self, items):
        self.items = items

    def as_turtle(self):
        """Return the Turtle serialization of the list members."""
        members = " ".join(item.as_turtle() for item in self.items)
        return " ( " + members + " ) "
#
# General Utility Functions
#
class Namespace(object):
    """
    For convenience, lets us write rif.foo as shorthand for
    "{"+rifns+"}"+"foo".
    """

    def __init__(self, ns):
        self.ns = ns

    def __getattr__(self, term):
        # Build the ElementTree-style "{namespace}local" name.
        return "{%s}%s" % (self.ns, term)
def ns_split(term):
    """
    Take apart an ElementTree namespaced name, returning the namespace
    and a localpart.
    """
    ns, local = term.split("}")
    assert ns.startswith("{")
    return ns[1:], local
def group_children(x):
    """Given an XML element, yield lists of its children, gathered by
    element tag.  For children a b b c c c d the result is [a] [b b]
    [c c c] [d].

    Assumes same-tag children are adjacent in document order, but XML
    Schema requires that, so it's a good assumption.  A childless element
    yields a single empty list.
    """
    buffer = []
    prev = None
    # Iterate the element directly instead of the deprecated getchildren(),
    # which was removed from ElementTree in Python 3.9 (iteration works on
    # Python 2 as well).
    for p in x:
        if prev is not None and prev != p.tag:
            yield buffer
            buffer = []
        buffer.append(p)
        prev = p.tag
    yield buffer
def the_child_of(x):
    """Return the single child element of *x*; assert there is exactly one."""
    # list(x) replaces the deprecated getchildren() (removed in Python 3.9).
    children = list(x)
    assert(len(children) == 1)
    return children[0]
def contains_markup(x):
    """Return True if element *x* contains any child elements."""
    # len(element) is documented as the number of subelements, which makes
    # the old getchildren() call (removed in Python 3.9) and the redundant
    # length assertion unnecessary.
    return len(x) > 0
def turtle_escape(s):
    """Escape backslashes, quotes and control characters for a Turtle string."""
    # http://www.w3.org/TeamSubmission/turtle/#sec-strings
    # Backslash must be handled first so later escapes are not doubled.
    for raw, escaped in (("\\", "\\\\"), ("\n", "\\n"), ("\t", "\\t"),
                         ("\r", "\\r"), ('"', '\\"')):
        s = s.replace(raw, escaped)
    return s
#
# Parts of describe() that have been moved out into separate functions,
# just for readability.
#
# Convenience handle: rif.Foo == "{http://www.w3.org/2007/rif#}Foo".
rif = Namespace(rifns)
def get_focus(rifxml, default_iri=None):
    """Return the RDF node (focus) representing a RIF element.

    Uses the <id> child's rif:iri constant when present, otherwise
    *default_iri* when given, otherwise a fresh blank node.
    """
    # allow an override, eg for the document URI? Should the document address
    # be the IRI of the document node...?
    id_child = rifxml.find(rif.id)
    if id_child is not None:
        # oddly, the id is required to be a rif:iri. A rif:local
        # would make a lot of sense, too, for when you want metadata
        # without making up an IRI.
        id_const = id_child.find(rif.Const)
        t = id_const.get("type")
        if t == rifns+"iri":
            iri = id_const.text
            return LabeledNode(iri)
        else:
            # BUG FIX: error() takes (element, message); the element argument
            # was missing, so reporting this error raised a TypeError.
            error(id_const,
                  """<id> elements must contain a <Const type="rif:iri"> element""")
    if default_iri:
        return default_iri
    return BlankNode()
def extract_meta(rifxml):
    """Optionally look for a <meta> element and convert the data in it into
    triples and return them.

    Not implemented yet: currently always returns an empty triple list.
    """
    return []
def error(x, msg):
    # Report a fatal translation error near XML element *x*, then exit.
    # NOTE(review): some call sites pass only one argument (the message) —
    # those calls would raise a TypeError; confirm and fix the callers or
    # give *msg* a default value.
    print >>sys.stderr, "Error:", msg
    print >>sys.stderr, "Near ", etree.tostring(x)[0:60],"..."
    sys.exit(1)
#
# The main describe() function
#
def describe(rifxml, default_iri=None, base=None):
    """Given a RIF XML document, or a part of an RIF XML document
    (where the element is a "class stripe"), return the pair <focus,
    triples> where triples is a set of RDF triples (an RDF graph),
    and focus is the node in that graph which represents the top-level
    element in the provided XML."""
    # xml:base may be overridden at any level of the document.
    base = rifxml.get("{"+xmlns+"}base", base)
    focus = get_focus(rifxml, default_iri)
    triples = []
    tag = rifxml.tag
    (ns,local) = ns_split(rifxml.tag)
    triples.extend(extract_meta(rifxml))

    # Table 1 Processing: Var and Const map directly to literal-valued
    # properties and short-circuit the generic child handling below.
    if tag == rif.Var:
        triples.append( (focus, rifns+"varname", PlainLiteral(rifxml.text)) )
        triples.append( (focus, rdfns+"type", LabeledNode(rifns+"Var")) )
        return (focus, triples)
    if tag == rif.Const:
        t = rifxml.get("type")
        text = rifxml.text
        if text is None:   # etree does this sometimes (!)
            text = ""
        if t == rifns + "iri":
            # Constant IRIs are resolved against the current xml:base.
            iri=urljoin(base, text, allow_fragments=True)
            new = (focus, rifns+"constIRI",
                   TypedLiteral(iri, datatype=xsdns+"anyURI"))
        elif t == rifns + "local":
            new = (focus, rifns+"constname", PlainLiteral(text))
        elif t == 'http://www.w3.org/1999/02/22-rdf-syntax-ns#PlainLiteral':
            # rsplit so '@' characters inside the value are preserved;
            # only the final '@lang' suffix is split off.
            (val, lang) = text.rsplit('@', 1)
            new = (focus, rifns+"value", PlainLiteral(val, lang))
        else:
            new = (focus, rifns+"value", TypedLiteral(text, t))
        triples.append(new)
        triples.append( (focus, rdfns+"type", LabeledNode(rifns+"Const")) )
        return (focus, triples)

    (parent_ns, parent_local) = ns_split(tag)
    parent_class = LabeledNode(parent_ns+parent_local)
    # Mode-2 properties of this class must be emitted even when absent
    # (as rdf:nil); collect them here and tick them off below.
    ensure_children = []
    for (P,p),(np,mode) in table_3.items():
        if P == local and mode == 2:
            ensure_children.append(rifns+np)

    # Do the basic matching from Table 2
    # NOTE(review): a childless element makes group_children() yield [],
    # so group[0] below would raise IndexError — confirm all non-Var/Const
    # elements always have children.
    for group in group_children(rifxml):
        group_tag = group[0].tag
        (group_ns, group_local) = ns_split(group_tag)
        if group_tag == rif.id:
            continue
        # mode 0 -- the child, when present, has the ordered=yes attribute
        # mode 1 -- the child is required to appear exactly once
        # mode 2 -- the child is optional, or it may be repeated
        # mode 3 -- just for the <slot> element
        prop = group_ns+group_local
        if group[0].get("ordered") == "yes":
            mode = 0
        else:
            mode = 1
        if group_ns == rifns:
            try:
                # Table 3 contains over-rides for property names and modes.
                # It's the only way to get Mode 2 or Mode 3
                (prop_suffix, mode) = table_3[ (local, group_local) ]
                if prop_suffix:
                    prop = group_ns+prop_suffix
            except KeyError:
                pass
        #print "    group", group_tag, mode, prop
        if mode == 0: # ORDERED=YES
            if len(group) > 1:
                # NOTE(review): error() expects (element, message); this call
                # passes only the message and would raise a TypeError.
                error("elements with ordered='yes' must not be repeated")
            values=[]
            # NOTE(review): getchildren() was removed in Python 3.9; use
            # list(group[0]) when porting to Python 3.
            for child in group[0].getchildren():
                (child_focus, child_triples) = describe(child, base=base)
                values.append(child_focus)
                triples.extend(child_triples)
            value = RDFList(values)
        elif mode == 1: # REQUIRED TO APPEAR EXACTLY ONCE
            if len(group) > 1:
                error(group[0], "only elements in listed as Mode=2 in Table 3 may be repeated")
            if contains_markup(group[0]):
                child = the_child_of(group[0])
                (value, child_triples) = describe(child, base=base)
                triples.extend(child_triples)
            else:
                # eg <location>
                value = PlainLiteral(group[0].text)
        elif mode == 2: # OPTIONAL/REPEATED -- GATHERED INTO A LIST
            values=[]
            for occurance in group:
                if contains_markup(occurance):
                    child = the_child_of(occurance)
                    (child_focus, child_triples) = describe(child, base=base)
                else:
                    # eg <profile>  [optional, not repeatable]
                    child_focus = PlainLiteral(occurance.text)
                    child_triples = []
                values.append(child_focus)
                triples.extend(child_triples)
            value = RDFList(values)
        elif mode == 3: # SLOTS -- TRANSFORMED AND GATHERED INTO LIST
            values=[]
            for occurance in group:
                assert occurance.get("ordered") == "yes"
                # Each slot becomes a fresh blank node carrying either
                # argname/argvalue (Atom/Expr) or slotkey/slotvalue (Frame).
                node = BlankNode()
                values.append(node)
                if tag == rif.Expr or tag == rif.Atom:
                    assert(len(occurance) == 2)
                    assert(occurance[0].tag == rif.Name)
                    name = occurance[0].text
                    (v, vt) = describe(occurance[1], base=base)
                    triples.extend(vt)
                    triples.append( (node, rifns+"argname", PlainLiteral(name)) )
                    triples.append( (node, rifns+"argvalue", v) )
                else:
                    # in std dialects, tag == rif.Frame here
                    assert(len(occurance) == 2)
                    (k, kt) = describe(occurance[0], base=base)
                    (v, vt) = describe(occurance[1], base=base)
                    triples.extend(kt)
                    triples.extend(vt)
                    triples.append( (node, rdfns+"type",
                                     LabeledNode(rifns+"Slot")) )
                    triples.append( (node, rifns+"slotkey", k) )
                    triples.append( (node, rifns+"slotvalue", v) )
            value = RDFList(values)
        else:
            raise Exception
        triples.append( (focus, prop, value) )
        triples.append( (focus, rdfns+"type", parent_class) )
        try:
            ensure_children.remove(prop)
        except:
            pass
    # Any mode-2 property that never occurred is emitted as an empty list.
    for prop in ensure_children:
        triples.append( (focus, prop, LabeledNode(rdfns+"nil")) )
        triples.append( (focus, rdfns+"type", parent_class) )
    return (focus, triples)
def main():
    """Read a RIF XML document (stdin, URL, or file path from argv) and
    print its RIF-in-RDF translation as Turtle triples."""
    if len(sys.argv) == 1:
        stream = sys.stdin
        doc = etree.fromstring(stream.read())
        stream.close()
        base = None
    else:
        src = sys.argv[1]
        if ":" in src:
            # Treat arguments containing ':' as URLs.
            stream = urllib2.urlopen(src)
            base = src
            doc = etree.fromstring(stream.read())
            stream.close()
        else:
            with open(src) as stream:
                doc = etree.fromstring(stream.read())
            base = "file:/"+src
    if doc.tag != rif.Document:
        error(doc, "Root element is not rif:Document.")
    if base:
        # BUG FIX: previously LabeledNode(focus) referenced 'focus' before
        # assignment (NameError); the document base IRI was intended.
        focus = LabeledNode(base)
    else:
        focus = BlankNode()
    (focus, triples) = describe(doc, focus, base)
    # print(...) with a single pre-formatted string behaves identically on
    # Python 2 (parenthesized expression) and Python 3.
    print("# RIF focus is " + focus.as_turtle())
    print("# %d triples" % len(triples))
    for (s, p, o) in triples:
        print("%s <%s> %s ." % (s.as_turtle(), p, o.as_turtle()))
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Copyright (c) 2011-2016 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
# ----------------------------------------------------------------------
# A CH server providing a single-threaded one-at-a-time service that responds
# to CH API XMLRPC/SSL requests.
# Note: Need to place a /etc/geni-chapi/chapi-dev.ini containing an entry:
# ; database URL
# ; Syntax: postgresql://USER:PASSWORD@HOST/DB
#
from gcf.geni.SecureXMLRPCServer import SecureXMLRPCRequestHandler
from gcf.geni.SecureXMLRPCServer import SecureXMLRPCServer
import optparse
import os
import sys
import urlparse
import xmlrpclib
import tools.pluginmanager as pm
import plugins.chapiv1rpc.plugin
import plugins.chrm.plugin
import plugins.csrm.plugin
import plugins.flaskrest.plugin
import plugins.logging.plugin
import plugins.opsmon.plugin
import plugins.marm.plugin
import plugins.sarm.plugin
from plugins.chapiv1rpc.chapi.Parameters import set_auxiliary_config_file
from tools.chapi_log import *
from tools.ch_server import handleCall, initialize
# Module-level state populated by parseOptions() in main().
opts = None
args = None
test_server_initialized = False

# This server overrides certain parameters (notably the database)
# in a subsequently parsed parameters config file.
set_auxiliary_config_file('chapi-test.ini')

# Register the core plugin-manager services used by the CH plugins.
pm.registerService('xmlrpc', pm.XMLRPCHandler())
pm.registerService('config', pm.ConfigDB())
pm.registerService('rpcserver', pm.RESTServer())
pm.registerService(pm.ENVIRONMENT_SERVICE, pm.WSGIEnvironment())
class MySecureXMLRPCRequestHandler(SecureXMLRPCRequestHandler):
    """Request handler that bridges XMLRPC-over-SSL POST requests into a
    WSGI-style environ dict and dispatches them to handleCall()."""

    def __init__(self, request, client_address, server):
        SecureXMLRPCRequestHandler.__init__(self, request,
                                            client_address, server)

    def do_POST(self):
        # Set up environment to be compatible with WSGI application environment
        environ = {}
        environ['CONTENT_LENGTH'] = self.headers.getheader('content-length', 0)
        environ['wsgi.input'] = self.rfile
        environ['wsgi.url_scheme'] = 'https'
        sockname = self.request.getsockname()
        environ['SERVER_NAME'] = sockname[0]
        environ['SERVER_PORT'] = str(sockname[1])
        environ['REQUEST_URI'] = self.path
        # Client certificate captured by the SSL server during the handshake.
        environ['SSL_CLIENT_CERT'] = self.server.pem_cert
        environ['PATH_INFO'] = self.path
        try:
            response = handleCall(environ)
        except Exception as e:
            # Convert any server-side failure into an XMLRPC Fault response
            # instead of dropping the connection.
            msg = "%s: %s" % (type(e).__name__, str(e))
            fault = xmlrpclib.Fault(1, msg)
            response = xmlrpclib.dumps(fault, methodresponse=True,
                                       allow_none=True)
        self.wfile.write(response)
        self.wfile.flush()
        # Half-close (SHUT_WR) so the client sees end-of-response.
        self.connection.shutdown(1)
def parseOptions():
    """Parse command-line options for the test CH server.

    :return: (options, positional args) tuple from optparse.
    """
    parser = optparse.OptionParser()
    parser.add_option("--hostname", help="Server hostname/IP",
                      default="localhost")
    parser.add_option("--port", help="Server TCP Port", default="9999")
    default = '/usr/share/geni-ch/portal/gcf.d/trusted_roots/CATedCACerts.pem'
    parser.add_option("--trusted_roots",
                      help="Concatenated set of trusted X509 certs",
                      default=default)
    parser.add_option("--cert_file", help="Server certificate",
                      default="/usr/share/geni-ch/ma/ma-cert.pem")
    parser.add_option("--key_file", help="Server private key",
                      default="/usr/share/geni-ch/ma/ma-key.pem")
    # BUG FIX: pass sys.argv[1:] so the program name is not treated as a
    # positional argument (parse_args defaults to argv[1:] for a reason).
    return parser.parse_args(sys.argv[1:])
def main():
    # Parse options, initialize the CH datastore/plugins, then serve
    # XMLRPC-over-SSL requests forever (single-threaded, one at a time).
    global opts, args
    opts, args = parseOptions()
    initialize()
    server = SecureXMLRPCServer((opts.hostname, int(opts.port)),
                                requestHandler=MySecureXMLRPCRequestHandler,
                                ca_certs=opts.trusted_roots,
                                keyfile=opts.key_file,
                                certfile=opts.cert_file)
    print "Serving on %s:%d" % (opts.hostname, int(opts.port))
    server.serve_forever()
if __name__ == "__main__":
main()
|
# coding=utf-8
import sys,os
import sqlite3
import errno
import json
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import re
import csv
import xlrd
import xlwt
import codecs
def open_excel(file='file.xls'):
    """Open an Excel workbook; print the error and return None on failure.

    :param file: path to the .xls file.
    :return: an xlrd Book object, or None if opening failed.
    """
    try:
        return xlrd.open_workbook(file)
    except Exception as e:
        # 'except Exception as e' is valid on Python 2.6+ AND Python 3,
        # unlike the old 'except Exception, e' form; likewise print(...)
        # with a single argument works on both.
        print(str(e))
def parseTable(table, colnameindex):
    """Read every row of an xlrd sheet into {row_number: [cell values]}.

    :param table: xlrd sheet object.
    :param colnameindex: index of the header row; its length fixes the
        number of columns read from every row.
    """
    colnames = table.row_values(colnameindex)  # header row
    ncols = len(colnames)
    map_dict = {}
    for rownum in range(table.nrows):
        row = table.row_values(rownum)
        map_dict[rownum] = [row[i] for i in range(ncols)]
    return map_dict
def excel_table_byindex(file='file.xls', colnameindex=0, by_index=0):
    """Return table data from an Excel file, selecting the sheet by index.

    :param file: Excel file path.
    :param colnameindex: row index of the header (column-name) row.
    :param by_index: index of the sheet to read.
    """
    workbook = open_excel(file)
    sheet = workbook.sheets()[by_index]
    return parseTable(sheet, colnameindex)
def excel_table_byname(file='file.xls', colnameindex=0, by_name=u'Sheet1'):
    """Return table data from an Excel file, selecting the sheet by name.

    :param file: Excel file path.
    :param colnameindex: row index of the header (column-name) row.
    :param by_name: name of the sheet to read (e.g. 'Sheet1').
    """
    workbook = open_excel(file)
    sheet = workbook.sheet_by_name(by_name)
    return parseTable(sheet, colnameindex)
def is_number(s):
    """Return True if *s* looks numeric, else False.

    Accepts: anything int() accepts (ints, floats, decimal strings),
    hexadecimal strings (base-16), and single Unicode numeral characters
    (e.g. u'½' via unicodedata.numeric).

    Fix: the first two attempts now also catch TypeError, so non-string,
    non-numeric input (None, lists, ...) returns False instead of raising.
    """
    try:
        int(s)
        return True
    except (TypeError, ValueError):
        pass
    try:
        int(s, 16)
        return True
    except (TypeError, ValueError):
        pass
    try:
        import unicodedata
        unicodedata.numeric(s)
        return True
    except (TypeError, ValueError):
        pass
    return False
def getUintStr(devUnitVal, devUnit):
    """Return a short unit label for a sensor-scale row, or None.

    devUnitVal: scale value from the sheet; must look numeric (is_number).
    devUnit:    unit description, e.g. 'Temperature (C)' or 'Percent'.

    Preference order: text inside parentheses, then the whole string if it
    is short (< 10 chars).  'Reserved' rows and non-numeric scale values
    yield None.
    """
    if not is_number(devUnitVal):
        #print "Not Unit Val"
        return None
    if (devUnit == "Reserved"):
        return None
    # Raw string fixes the invalid '\(' escape sequence of the original.
    unit_list = re.findall(r'\((.+?)\)', devUnit)
    if (unit_list):
        return unit_list[0]
    if len(devUnit) < 10:
        return devUnit
    return None
def save_result_txt(file='res.txt', data=""):
    """Write *data* to *file*, replacing any existing content.

    Fix: dropped the dead local 'list_res' that was assigned but never used.
    """
    with open(file, 'w') as fp:
        fp.write(data)
# Convert the Z-Wave "Multilevel Sensor" assignment spreadsheet into a JSON
# list of {type, name, unit:[{type, name}, ...]} device records.
# Rows where column 0 and 1 are filled start a new device; rows where they
# are blank add additional unit scales to the most recent device.
sensor_dict = excel_table_byname("sds13812-2_multilevel_sensor_command_class_list_of_assigned_multilevel_sensor_types_and_scales.xls", 4, "Multilevel Sensor")
devName = ""
devType = None
devUnit = None
devUnitVal = None
resDict = {}
devObj = []
devDict = {}
unit_list = []
devList = []
for (i, value) in sensor_dict.items():
    newDevFlag = 0
    unit_name = None
    #print value
    # Skip the header/preamble rows of the sheet.
    if (i < 5):
        continue
    devUnit = value[4]      # unit description column
    devUnitVal = value[5]   # scale value column
    unit_name = getUintStr(devUnitVal, devUnit)
    # A populated type/name pair marks the start of a new device record.
    if value[0] != None and value[0] != '' and value[1] != None and value[1] != '':
        print "new Device"
        devName = value[0]
        devType = value[1]
        newDevFlag = 1
    else :
        print "Last Device "
    if devUnit and devName and devType:
        if newDevFlag == 1:
            # Start a fresh device dict; 'unit' shares the unit_list object,
            # so later rows appending to unit_list extend this device.
            devDict = {}
            devObj = []
            unit_list = []
            unit_obj = {}
            devDict["type"] = devType
            devDict["name"] = devName
            devDict['unit'] = unit_list
            if (devUnitVal and unit_name):
                unit_obj['type'] = devUnitVal
                unit_obj['name'] = unit_name
                unit_list.append(unit_obj)
            devList.append(devDict)
        else:
            # Continuation row: add one more scale to the current device.
            unit_obj = {}
            if (devUnitVal and unit_name):
                unit_obj['type'] = devUnitVal
                unit_obj['name'] = unit_name
                unit_list.append(unit_obj)
#print devList
data = json.dumps(devList, sort_keys=True,indent=2)
save_result_txt("zwave_multilevel_xls.json", data)
|
# Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
from .base_generation_dataset import BaseGenerationDataset
from .registry import DATASETS
@DATASETS.register_module()
class GenerationPairedDataset(BaseGenerationDataset):
    """General paired image folder dataset for image generation.

    It assumes that the training directory is '/path/to/data/train'.
    During test time, the directory is '/path/to/data/test'. '/path/to/data'
    can be initialized by args 'dataroot'. Each sample contains a pair of
    images concatenated in the w dimension (A|B).

    Args:
        dataroot (str | :obj:`Path`): Path to the folder root of paired images.
        pipeline (List[dict | callable]): A sequence of data transformations.
        test_mode (bool): Store `True` when building test dataset.
            Default: `False`.
    """

    def __init__(self, dataroot, pipeline, test_mode=False):
        super().__init__(pipeline, test_mode)
        subdir = 'test' if test_mode else 'train'
        self.dataroot = osp.join(str(dataroot), subdir)
        self.data_infos = self.load_annotations()

    def load_annotations(self):
        """Load paired image paths.

        Returns:
            list[dict]: List that contains paired image paths.
        """
        return [
            dict(pair_path=path)
            for path in sorted(self.scan_folder(self.dataroot))
        ]
|
from django.contrib import admin
from .models import Description, Category
# Expose the app's models in the Django admin with default ModelAdmin options.
admin.site.register(Description)
admin.site.register(Category)
|
#!/usr/bin/python
# coding: UTF-8
# Winstar WEG010032ALPP5N00000 Graphic OLED and WS0010 OLED driver code for
# Raspberry Pi GPIO library. Some code originally found at
# http://forums.adafruit.com/viewtopic.php?f=8&t=29207&start=15#p163445
# Based on http://www.rpiblog.com/2012/11/interfacing-16x2-lcd-with-raspberry-pi.html
# Massive respec to UHCLEM in that post for getting me on the right track.
#
# Timing & initialisation issues resolved with help of LCD Initialization guide
# http://web.alfredstate.edu/weimandn/lcd/lcd_initialization/lcd_initialization_index.html
# I also found this helpful http://www.8051projects.net/lcd-interfacing/commands.php
#
# Find the latest version of this file at https://gist.github.com/lardconcepts/4947360/
# Fork and comment but I'd REALLY appreciate getting the graphics mode working.
# I'm trying to achieve "full height" letters, 32 pixels high. Anyone?
#
# based on code from lrvick and LiquidCrystal
# lrvic - https://github.com/lrvick/raspi-hd44780/blob/master/hd44780.py
# LiquidCrystal - https://github.com/arduino/Arduino/blob/master/libraries/LiquidCrystal/LiquidCrystal.cpp
#
# For Winstar WEG1xxxxxx OLED displays, I've wired it up as follows:
# OLED PN | FUNCT | GPIO | P1 pin | Audiophonics pin
# 4 RS 25 22 26
# 5 R/W 18 12 N/A (tied to GND)
# 6 E 24 18 24
# 11 D4 23 16 22
# 12 D5 17 11 18
# 13 D6 21 13 16
# 14 D7 22 15 10
# 15 CS1 9 21 N/A
# 16 CS2 1 5 N/A
#
# Handy Binary to Hex converter
# 0x0 = 0000 0x1 = 0001 0x2 = 0010 0x3 = 0011 0x4 = 0100 0x5 = 0101
# 0x6 = 0110 0x7 = 0111 0x8 = 1000 0x9 = 1001 0xa = 1010 0xb = 1011
# 0xc = 1100 0xd = 1101 0xe = 1110 0xf = 1111
#
# For some reason, with the Winstar in 4-bit mode, the command needs 0x00 before
# the real command. For example, shift whole display right is 0001,1100 = 0x01,0x0c
# So you prefix with 0x00 and, in the next command, combine the two 4-groups into 0x1c eg:
# lcd.write4bits(0x00)
# lcd.write4bits(0x1c)
#
# PS - Adafruit WebIDE FTMFW! http://learn.adafruit.com/webide/
# Quick hack to differentiate between a raspberry Pi or not
# Quick hack to differentiate between a raspberry Pi or not:
# if RPi.GPIO imports, assume real hardware; otherwise fall back to curses.
try:
    import RPi.GPIO as GPIO
    DISPLAY_INSTALLED = True
except:
    DISPLAY_INSTALLED = False

import curses
import time

# Make sure to set your pin mappings to the correct wiring for your display!!
# Pin Mappings V2
#OLED_DB4=25
#OLED_DB5=24
#OLED_DB6=23
#OLED_DB7=15
#OLED_RS=7
#OLED_E=8

# Pin Mappings V3
# BCM GPIO numbers for the display's 4-bit data bus (DB4-DB7) plus the
# register-select (RS) and enable (E) control lines.
OLED_DB4=25
OLED_DB5=24
OLED_DB6=23
OLED_DB7=27
OLED_RS=7
OLED_E=8
class Winstar_GraphicOLED:
    """Driver for Winstar WEG010032 / WS0010 OLED displays in 4-bit mode.

    When RPi.GPIO is unavailable (DISPLAY_INSTALLED is False) the class
    renders into a curses window instead, so the code can be exercised
    off-target.

    Fixes vs. original:
      * removed the duplicated LCD_DISPLAYMOVE / LCD_CURSORMOVE class
        attributes (the same pair was defined twice);
      * __init__ now initialises self.displaycontrol / self.displaymode,
        so noDisplay(), display(), cursor(), noCursor(), noBlink(),
        leftToRight(), rightToLeft(), autoscroll() and noAutoscroll() no
        longer raise AttributeError when called before being set manually.
    """

    # commands
    LCD_CLEARDISPLAY = 0x01
    LCD_RETURNHOME = 0x02
    LCD_ENTRYMODESET = 0x04
    LCD_DISPLAYCONTROL = 0x08
    LCD_CURSORSHIFT = 0x10
    LCD_FUNCTIONSET = 0x20
    LCD_SETCGRAMADDR = 0x40
    LCD_SETDDRAMADDR = 0x80

    # flags for display entry mode
    LCD_ENTRYRIGHT = 0x00
    LCD_ENTRYLEFT = 0x02
    LCD_ENTRYSHIFTINCREMENT = 0x01
    LCD_ENTRYSHIFTDECREMENT = 0x00

    # flags for display on/off control
    LCD_DISPLAYON = 0x04
    LCD_DISPLAYOFF = 0x00
    LCD_CURSORON = 0x02
    LCD_CURSOROFF = 0x00
    LCD_BLINKON = 0x01
    LCD_BLINKOFF = 0x00

    # flags for display/cursor shift
    LCD_DISPLAYMOVE = 0x08
    LCD_CURSORMOVE = 0x00
    LCD_MOVERIGHT = 0x04
    LCD_MOVELEFT = 0x00

    # flags for function set
    LCD_8BITMODE = 0x10
    LCD_4BITMODE = 0x00
    LCD_2LINE = 0x08
    LCD_1LINE = 0x00
    LCD_5x10s = 0x04
    LCD_5x8DOTS = 0x00

    # Maps a Unicode code point (index) to the display's Western European
    # font-table code; 0 entries are unmapped, 32 is a space.
    character_translation = [ 0,0,0,0,0,0,0,0,0,32, #0
                              0,0,0,0,0,0,0,0,0,0, #10
                              0,0,0,0,0,0,0,0,0,0, #20
                              0,0,32,33,34,35,36,37,38,39, #30
                              40,41,42,43,44,45,46,47,48,49, #40
                              50,51,52,53,54,55,56,57,58,59, #50
                              60,61,62,63,64,65,66,67,68,69, #60
                              70,71,72,73,74,75,76,77,78,79, #70
                              80,81,82,83,84,85,86,87,88,89, #80
                              90,91,92,93,94,95,96,97,98,99, #90
                              100,101,102,103,104,105,106,107,108,109, #100
                              110,111,112,113,114,115,116,117,118,119, #110
                              120,121,122,123,124,125,126,0,0,0, #120
                              0,0,0,0,0,0,0,0,0,0, #130
                              0,0,0,0,0,0,0,0,0,0, #140
                              0,0,0,0,0,0,0,0,0,0, #150
                              32,33,204,179,198,32,32,176,209,221, #160
                              32,215,32,32,220,32,210,177,32,32, #170
                              211,200,188,32,32,32,210,216,227,226, #180
                              229,143,152,152,65,203,153,65,175,196, #190
                              145,146,144,147,73,73,73,73,194,166, #200
                              136,137,135,206,79,88,201,129,130,128, #210
                              131,89,254,195,156,157,155,205,158,97, #220
                              32,196,149,150,148,151,162,163,161,164, #230
                              111,167,140,141,139,207,142,214,192,133, #240
                              134,132,117,121,250,202 ] #250

    def __init__(self):
        self.numlines = 2
        # Shadow copies of the controller's display-control and entry-mode
        # registers.  Defaults mirror the init sequence in oledReset()
        # (0x0c = display on, 0x06 = increment / no shift); previously these
        # attributes were never initialised.
        self.displaycontrol = self.LCD_DISPLAYON | self.LCD_CURSOROFF | self.LCD_BLINKOFF
        self.displaymode = self.LCD_ENTRYLEFT | self.LCD_ENTRYSHIFTDECREMENT
        if DISPLAY_INSTALLED == False:
            self.stdscr = curses.initscr()
            self.curx = 0
            self.cury = 0

    def oledReset(self):
        """Full power-on init: force the controller into a known 8-bit
        state, then switch to 4-bit mode and configure the display."""
        if DISPLAY_INSTALLED:
            GPIO.setmode(GPIO.BCM)
            GPIO.setwarnings(False)
            self.pins_db = [OLED_DB4, OLED_DB5, OLED_DB6, OLED_DB7]
            self.pin_rs = OLED_RS
            self.pin_e = OLED_E

            # Initialize GPIO pins
            for pin in self.pins_db:
                GPIO.setup(pin, GPIO.OUT, initial=GPIO.LOW)
            GPIO.setup(OLED_E, GPIO.OUT, initial=GPIO.LOW)
            GPIO.setup(OLED_RS, GPIO.OUT, initial=GPIO.LOW)

            # initialization sequence taken from audiophonics.fr site
            # there is a good writeup on the HD44780 at Wikipedia
            # https://en.wikipedia.org/wiki/Hitachi_HD44780_LCD_controller

            # Assuming that the display may already be in 4 bit mode
            # send five 0000 instructions to resync the display
            # NOTE: There is a resync function that is included later but is
            # not being used here due to the need to place the display in
            # 8 bit mode temporarily
            for i in range(1, 5):
                self.writeonly4bits(0x00, False)
            self.delayMicroseconds(1000)

            # Now place in 8 bit mode so that we start from a known state
            # issuing function set twice in case we are in 4 bit mode
            self.writeonly4bits(0x03, False)
            self.writeonly4bits(0x03, False)
            self.delayMicroseconds(1000)

            # placing display in 4 bit mode
            self.writeonly4bits(0x02, False)
            self.delayMicroseconds(1000)

            # From this point forward, we need to use write4bits function which
            # implements the two stage write that 4 bit mode requires
            self.write4bits(0x08, False)  # Turn display off
            self.write4bits(0x29, False)  # Function set for 4 bits, 2 lines, 5x8 font, Western European font table
            self.write4bits(0x06, False)  # Entry Mode set to increment and no shift
            self.write4bits(0x17, False)  # Set to char mode and turn on power
            self.write4bits(0x01, False)  # Clear display and reset cursor
            self.write4bits(0x0c, False)  # Turn on display
        else:
            self.stdscr.clear()
            self.curx = 0
            self.cury = 0

    def home(self):
        """Return the cursor to position (0, 0)."""
        if DISPLAY_INSTALLED:
            self.write4bits(self.LCD_RETURNHOME)  # set cursor position to zero
            self.delayMicroseconds(2000)  # this command takes a long time!
        else:
            self.curx = 0
            self.cury = 0

    def clear(self):
        """Blank the display and home the cursor."""
        if DISPLAY_INSTALLED:
            self.write4bits(self.LCD_CLEARDISPLAY)  # command to clear display
            self.delayMicroseconds(2000)  # clearing the display takes a long time
        else:
            self.stdscr.clear()
            self.stdscr.refresh()
            self.curx = 0
            self.cury = 0

    def setCursor(self, col, row):
        """Move the cursor to (col, row); rows past the end are clamped."""
        if DISPLAY_INSTALLED:
            self.row_offsets = [0x00, 0x40, 0x14, 0x54]
            if (row > self.numlines):
                row = self.numlines - 1  # we count rows starting w/0
            self.write4bits(self.LCD_SETDDRAMADDR | (col + self.row_offsets[row]))
        else:
            self.curx = col
            self.cury = row

    def noDisplay(self):
        ''' Turn the display off (quickly) '''
        if DISPLAY_INSTALLED:
            self.displaycontrol &= ~self.LCD_DISPLAYON
            self.write4bits(self.LCD_DISPLAYCONTROL | self.displaycontrol)

    def display(self):
        ''' Turn the display on (quickly) '''
        if DISPLAY_INSTALLED:
            self.displaycontrol |= self.LCD_DISPLAYON
            self.write4bits(self.LCD_DISPLAYCONTROL | self.displaycontrol)

    def noCursor(self):
        ''' Turns the underline cursor off '''
        if DISPLAY_INSTALLED:
            self.displaycontrol &= ~self.LCD_CURSORON
            self.write4bits(self.LCD_DISPLAYCONTROL | self.displaycontrol)

    def cursor(self):
        ''' Cursor On '''
        if DISPLAY_INSTALLED:
            self.displaycontrol |= self.LCD_CURSORON
            self.write4bits(self.LCD_DISPLAYCONTROL | self.displaycontrol)

    def noBlink(self):
        ''' Turn off the blinking cursor '''
        if DISPLAY_INSTALLED:
            self.displaycontrol &= ~self.LCD_BLINKON
            self.write4bits(self.LCD_DISPLAYCONTROL | self.displaycontrol)

    def DisplayLeft(self):
        ''' These commands scroll the display without changing the RAM '''
        if DISPLAY_INSTALLED:
            self.write4bits(self.LCD_CURSORSHIFT | self.LCD_DISPLAYMOVE | self.LCD_MOVELEFT)

    def scrollDisplayRight(self):
        ''' These commands scroll the display without changing the RAM '''
        if DISPLAY_INSTALLED:
            self.write4bits(self.LCD_CURSORSHIFT | self.LCD_DISPLAYMOVE | self.LCD_MOVERIGHT)

    def leftToRight(self):
        ''' This is for text that flows Left to Right '''
        if DISPLAY_INSTALLED:
            self.displaymode |= self.LCD_ENTRYLEFT
            self.write4bits(self.LCD_ENTRYMODESET | self.displaymode)

    def rightToLeft(self):
        ''' This is for text that flows Right to Left '''
        if DISPLAY_INSTALLED:
            self.displaymode &= ~self.LCD_ENTRYLEFT
            self.write4bits(self.LCD_ENTRYMODESET | self.displaymode)

    def autoscroll(self):
        ''' This will 'right justify' text from the cursor '''
        if DISPLAY_INSTALLED:
            self.displaymode |= self.LCD_ENTRYSHIFTINCREMENT
            self.write4bits(self.LCD_ENTRYMODESET | self.displaymode)

    def noAutoscroll(self):
        ''' This will 'left justify' text from the cursor '''
        if DISPLAY_INSTALLED:
            self.displaymode &= ~self.LCD_ENTRYSHIFTINCREMENT
            self.write4bits(self.LCD_ENTRYMODESET | self.displaymode)

    def writeonly4bits(self, bits, char_mode=False):
        """Clock a single 4-bit nibble (0-15) onto the data bus.

        Used only during reset, while the controller may still be in 8-bit
        mode; char_mode selects the data register instead of the command
        register.
        """
        if DISPLAY_INSTALLED:
            if bits > 15:
                return
            bits = bin(bits)[2:].zfill(4)
            GPIO.output(self.pin_rs, char_mode)
            for pin in self.pins_db:
                GPIO.output(pin, False)
            # pins_db is wired DB4..DB7, but the binary string is MSB-first,
            # hence the reversed indexing.
            for i in range(4):
                if bits[i] == "1":
                    GPIO.output(self.pins_db[::-1][i], True)
            self.pulseEnable()

    def write4bits(self, bits, char_mode=False):
        """Send a full byte as two 4-bit transfers (high nibble first)."""
        if DISPLAY_INSTALLED:
            bits = bin(bits)[2:].zfill(8)
            GPIO.output(self.pin_rs, char_mode)
            for pin in self.pins_db:
                GPIO.output(pin, False)
            for i in range(4):
                if bits[i] == "1":
                    GPIO.output(self.pins_db[::-1][i], True)
            self.pulseEnable()
            for pin in self.pins_db:
                GPIO.output(pin, False)
            for i in range(4, 8):
                if bits[i] == "1":
                    GPIO.output(self.pins_db[::-1][i - 4], True)
            self.pulseEnable()

    def resyncDisplay(self):
        # Per the last page (pg 24) of OLED doc located at
        # www.picaxe.com/docs/oled.pdf
        # To resync the display send 5 0000's and then
        # two function sets for 4 bit mode 0010
        # The display should now be resynced to be ready to accept a new cmd
        if DISPLAY_INSTALLED:
            for i in range(1, 5):
                self.writeonly4bits(0x0, False)
            self.write4bits(0x02, False)

    def delayMicroseconds(self, microseconds):
        """Sleep for the given number of microseconds."""
        seconds = microseconds / 1000000.0
        time.sleep(seconds)

    def pulseEnable(self):
        """Strobe the enable line to latch the current data-bus state."""
        # the pulse timing in the 16x2_oled_volumio 2.py file is 1000/500
        # the pulse timing in the original version of this file is 10/10
        # with a 100 post time for setting
        if DISPLAY_INSTALLED:
            GPIO.output(self.pin_e, False)
            self.delayMicroseconds(.1)  # enable pulse must be > 450ns
            GPIO.output(self.pin_e, True)
            self.delayMicroseconds(.1)  # enable pulse must be > 450ns
            GPIO.output(self.pin_e, False)

    def message(self, text):
        ''' Send string to LCD. Newline wraps to second line'''
        if DISPLAY_INSTALLED:
            for char in text:
                if char == '\n':
                    self.write4bits(0xC0)  # next line
                else:
                    # Translate incoming character into correct value for
                    # the European charset and send it to the display.
                    # Use space for characters out of the table's range.
                    c = ord(char)
                    if c > 255:
                        c = 32
                    self.write4bits(self.character_translation[c], True)
        else:
            self.stdscr.addstr(self.cury, self.curx, text.encode('utf-8'))
            self.stdscr.refresh()
if __name__ == '__main__':
    # Self-test harness: shows a banner plus two accented-character sets on
    # the display (or the curses fallback), then cleans up.
    try:
        print("Winstar OLED Display Test")
        lcd = Winstar_GraphicOLED()
        lcd.oledReset()
        lcd.home()
        lcd.clear()
        lcd.message("Winstar OLED\nPi Powered")
        time.sleep(4)
        lcd.home()
        lcd.clear()
        # Lower-case accented characters exercising the European font table.
        accent_min = u"àáâãäçèéëêìíî \nïòóôöøùúûüþÿ"
        #for char in accent_min: print char, ord(char)
        lcd.message(accent_min)
        time.sleep(5)
        lcd.home()
        lcd.clear()
        # Upper-case accented characters.
        accent_maj = u"ÀÁÂÆÇÈÉÊËÌÍÎÐ \nÑÒÓÔÕÙÚÛÜÝÞß"
        #for char in accent_maj: print char, ord(char)
        lcd.message(accent_maj)
        time.sleep(5)
        lcd.home()
        lcd.clear()
    except KeyboardInterrupt:
        pass
    finally:
        # NOTE(review): if the constructor itself raised, 'lcd' is unbound
        # here and this cleanup will raise NameError -- confirm acceptable.
        lcd.home()
        lcd.clear()
        lcd.message("Goodbye!")
        time.sleep(2)
        lcd.home()
        lcd.clear()
        if DISPLAY_INSTALLED:
            GPIO.cleanup()
        else:
            curses.endwin()
        print("Winstar OLED Display Test Complete")
|
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Skylib module containing shell utility functions."""
def _array_literal(iterable):
    """Creates a string from a sequence that can be used as a shell array.

    For example, `shell.array_literal(["a", "b", "c"])` would return the
    string `("a" "b" "c")`, which can be used in a shell script wherever an
    array literal is needed.

    Note that all elements in the array are quoted (using `shell.quote`) for
    safety, even if they do not need to be.

    Args:
      iterable: A sequence of elements. Elements that are not strings will be
          converted to strings first, by calling `str()`.

    Returns:
      A string that represents the sequence as a shell array; that is,
      parentheses containing the quoted elements.
    """
    quoted = []
    for element in iterable:
        quoted.append(_quote(str(element)))
    return "(" + " ".join(quoted) + ")"
def _quote(s):
    """Quotes the given string for use in a shell command.

    This function wraps the string in single quotes, escaping any embedded
    single quotes with the standard `'\\''` sequence, so it is safe to pass
    to a shell even if it contains spaces or other metacharacters.

    Args:
      s: The string to quote.

    Returns:
      A quoted version of the string that can be passed to a shell command.
    """
    escaped = s.replace("'", "'\\''")
    return "'{}'".format(escaped)
# Public API of this module: callers use shell.quote / shell.array_literal.
shell = struct(
    array_literal = _array_literal,
    quote = _quote,
)
|
# GUI to visualize how a galaxy spectrum evolves over time
'''
'''
import fsps
import os
import sys
import time
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button, SpanSelector
from generate_ssp import get_filename, ages_allowed
# Hard-coded local paths for FSPS and the precomputed SSP data files.
# NOTE(review): SPS_HOME is set *after* 'import fsps' above -- fsps may read
# it at import time, in which case this assignment comes too late; confirm.
os.environ["SPS_HOME"] = "/Users/galaxies-air/SPS_Conroy/fsps/"
loc = '/Users/galaxies-air/Desktop/Galaxies/visualization/'
ssp_folder = '/Users/galaxies-air/Desktop/Galaxies/visualization/ssps/'
def find_nearest(array, value):
    """Return the element of *array* closest to *value* (first on ties)."""
    arr = np.asarray(array)
    nearest_index = np.abs(arr - value).argmin()
    return arr[nearest_index]
class StellarPop:
    """Interactive matplotlib GUI showing how a galaxy spectrum evolves.

    State:
        ssps        -- DataFrame of stellar populations (columns:
                       Birthtime, Metallicity, Dust)
        frozen      -- freeze-state flag: -1 unfrozen, 1 freeze requested,
                       0 frozen snapshot currently displayed
        gal_age     -- current age of the galaxy (Gyr)
        gal_dust    -- current dust content of the galaxy
        zoom_limits -- wavelength window [lo, hi] shown in the zoom axis
    """

    def __init__(self, age=0):
        # NOTE(review): the 'age' parameter is never used; gal_age always
        # starts at 0.0 -- confirm intent.
        # Main variable containing the running total of ssps, formatted as
        # (Birthtime, Metallicity, Dust).
        self.ssps = self.create_ssps_var()
        self.frozen = -1
        self.gal_age = 0.0  # Current age of the galaxy
        self.gal_dust = 0.0  # Current dust content of the galaxy
        self.zoom_limits = [3000, 7000]  # Region of spectrum to zoom in on
        # File that stores wavelength for sps
        self.wavelength = pd.read_csv(ssp_folder+'wavelength.df')
        # c in Angstrom/s over wavelength -> frequency grid
        self.frequency = (3*10**18)/self.wavelength
        initial_ssp = self.create_sp()
        # Running total of ssps, will iterate over this for every observable.
        # NOTE(review): DataFrame.append is deprecated and removed in
        # pandas >= 2.0; pd.concat would be needed there -- confirm the
        # pinned pandas version.
        self.ssps = self.ssps.append(initial_ssp, ignore_index=True)
        self.create_plot()
        self.initialize_buttons()
        self.update_plot(self.ssps)

    def create_ssps_var(self):
        """Creates the self.ssps variable that is used throughout the code

        Parameters:

        Returns:
            ssps (pd.Dataframe): Dataframe setup in the proper format
        """
        return pd.DataFrame(columns=['Birthtime', 'Metallicity', 'Dust'])

    def create_sp(self, metallicity=0.0, birthtime=0.0):
        """Creates a stellar population dict that can easily be appended to self.ssps dataframe

        Parameters:
            metallicity (float): log(Z) in solar units (so 0.0 is solar metallicity)
            birthtime (float): how long after galaxy formation to birth this stellar population

        Returns:
            sp (dict): dict containing birthtime, metallicity
        """
        sp = {'Birthtime': birthtime, 'Metallicity': metallicity}
        return sp

    def read_sp(self, metallicity=0.0, age=1.0, dust=0.0):
        """Reads the stellar population model with the given parameters.

        Parameters:
            metallicity (float): log(Z) in solar units (so 0.0 is solar metallicity)
            age (float): how long after galaxy formation to birth this stellar population
            dust (float): how much dust there is

        Returns:
            pd.Dataframe: Dataframe containing spectrum and fraction of stars left at that age, Z, dust

        # MUST BE FORMATTED AS 0.0, 1.0, 0.0. Floats need decimals
        """
        filename = get_filename(metallicity=metallicity, dust=dust, age=age)
        sp_df = pd.read_csv(ssp_folder+filename)
        # NOTE(review): placeholder log message -- it never names the file read.
        print(f'Read (unknown)')
        return sp_df

    def create_plot(self):
        """Sets up the figure for first time use. Called from __init__ only.

        Creates the main spectrum axis, the zoom axis, and four line
        artists (live + frozen for each axis); the frozen lines start as
        all-NaN so nothing is drawn until a freeze is requested.
        """
        self.fig = plt.figure(figsize=(10, 8))
        # Axis where the spectrum is
        self.ax_spec = self.fig.add_axes([0.1, 0.63, 0.8, 0.35])
        self.ax_zoom = self.fig.add_axes([0.1, 0.3, 0.8, 0.25])
        self.nans_arr = np.ones(len(self.wavelength))
        self.nans_arr[:] = np.nan
        self.spectrum, = self.ax_spec.plot(
            self.wavelength, np.ones(
                len(self.wavelength)), label='Spectrum', color='black')
        self.spectrum_freeze, = self.ax_spec.plot(
            self.wavelength, self.nans_arr, label='Spectrum', color='red', alpha=0.5)
        self.zoom_spectrum, = self.ax_zoom.plot(self.wavelength, np.ones(
            len(self.wavelength)), label='Spectrum', color='black')
        self.zoom_spectrum_freeze, = self.ax_zoom.plot(
            self.wavelength, self.nans_arr, label='Spectrum', color='red', alpha=0.5)
        self.ax_spec.set_xlabel('Wavelength ($\\AA$)')
        self.ax_spec.set_ylabel('Intensity (L$_\odot$)')
        self.ax_zoom.set_xlabel('Wavelength ($\\AA$)')
        self.ax_zoom.set_ylabel('Intensity (L$_\odot$)')
        self.ax_spec.set_xscale('log')
        self.ax_spec.set_yscale('log')
        self.ax_zoom.set_xscale('log')
        self.ax_zoom.set_yscale('log')
        plt.show()

    def get_sp_files(self, ssps, gal_age, gal_dust):
        """Read one SSP model file per population that has been born yet.

        NOTE(review): the 'gal_dust' parameter is unused -- read_sp is
        called with self.gal_dust instead; confirm which is intended.
        """
        sp_files = []
        for i in range(len(ssps)):
            metallicity = ssps.iloc[i]['Metallicity']
            birthtime = ssps.iloc[i]['Birthtime']
            # Compute the age of the current population as
            # galaxy_age - population_birthtime.
            age = gal_age - birthtime
            if age >= 0:
                # Snap to the nearest precomputed age grid point.
                age = find_nearest(ages_allowed, age)
                sp_file = self.read_sp(
                    metallicity=metallicity, age=age, dust=self.gal_dust)
                sp_files.append(sp_file)
        return sp_files

    def update_plot(self, ssps, gal_age=0.0, gal_dust=0.0):
        """Updates the figure after a change to the ssps

        Parameters:
            ssps (pd.Dataframe): self.ssps variable that gets passed around. Stores all ssps and their ages
            gal_age (float): current galaxy age in Gyr
            gal_dust (float): current dust parameter

        Returns:
        """
        sp_files = self.get_sp_files(ssps, gal_age, gal_dust)
        print(len(sp_files))
        self.spectra = [sp_files[i]['Spectrum'] for i in range(len(sp_files))]
        # frozen == 1 means a freeze was just requested: snapshot the current
        # total spectrum into the red 'frozen' lines, annotate it, and move
        # to state 0 (snapshot displayed).
        if self.frozen == 1:
            self.spectrum_freeze.set_ydata(self.total_spectrum)
            self.zoom_spectrum_freeze.set_ydata(self.total_spectrum)
            self.ax_spec.text(10**7, 100, f'Age: {np.round(self.gal_age,2)} Gyr', color='red', alpha=0.5)
            self.ax_spec.text(10**7, 10, f'log Z: {ssps.iloc[0]["Metallicity"]} Z$\odot$', color='red', alpha=0.5)
            self.ax_spec.text(10**7, 1, f'Dust: {self.gal_dust}', color='red', alpha=0.5)
            self.frozen_spec = self.spectrum_freeze.get_ydata()
            self.frozen = 0
        # frozen == -1 means unfrozen: hide the snapshot lines and labels.
        if self.frozen == -1:
            self.spectrum_freeze.set_ydata(self.nans_arr)
            self.zoom_spectrum_freeze.set_ydata(self.nans_arr)
            for txt in self.ax_spec.texts:
                txt.set_visible(False)
        # Multiply by nu since units of spectrum are Lsun/nu
        self.total_spectrum = np.multiply(
            np.transpose(np.array(self.frequency)), np.sum(self.spectra, axis=0))
        self.spectrum.set_ydata(self.total_spectrum)
        self.zoom_spectrum.set_ydata(self.total_spectrum)
        self.ax_zoom.set_xlim(self.zoom_limits)
        # self.set_axis_limits(self.ax_spec)
        self.ax_spec.set_ylim([10**-12, 1000])
        wavelength = np.array(self.wavelength['Wavelength'].to_list())
        # Find the region that we are currently zoomed over
        idx = np.logical_and(
            wavelength > self.zoom_limits[0], wavelength < self.zoom_limits[-1])
        zoom_lower_lim = np.max([
            np.min(self.total_spectrum[0][idx]) * 0.5, 10**-12])
        zoom_upper_lim = np.max(self.total_spectrum[0][idx])*2
        # If a snapshot is displayed, widen the zoom limits to include it.
        if self.frozen == 0:
            freeze_lower_lim = np.min(self.frozen_spec[0][idx])
            freeze_upper_lim = np.max(self.frozen_spec[0][idx])
            zoom_lower_lim = np.min([freeze_lower_lim, zoom_lower_lim])
            zoom_upper_lim = np.max([freeze_upper_lim, zoom_upper_lim])
        self.ax_zoom.set_ylim([zoom_lower_lim, zoom_upper_lim])
        # self.set_axis_limits(self.ax_zoom)
        plt.draw()

    def initialize_buttons(self):
        """Gets the buttons and sliders ready

        Parameters:

        Returns:
        """
        # Sliders for age, metal, dust
        slidercolor = 'dodgerblue'
        ax_age = self.fig.add_axes([0.1, 0.18, 0.8, 0.03])
        self.age_slider = Slider(
            ax_age, 'Age (Gyr)', 0.0, 15.0, valinit=0, valstep=0.01, color=slidercolor)
        self.age_slider.on_changed(self.set_age)
        ax_metal = self.fig.add_axes([0.1, 0.13, 0.8, 0.03])
        self.metal_slider = Slider(ax_metal, 'log Z (Z$\odot$)', -1.0, 1.0,
                                   valinit=0, valstep=0.25, color=slidercolor)
        self.metal_slider.on_changed(self.set_metal)
        ax_dust = self.fig.add_axes([0.1, 0.08, 0.8, 0.03])
        self.dust_slider = Slider(ax_dust, 'Dust', 0.0, 1.0,
                                  valinit=0, valstep=0.25, color=slidercolor)
        self.dust_slider.on_changed(self.set_dust)
        # Button to add spectrum
        ax_adsp = self.fig.add_axes([0.1, 0.02, 0.15, 0.03])
        self.ax_adsp = Button(ax_adsp, 'Starburst')
        self.ax_adsp.on_clicked(self.button_add_sp)
        # Button to freeze the current spectrum
        ax_freeze = self.fig.add_axes([0.3166, 0.02, 0.15, 0.03])
        self.ax_freeze = Button(ax_freeze, 'Freeze')
        self.ax_freeze.on_clicked(self.button_freeze)
        # Button to play a movie
        ax_movie = self.fig.add_axes([0.5333, 0.02, 0.15, 0.03])
        self.ax_movie = Button(ax_movie, 'Play Movie')
        self.ax_movie.on_clicked(self.button_play_movie)
        # Button to reset all sp
        ax_reset = self.fig.add_axes([0.75, 0.02, 0.15, 0.03])
        self.ax_reset = Button(ax_reset, 'Reset')
        self.ax_reset.on_clicked(self.button_reset)
        # Slider for zoom region
        self.zoom_span = SpanSelector(self.ax_spec, self.zoomslider, 'horizontal',
                                      useblit=True, rectprops=dict(alpha=0.5, facecolor='red'))

    def set_age(self, age):
        """Sets the age of the galaxy to the given value, then updates the plot

        Parameters:
            age (float): age of the galaxy

        Returns:
        """
        self.gal_age = age
        self.update_plot(self.ssps, gal_age=self.gal_age,
                         gal_dust=self.gal_dust)

    def set_metal(self, metal):
        """Sets the metallicity of the galaxy to the given value, then updates the plot

        Parameters:
            metal (float): metallicity of the galaxy

        Returns:
        """
        # Applies the new metallicity to every population at once.
        self.ssps['Metallicity'] = metal
        self.update_plot(self.ssps, gal_age=self.gal_age,
                         gal_dust=self.gal_dust)

    def add_sp(self, birthtime=1.0, metallicity=0.0):
        """Adds a starburst at the current time, with its own birthtime and metallicity

        Parameters:
            birthtime (float): time that the sp was formed (Gyr)
            metallicity (float): Z in log solar

        Returns:
            None: Updates the self.ssps dataframe with new row appended
        """
        sp = self.create_sp(birthtime=birthtime, metallicity=metallicity)
        self.ssps = self.ssps.append(sp, ignore_index=True)
        self.update_plot(self.ssps, gal_age=self.gal_age,
                         gal_dust=self.gal_dust)

    def set_dust(self, dust):
        """Sets the dust of the galaxy to the given value, then updates the plot

        Parameters:
            dust (float): dust content of the galaxy

        Returns:
        """
        self.gal_dust = dust
        self.update_plot(self.ssps, gal_age=self.gal_age,
                         gal_dust=self.gal_dust)

    def button_add_sp(self, event):
        # Starburst button: birth a new population at the current age.
        print(f'Starburst at {self.gal_age} Gyr!')
        self.add_sp(birthtime=self.gal_age)

    def button_play_movie(self, event):
        # Step the galaxy age from 0 to 15 Gyr, redrawing each frame.
        print(f'Playing Movie')
        moviespeed = 0.1
        for i in np.arange(0, 15+moviespeed, moviespeed):
            self.gal_age = i
            self.update_plot(self.ssps, gal_age=self.gal_age,
                             gal_dust=self.gal_dust)
            self.age_slider.set_val(i)
            plt.pause(0.1)

    def button_reset(self, event):
        # Reset button: drop all populations (sliders keep their positions).
        print(f'Resetting')
        self.ssps = self.create_ssps_var()
        self.update_plot(self.ssps, gal_age=self.gal_age,
                         gal_dust=self.gal_dust)

    def button_freeze(self, event):
        # Toggle the freeze state: -1 (unfrozen) -> 1 (freeze requested);
        # 0 (snapshot displayed) -> -1 (unfrozen).  update_plot moves
        # 1 -> 0 once it has captured the snapshot.
        print(f'Freezing')
        self.frozen = - self.frozen
        if self.frozen == 0:
            self.frozen = -1
        if self.frozen == -1:
            self.ax_freeze.label.set_text('Freeze')
        else:
            self.ax_freeze.label.set_text('Unfreeze')
        self.update_plot(self.ssps, gal_age=self.gal_age,
                         gal_dust=self.gal_dust)

    def zoomslider(self, xmin, xmax):
        # SpanSelector callback: set the zoom window and redraw.
        self.zoom_limits = [xmin, xmax]
        self.update_plot(self.ssps, gal_age=self.gal_age,
                         gal_dust=self.gal_dust)
# Build and display the interactive viewer.
stellar_pop = StellarPop()
|
#
# Corda Certificate Generator
# requires Java Keytool to be installed
# see also requirements.txt for Python dependencies
#
import sys
import re
import argparse
import os
import subprocess
from ruamel.yaml import YAML
yaml = YAML()
from munch import Munch
from colorama import Fore, Back, Style
# Corda certificate-role payloads (DER-encoded) keyed by role name; used as
# the value of the Corda role X.509 extension (OID 1.3.6.1.4.1.50530.1.1).
CERT_ROLES = {
    "DOORMAN_CA": "020101",
    "NETWORK_MAP": "020102",
    "SERVICE_IDENTITY": "020103",
    "NODE_CA": "020104",
    "TLS": "020105",
    "LEGAL_IDENTITY": "020106",
    "CONFIDENTIAL_IDENTITY": "020107",
    "NETWORK_PARAMETERS": "020108"
}

def cert_role_string(cr):
    """Format the keytool -ext argument carrying the given Corda cert role."""
    return '1.3.6.1.4.1.50530.1.1:non-critical=' + CERT_ROLES[cr]
def execute_verbose(cmd):
    """Echo *cmd* in cyan, run it in a shell, and print captured stdout/stderr.

    Launch failures are reported and the function returns early; previously
    a bare 'except: pass' left 'result' unbound and the prints below raised
    NameError instead.
    """
    print(Fore.CYAN + cmd, flush=True)
    print(Style.RESET_ALL)
    try:
        result = subprocess.run(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, shell=True)
    except OSError as e:
        print(e)
        return
    print(result.stdout.decode('utf-8'))
    print(result.stderr.decode('utf-8'))
def execute(cmd):
    """Run *cmd* in a shell, capturing output silently.

    Returns the CompletedProcess, or None if the process could not be
    launched.  Keeps the original best-effort contract (callers ignore the
    return value) but no longer hides launch failures behind a bare except.
    """
    try:
        return subprocess.run(cmd, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE, shell=True)
    except OSError as e:
        print(e)
        return None
class CertGenerator:
def __init__(self, workdir = 'work'):
self.WORK_DIR = workdir
os.makedirs(workdir, exist_ok=True)
def __mkpath(self, file):
file = os.path.normpath(file)
parts = os.path.split(file)
if len(parts) == 2 and parts[0]:
os.makedirs(parts[0], exist_ok=True)
return file
#
# http://openssl.cs.utah.edu/docs/apps/x509v3_config.html
#def x509_to_openssl(dn):
# change from "CN=Corda, OU=Blah" to "/CN=Corda/OU=Blah"
# return re.sub(r'(^|[,]\s*)([A-Z]+)=', lambda x:'/'+x.group(2)+'=',dn)
#
# Format the certificate extensions required by keytool
#
def __extensions_str(self, cert):
exts = ''
# cert role
if hasattr(cert, 'role'):
exts = exts + f' -ext {cert_role_string(cert.role)}'
# add extensions
for extn in cert.extensions:
extv = cert.extensions[extn]
if extv.startswith('critical'):
extn += ':critical'
extv = extv[9:]
exts = exts + f' -ext "{extn}={extv}"'
# expiration
if hasattr(cert, 'expires'):
exts = exts + f' -validity {cert.expires}'
return exts
#
# load YAML config; return as nested python object
#
def load_config(self, config_file, params):
with open(config_file) as f:
p = re.compile('.*\".*{.*}.*\"')
conf = ""
for line in f:
if p.match(line):
try:
line = line.format(**params)
except KeyError as e:
print(f'{e} must be defined: --param {str(e)[1:-1]}:<value> ')
exit(1)
conf = conf + line
#print(conf)
#exit(0)
y = yaml.load(conf)
m = Munch.fromDict(y)
#print(m)
#exit(0)
return m
def create_cert(self, config, cert, executor):
""" create certificate based on the specified config """
store = config.stores[cert.store]
outfile = self.__mkpath(store.file)
keyfile = self.__mkpath(f'{self.WORK_DIR}/{cert.alias}.key')
csrfile = self.__mkpath(f'{self.WORK_DIR}/{cert.alias}.csr')
crtfile = self.__mkpath(f'{self.WORK_DIR}/{cert.alias}.crt')
rootfile = self.__mkpath(f'{self.WORK_DIR}/root.crt')
# delete the existing keypair
cmd0 = f'keytool -delete -alias {cert.alias} -keystore {outfile} -storepass {store.password}'
executor(cmd0)
# create self-signed keypair ( & cert)
cmd1 = f'keytool -genkeypair -dname "{cert.subject}" -alias {cert.alias} -keyalg EC -keysize 256 -keystore {outfile} -storepass {store.password} -keypass {cert.key.password} -v'
cmd1 = cmd1 + self.__extensions_str(cert)
executor(cmd1)
if cert.issuer:
# find the signer. If it is not explicitly in config
if '.' in cert.issuer:
castore = cert.issuer.split('.')[0]
castore = config.stores[castore]
issuer = cert.issuer.split('.')[1]
else:
castore = store
issuer = cert.issuer
if ':' in issuer:
issuerpass = issuer.split(':')[1]
issuer = issuer.split(':')[0]
else:
issuerpass = issuer.key.password
cmd0 = f'keytool -exportcert -alias cordarootca -keystore {castore.file} -file {rootfile} -storepass {castore.password} -v'
executor(cmd0)
# create CSR for issuer to sign
cmd2 = f'keytool -certreq -alias {cert.alias} -file {csrfile} -keystore {outfile} -storepass {store.password} -keypass {cert.key.password} -v'
executor(cmd2)
# sign the CSR
cmd3 = f'keytool -gencert -alias {issuer} -ext honored=all -infile {csrfile} -outfile {crtfile} -keystore {castore.file} -storepass {castore.password} -keypass {issuerpass} -v'
cmd3 = cmd3 + self.__extensions_str(cert)
executor(cmd3)
# update with the signed copy
cmd4 = f'keytool -importcert -alias {cert.alias} -keystore {outfile} -storepass {store.password} -noprompt -trustcacerts -keypass {cert.key.password} -v'
cat = 'type' if os.name == 'nt' else 'cat'
cmd4 = f'{cat} {crtfile} {rootfile} | {cmd4}'
executor(cmd4)
else:
# self-signed - nothing else to do
pass
print(f' --> {store.file}')
def __apply_store_defaults(store, alias):
    """Backfill defaults on a keystore config entry.

    Ensures the store has a `password` attribute, defaulting to
    'password'. Uses hasattr() for the probe, consistent with
    __apply_cert_defaults, instead of calling __getattr__ directly
    (hasattr raises/handles the same AttributeError internally).
    """
    if not hasattr(store, 'password'):
        store['password'] = 'password'
def __apply_cert_defaults(cert, alias):
    """Backfill optional certificate attributes with their defaults."""
    # Each missing attribute gets the corresponding fallback value.
    fallbacks = (('alias', alias), ('issuer', None), ('password', 'password'))
    for attr, value in fallbacks:
        if not hasattr(cert, attr):
            setattr(cert, attr, value)
def generate(self, config, verbose=False, executor=execute):
    """Generate every certificate described in *config*.

    Applies defaults to the stores and certificates sections first, then
    creates each certificate via create_cert, announcing each one on
    stdout (with colorama decoration when *verbose*).
    """
    # Preprocess: fill in store defaults.
    for store_name in config.stores:
        CertGenerator.__apply_store_defaults(config.stores[store_name], 0)
    # Preprocess: fill in certificate defaults.
    for alias in config.certificates:
        CertGenerator.__apply_cert_defaults(config.certificates[alias], alias)
    # Generate each certificate in turn.
    for alias in config.certificates:
        cert = config.certificates[alias]
        header = f'generating: {cert.alias} - "{cert.subject}"'
        if verbose:
            rule = Fore.BLACK + Style.BRIGHT + '@echo ' + '-' * 100
            print(rule)
            print(Fore.BLACK + Style.BRIGHT + '@echo ' + header)
            print(rule)
            print(Style.RESET_ALL)
        else:
            print(header)
        self.create_cert(config, cert, executor)
def main(args):
    """Entry point: collect NAME:VALUE substitutions, load the config, and
    generate the certificate hierarchy (printing or executing commands
    depending on --execute)."""
    from colorama import init
    init(strip=False)
    # Flatten the list-of-lists produced by nargs='+' with action='append'.
    args.params = [y for x in args.params for y in x]
    params = {}
    for p in args.params:
        # Split on the FIRST ':' only, so values may themselves contain
        # colons (the original split(':') silently truncated such values,
        # and raised IndexError when the separator was missing).
        name, sep, value = p.partition(':')
        if not sep:
            raise ValueError(f"invalid parameter substitution '{p}', expected NAME:VALUE")
        params[name] = value
    certgen = CertGenerator(args.workdir)
    print(f'Parameter substitutions: {params}')
    cert_config = certgen.load_config(args.config, params)
    # Dry-run prints the keytool commands instead of executing them.
    executor = execute if args.execute else print
    certgen.generate(cert_config, True, executor)
if __name__ == "__main__":
    parser = argparse.ArgumentParser('Certificate hierarchy generator')
    parser.add_argument('--config', help='configuration file in YAML format', required=True)
    parser.add_argument('--workdir', help='working directory', dest='workdir', default='work')
    # Fixed: help text previously duplicated 'working directory' by mistake.
    parser.add_argument('--execute', help='execute the generated keytool commands instead of printing them',
                        dest='execute', default=False, action='store_true')
    parser.add_argument('--param', help='parameter substitutions. [A:B]',
                        dest='params', nargs='+', action='append', default=[])
    args = parser.parse_args()
    main(args)
import yaml
class Parser:
    """Load a YAML test configuration file and iterate over its tests."""

    def __init__(self, config):
        # Path to the YAML configuration file.
        self.config_file = config

    def get_tests(self):
        """Yield each entry under the top-level 'tests' key.

        Uses yaml.safe_load: yaml.load without an explicit Loader is
        deprecated in PyYAML and can construct arbitrary Python objects
        from untrusted input; safe_load is the correct choice for plain
        configuration data.
        """
        with open(self.config_file, 'r') as file:
            data = yaml.safe_load(file)
        for test in data['tests']:
            yield test
|
"""Results using grey-matter maps."""
import numpy as np
import pandas as pd
from sklearn.svm import SVR
from sklearn.model_selection import KFold
from utils import plot_results, GridSearchCVRBFKernel
# Load the data
# Precomputed subject-by-subject kernel matrix; rows/columns indexed by subject.
df = pd.read_csv('gm/Kernels/euclidean_norm.csv', index_col=0)
X = df.values
# Target vector: one age per subject (single unnamed column -> squeeze to 1-D).
y = pd.read_csv('gm/Target/age.csv', index_col=0, header=None).squeeze().values
# Perform 5-fold cross-validation
estimator = SVR(kernel='precomputed', max_iter=1e6, tol=1e-2)
# Log-spaced grids over the RBF gamma and SVR C hyper-parameters.
param_grid = {'gamma': [j * 10**k for k in range(-8, -6) for j in [1, 4, 7]],
              'C': [j * 10**k for k in range(2, 4) for j in [1, 4, 7]]}
y_trues, y_preds = [], []
gridsearchs = []
for i in range(5):
    # Pre-defined train/validation subject lists for fold i.
    df_train = pd.read_csv(
        '.../data/splits/training/split-{0}.tsv'.format(i),
        sep='\t', index_col=0
    )
    df_val = pd.read_csv(
        '.../data/splits/validation/split-{0}.tsv'.format(i),
        sep='\t', index_col=0
    )
    # Map subject IDs to row positions in the kernel matrix.
    train_index = np.where(
        np.in1d(df.index.values, df_train['subject_ID'].values))[0]
    val_index = np.where(
        np.in1d(df.index.values, df_val['subject_ID'].values))[0]
    # Inner 5-fold grid search over (gamma, C); age_range presumably bounds
    # the predictions -- see GridSearchCVRBFKernel in utils (TODO confirm).
    gridsearch = GridSearchCVRBFKernel(
        estimator, param_grid, cv=KFold(5, shuffle=True, random_state=42),
        age_range=(18, 90)
    )
    gridsearch.fit(X, y, train_index)
    y_pred = gridsearch.predict(val_index)
    y_preds.append(y_pred)
    y_trues.append(y[val_index])
    gridsearchs.append(gridsearch)
    # Put the results in a DataFrame
    df_pred = pd.DataFrame(y_preds[i], index=df.index.values[val_index],
                           columns=['predicted age'])
    df_res = pd.concat([df_val.set_index('subject_ID'), df_pred], axis=1,
                       sort=False).reset_index().rename(
        columns={'index': 'subject_ID'})
    # Fill in missing values with the mean age
    # from the corresponding (site, gender) pair
    df_res.loc[df_res['predicted age'].isna(), 'predicted age'] = (
        df_res[df_res['predicted age'].isna()].apply(
            lambda x: df_train.groupby(['site', 'gender'])['age'].mean().loc[
                (x['site'], x['gender'])], axis=1)
    )
    # Save the results in a tsv file
    df_res = df_res.drop(['gender', 'site'], axis=1)
    df_res.to_csv('gm/Predictions/split-{0}.tsv'.format(i), sep='\t')
# Plot the performance on each fold
plot_results(y_trues, y_preds, marker='o', fillstyle='none',
             output_file='gm/SVM.png')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/1/8 00:02
# @Author : Yuecheng Jing
# @Site : www.nanosparrow.com
# @File : PathSum
# @Software: PyCharm
# Definition for a binary tree node.
class TreeNode:
    """A binary-tree node holding a value and two child links."""

    def __init__(self, x):
        self.val = x       # payload value
        self.left = None   # left child (TreeNode or None)
        self.right = None  # right child (TreeNode or None)
class Solution:
    """LeetCode 112 'Path Sum': does a root-to-leaf path sum to the target?"""

    def hasPathSum(self, root, sum):
        """
        :type root: TreeNode
        :type sum: int
        :rtype: bool
        """
        if root is None:
            return False
        # Leaf: the path ends here, so the remaining target must equal val.
        if root.left is None and root.right is None:
            return root.val == sum
        remaining = sum - root.val
        # Recurse only into children that exist; short-circuit on success.
        if root.left is not None and self.hasPathSum(root.left, remaining):
            return True
        return root.right is not None and self.hasPathSum(root.right, remaining)

    def printTree(self, root):
        """Pre-order print of the tree's node values.

        Fixed: TreeNode stores its payload as `val`, not `value`; the
        original `root.value` raised AttributeError on every call.
        """
        print(root.val)
        if root.left is not None:
            self.printTree(root.left)
        if root.right is not None:
            self.printTree(root.right)
|
# Chapter 2 concepts: variables, arithmetic, f-strings, and concatenation.
x, y = 2, 4
z = x * y
print(f"{x} * {y} = {z}")
print("Hello Python world!")

# Build a full name by joining first and last names.
name, last_name = "Marcel", "Houary"
full_name = " ".join([name, last_name])
print(f"First name: {name}, Last name: {last_name}, Full name: {full_name}")

# Same output again, with shorter variable names.
n, ln = "Marcel", "Houary"
fn = f"{n} {ln}"
print(f"First name: {n}, Last name: {ln}, Full name: {fn}")
# When the MVP is deployed using Kubernetes ingress controller implemented by Traefik,
# TLS certificates are not served correctly. This results in https and secure
# websockets not being enabled. This test case is to verify this bug.
# To test using local ip address, assign local ip to env variable 'IP_FOR_INGRESS_TEST'
# and then run make k8s_test
import pytest
import socket
import os
import requests
@pytest.mark.skip
@pytest.mark.common
def test_check_connection():
    """Verify the Traefik ingress serves TLS correctly (expects HTTP 200).

    Target IP: the hard-coded cluster-master IP by default; when the
    IP_FOR_INGRESS_TEST env var is set, the local host's resolved IP is
    used instead. NOTE(review): the env var's *value* is never read --
    it acts only as a flag; confirm that is intended.
    """
    if os.getenv("IP_FOR_INGRESS_TEST") is None:  # fixed: was `== None`
        host_ip = "192.168.93.47"  # IP of cluster master
    else:
        host_name = socket.gethostname()
        host_ip = socket.gethostbyname(host_name)  # IP of this host
    ingress_host = "k8s.stfc.skao.int"  # Ingress HTTP hostname
    url = "https://" + str(host_ip)
    exception = ""
    result = ""
    try:
        # verify=True: the whole point is that the TLS cert must validate.
        result = requests.get(url, headers={'host': ingress_host}, verify=True)
    except Exception as ex:
        exception = ex
    # Passes only if no exception was raised and the response repr shows 200.
    assert exception == "" and "Response [200]" in str(result)
|
import streamlit as st
from streamlit.logger import get_logger
from duplicazer.core import remove_duplicate, find_duplicate
from duplicazer.constant import APP_PARAMS
st_logger = get_logger(__name__)
# Global page configuration; layout, title, and icon all come from APP_PARAMS.
st.set_page_config(
    layout=APP_PARAMS["layout"],
    page_title=APP_PARAMS["title"],
    page_icon=APP_PARAMS["icon"],
)
def remove():
    """Button callback: de-duplicate the left pane's text and publish the
    result to the right pane via session_state (reads global `input_text`)."""
    remove_ret = remove_duplicate(input_text)
    st.session_state[APP_PARAMS["right"]["key"]] = remove_ret
def find():
    """Button callback: find duplicates in the left pane's text and publish
    the result to the right pane via session_state (reads global `input_text`)."""
    find_ret = find_duplicate(input_text)
    st.session_state[APP_PARAMS["right"]["key"]] = find_ret
def clear():
    """Button callback: blank out both the input and output panes."""
    st.session_state[APP_PARAMS["left"]["key"]] = ""
    st.session_state[APP_PARAMS["right"]["key"]] = ""
if __name__ == "__main__":
    # Page header: title, repository/visitor badges, and description text.
    st.title(APP_PARAMS["title"])
    st.markdown(f'{APP_PARAMS["markdown"]["repository"]} {APP_PARAMS["markdown"]["visitor"]}')
    st.markdown(APP_PARAMS["markdown"]["description"])
    # Two side-by-side text panes: input (left) and result (right).
    input_column, output_column = st.columns(2)
    # Button row; the two unnamed columns are horizontal spacers.
    remove_button, find_button, _,_,clear_button = st.columns([0.15,0.1,0.35,0.4,0.1])
    with input_column:
        input_text = st.text_area(
            label=APP_PARAMS["left"]["label"],
            height=APP_PARAMS["left"]["height"],
            key=APP_PARAMS["left"]["key"],
        )
    with output_column:
        output_text = st.text_area(
            label=APP_PARAMS["right"]["label"],
            height=APP_PARAMS["right"]["height"],
            key=APP_PARAMS["right"]["key"],
        )
    # Wire each button to its callback; the callbacks communicate with the
    # panes through st.session_state keyed by APP_PARAMS.
    with remove_button:
        st.button(label=APP_PARAMS["button"]["remove"]["label"], on_click=remove)
    with find_button:
        st.button(label=APP_PARAMS["button"]["find"]["label"], on_click=find)
    with clear_button:
        st.button(label=APP_PARAMS["button"]["clear"]["label"], on_click=clear)
|
# MODULES
from . import schemas
from . import __demo__
from . import blueprints
from . import sanic
# STRUCTURE
# Registry mapping each sub-module's name to the list of attribute names it
# exposes (module.__dir__() returns every attribute, dunders included).
__plugins__ = {
    "schemas" : schemas.__dir__(),
    "__demo__" : __demo__.__dir__(),
    "blueprints" : blueprints.__dir__(),
    "sanic" : sanic.__dir__(),
}
# TODO Throw this away and use https://github.com/googledatalab/pydatalab instead
# - Supports full API, e.g. use_cached_results, max_billing_tier
# - See examples: https://github.com/googledatalab/notebooks
# - But verify it works with arrays and structs...
from collections import OrderedDict
import pandas as pd
from potoo.pandas import pd_read_bq, bq_default_project
import potoo.pandas_io_gbq_par_io as gbq
# TODO Use https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery-usage.html
# - TODO Needs a little extra setup for auth
class BQ:
    """Convenience wrapper around BigQuery: pandas query reads plus table
    metadata helpers, bound to a single project."""

    def __init__(self, project_id):
        self.project_id = project_id
        self.service = gbq.GbqConnector(project_id=project_id).service

    def pd_read(self, *args, **kw):
        """Run a query and return the result as a DataFrame.

        Reuses self.project_id but not self.service (pd_read_bq makes its own).
        """
        return pd_read_bq(*args, **kw, project_id=self.project_id)

    # TODO Stop doing stuff like this; use `bq ls` and/or google-cloud-python instead
    def tables_list(self, dataset_id):
        """Return a DataFrame describing the tables in *dataset_id*.

        Fixed: previously referenced the module-level `bq` instead of
        `self`, and ignored *dataset_id* in favour of a hard-coded
        'rt_v1_s2'.
        """
        res = self.service.tables().list(projectId=self.project_id, datasetId=dataset_id).execute()
        return pd.DataFrame(res['tables']).apply(axis=1, func=lambda x: pd.Series(OrderedDict([
            ('type', x['type']),
            ('project_id', x['tableReference']['projectId']),
            ('dataset_id', x['tableReference']['datasetId']),
            ('table_id', x['tableReference']['tableId']),
        ])))

    def table_get(self, dataset_id, table_id):
        """Fetch the raw table resource dict from the BigQuery API."""
        return self.service.tables().get(projectId=self.project_id, datasetId=dataset_id, tableId=table_id).execute()

    def table_schema(self, dataset_id, table_id):
        """Return the table's schema as a list of field dicts."""
        return self.table_get(dataset_id, table_id)['schema']['fields']

    def table_head(self, dataset_id, table_id, limit=10):
        """Return the first *limit* rows of the table as a DataFrame."""
        return self.pd_read('select * from `%(dataset_id)s.%(table_id)s` limit %(limit)s' % locals())

    # bq.table_summary
    # - cf. https://www.postgresql.org/docs/current/static/view-pg-stats.html
    # - TODO histogram (graph?)
    # - TODO Add perc to approx_top_count [{'count':100, 'value':'a'}, {'count':50, 'value':'b'}, ...]
    def table_summary(
        self,
        dataset_id,
        table_id,
        fields_f=lambda xs: xs,  # e.g. to subset when bq barfs on too many things
    ):
        """Per-column summary stats: count, distinct, null fraction, top
        values, and quartiles, computed in a single query."""
        return self.pd_read('''
            select * from unnest((
                select %(array_agg_expr)s
                from %(dataset_id)s.%(table_id)s
            ))
        ''' % dict(
            array_agg_expr = '[\n%s\n]' % ',\n'.join(
                '''
                struct(
                    '%(name)s' as name,
                    count(*) as count,
                    approx_count_distinct(`%(name)s`) as `distinct`,
                    sum(case when `%(name)s` is null or `%(name)s` = '' then 1 else 0 end) / count(*) as null_frac,
                    approx_top_count(`%(name)s`, 5) as top_by_count,
                    approx_quantiles(`%(name)s`, 4) as quantiles
                )
                ''' % field
                # Fixed: previously called module-level `bq.table_schema`.
                for field in fields_f(self.table_schema(dataset_id, table_id))
            ),
            dataset_id = dataset_id,
            table_id = table_id,
        )
        )
# Connect bq
# NOTE(review): this runs at import time, so importing the module requires
# BigQuery credentials and network access -- confirm that is intended.
bq = BQ(bq_default_project())
# XXX Testing
# importlib.reload(gbq)
# bq.table_head('rt_v1_s1', 'outgoing_Neighborhood_Data_OpenDoor_AttomDataNeighborhoodAirportNoise_zip_OpenDoor_AttomDataNeighborhoodAirportNoise')
# bq.table_summary('rt_v1_s1', 'outgoing_Neighborhood_Data_OpenDoor_AttomDataNeighborhoodAirportNoise_zip_OpenDoor_AttomDataNeighborhoodAirportNoise')
# bq.pd_read('''
# select
# address_token,
# 4,
# 'foo',
# true,
# struct('a',3,true),
# struct('b' as x, 4 as y, false as z),
# [1,2,3],
# 'foo'
# from (select address_token from dwellings_v0.addresses limit 2)
# ''')
|
"""Packaging metadata for the `m` micro web framework."""
try:
    # distutils is deprecated (PEP 632) and removed in Python 3.12; setuptools
    # also actually honours install_requires, which distutils ignored.
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(name='m',
      version='0.1.0',
      packages=['m', 'm.security', 'm.extensions', 'm.extensions.sqlalchemy'],
      install_requires=[
          'WebOb>=1.6.1',
          'sqlalchemy>=1.0.0',
          'pyhocon>=0.3.0',
      ],
      author="comyn",
      author_email="me@xueming.li",
      description="This is a very light web framework",
      license="Apache-2",
      )
|
'''A Python module for reading and writing C3D files.'''
from __future__ import unicode_literals
import array
import io
import numpy as np
import struct
import warnings
import codecs
PROCESSOR_INTEL = 84
PROCESSOR_DEC = 85
PROCESSOR_MIPS = 86


class DataTypes(object):
    ''' Container defining different data types used for reading file data.

    Data types depend on the processor format the file is stored in:
    Intel and DEC are little-endian, SGI/MIPS is big-endian.
    '''

    def __init__(self, proc_type):
        self.proc_type = proc_type
        # Single-byte types never need byte swapping.
        self.uint8 = np.uint8
        self.int8 = np.int8
        multi_byte = ('float32', 'float64', 'uint16', 'uint32', 'uint64',
                      'int16', 'int32', 'int64')
        if proc_type == PROCESSOR_MIPS:
            # Big-Endian (SGI/MIPS format): tag every multi-byte dtype '>'.
            for type_name in multi_byte:
                setattr(self, type_name,
                        np.dtype(getattr(np, type_name)).newbyteorder('>'))
        else:
            # Little-Endian format (Intel or DEC): native numpy types.
            for type_name in multi_byte:
                setattr(self, type_name, getattr(np, type_name))

    @property
    def is_ieee(self):
        ''' True if the associated file is in the Intel format.
        '''
        return self.proc_type == PROCESSOR_INTEL

    @property
    def is_dec(self):
        ''' True if the associated file is in the DEC format.
        '''
        return self.proc_type == PROCESSOR_DEC

    @property
    def is_mips(self):
        ''' True if the associated file is in the SGI/MIPS format.
        '''
        return self.proc_type == PROCESSOR_MIPS

    def decode_string(self, bytes):
        ''' Decode a byte array to a string.
        '''
        # Try strict decoders in order of preference, then fall back to
        # replacing undecodable characters.
        for codec in ('utf-8', 'latin-1'):
            try:
                return codecs.decode(bytes, codec)
            except UnicodeDecodeError:
                pass
        return codecs.decode(bytes, 'utf-8', 'replace')
def UNPACK_FLOAT_IEEE(uint_32):
    '''Reinterpret a 32-bit unsigned int's bits as an IEEE 754 float.
    '''
    packed = struct.pack("<I", uint_32)
    return struct.unpack('f', packed)[0]
def UNPACK_FLOAT_MIPS(uint_32):
    '''Reinterpret a 32-bit unsigned int's bytes, big-endian, as a float.
    '''
    packed = struct.pack(">I", uint_32)
    return struct.unpack('f', packed)[0]
def DEC_to_IEEE(uint_32):
    '''Convert the 32 bit representation of a DEC float to IEEE format.

    Params:
    ----
    uint_32 : 32 bit unsigned integer containing the DEC single precision float point bits.
    Returns : IEEE formated floating point of the same shape as the input.

    Bit-pattern references:
    - http://home.fnal.gov/~yang/Notes/ieee_vs_dec_float.txt
    - http://www.irig106.org/docs/106-07/appendixO.pdf
    DEC stores the two 16-bit half-words swapped relative to IEEE, with
    the most significant fraction bits in the first 7 bits; after the
    swap the exponent field must be decremented (by one step of the top
    byte). Warning! Unsure if NaN numbers are managed appropriately.
    '''
    # Swap the upper and lower 16-bit words so the layout becomes the
    # IEEE ordering: SIGN | exponent | fraction.
    swapped = ((uint_32 & 0xFFFF0000) >> 16) | ((uint_32 & 0x0000FFFF) << 16)
    # Decrement the top (sign + exponent) byte by one; the mask keeps any
    # borrow from leaking into the fraction bits.
    sign_exp = ((swapped & 0xFF000000) - 0x01000000) & 0xFF000000
    return UNPACK_FLOAT_IEEE((swapped & 0x00FFFFFF) | sign_exp)
def DEC_to_IEEE_BYTES(bytes):
    '''Convert byte array containing 32 bit DEC floats to IEEE format.

    Params:
    ----
    bytes : Byte array where every 4 bytes represent a single precision DEC float.
    Returns : IEEE formatted float32 array with one element per input word.
    '''
    # See comments in DEC_to_IEEE() for the DEC format definition.
    raw = np.frombuffer(bytes, dtype=np.dtype('B'))
    # Reshuffle: DEC stores the 16-bit half-words swapped relative to IEEE.
    reshuffled = np.empty(len(raw), dtype=np.dtype('B'))
    reshuffled[0::4] = raw[2::4]
    reshuffled[1::4] = raw[3::4]
    reshuffled[2::4] = raw[0::4]
    # Decrement the top (sign + exponent-high-bits) byte by one -- i.e. the
    # exponent by 2 -- unless its 7 exponent bits are all zero.
    # Fixed: the original `exp + ((exp & 0x7f == 0) - 1)` subtracts from a
    # boolean array, which raises TypeError on modern numpy (the boolean `-`
    # operator was removed); np.where expresses the same conditional.
    exp = raw[1::4]
    reshuffled[3::4] = np.where((exp & 0x7f) == 0, exp, exp - 1)
    # There are different ways to adjust for differences in DEC/IEEE
    # representation after the reshuffle; the simple exponent decrement used
    # here misrepresents only numbers with exponent < 2 and DEC zero /
    # undefined values (NaN appears when the exponent is 255).
    return np.frombuffer(reshuffled.tobytes(),
                         dtype=np.float32,
                         count=int(len(raw) / 4))
def is_integer(value):
    '''Return True if *value* is a Python or numpy (32/64-bit) integer.'''
    integer_types = (int, np.int32, np.int64)
    return isinstance(value, integer_types)
class Header(object):
'''Header information from a C3D file.
Attributes
----------
event_block : int
Index of the 512-byte block where labels (metadata) are found.
parameter_block : int
Index of the 512-byte block where parameters (metadata) are found.
data_block : int
Index of the 512-byte block where data starts.
point_count : int
Number of motion capture channels recorded in this file.
analog_count : int
Number of analog values recorded per frame of 3D point data.
first_frame : int
Index of the first frame of data.
last_frame : int
Index of the last frame of data.
analog_per_frame : int
Number of analog frames per frame of 3D point data. The analog frame
rate (ANALOG:RATE) apparently equals the point frame rate (POINT:RATE)
times this value.
frame_rate : float
The frame rate of the recording, in frames per second.
scale_factor : float
Multiply values in the file by this scale parameter.
long_event_labels : bool
max_gap : int
.. note::
The ``scale_factor`` attribute is not used in Phasespace C3D files;
instead, use the POINT.SCALE parameter.
.. note::
The ``first_frame`` and ``last_frame`` header attributes are not used in
C3D files generated by Phasespace. Instead, the first and last
frame numbers are stored in the POINTS:ACTUAL_START_FIELD and
POINTS:ACTUAL_END_FIELD parameters.
'''
# Read/Write header formats, read values as unsigned ints rather then floats.
BINARY_FORMAT_WRITE = '<BBHHHHHfHHf274sHHH164s44s'
BINARY_FORMAT_READ = '<BBHHHHHIHHI274sHHH164s44s'
BINARY_FORMAT_READ_BIG_ENDIAN = '>BBHHHHHIHHI274sHHH164s44s'
def __init__(self, handle=None):
'''Create a new Header object.
Parameters
----------
handle : file handle, optional
If given, initialize attributes for the Header from this file
handle. The handle must be seek-able and readable. If `handle` is
not given, Header attributes are initialized with default values.
'''
self.parameter_block = 2
self.data_block = 3
self.point_count = 50
self.analog_count = 0
self.first_frame = 1
self.last_frame = 1
self.analog_per_frame = 0
self.frame_rate = 60.0
self.max_gap = 0
self.scale_factor = -1.0
self.long_event_labels = False
self.event_count = 0
self.event_block = b''
self.event_timings = np.zeros(0, dtype=np.float32)
self.event_disp_flags = np.zeros(0, dtype=np.bool)
self.event_labels = []
if handle:
self.read(handle)
def write(self, handle):
'''Write binary header data to a file handle.
This method writes exactly 512 bytes to the beginning of the given file
handle.
Parameters
----------
handle : file handle
The given handle will be reset to 0 using `seek` and then 512 bytes
will be written to describe the parameters in this Header. The
handle must be writeable.
'''
handle.seek(0)
handle.write(struct.pack(self.BINARY_FORMAT_WRITE,
# Pack vars:
self.parameter_block,
0x50,
self.point_count,
self.analog_count,
self.first_frame,
self.last_frame,
self.max_gap,
self.scale_factor,
self.data_block,
self.analog_per_frame,
self.frame_rate,
b'',
self.long_event_labels and 0x3039 or 0x0, # If True write long_event_key else 0
self.event_count,
0x0,
self.event_block,
b''))
def __str__(self):
'''Return a string representation of this Header's attributes.'''
return '''\
parameter_block: {0.parameter_block}
point_count: {0.point_count}
analog_count: {0.analog_count}
first_frame: {0.first_frame}
last_frame: {0.last_frame}
max_gap: {0.max_gap}
scale_factor: {0.scale_factor}
data_block: {0.data_block}
analog_per_frame: {0.analog_per_frame}
frame_rate: {0.frame_rate}
long_event_labels: {0.long_event_labels}
event_block: {0.event_block}'''.format(self)
def read(self, handle, fmt=BINARY_FORMAT_READ):
'''Read and parse binary header data from a file handle.
This method reads exactly 512 bytes from the beginning of the given file
handle.
Parameters
----------
handle : file handle
The given handle will be reset to 0 using `seek` and then 512 bytes
will be read to initialize the attributes in this Header. The handle
must be readable.
fmt : Formating string used to read the header.
Raises
------
AssertionError
If the magic byte from the header is not 80 (the C3D magic value).
'''
handle.seek(0)
raw = handle.read(512)
(self.parameter_block,
magic,
self.point_count,
self.analog_count,
self.first_frame,
self.last_frame,
self.max_gap,
self.scale_factor,
self.data_block,
self.analog_per_frame,
self.frame_rate,
_,
self.long_event_labels,
self.event_count,
__,
self.event_block,
_) = struct.unpack(fmt, raw)
# Check magic number
assert magic == 80, 'C3D magic {} != 80 !'.format(magic)
# Check long event key
self.long_event_labels = self.long_event_labels == 0x3039
def _processor_convert(self, dtypes, handle):
''' Function interpreting the header once a processor type has been determined.
'''
if dtypes.is_dec:
self.scale_factor = DEC_to_IEEE(self.scale_factor)
self.frame_rate = DEC_to_IEEE(self.frame_rate)
float_unpack = DEC_to_IEEE
elif dtypes.is_ieee:
self.scale_factor = UNPACK_FLOAT_IEEE(self.scale_factor)
self.frame_rate = UNPACK_FLOAT_IEEE(self.frame_rate)
float_unpack = UNPACK_FLOAT_IEEE
elif dtypes.is_mips:
# Re-read header in big-endian
self.read(handle, Header.BINARY_FORMAT_READ_BIG_ENDIAN)
# Then unpack
self.scale_factor = UNPACK_FLOAT_IEEE(self.scale_factor)
self.frame_rate = UNPACK_FLOAT_IEEE(self.frame_rate)
float_unpack = UNPACK_FLOAT_IEEE
self._parse_events(dtypes, float_unpack)
def _parse_events(self, dtypes, float_unpack):
''' Parse the event section of the header.
'''
# Event section byte blocks
time_bytes = self.event_block[:72]
disp_bytes = self.event_block[72:90]
label_bytes = self.event_block[92:]
if dtypes.is_mips:
unpack_fmt = '>I'
else:
unpack_fmt = '<I'
read_count = self.event_count
self.event_timings = np.zeros(read_count, dtype=np.float32)
self.event_disp_flags = np.zeros(read_count, dtype=np.bool)
self.event_labels = np.empty(read_count, dtype=object)
for i in range(read_count):
ilong = i*4
# Unpack
self.event_disp_flags[i] = disp_bytes[i] > 0
self.event_timings[i] = float_unpack(struct.unpack(unpack_fmt, time_bytes[ilong:ilong+4])[0])
self.event_labels[i] = dtypes.decode_string(label_bytes[ilong:ilong+4])
@property
def events(self):
''' Get an iterable over displayed events defined in the header. Iterable items are on form (timing, label).
Note*:
Time as defined by the 'timing' is relative to frame 1 and not the 'first_frame' parameter.
Frame 1 therefor has the time 0.0 in relation to the event timing.
'''
return zip(self.event_timings[self.event_disp_flags], self.event_labels[self.event_disp_flags])
class Param(object):
'''A class representing a single named parameter from a C3D file.
Attributes
----------
name : str
Name of this parameter.
dtype: DataTypes
Reference to the DataTypes object associated with the file.
desc : str
Brief description of this parameter.
bytes_per_element : int, optional
For array data, this describes the size of each element of data. For
string data (including arrays of strings), this should be -1.
dimensions : list of int
For array data, this describes the dimensions of the array, stored in
column-major order. For arrays of strings, the dimensions here will be
the number of columns (length of each string) followed by the number of
rows (number of strings).
bytes : str
Raw data for this parameter.
handle :
File handle positioned at the first byte of a .c3d parameter description.
'''
def __init__(self,
             name,
             dtype,
             desc='',
             bytes_per_element=1,
             dimensions=None,
             bytes=b'',
             handle=None):
    '''Set up a new parameter, only the name is required.'''
    self.name = name
    self._dtypes = dtype  # DataTypes instance: byte order for this file
    self.desc = desc
    self.bytes_per_element = bytes_per_element  # -1 marks string data
    self.dimensions = dimensions or []  # column-major (Fortran) dims
    self.bytes = bytes  # raw parameter payload
    if handle:
        # Populate the fields directly from an open .c3d file handle.
        self.read(handle)
def __repr__(self):
return '<Param: {}>'.format(self.desc)
@property
def num_elements(self):
'''Return the number of elements in this parameter's array value.'''
e = 1
for d in self.dimensions:
e *= d
return e
@property
def total_bytes(self):
'''Return the number of bytes used for storing this parameter's data.'''
return self.num_elements * abs(self.bytes_per_element)
def binary_size(self):
'''Return the number of bytes needed to store this parameter.'''
return (
1 + # group_id
2 + # next offset marker
1 + len(self.name.encode('utf-8')) + # size of name and name bytes
1 + # data size
# size of dimensions and dimension bytes
1 + len(self.dimensions) +
self.total_bytes + # data
1 + len(self.desc.encode('utf-8')) # size of desc and desc bytes
)
def write(self, group_id, handle):
    '''Write binary data for this parameter to a file handle.

    Parameters
    ----------
    group_id : int
        The numerical ID of the group that holds this parameter.
    handle : file handle
        An open, writable, binary file handle.
    '''
    name = self.name.encode('utf-8')
    # Field layout: name length + group id, then the name itself.
    handle.write(struct.pack('bb', len(name), group_id))
    handle.write(name)
    # Offset marker: binary_size() minus the bytes already written above
    # (NOTE(review): presumably the offset to the next parameter record --
    # confirm against the C3D parameter-section spec).
    handle.write(struct.pack('<h', self.binary_size() - 2 - len(name)))
    handle.write(struct.pack('b', self.bytes_per_element))
    # Dimension count followed by one unsigned byte per dimension.
    handle.write(struct.pack('B', len(self.dimensions)))
    handle.write(struct.pack('B' * len(self.dimensions), *self.dimensions))
    if self.bytes:
        handle.write(self.bytes)
    # Description: length byte then the encoded text.
    desc = self.desc.encode('utf-8')
    handle.write(struct.pack('B', len(desc)))
    handle.write(desc)
def read(self, handle):
    '''Read binary data for this parameter from a file handle.

    This reads exactly enough data from the current position in the file to
    initialize the parameter. (The name and group id are not read here --
    compare write(), which emits them before this layout.)
    '''
    self.bytes_per_element, = struct.unpack('b', handle.read(1))
    # Dimension count, then one unsigned byte per dimension extent.
    dims, = struct.unpack('B', handle.read(1))
    self.dimensions = [struct.unpack('B', handle.read(1))[
        0] for _ in range(dims)]
    # Raw payload: total_bytes is derived from dimensions * element size.
    self.bytes = b''
    if self.total_bytes:
        self.bytes = handle.read(self.total_bytes)
    # Description string, preceded by its length (0 -> empty description).
    desc_size, = struct.unpack('B', handle.read(1))
    self.desc = desc_size and self._dtypes.decode_string(handle.read(desc_size)) or ''
def _as(self, dtype):
'''Unpack the raw bytes of this param using the given struct format.'''
return np.frombuffer(self.bytes, count=1, dtype=dtype)[0]
def _as_array(self, dtype):
'''Unpack the raw bytes of this param using the given data format.'''
assert self.dimensions, \
'{}: cannot get value as {} array!'.format(self.name, dtype)
elems = np.frombuffer(self.bytes, dtype=dtype)
# Reverse shape as the shape is defined in fortran format
return elems.reshape(self.dimensions[::-1])
def _as_any(self, dtype):
'''Unpack the raw bytes of this param as either array or single value.'''
if 0 in self.dimensions[:]: # Check if any dimension is 0 (empty buffer)
return [] # Buffer is empty
if len(self.dimensions) == 0: # Parse data as a single value
if dtype == np.float32: # Floats need to be parsed separately!
return self.float_value
return self._as(dtype)
else: # Parse data as array
if dtype == np.float32:
data = self.float_array
else:
data = self._as_array(dtype)
if len(self.dimensions) < 2: # Check if data is contained in a single dimension
return data.flatten()
return data
@property
def _as_integer_value(self):
    ''' Get the param as either 32 bit float or unsigned integer.

    Evaluates if an integer is stored as a floating point representation.

    Note: This is implemented purely for parsing start/end frames.
    '''
    if self.total_bytes >= 4:
        # If the 4 bytes decode to a whole-number float, the writer stored
        # the integer as a float; otherwise reinterpret the bytes as uint32.
        value = self.float_value
        if int(value) == value:
            return value
        return self.uint32_value
    elif self.total_bytes >= 2:
        return self.uint16_value
    else:
        return self.uint8_value
# Scalar integer accessors: each delegates to _as() with the dtype carrying
# this file's byte order (see DataTypes).
@property
def int8_value(self):
    '''Get the param as an 8-bit signed integer.'''
    return self._as(self._dtypes.int8)

@property
def uint8_value(self):
    '''Get the param as an 8-bit unsigned integer.'''
    return self._as(self._dtypes.uint8)

@property
def int16_value(self):
    '''Get the param as a 16-bit signed integer.'''
    return self._as(self._dtypes.int16)

@property
def uint16_value(self):
    '''Get the param as a 16-bit unsigned integer.'''
    return self._as(self._dtypes.uint16)

@property
def int32_value(self):
    '''Get the param as a 32-bit signed integer.'''
    return self._as(self._dtypes.int32)

@property
def uint32_value(self):
    '''Get the param as a 32-bit unsigned integer.'''
    return self._as(self._dtypes.uint32)
@property
def float_value(self):
    '''Get the param as a 32-bit float.'''
    if self._dtypes.is_dec:
        # DEC floats need bit-level conversion before interpretation.
        return DEC_to_IEEE(self._as(np.uint32))
    else:  # is_mips or is_ieee
        return self._as(self._dtypes.float32)

@property
def bytes_value(self):
    '''Get the param as a raw byte string.'''
    return self.bytes

@property
def string_value(self):
    '''Get the param as a unicode string.'''
    return self._dtypes.decode_string(self.bytes)
# Integer array accessors: each delegates to _as_array() with the dtype
# carrying this file's byte order (see DataTypes).
@property
def int8_array(self):
    '''Get the param as an array of 8-bit signed integers.'''
    return self._as_array(self._dtypes.int8)

@property
def uint8_array(self):
    '''Get the param as an array of 8-bit unsigned integers.'''
    return self._as_array(self._dtypes.uint8)

@property
def int16_array(self):
    '''Get the param as an array of 16-bit signed integers.'''
    return self._as_array(self._dtypes.int16)

@property
def uint16_array(self):
    '''Get the param as an array of 16-bit unsigned integers.'''
    return self._as_array(self._dtypes.uint16)

@property
def int32_array(self):
    '''Get the param as an array of 32-bit signed integers.'''
    return self._as_array(self._dtypes.int32)

@property
def uint32_array(self):
    '''Get the param as an array of 32-bit unsigned integers.'''
    return self._as_array(self._dtypes.uint32)

@property
def int64_array(self):
    '''Get the param as an array of 64-bit signed integers.'''
    return self._as_array(self._dtypes.int64)

@property
def uint64_array(self):
    '''Get the param as an array of 64-bit unsigned integers.'''
    return self._as_array(self._dtypes.uint64)
@property
def float32_array(self):
    '''Get the param as an array of 32-bit floats.'''
    # Convert float data if not IEEE processor
    if self._dtypes.is_dec:
        # _as_array equivalent for DEC-encoded floats.
        assert self.dimensions, \
            '{}: cannot get value as {} array!'.format(self.name, self._dtypes.float32)
        return DEC_to_IEEE_BYTES(self.bytes).reshape(self.dimensions[::-1])  # Reverse fortran format
    else:  # is_ieee or is_mips
        return self._as_array(self._dtypes.float32)
@property
def float64_array(self):
    '''Get the param as an array of 64-bit floats.

    Raises ValueError for DEC files: only 32-bit DEC conversion is
    implemented (see DEC_to_IEEE_BYTES).
    '''
    # Convert float data if not IEEE processor
    if self._dtypes.is_dec:
        raise ValueError('Unable to convert bytes encoded in a 64 bit floating point DEC format.')
    else:  # is_ieee or is_mips
        return self._as_array(self._dtypes.float64)
@property
def float_array(self):
    '''Get the param as an array of 32 or 64 bit floats.

    The element width recorded in `bytes_per_element` selects between the
    32- and 64-bit parsers.

    Returns
    -------
    array : np.ndarray
        Floating-point array parsed from the raw parameter bytes.

    Raises
    ------
    TypeError
        If `bytes_per_element` is neither 4 nor 8.
    '''
    # Dispatch on the element width recorded for the parameter.
    if self.bytes_per_element == 4:
        return self.float32_array
    elif self.bytes_per_element == 8:
        return self.float64_array
    else:
        # Fixed message: previously read 'is not unsupported' (double
        # negative) and reported the byte count as bits.
        raise TypeError('Parsing parameter bytes to a floating-point array with '
                        '%i bytes per element is not supported.' % self.bytes_per_element)
@property
def int_array(self):
    '''Get the param as an array of signed integer values.

    The element width recorded in `bytes_per_element` selects the 8-, 16-,
    32- or 64-bit parser.

    Returns
    -------
    array : np.ndarray
        Signed integer array parsed from the raw parameter bytes.

    Raises
    ------
    TypeError
        If `bytes_per_element` is not 1, 2, 4 or 8.
    '''
    # Dispatch on the element width recorded for the parameter.
    if self.bytes_per_element == 1:
        return self.int8_array
    elif self.bytes_per_element == 2:
        return self.int16_array
    elif self.bytes_per_element == 4:
        return self.int32_array
    elif self.bytes_per_element == 8:
        return self.int64_array
    else:
        # Fixed message: previously read 'is not unsupported' (double
        # negative) and reported the byte count as bits.
        raise TypeError('Parsing parameter bytes to an integer array with '
                        '%i bytes per element is not supported.' % self.bytes_per_element)
@property
def uint_array(self):
    '''Get the param as an array of unsigned integer values.

    The element width recorded in `bytes_per_element` selects the 8-, 16-,
    32- or 64-bit parser.

    Returns
    -------
    array : np.ndarray
        Unsigned integer array parsed from the raw parameter bytes.

    Raises
    ------
    TypeError
        If `bytes_per_element` is not 1, 2, 4 or 8.
    '''
    # Dispatch on the element width recorded for the parameter.
    if self.bytes_per_element == 1:
        return self.uint8_array
    elif self.bytes_per_element == 2:
        return self.uint16_array
    elif self.bytes_per_element == 4:
        return self.uint32_array
    elif self.bytes_per_element == 8:
        return self.uint64_array
    else:
        # Fixed message: previously read 'is not unsupported' (double
        # negative) and reported the byte count as bits.
        raise TypeError('Parsing parameter bytes to an integer array with '
                        '%i bytes per element is not supported.' % self.bytes_per_element)
@property
def bytes_array(self):
    '''Get the param as an array of raw byte strings.

    Scalar parameters yield an empty array, 1-D parameters a 0-d array
    wrapping the raw bytes, and higher-rank parameters an object array of
    fixed-width byte words.
    '''
    ndim = len(self.dimensions)
    if ndim == 0:
        return np.array([])
    if ndim == 1:
        return np.array(self.bytes)
    # Data is stored Fortran-ordered: the first dimension is the word
    # length and the remaining dimensions (reversed) give the array shape.
    word_len = self.dimensions[0]
    shape = self.dimensions[:0:-1]
    steps = np.cumprod(self.dimensions[:-1])[::-1]
    out = np.empty(shape, dtype=object)
    for index in np.ndindex(*shape):
        # Byte offset = sum over axes of (index * per-axis byte stride).
        offset = np.sum(np.multiply(index, steps))
        out[index] = self.bytes[offset:offset + word_len]
    return out
@property
def string_array(self):
    '''Get the param as a python array of unicode strings.

    Scalar parameters yield an empty array, 1-D parameters a single
    decoded string, and higher-rank parameters an object array of decoded
    strings (one per byte word).
    '''
    # Decode different dimensions
    if len(self.dimensions) == 0:
        return np.array([])
    elif len(self.dimensions) == 1:
        return np.array([self.string_value])
    else:
        # Parse byte sequences
        byte_arr = self.bytes_array
        # Decode sequences in place; the object array doubles as output.
        for i in np.ndindex(byte_arr.shape):
            byte_arr[i] = self._dtypes.decode_string(byte_arr[i])
        return byte_arr
class Group(object):
    '''A group of parameters from a C3D file.

    In C3D files, parameters are organized in groups. Each group has a name, a
    description, and a set of named parameters.

    Attributes
    ----------
    dtypes : 'DataTypes'
        Data types object used for parsing.
    name : str
        Name of this parameter group.
    desc : str
        Description for this parameter group.
    '''

    def __init__(self, dtypes, name=None, desc=None):
        self._params = {}
        self._dtypes = dtypes
        # Assign through property setters so validation/decoding applies.
        self.name = name
        self.desc = desc

    def __repr__(self):
        return '<Group: {}>'.format(self.desc)

    @property
    def name(self):
        ''' Group name. '''
        return self._name

    @name.setter
    def name(self, value):
        ''' Group name string.

        Parameters
        ----------
        value : str
            New name for the group.

        Raises
        ------
        TypeError
            If the value is neither None nor a string.
        '''
        if value is None or isinstance(value, str):
            self._name = value
        else:
            raise TypeError('Expected group name to be string, was %s.' % type(value))

    @property
    def desc(self):
        ''' Group descriptor. '''
        return self._desc

    @desc.setter
    def desc(self, value):
        ''' Group descriptor.

        Parameters
        ----------
        value : str, or bytes
            New description for this parameter group. Byte strings are
            decoded using the file's data-type helper.

        Raises
        ------
        TypeError
            If the value is not None, str, or bytes.
        '''
        if isinstance(value, bytes):
            # Bug fix: previously the decoded string was immediately
            # overwritten by the raw bytes via an unconditional assignment.
            self._desc = self._dtypes.decode_string(value)
        elif value is None or isinstance(value, str):
            self._desc = value
        else:
            raise TypeError('Expected descriptor to be byte string or python string, was %s.' % type(value))

    def param_items(self):
        ''' Acquire iterator for paramater key-entry pairs. '''
        return self._params.items()

    def param_values(self):
        ''' Acquire iterator for parameter entries. '''
        return self._params.values()

    def param_keys(self):
        ''' Acquire iterator for parameter entry keys. '''
        return self._params.keys()

    def get(self, key, default=None):
        '''Get a parameter by key.

        Parameters
        ----------
        key : any
            Parameter key to look up in this group.
        default : any, optional
            Value to return if the key is not found. Defaults to None.

        Returns
        -------
        param : :class:`Param`
            A parameter from the current group.
        '''
        return self._params.get(key, default)

    def add_param(self, name, **kwargs):
        '''Add a parameter to this group.

        Parameters
        ----------
        name : str
            Name of the parameter to add to this group. The name will
            automatically be case-normalized.

        Additional keyword arguments will be passed to the `Param` constructor.
        '''
        self._params[name.upper()] = Param(name.upper(), self._dtypes, **kwargs)

    def remove_param(self, name):
        '''Remove the specified parameter.

        Parameters
        ----------
        name : str
            Name for the parameter to remove.
        '''
        del self._params[name]

    def rename_param(self, name, new_name):
        ''' Rename a specified parameter group.

        Parameters
        ----------
        name : str, or 'Param'
            Parameter instance, or name.
        new_name : str
            New name for the parameter.

        Raises
        ------
        ValueError
            If the new name is already in use, or no parameter matches
            the given identifier.
        '''
        if new_name in self._params:
            raise ValueError("Key %s already exist." % new_name)
        if isinstance(name, Param):
            param = name
            name = param.name
        else:
            # Aquire instance using id
            param = self._params.get(name, None)
            if param is None:
                raise ValueError('No parameter found matching the identifier: %s' % str(name))
        del self._params[name]
        self._params[new_name] = param

    def binary_size(self):
        '''Return the number of bytes to store this group and its parameters.'''
        return (
            1 +  # group_id
            1 + len(self._name.encode('utf-8')) +  # size of name and name bytes
            2 +  # next offset marker
            1 + len(self._desc.encode('utf-8')) +  # size of desc and desc bytes
            sum(p.binary_size() for p in self._params.values()))

    def write(self, group_id, handle):
        '''Write this parameter group, with parameters, to a file handle.

        Parameters
        ----------
        group_id : int
            The numerical ID of the group.
        handle : file handle
            An open, writable, binary file handle.
        '''
        name = self._name.encode('utf-8')
        desc = self._desc.encode('utf-8')
        # Group ids are written negated to distinguish them from parameters.
        handle.write(struct.pack('bb', len(name), -group_id))
        handle.write(name)
        # Offset to next block: 2-byte marker + 1-byte desc size + desc.
        handle.write(struct.pack('<h', 3 + len(desc)))
        handle.write(struct.pack('B', len(desc)))
        handle.write(desc)
        for param in self._params.values():
            param.write(group_id, handle)

    # Typed accessors; keys are case-normalized before lookup and a missing
    # key raises KeyError.

    def get_int8(self, key):
        '''Get the value of the given parameter as an 8-bit signed integer.'''
        return self._params[key.upper()].int8_value

    def get_uint8(self, key):
        '''Get the value of the given parameter as an 8-bit unsigned integer.'''
        return self._params[key.upper()].uint8_value

    def get_int16(self, key):
        '''Get the value of the given parameter as a 16-bit signed integer.'''
        return self._params[key.upper()].int16_value

    def get_uint16(self, key):
        '''Get the value of the given parameter as a 16-bit unsigned integer.'''
        return self._params[key.upper()].uint16_value

    def get_int32(self, key):
        '''Get the value of the given parameter as a 32-bit signed integer.'''
        return self._params[key.upper()].int32_value

    def get_uint32(self, key):
        '''Get the value of the given parameter as a 32-bit unsigned integer.'''
        return self._params[key.upper()].uint32_value

    def get_float(self, key):
        '''Get the value of the given parameter as a 32-bit float.'''
        return self._params[key.upper()].float_value

    def get_bytes(self, key):
        '''Get the value of the given parameter as a byte array.'''
        return self._params[key.upper()].bytes_value

    def get_string(self, key):
        '''Get the value of the given parameter as a string.'''
        return self._params[key.upper()].string_value
class Manager(object):
    '''A base class for managing C3D file metadata.

    This class manages a C3D header (which contains some stock metadata fields)
    as well as a set of parameter groups. Each group is accessible using its
    name.

    Attributes
    ----------
    header : `Header`
        Header information for the C3D file.
    '''

    def __init__(self, header=None):
        '''Set up a new Manager with a Header.'''
        self._header = header or Header()
        # Groups are indexed twice: once by name (str key) and once by
        # numerical id (int key); both keys map to the same Group instance.
        self._groups = {}

    @property
    def header(self):
        ''' Access the parsed c3d header. '''
        return self._header

    def group_items(self):
        ''' Acquire iterable over parameter group pairs.

        Returns
        -------
        items : Iterable of (str, :class:`Group`) pairs
            Name keys paired with parameter group entries.
        '''
        # Filter on string keys to skip the duplicate integer-id entries.
        return ((k, v) for k, v in self._groups.items() if isinstance(k, str))

    def group_values(self):
        ''' Acquire iterable over parameter group entries.

        Returns
        -------
        values : Iterable of (:class:`Group`, ...)
            Unique parameter group entries.
        '''
        return (v for k, v in self._groups.items() if isinstance(k, str))

    def group_keys(self):
        ''' Acquire iterable over parameter group entry string keys.

        Returns
        -------
        keys : Iterable of (str, ...)
            Keys for the parameter group entries.
        '''
        return (k for k in self._groups.keys() if isinstance(k, str))

    def group_listed(self):
        ''' Acquire iterable over sorted numerical parameter group pairs.

        Returns
        -------
        items : Sorted list of (int, :class:`Group`) pairs
            Numerical keys paired with parameter group entries, in id order.
        '''
        return sorted((i, g) for i, g in self._groups.items() if isinstance(i, int))

    def _check_metadata(self):
        ''' Ensure that the metadata in our file is self-consistent.

        Verifies that the stock header fields agree with the corresponding
        POINT/ANALOG parameter values, asserting on hard mismatches and
        warning about soft ones.
        '''
        assert self._header.point_count == self.point_used, (
            'inconsistent point count! {} header != {} POINT:USED'.format(
                self._header.point_count,
                self.point_used,
            ))
        assert self._header.scale_factor == self.point_scale, (
            'inconsistent scale factor! {} header != {} POINT:SCALE'.format(
                self._header.scale_factor,
                self.point_scale,
            ))
        assert self._header.frame_rate == self.point_rate, (
            'inconsistent frame rate! {} header != {} POINT:RATE'.format(
                self._header.frame_rate,
                self.point_rate,
            ))
        # Analog rate is recorded in the header as samples per point frame.
        if self.point_rate:
            ratio = self.analog_rate / self.point_rate
        else:
            ratio = 0
        assert self._header.analog_per_frame == ratio, (
            'inconsistent analog rate! {} header != {} analog-fps / {} point-fps'.format(
                self._header.analog_per_frame,
                self.analog_rate,
                self.point_rate,
            ))
        count = self.analog_used * self._header.analog_per_frame
        assert self._header.analog_count == count, (
            'inconsistent analog count! {} header != {} analog used * {} per-frame'.format(
                self._header.analog_count,
                self.analog_used,
                self._header.analog_per_frame,
            ))
        # A missing parameter makes self.get(...) return None, so the typed
        # accessor raises AttributeError — caught here as "not present".
        try:
            start = self.get_uint16('POINT:DATA_START')
            if self._header.data_block != start:
                warnings.warn('inconsistent data block! {} header != {} POINT:DATA_START'.format(
                    self._header.data_block, start))
        except AttributeError:
            warnings.warn('''no pointer available in POINT:DATA_START indicating the start of the data block, using
                             header pointer as fallback''')

        def check_parameters(params):
            # Warn about any expected parameter that is absent.
            for name in params:
                if self.get(name) is None:
                    warnings.warn('missing parameter {}'.format(name))

        if self.point_used > 0:
            check_parameters(('POINT:LABELS', 'POINT:DESCRIPTIONS'))
        else:
            warnings.warn('No point data found in file.')
        if self.analog_used > 0:
            check_parameters(('ANALOG:LABELS', 'ANALOG:DESCRIPTIONS'))
        else:
            warnings.warn('No analog data found in file.')

    def add_group(self, group_id, name, desc, rename_duplicated_groups=False):
        '''Add a new parameter group.

        Parameters
        ----------
        group_id : int
            The numeric ID for a group to check or create.
        name : str
            If a group is created, assign this name to the group.
            The name will be turned to upper case letters.
        desc : str, optional
            If a group is created, assign this description to the group.
        rename_duplicated_groups : bool
            If True, when adding a group with a name that already exists, the group will be renamed to
            `{name}{group_id}`.
            The original group will not be renamed.
            In general, having multiple groups with the same name is against the c3d specification.
            This option only exists to handle edge cases where files are not created according to the spec and still
            need to be imported.

        Returns
        -------
        group : :class:`Group`
            A group with the given ID, name, and description.

        Raises
        ------
        ValueError
            If `group_id` is not an integer or `name` is not a string.
        KeyError
            If a group with the same numerical key or name already exists
            (name collisions only when `rename_duplicated_groups` is False).
        '''
        if not is_integer(group_id):
            raise ValueError('Expected Group numerical key to be integer, was %s.' % type(group_id))
        if not isinstance(name, str):
            raise ValueError('Expected Group name key to be string, was %s.' % type(name))
        group_id = int(group_id)  # Assert python int
        if group_id in self._groups:
            raise KeyError('Group with numerical key {} already exists'.format(group_id))
        name = name.upper()
        if name in self._groups:
            if rename_duplicated_groups is True:
                # In some cases group name is not unique (though c3d spec requires that).
                # To allow using such files we auto-generate new name.
                # Notice that referring to this group's parameters later with the original name will fail.
                new_name = name + str(group_id)
                warnings.warn(f'Repeated group name {name} modified to {new_name}')
                name = new_name
            else:
                raise KeyError(f'A group with the name {name} already exists.')
        # Register under both the name key and the numerical key.
        group = self._groups[name] = self._groups[group_id] = Group(self._dtypes, name, desc)
        return group

    def remove_group(self, group_id):
        '''Remove the parameter group.

        Parameters
        ----------
        group_id : int, or str
            The numeric or name ID key for a group to remove all entries for.
        '''
        grp = self._groups.get(group_id, None)
        if grp is None:
            return
        # Delete every key (name and numerical) that maps to this group.
        gkeys = [k for (k, v) in self._groups.items() if v == grp]
        for k in gkeys:
            del self._groups[k]

    def rename_group(self, group_id, new_group_id):
        ''' Rename a specified parameter group.

        Parameters
        ----------
        group_id : int, str, or 'Group'
            Group instance, name, or numerical identifier for the group.
        new_group_id : str, or int
            If string, it is the new name for the group. If integer, it will replace its numerical group id.

        Raises
        ------
        KeyError
            If no group matches `group_id`, or `new_group_id` has an
            invalid type.
        ValueError
            If the new identifier is already in use by another group.
        '''
        if isinstance(group_id, Group):
            grp = group_id
        else:
            # Aquire instance using id
            grp = self._groups.get(group_id, None)
            if grp is None:
                raise KeyError('No group found matching the identifier: %s' % str(group_id))
        if new_group_id in self._groups:
            # Renaming a group to its current id is a no-op.
            if new_group_id == group_id:
                return
            raise ValueError('Key %s for group %s already exist.' % (str(new_group_id), grp.name))

        # Clear old id
        if isinstance(new_group_id, (str, bytes)):
            if grp.name in self._groups:
                del self._groups[grp.name]
            grp._name = new_group_id
        elif is_integer(new_group_id):
            new_group_id = int(new_group_id)  # Ensure python int
            del self._groups[group_id]
        else:
            raise KeyError('Invalid group identifier of type: %s' % str(type(new_group_id)))
        # Update
        self._groups[new_group_id] = grp

    def get(self, group, default=None):
        '''Get a group or parameter.

        Parameters
        ----------
        group : str
            If this string contains a period (.), then the part before the
            period will be used to retrieve a group, and the part after the
            period will be used to retrieve a parameter from that group. If this
            string does not contain a period, then just a group will be
            returned.
        default : any
            Return this value if the named group and parameter are not found.

        Returns
        -------
        value : :class:`Group` or :class:`Param`
            Either a group or parameter with the specified name(s). If neither
            is found, returns the default value.
        '''
        # Integer keys bypass the name parsing and look up by id directly.
        if is_integer(group):
            return self._groups.get(int(group), default)
        group = group.upper()
        param = None
        # Both '.' and ':' act as group/parameter separators.
        if '.' in group:
            group, param = group.split('.', 1)
        if ':' in group:
            group, param = group.split(':', 1)
        if group not in self._groups:
            return default
        group = self._groups[group]
        if param is not None:
            return group.get(param, default)
        return group

    # Typed accessors. Note: a missing key makes self.get(...) return None,
    # so these raise AttributeError rather than KeyError when absent.

    def get_int8(self, key):
        '''Get a parameter value as an 8-bit signed integer.'''
        return self.get(key).int8_value

    def get_uint8(self, key):
        '''Get a parameter value as an 8-bit unsigned integer.'''
        return self.get(key).uint8_value

    def get_int16(self, key):
        '''Get a parameter value as a 16-bit signed integer.'''
        return self.get(key).int16_value

    def get_uint16(self, key):
        '''Get a parameter value as a 16-bit unsigned integer.'''
        return self.get(key).uint16_value

    def get_int32(self, key):
        '''Get a parameter value as a 32-bit signed integer.'''
        return self.get(key).int32_value

    def get_uint32(self, key):
        '''Get a parameter value as a 32-bit unsigned integer.'''
        return self.get(key).uint32_value

    def get_float(self, key):
        '''Get a parameter value as a 32-bit float.'''
        return self.get(key).float_value

    def get_bytes(self, key):
        '''Get a parameter value as a byte string.'''
        return self.get(key).bytes_value

    def get_string(self, key):
        '''Get a parameter value as a string.'''
        return self.get(key).string_value

    def parameter_blocks(self):
        '''Compute the size (in 512B blocks) of the parameter section.'''
        # 4 bytes of section header plus the binary size of every group.
        # (local name shadows the builtin `bytes`; kept as-is.)
        bytes = 4. + sum(g.binary_size() for g in self._groups.values())
        return int(np.ceil(bytes / 512))

    @property
    def point_rate(self):
        ''' Number of sampled 3D coordinates per second.
        '''
        try:
            return self.get_float('POINT:RATE')
        except AttributeError:
            # Parameter missing; fall back to the stock header field.
            return self.header.frame_rate

    @property
    def point_scale(self):
        ''' Point data scale factor (negative indicates floating-point data). '''
        try:
            return self.get_float('POINT:SCALE')
        except AttributeError:
            return self.header.scale_factor

    @property
    def point_used(self):
        ''' Number of sampled 3D point coordinates per frame.
        '''
        try:
            return self.get_uint16('POINT:USED')
        except AttributeError:
            return self.header.point_count

    @property
    def analog_used(self):
        ''' Number of analog measurements, or channels, for each analog data sample.
        '''
        try:
            return self.get_uint16('ANALOG:USED')
        except AttributeError:
            # NOTE(review): header.analog_count is the total sample count per
            # frame, not the channel count — confirm this fallback is intended.
            return self.header.analog_count

    @property
    def analog_rate(self):
        ''' Number of analog data samples per second.
        '''
        try:
            return self.get_float('ANALOG:RATE')
        except AttributeError:
            return self.header.analog_per_frame * self.point_rate

    @property
    def analog_per_frame(self):
        ''' Number of analog samples per 3D frame (point sample).
        '''
        return int(self.analog_rate / self.point_rate)

    @property
    def analog_sample_count(self):
        ''' Number of analog samples per channel.
        '''
        # Multiplying by the boolean zeroes the count when no channels exist.
        has_analog = self.analog_used > 0
        return int(self.frame_count * self.analog_per_frame) * has_analog

    @property
    def point_labels(self):
        ''' Labels for each 3D point channel (POINT:LABELS). '''
        return self.get('POINT:LABELS').string_array

    @property
    def analog_labels(self):
        ''' Labels for each analog channel (ANALOG:LABELS). '''
        return self.get('ANALOG:LABELS').string_array

    @property
    def frame_count(self):
        ''' Total number of frames recorded in the file. '''
        return self.last_frame - self.first_frame + 1  # Add 1 since range is inclusive [first, last]

    @property
    def first_frame(self):
        ''' Index of the first recorded frame. '''
        # Start frame seems to be less of an issue to determine.
        # this is a hack for phasespace files ... should put it in a subclass.
        param = self.get('TRIAL:ACTUAL_START_FIELD')
        if param is not None:
            return param.uint32_value
        return self.header.first_frame

    @property
    def last_frame(self):
        ''' Index of the last recorded frame. '''
        # Number of frames can be represented in many formats, first check if valid header values
        if self.header.first_frame < self.header.last_frame and self.header.last_frame != 65535:
            return self.header.last_frame

        # Check different parameter options where the frame can be encoded
        end_frame = [self.header.last_frame, 0.0, 0.0, 0.0]
        param = self.get('TRIAL:ACTUAL_END_FIELD')
        if param is not None:
            end_frame[1] = param._as_integer_value
        param = self.get('POINT:LONG_FRAMES')
        if param is not None:
            end_frame[2] = param._as_integer_value
        param = self.get('POINT:FRAMES')
        if param is not None:
            # Can be encoded either as 32 bit float or 16 bit uint
            end_frame[3] = param._as_integer_value
        # Return the largest of the all (queue bad reading...)
        return int(np.max(end_frame))
class Reader(Manager):
    '''This class provides methods for reading the data in a C3D file.

    A C3D file contains metadata and frame-based data describing 3D motion.

    You can iterate over the frames in the file by calling `read_frames()` after
    construction:

    >>> r = c3d.Reader(open('capture.c3d', 'rb'))
    >>> for frame_no, points, analog in r.read_frames():
    ...     print('{0.shape} points in this frame'.format(points))
    '''

    def __init__(self, handle):
        '''Initialize this C3D file by reading header and parameter data.

        Parameters
        ----------
        handle : file handle
            Read metadata and C3D motion frames from the given file handle. This
            handle is assumed to be `seek`-able and `read`-able. The handle must
            remain open for the life of the `Reader` instance. The `Reader` does
            not `close` the handle.

        Raises
        ------
        ValueError
            If the processor metadata in the C3D file is anything other than 84
            (Intel format).
        '''
        super(Reader, self).__init__(Header(handle))
        self._handle = handle

        def seek_param_section_header():
            ''' Seek to and read the first 4 byte of the parameter header section '''
            self._handle.seek((self._header.parameter_block - 1) * 512)
            # metadata header
            return self._handle.read(4)

        # Begin by reading the processor type:
        buf = seek_param_section_header()
        _, _, parameter_blocks, processor = struct.unpack('BBBB', buf)
        self._dtypes = DataTypes(processor)
        # Convert header parameters in accordance with the processor type (MIPS format re-reads the header)
        self._header._processor_convert(self._dtypes, handle)

        # Restart reading the parameter header after parsing processor type
        buf = seek_param_section_header()
        start_byte = self._handle.tell()
        endbyte = start_byte + 512 * parameter_blocks - 4
        while self._handle.tell() < endbyte:
            chars_in_name, group_id = struct.unpack('bb', self._handle.read(2))
            if group_id == 0 or chars_in_name == 0:
                # we've reached the end of the parameter section.
                break
            name = self._dtypes.decode_string(self._handle.read(abs(chars_in_name))).upper()

            # Read the byte segment associated with the parameter and create a
            # separate binary stream object from the data.
            # The offset is stored big-endian for MIPS files, little-endian otherwise.
            offset_to_next, = struct.unpack(['<h', '>h'][self._dtypes.is_mips], self._handle.read(2))
            if offset_to_next == 0:
                # Last parameter, as number of bytes are unknown,
                # read the remaining bytes in the parameter section.
                bytes = self._handle.read(endbyte - self._handle.tell())
            else:
                bytes = self._handle.read(offset_to_next - 2)
            buf = io.BytesIO(bytes)

            if group_id > 0:
                # We've just started reading a parameter. If its group doesn't
                # exist, create a blank one. add the parameter to the group.
                self._groups.setdefault(
                    group_id, Group(self._dtypes)).add_param(name, handle=buf)
            else:
                # We've just started reading a group. If a group with the
                # appropriate numerical id exists already (because we've
                # already created it for a parameter), just set the name of
                # the group. Otherwise, add a new group.
                group_id = abs(group_id)
                size, = struct.unpack('B', buf.read(1))
                desc = size and buf.read(size) or ''
                group = self.get(group_id)
                if group is not None:
                    self.rename_group(group, name)
                    group.desc = desc
                else:
                    # We allow duplicated group names here, even though it is against the c3d spec.
                    # The groups will be renamed.
                    self.add_group(group_id, name, desc, rename_duplicated_groups=True)

        self._check_metadata()

    def read_frames(self, copy=True):
        '''Iterate over the data frames from our C3D file handle.

        Parameters
        ----------
        copy : bool
            If False, the reader returns a reference to the same data buffers
            for every frame. The default is True, which causes the reader to
            return a unique data buffer for each frame. Set this to False if you
            consume frames as you iterate over them, or True if you store them
            for later.

        Returns
        -------
        frames : sequence of (frame number, points, analog)
            This method generates a sequence of (frame number, points, analog)
            tuples, one tuple per frame. The first element of each tuple is the
            frame number. The second is a numpy array of parsed, 5D point data
            and the third element of each tuple is a numpy array of analog
            values that were recorded during the frame. (Often the analog data
            are sampled at a higher frequency than the 3D point data, resulting
            in multiple analog frames per frame of point data.)

            The first three columns in the returned point data are the (x, y, z)
            coordinates of the observed motion capture point. The fourth column
            is an estimate of the error for this particular point, and the fifth
            column is the number of cameras that observed the point in question.
            Both the fourth and fifth values are -1 if the point is considered
            to be invalid.
        '''
        # Point magnitude scalar, if scale parameter is < 0 data is floating point
        # (in which case the magnitude is the absolute value)
        scale_mag = abs(self.point_scale)
        is_float = self.point_scale < 0

        if is_float:
            point_word_bytes = 4
            point_dtype = self._dtypes.uint32
        else:
            point_word_bytes = 2
            point_dtype = self._dtypes.int16
        points = np.zeros((self.point_used, 5), np.float32)

        # TODO: handle ANALOG:BITS parameter here!
        p = self.get('ANALOG:FORMAT')
        analog_unsigned = p and p.string_value.strip().upper() == 'UNSIGNED'
        if is_float:
            analog_dtype = self._dtypes.float32
            analog_word_bytes = 4
        elif analog_unsigned:
            # Note*: Floating point is 'always' defined for both analog and point data, according to the standard.
            analog_dtype = self._dtypes.uint16
            analog_word_bytes = 2
            # Verify BITS parameter for analog
            p = self.get('ANALOG:BITS')
            if p and p._as_integer_value / 8 != analog_word_bytes:
                raise NotImplementedError('Analog data using {} bits is not supported.'.format(p._as_integer_value))
        else:
            analog_dtype = self._dtypes.int16
            analog_word_bytes = 2

        analog = np.array([], float)
        # Per-channel hardware offsets (subtracted before scaling).
        offsets = np.zeros((self.analog_used, 1), int)
        param = self.get('ANALOG:OFFSET')
        if param is not None:
            offsets = param.int16_array[:self.analog_used, None]
        # Per-channel scale factors.
        analog_scales = np.ones((self.analog_used, 1), float)
        param = self.get('ANALOG:SCALE')
        if param is not None:
            analog_scales[:, :] = param.float_array[:self.analog_used, None]
        # Global analog scale factor.
        gen_scale = 1.
        param = self.get('ANALOG:GEN_SCALE')
        if param is not None:
            gen_scale = param.float_value

        # Seek to the start point of the data blocks
        self._handle.seek((self._header.data_block - 1) * 512)
        # Number of values (words) read in regard to POINT/ANALOG data
        N_point = 4 * self.point_used
        N_analog = self.analog_used * self.analog_per_frame
        # Total bytes per frame
        point_bytes = N_point * point_word_bytes
        analog_bytes = N_analog * analog_word_bytes
        # Parse the data blocks
        for frame_no in range(self.first_frame, self.last_frame + 1):
            # Read the byte data (used) for the block
            raw_bytes = self._handle.read(N_point * point_word_bytes)
            raw_analog = self._handle.read(N_analog * analog_word_bytes)
            # Verify read pointers (any of the two can be assumed to be 0)
            if len(raw_bytes) < point_bytes:
                warnings.warn('''reached end of file (EOF) while reading POINT data at frame index {}
                                 and file pointer {}!'''.format(frame_no - self.first_frame, self._handle.tell()))
                return
            if len(raw_analog) < analog_bytes:
                # NOTE(review): message says POINT but this guards the ANALOG
                # read — wording looks copy-pasted; confirm before changing.
                warnings.warn('''reached end of file (EOF) while reading POINT data at frame index {}
                                 and file pointer {}!'''.format(frame_no - self.first_frame, self._handle.tell()))
                return

            if is_float:
                # Convert every 4 byte words to a float-32 representation
                # (the fourth column is still not a float32 representation)
                if self._dtypes.is_dec:
                    # Convert each of the first 6 16-bit words from DEC to IEEE float
                    points[:, :4] = DEC_to_IEEE_BYTES(raw_bytes).reshape((self.point_used, 4))
                else:  # If IEEE or MIPS:
                    # Re-read the raw byte representation directly
                    points[:, :4] = np.frombuffer(raw_bytes,
                                                  dtype=self._dtypes.float32,
                                                  count=N_point).reshape((int(self.point_used), 4))

                # Parse the camera-observed bits and residuals.
                # Notes:
                # - Invalid sample if residual is equal to -1.
                # - A residual of 0.0 represent modeled data (filtered or interpolated).
                # - The same format should be used internally when a float or integer representation is used,
                #   with the difference that the words are 16 and 8 bit respectively (see the MLS guide).
                # - While words are 16 bit, residual and camera mask is always interpreted as 8 packed in a single word!
                # - 16 or 32 bit may represent a sign (indication that certain files write a -1 floating point only)
                last_word = points[:, 3].astype(np.int32)
                valid = (last_word & 0x80008000) == 0
                points[~valid, 3:5] = -1.0
                c = last_word[valid]
            else:
                # Convert the bytes to a unsigned 32 bit or signed 16 bit representation
                raw = np.frombuffer(raw_bytes,
                                    dtype=point_dtype,
                                    count=N_point).reshape((self.point_used, 4))
                # Read point 2 byte words in int-16 format
                points[:, :3] = raw[:, :3] * scale_mag

                # Parse last 16-bit word as two 8-bit words
                valid = raw[:, 3] > -1
                points[~valid, 3:5] = -1
                c = raw[valid, 3].astype(self._dtypes.uint16)

            # Convert coordinate data
            # fourth value is floating-point (scaled) error estimate (residual)
            points[valid, 3] = (c & 0xff).astype(np.float32) * scale_mag
            # fifth value is number of bits set in camera-observation byte
            points[valid, 4] = sum((c & (1 << k)) >> k for k in range(8, 15))
            # Get value as is: points[valid, 4] = (c >> 8)

            # Check if analog data exist, and parse if so
            if N_analog > 0:
                if is_float and self._dtypes.is_dec:
                    # Convert each of the 16-bit words from DEC to IEEE float
                    analog = DEC_to_IEEE_BYTES(raw_analog)
                else:
                    # Integer or INTEL/MIPS floating point data can be parsed directly
                    analog = np.frombuffer(raw_analog, dtype=analog_dtype, count=N_analog)

                # Reformat and convert
                analog = analog.reshape((-1, self.analog_used)).T
                analog = analog.astype(float)
                # Convert analog
                analog = (analog - offsets) * analog_scales * gen_scale

            # Output buffers
            if copy:
                yield frame_no, points.copy(), analog  # .copy(), a new array is generated per frame for analog data.
            else:
                yield frame_no, points, analog

        # Function evaluating EOF, note that data section is written in blocks of 512
        final_byte_index = self._handle.tell()
        self._handle.seek(0, 2)  # os.SEEK_END)
        # Check if more then 1 block remain
        if self._handle.tell() - final_byte_index >= 512:
            warnings.warn('incomplete reading of data blocks. {} bytes remained after all datablocks were read!'.format(
                self._handle.tell() - final_byte_index))

    @property
    def proc_type(self):
        """
        Get the processor type associated with the data format in the file.
        """
        processor_type = ['PROCESSOR_INTEL', 'PROCESSOR_DEC', 'PROCESSOR_MIPS']
        return processor_type[self._dtypes.proc_type - PROCESSOR_INTEL]
class Writer(Manager):
'''This class writes metadata and frames to a C3D file.
For example, to read an existing C3D file, apply some sort of data
processing to the frames, and write out another C3D file::
>>> r = c3d.Reader(open('data.c3d', 'rb'))
>>> w = c3d.Writer()
>>> w.add_frames(process_frames_somehow(r.read_frames()))
>>> with open('smoothed.c3d', 'wb') as handle:
>>> w.write(handle)
Parameters
----------
point_rate : float, optional
The frame rate of the data. Defaults to 480.
analog_rate : float, optional
The number of analog samples per frame. Defaults to 0.
point_scale : float, optional
The scale factor for point data. Defaults to -1 (i.e., "check the
POINT:SCALE parameter").
point_units : str, optional
The units that the point numbers represent. Defaults to ``'mm '``.
gen_scale : float, optional
General scaling factor for data. Defaults to 1.
'''
def __init__(self,
             point_rate=480.,
             analog_rate=0.,
             point_scale=-1.,
             point_units='mm ',
             gen_scale=1.):
    '''Set metadata for this writer.

    Parameters are documented on the class docstring; values are cached
    here and the frame buffer is initialized empty.
    '''
    # Always write INTEL format
    self._dtypes = DataTypes(PROCESSOR_INTEL)
    super(Writer, self).__init__()
    self._point_rate = point_rate
    self._analog_rate = analog_rate
    # NOTE(review): assumes point_rate != 0 — division raises otherwise.
    self._analog_per_frame = analog_rate / point_rate
    self._point_scale = point_scale
    self._point_units = point_units
    self._gen_scale = gen_scale
    self._frames = []
def add_frames(self, frames):
    '''Append motion frames to this writer's pending frame buffer.

    Parameters
    ----------
    frames : sequence of (point, analog) tuples
        Frame data to queue for writing.
    '''
    for frame in frames:
        self._frames.append(frame)
def _pad_block(self, handle):
'''Pad the file with 0s to the end of the next block boundary.'''
extra = handle.tell() % 512
if extra:
handle.write(b'\x00' * (512 - extra))
def _write_metadata(self, handle):
    '''Write metadata to a file handle.

    Parameters
    ----------
    handle : file
        Write metadata and C3D motion frames to the given file handle. The
        writer does not close the handle.
    '''
    self._check_metadata()

    # Header
    self._header.write(handle)
    self._pad_block(handle)
    # The parameter section starts at the second 512-byte block.
    assert handle.tell() == 512

    # Groups: 4-byte section header, then each group with its parameters.
    handle.write(struct.pack(
        'BBBB', 0, 0, self.parameter_blocks(), PROCESSOR_INTEL))
    for group_id, group in self.group_listed():
        group.write(group_id, handle)

    # Padding: fill whole blocks until the data block position recorded in
    # the header is reached.
    self._pad_block(handle)
    while handle.tell() != 512 * (self.header.data_block - 1):
        handle.write(b'\x00' * 512)
def _write_frames(self, handle):
'''Write our frame data to the given file handle.
Parameters
----------
handle : file
Write metadata and C3D motion frames to the given file handle. The
writer does not close the handle.
'''
assert handle.tell() == 512 * (self._header.data_block - 1)
scale = abs(self.point_scale)
is_float = self.point_scale < 0
if is_float:
point_dtype = np.float32
point_format = 'f'
point_scale = 1.0
else:
point_dtype = np.int16
point_format = 'i'
point_scale = scale
raw = np.empty((self.point_used, 4), point_dtype)
for points, analog in self._frames:
valid = points[:, 3] > -1
raw[~valid, 3] = -1
raw[valid, :3] = points[valid, :3] / point_scale
raw[valid, 3] = (
((points[valid, 4]).astype(np.uint8) << 8) |
(points[valid, 3] / scale).astype(np.uint16)
)
point = array.array(point_format)
point.extend(raw.flatten())
point.tofile(handle)
analog = array.array(point_format)
analog.extend(analog)
analog.tofile(handle)
self._pad_block(handle)
    def write(self, handle, labels):
        '''Write metadata and point + analog frames to a file handle.

        Parameters
        ----------
        handle : file
            Write metadata and C3D motion frames to the given file handle. The
            writer does not close the handle.
        labels : sequence of str
            One label per 3D point; flattened and space-padded to a common
            width for the POINT:LABELS parameter.
        '''
        if not self._frames:
            return
        # Helpers below each add one parameter to the *current* `group`
        # (the closure variable rebound as each group is created).
        def add(name, desc, bpe, format, bytes, *dimensions):
            group.add_param(name,
                            desc=desc,
                            bytes_per_element=bpe,
                            bytes=struct.pack(format, bytes),
                            dimensions=list(dimensions))
        def add_str(name, desc, bytes, *dimensions):
            group.add_param(name,
                            desc=desc,
                            bytes_per_element=-1,
                            bytes=bytes.encode('utf-8'),
                            dimensions=list(dimensions))
        def add_empty_array(name, desc, bpe):
            group.add_param(name, desc=desc,
                            bytes_per_element=bpe, dimensions=[0])
        # Sizes are taken from the first frame; later frames are assumed to
        # have the same shape -- nothing validates this (TODO confirm).
        points, analog = self._frames[0]
        ppf = len(points)
        labels = np.ravel(labels)
        # POINT group
        # Get longest label name
        label_max_size = 0
        label_max_size = max(label_max_size, np.max([len(label) for label in labels]))
        group = self.add_group(1, 'POINT', 'POINT group')
        add('USED', 'Number of 3d markers', 2, '<H', ppf)
        # FRAMES is a 16-bit field, so the count saturates at 65535.
        add('FRAMES', 'frame count', 2, '<H', min(65535, len(self._frames)))
        # Placeholder value; patched below once parameter size is known.
        add('DATA_START', 'data block number', 2, '<H', 0)
        add('SCALE', '3d scale factor', 4, '<f', np.float32(self._point_scale))
        add('RATE', '3d data capture rate', 4, '<f', np.float32(self._point_rate))
        add_str('X_SCREEN', 'X_SCREEN parameter', '+X', 2)
        add_str('Y_SCREEN', 'Y_SCREEN parameter', '+Y', 2)
        add_str('UNITS', '3d data units',
                self._point_units, len(self._point_units))
        add_str('LABELS', 'labels', ''.join(labels[i].ljust(label_max_size)
                                            for i in range(ppf)), label_max_size, ppf)
        add_str('DESCRIPTIONS', 'descriptions', ' ' * 16 * ppf, 16, ppf)
        # ANALOG group
        # NOTE(review): assumes `analog` is 2D with channels on axis 0.
        group = self.add_group(2, 'ANALOG', 'ANALOG group')
        add('USED', 'analog channel count', 2, '<H', analog.shape[0])
        add('RATE', 'analog samples per second', 4, '<f', np.float32(self._analog_rate))
        add('GEN_SCALE', 'analog general scale factor', 4, '<f', np.float32(self._gen_scale))
        add_empty_array('SCALE', 'analog channel scale factors', 4)
        add_empty_array('OFFSET', 'analog channel offsets', 2)
        # TRIAL group
        # 32-bit frame indices stored as two 16-bit words (bpe 2, dim 2).
        group = self.add_group(3, 'TRIAL', 'TRIAL group')
        add('ACTUAL_START_FIELD', 'actual start frame', 2, '<I', 1, 2)
        add('ACTUAL_END_FIELD', 'actual end frame', 2, '<I', len(self._frames), 2)
        # sync parameter information to header.
        blocks = self.parameter_blocks()
        self.get('POINT:DATA_START').bytes = struct.pack('<H', 2 + blocks)
        self._header.data_block = np.uint16(2 + blocks)
        self._header.frame_rate = np.float32(self._point_rate)
        self._header.last_frame = np.uint16(min(len(self._frames), 65535))
        self._header.point_count = np.uint16(ppf)
        self._header.analog_count = np.uint16(np.prod(analog.shape))
        self._header.analog_per_frame = np.uint16(self._analog_per_frame)
        self._header.scale_factor = np.float32(self._point_scale)
        self._write_metadata(handle)
        self._write_frames(handle)
|
import argparse
import sys
from .drawing import Drawing
from .runner import makeDrawbotNamespace, runScriptSource
def main(args=None):
    """Run a DrawBot script from the command line.

    Parameters
    ----------
    args:
        Argument list to parse; defaults to ``sys.argv[1:]``.

    The script source is executed inside a fresh DrawBot namespace and the
    resulting drawing is saved to each requested output file.

    Fix: the script file handle opened by ``argparse.FileType("r")`` was
    never closed; it is now closed as soon as the source has been read.
    """
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser(
        prog="drawbot",
        description="Command line DrawBot tool.",
    )
    parser.add_argument(
        "drawbot_script",
        type=argparse.FileType("r"),
        help="The Drawbot script to run.",
    )
    parser.add_argument(
        "output_file",
        nargs="*",
        default=[],
        help="A filename for the output graphic.",
    )
    arguments = parser.parse_args(args)
    db = Drawing()
    namespace = makeDrawbotNamespace(db)
    script_file = arguments.drawbot_script
    try:
        source = script_file.read()
    finally:
        script_file.close()
    runScriptSource(source, script_file.name, namespace)
    for path in arguments.output_file:
        db.saveImage(path)
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""
Solution to Project Euler problem 120
Author: Jaime Liew
https://github.com/jaimeliew1/Project_Euler_Solutions
"""
def run():
    """Solve Project Euler 120: maximum remainder of (a-1)^n + (a+1)^n mod a^2.

    Derivation (replaces the big-integer brute force, which recomputed full
    powers like (a+1)**2000 and took minutes): expanding binomially mod a**2
    leaves r = 2 for even n and r = 2*n*a (mod a**2) for odd n.  Writing
    n = q*a + s gives 2*n*a = 2*s*a (mod a**2), so the maximum remainder is
    2*s*a with s = (a-1)//2 -- an odd n congruent to s mod a always exists,
    since one of s and s+a is odd.  Hence r_max = 2*a*((a-1)//2), i.e.
    a*(a-1) for odd a and a*(a-2) for even a.

    Returns
    -------
    int
        Sum of r_max for 3 <= a <= 1000 (identical to the original output).
    """
    total = 0
    for a in range(3, 1001):
        total += 2 * a * ((a - 1) // 2)
    return total
if __name__ == "__main__":
print(run())
|
# !/usr/bin/env python
# -*- coding:utf-8 -*-
##############################################################
# Lempel-Ziv-Stac decompression
# BitReader and RingList classes
#
# Copyright (C) 2011 Filippo Valsorda - FiloSottile
# filosottile.wiki gmail.com - www.pytux.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################
import collections
class BitReader:
    """
    Wraps bytes-like input (bytes, bytearray, mmap, or a Python-2-style
    iterable of single-character strings) and permits extracting its bits
    one by one, most significant bit first, like a stream.

    Fix: the original always called ``ord(byte)``, which fails on Python 3
    where iterating ``bytes`` already yields ints; both forms are accepted
    now.
    """

    def __init__(self, bytes):
        self._bits = collections.deque()
        for byte in bytes:
            # py3 bytes iteration yields ints; py2 str iteration yields chars.
            if not isinstance(byte, int):
                byte = ord(byte)
            for n in range(8):
                self._bits.append(bool((byte >> (7 - n)) & 1))

    def getBit(self):
        """Pop and return the next bit (leftmost/MSB first)."""
        return self._bits.popleft()

    def getBits(self, num):
        """Pop *num* bits and return them as an unsigned integer."""
        res = 0
        for i in range(num):
            res += self.getBit() << num - 1 - i
        return res

    def getByte(self):
        """Pop the next 8 bits as an unsigned byte value."""
        return self.getBits(8)

    def __len__(self):
        """Number of bits still available."""
        return len(self._bits)
class RingList:
    """
    Fixed-capacity list: once *length* items have been appended, every
    further append evicts the oldest item.
    """

    def __init__(self, length):
        self.__data__ = collections.deque()
        self.__full__ = False
        self.__max__ = length

    def append(self, x):
        """Append *x*, evicting the oldest item when at capacity."""
        if self.__full__:
            self.__data__.popleft()
        self.__data__.append(x)
        if self.size() == self.__max__:
            self.__full__ = True

    def get(self):
        """Return the underlying deque (oldest item first)."""
        return self.__data__

    def size(self):
        """Current number of stored items."""
        return len(self.__data__)

    def maxsize(self):
        """Capacity this ring was created with."""
        return self.__max__

    def __getitem__(self, n):
        # Out-of-range positive indices yield None rather than raising;
        # negative indices fall through to the deque's own semantics.
        if n >= self.size():
            return None
        return self.__data__[n]
def LZSDecompress(data, window=None):
    """
    Decompress Lempel-Ziv-Stac data.

    Parameters
    ----------
    data : bytes-like or iterable of chars
        The compressed bit stream.
    window : RingList, optional
        A pre-populated dictionary; a fresh 2048-entry RingList is created
        when omitted.

    Returns
    -------
    (str, RingList)
        The decompressed string and the final dictionary.

    Fix: the default window was ``RingList(2048)`` evaluated once at
    definition time, so every default-argument call shared (and polluted)
    the same dictionary -- the classic mutable-default bug.  ``None`` plus
    an in-function default restores one fresh window per call.
    """
    if window is None:
        window = RingList(2048)
    reader = BitReader(data)
    result = ''
    while True:
        bit = reader.getBit()
        if not bit:
            # 0-prefixed: literal byte.
            char = reader.getByte()
            result += chr(char)
            window.append(char)
        else:
            bit = reader.getBit()
            if bit:
                # 7-bit offset; offset 0 is the end-of-stream marker.
                offset = reader.getBits(7)
                if offset == 0:
                    # EOF
                    break
            else:
                offset = reader.getBits(11)
            # Decode the variable-width length field.
            lenField = reader.getBits(2)
            if lenField < 3:
                length = lenField + 2
            else:
                lenField <<= 2
                lenField += reader.getBits(2)
                if lenField < 15:
                    length = (lenField & 0x0f) + 5
                else:
                    lenCounter = 0
                    lenField = reader.getBits(4)
                    while lenField == 15:
                        lenField = reader.getBits(4)
                        lenCounter += 1
                    length = 15 * lenCounter + 8 + lenField
            # Copy `length` bytes from `offset` positions back in the window.
            for i in range(length):
                char = window[-offset]
                result += chr(char)
                window.append(char)
    return result, window
|
#!/usr/bin/env python3
"""
Build model
"""
import tensorflow.keras as Keras
def build_model(nx, layers, activations, lambtha, keep_prob):
    """
    Build a sequential Keras network with L2 regularization and dropout.

    nx: number of input features; layers: units per layer; activations:
    activation per layer; lambtha: L2 weight; keep_prob: keep probability
    (dropout rate is 1 - keep_prob, applied before every hidden layer
    after the first).
    """
    regularizer = Keras.regularizers.l2(lambtha)
    network = Keras.Sequential()
    # First layer carries the input shape.
    network.add(Keras.layers.Dense(
        layers[0],
        activation=activations[0],
        input_shape=(nx,),
        kernel_regularizer=regularizer))
    # Remaining layers each get a dropout layer in front.
    for idx in range(1, len(layers)):
        network.add(Keras.layers.Dropout(1 - keep_prob))
        network.add(Keras.layers.Dense(
            layers[idx],
            activation=activations[idx],
            kernel_regularizer=regularizer))
    return network
|
import requests
import logging
import json
import boto3
from botocore.exceptions import ClientError
from urllib2 import build_opener, HTTPHandler, Request
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def get_secret(secret_name, ssm_endpoint_url, region_name):
    """Fetch a secret value from AWS Secrets Manager.

    Returns the SecretString when present, otherwise the SecretBinary.
    Returns None when the lookup raises a ClientError (matching the
    original implicit fall-through behavior).

    Fix: ``print e`` was Python-2-only statement syntax; the parenthesized
    form behaves the same and matches the rest of this module.
    """
    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name,
        endpoint_url=ssm_endpoint_url
    )
    try:
        get_secret_value_response = client.get_secret_value(
            SecretId=secret_name
        )
    except ClientError as e:
        print(e)
        error_code = e.response['Error']['Code']
        if error_code == 'ResourceNotFoundException':
            print("The requested secret " + secret_name + " was not found")
        elif error_code == 'InvalidRequestException':
            print("The request was invalid due to:", e)
        elif error_code == 'InvalidParameterException':
            print("The request had invalid params:", e)
        # Preserve original behavior: errors yield None, not an exception.
        return None
    if 'SecretString' in get_secret_value_response:
        return get_secret_value_response['SecretString']
    return get_secret_value_response['SecretBinary']
def handler(event, context):
    """CloudFormation custom-resource handler that provisions Splunk HEC tokens.

    Reads the Splunk management endpoint and credentials from the event's
    ResourceProperties, creates any missing HEC tokens (with per-sourcetype
    source/useACK overrides), then reports the token values back to
    CloudFormation via sendResponse().

    Fix: ``print "Creating token: " + ...`` used Python-2-only statement
    syntax; the parenthesized form behaves identically and is consistent
    with the rest of this module.
    """
    endpoint = event["ResourceProperties"]["SplunkHttpEventCollectorManagementURL"]
    splunk_user = event["ResourceProperties"]["SplunkUser"]
    secret_name = event["ResourceProperties"]["SecretManagerSecretName"]
    region_name = event["ResourceProperties"]["AWSRegion"]
    ssm_endpoint_url = "https://secretsmanager." + region_name + ".amazonaws.com"
    secret_response = get_secret(secret_name, ssm_endpoint_url, region_name)
    # List tokens and remove from token list if we already have them.
    # NOTE(review): verify=False disables TLS certificate checks --
    # presumably for self-signed Splunk certs; confirm this is intended.
    response = requests.get(endpoint + '/services/data/inputs/http?output_mode=json', verify=False,
                            auth=(splunk_user, secret_response))
    json_data = json.loads(response.text)
    cloudtrail_token_name = 'generated-cloudtrail'
    config_notification_token_name = 'generated-config-notification'
    config_snapshot_token_name = 'generated-config-snapshot'
    cloudwatch_token_name = 'generated-cloudwatch'
    guardduty_token_name = 'generated-guardduty'
    cloudtrail_sourcetype = 'aws:cloudtrail'
    config_notification_sourcetype = 'aws:config:notification'
    config_snapshot_sourcetype = 'aws:config'
    cloudwatch_sourcetype = 'aws:cloudwatch'
    guardduty_sourcetype = 'aws:cloudwatch:guardduty'
    token_names = [cloudtrail_token_name, config_notification_token_name, config_snapshot_token_name,
                   cloudwatch_token_name, guardduty_token_name]
    token_name_sourcetype_map = {cloudtrail_token_name: cloudtrail_sourcetype,
                                 config_notification_token_name: config_notification_sourcetype,
                                 config_snapshot_token_name: config_snapshot_sourcetype,
                                 cloudwatch_token_name: cloudwatch_sourcetype,
                                 guardduty_token_name: guardduty_sourcetype}
    # Splunk prefixes token names with "http://"; strip before comparing.
    for token_data in json_data["entry"]:
        token_name = token_data["name"].split("http://")[1]
        if token_name in token_name_sourcetype_map:
            del token_name_sourcetype_map[token_name]
    # Create tokens that don't already exist
    for token_name in token_name_sourcetype_map:
        print("Creating token: " + token_name)
        sourcetype = token_name_sourcetype_map[token_name]
        data = [
            ('name', token_name),
        ]
        # source overrides for firehose sourcetypes
        if (sourcetype == "aws:config:notification"):
            data.append(('source', "aws_firehose_confignotification"))
        if (sourcetype == "aws:cloudtrail"):
            data.append(('source', "aws_firehose_cloudtrail"))
        if (sourcetype == "aws:cloudwatch:guardduty"):
            data.append(('source', "aws_cloudwatchevents_guardduty"))
        response = requests.post(endpoint + '/services/data/inputs/http',
                                 data=data, verify=False, auth=(splunk_user, secret_response))
        data = [
            ('sourcetype', sourcetype),
            ('enabled', '1')
        ]
        # useACK overrides for firehose sourcetypes
        if (sourcetype == "aws:config:notification" or sourcetype == "aws:cloudtrail" or sourcetype == "aws:cloudwatch:guardduty"):
            data.append(('useACK', "1"))
        response = requests.post(
            endpoint + '/services/data/inputs/http/' + token_name, data=data,
            verify=False, auth=(splunk_user, secret_response))
    # Grab all tokens (included newly created ones - if any)
    response = requests.get(endpoint + '/services/data/inputs/http?output_mode=json', verify=False,
                            auth=(splunk_user, secret_response))
    json_data = json.loads(response.text)
    HEC_tokens = {}
    for token_data in json_data["entry"]:
        if (token_data["name"].split("http://")[1] in token_names):
            HEC_tokens[token_data["name"].split("http://")[1]] = token_data["content"]["token"]
    sendResponse(event, context, "SUCCESS", {"Message": "Splunk HEC configuration successful!",
                                             "CloudTrailHECToken": HEC_tokens["generated-cloudtrail"],
                                             "CloudWatchHECToken": HEC_tokens["generated-cloudwatch"],
                                             "ConfigNotificationHECToken": HEC_tokens["generated-config-notification"],
                                             "ConfigSnapshotHECToken": HEC_tokens["generated-config-snapshot"],
                                             "GuardDutyHECToken": HEC_tokens["generated-guardduty"]})
def sendResponse(event, context, responseStatus, responseData):
    """PUT a custom-resource result back to the CloudFormation ResponseURL."""
    responseBody = json.dumps({
        "Status": responseStatus,
        "Reason": "See the details in CloudWatch Log Stream: " + context.log_stream_name,
        "PhysicalResourceId": context.log_stream_name,
        "StackId": event['StackId'],
        "RequestId": event['RequestId'],
        "LogicalResourceId": event['LogicalResourceId'],
        "Data": responseData
    })
    logger.info('ResponseURL: {}'.format(event['ResponseURL']))
    logger.info('ResponseBody: {}'.format(responseBody))
    put_request = Request(event['ResponseURL'], data=responseBody)
    put_request.add_header('Content-Type', '')
    put_request.add_header('Content-Length', len(responseBody))
    # The pre-signed S3 URL expects the PUT verb.
    put_request.get_method = lambda: 'PUT'
    result = build_opener(HTTPHandler).open(put_request)
    print("Status code: {}".format(result.getcode()))
    print("Status message: {}".format(result.msg))
import logging
import random
import os
import datetime
import json
import pytz
import sentry_sdk
from dff import dialogflow_extension
import common.dialogflow_framework.utils.state as state_utils
import common.dialogflow_framework.utils.condition as condition_utils
from common.constants import CAN_CONTINUE_PROMPT, MUST_CONTINUE
# from common.greeting import HI_THIS_IS_ALEXA
from common.starter import (
INTROS,
OUTROS,
CATEGORIES_VERBS,
PERSONA1_GENRES,
GENRES_ATTITUDES,
GENRE_ITEMS,
WEEKDAYS_ATTITUDES,
WHATS_YOUR_FAV_PHRASES,
WHY_QUESTIONS,
ACKNOWLEDGEMENTS,
MY_FAV_ANSWERS,
WONDER_WHY_QUESTIONS,
OH_PHRASES,
SO_YOU_SAY_PHRASES,
ASSENT_YES_PHRASES,
ASSENT_NO_PHRASES,
)
from common.music import OPINION_REQUESTS_ABOUT_MUSIC
import dialogflows.scopes as scopes
# from dialogflows.flows import shared
from dialogflows.flows.starter_states import State
# Error reporting + verbose logging for the whole skill module.
sentry_sdk.init(os.getenv("SENTRY_DSN"))
logging.basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", level=logging.DEBUG)
logger = logging.getLogger(__name__)
# Favorite-topic data shared across responses; loaded once at import time.
with open("common/topic_favorites.json", "r") as f:
    FAV_STORIES_TOPICS = json.load(f)
# Response-confidence levels used by the handlers below.
CONF_HIGH = 1.0
CONF_MIDDLE = 0.95
CONF_LOW = 0.9
# %%
##################################################################################################################
# Init DialogFlow
##################################################################################################################
simplified_dialogflow = dialogflow_extension.DFEasyFilling(State.USR_START)
##################################################################################################################
##################################################################################################################
# Design DialogFlow.
##################################################################################################################
##################################################################################################################
##################################################################################################################
# yes
##################################################################################################################
def yes_request(ngrams, vars):
    """True when the user's last utterance expresses agreement."""
    result = condition_utils.is_yes_vars(vars)
    logger.info(f"yes_request {result}")
    return result
##################################################################################################################
# no
##################################################################################################################
def no_request(ngrams, vars):
    """True when the user's last utterance expresses disagreement."""
    result = condition_utils.is_no_vars(vars)
    logger.info(f"no_request {result}")
    return result
##################################################################################################################
# error
##################################################################################################################
def error_response(vars):
    """Fallback response: drop confidence to zero and apologize."""
    state_utils.set_confidence(vars, 0)
    return "Sorry"
##################################################################################################################
# scenario
##################################################################################################################
def genre_request(ngrams, vars):
    """True when the active greeting type is the genre starter."""
    memory = state_utils.get_shared_memory(vars)
    result = memory.get("greeting_type", "") == "starter_genre"
    logger.info(f"genre_request {result}")
    return result
def weekday_request(ngrams, vars):
    """True when the active greeting type is the weekday starter."""
    memory = state_utils.get_shared_memory(vars)
    result = memory.get("greeting_type", "") == "starter_weekday"
    logger.info(f"weekday_request {result}")
    return result
def genre_response(vars):
    """Open the genre scenario: mention a favorite item and ask the user
    about the matching genre/category."""
    try:
        shared_memory = state_utils.get_shared_memory(vars)
        history = shared_memory.get("used_categories", [])
        # Pick a category, then the genre/attitude/item chain for it.
        # (random.choice call order preserved for reproducibility.)
        category = random.choice(list(PERSONA1_GENRES))
        verb = CATEGORIES_VERBS.get(category, "")
        genre = random.choice(PERSONA1_GENRES.get(category, [""]))
        attitude = random.choice(GENRES_ATTITUDES.get(genre, [""]))
        item = random.choice(GENRE_ITEMS.get(genre, [""]))
        # Bail out cleanly when any lookup produced an empty piece.
        if not all([verb, genre, attitude, item]):
            state_utils.set_confidence(vars, 0)
            return error_response(vars)
        state_utils.set_confidence(vars, confidence=CONF_HIGH)
        state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
        state_utils.save_to_shared_memory(
            vars, used_categories=history + [{"category": category, "genre": genre, "item": item}]
        )
        return (
            f"{random.choice(INTROS)} "
            + f"{verb} {item}. {attitude} {random.choice(OUTROS)} {genre} {category}?"
        )
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def positive_request(ngrams, vars):
    """True for a positive-sentiment reply within the genre scenario."""
    # All three conditions are evaluated eagerly (matching the original
    # all([...]) form) so genre_request's logging always fires.
    clean = condition_utils.no_requests(vars)
    mood = state_utils.get_human_sentiment(vars)
    result = all([clean, mood == "positive", genre_request(ngrams, vars)])
    logger.info(f"positive_request {result}")
    return result
def negative_request(ngrams, vars):
    """True for a negative-sentiment reply within the genre scenario."""
    clean = condition_utils.no_requests(vars)
    mood = state_utils.get_human_sentiment(vars)
    result = all([clean, mood == "negative", genre_request(ngrams, vars)])
    logger.info(f"negative_request {result}")
    return result
def neutral_request(ngrams, vars):
    """True for a neutral-sentiment reply within the genre scenario."""
    clean = condition_utils.no_requests(vars)
    mood = state_utils.get_human_sentiment(vars)
    result = all([clean, mood == "neutral", genre_request(ngrams, vars)])
    logger.info(f"neutral_request {result}")
    return result
def friday_request(ngrams, vars):
    """True when the weekday-starter greeting is active and the utterance
    carries no special requests.

    Fix: the log line previously said ``smth_request`` (copy-paste from
    that function), mislabelling this check in the logs.
    """
    flag = all([condition_utils.no_requests(vars), weekday_request(ngrams, vars)])
    logger.info(f"friday_request {flag}")
    return flag
def what_fav_response(vars):
    """Ask for the user's favorite within the most recently used topic."""
    try:
        memory = state_utils.get_shared_memory(vars)
        history = memory.get("used_categories", [])
        topic = history[-1].get("category", "") if history else ""
        genre = history[-1].get("genre", "") if history else ""
        if not topic:
            state_utils.set_confidence(vars, 0)
            return error_response(vars)
        state_utils.set_confidence(vars, confidence=CONF_HIGH)
        state_utils.set_can_continue(vars, continue_flag=CAN_CONTINUE_PROMPT)
        if topic == "music":
            # Music has its own dedicated opinion-request phrasing.
            return random.choice(OPINION_REQUESTS_ABOUT_MUSIC)
        return f"{random.choice(WHATS_YOUR_FAV_PHRASES)} {genre} {topic}?"
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def why_response(vars):
    """Ask the user why they feel that way about the topic.

    Fix: dropped the pointless f-string wrapper around ``random.choice`` --
    the chosen phrase is already the returned string.
    """
    try:
        state_utils.set_confidence(vars, confidence=CONF_HIGH)
        state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
        return random.choice(WHY_QUESTIONS)
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def my_fav_response(vars):
    """Tell the user the bot's own favorite for the current category and
    wonder why the user likes theirs.

    Fix: ``FAV_STORIES_TOPICS.get(category, "")`` used an empty *string*
    default, so an unknown category raised AttributeError on the chained
    ``.get("name", "")`` (masked by the broad except and reported to
    Sentry).  An empty *dict* default takes the clean error path instead.
    """
    try:
        shared_memory = state_utils.get_shared_memory(vars)
        used_topics = shared_memory.get("used_categories", [])
        item = ""
        if used_topics:
            category = used_topics[-1].get("category", "")
            item = FAV_STORIES_TOPICS.get(category, {}).get("name", "")
            # Pluralize for the phrase templates, except uncountable topics.
            if category not in ["series", "music"]:
                category += "s"
        if item:
            state_utils.set_confidence(vars, confidence=CONF_HIGH)
            state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
            # NOTE(review): MY_FAV_ANSWERS is used as a callable building
            # phrase variants -- confirm against common.starter.
            return (
                f"{random.choice(ACKNOWLEDGEMENTS)}"
                + random.choice(MY_FAV_ANSWERS(category, item))
                + f"{random.choice(WONDER_WHY_QUESTIONS)}"
            )
        else:
            state_utils.set_confidence(vars, 0)
            return error_response(vars)
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def reason_request(ngrams, vars):
    """True when the utterance carries no special requests."""
    result = condition_utils.no_requests(vars)
    logger.info(f"reason_request {result}")
    return result
def repeat_response(vars):
    """Echo the user's stated reason back to them."""
    try:
        user_text = state_utils.get_last_human_utterance(vars)["text"].lower()
        state_utils.set_confidence(vars, confidence=CONF_HIGH)
        state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
        # NOTE(review): SO_YOU_SAY_PHRASES is used as a callable building
        # phrase variants from the utterance -- confirm against common.starter.
        return f"{random.choice(OH_PHRASES)} " + random.choice(SO_YOU_SAY_PHRASES(user_text))
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def assent_yes_response(vars):
    """Agree with the user after they confirmed."""
    try:
        state_utils.set_confidence(vars, confidence=CONF_HIGH)
        state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
        return random.choice(ASSENT_YES_PHRASES)
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def assent_no_response(vars):
    """Respond after the user declined."""
    try:
        state_utils.set_confidence(vars, confidence=CONF_HIGH)
        state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
        return random.choice(ASSENT_NO_PHRASES)
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def my_fav_story_response(vars):
    """Tell the bot's prepared story for the current category.

    Fix: the lookup default for an unknown category was ``""`` (a str), so
    the chained ``.get("story", "")`` raised AttributeError and spuriously
    hit the Sentry-reporting except path; an empty dict default takes the
    clean error branch instead.
    """
    try:
        shared_memory = state_utils.get_shared_memory(vars)
        used_categories = shared_memory.get("used_categories", [])
        story = ""
        if used_categories:
            # NOTE(review): entries are dicts from genre_response but plain
            # strings from weekday_response; a string here raises and falls
            # through to the except path -- confirm intended.
            category = used_categories[-1].get("category", "")
            story = FAV_STORIES_TOPICS.get(category, {}).get("story", "")
        if story:
            state_utils.set_confidence(vars, confidence=CONF_HIGH)
            state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
            return story
        else:
            state_utils.set_confidence(vars, 0)
            return error_response(vars)
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def weekday_response(vars):
    """Comment on today's weekday and ask for the user's favorite day.

    Fix: ``datetime.weekday()`` already returns 0 for Monday, matching the
    ``weekdays`` list, so the previous ``- 1`` shifted the announced day to
    *yesterday* (e.g. Monday was reported as Sunday).
    """
    weekdays = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
    weekday = ""
    attitude = ""
    try:
        shared_memory = state_utils.get_shared_memory(vars)
        used_categories = shared_memory.get("used_categories", [])
        category = "day"
        state_utils.save_to_shared_memory(vars, used_categories=used_categories + [category])
        # date.weekday(): Monday == 0 ... Sunday == 6, in US/Mountain time.
        weekday = weekdays[datetime.datetime.now(pytz.timezone("US/Mountain")).weekday()]
        attitude = WEEKDAYS_ATTITUDES.get(weekday, "")
        if weekday and attitude:
            state_utils.set_confidence(vars, confidence=CONF_HIGH)
            state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
            return f"Oh, Gosh, it's {weekday}! {attitude} What's your favorite day of the week?"
        else:
            state_utils.set_confidence(vars, 0)
            return error_response(vars)
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def friday_response(vars):
    """React to the user's favorite weekday; Friday gets special agreement."""
    utt = state_utils.get_last_human_utterance(vars)["text"].lower()
    try:
        if "friday" in utt:
            state_utils.set_confidence(vars, confidence=CONF_HIGH)
            state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
            return "It's my favorite too!"
        # Look for any other weekday mentioned in the utterance.
        mentioned = ""
        for day_name in WEEKDAYS_ATTITUDES:
            if day_name in utt:
                mentioned = day_name
                break
        if not mentioned:
            state_utils.set_confidence(vars, confidence=CONF_MIDDLE)
            state_utils.set_can_continue(vars, continue_flag=CAN_CONTINUE_PROMPT)
            return "Okay. But why?"
        day_attitude = WEEKDAYS_ATTITUDES.get(mentioned, "")
        if day_attitude:
            state_utils.set_confidence(vars, confidence=CONF_MIDDLE)
            state_utils.set_can_continue(vars, continue_flag=CAN_CONTINUE_PROMPT)
            return f"Ah, interesting. I {day_attitude}. Why do you like it?"
        # Mentioned day without an attitude: falls through and returns None
        # (matching the original control flow).
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def my_fav_day_response(vars):
    """Share the bot's own favorite weekday."""
    try:
        state_utils.set_confidence(vars, confidence=CONF_HIGH)
        state_utils.set_can_continue(vars, continue_flag=MUST_CONTINUE)
        return "Aha. Speaking of me, my favorite day is Friday. As the song says, Nothing matters like the weekend."
    except Exception as exc:
        logger.exception(exc)
        sentry_sdk.capture_exception(exc)
        state_utils.set_confidence(vars, 0)
        return error_response(vars)
def smth_request(ngrams, vars):
    """True when the utterance carries no special requests."""
    result = condition_utils.no_requests(vars)
    logger.info(f"smth_request {result}")
    return result
def starter_request(ngrams, vars):
    """True when a starter greeting is active and no special requests came in."""
    memory = state_utils.get_shared_memory(vars)
    greeting = memory.get("greeting_type", "")
    result = condition_utils.no_requests(vars) and (greeting in ["starter_genre", "starter_weekday"])
    logger.info(f"starter_request {result}")
    return result
##################################################################################################################
##################################################################################################################
# linking
##################################################################################################################
##################################################################################################################
##################################################################################################################
# START
# Entry point: route the user's opening utterance by sentiment within the
# genre scenario, or to the Friday exchange for the weekday starter.
simplified_dialogflow.add_user_serial_transitions(
    State.USR_START,
    {
        # State.SYS_GENRE: genre_request,
        State.SYS_CHECK_POSITIVE: positive_request,
        State.SYS_CHECK_NEGATIVE: negative_request,
        State.SYS_CHECK_NEUTRAL: neutral_request,
        # State.SYS_WEEKDAY: weekday_request,
        State.SYS_FRIDAY: friday_request,
    },
)
##################################################################################################################
# GENRE
# simplified_dialogflow.add_system_transition(State.SYS_GENRE, State.USR_GENRE, genre_response)
# simplified_dialogflow.set_error_successor(State.SYS_GENRE, State.SYS_ERR)
# simplified_dialogflow.add_user_serial_transitions(
#     State.USR_GENRE,
#     {
#         State.SYS_CHECK_POSITIVE: positive_request,
#         State.SYS_CHECK_NEGATIVE: negative_request,
#         State.SYS_CHECK_NEUTRAL: neutral_request
#     })
# simplified_dialogflow.set_error_successor(State.USR_GENRE, State.SYS_ERR)
# Sentiment branches: positive -> ask user's favorite; negative -> ask why;
# neutral -> share the bot's own favorite.
simplified_dialogflow.add_system_transition(State.SYS_CHECK_POSITIVE, State.USR_WHAT_FAV, what_fav_response)
simplified_dialogflow.set_error_successor(State.SYS_CHECK_POSITIVE, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_CHECK_NEGATIVE, State.USR_WHY, why_response)
simplified_dialogflow.set_error_successor(State.SYS_CHECK_NEGATIVE, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_CHECK_NEUTRAL, State.USR_MY_FAV, my_fav_response)
simplified_dialogflow.set_error_successor(State.SYS_CHECK_NEUTRAL, State.SYS_ERR)
# "Why" follow-up: capture the reason, echo it, then branch on agreement.
simplified_dialogflow.add_user_transition(State.USR_WHY, State.SYS_GET_REASON, reason_request)
simplified_dialogflow.set_error_successor(State.USR_WHY, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_GET_REASON, State.USR_REPEAT, repeat_response)
simplified_dialogflow.set_error_successor(State.SYS_GET_REASON, State.SYS_ERR)
simplified_dialogflow.add_user_serial_transitions(
    State.USR_REPEAT,
    {
        State.SYS_AGREED: yes_request,
        State.SYS_DISAGREED: no_request,
    },
)
simplified_dialogflow.set_error_successor(State.USR_REPEAT, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_AGREED, State.USR_ASSENT_YES, assent_yes_response)
simplified_dialogflow.set_error_successor(State.SYS_AGREED, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_DISAGREED, State.USR_ASSENT_NO, assent_no_response)
simplified_dialogflow.set_error_successor(State.SYS_DISAGREED, State.SYS_ERR)
# After the bot shares its favorite: yes -> tell the story, no -> ask why.
simplified_dialogflow.add_user_serial_transitions(
    State.USR_MY_FAV,
    {
        State.SYS_YES: yes_request,
        State.SYS_NO: no_request,
    },
)
simplified_dialogflow.set_error_successor(State.USR_MY_FAV, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_YES, State.USR_MY_FAV_STORY, my_fav_story_response)
simplified_dialogflow.set_error_successor(State.SYS_YES, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_NO, State.USR_WHY, why_response)
simplified_dialogflow.set_error_successor(State.SYS_NO, State.SYS_ERR)
#################################################################################################################
# WEEKDAY
# simplified_dialogflow.add_system_transition(State.SYS_WEEKDAY, State.USR_WEEKDAY, weekday_response)
# simplified_dialogflow.set_error_successor(State.SYS_WEEKDAY, State.SYS_ERR)
# simplified_dialogflow.add_user_transition(State.USR_WEEKDAY, State.SYS_FRIDAY, friday_request)
# simplified_dialogflow.set_error_successor(State.USR_WEEKDAY, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_FRIDAY, State.USR_FRIDAY, friday_response)
simplified_dialogflow.set_error_successor(State.SYS_FRIDAY, State.SYS_ERR)
simplified_dialogflow.add_user_transition(State.USR_FRIDAY, State.SYS_SMTH, smth_request)
simplified_dialogflow.set_error_successor(State.USR_FRIDAY, State.SYS_ERR)
simplified_dialogflow.add_system_transition(State.SYS_SMTH, State.USR_MY_FAV_DAY, my_fav_day_response)
simplified_dialogflow.set_error_successor(State.SYS_SMTH, State.SYS_ERR)
#################################################################################################################
# SYS_ERR
# Any error hands control back to the main scenario's root state.
simplified_dialogflow.add_system_transition(
    State.SYS_ERR,
    (scopes.MAIN, scopes.State.USR_ROOT),
    error_response,
)
dialogflow = simplified_dialogflow.get_dialogflow()
|
#! /usr/bin/env python3
"""dosomethingwith.py
input: a file with file or directory names
initially written to programmatically change directory names from a list
produced using Double Commander
copy, rename and adapt the below function(s) to suit your needs
"""
import sys
import pathlib
def reorder_names(temp_file):
    """Rename paths listed in *temp_file* from e.g. "Albert Visser" to "Visser, Albert".

    *temp_file* contains one file or directory path per line. Each path's
    final component is split on its last space into (first, last) and the
    entry is renamed in place to "last, first".

    Blank lines (common at end of file) and names without a space are
    skipped instead of raising ValueError on the unpack.
    """
    with open(temp_file) as _in:
        data = _in.readlines()
    for line in data:
        name = line.strip()
        if not name:
            continue  # ignore empty / trailing lines
        path = pathlib.Path(name)
        if ' ' not in path.name:
            continue  # nothing to reorder
        first, last = path.name.rsplit(' ', 1)
        newpath = path.parent / ', '.join((last, first))
        path.rename(newpath)


if __name__ == '__main__':
    # Guard so importing this module no longer triggers the rename run.
    reorder_names(sys.argv[1])
|
import logging
import re
import subprocess
from anime_downloader.extractors.base_extractor import BaseExtractor
from anime_downloader.sites import helpers
from anime_downloader.util import eval_in_node
logger = logging.getLogger(__name__)
class Streamango(BaseExtractor):
    """Extractor for streamango embeds.

    Pulls the obfuscation script out of the embed page, evaluates it in
    node to decode the stream source, and returns the https stream url.
    """

    def _get_data(self):
        page = helpers.get(self.url)
        # Grab the deobfuscation script and the encoded src argument.
        script = re.findall(
            r'<script type="text/javascript">([^"]+)var srces', page.text)[0]
        encoded_src = re.findall(
            r'src:d\(([^"]+?)\)', page.text)[0]
        # Give the script a window object, then print the decoded value.
        script = "window = {}; \n" + script + f"console.log(window.d({encoded_src}))"
        logger.debug(f"Evaling: {script}")
        decoded = eval_in_node(script)
        return {
            'stream_url': "https:" + decoded,
        }
|
# coding=utf-8
import pytest
from mockito import verifyStubbedInvocationsAreUsed, when
from epab.utils import _next_version, get_next_version
CALVER = '2018.1.2'
@pytest.fixture(autouse=True)
def _mock_calver():
    """Pin _get_calver() to the fixed CALVER for every test in this module.

    After the test ran, verify the stub was actually exercised so a test
    that never hit the calver path fails loudly.
    """
    when(_next_version)._get_calver().thenReturn(CALVER)
    yield
    verifyStubbedInvocationsAreUsed()
def _check_next_version(repo, expected_version):
    """Assert get_next_version() returns *expected_version*, then record both in *repo*."""
    actual = get_next_version()
    assert expected_version == actual
    repo.mark(f'calver: {CALVER}')
    repo.mark(f'next version: {actual}')
def test_next_version_empty_repo(repo):
    """A repo without any tags gets build number 1 for the mocked calver."""
    assert not repo.list_tags()
    assert repo.get_current_branch() == 'master'
    _check_next_version(repo, f'{CALVER}.1')
def test_next_version_stable(repo):
    """An existing tag for the current calver bumps the build number."""
    assert repo.get_current_branch() == 'master'
    repo.tag(f'{CALVER}.1')
    _check_next_version(repo, f'{CALVER}.2')
def test_next_version_stable_older_calver(repo):
    """Tags from an older calver are ignored; the new calver restarts at build 1."""
    assert repo.get_current_branch() == 'master'
    # Plain strings: no placeholders, so the previous f-prefix was spurious (F541).
    repo.tag('2018.1.1.1')
    repo.tag('2018.1.1.2')
    _check_next_version(repo, f'{CALVER}.1')
@pytest.mark.long
def test_next_version_alpha_empty_repo(repo):
    """On a feature branch with no tags the version is an alpha with the branch as local tag."""
    assert repo.get_current_branch() == 'master'
    repo.create_branch_and_checkout('test')
    _check_next_version(repo, f'{CALVER}.1a1+test')
@pytest.mark.long
def test_next_version_alpha(repo):
    """Alpha numbering advances on the branch while master keeps the stable sequence."""
    assert repo.get_current_branch() == 'master'
    # Older calver tags must not influence the result.
    repo.tag('2018.1.1.1')
    repo.tag('2018.1.1.2')
    _check_next_version(repo, f'{CALVER}.1')
    repo.tag(f'{CALVER}.1')
    repo.create_branch_and_checkout('test')
    repo.tag(f'{CALVER}.2a1+test')
    repo.tag(f'{CALVER}.2a2+test')
    _check_next_version(repo, f'{CALVER}.2a3+test')
    # Back on master the alphas are ignored and the stable build is next.
    repo.checkout('master')
    _check_next_version(repo, f'{CALVER}.2')
|
# -*- coding: utf-8 -*-
# Ranked character-trigram table for German text: maps each three-character
# sequence (spaces included) to its frequency rank, 0 = most frequent.
# Typically consumed by an n-gram based language-identification routine.
model = {
    u'en ': 0,
    u'er ': 1,
    u' de': 2,
    u'der': 3,
    u'ie ': 4,
    u' di': 5,
    u'die': 6,
    u'sch': 7,
    u'ein': 8,
    u'che': 9,
    u'ich': 10,
    u'den': 11,
    u'in ': 12,
    u'te ': 13,
    u'ch ': 14,
    u' ei': 15,
    u'ung': 16,
    u'n d': 17,
    u'nd ': 18,
    u' be': 19,
    u'ver': 20,
    u'es ': 21,
    u' zu': 22,
    u'eit': 23,
    u'gen': 24,
    u'und': 25,
    u' un': 26,
    u' au': 27,
    u' in': 28,
    u'cht': 29,
    u'it ': 30,
    u'ten': 31,
    u' da': 32,
    u'ent': 33,
    u' ve': 34,
    u'and': 35,
    u' ge': 36,
    u'ine': 37,
    u' mi': 38,
    u'r d': 39,
    u'hen': 40,
    u'ng ': 41,
    u'nde': 42,
    u' vo': 43,
    u'e d': 44,
    u'ber': 45,
    u'men': 46,
    u'ei ': 47,
    u'mit': 48,
    u' st': 49,
    u'ter': 50,
    u'ren': 51,
    u't d': 52,
    u' er': 53,
    u'ere': 54,
    u'n s': 55,
    u'ste': 56,
    u' se': 57,
    u'e s': 58,
    u'ht ': 59,
    u'des': 60,
    u'ist': 61,
    u'ne ': 62,
    u'auf': 63,
    u'e a': 64,
    u'isc': 65,
    u'on ': 66,
    u'rte': 67,
    u' re': 68,
    u' we': 69,
    u'ges': 70,
    u'uch': 71,
    u' fü': 72,
    u' so': 73,
    u'bei': 74,
    u'e e': 75,
    u'nen': 76,
    u'r s': 77,
    u'ach': 78,
    u'für': 79,
    u'ier': 80,
    u'par': 81,
    u'ür ': 82,
    u' ha': 83,
    u'as ': 84,
    u'ert': 85,
    u' an': 86,
    u' pa': 87,
    u' sa': 88,
    u' sp': 89,
    u' wi': 90,
    u'for': 91,
    u'tag': 92,
    u'zu ': 93,
    u'das': 94,
    u'rei': 95,
    u'he ': 96,
    u'hre': 97,
    u'nte': 98,
    u'sen': 99,
    u'vor': 100,
    u' sc': 101,
    u'ech': 102,
    u'etz': 103,
    u'hei': 104,
    u'lan': 105,
    u'n a': 106,
    u'pd ': 107,
    u'st ': 108,
    u'sta': 109,
    u'ese': 110,
    u'lic': 111,
    u' ab': 112,
    u' si': 113,
    u'gte': 114,
    u' wa': 115,
    u'iti': 116,
    u'kei': 117,
    u'n e': 118,
    u'nge': 119,
    u'sei': 120,
    u'tra': 121,
    u'zen': 122,
    u' im': 123,
    u' la': 124,
    u'art': 125,
    u'im ': 126,
    u'lle': 127,
    u'n w': 128,
    u'rde': 129,
    u'rec': 130,
    u'set': 131,
    u'str': 132,
    u'tei': 133,
    u'tte': 134,
    u' ni': 135,
    u'e p': 136,
    u'ehe': 137,
    u'ers': 138,
    u'g d': 139,
    u'nic': 140,
    u'von': 141,
    u' al': 142,
    u' pr': 143,
    u'an ': 144,
    u'aus': 145,
    u'erf': 146,
    u'r e': 147,
    u'tze': 148,
    u'tür': 149,
    u'uf ': 150,
    u'ag ': 151,
    u'als': 152,
    u'ar ': 153,
    u'chs': 154,
    u'end': 155,
    u'ge ': 156,
    u'ige': 157,
    u'ion': 158,
    u'ls ': 159,
    u'n m': 160,
    u'ngs': 161,
    u'nis': 162,
    u'nt ': 163,
    u'ord': 164,
    u's s': 165,
    u'sse': 166,
    u' tü': 167,
    u'ahl': 168,
    u'e b': 169,
    u'ede': 170,
    u'em ': 171,
    u'len': 172,
    u'n i': 173,
    u'orm': 174,
    u'pro': 175,
    u'rke': 176,
    u'run': 177,
    u's d': 178,
    u'wah': 179,
    u'wer': 180,
    u'ürk': 181,
    u' me': 182,
    u'age': 183,
    u'att': 184,
    u'ell': 185,
    u'est': 186,
    u'hat': 187,
    u'n b': 188,
    u'oll': 189,
    u'raf': 190,
    u's a': 191,
    u'tsc': 192,
    u' es': 193,
    u' fo': 194,
    u' gr': 195,
    u' ja': 196,
    u'abe': 197,
    u'auc': 198,
    u'ben': 199,
    u'e n': 200,
    u'ege': 201,
    u'lie': 202,
    u'n u': 203,
    u'r v': 204,
    u're ': 205,
    u'rit': 206,
    u'sag': 207,
    u' am': 208,
    u'agt': 209,
    u'ahr': 210,
    u'bra': 211,
    u'de ': 212,
    u'erd': 213,
    u'her': 214,
    u'ite': 215,
    u'le ': 216,
    u'n p': 217,
    u'n v': 218,
    u'or ': 219,
    u'rbe': 220,
    u'rt ': 221,
    u'sic': 222,
    u'wie': 223,
    u'übe': 224,
    u' is': 225,
    u' üb': 226,
    u'cha': 227,
    u'chi': 228,
    u'e f': 229,
    u'e m': 230,
    u'eri': 231,
    u'ied': 232,
    u'mme': 233,
    u'ner': 234,
    u'r a': 235,
    u'sti': 236,
    u't a': 237,
    u't s': 238,
    u'tis': 239,
    u' ko': 240,
    u'arb': 241,
    u'ds ': 242,
    u'gan': 243,
    u'n z': 244,
    u'r f': 245,
    u'r w': 246,
    u'ran': 247,
    u'se ': 248,
    u't i': 249,
    u'wei': 250,
    u'wir': 251,
    u' br': 252,
    u' np': 253,
    u'am ': 254,
    u'bes': 255,
    u'd d': 256,
    u'deu': 257,
    u'e g': 258,
    u'e k': 259,
    u'efo': 260,
    u'et ': 261,
    u'eut': 262,
    u'fen': 263,
    u'hse': 264,
    u'lte': 265,
    u'n r': 266,
    u'npd': 267,
    u'r b': 268,
    u'rhe': 269,
    u't w': 270,
    u'tz ': 271,
    u' fr': 272,
    u' ih': 273,
    u' ke': 274,
    u' ma': 275,
    u'ame': 276,
    u'ang': 277,
    u'd s': 278,
    u'eil': 279,
    u'el ': 280,
    u'era': 281,
    u'erh': 282,
    u'h d': 283,
    u'i d': 284,
    u'kan': 285,
    u'n f': 286,
    u'n l': 287,
    u'nts': 288,
    u'och': 289,
    u'rag': 290,
    u'rd ': 291,
    u'spd': 292,
    u'spr': 293,
    u'tio': 294,
    u' ar': 295,
    u' en': 296,
    u' ka': 297,
    u'ark': 298,
    u'ass': 299,
}
|
# Exercise 01.3
# Author: Leonardo Ferreira Santos
def quadratic(a, b, c):
    """Solve a*x**2 + b*x + c = 0 and print the real solutions.

    For a negative discriminant the ** operator yields complex roots,
    which are excluded from the printed solution count.
    """
    if a == 0:
        # Degenerate case: not a quadratic; previously crashed with
        # ZeroDivisionError on the (2 * a) denominator.
        print('\nNot a quadratic equation (A must be non-zero).')
        return
    delta = (b ** 2 - 4 * a * c)
    x1 = (-b + delta ** (1 / 2)) / (2 * a)
    x2 = (-b - delta ** (1 / 2)) / (2 * a)
    solution = 0
    if type(x1) != complex:
        solution = solution + 1
    if type(x2) != complex:
        solution = solution + 1
    print('\nSolutions: ', solution)
    if solution > 0:
        print('\nX1 = ', x1)
        print('X2 = ', x2)
# Read the three integer coefficients from the user and solve.
a = int(input('Enter the value of A: '))
b = int(input('Enter the value of B: '))
c = int(input('Enter the value of C: '))
quadratic(a, b, c)
|
from assets.models import *
from assets.utils import *
from assets.datasets import *
from atkEnv.keys import * # secret keys stored here, ignored in git commits, *change this to .env file secret keys
from sys import platform
import time, base64
def detect(
    cfg,
    data_cfg,
    weights,
    images,  # input folder
    output,  # output folder
    fourcc='H264',  # output video encoding
    img_size=416,  # resize images for inferencing
    conf_thres=0.5,
    nms_thres=0.5,
    save_txt=False,
    save_images=False,
    backend=False,
    classofinterest='car',
    includetwittergraph=False,  # graph options are set in load and run inference layer
    twitterOptions=None
):
    """Run Darknet/YOLO detection over every image or video in *images*.

    Draws bounding boxes on each frame, tracks per-frame counts of
    *classofinterest* for video inputs (sampled every *interval* frames),
    and either saves the annotated results to *output* (save_images/save_txt)
    or returns them in-memory for a browser front end.

    Returns:
        dict with 'data' (np array for images, encoded video bytes for
        videos), 'ext' (for the mimetype) and, for videos, chart data under
        'class_tracker_data' plus optional twitter graph settings.
    """
    device = torch_utils.select_device(force_cpu=backend)
    if save_images | save_txt:
        if os.path.exists(output) & (save_images | save_txt):
            shutil.rmtree(output)  # delete output folder
        os.makedirs(output)  # make new output folder
    model = Darknet(cfg, (img_size, img_size))
    # Load weights
    if weights.endswith('.pt'):  # pytorch format
        model.load_state_dict(torch.load(weights, map_location=device)['model'])
    else:  # darknet format
        _ = load_darknet_weights(model, weights)
    # Fuse Conv2d + BatchNorm2d layers
    model.fuse()
    # Eval mode
    model.to(device).eval()
    # Set Dataloader
    vid_path, vid_writer = None, None
    dataloader = LoadImages(images, img_size=img_size)
    # Get classes and colors
    names_directory = parse_data_cfg(data_cfg)['names']
    # print(names_directory)
    classes = load_classes(names_directory)
    colors = [[random.randint(0, 255) for _ in range(3)] for _ in range(len(classes))]
    segmented_images = {}  # return object
    np_array_or_byte = None
    classTracker = None
    interval = 1  # # of frames between points on the graph i.e. 30 frames, interval 10 -> 0,10,20,30
    for i, (path, img, im0, vid_cap) in enumerate(dataloader):
        # Lazily allocate the chart arrays once we know the frame count.
        if (dataloader.mode == 'video') & (type(classTracker) == type(None)):
            graphPts = int(dataloader.nframes / interval) + 1
            classTracker = np.zeros(graphPts)
            mslabels = np.zeros(graphPts)  # populate graph labels as inference runs
        t = time.time()
        im_name = Path(path).name
        save_path = str(Path(output) / im_name)
        _, ext = os.path.splitext(im_name)
        ext = 'jpeg' if ext[1:] == 'jpg' else ext[1:]  # exclude the dot
        tmpFile = 'tmp_' + im_name
        # Get detections
        img = torch.from_numpy(img).unsqueeze(0).to(device)
        pred, _ = model(img)
        det = non_max_suppression(pred, conf_thres, nms_thres)[0]
        currentFrame = vid_cap.get(cv2.CAP_PROP_POS_FRAMES)
        # currentMS = vid_cap.get(cv2.CAP_PROP_POS_MSEC)
        if (dataloader.mode == 'video') & (currentFrame % interval == 0.0):
            mslabels[int(currentFrame / interval)] = round(vid_cap.get(cv2.CAP_PROP_POS_MSEC), 2)
            # print('Updating ms {} labels at index: {}'.format(vid_cap.get(cv2.CAP_PROP_POS_MSEC), str(i)))
        if det is not None and len(det) > 0:
            # Rescale boxes from prediction size to true image size
            det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
            # Print results to screen
            print('%gx%g ' % img.shape[2:], end='')  # print image size
            for c in det[:, -1].unique():
                n = (det[:, -1] == c).sum()
                # Record counts of the tracked class for the time-series chart.
                if (dataloader.mode == 'video') & (classes[int(c)] == classofinterest):
                    if (currentFrame % interval == 0.0):
                        classTracker[int(currentFrame / interval)] = n
                print('%g %s ' % (n, classes[int(c)]), end=', ')
            # Draw bounding boxes and labels of detections
            for *xyxy, conf, cls_conf, cls in det:
                if save_txt:
                    with open(save_path + '.txt', 'a+') as file:
                        file.write(('%g ' * 6 + '\n') % (*xyxy, cls, conf))
                # Add bbox to the image
                label = '%s %.2f' % (classes[int(cls)], conf)
                plot_one_box(xyxy, im0, label=label, color=colors[int(cls)])
        print('Done. (%.3fs)' % (time.time() - t))
        # print('Frames Processed: ' + str(frames))
        if dataloader.mode == 'images':
            if save_images:
                cv2.imwrite(save_path, im0)
            else:
                np_array_or_byte = im0
        else:
            if vid_path != save_path:  # start of video, set up videowriter + save path
                vid_path = save_path
                fps = vid_cap.get(cv2.CAP_PROP_FPS)
                width = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH))
                height = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
                resize = False
                # make sure aspect ratio for fourcc is ok
                aspectratio = width / height
                if (aspectratio == 4/3) | (aspectratio == 16/9) | (aspectratio == 1/1):
                    f = fourcc
                else:
                    f = 'mp4v'
                if save_images:
                    vid_writer = cv2.VideoWriter(vid_path, cv2.VideoWriter_fourcc(*f), fps, (width, height))
                else:
                    # https://sibsoft.net/xvideosharing/info_video_dimensions.html
                    # h264/avc1 dimensions should be in multiples of 8 or 16
                    ffmpegConvert = False
                    if (aspectratio == 16/9) & (width > 896):  # resize aspect ratio 16:9
                        width, height = 896, 504
                        print('Resizing video to fit browser: width %s x height %s' % (width, height))
                        resize = True
                    if (aspectratio == 1/1) & (width > 800):  # resize aspect ratio 1:1
                        width, height = 800, 800
                        print('Resizing video to fit browser: width %s x height %s' % (width, height))
                        resize = True
                    if (aspectratio == 4/3) & (width > 960):  # resize aspect ratio 4:3
                        width, height = 960, 720
                        print('Resizing video to fit browser: width %s x height %s' % (width, height))
                        resize = True
                    # For displaying video to browser:
                    # if aspect ratio is fine for h264, force encoding to be h264 (html5 video only supports h264)
                    # otherwise, write video as mp4v then use ffmpeg to -> h264
                    if (aspectratio == 4/3) | (aspectratio == 16/9):
                        vid_writer = cv2.VideoWriter(tmpFile, cv2.VideoWriter_fourcc(*'h264'), fps, (width, height))
                    else:
                        vid_writer = cv2.VideoWriter(tmpFile, cv2.VideoWriter_fourcc(*'mp4v'), fps, (width, height))
                        ffmpegConvert = True
            if resize:
                vid_writer.write(cv2.resize(im0, (width, height)))
            else:
                vid_writer.write(im0)
    if save_images:
        if dataloader.mode == 'video':
            vid_writer.release()
        print('Results saved to %s' % output)
        if platform == 'darwin':  # macos
            os.system('open ' + output + ' ' + save_path)
    else:  # write images to browser
        if dataloader.mode == 'video':
            vid_writer.release()
            # for videos with strange aspect ratios (i.e. shot on iphone), the opencv
            # h264 compression doesn't work. Temporary workaround for now is to use mp4v then
            # convert to h264 with ffmpeg cli.
            if ffmpegConvert:
                tmpfile1 = 'tmp_h264_' + im_name
                os.system('ffmpeg -i {0} -an -vcodec libx264 -crf 17 {1}'.format(tmpFile, tmpfile1))
                os.remove(tmpFile)  # convert temporary video to h264 via ffmpeg
                np_array_or_byte = open(tmpfile1, 'rb').read()
                os.remove(tmpfile1)  # remove temporary video
            else:
                np_array_or_byte = open(tmpFile, 'rb').read()
                os.remove(tmpFile)  # remove temporary video
            segmented_images['data'] = np_array_or_byte
            segmented_images['ext'] = ext
            segmented_images['class_tracker_data'] = {'labels': mslabels.tolist(), 'datasets': [
                {'label': classofinterest, 'borderColor': 'rgb(255, 99, 132)', 'data': classTracker.tolist(),
                 'fill': 'false'}]}
            # add location and time range info into segmented_images to pull twitter data:
            segmented_images['twitter'] = includetwittergraph
            if twitterOptions != None:
                segmented_images['twitterOptions'] = twitterOptions
            else:
                segmented_images['twitter'] = False
            # NOTE(review): auth-key placement reconstructed as unconditional for
            # video results — confirm it should not live inside the twitter branch.
            authkeystr = cons_api_key + ':' + cons_api_secret
            encodedBytes = base64.b64encode(authkeystr.encode("utf-8"))
            encodedStr = str(encodedBytes, "utf-8")
            segmented_images['authKey'] = encodedStr
        else:
            segmented_images['data'] = np_array_or_byte  # image as np array (bgr)
            segmented_images['ext'] = ext  # include extension of data for mimetype
    return segmented_images
|
"""Photo module."""
from __future__ import annotations
import saas.storage.datadir as DataDirectory
import saas.storage.refresh as refresh
from saas.web.url import Url
from abc import ABCMeta
from typing import Type
import uuid
import os
class Photo(metaclass=ABCMeta):
    """Base Photo class.

    A photo is a stored capture of a web page: it couples the captured
    Url with a path inside the data directory and a refresh rate.
    """

    def __init__(
        self,
        url: Url,
        path: 'PhotoPath',
        refresh_rate: Type[refresh.RefreshRate],
        index_filesize: int = None
    ):
        """Create new photo.

        Args:
            url: The photo is taken of given Url
            path: Path to photo in data directory
            refresh_rate: The refresh rate of the photo (hourly, daily, etc.)
            index_filesize: If photo have been stored in index, filesize is
                already stored there. To speed up performance this takes
                priority over filesize in datadir. see self.filesize()
        """
        self.url = url
        self.path = path
        self.refresh_rate = refresh_rate
        self.index_filesize = index_filesize

    def get_raw(self) -> str:
        """Get raw content of photos file in data directory.

        Returns:
            Raw source
            str
        """
        # Context manager guarantees the handle closes even if read() raises.
        with open(self.path.full_path(), 'r') as file:
            return file.read()

    def filename(self) -> str:
        """Get photo filename (derived from the photo's url).

        Returns:
            str
        """
        return self.url.make_filename()

    def directory(self) -> str:
        """Get photo directory (derived from the photo's url).

        Returns:
            str
        """
        return self.url.make_directory()

    def domain(self) -> str:
        """Get domain the photo belongs to.

        Returns:
            str
        """
        return self.url.domain

    def filesize(self) -> int:
        """Get photo filesize.

        Returns:
            Size in bytes
            int
        """
        # 'is not None' rather than truthiness: a cached size of 0 is a
        # legitimate cache hit and must not trigger a datadir stat.
        if self.index_filesize is not None:
            return self.index_filesize
        return self.path.filesize()
class LoadingPhoto(Photo):
    """Loading photo class.

    A loading photo is created once photographer do a
    checkout of a url. This is used to display a webpage
    that is being rendered in the mounted filesystem.
    """

    def save_loading_text(self):
        """Write loading text to photo source."""
        # Context manager closes the handle even if the write fails.
        with open(self.path.full_path(), 'w+') as file:
            file.write('loading')
class Screenshot(Photo):
    """Screenshot photo class."""

    # Plain screenshot: inherits all behavior from Photo unchanged.
    pass
class PhotoPath:
    """Location of a photo inside the data directory, keyed by a uuid."""

    def __init__(
        self,
        datadir: DataDirectory.DataDirectory,
        uuid: str=''
    ):
        """Create new path to a photo.

        Args:
            datadir: Data directory to store photo in
            uuid: If an PhotoPath should represent an existing
                path, use uuid argument, otherwise a new uuid
                will be generated
        """
        self.datadir = datadir
        # Reuse the caller-supplied id when given, otherwise mint a new one.
        self.uuid = uuid if uuid != '' else self.make_uuid()

    def make_uuid(self) -> str:
        """Return a freshly generated unique id string."""
        return str(uuid.uuid4())

    def full_path(self) -> str:
        """Return the absolute location of the photo file in the datadir."""
        return self.datadir.path_for_photo(self)

    def filesize(self) -> int:
        """Return the size of the photo file on disk, in bytes."""
        return os.path.getsize(self.full_path())

    def should_optimize(self) -> bool:
        """Tell whether the data directory asks for photo files to be optimized."""
        return self.datadir.optimize_storage

    def optimize(self):
        """Run the data directory's png optimizer on this photo's file."""
        self.datadir.optimize_file(self.full_path())
|
from __future__ import annotations
from abc import ABC, abstractmethod
from logger.logger import Logger
from config.config import Config
from pathlib import Path
from tensorflow import keras as K
class BaseModel(ABC):
    """Abstract base for Keras models: loads a saved model or builds a new one."""

    def __init__(self):
        Logger.log(f"Initializing {self.__class__.__name__}...")
        # Build from scratch only when no saved model could be loaded.
        if not self._load():
            self._build()

    @abstractmethod
    def train(self):
        raise NotImplementedError

    @abstractmethod
    def predict(self, file: str):
        raise NotImplementedError

    @abstractmethod
    def summary(self):
        raise NotImplementedError

    @abstractmethod
    def _build(self):
        raise NotImplementedError

    @abstractmethod
    def _load(self) -> bool:
        raise NotImplementedError

    def _summary(self, models: dict[str, K.Model]):
        """Log a textual summary and save an architecture diagram per model."""
        for model_name, keras_model in models.items():
            Logger.log(f"\nSummary of {model_name}:")
            keras_model.summary()
            K.utils.plot_model(
                keras_model,
                show_shapes=True,
                to_file=createFolder(self._getModelSavePath()).joinpath(f"{model_name}.png"),
            )

    def _getModelSavePath(self):
        """Directory where model artifacts are written."""
        return Path(Config().environment.output_path, "model")

    def _tensorboardCallback(self):
        """Keras TensorBoard callback logging under the output path."""
        log_path = Path(Config().environment.output_path, "tensorboard")
        return K.callbacks.TensorBoard(log_dir=log_path, histogram_freq=1)
def createFolder(path: Path):
    """Create *path* (including missing parents) if needed and return it."""
    path.mkdir(parents=True, exist_ok=True)
    return path
|
#-------------------------------------------------------------------------------
# Name: Go board recognition project
# Purpose: Deep-learning network testing module
#
# Author: kol
#
# Created: 19.07.2019
# Copyright: (c) kol 2019
# License: MIT
#-------------------------------------------------------------------------------
import sys
import os
from pathlib import Path
import cv2
import caffe
import numpy as np
from fast_rcnn.config import cfg
from fast_rcnn.test import im_detect
from fast_rcnn.nms_wrapper import nms
from gr.board import GrBoard
from gr.utils import img_to_imgtk, resize2
from gr.grdef import *
from gr.ui_extra import *
from gr.net_utils import cv2_show_detections
from PIL import Image, ImageTk
if sys.version_info[0] < 3:
import Tkinter as tk
import tkFileDialog as filedialog
import ttk
else:
import tkinter as tk
from tkinter import filedialog
from tkinter import ttk
CLASSES = ["_back_", "white", "black"]
class GrTestNetGui(object):
    """Tkinter front-end for training and testing a Faster R-CNN (caffe)
    network that detects stones on a Go board image.

    Shows the current board image, buttons to open an image, launch
    training (via a generated batch file) and run detection, plus fields
    for the solver/config/model/weight files and thresholds.
    """

    def __init__(self, root, max_size=500, allow_open=True):
        """Build the whole GUI inside *root*.

        Args:
            root: Tk root window
            max_size: maximum displayed image dimension, in pixels
            allow_open: when True an "Open" button is shown
        """
        self.root = root
        self.allow_open = allow_open
        self.max_size = max_size

        # Set paths
        self.root_path = Path(__file__).parent.resolve()
        self.model_path = self.root_path.joinpath("models")
        self.model_file = 'zf_test.prototxt'
        self.weigth_path = self.root_path.joinpath("out", "gbr_zf", "train")
        self.weigth_file = None
        self.solver_file = 'zf_solver.prototxt'
        self.config_file = 'gbr_rcnn.yml'
        self.net_prob = 0.8
        self.net_iters = 10000

        # Top frames
        self.imgFrame = tk.Frame(self.root)
        self.imgFrame.pack(side=tk.TOP, fill=tk.BOTH, padx=PADX, pady=PADY)
        self.buttonFrame = tk.Frame(self.root, width=max_size + 10, height=70, bd=1, relief=tk.RAISED)
        self.buttonFrame.pack(side=tk.TOP, fill=tk.BOTH, padx=PADX, pady=PADY)
        self.configFrame = tk.Frame(self.root, bd=1, relief=tk.RAISED)
        self.configFrame.pack(side=tk.TOP, fill=tk.BOTH, padx=PADX)
        self.statusFrame = tk.Frame(self.root, width=max_size + 2*PADX, bd=1, relief=tk.SUNKEN)
        self.statusFrame.pack(side=tk.BOTTOM, fill=tk.BOTH, padx=PADX, pady=PADY)

        # Image frame (starts with an empty generated board)
        self.defBoardImg = GrBoard(board_shape=(max_size, max_size)).image
        self.boardImg = self.defBoardImg
        self.boardImgTk = img_to_imgtk(self.boardImg)
        self.boardImgName = None
        _, self.imgPanel, _ = addImagePanel(self.imgFrame, "DLN detection",
                                            [],
                                            self.boardImgTk, self.open_img_callback)

        # Buttons
        if self.allow_open:
            self.openBtn = tk.Button(self.buttonFrame, text="Open",
                                     command=self.open_btn_callback)
            self.openBtn.pack(side=tk.LEFT, padx=PADX, pady=PADX)
        self.trainBtn = tk.Button(self.buttonFrame, text="Train",
                                  command=self.train_callback)
        self.trainBtn.pack(side=tk.LEFT, padx=PADX, pady=PADX)
        self.updateBtn = tk.Button(self.buttonFrame, text="Detect",
                                   command=self.update_callback)
        self.updateBtn.pack(side=tk.LEFT, padx=PADX, pady=PADX)

        # Params
        self.probFrame = tk.Frame(self.configFrame)
        self.probFrame.pack(side=tk.LEFT, padx=PADX, pady=PADY)
        # Solver file
        self.solverVar, self.cbSolver = addField(self.probFrame, "cb", "Solver", 0, 0, self.solver_file)
        self.load_files(self.cbSolver, self.model_path, '*solver.prototxt')
        # Net train config
        self.configVar, self.cbConfig = addField(self.probFrame, "cb", "Config", 1, 0, self.config_file)
        self.load_files(self.cbConfig, self.model_path, '*.yml')
        # Number of iterations
        self.iterVar, self.iterEntry = addField(self.probFrame, "e", "Iterations", 2, 0, self.net_iters)
        # Model file
        self.modelVar, self.cbModel = addField(self.probFrame, "cb", "Model", 0, 2, self.model_file)
        self.load_files(self.cbModel, self.model_path, '*test.prototxt')
        # Weight file
        self.weigthVar, self.cbWeight = addField(self.probFrame, "cb", "Weights", 1, 2, self.weigth_file)
        self.load_files(self.cbWeight, self.weigth_path, '*.caffemodel')
        # Probability
        self.probVar, self.probEntry = addField(self.probFrame, "e", "Threshold", 2, 2, self.net_prob)

        # Status frame
        self.statusInfo = addStatusPanel(self.statusFrame, self.max_size + 10)
        self.statusInfo.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)

    def open_img_callback(self, event):
        """Clicking the image behaves like the Open button."""
        self.open_btn_callback()

    def open_btn_callback(self):
        """Ask for an image file and display it with current detections."""
        fn = filedialog.askopenfilename(title="Select file",
                                        filetypes=(("PNG files", "*.png"), ("JPEG files", "*.jpg"), ("All files", "*.*")))
        # askopenfilename returns '' — or an empty tuple on some platforms —
        # when the dialog is cancelled, so test truthiness rather than != "".
        if fn:
            self.load_image(fn)
            self.statusInfo.set("File loaded {}".format(fn))

    def update_callback(self):
        """Re-run detection on the currently loaded image."""
        if self.boardImgName is not None:
            self.load_image(self.boardImgName)
            self.statusInfo.set("Detections updated")

    def train_callback(self):
        """Generate train.bat and launch Faster R-CNN training in a console."""
        # Set params
        if not self.update_train_params():
            return

        # Get args
        cmd = os.environ['FASTER_RCNN_HOME'] + '\\tools\\train_net.py'
        args = [sys.executable,
                cmd,
                '--solver',
                self.solver_file,
                '--imdb',
                'gbr_train',
                '--iters',
                str(self.net_iters),
                '--cfg',
                self.config_file]

        # Create a simple batch file
        with open("train.bat", "w") as f:
            # The comment line needs its own newline, otherwise '@echo off'
            # was appended to it and never executed.
            f.write(':: This is an DLN training file generated by test_net.py module\n')
            f.write('@echo off\n')
            f.write('set PYTHONPATH=%PYTHONPATH%;.\n')
            f.writelines('%s ' % item for item in args)
            f.write('2>&1 | wtee out\\logs\\train.log\n')
            # no explicit close: the with-block closes the file

        # Run the command
        os.system("start cmd.exe /k train.bat")

    def load_image(self, file_name):
        """Load *file_name*, draw current detections on it and show it."""
        # Load image
        img = cv2.imread(file_name)
        if img is None:
            raise Exception('File not found {}'.format(file_name))

        # Update detections, but only when net params are valid
        if self.update_net_params():
            self.show_detection(img, self.net_prob)

        # Resize the image to fit the display panel
        img2, self.zoom = resize2(img, np.max(self.defBoardImg.shape[:2]), f_upsize=False)

        # Display the image
        self.boardImg = img
        self.boardImgTk = img_to_imgtk(img2)
        self.boardImgName = file_name
        self.imgFrame.pack_propagate(False)
        self.imgPanel.configure(image=self.boardImgTk)

    def load_files(self, cb, path, mask):
        """Fill combo box *cb* with the files in *path* matching *mask*."""
        file_list = []
        g = path.glob(mask)
        for x in g:
            if x.is_file():
                file_list.append(str(x.name))
        cb['values'] = sorted(file_list)

    def show_detection(self, img, det_thresh):
        """Run the network on *img* and draw detections above *det_thresh* in place."""
        cfg.TEST.HAS_RPN = True  # Use RPN for proposals
        cfg.TEST.BBOX_REG = False
        caffe.set_mode_gpu()
        net = caffe.Net(str(self.model_file), str(self.weigth_file), caffe.TEST)

        # Detection
        scores, boxes = im_detect(net, img)

        # Draw results
        NMS_THRESH = 0.3
        colors = (0, (0, 0, 255), (255, 0, 0))  # per-class BGR; background slot unused
        for cls_ind, cls in enumerate(CLASSES[1:]):
            cls_ind += 1  # skip the background class
            cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
            cls_scores = scores[:, cls_ind]
            dets = np.hstack((cls_boxes,
                              cls_scores[:, np.newaxis])).astype(np.float32)
            keep = nms(dets, NMS_THRESH)
            dets = dets[keep, :]
            cv2_show_detections(img, cls, dets, thresh=det_thresh, f_label=False, color=colors[cls_ind])

    def update_train_params(self):
        """Pull training params from the widgets; report errors in the status bar."""
        try:
            self.solver_file = self.get_file(self.model_path, self.solverVar)
            self.config_file = self.get_file(self.model_path, self.configVar)
            self.net_iters = int(self.get_entry(self.iterVar, 1000, 40000))
            return True
        except Exception:
            self.statusInfo.set(str(sys.exc_info()[1]))
            return False

    def update_net_params(self):
        """Pull detection params from the widgets; report errors in the status bar."""
        try:
            self.model_file = self.get_file(self.model_path, self.modelVar)
            self.weigth_file = self.get_file(self.weigth_path, self.weigthVar)
            self.net_prob = self.get_entry(self.probVar, 0.1, 100)
            return True
        except Exception:
            self.statusInfo.set(str(sys.exc_info()[1]))
            return False

    def get_file(self, p, v):
        """Return path *p* joined with the file selected in widget var *v*.

        Raises:
            ValueError: when no file is selected.
        """
        f = v.get()
        if f is None or f == '':
            raise ValueError('File not selected')
        return str(p.joinpath(f))

    def get_entry(self, v, min_, max_):
        """Return widget var *v* as float, validated against [min_, max_].

        Raises:
            ValueError: when the value is out of range or not a number.
        """
        f = float(v.get())
        if f < min_ or f > max_:
            raise ValueError('Value not in range {}, {}'.format(min_, max_))
        return f
def main():
    """Build the Tk window, attach the GUI and run the event loop."""
    # Construct interface
    window = tk.Tk()
    window.title("View annotations")  # fixed typo: was "annotaitons"
    gui = GrTestNetGui(window)

    # Main loop
    window.mainloop()


if __name__ == '__main__':
    main()
|
import math
from time import sleep
# Read an angle in degrees and print its sine, cosine and tangent.
print('='*30)
print(' ')
# float() also parses plain integers, so decimal angles are now accepted too.
ang = float(input('Digite o valor do ângulo em graus: '))
angr = math.radians(ang)  # math trig functions expect radians
print('Calculando...')
sleep(0.5)  # small pause purely for presentation
# :g keeps whole-number input printing as before (30, not 30.0)
print(f'O seno de {ang:g}º é igual a: {math.sin(angr):.2f}\nO cosseno é igual a: {math.cos(angr):.2f}\nA tangente é igual a: {math.tan(angr):.2f}')
print(' ')
print('='*30)
|
#!/usr/bin/python3
""" testbed for temperature, humidity, pressure and gas sensors """
# MIT License
#
# Copyright (c) 2019 Dave Wilson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import time
from si7021 import Si7021
import Adafruit_DHT
import bme680
import board
import busio
import adafruit_mpl115a2
# Import Adafruit IO REST client.
from Adafruit_IO import Client, RequestError, Feed
# Import the device driver stuff
from smbus import SMBus
# Set to your Adafruit IO key.
# Remember, your key is a secret,
# so make sure not to publish it when you publish this code!
ADAFRUIT_IO_KEY = 'xxxxx'  # placeholder; keep the real key out of version control
# Set to your Adafruit IO username.
# (go to https://accounts.adafruit.com to find your username)
ADAFRUIT_IO_USERNAME = 'xxxxx'
# Create an instance of the REST client.
AIO = Client(ADAFRUIT_IO_USERNAME, ADAFRUIT_IO_KEY)
# Access or create the data feeds to adafruit.io


def get_or_create_feed(name):
    """Return the Adafruit IO feed called *name*, creating it on first use."""
    try:
        return AIO.feeds(name)
    except RequestError:  # Doesn't exist, create a new feed
        return AIO.create_feed(Feed(name=name))


# One feed per sensor reading (replaces ten copies of the same try/except).
TEMPERATURE_SI7021_FEED = get_or_create_feed("testsi7021temperature")
HUMIDITY_SI7021_FEED = get_or_create_feed("testsi7021humidity")
TEMPERATURE_DHT22_FEED = get_or_create_feed("testdht22temperature")
HUMIDITY_DHT22_FEED = get_or_create_feed("testdht22humidity")
TEMPERATURE_MPL115A2_FEED = get_or_create_feed("testmpl115a2temperature")
PRESSURE_MPL115A2_FEED = get_or_create_feed("testmpl115a2pressure")
TEMPERATURE_BME680_FEED = get_or_create_feed("testbme680temperature")
PRESSURE_BME680_FEED = get_or_create_feed("testbme680pressure")
HUMIDITY_BME680_FEED = get_or_create_feed("testbme680humidity")
GAS_BME680_FEED = get_or_create_feed("testbme680gas")
# Access the Si7021 sensor (on I2C bus 1)
SI7021_SENSOR = Si7021(SMBus(1))
# Access the DHT22 device driver using pin 4
DHT_SENSOR = Adafruit_DHT.DHT22
DHT_PIN = 4
# Access the MPL115A2 device driver using I2C bus
I2C = busio.I2C(board.SCL, board.SDA)
MPL_SENSOR = adafruit_mpl115a2.MPL115A2(I2C)
# Access and setup the BME680 sensor: try the primary I2C address first,
# fall back to the secondary address on IOError.
try:
    BME680_SENSOR = bme680.BME680(bme680.I2C_ADDR_PRIMARY)
except IOError:
    BME680_SENSOR = bme680.BME680(bme680.I2C_ADDR_SECONDARY)
# Oversampling and filter settings trade noise against measurement time.
BME680_SENSOR.set_humidity_oversample(bme680.OS_2X)
BME680_SENSOR.set_pressure_oversample(bme680.OS_4X)
BME680_SENSOR.set_temperature_oversample(bme680.OS_8X)
BME680_SENSOR.set_filter(bme680.FILTER_SIZE_3)
# Enable gas measurement with heater profile 0 (temperature 320,
# duration 150 — bme680 library units).
BME680_SENSOR.set_gas_status(bme680.ENABLE_GAS_MEAS)
BME680_SENSOR.set_gas_heater_temperature(320)
BME680_SENSOR.set_gas_heater_duration(150)
BME680_SENSOR.select_gas_heater_profile(0)
def celsius_to_fahrenheit(celsius):
    """Convert degrees Celsius to degrees Fahrenheit."""
    return (celsius * (9.0/5.0)) + 32.0
def pressure_to_kpa(pressure):
    """Convert a pressure reading in hectopascals (hPa) to kilopascals (kPa)."""
    return pressure / 10.0
# send temperature and humidity to adafruit.io
# One full cycle is 45s + 3 * 5s = ~60 seconds, matching the comment below.
while True:
    # collect and post data once per minute
    time.sleep(45.0)
    # SI7021 sensor data
    HUMIDITY, CELSIUS = SI7021_SENSOR.read()
    VALUE = celsius_to_fahrenheit(CELSIUS)
    AIO.send_data(TEMPERATURE_SI7021_FEED.key, VALUE)
    AIO.send_data(HUMIDITY_SI7021_FEED.key, HUMIDITY)
    time.sleep(5.0) # delay between posts
    # DHT22 sensor data
    # DHT22 reads can fail (read_retry returns None, None); skip the post
    # rather than send bogus data.
    HUMIDITY, CELSIUS = Adafruit_DHT.read_retry(DHT_SENSOR, DHT_PIN)
    if HUMIDITY is not None and CELSIUS is not None:
        VALUE = celsius_to_fahrenheit(CELSIUS)
        AIO.send_data(TEMPERATURE_DHT22_FEED.key, VALUE)
        AIO.send_data(HUMIDITY_DHT22_FEED.key, HUMIDITY)
    time.sleep(5.0) # delay between posts
    # MPL115A2 sensor data
    CELSIUS = MPL_SENSOR.temperature
    VALUE = celsius_to_fahrenheit(CELSIUS)
    AIO.send_data(TEMPERATURE_MPL115A2_FEED.key, VALUE)
    # assumes MPL_SENSOR.pressure is in hPa so /10 yields kPa -- TODO confirm
    PRESSURE = MPL_SENSOR.pressure
    VALUE = pressure_to_kpa(PRESSURE)
    AIO.send_data(PRESSURE_MPL115A2_FEED.key, VALUE)
    time.sleep(5.0) # delay between posts
    # BME680 sensor data
    if BME680_SENSOR.get_sensor_data():
        CELSIUS = BME680_SENSOR.data.temperature
        VALUE = celsius_to_fahrenheit(CELSIUS)
        AIO.send_data(TEMPERATURE_BME680_FEED.key, VALUE)
        PRESSURE = BME680_SENSOR.data.pressure
        VALUE = pressure_to_kpa(PRESSURE)
        AIO.send_data(PRESSURE_BME680_FEED.key, VALUE)
        HUMIDITY = BME680_SENSOR.data.humidity
        AIO.send_data(HUMIDITY_BME680_FEED.key, HUMIDITY)
        # gas resistance is only meaningful once the heater has stabilised
        if BME680_SENSOR.data.heat_stable:
            GAS = BME680_SENSOR.data.gas_resistance
            AIO.send_data(GAS_BME680_FEED.key, GAS)
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetEntitlementResult',
'AwaitableGetEntitlementResult',
'get_entitlement',
'get_entitlement_output',
]
@pulumi.output_type
class GetEntitlementResult:
    """
    Validated result bag for the getEntitlement invoke.

    Generated by the Pulumi SDK Generator -- regenerate instead of editing by
    hand. Each constructor argument is type-checked and stored as a pulumi
    output property; the read-only properties below expose them.
    """
    def __init__(__self__, association_info=None, commitment_settings=None, create_time=None, name=None, offer=None, parameters=None, provisioned_service=None, provisioning_state=None, purchase_order_id=None, suspension_reasons=None, trial_settings=None, update_time=None):
        if association_info and not isinstance(association_info, dict):
            raise TypeError("Expected argument 'association_info' to be a dict")
        pulumi.set(__self__, "association_info", association_info)
        if commitment_settings and not isinstance(commitment_settings, dict):
            raise TypeError("Expected argument 'commitment_settings' to be a dict")
        pulumi.set(__self__, "commitment_settings", commitment_settings)
        if create_time and not isinstance(create_time, str):
            raise TypeError("Expected argument 'create_time' to be a str")
        pulumi.set(__self__, "create_time", create_time)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if offer and not isinstance(offer, str):
            raise TypeError("Expected argument 'offer' to be a str")
        pulumi.set(__self__, "offer", offer)
        if parameters and not isinstance(parameters, list):
            raise TypeError("Expected argument 'parameters' to be a list")
        pulumi.set(__self__, "parameters", parameters)
        if provisioned_service and not isinstance(provisioned_service, dict):
            raise TypeError("Expected argument 'provisioned_service' to be a dict")
        pulumi.set(__self__, "provisioned_service", provisioned_service)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if purchase_order_id and not isinstance(purchase_order_id, str):
            raise TypeError("Expected argument 'purchase_order_id' to be a str")
        pulumi.set(__self__, "purchase_order_id", purchase_order_id)
        if suspension_reasons and not isinstance(suspension_reasons, list):
            raise TypeError("Expected argument 'suspension_reasons' to be a list")
        pulumi.set(__self__, "suspension_reasons", suspension_reasons)
        if trial_settings and not isinstance(trial_settings, dict):
            raise TypeError("Expected argument 'trial_settings' to be a dict")
        pulumi.set(__self__, "trial_settings", trial_settings)
        if update_time and not isinstance(update_time, str):
            raise TypeError("Expected argument 'update_time' to be a str")
        pulumi.set(__self__, "update_time", update_time)
    @property
    @pulumi.getter(name="associationInfo")
    def association_info(self) -> 'outputs.GoogleCloudChannelV1AssociationInfoResponse':
        """
        Association information to other entitlements.
        """
        return pulumi.get(self, "association_info")
    @property
    @pulumi.getter(name="commitmentSettings")
    def commitment_settings(self) -> 'outputs.GoogleCloudChannelV1CommitmentSettingsResponse':
        """
        Commitment settings for a commitment-based Offer. Required for commitment based offers.
        """
        return pulumi.get(self, "commitment_settings")
    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> str:
        """
        The time at which the entitlement is created.
        """
        return pulumi.get(self, "create_time")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name of an entitlement in the form: accounts/{account_id}/customers/{customer_id}/entitlements/{entitlement_id}.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def offer(self) -> str:
        """
        The offer resource name for which the entitlement is to be created. Takes the form: accounts/{account_id}/offers/{offer_id}.
        """
        return pulumi.get(self, "offer")
    @property
    @pulumi.getter
    def parameters(self) -> Sequence['outputs.GoogleCloudChannelV1ParameterResponse']:
        """
        Extended entitlement parameters. When creating an entitlement, valid parameter names and values are defined in the Offer.parameter_definitions. The response may include the following output-only Parameters: - assigned_units: The number of licenses assigned to users. - max_units: The maximum assignable units for a flexible offer. - num_units: The total commitment for commitment-based offers.
        """
        return pulumi.get(self, "parameters")
    @property
    @pulumi.getter(name="provisionedService")
    def provisioned_service(self) -> 'outputs.GoogleCloudChannelV1ProvisionedServiceResponse':
        """
        Service provisioning details for the entitlement.
        """
        return pulumi.get(self, "provisioned_service")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        Current provisioning state of the entitlement.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter(name="purchaseOrderId")
    def purchase_order_id(self) -> str:
        """
        Optional. This purchase order (PO) information is for resellers to use for their company tracking usage. If a purchaseOrderId value is given, it appears in the API responses and shows up in the invoice. The property accepts up to 80 plain text characters.
        """
        return pulumi.get(self, "purchase_order_id")
    @property
    @pulumi.getter(name="suspensionReasons")
    def suspension_reasons(self) -> Sequence[str]:
        """
        Enumerable of all current suspension reasons for an entitlement.
        """
        return pulumi.get(self, "suspension_reasons")
    @property
    @pulumi.getter(name="trialSettings")
    def trial_settings(self) -> 'outputs.GoogleCloudChannelV1TrialSettingsResponse':
        """
        Settings for trial offers.
        """
        return pulumi.get(self, "trial_settings")
    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> str:
        """
        The time at which the entitlement is updated.
        """
        return pulumi.get(self, "update_time")
class AwaitableGetEntitlementResult(GetEntitlementResult):
    """Awaitable wrapper so the invoke result can be used with `await`
    (generated code -- regenerate instead of editing by hand)."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The dead `if False: yield self` turns this method into a generator,
        # which is what makes the object awaitable; it never actually yields.
        if False:
            yield self
        return GetEntitlementResult(
            association_info=self.association_info,
            commitment_settings=self.commitment_settings,
            create_time=self.create_time,
            name=self.name,
            offer=self.offer,
            parameters=self.parameters,
            provisioned_service=self.provisioned_service,
            provisioning_state=self.provisioning_state,
            purchase_order_id=self.purchase_order_id,
            suspension_reasons=self.suspension_reasons,
            trial_settings=self.trial_settings,
            update_time=self.update_time)
def get_entitlement(account_id: Optional[str] = None,
                    customer_id: Optional[str] = None,
                    entitlement_id: Optional[str] = None,
                    opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetEntitlementResult:
    """
    Returns the requested Entitlement resource. Possible error codes: * PERMISSION_DENIED: The customer doesn't belong to the reseller. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The customer entitlement was not found. Return value: The requested Entitlement resource.
    """
    # Map snake_case Python arguments to the camelCase keys the provider expects.
    __args__ = dict()
    __args__['accountId'] = account_id
    __args__['customerId'] = customer_id
    __args__['entitlementId'] = entitlement_id
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to this SDK's own version when the caller did not pin one.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('google-native:cloudchannel/v1:getEntitlement', __args__, opts=opts, typ=GetEntitlementResult).value
    return AwaitableGetEntitlementResult(
        association_info=__ret__.association_info,
        commitment_settings=__ret__.commitment_settings,
        create_time=__ret__.create_time,
        name=__ret__.name,
        offer=__ret__.offer,
        parameters=__ret__.parameters,
        provisioned_service=__ret__.provisioned_service,
        provisioning_state=__ret__.provisioning_state,
        purchase_order_id=__ret__.purchase_order_id,
        suspension_reasons=__ret__.suspension_reasons,
        trial_settings=__ret__.trial_settings,
        update_time=__ret__.update_time)
@_utilities.lift_output_func(get_entitlement)
def get_entitlement_output(account_id: Optional[pulumi.Input[str]] = None,
                           customer_id: Optional[pulumi.Input[str]] = None,
                           entitlement_id: Optional[pulumi.Input[str]] = None,
                           opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetEntitlementResult]:
    """
    Returns the requested Entitlement resource. Possible error codes: * PERMISSION_DENIED: The customer doesn't belong to the reseller. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The customer entitlement was not found. Return value: The requested Entitlement resource.
    """
    # Body is intentionally `...`: lift_output_func wraps get_entitlement so
    # this variant accepts and returns pulumi Outputs.
    ...
|
from django.contrib.auth import logout
from django.contrib.auth.mixins import LoginRequiredMixin
from django.db.models import Q
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView, RedirectView
from .models import Post, Category
from .forms import PostForm
from profiles.models import PostInstance
class PostListView(LoginRequiredMixin, ListView):
    """Home page: featured categories, six posts per featured category, and
    optional title search via the ?q= query parameter."""
    model = Post
    context_object_name = 'posts'
    template_name = 'post_home.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        featured = (Q(name__iexact='Travel') |
                    Q(name__iexact='Adventure') |
                    Q(name__iexact='Learn New Things'))
        context['categories'] = Category.objects.filter(featured).distinct()
        # Up to six posts per featured category.
        for key, category_name in (('travels', 'Travel'),
                                   ('adventures', 'Adventure'),
                                   ('lnts', 'Learn New Things')):
            context[key] = Post.objects.filter(category__name__iexact=category_name)[:6]
        search_term = self.request.GET.get('q')
        if search_term:
            context['qs'] = Post.objects.filter(title__icontains=search_term)
        return context
class PostCompleteListView(LoginRequiredMixin, ListView):
    """List the requesting user's completed posts."""
    model = Post
    template_name = 'post_complete.html'
    def get_queryset(self):
        # Only this user's posts that are flagged completed.
        return Post.objects.filter(completed=True, user=self.request.user)
class PostDetailView(DetailView):
    """Read-only detail page for a single post.

    NOTE(review): context_object_name is 'posts' (plural) but refers to one
    Post instance -- the template presumably relies on this name.
    """
    model = Post
    context_object_name = 'posts'
    template_name = 'post_detail.html'
class PostCompleteDetailView(LoginRequiredMixin, UpdateView):
    """Edit form for a post, including its 'completed' flag.

    NOTE(review): named *DetailView* but is actually an UpdateView.
    """
    model = Post
    fields = ['title', 'description', 'anecdote', 'category', 'completed']
    template_name = 'post_edit.html'
class PostCreate(LoginRequiredMixin, CreateView):
    """Create a new post owned by the requesting user."""
    form_class = PostForm
    template_name = 'post_create.html'
    success_url = reverse_lazy('posts:post-home')
    def form_valid(self, form):
        # save(commit=False) returns form.instance, so setting .user here is
        # picked up by the final save performed in super().form_valid().
        instance = form.save(commit=False)
        instance.user = self.request.user
        return super(PostCreate, self).form_valid(form)
class PostEdit(LoginRequiredMixin, UpdateView):
    """Edit an existing post (title, description, category only)."""
    model = Post
    template_name = 'post_edit.html'
    fields = ['title', 'description', 'category']
class PostDelete(LoginRequiredMixin, DeleteView):
    """Confirm-and-delete page for a post; redirects home afterwards."""
    model = Post
    template_name = 'post_delete.html'
    success_url = reverse_lazy('posts:post-home')
class PostCategoryListView(ListView):
    """List all posts belonging to the category named in the ?q= parameter."""
    model = Post
    context_object_name = 'categories'
    template_name = 'post_all.html'
    def get_queryset(self):
        # NOTE(review): when ?q= is absent, cat is None and the filter becomes
        # name__iexact=None -- confirm this yields the intended (empty) result.
        cat = self.request.GET.get('q')
        return Post.objects.filter(category__name__iexact=cat)
class LogOutView(RedirectView):
    """Log the current user out, then redirect to the post home page.

    Fix: ``logout`` was called but never imported, so every request to this
    view raised NameError; it is now imported from django.contrib.auth.
    """
    url = reverse_lazy('posts:post-home')

    def get(self, request, *args, **kwargs):
        # logout() flushes the session data for the current request.
        logout(request)
        return super(LogOutView, self).get(request, *args, **kwargs)
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Niklas Rosenstein
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from typing import TextIO
import codecs
import io
import sys
class EncodedStreamReader(io.BufferedIOBase):
  """
  A readable file-like object that wraps a #TextIO object and converts it to
  a binary stream encoded with the specified *encoding* (or the default system
  encoding).

  Example:

  ```py
  import io
  from nr.utils.io.readers import EncodedStreamReader
  fp = EncodedStreamReader(io.StringIO('äöü'), 'utf-8')
  assert fp.read(1) == b'\xc3'
  assert fp.read(1) == b'\xa4'
  ```
  """

  def __init__(self, stream: TextIO, encoding: str = None, errors: str = 'strict'):
    """
    :param stream: The text stream to wrap.
    :param encoding: Target byte encoding. Defaults to the stream's own
      encoding, falling back to the system default encoding.
    :param errors: Encoding error handling mode (see #codecs).
    """
    encoding = encoding or getattr(stream, 'encoding', None) or sys.getdefaultencoding()
    self._stream = stream
    self._encoder = codecs.getencoder(encoding)
    self._errors = errors
    # Encoded bytes produced but not yet consumed by #read() (e.g. the tail
    # of a multi-byte character). Always a bytes object.
    self._buffer = b''
    self.encoding = encoding

  def fileno(self):
    return self._stream.fileno()

  def read(self, n: int = None) -> bytes:
    """
    Read up to *n* encoded bytes from the wrapped stream. If *n* is None or
    negative, read and encode everything that remains.

    Fixes over the previous revision: the leftover buffer is reset to ``b''``
    (it was reset to the *str* ``''``, silently changing the attribute's
    type), and a negative *n* now means "read all" per the usual file-object
    convention instead of returning ``b''``.
    """
    if n is not None and n < 0:
      n = None  # File-like convention: a negative size means "read everything".
    if self._buffer and n is None:
      # Flush the whole leftover buffer; the rest of the stream is appended below.
      result, self._buffer = self._buffer, b''
    elif self._buffer:
      # Serve as much as possible from the leftover buffer first.
      result, self._buffer = self._buffer[:n], self._buffer[n:]
      n -= len(result)
    else:
      result = b''
    if n is None or n > 0:
      assert not self._buffer
      data = self._encoder(self._stream.read(n), self._errors)[0]
      if n is not None and len(data) > n:
        # Encoding a multi-byte character may overshoot the request; keep the
        # excess bytes for the next call.
        data, self._buffer = data[:n], data[n:]
      result += data
    return result
|
"""Customizing a legend box"""
from vedo import *
s = Sphere()
c = Cube().x(2)
e = Ellipsoid().x(4)
h = Hyperboloid().x(6).legend('The description for\nthis one is quite long')
lb = LegendBox([s,c,e,h], width=0.3, height=0.4).font(5)
show(s, c, e, h, lb, __doc__,
axes=1, bg='ly', bg2='w', size=(1400,800), viewup='z')
|
# Author: Nam Nguyen Hoai
# coding: utf-8
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import config
import models
_SESSION_FACTORY = None
_ENGINE_FACTORY = None
def setup_database_engine():
    """Initialise the module-level engine and session singletons from
    config.DATABASE_URI.

    NOTE(review): despite its name, _SESSION_FACTORY holds a *Session
    instance* (``sessionmaker(...)()`` is called), not a factory -- every
    caller of get_session() shares the same session.
    """
    global _SESSION_FACTORY, _ENGINE_FACTORY
    engine = create_engine(config.DATABASE_URI, echo=True)  # echo=True logs all SQL
    _ENGINE_FACTORY = engine
    _SESSION_FACTORY = sessionmaker(bind=engine)()
def get_engine():
    """Return the shared SQLAlchemy engine, creating it lazily on first use."""
    if not _ENGINE_FACTORY:
        setup_database_engine()
    return _ENGINE_FACTORY
def get_session():
    """Return the shared session (see setup_database_engine), creating the
    engine/session pair lazily on first use."""
    if not _SESSION_FACTORY:
        setup_database_engine()
    return _SESSION_FACTORY
def get_session_2():
    """Create a brand-new session bound to the secondary database
    (config.DATABASE_URI_2). Unlike get_session(), nothing is cached -- a new
    engine and session are created on every call."""
    engine = create_engine(config.DATABASE_URI_2, echo=True)
    return sessionmaker(bind=engine)()
def get_records(session, name=''):
    """Return every TestTable row.

    NOTE(review): the *name* parameter is currently ignored; the commented
    lines below suggest name-prefix filtering was intended but never enabled.
    """
    # name = '{}_'.format(name)+'%'
    return session.query(models.TestTable).all()
    # filter(
    #     models.TestTable.name.like('name_%')).from_self()
def delete_records(session, name=''):
    """Bulk-delete ALL TestTable rows.

    NOTE(review): *name* is ignored -- every row is deleted -- and the delete
    is not committed here; the caller must commit the session.
    """
    session.query(models.TestTable).delete(synchronize_session=False)
|
"""Add administrator level to admin
Revision ID: 26745016c3ce
Revises: 3a731ce5846e
Create Date: 2014-04-15 17:55:26.716534
"""
# revision identifiers, used by Alembic.
revision = '26745016c3ce'
down_revision = '3a731ce5846e'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Promote the 'admin' and 'testuser' accounts to the administrator role."""
    op.execute("UPDATE Users SET role = 'administrator' WHERE login IN ('admin', 'testuser')")
def downgrade():
    """Intentionally a no-op: the previous role values are not recorded, so
    this migration cannot be automatically reversed."""
    pass
|
__author__ = 'mpetyx'
# NOTE(review): patterns/include/url, admin and TemplateView are imported but
# unused in this module -- confirm before removing.
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import TemplateView
from tastypie.api import Api
from .Resources import PhotoResource
# Register the photo endpoint under the 'media' API namespace and expose the
# generated routes as this module's urlpatterns.
api = Api(api_name='media')
api.register(PhotoResource())
urlpatterns = api.urls
with open("../in/input15.txt", "r") as file:
data = [int(y) for y in [x.strip() for x in file.read().splitlines()][0].split(',')]
occurences = {data[i-1]:[i] for i in range(1, len(data)+1)}
current = data[-1]
for i in range(len(data)+1,30000001):
if len(occurences[current]) == 1:
current = 0
if len(occurences[0]) == 2:
occurences[0].remove(occurences[0][0])
occurences[0].append(i)
else:
new = occurences[current][1] - occurences[current][0]
if new in occurences:
if len(occurences[new]) == 2:
occurences[new].remove(occurences[new][0])
occurences[new].append(i)
else:
occurences[new] = [i]
current = new
if i == 2020:
print('Part 1: {}'.format(current))
print('Part 2: {}'.format(current)) |
# -*- coding: utf-8 -*-
"""
Fixtures for all abagen tests
"""
import os
import pytest
from abagen.datasets import (fetch_desikan_killiany,
fetch_microarray,
fetch_raw_mri,
fetch_rnaseq)
@pytest.fixture(scope='session')
def datadir(tmp_path_factory):
    """Data directory for fetched test files: $ABAGEN_DATA when set (allows
    reusing pre-downloaded data), otherwise a session-scoped temp directory."""
    dd = os.environ.get('ABAGEN_DATA')
    if dd is None:
        dd = str(tmp_path_factory.mktemp('abagen-data'))
    return dd
@pytest.fixture(scope='session')
def testfiles(datadir):
    """Microarray data for donors 12876 and 15496, fetched once per session."""
    return fetch_microarray(data_dir=datadir, donors=['12876', '15496'],
                            n_proc=2)
@pytest.fixture(scope='session')
def rnafiles(datadir):
    """RNA-seq data for donor 9861, fetched once per session."""
    return fetch_rnaseq(data_dir=datadir, donors=['9861'])
@pytest.fixture(scope='session')
def rawmri(datadir):
    """Raw MRI data for donors 12876 and 15496, fetched once per session."""
    return fetch_raw_mri(data_dir=datadir, donors=['12876', '15496'])
@pytest.fixture(scope='session')
def atlas():
    """Desikan-Killiany atlas, volumetric (non-surface) variant."""
    return fetch_desikan_killiany(native=False, surface=False)
@pytest.fixture(scope='session')
def surface():
    """Desikan-Killiany atlas, surface variant."""
    return fetch_desikan_killiany(native=False, surface=True)
|
#!/usr/bin/env python
#pythonlib
import os
import sys
import re
#appion
from appionlib import appionLoop2
from appionlib import apFindEM
from appionlib import apImage
from appionlib import apDisplay
from appionlib import apDatabase
from appionlib import apPeaks
from appionlib import apParticle
from appionlib import apDefocalPairs
#legacy
#import apViewIt
#import selexonFunctions as sf1
class AlignDefocLoop(appionLoop2.AppionLoop):
	"""Appion loop that, for each image, finds its defocal-pair sibling,
	measures the shift between the two, and stores the result in the
	database."""
	#======================
	def processImage(self, imgdata):
		# Locate the defocal sibling and the shift peak for this image.
		self.sibling, self.shiftpeak = apDefocalPairs.getShiftFromImage(imgdata, self.params['sessionname'])
	#======================
	def setProcessingDirName(self):
		self.processdirname = "defocalpairs"
	#======================
	def commitToDatabase(self, imgdata):
		# Persist the shift computed in processImage for this image pair.
		apDefocalPairs.insertShift(imgdata, self.sibling, self.shiftpeak)
	#======================
	def checkConflicts(self):
		# No conflicting option combinations to validate for this loop.
		return
	#======================
	def setupParserOptions(self):
		self.parser.add_option("--lp", dest="lp", type="int", default=30,
			help="Low pass filter value, default=30", metavar="#")
# Run the loop over all session images when invoked as a script.
if __name__ == '__main__':
	imgLoop = AlignDefocLoop()
	imgLoop.run()
|
from pymesh.TestCase import TestCase
import pymesh
import numpy as np
class CurvatureTest(TestCase):
    """Check vertex curvature attributes on icospheres of known radius."""

    def test_balls(self):
        # For a sphere of radius r: Gaussian curvature = 1/r^2, mean = 1/r.
        radii = (1.0, 2.0, 3.0)
        centers = ([0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0])
        balls = [pymesh.generate_icosphere(r, c, 4)
                 for r, c in zip(radii, centers)]
        for ball in balls:
            ball.add_attribute("vertex_gaussian_curvature")
        for ball in balls:
            ball.add_attribute("vertex_mean_curvature")
        gaussian = [b.get_attribute("vertex_gaussian_curvature") for b in balls]
        mean = [b.get_attribute("vertex_mean_curvature") for b in balls]
        for radius, curv in zip(radii, gaussian):
            self.assertAlmostEqual(1.0 / radius ** 2, np.amin(curv), 2)
        for radius, curv in zip(radii, mean):
            self.assertAlmostEqual(1.0 / radius, np.amin(curv), 2)
|
import pygame
from game_objects.player import Player
from utils import settings
class HeadsUpDisplay:
    """User Interface Heads up Display.

    Draws the player's stat bars, experience, level and the currently
    selected weapon/magic to the game screen.
    """
    def __init__(self) -> None:
        # General Info:
        self.display_surface = pygame.display.get_surface()
        self.font = pygame.font.Font(settings.UI_FONT, settings.UI_FONT_SIZE)
        # Bar Setup
        self.health_bar_rect = pygame.Rect(10,10,settings.HEALTH_BAR_WIDTH,settings.BAR_HEIGHT)
        self.energy_bar_rect = pygame.Rect(10,34,settings.ENERGY_BAR_WIDTH,settings.BAR_HEIGHT)
        # Weapon Graphics (pre-loaded once so display() does no disk I/O)
        self.weapon_graphics = []
        self.magic_graphics = []
        self.__build_graphics_from_data_dict(settings.weapon_data, self.weapon_graphics)
        self.__build_graphics_from_data_dict(settings.magic_data, self.magic_graphics)
    def __build_graphics_from_data_dict(self, data:dict, append_to_list:list):
        """* Selects each item's graphic path from the item data dict.
        * Instantiates a pygame image (with alpha) from the path.
        * Stores the loaded image in append_to_list.
        Args:
            data (dict): The item data dictionary from the settings file
            append_to_list (list): list to append the loaded images to
        """
        for item in data.values():
            path = item['graphic']
            img = pygame.image.load(path).convert_alpha()
            append_to_list.append(img)
    def __show_bar(self, cur_amount, max_amount, bg_rect:pygame.Rect, color):
        """Draw a Rect Bar to display stats
        Args:
            cur_amount (int): current stat amount
            max_amount (int): max stat amount
            bg_rect (pygame.Rect): instantiated background rect
            color (str): color code
        """
        # Draw background
        pygame.draw.rect(self.display_surface, settings.UI_BG_COLOR, bg_rect)
        # convert stat to pixels
        ratio = cur_amount / max_amount
        cur_width = bg_rect.width * ratio
        cur_rect = bg_rect.copy()
        cur_rect.width = cur_width
        # Draw bar
        pygame.draw.rect(self.display_surface, color, cur_rect)
        # Draw border
        pygame.draw.rect(self.display_surface, settings.UI_BORDER_COLOR, bg_rect, 3)
    def __show_exp(self, exp):
        """Draw experience points to game screen (bottom-right corner)
        Args:
            exp (int): experience points
        """
        exp = str(int(exp))
        exp_desc = f"EXP: {exp}"
        # build display
        text_surf = self.font.render(exp_desc, False, settings.TEXT_COLOR)
        x = self.display_surface.get_size()[0] - 20
        y = self.display_surface.get_size()[1] - 20
        text_rect = text_surf.get_rect(bottomright=(x,y))
        # display on game screen
        # background
        pygame.draw.rect(self.display_surface,settings.UI_BG_COLOR, text_rect.inflate(20,20))
        # contents
        self.display_surface.blit(text_surf, text_rect)
        # border
        pygame.draw.rect(self.display_surface,settings.UI_BORDER_COLOR, text_rect.inflate(20,20),3)
    def __selection_box(self, left, top, has_switched:bool):
        """Draw game item selection box
        Args:
            left (int): left position
            top (int): top position
            has_switched (bool): if player is switching (highlights the border)
        Returns:
            pygame.Rect: Rect of the selection box created
        """
        bg_rect = pygame.Rect(left, top, settings.ITEM_BOX_SIZE, settings.ITEM_BOX_SIZE)
        pygame.draw.rect(self.display_surface, settings.UI_BG_COLOR, bg_rect)
        if has_switched:
            pygame.draw.rect(self.display_surface, settings.UI_BORDER_COLOR_ACTIVE, bg_rect, 3)
        else:
            pygame.draw.rect(self.display_surface, settings.UI_BORDER_COLOR, bg_rect, 3)
        return bg_rect
    def __item_overlay(self, left:int, top:int, item_index:int, item_graphics:list ,has_switched:bool):
        """Draw the currently selected item inside a selection box
        Args:
            left (int): left position
            top (int): top position
            item_index (int): Current selected item index
            item_graphics (list): Item Graphic List
            has_switched (bool): whether the player can currently switch items
        """
        bg_rect = self.__selection_box(left,top, has_switched)
        surf = item_graphics[item_index]
        rect = surf.get_rect(center = bg_rect.center)
        self.display_surface.blit(surf, rect)
    def __show_level(self, level_nbr:int):
        """Draw the current level number top-center of the game screen
        Args:
            level_nbr (int): current level number
        """
        desc = f'Level: {level_nbr}'
        # build display
        text_surf = self.font.render(desc, False, settings.TEXT_COLOR)
        x = self.display_surface.get_size()[0] * 0.5
        y = self.display_surface.get_size()[1] *0.05
        text_rect = text_surf.get_rect(center=(x,y))
        # display on game screen
        # background
        pygame.draw.rect(self.display_surface,settings.UI_BG_COLOR, text_rect.inflate(20,20))
        # contents
        self.display_surface.blit(text_surf, text_rect)
        # border
        pygame.draw.rect(self.display_surface,settings.UI_BORDER_COLOR, text_rect.inflate(20,20),3)
    def display(self, player:Player, level_nbr:int):
        """Draw UI Items to the Game Screen
        Args:
            player (Player): the active Player object
            level_nbr (int): current level number
        """
        self.__show_bar(player.health,player.stats['health'],self.health_bar_rect, settings.HEALTH_COLOR)
        self.__show_bar(player.energy,player.stats['energy'],self.energy_bar_rect, settings.ENERGY_COLOR)
        self.__show_exp(player.exp)
        self.__show_level(level_nbr)
        # Weapon Overlay
        self.__item_overlay(10,630, player.weapon_index,self.weapon_graphics,not player.can_switch_weapon)
        # Magic Overlay
        self.__item_overlay(90,630, player.magic_index,self.magic_graphics,not player.can_switch_magic)
|
from Validation import *
from enum import Enum
class Type(Enum):
    """Taxi service class.

    NOTE(review): 'econom'/'standart' look like misspellings of
    'economy'/'standard', but the member names are part of the public
    interface -- renaming them would break callers.
    """
    econom = 1
    standart = 2
    comfort = 3
    minibus = 4
class Taxi:
    """A single taxi-ride record.

    Attribute values are supplied positionally (see atributes() for the
    order) and validated by the property setters below via the Validation
    decorators.

    Fix: the EndPlace getter previously returned ``self._StartPlace``
    (copy-paste bug), so the end place could never be read back.
    """
    @staticmethod
    def atributes():
        """Ordered attribute names; also the expected constructor order."""
        return ["DriverName", "Type", "StartTime", "EndTime", "StartPlace", "EndPlace"]

    def __init__(self, *item):
        # Assign positional values to attributes in declared order; the
        # validating setters may raise ValueError on bad input.
        for i in range(len(self.atributes())):
            setattr(self, self.atributes()[i], item[i])

    def __str__(self):
        """Render as "Name: value, Name: value, ..." in attribute order."""
        res = ""
        for i in range(len(self.atributes())):
            res += self.atributes()[i] + ": " + self.__getitem__(self.atributes()[i]) + ", "
        res = res[:-2]  # drop the trailing ", "
        return res

    def __getitem__(self, key):
        return getattr(self, key)

    def __setitem__(self, key, value):
        return setattr(self, key, value)

    @property
    def DriverName(self):
        return self._DriverName
    @DriverName.setter
    @Validation.nameV
    def DriverName(self, value):
        self._DriverName = value

    @property
    def Type(self):
        return self._Type
    @Type.setter
    @Validation.typeV
    def Type(self, value):
        self._Type = value

    @property
    def StartTime(self):
        # Expose the raw "HH:MM" string stored inside the Time wrapper.
        return self._StartTime._time
    @StartTime.setter
    @Validation.timeV
    def StartTime(self, value):
        self._StartTime = Time(value)

    @property
    def EndTime(self):
        return self._EndTime._time
    @EndTime.setter
    @Validation.timeV
    def EndTime(self, value):
        self._EndTime = Time(value)

    @property
    def StartPlace(self):
        return self._StartPlace
    @StartPlace.setter
    @Validation.nameV
    def StartPlace(self, value):
        self._StartPlace = value

    @property
    def EndPlace(self):
        # Fixed: used to return self._StartPlace.
        return self._EndPlace
    @EndPlace.setter
    @Validation.nameV
    def EndPlace(self, value):
        self._EndPlace = value
class Time:
    """Minimal wrapper around an "HH:MM"-style time string.

    Keeps the raw string plus its hour (first two characters) and minute
    (last two characters) components.
    """

    def __init__(self, time):
        self._time = time
        self._hour, self._minute = time[:2], time[-2:]
def inputElement():
    """Prompt until the user enters a comma-separated record that passes
    Taxi's attribute validation; return the constructed Taxi.

    Expected input format:
    "DriverName, Type, StartTime, EndTime, StartPlace, EndPlace".
    """
    while True:
        try:
            el = input("Enter element: ")
            el = el.replace("\n", "")
            mas = el.split(", ")
            el = Taxi(*mas)
        except ValueError:
            # Raised by Taxi's validating setters on bad input; re-prompt.
            print("Not correct element")
            continue
        break
    return el
|
import glob
import os
import scipy.io as sio
from torch.utils.data import Dataset # Dataset class from PyTorch
from PIL import Image, ImageChops # PIL is a nice Python Image Library that we can use to handle images
import torchvision.transforms as transforms # torch transform used for computer vision applications
import numpy as np
import torch
# import sys
# https://pytorch.org/tutorials/intermediate/torchvision_tutorial.html
def get_clothCoParse_class_names():
    """Return the 59 ClothCoParse label names, indexed by label id
    (0 = 'background', 58 = 'wedges')."""
    return [
        'background', 'accessories', 'bag', 'belt', 'blazer', 'blouse',
        'bodysuit', 'boots', 'bra', 'bracelet', 'cape', 'cardigan', 'clogs',
        'coat', 'dress', 'earrings', 'flats', 'glasses', 'gloves', 'hair',
        'hat', 'heels', 'hoodie', 'intimate', 'jacket', 'jeans', 'jumper',
        'leggings', 'loafers', 'necklace', 'panties', 'pants', 'pumps',
        'purse', 'ring', 'romper', 'sandals', 'scarf', 'shirt', 'shoes',
        'shorts', 'skin', 'skirt', 'sneakers', 'socks', 'stockings', 'suit',
        'sunglasses', 'sweater', 'sweatshirt', 'swimwear', 't-shirt', 'tie',
        'tights', 'top', 'vest', 'wallet', 'watch', 'wedges',
    ]
class ImageDataset(Dataset):
    """ClothCoParse dataset yielding (image, target) pairs in the format
    expected by torchvision detection/segmentation models: images are read
    from <root>/<mode>/A, per-pixel label masks from .mat files in
    <root>/<mode>/B, and each label id becomes one instance with a binary
    mask, bounding box and class label."""
    def __init__(self, root, transforms_=None, transforms_target=None,
                 mode="train", person_detection=False,
                 HPC_run=False, remove_background=True,
                 ):
        self.remove_background = remove_background # we'll have to add it as an argument later
        self.person_detection =person_detection
        if transforms_ != None:
            self.transforms = transforms.Compose(transforms_) # image transform
        else: self.transforms=None
        if transforms_target != None:
            self.transforms_target = transforms.Compose(transforms_target) # image transform
        else: self.transforms_target=None
        if HPC_run:
            # Hard-coded cluster data path overrides *root* on HPC runs.
            root = '/home/malrawi/MyPrograms/Data/ClothCoParse'
        self.files_A = sorted(glob.glob(os.path.join(root, "%s/A" % mode) + "/*.*")) # get the source image file-names
        self.files_B = sorted(glob.glob(os.path.join(root, "%s/B" % mode) + "/*.*")) # get the target image file-names
    def number_of_classes(self, opt):
        # 2 classes (background + person) in person-detection mode, otherwise
        # the full ClothCoParse label set.
        if opt.person_detection:
            return 2
        else:
            return(len(get_clothCoParse_class_names())) # this should do
    def __getitem__(self, index):
        """Return (image, target) for the sample at *index* (modulo dataset
        length). target is a dict with boxes/labels/masks/image_id/area/iscrowd."""
        annot = sio.loadmat(self.files_B[index % len(self.files_B)])
        mask = annot["groundtruth"]
        image_A = Image.open(self.files_A[index % len(self.files_A)]) # read the image, according to the file name, index select which image to read; index=1 means get the first image in the list self.files_A
        if self.remove_background or self.person_detection:
            mm = np.int8(mask>0) # thresholding the mask
            if self.person_detection:
                mask = mm # this is a binary mask; Image.fromarray(255*mask).show()
                # NOTE(review): flipping this flag mutates dataset state from
                # __getitem__; confirm this is safe with multi-worker loaders.
                self.remove_background=False # background should not be removed in person-detection
            if self.remove_background:
                image_A = ImageChops.multiply(image_A, Image.fromarray(255*mm).convert('RGB') )
        # instances are encoded as different colors
        obj_ids = np.unique(mask)[1:] # first id is the background, so remove it
        masks = mask == obj_ids[:, None, None] # split the color-encoded mask into a set of binary masks
        # get bounding box coordinates for each mask
        num_objs = len(obj_ids)
        boxes = []
        for i in range(num_objs):
            pos = np.where(masks[i])
            xmin = np.min(pos[1])
            xmax = np.max(pos[1])
            ymin = np.min(pos[0])
            ymax = np.max(pos[0])
            boxes.append([xmin, ymin, xmax, ymax])
        # convert everything into torch.Tensor
        boxes = torch.as_tensor(boxes, dtype=torch.float32)
        area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0])
        target = {}
        target["boxes"] = boxes
        target["labels"] = torch.as_tensor(obj_ids, dtype=torch.int64) # corrected by Rawi
        target["masks"] = torch.as_tensor(masks, dtype=torch.uint8) #uint8
        target["image_id"] = torch.tensor([index])
        target["area"] = area
        target["iscrowd"] = torch.zeros((num_objs,), dtype=torch.int64) # suppose all instances are not crowd
        if self.transforms != None:
            img = self.transforms(image_A)
        if self.transforms_target != None:
            target = self.transforms_target(target)
        return img, target
    def __len__(self): # this function returns the length of the dataset, the source might not equal the target if the data is unaligned
        return len(self.files_B)
# transforms_ = [
# transforms.Resize((300, 300), Image.BICUBIC),
# transforms.ToTensor(),
# transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
# ]
# x_data = ImageDataset("../data/%s" % "ClothCoParse",
# transforms_= '', #transforms_,
# mode = "train",
# HPC_run = False,
# )
# for i in range(len(x_data)):
# print(i)
# z= x_data[i] #accessing the first element in the data, should have the first image and its corresponding pixel-levele annotation
# x_data[0][1]
# # plt.imshow(anno.convert('L'), cmap= plt.cm.get_cmap("gist_stern"), vmin=0, vmax=255)
# if num_objs==0: # this can/should be used to (data cleaning) remove pairs with no annotations as these will cause an error
# print('############ 0 objects ################# ')
# print(self.files_B[index % len(self.files_B)])
|
# -*- coding: utf-8 -*-
import requests
from alf.tokens import Token, TokenError, TokenStorage
from alf.adapters import mount_retry_adapter
class TokenManager(object):
    """Fetches and caches OAuth2 access tokens (client-credentials grant).

    A still-valid token is served from memory; otherwise one is pulled
    from the shared token storage or, failing that, requested from the
    token endpoint using HTTP basic auth with the client id/secret.
    """

    def __init__(self, token_endpoint, client_id, client_secret,
                 token_storage=None, token_retries=None):
        self._token_endpoint = token_endpoint
        self._client_id = client_id
        self._client_secret = client_secret
        self._token_storage = TokenStorage(token_storage)
        self._session = requests.Session()
        if token_retries is not None:
            self._token_retries = token_retries
            mount_retry_adapter(self._session, token_retries)
        self._token = Token()

    def _has_token(self):
        # Only an unexpired token counts as usable.
        return self._token.is_valid()

    def get_token(self):
        """Return a valid access-token string, refreshing if necessary."""
        if not self._has_token():
            self._update_token()
        return self._token.access_token

    def _get_token_data(self):
        """Return token data from storage or, if absent, the endpoint.

        ``expires_on`` is (re)computed from ``expires_in`` either way.
        """
        data = self._token_storage.request_token() or self._request_token()
        data['expires_on'] = Token.calc_expires_on(data.get('expires_in', 0))
        return data

    def reset_token(self):
        """Drop the cached token and clear it from shared storage."""
        self._token = Token()
        self._token_storage(self._token)

    def _update_token(self):
        data = self._get_token_data()
        self._token = Token(data.get('access_token', ''),
                            data.get('expires_on', 0))
        self._token_storage(self._token)

    def _request_token(self):
        """POST a client_credentials grant; raise TokenError on failure."""
        response = self._session.post(
            self._token_endpoint,
            data={'grant_type': 'client_credentials'},
            auth=(self._client_id, self._client_secret))
        if not response.ok:
            raise TokenError('Failed to request token', response)
        return response.json()
|
from keras import activations, layers
from keras.utils.generic_utils import register_keras_serializable
from keras.utils.tf_utils import shape_type_conversion
@register_keras_serializable(package='TFSwin')
class MLP(layers.Layer):
    """Shape-preserving two-layer MLP: Dense -> GELU -> Dropout -> Dense -> Dropout.

    The hidden width is ``channels * ratio``; the output width matches
    the input channel count. Expects rank-4 inputs.
    """

    def __init__(self, ratio, dropout, **kwargs):
        super().__init__(**kwargs)
        self.input_spec = layers.InputSpec(ndim=4)
        self.ratio = ratio
        self.dropout = dropout

    @shape_type_conversion
    def build(self, input_shape):
        channels = input_shape[-1]
        if channels is None:
            raise ValueError('Channel dimension of the inputs should be defined. Found `None`.')
        self.input_spec = layers.InputSpec(ndim=4, axes={-1: channels})
        hidden = int(channels * self.ratio)
        # noinspection PyAttributeOutsideInit
        self.fc1 = layers.Dense(hidden, name='fc1')
        # noinspection PyAttributeOutsideInit
        self.fc2 = layers.Dense(channels, name='fc2')
        # noinspection PyAttributeOutsideInit
        self.drop = layers.Dropout(self.dropout)
        super().build(input_shape)

    def call(self, inputs, *args, **kwargs):
        hidden = self.drop(activations.gelu(self.fc1(inputs)))
        return self.drop(self.fc2(hidden))

    @shape_type_conversion
    def compute_output_shape(self, input_shape):
        # The block keeps spatial and channel dimensions intact.
        return input_shape

    def get_config(self):
        config = super().get_config()
        config.update({'ratio': self.ratio, 'dropout': self.dropout})
        return config
|
from django.db import models
# Create your models here.
class MeditationSession(models.Model):
    """One meditation sitting: its date, start time and duration."""
    date = models.DateField()
    time = models.TimeField()
    duration = models.IntegerField()

    def __str__(self):
        # NOTE(review): there is no separator between time and date in the
        # original output; preserved byte-for-byte here.
        return "session {} @ {}{}".format(self.id, self.time, self.date)
class Song(models.Model):
    """A song associated with a meditation session."""
    # ``on_delete`` is mandatory since Django 2.0; CASCADE mirrors the
    # old implicit default, so existing behavior is unchanged.
    session = models.ForeignKey(MeditationSession, on_delete=models.CASCADE)
    song = models.CharField(max_length=32)
class MeditationSettings(models.Model):
    """Singleton-style settings for the meditation page (videos, timer, UI flags)."""
    modified = models.DateTimeField(auto_now=True)  # touched on every save
    vid1 = models.CharField(default = 'YQlyHbu0zz4', max_length=32)
    vid2 = models.CharField(default = 'QoitiIbdeaM', max_length=32)
    countdown_time = models.IntegerField(default = 3600)
    display_favorites_text = models.BooleanField(default = False)
    display_heart_detector = models.BooleanField(default = False)

    def __str__(self):
        fields = (self.vid1, self.vid2, self.countdown_time,
                  self.display_favorites_text, self.display_heart_detector)
        return ' '.join(str(value) for value in fields)
|
# Copyright (c) Microsoft. All rights reserved.
'''Some helper functions for PyTorch, including:
- get_mean_and_std: calculate the mean and std value of dataset.
- msr_init: net parameter initialization.
- progress_bar: progress bar mimic xlua.progress.
'''
import os
import errno
import logging
import shutil
import optim
import torch
from tensorboardX import SummaryWriter
def mkdir(path):
    """Create *path* (including parents) if it does not already exist.

    An empty string means "the current directory": it needs no creation
    and would make ``os.makedirs`` raise, so it is skipped.
    """
    if path == '':
        return
    # exist_ok=True replaces the original try/except-EEXIST dance; unlike
    # the original it will surface an error if ``path`` exists as a file,
    # which is a genuine misconfiguration rather than a benign race.
    os.makedirs(path, exist_ok=True)
class SasaWriter():
    """TensorBoard writer wrapper keeping an independent step counter per tag.

    Scalars can optionally be mirrored to a second (e.g. Philly) log path.
    Logged steps are scaled by ``step_multiplier``.
    """

    def __init__(self, path, step_multiplier=1, phillypath=None):
        self.steps = {}
        self.sm = step_multiplier
        self.writer = SummaryWriter(path)
        self.phillywriter = SummaryWriter(phillypath) if phillypath else None

    def add_scalar(self, name, val):
        """Log ``val`` under ``name`` and advance that tag's step count."""
        step = self.steps.setdefault(name, 0)
        self.writer.add_scalar(name, val, self.sm * step)
        if self.phillywriter:
            self.phillywriter.add_scalar(name, val, self.sm * step)
        self.steps[name] = step + 1
def get_opt(cfg, net):
    """Build the optimizer selected by ``cfg.OPTIM.OPT`` for ``net``.

    'sgd' and 'adam' come from ``torch.optim``; the remaining options
    ('qhm', 'yaida_seq', 'yaida_ratio', 'sasa_xd', 'sgd_sls', 'salsa')
    are constructed from the project-local ``optim`` module, wired with
    the corresponding ``cfg.OPTIM.*`` hyperparameters.

    NOTE(review): an unrecognized ``cfg.OPTIM.OPT`` leaves ``optimizer``
    unbound, so the final return raises NameError — confirm callers
    validate the option beforehand.
    """
    # Common hyperparameters shared by most branches.
    lr = cfg.OPTIM.LR
    momentum = cfg.OPTIM.MOM
    if cfg.OPTIM.OPT == 'sgd':
        optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=momentum,
                                    weight_decay=cfg.OPTIM.WD)
    elif cfg.OPTIM.OPT == 'adam':
        optimizer = torch.optim.Adam(net.parameters(), lr=cfg.OPTIM.LR,
                                     betas=(
                                         cfg.OPTIM.ADAM.BETA1,
                                         cfg.OPTIM.ADAM.BETA2),
                                     weight_decay=cfg.OPTIM.WD)
    elif cfg.OPTIM.OPT == 'qhm':
        optimizer = optim.QHM(net.parameters(), lr=cfg.OPTIM.LR,
                              momentum=momentum,
                              nu=cfg.OPTIM.NU, weight_decay=cfg.OPTIM.WD)
    # SASA-style optimizers below share the statistical-test knobs
    # (SIGMA, DELTA, TESTFREQ, ...) from the cfg.OPTIM.SASA namespace.
    elif cfg.OPTIM.OPT == 'yaida_seq':
        optimizer = optim.SASAYaida(net.parameters(), lr=lr,
                                    momentum=momentum,
                                    weight_decay=cfg.OPTIM.WD,
                                    minN=cfg.OPTIM.SASA.N,
                                    maxN=20 * cfg.OPTIM.SASA.N,
                                    warmup=cfg.OPTIM.WARMUP,
                                    sigma=cfg.OPTIM.SASA.SIGMA,
                                    zeta=1.0 / cfg.OPTIM.DROP_FACTOR,
                                    delta=cfg.OPTIM.SASA.DELTA,
                                    testfreq=cfg.OPTIM.SASA.TESTFREQ,
                                    mode=cfg.OPTIM.SASA.MODE,
                                    logstats=cfg.OPTIM.SASA.LOGSTATS)
    elif cfg.OPTIM.OPT == 'yaida_ratio':
        optimizer = optim.Yaida(net.parameters(), lr=lr, momentum=momentum,
                                weight_decay=cfg.OPTIM.WD,
                                minN=cfg.OPTIM.SASA.N,
                                maxN=20 * cfg.OPTIM.SASA.N,
                                warmup=cfg.OPTIM.WARMUP,
                                sigma=cfg.OPTIM.SASA.SIGMA,
                                zeta=1.0 / cfg.OPTIM.DROP_FACTOR,
                                delta=cfg.OPTIM.SASA.DELTA,
                                testfreq=cfg.OPTIM.SASA.TESTFREQ)
    elif cfg.OPTIM.OPT == 'sasa_xd':
        optimizer = optim.SASA_xd(net.parameters(), lr=lr, momentum=momentum,
                                  nu=cfg.OPTIM.NU,
                                  weight_decay=cfg.OPTIM.WD,
                                  drop_factor=cfg.OPTIM.DROP_FACTOR,
                                  sigma=cfg.OPTIM.SASA.SIGMA,
                                  var_mode=cfg.OPTIM.SASA.MODE,
                                  leaky_ratio=cfg.OPTIM.SASA.LEAKY_RATIO,
                                  minN=cfg.OPTIM.SASA.N,
                                  warmup=cfg.OPTIM.WARMUP,
                                  testfreq=cfg.OPTIM.SASA.TESTFREQ,
                                  logstats=cfg.OPTIM.SASA.LOGSTATS)
    elif cfg.OPTIM.OPT == 'sgd_sls':
        optimizer = optim.SGD_SLS(net.parameters(), lr=lr, momentum=momentum,
                                  nu=cfg.OPTIM.NU,
                                  weight_decay=cfg.OPTIM.WD,
                                  gamma=cfg.OPTIM.LS.GAMMA,
                                  ls_evl=cfg.OPTIM.LS.EVAL,
                                  ls_sdc=cfg.OPTIM.LS.SDC,
                                  ls_inc=cfg.OPTIM.LS.INC,
                                  ls_dec=cfg.OPTIM.LS.DEC,
                                  ls_max=cfg.OPTIM.LS.MAX,
                                  ls_ign=cfg.OPTIM.LS.IGN)
    elif cfg.OPTIM.OPT == 'salsa':
        optimizer = optim.SALSA(net.parameters(), lr=lr, momentum=momentum,
                                nu=cfg.OPTIM.NU,
                                weight_decay=cfg.OPTIM.WD,
                                gamma=cfg.OPTIM.LS.GAMMA,
                                ls_evl=cfg.OPTIM.LS.EVAL,
                                ls_sdc=cfg.OPTIM.LS.SDC,
                                ls_inc=cfg.OPTIM.LS.INC,
                                ls_dec=cfg.OPTIM.LS.DEC,
                                ls_max=cfg.OPTIM.LS.MAX,
                                warmup=cfg.OPTIM.WARMUP,
                                drop_factor=cfg.OPTIM.DROP_FACTOR,
                                sigma=cfg.OPTIM.SASA.SIGMA,
                                var_mode=cfg.OPTIM.SASA.MODE,
                                leaky_ratio=cfg.OPTIM.SASA.LEAKY_RATIO,
                                minN=cfg.OPTIM.SASA.N,
                                testfreq=cfg.OPTIM.SASA.TESTFREQ,
                                logstats=cfg.OPTIM.SASA.LOGSTATS)
    return optimizer
def get_lr_mom(optimizer, cfg):
    """Return (lr, momentum-like value) from the optimizer's first param group.

    For Adam the first beta is reported in place of momentum, which is
    not directly comparable to SGD momentum.
    """
    group = optimizer.param_groups[0]
    lr = group['lr']
    if cfg.OPTIM.OPT == 'adam':
        # todo: better logging for adam.
        mom = group['betas'][0]  # this isn't right to compare.
    else:
        mom = group['momentum']
    assert (lr is not None)
    assert (mom is not None)
    return lr, mom
def adjust_learning_rate(optimizer, epoch, cfg):
    """Apply a step-decay schedule to every param group.

    The base LR is divided by ``DROP_FACTOR`` once every ``DROP_FREQ``
    epochs (integer division on the epoch counter).
    """
    decay = cfg.OPTIM.DROP_FACTOR ** (epoch // cfg.OPTIM.DROP_FREQ)
    new_lr = cfg.OPTIM.LR * 1.0 / decay
    for group in optimizer.param_groups:
        group['lr'] = new_lr
class AverageMeter(object):
    """Tracks the latest value plus a running, count-weighted average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = self.avg = self.sum = self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
class Checkpointer(object):
    """Saves and restores full training state under ``save_dir``.

    Besides the model weights, each checkpoint carries the architecture
    tag, optimizer/scheduler state, the current epoch, best accuracy,
    and per-epoch metric histories (train/test loss and accuracy,
    learning rates, momenta, and the u/v statistics). A plain-text
    ``last_checkpoint`` file in ``save_dir`` names the most recent
    checkpoint so training can resume automatically.
    """
    def __init__(
        self,
        model,
        arch,
        optimizer=None,
        scheduler=None,
        save_dir="",
        logger=None,
        is_test=False,
        epoch=0,
        best_acc=0.,
        trainloss=None,
        trainacc=None,
        testloss=None,
        testacc=None,
        lrs=None,
        moms=None,
        us=None,
        vs=None,
        only_save_last=0
    ):
        self.model = model
        self.arch = arch
        self.optimizer = optimizer
        self.scheduler = scheduler
        self.save_dir = save_dir
        if logger is None:
            logger = logging.getLogger(__name__)
        self.logger = logger
        self.is_test = is_test
        # Set to True by load() when an existing checkpoint is found.
        self.resume = False
        self.epoch = epoch
        self.best_acc = best_acc
        self.only_save_last = only_save_last
        # Metric histories default to fresh lists here rather than in the
        # signature, avoiding the shared-mutable-default pitfall.
        if vs is None:
            vs = []
        if us is None:
            us = []
        if moms is None:
            moms = []
        if lrs is None:
            lrs = []
        if testacc is None:
            testacc = []
        if testloss is None:
            testloss = []
        if trainacc is None:
            trainacc = []
        if trainloss is None:
            trainloss = []
        self.trainloss = trainloss
        self.trainacc = trainacc
        self.testloss = testloss
        self.testacc = testacc
        self.lrs = lrs
        self.moms = moms
        self.us = us
        self.vs = vs
    def save(self, is_best, **kwargs):
        """Write the current state to ``<name>.pth`` in ``save_dir``.

        No-op when ``save_dir`` is empty. With ``only_save_last`` the
        same file name is reused each epoch. ``kwargs`` are merged into
        the saved dict; ``is_best`` additionally copies the file to
        ``model_best.pth``.
        """
        name = 'checkpoint_{}'.format(self.epoch)
        if self.only_save_last:
            name = 'checkpoint_last'
        if not self.save_dir:
            return
        data = {"net": self.model.state_dict(), "arch": self.arch,
                "epoch": self.epoch, "best_acc": self.best_acc,
                "trainloss": self.trainloss, "trainacc": self.trainacc,
                "testloss": self.testloss, "testacc": self.testacc,
                "lrs": self.lrs, "moms": self.moms,
                "us": self.us, "vs": self.vs}
        if self.optimizer is not None:
            data["optimizer"] = self.optimizer.state_dict()
        if self.scheduler is not None:
            data["scheduler"] = self.scheduler.state_dict()
        data.update(kwargs)
        save_file = os.path.join(self.save_dir, "{}.pth".format(name))
        self.logger.info("Saving checkpoint to {}".format(save_file))
        torch.save(data, save_file)
        # self.tag_last_checkpoint(save_file)
        # use relative path name to save the checkpoint
        self.tag_last_checkpoint("{}.pth".format(name))
        if is_best:
            shutil.copyfile(save_file,
                            os.path.join(self.save_dir, "model_best.pth"))
    def load(self, f=None):
        """Load a checkpoint and restore state; return leftover entries.

        In test mode an explicitly given file wins; otherwise the path
        recorded in ``last_checkpoint`` is preferred (setting
        ``self.resume``). Returns ``{}`` when nothing can be loaded.
        """
        if self.is_test and os.path.isfile(f):
            # load the weights in config file if it is specified in testing
            # stage otherwise it will load the lastest checkpoint in
            # output_dir for testing
            self.logger.info("Loading checkpoint from {}".format(f))
            checkpoint = self._load_file(f)
            self._load_model(checkpoint)
            return checkpoint
        if self.has_checkpoint():
            # override argument with existing checkpoint
            f = self.get_checkpoint_file()
            # get the absolute path
            f = os.path.join(self.save_dir, f)
            self.resume = True
        if not os.path.isfile(f):
            # no checkpoint could be found
            self.logger.info("No checkpoint found. Initializing model from "
                             "scratch")
            return {}
        self.logger.info("Loading checkpoint from {}".format(f))
        checkpoint = self._load_file(f)
        self._load_model(checkpoint)
        # if resume training, load optimizer and scheduler,
        # otherwise use the specified LR in config yaml for fine-tuning
        if self.resume:
            # NOTE(review): these pops assume the history keys always exist
            # in the checkpoint; an older/foreign checkpoint without them
            # would raise KeyError — confirm format stability.
            self.trainloss = checkpoint.pop('trainloss')
            self.trainacc = checkpoint.pop('trainacc')
            self.testloss = checkpoint.pop('testloss')
            self.testacc = checkpoint.pop('testacc')
            self.lrs = checkpoint.pop('lrs')
            self.moms = checkpoint.pop('moms')
            self.us = checkpoint.pop('us')
            self.vs = checkpoint.pop('vs')
            if "epoch" in checkpoint:
                self.epoch = checkpoint.pop('epoch')
            if "best_acc" in checkpoint:
                self.best_acc = checkpoint.pop('best_acc')
            if "optimizer" in checkpoint and self.optimizer:
                self.logger.info("Loading optimizer from {}".format(f))
                self.optimizer.load_state_dict(checkpoint.pop("optimizer"))
            if "scheduler" in checkpoint and self.scheduler:
                self.logger.info("Loading scheduler from {}".format(f))
                self.scheduler.load_state_dict(checkpoint.pop("scheduler"))
        # return any further checkpoint data
        return checkpoint
    def has_checkpoint(self):
        """True if ``save_dir`` contains a ``last_checkpoint`` marker file."""
        save_file = os.path.join(self.save_dir, "last_checkpoint")
        return os.path.exists(save_file)
    def get_checkpoint_file(self):
        """Return the (relative) file name recorded in ``last_checkpoint``."""
        save_file = os.path.join(self.save_dir, "last_checkpoint")
        try:
            with open(save_file, "r") as f:
                last_saved = f.read()
                last_saved = last_saved.strip()
        except IOError:
            # if file doesn't exist, maybe because it has just been
            # deleted by a separate process
            last_saved = ""
        return last_saved
    def tag_last_checkpoint(self, last_filename):
        """Record ``last_filename`` as the most recent checkpoint."""
        save_file = os.path.join(self.save_dir, "last_checkpoint")
        with open(save_file, "w") as f:
            f.write(last_filename)
    def _load_file(self, f):
        # Always map to CPU so checkpoints load regardless of the
        # device they were saved on.
        return torch.load(f.strip(), map_location=torch.device("cpu"))
    def _load_model(self, checkpoint):
        # Guard against loading weights for a different architecture.
        assert checkpoint.pop('arch') == self.arch
        self.model.load_state_dict(checkpoint.pop("net"))
|
from reputation.models import Withdrawal
from user.tests.helpers import create_random_default_user
ADDRESS_1 = '0x0000000000000000000000000000000000000000'
ADDRESS_2 = '0x1123581321345589144233377610987159725844'
def create_withdrawals(count):
    """Create ``count`` withdrawals, each owned by a fresh random user."""
    for idx in range(count):
        owner = create_random_default_user(f'withdrawal_user_{idx}')
        create_withdrawal(owner)
def create_withdrawal(
    user,
    amount='1.0',
    from_address=ADDRESS_1,
    to_address=ADDRESS_2,
):
    """Persist one Withdrawal row with the given (or default test) fields."""
    fields = {
        'user': user,
        'amount': amount,
        'from_address': from_address,
        'to_address': to_address,
    }
    Withdrawal.objects.create(**fields)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.