code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The volumes snapshots api."""
import webob
from webob import exc
from cinder.api import common
from cinder.api.openstack import wsgi
from cinder.api.v1 import volumes
from cinder.api import xmlutil
from cinder import exception
from cinder.openstack.common import log as logging
from cinder.openstack.common import strutils
from cinder import utils
from cinder import volume
LOG = logging.getLogger(__name__)
def _translate_snapshot_detail_view(context, snapshot):
    """Maps keys for snapshots details view."""
    # NOTE(gagupta): No additional data / lookups at the moment, so the
    # detail view is identical to the summary view.
    return _translate_snapshot_summary_view(context, snapshot)
def _translate_snapshot_summary_view(context, snapshot):
"""Maps keys for snapshots summary view."""
d = {}
d['id'] = snapshot['id']
d['created_at'] = snapshot['created_at']
d['display_name'] = snapshot['display_name']
d['display_description'] = snapshot['display_description']
d['volume_id'] = snapshot['volume_id']
d['status'] = snapshot['status']
d['size'] = snapshot['volume_size']
if snapshot.get('snapshot_metadata'):
metadata = snapshot.get('snapshot_metadata')
d['metadata'] = dict((item['key'], item['value']) for item in metadata)
# avoid circular ref when vol is a Volume instance
elif snapshot.get('metadata') and isinstance(snapshot.get('metadata'),
dict):
d['metadata'] = snapshot['metadata']
else:
d['metadata'] = {}
return d
def make_snapshot(elem):
    """Attach the standard snapshot attributes to an XML template element."""
    for attr in ('id', 'status', 'size', 'created_at', 'display_name',
                 'display_description', 'volume_id'):
        elem.set(attr)
    # Snapshot metadata is rendered via the shared metadata sub-template.
    elem.append(common.MetadataTemplate())
class SnapshotTemplate(xmlutil.TemplateBuilder):
    """XML serialization template for a single snapshot response."""

    def construct(self):
        snapshot_elem = xmlutil.TemplateElement('snapshot',
                                                selector='snapshot')
        make_snapshot(snapshot_elem)
        return xmlutil.MasterTemplate(snapshot_elem, 1)
class SnapshotsTemplate(xmlutil.TemplateBuilder):
    """XML serialization template for a list-of-snapshots response."""

    def construct(self):
        root = xmlutil.TemplateElement('snapshots')
        child = xmlutil.SubTemplateElement(root, 'snapshot',
                                           selector='snapshots')
        make_snapshot(child)
        return xmlutil.MasterTemplate(root, 1)
class SnapshotsController(wsgi.Controller):
    """The Volumes API controller for the OpenStack API."""

    def __init__(self, ext_mgr=None):
        # volume.API() is the single entry point into cinder's volume and
        # snapshot operations.
        self.volume_api = volume.API()
        self.ext_mgr = ext_mgr
        super(SnapshotsController, self).__init__()

    @wsgi.serializers(xml=SnapshotTemplate)
    def show(self, req, id):
        """Return data about the given snapshot."""
        context = req.environ['cinder.context']
        try:
            vol = self.volume_api.get_snapshot(context, id)
            # Cache the lookup so API extensions can reuse it without a
            # second DB round trip.
            req.cache_resource(vol)
        except exception.NotFound:
            # Map the internal NotFound onto a 404 for the client.
            raise exc.HTTPNotFound()
        return {'snapshot': _translate_snapshot_detail_view(context, vol)}

    def delete(self, req, id):
        """Delete a snapshot."""
        context = req.environ['cinder.context']
        # NOTE(review): ``_`` is not imported in this module — presumably
        # installed globally by cinder's gettext setup; confirm.
        LOG.audit(_("Delete snapshot with id: %s"), id, context=context)
        try:
            snapshot = self.volume_api.get_snapshot(context, id)
            self.volume_api.delete_snapshot(context, snapshot)
        except exception.NotFound:
            raise exc.HTTPNotFound()
        # 202 Accepted: the deletion completes asynchronously.
        return webob.Response(status_int=202)

    @wsgi.serializers(xml=SnapshotsTemplate)
    def index(self, req):
        """Returns a summary list of snapshots."""
        return self._items(req, entity_maker=_translate_snapshot_summary_view)

    @wsgi.serializers(xml=SnapshotsTemplate)
    def detail(self, req):
        """Returns a detailed list of snapshots."""
        return self._items(req, entity_maker=_translate_snapshot_detail_view)

    def _items(self, req, entity_maker):
        """Returns a list of snapshots, transformed through entity_maker."""
        context = req.environ['cinder.context']
        # pop out limit and offset; they are pagination, not search_opts
        search_opts = req.GET.copy()
        search_opts.pop('limit', None)
        search_opts.pop('offset', None)
        # filter out invalid options (shared helper from the volumes API)
        allowed_search_options = ('status', 'volume_id', 'display_name')
        volumes.remove_invalid_options(context, search_opts,
                                       allowed_search_options)
        snapshots = self.volume_api.get_all_snapshots(context,
                                                      search_opts=search_opts)
        # Apply the request's limit/offset before serializing, then cache
        # the trimmed list for extensions.
        limited_list = common.limited(snapshots, req)
        req.cache_resource(limited_list)
        res = [entity_maker(context, snapshot) for snapshot in limited_list]
        return {'snapshots': res}

    @wsgi.serializers(xml=SnapshotTemplate)
    def create(self, req, body):
        """Creates a new snapshot."""
        kwargs = {}
        context = req.environ['cinder.context']
        if not self.is_valid_body(body, 'snapshot'):
            # 422: request body missing or not shaped like {'snapshot': ...}
            raise exc.HTTPUnprocessableEntity()
        snapshot = body['snapshot']
        kwargs['metadata'] = snapshot.get('metadata', None)
        try:
            volume_id = snapshot['volume_id']
        except KeyError:
            msg = _("'volume_id' must be specified")
            raise exc.HTTPBadRequest(explanation=msg)
        try:
            volume = self.volume_api.get(context, volume_id)
        except exception.NotFound:
            raise exc.HTTPNotFound()
        force = snapshot.get('force', False)
        msg = _("Create snapshot from volume %s")
        LOG.audit(msg, volume_id, context=context)
        if not utils.is_valid_boolstr(force):
            msg = _("Invalid value '%s' for force. ") % force
            raise exception.InvalidParameterValue(err=msg)
        # 'force' allows snapshotting a volume that is attached (in use).
        if strutils.bool_from_string(force):
            new_snapshot = self.volume_api.create_snapshot_force(
                context,
                volume,
                snapshot.get('display_name'),
                snapshot.get('display_description'),
                **kwargs)
        else:
            new_snapshot = self.volume_api.create_snapshot(
                context,
                volume,
                snapshot.get('display_name'),
                snapshot.get('display_description'),
                **kwargs)
        retval = _translate_snapshot_detail_view(context, new_snapshot)
        return {'snapshot': retval}

    @wsgi.serializers(xml=SnapshotTemplate)
    def update(self, req, id, body):
        """Update a snapshot."""
        context = req.environ['cinder.context']
        if not body:
            raise exc.HTTPUnprocessableEntity()
        if 'snapshot' not in body:
            raise exc.HTTPUnprocessableEntity()
        snapshot = body['snapshot']
        update_dict = {}
        # Only the display fields may be changed through this API.
        valid_update_keys = (
            'display_name',
            'display_description',
        )
        for key in valid_update_keys:
            if key in snapshot:
                update_dict[key] = snapshot[key]
        try:
            snapshot = self.volume_api.get_snapshot(context, id)
            self.volume_api.update_snapshot(context, snapshot, update_dict)
        except exception.NotFound:
            raise exc.HTTPNotFound()
        # Reflect the accepted changes in the response without re-fetching.
        snapshot.update(update_dict)
        return {'snapshot': _translate_snapshot_detail_view(context, snapshot)}
def create_resource(ext_mgr):
    """Build the WSGI resource wrapping the snapshots controller."""
    controller = SnapshotsController(ext_mgr)
    return wsgi.Resource(controller)
|
NeCTAR-RC/cinder
|
cinder/api/v1/snapshots.py
|
Python
|
apache-2.0
| 8,175
|
import pytest
import doctest
from insights.parsers import docker_inspect, SkipException
from insights.tests import context_wrap
# Captured `docker inspect <container>` JSON output, split into lines the
# way insights' context_wrap/parsers expect.
DOCKER_CONTAINER_INSPECT = """
[
{
"Id": "97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07",
"Created": "2016-06-23T05:12:25.433469799Z",
"Path": "/bin/bash",
"Args": [],
"State": {
"Status": "running",
"Running": true,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 15096,
"ExitCode": 0,
"Error": "",
"StartedAt": "2016-06-23T05:37:56.925378831Z",
"FinishedAt": "2016-06-23T05:33:02.012653984Z"
},
"Image": "882ab98aae5394aebe91fe6d8a4297fa0387c3cfd421b2d892bddf218ac373b2",
"ResolvConfPath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/resolv.conf",
"HostnamePath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/hostname",
"HostsPath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/hosts",
"LogPath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07-json.log",
"Name": "/hehe2",
"RestartCount": 0,
"Driver": "devicemapper",
"ExecDriver": "native-0.2",
"MountLabel": "system_u:object_r:svirt_sandbox_file_t:s0:c429,c690",
"ProcessLabel": "system_u:system_r:svirt_lxc_net_t:s0:c429,c690",
"AppArmorProfile": "",
"ExecIDs": null,
"HostConfig": {
"Binds": null,
"ContainerIDFile": "",
"LxcConf": [],
"Memory": 0,
"MemoryReservation": 0,
"MemorySwap": 0,
"KernelMemory": 0,
"CpuShares": 0,
"CpuPeriod": 0,
"CpusetCpus": "",
"CpusetMems": "",
"CpuQuota": 0,
"BlkioWeight": 0,
"OomKillDisable": false,
"MemorySwappiness": -1,
"Privileged": false,
"PortBindings": {},
"Links": null,
"PublishAllPorts": false,
"Dns": [],
"DnsOptions": [],
"DnsSearch": [],
"ExtraHosts": null,
"VolumesFrom": null,
"Devices": [],
"NetworkMode": "default",
"IpcMode": "",
"PidMode": "",
"UTSMode": "",
"CapAdd": null,
"CapDrop": null,
"GroupAdd": null,
"RestartPolicy": {
"Name": "no",
"MaximumRetryCount": 0
},
"SecurityOpt": null,
"ReadonlyRootfs": false,
"Ulimits": null,
"Sysctls": {},
"LogConfig": {
"Type": "json-file",
"Config": {
"max-file": "7",
"max-size": "10m"
}
},
"CgroupParent": "",
"ConsoleSize": [
0,
0
],
"VolumeDriver": "",
"ShmSize": 67108864
},
"GraphDriver": {
"Name": "devicemapper",
"Data": {
"DeviceId": "433",
"DeviceName": "docker-253:0-71431059-97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07",
"DeviceSize": "107374182400"
}
},
"Mounts": [],
"Config": {
"Hostname": "97d7cd1a5d8f",
"Domainname": "",
"User": "root",
"AttachStdin": true,
"AttachStdout": true,
"AttachStderr": true,
"Tty": true,
"OpenStdin": true,
"StdinOnce": true,
"Env": [
"container=docker",
"PKGM=yum",
"PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin"
],
"Cmd": [
"/bin/bash"
],
"Image": "rhel7_imagemagick",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": null,
"OnBuild": null,
"Labels": {
"Architecture": "x86_64",
"Authoritative_Registry": "registry.access.redhat.com",
"BZComponent": "rhel-server-docker",
"Build_Host": "rcm-img03.build.eng.bos.redhat.com",
"Name": "rhel7/rhel",
"Release": "61",
"Vendor": "Red Hat, Inc.",
"Version": "7.2",
"io.projectatomic.Temporary": "true"
},
"StopSignal": "SIGTERM"
},
"NetworkSettings": {
"Bridge": "",
"SandboxID": "f1cce5397340364aff043879ff5bd7e2ce2fcc5b81cfb7fe1833ce7b57eb6cf8",
"HairpinMode": false,
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"Ports": {},
"SandboxKey": "/var/run/docker/netns/f1cce5397340",
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null,
"EndpointID": "59be4c94b2a1346eb0ec16472bc132e071d18733fd956c34b3b1defff9bba389",
"Gateway": "172.17.0.1",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"IPAddress": "172.17.0.2",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"MacAddress": "02:42:ac:11:00:02",
"Networks": {
"bridge": {
"EndpointID": "59be4c94b2a1346eb0ec16472bc132e071d18733fd956c34b3b1defff9bba389",
"Gateway": "172.17.0.1",
"IPAddress": "172.17.0.2",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"MacAddress": "02:42:ac:11:00:02"
}
}
}
}
]
""".splitlines()
# Captured `docker inspect <image>` JSON output for the image the container
# above was started from.
DOCKER_IMAGE_INSPECT = """
[
{
"Id": "882ab98aae5394aebe91fe6d8a4297fa0387c3cfd421b2d892bddf218ac373b2",
"RepoTags": [
"rhel7_imagemagick:latest"
],
"RepoDigests": [],
"Parent": "34c167d900afb820ecab622a214ce3207af80ec755c0dcb6165b425087ddbc3a",
"Comment": "",
"Created": "2016-06-23T03:39:15.068803433Z",
"Container": "65410bf8809af52d2074c882917ea0651b119a91f460c1037bc99d4d5976532a",
"ContainerConfig": {
"Hostname": "cf3092658f01",
"Domainname": "",
"User": "root",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"container=docker",
"PKGM=yum",
"PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin"
],
"Cmd": [
"/bin/sh",
"-c",
"yum install -y ImageMagick-6.7.8.9-10.el7"
],
"Image": "34c167d900afb820ecab622a214ce3207af80ec755c0dcb6165b425087ddbc3a",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": null,
"OnBuild": [],
"Labels": {
"Architecture": "x86_64",
"Authoritative_Registry": "registry.access.redhat.com",
"BZComponent": "rhel-server-docker",
"Build_Host": "rcm-img03.build.eng.bos.redhat.com",
"Name": "rhel7/rhel",
"Release": "61",
"Vendor": "Red Hat, Inc.",
"Version": "7.2"
}
},
"DockerVersion": "1.9.1",
"Author": "",
"Config": {
"Hostname": "cf3092658f01",
"Domainname": "",
"User": "root",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"container=docker",
"PKGM=yum",
"PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin"
],
"Cmd": [
"/usr/bin/bash"
],
"Image": "34c167d900afb820ecab622a214ce3207af80ec755c0dcb6165b425087ddbc3a",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": null,
"OnBuild": [],
"Labels": {
"Architecture": "x86_64",
"Authoritative_Registry": "registry.access.redhat.com",
"BZComponent": "rhel-server-docker",
"Build_Host": "rcm-img03.build.eng.bos.redhat.com",
"Name": "rhel7/rhel",
"Release": "61",
"Vendor": "Red Hat, Inc.",
"Version": "7.2"
}
},
"Architecture": "amd64",
"Os": "linux",
"Size": 580094174,
"VirtualSize": 785437820,
"GraphDriver": {
"Name": "devicemapper",
"Data": {
"DeviceId": "431",
"DeviceName": "docker-253:0-71431059-882ab98aae5394aebe91fe6d8a4297fa0387c3cfd421b2d892bddf218ac373b2",
"DeviceSize": "107374182400"
}
}
}
]
""".splitlines()
# Deliberately truncated (unparseable) inspect output used to exercise the
# parser's SkipException path.
DOCKER_CONTAINER_INSPECT_TRUNCATED = """
[
{
"Id": "97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07",
"Created": "2016-06-23T05:12:25.433469799Z",
"Path": "/bin/bash",
"""
def test_docker_object_container_inspect():
    """Parsing a full `docker inspect <container>` dump exposes its fields."""
    parsed = docker_inspect.DockerInspect(
        context_wrap(DOCKER_CONTAINER_INSPECT))
    container_id = ("97d7cd1a5d8fd7730e83bb61ecbc99374243"
                    "8e966ac5c11910776b5d53f4ae07")
    assert parsed.get('Id') == container_id
    assert parsed.get('NetworkSettings').get('HairpinMode') is False
    expected_env = [
        'container=docker',
        'PKGM=yum',
        'PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin',
    ]
    assert parsed.get('Config').get('Env') == expected_env
    assert parsed.get('GraphDriver').get('Data').get('DeviceSize') == \
        '107374182400'
def test_docker_object_image_inspect():
    """Parsing a full `docker inspect <image>` dump exposes its fields."""
    parsed = docker_inspect.DockerInspect(context_wrap(DOCKER_IMAGE_INSPECT))
    image_id = ("882ab98aae5394aebe91fe6d8a4297fa0387"
                "c3cfd421b2d892bddf218ac373b2")
    assert parsed.get('Id') == image_id
    assert parsed.get('Size') == 580094174
    assert parsed.get('Config').get('AttachStdin') is False
    assert parsed.get('RepoDigests') == []
def test_docker_container_inspect_truncated_input():
    # Truncated JSON cannot be parsed; the parser must raise SkipException
    # rather than return partial data.
    with pytest.raises(SkipException):
        docker_inspect.DockerInspectContainer(context_wrap(DOCKER_CONTAINER_INSPECT_TRUNCATED))
def test_doc_test():
    """Run the parser module's doctests with pre-built parser objects."""
    doctest_globals = {
        'container': docker_inspect.DockerInspectContainer(
            context_wrap(DOCKER_CONTAINER_INSPECT)),
        'image': docker_inspect.DockerInspectImage(
            context_wrap(DOCKER_IMAGE_INSPECT)),
    }
    failure_count, _total = doctest.testmod(docker_inspect,
                                            globs=doctest_globals)
    assert failure_count == 0
|
RedHatInsights/insights-core
|
insights/parsers/tests/test_docker_inspect.py
|
Python
|
apache-2.0
| 10,303
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import sys
import mock
import time
import random
import shutil
import contextlib
import tempfile
import binascii
import platform
import select
import datetime
from io import BytesIO
from subprocess import Popen, PIPE
from dateutil.tz import tzlocal
import unittest
from nose.tools import assert_equal
import botocore.loaders
import botocore.session
from botocore.awsrequest import AWSResponse
from botocore.compat import (
parse_qs, six, urlparse, HAS_CRT
)
from botocore import utils
from botocore import credentials
from botocore.stub import Stubber
_LOADER = botocore.loaders.Loader()
def skip_unless_has_memory_collection(cls):
    """Class decorator to skip tests that require memory collection.

    Any test that uses memory collection (such as the resource leak tests)
    can decorate their class with skip_unless_has_memory_collection to
    indicate that if the platform does not support memory collection
    the tests should be skipped.
    """
    supported = platform.system() in ('Darwin', 'Linux')
    if supported:
        return cls
    return unittest.skip('Memory tests only supported on mac/linux.')(cls)
def skip_if_windows(reason):
    """Decorator to skip tests that should not be run on windows.

    Example usage:

        @skip_if_windows("Not valid")
        def test_some_non_windows_stuff(self):
            self.assertEqual(...)
    """
    def decorator(func):
        on_posix = platform.system() in ['Darwin', 'Linux']
        return unittest.skipIf(not on_posix, reason)(func)
    return decorator
def requires_crt(reason=None):
    """Decorator to skip a test unless awscrt is installed."""
    if reason is None:
        reason = "Test requires awscrt to be installed"

    def decorator(func):
        return unittest.skipIf(not HAS_CRT, reason)(func)
    return decorator
def random_chars(num_chars):
    """Return ``num_chars`` random lowercase hex characters.

    Useful for creating resources with random names.
    """
    # Round the byte count up so odd lengths also get enough hex digits,
    # then trim to exactly the requested length.  The previous
    # int(num_chars / 2) silently produced num_chars - 1 characters for
    # odd inputs.
    num_bytes = (num_chars + 1) // 2
    return binascii.hexlify(os.urandom(num_bytes)).decode('ascii')[:num_chars]
def create_session(**kwargs):
    """Create a botocore Session wired up for testing.

    The shared ``_LOADER`` is registered so every test reuses the same
    model cache, and the credentials file is pointed at a nonexistent
    path so local credentials never leak into a test run.
    """
    session = botocore.session.Session(**kwargs)
    session.register_component('data_loader', _LOADER)
    session.set_config_variable('credentials_file', 'noexist/foo/botocore')
    return session
@contextlib.contextmanager
def temporary_file(mode):
    """Cross-platform temporary file creation.

    tempfile.NamedTemporaryFile on windows creates a secure temp file
    that can't be read by other processes and can't be opened a second
    time.  Tests generally *want* to open the file multiple times: the
    fixture writes the contents and the test reads them back.
    """
    scratch_dir = tempfile.mkdtemp()
    unique_name = 'tmpfile-%s-%s' % (int(time.time()),
                                     random.randint(1, 1000))
    path = os.path.join(scratch_dir, unique_name)
    # Pre-create the file so read modes work immediately.
    open(path, 'w').close()
    try:
        with open(path, mode) as handle:
            yield handle
    finally:
        # Remove the whole scratch directory, file included.
        shutil.rmtree(scratch_dir)
class BaseEnvVar(unittest.TestCase):
    """Test case that swaps os.environ for a fresh, empty dict.

    Subclasses get a ``self.environ`` attribute simulating the process
    environment; the real environment is restored automatically in
    tearDown().
    """

    def setUp(self):
        self.environ = {}
        self.environ_patch = mock.patch('os.environ', self.environ)
        self.environ_patch.start()

    def tearDown(self):
        self.environ_patch.stop()
class BaseSessionTest(BaseEnvVar):
    """Base class providing an isolated session with fake credentials.

    Use this when a test wants a real Session object but must be fully
    insulated from the external environment (including environment
    variables).  Fake credential values are injected so tests can make
    fake requests to services.
    """

    def setUp(self, **environ):
        super(BaseSessionTest, self).setUp()
        fake_env = {
            'AWS_ACCESS_KEY_ID': 'access_key',
            'AWS_SECRET_ACCESS_KEY': 'secret_key',
            'AWS_CONFIG_FILE': 'no-exist-foo',
        }
        self.environ.update(fake_env)
        # Caller-provided values override the fakes above.
        self.environ.update(environ)
        self.session = create_session()
        self.session.config_filename = 'no-exist-foo'
@skip_unless_has_memory_collection
class BaseClientDriverTest(unittest.TestCase):
    """Drives the external cmd-runner subprocess for resource-leak tests."""

    # Subclasses flip this on to inject fake credentials into the child env.
    INJECT_DUMMY_CREDS = False

    def setUp(self):
        self.driver = ClientDriver()
        child_env = None
        if self.INJECT_DUMMY_CREDS:
            child_env = {'AWS_ACCESS_KEY_ID': 'foo',
                         'AWS_SECRET_ACCESS_KEY': 'bar'}
        self.driver.start(env=child_env)

    def tearDown(self):
        self.driver.stop()

    def cmd(self, *args):
        self.driver.cmd(*args)

    def send_cmd(self, *args):
        self.driver.send_cmd(*args)

    def record_memory(self):
        self.driver.record_memory()

    @property
    def memory_samples(self):
        return self.driver.memory_samples
class ClientDriver(object):
    """Manages the external 'cmd-runner' subprocess used by leak tests."""

    # Path to the runner script, which lives next to this package.
    CLIENT_SERVER = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'cmd-runner'
    )

    def __init__(self):
        self._popen = None
        # RSS samples (bytes) collected via record_memory().
        self.memory_samples = []

    def _get_memory_with_ps(self, pid):
        # It would be better to eventually switch to psutil,
        # which should allow us to test on windows, but for now
        # we'll just use ps and run on POSIX platforms.
        command_list = ['ps', '-p', str(pid), '-o', 'rss']
        p = Popen(command_list, stdout=PIPE)
        stdout = p.communicate()[0]
        if not p.returncode == 0:
            raise RuntimeError("Could not retrieve memory")
        else:
            # Get the RSS from output that looks like this:
            # RSS
            # 4496
            # ps reports kilobytes, so scale to bytes.
            return int(stdout.splitlines()[1].split()[0]) * 1024

    def record_memory(self):
        # Sample the runner process's current RSS into memory_samples.
        mem = self._get_memory_with_ps(self._popen.pid)
        self.memory_samples.append(mem)

    def start(self, env=None):
        """Start up the command runner process."""
        self._popen = Popen([sys.executable, self.CLIENT_SERVER],
                            stdout=PIPE, stdin=PIPE, env=env)

    def stop(self):
        """Shutdown the command runner process."""
        self.cmd('exit')
        self._popen.wait()

    def send_cmd(self, *cmd):
        """Send a command and return immediately.

        This is a lower level method than cmd().
        This method will instruct the cmd-runner process
        to execute a command, but this method will
        immediately return.  You will need to use
        ``is_cmd_finished()`` to check that the command
        is finished.

        This method is useful if you want to record attributes
        about the process while an operation is occurring.  For
        example, if you want to instruct the cmd-runner process
        to upload a 1GB file to S3 and you'd like to record
        the memory during the upload process, you can use
        send_cmd() instead of cmd().
        """
        cmd_str = ' '.join(cmd) + '\n'
        cmd_bytes = cmd_str.encode('utf-8')
        self._popen.stdin.write(cmd_bytes)
        self._popen.stdin.flush()

    def is_cmd_finished(self):
        # Poll (10ms) the runner's stdout; readable data means the status
        # line for the last command has arrived.
        rlist = [self._popen.stdout.fileno()]
        result = select.select(rlist, [], [], 0.01)
        if result[0]:
            return True
        return False

    def cmd(self, *cmd):
        """Send a command and block until it finishes.

        This method will send a command to the cmd-runner process
        to run.  It will block until the cmd-runner process is
        finished executing the command and sends back a status
        response.
        """
        self.send_cmd(*cmd)
        result = self._popen.stdout.readline().strip()
        if result != b'OK':
            raise RuntimeError(
                "Error from command '%s': %s" % (cmd, result))
# This is added to this file because it's used in both
# the functional and unit tests for cred refresh.
class IntegerRefresher(credentials.RefreshableCredentials):
    """Refreshable credentials to help with testing.

    This class makes testing refreshable credentials easier.
    It has the following functionality:

    * A counter, self.refresh_counter, to indicate how many
      times refresh was called.
    * A way to specify how many seconds to make credentials valid.
    * Configurable advisory/mandatory refresh.
    * An easy way to check consistency.  Each time creds are
      refreshed, all the cred values are set to the next
      incrementing integer.  Frozen credentials should always
      have this value.
    """

    _advisory_refresh_timeout = 2
    _mandatory_refresh_timeout = 1
    _credentials_expire = 3

    def __init__(self, creds_last_for=_credentials_expire,
                 advisory_refresh=_advisory_refresh_timeout,
                 mandatory_refresh=_mandatory_refresh_timeout,
                 refresh_function=None):
        if refresh_function is None:
            refresh_function = self._do_refresh
        expiry = (self._current_datetime() +
                  datetime.timedelta(seconds=creds_last_for))
        super(IntegerRefresher, self).__init__(
            '0', '0', '0', expiry,
            refresh_function, 'INTREFRESH')
        self.creds_last_for = creds_last_for
        self.refresh_counter = 0
        self._advisory_refresh_timeout = advisory_refresh
        self._mandatory_refresh_timeout = mandatory_refresh

    def _do_refresh(self):
        self.refresh_counter += 1
        # All cred values advance to the same next integer.
        next_id = str(int(self._access_key) + 1)
        return {
            'access_key': next_id,
            'secret_key': next_id,
            'token': next_id,
            'expiry_time': self._seconds_later(self.creds_last_for),
        }

    def _seconds_later(self, num_seconds):
        # We need to guarantee at *least* num_seconds; since there's no
        # subsecond precision here, round up to the next whole second.
        later = self._current_datetime() + datetime.timedelta(
            seconds=num_seconds + 1)
        return self._to_timestamp(later)

    def _to_timestamp(self, datetime_obj):
        aware = utils.parse_to_aware_datetime(datetime_obj)
        return aware.strftime('%Y-%m-%dT%H:%M:%SZ')

    def _current_timestamp(self):
        return self._to_timestamp(self._current_datetime())

    def _current_datetime(self):
        return datetime.datetime.now(tzlocal())
def _urlparse(url):
    """Parse *url*, decoding bytes input first."""
    if isinstance(url, six.binary_type):
        # Not strictly necessary, but reduces noise on Python 2.x.
        url = url.decode('utf8')
    return urlparse(url)
def assert_url_equal(url1, url2):
    """Assert two URLs are equivalent, ignoring query-string ordering."""
    parts1 = _urlparse(url1)
    parts2 = _urlparse(url2)
    # Because query string ordering isn't relevant, every other component
    # is compared field-by-field and the query strings are parsed before
    # being compared.
    for attr in ('scheme', 'netloc', 'path', 'params', 'fragment',
                 'username', 'password', 'hostname', 'port'):
        assert_equal(getattr(parts1, attr), getattr(parts2, attr))
    assert_equal(parse_qs(parts1.query), parse_qs(parts2.query))
class HTTPStubberException(Exception):
    """Raised by an HTTP stubber in strict mode when it runs out of
    canned responses for an incoming request."""
    pass
class RawResponse(BytesIO):
    """A BytesIO exposing a chunked ``stream()`` generator.

    TODO: There's a few objects similar to this in various tests; try to
    consolidate on this one in a future commit.
    """

    def stream(self, **kwargs):
        """Yield the remaining contents until the buffer is exhausted."""
        while True:
            chunk = self.read()
            if not chunk:
                break
            yield chunk
class BaseHTTPStubber(object):
    """Intercepts 'before-send' events and replays canned responses.

    Every request seen is recorded in ``self.requests``.  Responses are
    served FIFO from ``self.responses``; an Exception instance queued
    there is raised instead of returned.  In strict mode, running out of
    responses raises HTTPStubberException; otherwise the request falls
    through (returns None).
    """

    def __init__(self, obj_with_event_emitter, strict=True):
        self.reset()
        self._strict = strict
        self._obj_with_event_emitter = obj_with_event_emitter

    def reset(self):
        self.requests = []
        self.responses = []

    def add_response(self, url='https://example.com', status=200, headers=None,
                     body=b''):
        if headers is None:
            headers = {}
        raw_body = RawResponse(body)
        self.responses.append(AWSResponse(url, status, headers, raw_body))

    @property
    def _events(self):
        # Subclasses say where the event emitter lives.
        raise NotImplementedError('_events')

    def start(self):
        self._events.register('before-send', self)

    def stop(self):
        self._events.unregister('before-send', self)

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.stop()

    def __call__(self, request, **kwargs):
        self.requests.append(request)
        if not self.responses:
            if self._strict:
                raise HTTPStubberException('Insufficient responses')
            return None
        response = self.responses.pop(0)
        if isinstance(response, Exception):
            raise response
        return response
class ClientHTTPStubber(BaseHTTPStubber):
    """HTTP stubber attached to a single botocore client."""

    @property
    def _events(self):
        # A client exposes its emitter via ``client.meta.events``.
        return self._obj_with_event_emitter.meta.events
class SessionHTTPStubber(BaseHTTPStubber):
    """HTTP stubber attached to a whole botocore session."""

    @property
    def _events(self):
        # A session exposes its emitter as a registered component.
        return self._obj_with_event_emitter.get_component('event_emitter')
class ConsistencyWaiterException(Exception):
    """Raised when a consistency check does not stabilize in time."""
    pass


class ConsistencyWaiter(object):
    """
    A waiter class for some check to reach a consistent state.

    :type min_successes: int
    :param min_successes: The minimum number of successful check calls to
        treat the check as stable. Default of 1 success.

    :type max_attempts: int
    :param max_attempts: The maximum number of times to attempt calling
        the check. Default of 20 attempts.

    :type delay: int
    :param delay: The number of seconds to delay the next API call after a
        failed check call. Default of 5 seconds.
    """
    # NOTE: the second docstring entry above was previously mislabeled as
    # ':param min_successes:' though it documents max_attempts.

    def __init__(self, min_successes=1, max_attempts=20, delay=5,
                 delay_initial_poll=False):
        self.min_successes = min_successes
        self.max_attempts = max_attempts
        self.delay = delay
        self.delay_initial_poll = delay_initial_poll

    def wait(self, check, *args, **kwargs):
        """
        Wait until the check succeeds the configured number of times.

        :type check: callable
        :param check: A callable that returns True or False to indicate
            if the check succeeded or failed.

        :type args: list
        :param args: Any ordered arguments to be passed to the check.

        :type kwargs: dict
        :param kwargs: Any keyword arguments to be passed to the check.

        :raises ConsistencyWaiterException: if max_attempts is exhausted
            before min_successes successful checks are observed.
        """
        attempts = 0
        successes = 0
        if self.delay_initial_poll:
            time.sleep(self.delay)
        while attempts < self.max_attempts:
            attempts += 1
            if check(*args, **kwargs):
                successes += 1
                if successes >= self.min_successes:
                    return
            else:
                # The delay only applies after a *failed* check; a streak
                # of successes is polled back-to-back.
                time.sleep(self.delay)
        fail_msg = self._fail_message(attempts, successes)
        raise ConsistencyWaiterException(fail_msg)

    def _fail_message(self, attempts, successes):
        """Build the failure message for the raised exception."""
        format_args = (attempts, successes)
        return 'Failed after %s attempts, only had %s successes' % format_args
class StubbedSession(botocore.session.Session):
    """A Session whose clients are created with Stubbers pre-attached.

    Clients are cached per service name, and each one gets a Stubber
    reachable via ``stub(service_name)``.
    """

    def __init__(self, *args, **kwargs):
        super(StubbedSession, self).__init__(*args, **kwargs)
        self._cached_clients = {}
        self._client_stubs = {}

    def create_client(self, service_name, *args, **kwargs):
        # One cached (client, stubber) pair per service.
        try:
            return self._cached_clients[service_name]
        except KeyError:
            client = self._create_stubbed_client(service_name, *args, **kwargs)
            self._cached_clients[service_name] = client
            return client

    def _create_stubbed_client(self, service_name, *args, **kwargs):
        client = super(StubbedSession, self).create_client(
            service_name, *args, **kwargs)
        self._client_stubs[service_name] = Stubber(client)
        return client

    def stub(self, service_name, *args, **kwargs):
        if service_name not in self._client_stubs:
            self.create_client(service_name, *args, **kwargs)
        return self._client_stubs[service_name]

    def activate_stubs(self):
        for stubber in self._client_stubs.values():
            stubber.activate()

    def verify_stubs(self):
        for stubber in self._client_stubs.values():
            stubber.assert_no_pending_responses()
|
pplu/botocore
|
tests/__init__.py
|
Python
|
apache-2.0
| 17,369
|
#!/usr/bin/env python
"""
Ex 1. Construct a script that retrieves NAPALM facts from two IOS routers, two Arista switches, and one Junos device.
pynet-rtr1 (Cisco IOS) 184.105.247.70
pynet-rtr2 (Cisco IOS) 184.105.247.71
pynet-sw1 (Arista EOS) 184.105.247.72
pynet-sw2 (Arista EOS) 184.105.247.73
juniper-srx 184.105.247.76
Retrieve the 'model' number from each device and print the model to standard out.
As part of this exercise define the devices that you use in a Python file (for example my_devices.py) and import
these devices into your program. Optionally, define the devices in a YAML file and read this my_devices.yml file in.
"""
from __future__ import print_function
from __future__ import unicode_literals
from getpass import getpass
from pprint import pprint
from napalm_base import get_network_driver
from pyeapi.eapilib import CommandError
import yaml
import re
YAML_FILE = 'my_devices.yml'


def main():
    """Load device definitions from YAML and print each device's model."""
    # safe_load avoids arbitrary Python object construction from the YAML
    # file; yaml.load() without an explicit Loader is unsafe and deprecated.
    with open(YAML_FILE) as f:
        my_devices = yaml.safe_load(f)
    pwd = getpass()
    print("{:<20} {:<20} {:<20}".format("Device Type", "Hostname", "Model"))
    for device_dict in my_devices:
        device_dict['password'] = pwd
        device_type = device_dict.pop('device_type')
        driver = get_network_driver(device_type)
        device = driver(**device_dict)
        device.open()
        facts = device.get_facts()
        # Close the NAPALM connection; the original leaked one connection
        # per device.
        device.close()
        print('*' * 80)
        print("{:<20} {:<20} {:<20}".format(device_type,
                                            device_dict['hostname'],
                                            facts['model']))
        print('*' * 80)
        # The original ended with a bare ``print`` which is a no-op under
        # print_function; call it so the blank line is actually emitted.
        print()


if __name__ == "__main__":
    main()
|
jrslocum17/pynet_test
|
Bonus3/napalm_get_model.py
|
Python
|
apache-2.0
| 1,612
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import datetime
import functools
import hashlib
import hmac
import inspect
import os
import pyclbr
import random
import re
import shutil
import socket
import struct
import sys
import tempfile
from xml.sax import saxutils
import eventlet
import netaddr
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_context import context as common_context
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from nova import exception
from nova.i18n import _, _LE, _LW
notify_decorator = 'nova.notifications.notify_decorator'
monkey_patch_opts = [
cfg.BoolOpt('monkey_patch',
default=False,
help='Whether to log monkey patching'),
cfg.ListOpt('monkey_patch_modules',
default=[
'nova.api.ec2.cloud:%s' % (notify_decorator),
'nova.compute.api:%s' % (notify_decorator)
],
help='List of modules/decorators to monkey patch'),
]
utils_opts = [
cfg.IntOpt('password_length',
default=12,
help='Length of generated instance admin passwords'),
cfg.StrOpt('instance_usage_audit_period',
default='month',
help='Time period to generate instance usages for. '
'Time period must be hour, day, month or year'),
cfg.StrOpt('rootwrap_config',
default="/etc/nova/rootwrap.conf",
help='Path to the rootwrap configuration file to use for '
'running commands as root'),
cfg.StrOpt('tempdir',
help='Explicitly specify the temporary working directory'),
]
""" This group is for very specific reasons.
If you're:
- Working around an issue in a system tool (e.g. libvirt or qemu) where the fix
is in flight/discussed in that community.
- The tool can be/is fixed in some distributions and rather than patch the code
those distributions can trivially set a config option to get the "correct"
behavior.
This is a good place for your workaround.
Please use with care!
Document the BugID that your workaround is paired with."""
workarounds_opts = [
cfg.BoolOpt('disable_rootwrap',
default=False,
help='This option allows a fallback to sudo for performance '
'reasons. For example see '
'https://bugs.launchpad.net/nova/+bug/1415106'),
cfg.BoolOpt('disable_libvirt_livesnapshot',
default=True,
help='When using libvirt 1.2.2 fails live snapshots '
'intermittently under load. This config option provides '
'mechanism to disable livesnapshot while this is '
'resolved. See '
'https://bugs.launchpad.net/nova/+bug/1334398'),
cfg.BoolOpt('destroy_after_evacuate',
default=True,
help='Whether to destroy instances on startup when we suspect '
'they have previously been evacuated. This can result in '
'data loss if undesired. See '
'https://launchpad.net/bugs/1419785'),
]
CONF = cfg.CONF
CONF.register_opts(monkey_patch_opts)
CONF.register_opts(utils_opts)
CONF.import_opt('network_api_class', 'nova.network')
CONF.register_opts(workarounds_opts, group='workarounds')
LOG = logging.getLogger(__name__)
# used in limits
TIME_UNITS = {
'SECOND': 1,
'MINUTE': 60,
'HOUR': 3600,
'DAY': 86400
}
_IS_NEUTRON = None
synchronized = lockutils.synchronized_with_prefix('nova-')
SM_IMAGE_PROP_PREFIX = "image_"
SM_INHERITABLE_KEYS = (
'min_ram', 'min_disk', 'disk_format', 'container_format',
)
def vpn_ping(address, port, timeout=0.05, session_id=None):
    """Sends a vpn negotiation packet and returns the server session.
    Returns Boolean indicating whether the vpn_server is listening.
    Basic packet structure is below.
    Client packet (14 bytes)::
         0 1      8 9  13
        +-+--------+-----+
        |x| cli_id |?????|
        +-+--------+-----+
        x = packet identifier 0x38
        cli_id = 64 bit identifier
        ? = unknown, probably flags/padding
    Server packet (26 bytes)::
         0 1      8 9  13 14    21 2225
        +-+--------+-----+--------+----+
        |x| srv_id |?????| cli_id |????|
        +-+--------+-----+--------+----+
        x = packet identifier 0x40
        cli_id = 64 bit identifier
        ? = unknown, probably flags/padding
        bit 9 was 1 and the rest were 0 in testing
    """
    # NOTE(tonyb) session_id isn't used for a real VPN connection so using a
    # cryptographically weak value is fine.
    if session_id is None:
        session_id = random.randint(0, 0xffffffffffffffff)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # '!BQxxxxx': 0x38 marker byte, 64-bit session id, 5 pad bytes (14 total).
    data = struct.pack('!BQxxxxx', 0x38, session_id)
    sock.sendto(data, (address, port))
    # Timeout applies to the recv below (set before the blocking call).
    sock.settimeout(timeout)
    try:
        received = sock.recv(2048)
    except socket.timeout:
        # No reply within the timeout window: server is not listening.
        return False
    finally:
        sock.close()
    fmt = '!BQxxxxxQxxxx'
    if len(received) != struct.calcsize(fmt):
        LOG.warning(_LW('Expected to receive %(exp)s bytes, '
                        'but actually %(act)s'),
                    dict(exp=struct.calcsize(fmt), act=len(received)))
        return False
    # Reply carries the server session id followed by our client session id.
    (identifier, server_sess, client_sess) = struct.unpack(fmt, received)
    # Valid only when the marker is 0x40 and our session id is echoed back.
    return (identifier == 0x40 and client_sess == session_id)
def _get_root_helper():
    """Return the root-helper command line used by run_as_root commands."""
    if CONF.workarounds.disable_rootwrap:
        # Plain sudo fallback (see the workarounds option help text).
        return 'sudo'
    return 'sudo nova-rootwrap %s' % CONF.rootwrap_config
def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method.

    Injects the nova root helper whenever run_as_root is requested and the
    caller did not supply a root_helper of its own.
    """
    if 'run_as_root' in kwargs and 'root_helper' not in kwargs:
        kwargs = dict(kwargs, root_helper=_get_root_helper())
    return processutils.execute(*cmd, **kwargs)
def trycmd(*args, **kwargs):
    """Convenience wrapper around oslo's trycmd() method.

    Injects the nova root helper whenever run_as_root is requested and the
    caller did not supply a root_helper of its own.
    """
    if 'run_as_root' in kwargs and 'root_helper' not in kwargs:
        kwargs = dict(kwargs, root_helper=_get_root_helper())
    return processutils.trycmd(*args, **kwargs)
def novadir():
    """Return the absolute path of the directory containing the nova package.

    The result keeps a trailing separator; splitting on the literal
    'nova/__init__.py' assumes a POSIX-style module path.
    """
    import nova
    return os.path.abspath(nova.__file__).split('nova/__init__.py')[0]
def generate_uid(topic, size=8):
    """Return '<topic>-<suffix>' with a random suffix of ``size`` characters.

    The suffix is drawn from lowercase letters and digits.  Not
    cryptographically secure -- use for identifiers, not secrets.
    """
    # BUG FIX: the previous alphabet '01234567890abc...' listed '0' twice,
    # biasing the distribution toward '0'.  xrange is also replaced with
    # range, which behaves identically here and is Python 3 compatible.
    characters = '0123456789abcdefghijklmnopqrstuvwxyz'
    choices = [random.choice(characters) for _x in range(size)]
    return '%s-%s' % (topic, ''.join(choices))
# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
# Each inner string is a "symbol group"; generate_password() guarantees at
# least one character from every group.
DEFAULT_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0,1
                            'ABCDEFGHJKLMNPQRSTUVWXYZ',  # Removed: I, O
                            'abcdefghijkmnopqrstuvwxyz')  # Removed: l
# ~5 bits per symbol
EASIER_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0, 1
                           'ABCDEFGHJKLMNPQRSTUVWXYZ')  # Removed: I, O
def last_completed_audit_period(unit=None, before=None):
    """Return the most recently *completed* audit period.

    arguments:
            units: string, one of 'hour', 'day', 'month', 'year'
                    Periods normally begin at the beginning (UTC) of the
                    period unit (So a 'day' period begins at midnight UTC,
                    a 'month' unit on the 1st, a 'year' on Jan, 1)
                    unit string may be appended with an optional offset
                    like so:  'day@18'  This will begin the period at 18:00
                    UTC.  'month@15' starts a monthly period on the 15th,
                    and year@3 begins a yearly one on March 1st.
            before: Give the audit period most recently completed before
                    <timestamp>. Defaults to now.

    returns:  2 tuple of datetimes (begin, end)
              The begin timestamp of this audit period is the same as the
              end of the previous.
    """
    if not unit:
        unit = CONF.instance_usage_audit_period

    # An optional '@<offset>' suffix shifts the period boundary within
    # the unit (hour-of-day, day-of-month, or month-of-year).
    offset = 0
    if '@' in unit:
        unit, offset = unit.split("@", 1)
        offset = int(offset)

    rightnow = before if before is not None else timeutils.utcnow()

    def previous_month(year, month):
        # Step one calendar month back, wrapping across the year boundary.
        if month <= 1:
            return year - 1, 12 + (month - 1)
        return year, month - 1

    if unit == 'month':
        offset = offset or 1
        end = datetime.datetime(day=offset,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            # The period ending this month has not completed yet.
            year, month = previous_month(rightnow.year, rightnow.month)
            end = datetime.datetime(day=offset, month=month, year=year)
        year, month = previous_month(end.year, end.month)
        begin = datetime.datetime(day=offset, month=month, year=year)
    elif unit == 'year':
        offset = offset or 1
        end = datetime.datetime(day=1, month=offset, year=rightnow.year)
        if end >= rightnow:
            end = datetime.datetime(day=1,
                                    month=offset,
                                    year=rightnow.year - 1)
        # In either case the period begins exactly one year before it ends.
        begin = datetime.datetime(day=1, month=offset, year=end.year - 1)
    elif unit == 'day':
        end = datetime.datetime(hour=offset,
                                day=rightnow.day,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            end -= datetime.timedelta(days=1)
        begin = end - datetime.timedelta(days=1)
    elif unit == 'hour':
        end = rightnow.replace(minute=offset, second=0, microsecond=0)
        if end >= rightnow:
            end -= datetime.timedelta(hours=1)
        begin = end - datetime.timedelta(hours=1)
    else:
        raise ValueError('Time period must be hour, day, month or year')

    return (begin, end)
def generate_password(length=None, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
    """Generate a random password from the supplied symbol groups.

    At least one symbol from each group will be included. Unpredictable
    results if length is less than the number of symbol groups.

    Believed to be reasonably secure (with a reasonable password length!)
    """
    if length is None:
        length = CONF.password_length

    rng = random.SystemRandom()

    # Seed the password with one character from every group so that
    # "one of each" password policies are satisfied, then shuffle and
    # truncate so short passwords don't always start with the same groups.
    password = [rng.choice(group) for group in symbolgroups]
    rng.shuffle(password)
    password = password[:length]

    # Top up with characters drawn from the union of all groups.
    remaining = length - len(password)
    all_symbols = ''.join(symbolgroups)
    password += [rng.choice(all_symbols) for _ in range(remaining)]

    # Final shuffle so the mandatory per-group characters aren't clustered
    # at the front.
    rng.shuffle(password)
    return ''.join(password)
def get_my_linklocal(interface):
    """Return the IPv6 link-local address configured on ``interface``.

    :raises: NovaException if the address cannot be determined.
    """
    try:
        if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
        # Raw string: '\s' and '\d' are regex escapes, not string escapes
        # (the non-raw form is a deprecated invalid escape on Python 3).
        condition = r'\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
        links = [re.search(condition, x) for x in if_str[0].split('\n')]
        address = [w.group(1) for w in links if w is not None]
        if address[0] is not None:
            return address[0]
        else:
            msg = _('Link Local address is not found.:%s') % if_str
            raise exception.NovaException(msg)
    except Exception as ex:
        msg = _("Couldn't get Link Local IP of %(interface)s"
                " :%(ex)s") % {'interface': interface, 'ex': ex}
        raise exception.NovaException(msg)
def xhtml_escape(value):
    """Escapes a string so it is valid within XML or XHTML.
    """
    # Map quote characters to entity references in addition to the default
    # &, <, > escaping, so the result is safe inside attribute values.
    # (Restored: the entity literals in this copy of the file were mangled
    # by an HTML round-trip, leaving invalid syntax.)
    return saxutils.escape(value, {'"': '&quot;', "'": '&apos;'})
def utf8(value):
    """Try to turn a string into utf-8 if possible.
    Code is directly from the utf8 function in
    http://github.com/facebook/tornado/blob/master/tornado/escape.py
    """
    # Python 2 only: ``unicode`` text is encoded; byte strings (str) are
    # assumed to already be UTF-8 and are passed through unchanged.
    if isinstance(value, unicode):
        return value.encode('utf-8')
    # NOTE(review): assert disappears under -O; non-str input would then
    # be returned unchecked.
    assert isinstance(value, str)
    return value
def check_isinstance(obj, cls):
    """Checks that obj is of type cls, and lets PyLint infer types."""
    if not isinstance(obj, cls):
        raise Exception(_('Expected object of type: %s') % (str(cls)))
    return obj
def parse_server_string(server_str):
    """Parses the given server_string and returns a tuple of host and port.
    If it's not a combination of host part and port, the port element
    is an empty string. If the input is invalid expression, return a tuple of
    two empty strings.
    """
    try:
        # Bare IPv6 address (no port) must be recognised first, since the
        # address itself contains colons.
        if netaddr.valid_ipv6(server_str):
            return (server_str, '')

        # '[addr]:port' style IPv6-with-port notation.
        if ']:' in server_str:
            address, port = server_str.replace('[', '', 1).split(']:')
            return (address, port)

        # No colon at all: plain hostname or IPv4 address, no port.
        if ':' not in server_str:
            return (server_str, '')

        # Exactly 'host:port'.
        address, port = server_str.split(':')
        return (address, port)
    except (ValueError, netaddr.AddrFormatError):
        LOG.error(_LE('Invalid server_string: %s'), server_str)
        return ('', '')
def is_valid_ipv6_cidr(address):
    """Return True if ``address`` parses as an IPv6 network/CIDR."""
    try:
        # .cidr raises for anything that is not a valid IPv6 network.
        netaddr.IPNetwork(address, version=6).cidr
    except (TypeError, netaddr.AddrFormatError):
        return False
    return True
def get_shortened_ipv6(address):
    """Return ``address`` in canonical (compressed) IPv6 string form.

    :raises: netaddr.AddrFormatError if ``address`` is not valid IPv6.
    """
    addr = netaddr.IPAddress(address, version=6)
    return str(addr.ipv6())
def get_shortened_ipv6_cidr(address):
    """Return the canonical (compressed) CIDR form of an IPv6 network.

    :raises: netaddr.AddrFormatError if ``address`` is not valid IPv6.
    """
    net = netaddr.IPNetwork(address, version=6)
    return str(net.cidr)
def is_valid_cidr(address):
    """Check if address is valid

    The provided address can be a IPv6 or a IPv4
    CIDR address.
    """
    try:
        # Validate the correct CIDR Address
        netaddr.IPNetwork(address)
    except netaddr.AddrFormatError:
        return False

    # netaddr accepts a bare address without a '/xx' prefix; require an
    # explicit, non-empty prefix part here.
    parts = address.split('/')
    return len(parts) > 1 and parts[1] != ''
def get_ip_version(network):
    """Returns the IP version of a network (IPv4 or IPv6).

    :returns: 'IPv6' or 'IPv4' (None for any other version).
    :raises: AddrFormatError if invalid network.
    """
    # Parse once -- the previous code constructed the IPNetwork object
    # in both branches.
    version = netaddr.IPNetwork(network).version
    if version == 6:
        return "IPv6"
    elif version == 4:
        return "IPv4"
def safe_ip_format(ip):
    """Transform ip string to "safe" format.

    Will return ipv4 addresses unchanged, but will nest ipv6 addresses
    inside square brackets.
    """
    try:
        version = netaddr.IPAddress(ip).version
    except (TypeError, netaddr.AddrFormatError):
        # Not an IP literal at all -- treat it as a hostname, unchanged.
        return ip
    # IPv6 gets bracketed so it can carry a ':port' suffix unambiguously.
    return '[%s]' % ip if version == 6 else ip
def monkey_patch():
    """If the CONF.monkey_patch set as True,
    this function patches a decorator
    for all functions in specified modules.
    You can set decorators for each modules
    using CONF.monkey_patch_modules.
    The format is "Module path:Decorator function".
    Example:
      'nova.api.ec2.cloud:nova.notifications.notify_decorator'

    Parameters of the decorator is as follows.
    (See nova.notifications.notify_decorator)

    name - name of the function
    function - object of the function
    """
    # If CONF.monkey_patch is not True, this function do nothing.
    if not CONF.monkey_patch:
        return
    # Get list of modules and decorators
    for module_and_decorator in CONF.monkey_patch_modules:
        module, decorator_name = module_and_decorator.split(':')
        # import decorator function
        decorator = importutils.import_class(decorator_name)
        __import__(module)
        # Retrieve module information using pyclbr
        # pyclbr parses the source text without importing/executing it.
        module_data = pyclbr.readmodule_ex(module)
        for key in module_data.keys():
            # set the decorator for the class methods
            if isinstance(module_data[key], pyclbr.Class):
                clz = importutils.import_class("%s.%s" % (module, key))
                for method, func in inspect.getmembers(clz, inspect.ismethod):
                    # Rebind every method to its decorated equivalent; the
                    # decorator receives the dotted name for notification.
                    setattr(clz, method,
                            decorator("%s.%s.%s" % (module, key, method), func))
            # set the decorator for the function
            if isinstance(module_data[key], pyclbr.Function):
                func = importutils.import_class("%s.%s" % (module, key))
                setattr(sys.modules[module], key,
                        decorator("%s.%s" % (module, key), func))
def convert_to_list_dict(lst, label):
    """Convert a value or list into a list of dicts."""
    if not lst:
        # Empty/falsy input (None, [], '', 0) yields no list at all.
        return None
    items = lst if isinstance(lst, list) else [lst]
    return [{label: item} for item in items]
def make_dev_path(dev, partition=None, base='/dev'):
    """Return a path to a particular device.

    >>> make_dev_path('xvdc')
    /dev/xvdc

    >>> make_dev_path('xvdc', 1)
    /dev/xvdc1
    """
    path = os.path.join(base, dev)
    # A falsy partition (None, 0, '') leaves the bare device path.
    return '%s%s' % (path, partition) if partition else path
def sanitize_hostname(hostname, default_name=None):
    """Return a hostname which conforms to RFC-952 and RFC-1123 specs except
    the length of hostname.

    Window, Linux, and Dnsmasq has different limitation:
    Windows: 255 (net_bios limits to 15, but window will truncate it)
    Linux: 64
    Dnsmasq: 63

    Due to nova-network will leverage dnsmasq to set hostname, so we chose
    63.
    """
    def truncate_hostname(name):
        if len(name) > 63:
            LOG.warning(_LW("Hostname %(hostname)s is longer than 63, "
                            "truncate it to %(truncated_name)s"),
                        {'hostname': name, 'truncated_name': name[:63]})
        return name[:63]

    # Python 2 only: fold unicode down to latin-1 bytes before filtering.
    if isinstance(hostname, unicode):
        hostname = hostname.encode('latin-1', 'ignore')

    # Raw strings: '\w' is a regex escape, not a string escape (the
    # non-raw form is a deprecated invalid escape on Python 3).
    hostname = re.sub(r'[ _]', '-', hostname)
    hostname = re.sub(r'[^\w.-]+', '', hostname)
    hostname = hostname.lower()
    hostname = hostname.strip('.-')
    # NOTE(eliqiao): set hostname to default_display_name to avoid
    # empty hostname
    if hostname == "" and default_name is not None:
        return truncate_hostname(default_name)
    return truncate_hostname(hostname)
def read_cached_file(filename, cache_info, reload_func=None):
    """Read from a file if it has been modified.

    :param cache_info: dictionary to hold opaque cache.
    :param reload_func: optional function to be called with data when
                        file is reloaded due to a modification.

    :returns: data from file
    """
    mtime = os.path.getmtime(filename)
    cache_is_fresh = cache_info and mtime == cache_info.get('mtime')
    if not cache_is_fresh:
        LOG.debug("Reloading cached file %s", filename)
        with open(filename) as fap:
            cache_info['data'] = fap.read()
        cache_info['mtime'] = mtime
        if reload_func:
            reload_func(cache_info['data'])
    return cache_info['data']
@contextlib.contextmanager
def temporary_mutation(obj, **kwargs):
    """Temporarily set the attr on a particular object to a given value then
    revert when finished.

    One use of this is to temporarily set the read_deleted flag on a context
    object:

        with temporary_mutation(context, read_deleted="yes"):
            do_something_that_needed_deleted_objects()
    """
    def is_dict_like(thing):
        # Python 2 dict protocol probe; real objects fall through to
        # attribute access.
        return hasattr(thing, 'has_key')

    def get(thing, attr, default):
        if is_dict_like(thing):
            return thing.get(attr, default)
        return getattr(thing, attr, default)

    def set_value(thing, attr, val):
        if is_dict_like(thing):
            thing[attr] = val
        else:
            setattr(thing, attr, val)

    def delete(thing, attr):
        if is_dict_like(thing):
            del thing[attr]
        else:
            delattr(thing, attr)

    # Sentinel marking attributes that did not exist before the mutation.
    NOT_PRESENT = object()

    saved = {}
    for attr, new_value in kwargs.items():
        saved[attr] = get(obj, attr, NOT_PRESENT)
        set_value(obj, attr, new_value)

    try:
        yield
    finally:
        # Restore every attribute, deleting the ones we introduced.
        for attr, old_value in saved.items():
            if old_value is NOT_PRESENT:
                delete(obj, attr)
            else:
                set_value(obj, attr, old_value)
def generate_mac_address():
    """Generate an Ethernet MAC address."""
    # NOTE(vish): We would prefer to use 0xfe here to ensure that linux
    #             bridge mac addresses don't change, but it appears to
    #             conflict with libvirt, so we use the next highest octet
    #             that has the unicast and locally administered bits set
    #             properly: 0xfa.
    #             Discussion: https://bugs.launchpad.net/nova/+bug/921838
    octets = [0xfa, 0x16, 0x3e]
    octets.extend(random.randint(0x00, 0xff) for _ in range(3))
    return ':'.join('%02x' % octet for octet in octets)
def read_file_as_root(file_path):
    """Secure helper to read file as root."""
    try:
        out, _err = execute('cat', file_path, run_as_root=True)
    except processutils.ProcessExecutionError:
        # Surface a nova-level error instead of the raw process failure.
        raise exception.FileNotFound(file_path=file_path)
    return out
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
    """Temporarily chown a path.

    :param owner_uid: UID of temporary owner (defaults to current user)
    """
    if owner_uid is None:
        owner_uid = os.getuid()

    original_uid = os.stat(path).st_uid
    needs_chown = original_uid != owner_uid
    if needs_chown:
        execute('chown', owner_uid, path, run_as_root=True)
    try:
        yield
    finally:
        # Hand ownership back even if the body raised.
        if needs_chown:
            execute('chown', original_uid, path, run_as_root=True)
@contextlib.contextmanager
def tempdir(**kwargs):
    """Create a scratch directory, yield its path, and remove it afterwards.

    Keyword arguments are passed through to tempfile.mkdtemp; 'dir'
    defaults to CONF.tempdir when not supplied.
    """
    argdict = dict(kwargs)
    if 'dir' not in argdict:
        argdict['dir'] = CONF.tempdir
    path = tempfile.mkdtemp(**argdict)
    try:
        yield path
    finally:
        try:
            shutil.rmtree(path)
        except OSError as e:
            # Best-effort cleanup: log and carry on.
            LOG.error(_LE('Could not remove tmpdir: %s'), e)
def walk_class_hierarchy(clazz, encountered=None):
    """Walk class hierarchy, yielding most derived classes first."""
    if not encountered:
        encountered = []
    for child in clazz.__subclasses__():
        if child in encountered:
            continue
        encountered.append(child)
        # Recurse before yielding so leaves come out ahead of their parents.
        for descendant in walk_class_hierarchy(child, encountered):
            yield descendant
        yield child
class UndoManager(object):
    """Provides a mechanism to facilitate rolling back a series of actions
    when an exception is raised.
    """
    def __init__(self):
        # Undo callables, appended in the order actions were performed.
        self.undo_stack = []

    def undo_with(self, undo_func):
        """Register a callable to run if a rollback is requested."""
        self.undo_stack.append(undo_func)

    def _rollback(self):
        # Undo in reverse order of registration; the stack itself is
        # left intact.
        for undo_func in reversed(self.undo_stack):
            undo_func()

    def rollback_and_reraise(self, msg=None, **kwargs):
        """Rollback a series of actions then re-raise the exception.

        .. note:: (sirp) This should only be called within an
                  exception handler.
        """
        with excutils.save_and_reraise_exception():
            if msg:
                LOG.exception(msg, **kwargs)
            self._rollback()
def mkfs(fs, path, label=None, run_as_root=False):
    """Format a file or block device

    :param fs: Filesystem type (examples include 'swap', 'ext3', 'ext4'
               'btrfs', etc.)
    :param path: Path to file or block device to format
    :param label: Volume label to use
    """
    if fs == 'swap':
        args = ['mkswap']
    else:
        args = ['mkfs', '-t', fs]
    # add -F to force no interactive execute on non-block device.
    if fs in ('ext3', 'ext4', 'ntfs'):
        args.append('-F')
    if label:
        # msdos/vfat use '-n' for the label flag; everything else '-L'.
        label_opt = '-n' if fs in ('msdos', 'vfat') else '-L'
        args.extend([label_opt, label])
    args.append(path)
    execute(*args, run_as_root=run_as_root)
def last_bytes(file_like_object, num):
    """Return num bytes from the end of the file, and remaining byte count.

    :param file_like_object: The file to read
    :param num: The number of bytes to return

    :returns (data, remaining)
    """
    try:
        file_like_object.seek(-num, os.SEEK_END)
    except IOError as e:
        # errno 22 (EINVAL): the file is shorter than num bytes, so
        # rewind and return everything from the start.
        if e.errno != 22:
            raise
        file_like_object.seek(0, os.SEEK_SET)

    remaining = file_like_object.tell()
    return (file_like_object.read(), remaining)
def metadata_to_dict(metadata):
    """Flatten a list of metadata item dicts into a key->value dict.

    Items flagged 'deleted' are skipped.
    """
    return dict((item['key'], item['value'])
                for item in metadata if not item.get('deleted'))
def dict_to_metadata(metadata):
    """Expand a key->value dict into a list of {'key':, 'value':} dicts.

    Inverse of metadata_to_dict for non-deleted items.
    """
    # .items() instead of the Python2-only dict.iteritems() so this helper
    # also runs under Python 3; identical results on Python 2.
    return [dict(key=key, value=value) for key, value in metadata.items()]
def instance_meta(instance):
    """Return an instance's metadata as a plain dict."""
    meta = instance['metadata']
    if isinstance(meta, dict):
        return meta
    # Database rows: a list of metadata items that needs flattening.
    return metadata_to_dict(meta)
def instance_sys_meta(instance):
    """Return an instance's system_metadata as a plain dict (maybe empty)."""
    sys_meta = instance.get('system_metadata')
    if not sys_meta:
        return {}
    if isinstance(sys_meta, dict):
        return sys_meta
    # Database rows: a list of metadata items that needs flattening.
    return metadata_to_dict(sys_meta)
def get_wrapped_function(function):
    """Get the method at the bottom of a stack of decorators."""
    # __closure__ exists on both Python 2.6+ and Python 3; the previous
    # code used the Python2-only ``func_closure`` alias, so unwrapping
    # silently stopped working under Python 3.
    if not getattr(function, '__closure__', None):
        return function

    def _get_wrapped_function(function):
        if not getattr(function, '__closure__', None):
            return None

        # Walk the closure cells depth-first until a cell holds a
        # callable with no closure of its own (the innermost function).
        for closure in function.__closure__:
            func = closure.cell_contents

            deeper_func = _get_wrapped_function(func)
            if deeper_func:
                return deeper_func
            elif hasattr(closure.cell_contents, '__call__'):
                return closure.cell_contents

    return _get_wrapped_function(function)
def expects_func_args(*args):
    """Decorator factory guard: only allow ``dec`` to be applied to
    functions whose (unwrapped) signature contains the named arguments.
    """
    def _decorator_checker(dec):
        @functools.wraps(dec)
        def _decorator(f):
            base_f = get_wrapped_function(f)
            # NOTE(review): inspect.getargspec is deprecated and removed in
            # Python 3.11; getfullargspec is the long-term replacement.
            arg_names, a, kw, _default = inspect.getargspec(base_f)
            if a or kw or set(args) <= set(arg_names):
                # NOTE (ndipanov): We can't really tell if correct stuff will
                # be passed if it's a function with *args or **kwargs so
                # we still carry on and hope for the best
                return dec(f)
            else:
                raise TypeError("Decorated function %(f_name)s does not "
                                "have the arguments expected by the "
                                "decorator %(d_name)s" %
                                {'f_name': base_f.__name__,
                                 'd_name': dec.__name__})
        return _decorator
    return _decorator_checker
class ExceptionHelper(object):
    """Class to wrap another and translate the ClientExceptions raised by its
    function calls to the actual ones.
    """

    def __init__(self, target):
        self._target = target

    def __getattr__(self, name):
        func = getattr(self._target, name)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except messaging.ExpectedException as e:
                # BUG FIX: the old code did ``raise (exc, None, tb)`` -- a
                # leftover of the Python 2 three-expression raise that, as
                # written, raises a tuple (py2) / TypeError (py3) instead of
                # the wrapped exception.  six.reraise re-raises the original
                # exception with its traceback on both interpreters.
                six.reraise(*e.exc_info)
        return wrapper
def check_string_length(value, name=None, min_length=0, max_length=None):
    """Check the length of specified string

    :param value: the value of the string
    :param name: the name of the string
    :param min_length: the min_length of the string
    :param max_length: the max_length of the string
    """
    if not isinstance(value, six.string_types):
        if name is None:
            msg = _("The input is not a string or unicode")
        else:
            msg = _("%s is not a string or unicode") % name
        raise exception.InvalidInput(message=msg)

    # Use the value itself as the display name when none was given.
    display_name = value if name is None else name

    if len(value) < min_length:
        msg = _("%(name)s has a minimum character requirement of "
                "%(min_length)s.") % {'name': display_name,
                                      'min_length': min_length}
        raise exception.InvalidInput(message=msg)

    if max_length and len(value) > max_length:
        msg = _("%(name)s has more than %(max_length)s "
                "characters.") % {'name': display_name,
                                  'max_length': max_length}
        raise exception.InvalidInput(message=msg)
def validate_integer(value, name, min_value=None, max_value=None):
    """Make sure that value is a valid integer, potentially within range."""
    try:
        # Round-trip through str so floats like 1.5 are rejected rather
        # than silently truncated.
        value = int(str(value))
    except (ValueError, UnicodeEncodeError):
        msg = _('%(value_name)s must be an integer')
        raise exception.InvalidInput(reason=(
            msg % {'value_name': name}))

    if min_value is not None and value < min_value:
        msg = _('%(value_name)s must be >= %(min_value)d')
        raise exception.InvalidInput(
            reason=(msg % {'value_name': name,
                           'min_value': min_value}))
    if max_value is not None and value > max_value:
        msg = _('%(value_name)s must be <= %(max_value)d')
        raise exception.InvalidInput(
            reason=(msg % {'value_name': name,
                           'max_value': max_value}))
    return value
def spawn(func, *args, **kwargs):
    """Passthrough method for eventlet.spawn.
    This utility exists so that it can be stubbed for testing without
    interfering with the service spawns.
    It will also grab the context from the threadlocal store and add it to
    the store on the new thread.  This allows for continuity in logging the
    context when using this method to spawn a new thread.

    :returns: the eventlet GreenThread object returned by eventlet.spawn.
    """
    # Capture the request context *now*, on the calling thread.
    _context = common_context.get_current()

    @functools.wraps(func)
    def context_wrapper(*args, **kwargs):
        # NOTE: If update_store is not called after spawn it won't be
        # available for the logger to pull from threadlocal storage.
        if _context is not None:
            _context.update_store()
        return func(*args, **kwargs)

    return eventlet.spawn(context_wrapper, *args, **kwargs)
def spawn_n(func, *args, **kwargs):
    """Passthrough method for eventlet.spawn_n.
    This utility exists so that it can be stubbed for testing without
    interfering with the service spawns.
    It will also grab the context from the threadlocal store and add it to
    the store on the new thread.  This allows for continuity in logging the
    context when using this method to spawn a new thread.
    """
    # Capture the request context *now*, on the calling thread.
    _context = common_context.get_current()

    @functools.wraps(func)
    def context_wrapper(*args, **kwargs):
        # NOTE: If update_store is not called after spawn_n it won't be
        # available for the logger to pull from threadlocal storage.
        if _context is not None:
            _context.update_store()
        # spawn_n discards the result, so no return here.
        func(*args, **kwargs)

    eventlet.spawn_n(context_wrapper, *args, **kwargs)
def is_none_string(val):
    """Check if a string represents a None value.
    """
    # Non-strings are never treated as 'none'; comparison is
    # case-insensitive for string input.
    return isinstance(val, six.string_types) and val.lower() == 'none'
def convert_version_to_int(version):
    """Convert a version string ('1.2.3') or tuple ((1, 2, 3)) to a single
    comparable integer (1002003).

    :raises: NovaException if the version cannot be parsed.
    """
    try:
        if isinstance(version, six.string_types):
            version = convert_version_to_tuple(version)
        if isinstance(version, tuple):
            # functools.reduce: the bare ``reduce`` builtin does not exist
            # on Python 3 (identical behavior on Python 2).
            return functools.reduce(lambda x, y: (x * 1000) + y, version)
    except Exception:
        msg = _("Hypervisor version %s is invalid.") % version
        raise exception.NovaException(msg)
def convert_version_to_str(version_int):
    """Inverse of convert_version_to_int: e.g. 6002000 -> '6.2.0'.

    Returns '' for 0 (the previous reduce-based code raised TypeError).
    """
    version_numbers = []
    factor = 1000
    while version_int != 0:
        version_number = version_int - (version_int // factor * factor)
        version_numbers.insert(0, str(version_number))
        # Floor division: '/' is true division on Python 3 and would turn
        # version_int into a never-zero float, looping forever.
        version_int = version_int // factor
    return '.'.join(version_numbers)
def convert_version_to_tuple(version_str):
    """Turn a dotted version string like '1.2.3' into the tuple (1, 2, 3)."""
    return tuple(map(int, version_str.split('.')))
def is_neutron():
    """Return True if the configured network API class is (a subclass of)
    the Neutron network API; the answer is cached after the first call.
    """
    global _IS_NEUTRON
    if _IS_NEUTRON is not None:
        return _IS_NEUTRON
    try:
        # compatibility with Folsom/Grizzly configs
        cls_name = CONF.network_api_class
        if cls_name == 'nova.network.quantumv2.api.API':
            cls_name = 'nova.network.neutronv2.api.API'
        from nova.network.neutronv2 import api as neutron_api
        _IS_NEUTRON = issubclass(importutils.import_class(cls_name),
                                 neutron_api.API)
    except ImportError:
        # Neutron (or the configured class) not importable: not neutron.
        _IS_NEUTRON = False
    return _IS_NEUTRON
def is_auto_disk_config_disabled(auto_disk_config_raw):
    """Return True when the raw auto_disk_config value reads 'disabled'.

    Comparison ignores surrounding whitespace and case; None means
    "not disabled".
    """
    return (auto_disk_config_raw is not None and
            auto_disk_config_raw.strip().lower() == "disabled")
def get_auto_disk_config_from_instance(instance=None, sys_meta=None):
    """Read 'image_auto_disk_config' from system metadata.

    ``sys_meta`` takes precedence; otherwise it is derived from the
    instance.  Returns None when the key is absent.
    """
    if sys_meta is None:
        sys_meta = instance_sys_meta(instance)
    return sys_meta.get("image_auto_disk_config")
def get_auto_disk_config_from_image_props(image_properties):
    """Read 'auto_disk_config' from image properties (None when absent)."""
    return image_properties.get("auto_disk_config")
def get_system_metadata_from_image(image_meta, flavor=None):
    """Build instance system_metadata from an image's metadata, prefixing
    each image property key with 'image_'.
    """
    system_meta = {}
    prefix_format = SM_IMAGE_PROP_PREFIX + '%s'
    # Python 2 only: iteritems()/unicode().  Values are truncated to 255
    # characters after UTF-8 encoding.
    for key, value in image_meta.get('properties', {}).iteritems():
        new_value = safe_truncate(unicode(value), 255)
        system_meta[prefix_format % key] = new_value
    for key in SM_INHERITABLE_KEYS:
        value = image_meta.get(key)
        if key == 'min_disk' and flavor:
            # NOTE(review): for 'vhd' images the flavor root size is used
            # directly instead of max() -- confirm the rationale with the
            # xenapi driver before changing.
            if image_meta.get('disk_format') == 'vhd':
                value = flavor['root_gb']
            else:
                value = max(value, flavor['root_gb'])
        if value is None:
            continue
        system_meta[prefix_format % key] = value
    return system_meta
def get_image_from_system_metadata(system_meta):
    """Reconstruct an image-meta dict from instance system_metadata,
    stripping the 'image_' prefix from property keys.
    """
    image_meta = {}
    properties = {}
    # Accept either a plain dict or a list of metadata item rows.
    if not isinstance(system_meta, dict):
        system_meta = metadata_to_dict(system_meta)
    for key, value in system_meta.iteritems():
        if value is None:
            continue
        # NOTE(xqueralt): Not sure this has to inherit all the properties or
        # just the ones we need. Leaving it for now to keep the old behaviour.
        if key.startswith(SM_IMAGE_PROP_PREFIX):
            key = key[len(SM_IMAGE_PROP_PREFIX):]
        if key in SM_INHERITABLE_KEYS:
            # Top-level image keys (min_ram, disk_format, ...).
            image_meta[key] = value
        else:
            # Skip properties that are non-inheritable
            if key in CONF.non_inheritable_image_properties:
                continue
            properties[key] = value
    image_meta['properties'] = properties
    return image_meta
def get_hash_str(base_str):
    """returns string that represents hash of base_str (in hex format)."""
    hasher = hashlib.md5(base_str)
    return hasher.hexdigest()
if hasattr(hmac, 'compare_digest'):
    # Python >= 2.7.7 / 3.3 ships a C implementation; prefer it.
    constant_time_compare = hmac.compare_digest
else:
    def constant_time_compare(first, second):
        """Returns True if both string inputs are equal, otherwise False.

        This function should take a constant amount of time regardless of
        how many characters in the strings match.
        """
        if len(first) != len(second):
            return False
        # OR the XOR of every character pair; any mismatch leaves a
        # non-zero bit without short-circuiting the loop.
        mismatch = 0
        for x, y in zip(first, second):
            mismatch |= ord(x) ^ ord(y)
        return mismatch == 0
def filter_and_format_resource_metadata(resource_type, resource_list,
        search_filts, metadata_type=None):
    """Get all metadata for a list of resources after filtering.
    Search_filts is a list of dictionaries, where the values in the dictionary
    can be string or regex string, or a list of strings/regex strings.
    Let's call a dict a 'filter block' and an item in the dict
    a 'filter'. A tag is returned if it matches ALL the filters in
    a filter block. If more than one values are specified for a
    filter, a tag is returned if it matches ATLEAST ONE value of the filter. If
    more than one filter blocks are specified, the tag should match ALL the
    filter blocks.
    For example:
        search_filts = [{'key': ['key1', 'key2'], 'value': 'val1'},
                        {'value': 'val2'}]
    The filter translates to 'match any tag for which':
        ((key=key1 AND value=val1) OR (key=key2 AND value=val1)) AND
            (value=val2)
    This example filter will never match a tag.
        :param resource_type: The resource type as a string, e.g. 'instance'
        :param resource_list: List of resource objects
        :param search_filts: Filters to filter metadata to be returned. Can be
            dict (e.g. {'key': 'env', 'value': 'prod'}, or a list of dicts
            (e.g. [{'key': 'env'}, {'value': 'beta'}]. Note that the values
            of the dict can be regular expressions.
        :param metadata_type: Provided to search for a specific metadata type
            (e.g. 'system_metadata')
        :returns: List of dicts where each dict is of the form {'key':
            'somekey', 'value': 'somevalue', 'instance_id':
            'some-instance-uuid-aaa'} if resource_type is 'instance'.
    """
    # Normalise a single filter block into a one-element list.
    if isinstance(search_filts, dict):
        search_filts = [search_filts]

    def _get_id(resource):
        # Only 'instance' resources are currently supported; anything
        # else yields a None id.
        if resource_type == 'instance':
            return resource.get('uuid')

    def _match_any(pattern_list, string):
        # True if `string` matches at least one pattern (anchored at the
        # start, per re.match semantics).
        if isinstance(pattern_list, str):
            pattern_list = [pattern_list]
        return any([re.match(pattern, string)
                    for pattern in pattern_list])

    def _filter_metadata(resource, search_filt, input_metadata):
        ids = search_filt.get('resource_id', [])
        keys_filter = search_filt.get('key', [])
        values_filter = search_filt.get('value', [])
        output_metadata = {}

        # A resource_id filter excludes the whole resource when it
        # doesn't list this resource's id.
        if ids and _get_id(resource) not in ids:
            return {}

        for k, v in six.iteritems(input_metadata):
            # Both keys and value defined -- AND
            # (skip only when neither the key nor the value matches).
            if (keys_filter and values_filter and
               not _match_any(keys_filter, k) and
               not _match_any(values_filter, v)):
                continue
            # Only keys or value is defined
            elif ((keys_filter and not _match_any(keys_filter, k)) or
                  (values_filter and not _match_any(values_filter, v))):
                continue

            output_metadata[k] = v
        return output_metadata

    formatted_metadata_list = []
    for res in resource_list:

        if resource_type == 'instance':
            # NOTE(rushiagr): metadata_type should be 'metadata' or
            # 'system_metadata' if resource_type is instance. Defaulting to
            # 'metadata' if not specified.
            if metadata_type is None:
                metadata_type = 'metadata'
            metadata = res.get(metadata_type, {})

        for filt in search_filts:
            # By chaining the input to the output, the filters are
            # ANDed together
            metadata = _filter_metadata(res, filt, metadata)

        for (k, v) in metadata.items():
            formatted_metadata_list.append({'key': k, 'value': v,
                             '%s_id' % resource_type: _get_id(res)})

    return formatted_metadata_list
def safe_truncate(value, length):
    """Truncate a unicode string so its UTF-8 encoding fits in `length` bytes.

    A naive byte-level slice can land in the middle of a multi-byte UTF-8
    sequence; when that happens, trailing bytes are dropped one at a time
    until the remainder decodes cleanly again.
    """
    encoded = encodeutils.safe_encode(value)[:length]
    while True:
        try:
            return encodeutils.safe_decode(encoded)
        except UnicodeDecodeError:
            # The cut split a multi-byte character; back off one byte.
            encoded = encoded[:-1]
|
alexandrucoman/vbox-nova-driver
|
nova/utils.py
|
Python
|
apache-2.0
| 42,459
|
# Copyright 2014 Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from caniusepython3 import command
from caniusepython3.test import unittest, skip_pypi_timeouts
from distutils import dist
def make_command(requires):
    """Build a caniusepython3 Command around a Distribution with `requires`."""
    distribution = dist.Distribution(requires)
    return command.Command(distribution)
class RequiresTests(unittest.TestCase):

    """Every flavour of requirements field should feed _dependencies()."""

    def verify_cmd(self, requirements):
        # A command built from {requirements: ['pip']} must report 'pip'.
        cmd = make_command({requirements: ['pip']})
        self.assertEqual(frozenset(cmd._dependencies()), frozenset(['pip']))
        return cmd

    def test_install_requires(self):
        self.verify_cmd('install_requires')

    def test_tests_require(self):
        self.verify_cmd('tests_require')

    def test_extras_require(self):
        deps = frozenset(make_command(
            {'extras_require': {'testing': ['pip']}})._dependencies())
        self.assertEqual(deps, frozenset(['pip']))
class OptionsTests(unittest.TestCase):

    def test_finalize_options(self):
        # finalize_options() on an empty command is expected to be a no-op.
        cmd = make_command({})
        cmd.finalize_options()
class NetworkTests(unittest.TestCase):

    @skip_pypi_timeouts
    def test_run(self):
        # run() hits PyPI for real, hence the timeout-skip decorator.
        cmd = make_command({'install_requires': ['pip']})
        cmd.run()
|
dhamaniasad/caniusepython3
|
caniusepython3/test/test_command.py
|
Python
|
apache-2.0
| 1,792
|
import fractions
import math

# Demonstrate rational approximations of pi under progressively larger
# denominator limits.
print('PI =', math.pi)

pi_fraction = fractions.Fraction(str(math.pi))
print('No limit =', pi_fraction)

for limit in [1, 6, 11, 60, 70, 90, 100]:
    approximation = pi_fraction.limit_denominator(limit)
    print('{0:8} = {1}'.format(limit, approximation))
|
jasonwee/asus-rt-n14uhp-mrtg
|
src/lesson_mathematics/fractions_limit_denominator.py
|
Python
|
apache-2.0
| 249
|
#!/usr/bin/python
# Create the SQLite access-list database with a single USUARIO table.
# NOTE: this is a Python 2 script (print statement syntax).
# CELLPHONE is the primary key; PASSWD holds a fixed-width (138-char)
# password hash -- presumably a salted digest; verify against the app
# that writes it.
import sqlite3
conn = sqlite3.connect('accesslist.db')
# Fails with OperationalError if the table already exists (no IF NOT EXISTS).
conn.execute('''CREATE TABLE USUARIO
       (CELLPHONE CHAR(11) PRIMARY KEY     NOT NULL,
       PASSWD         CHAR(138)    NOT NULL);''')
print "Table created successfully";
conn.close()
|
aepereyra/smslock
|
creatabla.py
|
Python
|
apache-2.0
| 264
|
import random
import requests
import shutil
import logging
import os
import traceback
import ujson
from typing import List, Dict, Any, Optional, Set, Callable, Iterable, Tuple, TypeVar
from django.forms.models import model_to_dict
from zerver.models import Realm, RealmEmoji, Subscription, Recipient, \
Attachment, Stream, Message, UserProfile
from zerver.data_import.sequencer import NEXT_ID
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS as stream_colors
from zerver.lib.avatar_hash import user_avatar_path_from_ids
from zerver.lib.parallel import run_parallel, JobData
# stubs
ZerverFieldsT = Dict[str, Any]
def build_zerver_realm(realm_id: int, realm_subdomain: str, time: float,
                       other_product: str) -> List[ZerverFieldsT]:
    """Return a single-element list holding the serialized Realm row."""
    description = "Organization imported from %s!" % (other_product)
    realm = Realm(
        id=realm_id,
        date_created=time,
        name=realm_subdomain,
        string_id=realm_subdomain,
        description=description,
    )
    # authentication_methods is a bitfield proxy that model_to_dict cannot
    # serialize directly, so expand it to [name, enabled] pairs by hand.
    realm_dict = model_to_dict(realm, exclude='authentication_methods')
    realm_dict['authentication_methods'] = [
        [flag[0], flag[1]] for flag in realm.authentication_methods
    ]
    return [realm_dict]
def build_user_profile(avatar_source: str,
                       date_joined: Any,
                       delivery_email: str,
                       email: str,
                       full_name: str,
                       id: int,
                       is_active: bool,
                       is_realm_admin: bool,
                       is_guest: bool,
                       is_mirror_dummy: bool,
                       realm_id: int,
                       short_name: str,
                       timezone: Optional[str]) -> ZerverFieldsT:
    """Serialize a UserProfile row as a plain dict.

    NOTE(review): is_mirror_dummy is accepted but not stored on the model
    here -- kept for interface compatibility with callers.
    """
    profile = UserProfile(
        avatar_source=avatar_source,
        date_joined=date_joined,
        delivery_email=delivery_email,
        email=email,
        full_name=full_name,
        id=id,
        is_active=is_active,
        is_realm_admin=is_realm_admin,
        is_guest=is_guest,
        # Legacy field; imports always start it at -1.
        pointer=-1,
        realm_id=realm_id,
        short_name=short_name,
        timezone=timezone,
    )
    return model_to_dict(profile)
def build_avatar(zulip_user_id: int, realm_id: int, email: str, avatar_url: str,
                 timestamp: Any, avatar_list: List[ZerverFieldsT]) -> None:
    """Append an avatar record for `zulip_user_id` onto `avatar_list`.

    The original URL is stored under 'path'; the image itself is fetched
    later, at which point the path/s3_path fields are rewritten.
    """
    avatar_list.append({
        'path': avatar_url,  # Save original avatar url here, which is downloaded later
        'realm_id': realm_id,
        'content_type': None,
        'user_profile_id': zulip_user_id,
        'last_modified': timestamp,
        'user_profile_email': email,
        's3_path': "",
        'size': "",
    })
def make_subscriber_map(zerver_subscription: List[ZerverFieldsT]) -> Dict[int, Set[int]]:
    '''
    Map each recipient id to the set of subscribed user ids.

    This can be convenient for building up UserMessage rows.
    '''
    subscriber_map = dict()  # type: Dict[int, Set[int]]
    for sub in zerver_subscription:
        subscriber_map.setdefault(sub['recipient'], set()).add(sub['user_profile'])
    return subscriber_map
def build_subscription(recipient_id: int, user_id: int,
                       subscription_id: int) -> ZerverFieldsT:
    """Serialize a Subscription row linking `user_id` to `recipient_id`."""
    # Pick a stream color at random from the standard palette.
    sub = Subscription(
        color=random.choice(stream_colors),
        id=subscription_id,
    )
    sub_dict = model_to_dict(sub, exclude=['user_profile', 'recipient_id'])
    # Foreign keys are attached as bare ids; the referenced rows are
    # serialized separately.
    sub_dict['user_profile'] = user_id
    sub_dict['recipient'] = recipient_id
    return sub_dict
def build_public_stream_subscriptions(
        zerver_userprofile: List[ZerverFieldsT],
        zerver_recipient: List[ZerverFieldsT],
        zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
    '''
    Autosubscribe every user to every public stream.

    This function is only used for Hipchat now, but it may apply to
    future conversions.  We often don't get full subscriber data in the
    Hipchat export, so all users simply get subscribed to each public
    stream.  Returns a list of Subscription dicts.
    '''
    public_stream_ids = {
        stream['id']
        for stream in zerver_stream
        if not stream['invite_only']
    }
    stream_recipient_ids = {
        recipient['id']
        for recipient in zerver_recipient
        if recipient['type'] == Recipient.STREAM
        and recipient['type_id'] in public_stream_ids
    }
    all_user_ids = [user['id'] for user in zerver_userprofile]
    return [
        build_subscription(
            recipient_id=recipient_id,
            user_id=user_id,
            subscription_id=NEXT_ID('subscription'),
        )
        for recipient_id in stream_recipient_ids
        for user_id in all_user_ids
    ]
def build_private_stream_subscriptions(
        get_users: Callable[..., Set[int]],
        zerver_recipient: List[ZerverFieldsT],
        zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
    """Build Subscription rows for invite-only streams.

    `get_users` is a callback invoked as get_users(stream_id=...) that
    returns the ids of the users subscribed to that private stream.
    """
    private_stream_ids = {
        stream['id']
        for stream in zerver_stream
        if stream['invite_only']
    }
    # recipient_id -> stream_id, restricted to private streams
    recipient_to_stream = {
        recipient['id']: recipient['type_id']
        for recipient in zerver_recipient
        if recipient['type'] == Recipient.STREAM
        and recipient['type_id'] in private_stream_ids
    }
    subscriptions = []  # type: List[ZerverFieldsT]
    for recipient_id, stream_id in recipient_to_stream.items():
        for user_id in get_users(stream_id=stream_id):
            subscriptions.append(build_subscription(
                recipient_id=recipient_id,
                user_id=user_id,
                subscription_id=NEXT_ID('subscription'),
            ))
    return subscriptions
def build_personal_subscriptions(zerver_recipient: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
    """Subscribe each user to their own PERSONAL recipient."""
    return [
        build_subscription(
            # For PERSONAL recipients, type_id is the owning user's id.
            recipient_id=recipient['id'],
            user_id=recipient['type_id'],
            subscription_id=NEXT_ID('subscription'),
        )
        for recipient in zerver_recipient
        if recipient['type'] == Recipient.PERSONAL
    ]
def build_recipient(type_id: int, recipient_id: int, type: int) -> ZerverFieldsT:
    """Serialize a Recipient row (type_id points at a stream/user id)."""
    recipient_row = Recipient(
        type_id=type_id,
        id=recipient_id,
        type=type,
    )
    return model_to_dict(recipient_row)
def build_recipients(zerver_userprofile: List[ZerverFieldsT],
                     zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
    '''
    Build PERSONAL recipients for every user and STREAM recipients for
    every stream.

    As of this writing, we only use this in the HipChat conversion; the
    Slack and Gitter conversions create recipients while building their
    other objects.
    '''
    recipients = []
    for user in zerver_userprofile:
        personal = Recipient(
            type_id=user['id'],
            id=NEXT_ID('recipient'),
            type=Recipient.PERSONAL,
        )
        recipients.append(model_to_dict(personal))
    for stream in zerver_stream:
        stream_recipient = Recipient(
            type_id=stream['id'],
            id=NEXT_ID('recipient'),
            type=Recipient.STREAM,
        )
        recipients.append(model_to_dict(stream_recipient))
    return recipients
def build_realm(zerver_realm: List[ZerverFieldsT], realm_id: int,
                domain_name: str) -> ZerverFieldsT:
    """Assemble the top-level realm export structure around `zerver_realm`.

    Most tables are empty: third-party exports do not carry presence,
    activity, custom profile fields, reactions, etc.
    """
    clients = [
        {"name": "populate_db", "id": 1},
        {"name": "website", "id": 2},
        {"name": "API", "id": 3},
    ]
    realmdomain = {
        "realm": realm_id,
        "allow_subdomains": False,
        "domain": domain_name,
        "id": realm_id,
    }
    return dict(
        zerver_client=clients,
        zerver_customprofilefield=[],
        zerver_customprofilefieldvalue=[],
        zerver_userpresence=[],  # shows last logged in data, which is not available
        zerver_userprofile_mirrordummy=[],
        zerver_realmdomain=[realmdomain],
        zerver_useractivity=[],
        zerver_realm=zerver_realm,
        zerver_huddle=[],
        zerver_userprofile_crossrealm=[],
        zerver_useractivityinterval=[],
        zerver_reaction=[],
        zerver_realmemoji=[],
        zerver_realmfilter=[],
    )
def build_usermessages(zerver_usermessage: List[ZerverFieldsT],
                       subscriber_map: Dict[int, Set[int]],
                       recipient_id: int,
                       mentioned_user_ids: List[int],
                       message_id: int) -> None:
    """Append a UserMessage row for every subscriber of `recipient_id`."""
    # Sort for deterministic output; an unknown recipient yields no rows.
    for user_id in sorted(subscriber_map.get(recipient_id, set())):
        zerver_usermessage.append(build_user_message(
            user_id=user_id,
            message_id=message_id,
            # Slack and Gitter don't yet triage private messages.
            # It's possible we don't even get PMs from them.
            is_private=False,
            is_mentioned=user_id in mentioned_user_ids,
        ))
def build_user_message(user_id: int,
                       message_id: int,
                       is_private: bool,
                       is_mentioned: bool) -> ZerverFieldsT:
    """Serialize a UserMessage row with the appropriate flags bitmask."""
    flags_mask = 1  # the "read" flag
    if is_mentioned:
        flags_mask |= 8  # the "mentioned" flag
    if is_private:
        flags_mask |= 2048  # the "is_private" flag
    return dict(
        id=NEXT_ID('user_message'),
        user_profile=user_id,
        message=message_id,
        flags_mask=flags_mask,
    )
def build_defaultstream(realm_id: int, stream_id: int,
                        defaultstream_id: int) -> ZerverFieldsT:
    """Serialize a DefaultStream row marking `stream_id` as a realm default."""
    return {
        'stream': stream_id,
        'realm': realm_id,
        'id': defaultstream_id,
    }
def build_stream(date_created: Any, realm_id: int, name: str,
                 description: str, stream_id: int, deactivated: bool=False,
                 invite_only: bool=False) -> ZerverFieldsT:
    """Serialize a Stream row, attaching the realm as a bare id."""
    stream = Stream(
        name=name,
        deactivated=deactivated,
        description=description,
        date_created=date_created,
        invite_only=invite_only,
        id=stream_id,
    )
    serialized = model_to_dict(stream, exclude=['realm'])
    serialized['realm'] = realm_id
    return serialized
def build_message(topic_name: str, pub_date: float, message_id: int, content: str,
                  rendered_content: Optional[str], user_id: int, recipient_id: int,
                  has_image: bool=False, has_link: bool=False,
                  has_attachment: bool=True) -> ZerverFieldsT:
    """Serialize a Message row; sender/recipient/client attached as ids."""
    message = Message(
        rendered_content_version=1,  # this is Zulip specific
        pub_date=pub_date,
        id=message_id,
        content=content,
        rendered_content=rendered_content,
        has_image=has_image,
        has_attachment=has_attachment,
        has_link=has_link,
    )
    message.set_topic_name(topic_name)
    serialized = model_to_dict(
        message, exclude=['recipient', 'sender', 'sending_client'])
    serialized['sender'] = user_id
    serialized['sending_client'] = 1
    serialized['recipient'] = recipient_id
    return serialized
def build_attachment(realm_id: int, message_ids: Set[int],
                     user_id: int, fileinfo: ZerverFieldsT, s3_path: str,
                     zerver_attachment: List[ZerverFieldsT]) -> None:
    """
    Append a serialized Attachment row onto `zerver_attachment`.

    `fileinfo` must carry 'size', 'created' (creation time) and 'name'
    (the filename).
    """
    attachment = Attachment(
        id=NEXT_ID('attachment'),
        size=fileinfo['size'],
        create_time=fileinfo['created'],
        is_realm_public=True,
        path_id=s3_path,
        file_name=fileinfo['name'],
    )
    row = model_to_dict(attachment, exclude=['owner', 'messages', 'realm'])
    row['owner'] = user_id
    row['messages'] = list(message_ids)
    row['realm'] = realm_id
    zerver_attachment.append(row)
def process_avatars(avatar_list: List[ZerverFieldsT], avatar_dir: str, realm_id: int,
                    threads: int, size_url_suffix: str='') -> List[ZerverFieldsT]:
    """
    This function gets the avatar of the user and saves it in the
    user's avatar directory with both the extensions '.png' and '.original'
    Required parameters:

    1. avatar_list: List of avatars to be mapped in avatars records.json file
    2. avatar_dir: Folder where the downloaded avatars are saved
    3. realm_id: Realm ID.

    We use this for Slack and Gitter conversions, where avatars need to be
    downloaded.  For simpler conversions see write_avatar_png.
    """
    def get_avatar(avatar_upload_item: List[str]) -> None:
        # avatar_upload_item is [avatar_url, image_path, original_image_path].
        avatar_url = avatar_upload_item[0]

        image_path = os.path.join(avatar_dir, avatar_upload_item[1])
        original_image_path = os.path.join(avatar_dir, avatar_upload_item[2])

        # NOTE(review): no retry/error handling here -- a failed download is
        # only logged by run_parallel_wrapper, not retried.
        response = requests.get(avatar_url + size_url_suffix, stream=True)
        with open(image_path, 'wb') as image_file:
            shutil.copyfileobj(response.raw, image_file)
        shutil.copy(image_path, original_image_path)

    logging.info('######### GETTING AVATARS #########\n')
    logging.info('DOWNLOADING AVATARS .......\n')
    avatar_original_list = []
    avatar_upload_list = []
    for avatar in avatar_list:
        avatar_hash = user_avatar_path_from_ids(avatar['user_profile_id'], realm_id)
        avatar_url = avatar['path']
        # Copy BEFORE mutating avatar below, so the '.original' record
        # starts from the same fields.
        avatar_original = dict(avatar)

        image_path = ('%s.png' % (avatar_hash))
        original_image_path = ('%s.original' % (avatar_hash))

        avatar_upload_list.append([avatar_url, image_path, original_image_path])
        # We don't add the size field here in avatar's records.json,
        # since the metadata is not needed on the import end, and we
        # don't have it until we've downloaded the files anyway.
        avatar['path'] = image_path
        avatar['s3_path'] = image_path

        avatar_original['path'] = original_image_path
        avatar_original['s3_path'] = original_image_path
        avatar_original_list.append(avatar_original)

    # Run downloads parallely
    output = []
    for (status, job) in run_parallel_wrapper(get_avatar, avatar_upload_list, threads=threads):
        output.append(job)

    logging.info('######### GETTING AVATARS FINISHED #########\n')
    # Records for both the '.png' and the '.original' variants are returned.
    return avatar_list + avatar_original_list
def write_avatar_png(avatar_folder: str,
                     realm_id: int,
                     user_id: int,
                     bits: bytes) -> ZerverFieldsT:
    '''
    Write raw avatar `bits` straight to disk and return the records.json
    metadata for the file.

    Use this for conversions like Hipchat where the .png bytes arrive
    inline (e.g. in a users.json file) and nothing needs to be fetched
    externally.
    '''
    avatar_hash = user_avatar_path_from_ids(
        user_profile_id=user_id,
        realm_id=realm_id,
    )
    image_path = os.path.join(avatar_folder, avatar_hash + '.original')
    with open(image_path, 'wb') as image_file:
        image_file.write(bits)
    # Metadata that eventually goes in records.json.
    return dict(
        path=image_path,
        s3_path=image_path,
        realm_id=realm_id,
        user_profile_id=user_id,
    )
ListJobData = TypeVar('ListJobData')

def run_parallel_wrapper(f: Callable[[ListJobData], None], full_items: List[ListJobData],
                         threads: int=6) -> Iterable[Tuple[int, List[ListJobData]]]:
    """Fan `full_items` out across `threads` workers, calling `f` per item.

    Exceptions raised by `f` are logged and swallowed so that one bad
    item cannot kill an entire worker.
    """
    logging.info("Distributing %s items across %s threads" % (len(full_items), threads))

    def wrapping_function(items: List[ListJobData]) -> int:
        processed = 0
        for item in items:
            try:
                f(item)
            except Exception:
                logging.info("Error processing item: %s" % (item,))
                traceback.print_exc()
            processed += 1
            # Periodic progress logging for long download runs.
            if processed % 1000 == 0:
                logging.info("A download thread finished %s items" % (processed,))
        return 0

    # Deal items out round-robin so workers receive evenly sized lists.
    job_lists = [full_items[i::threads] for i in range(threads)]  # type: List[List[ListJobData]]
    return run_parallel(wrapping_function, job_lists, threads=threads)
def process_uploads(upload_list: List[ZerverFieldsT], upload_dir: str,
                    threads: int) -> List[ZerverFieldsT]:
    """
    This function downloads the uploads and saves it in the realm's upload directory.
    Required parameters:

    1. upload_list: List of uploads to be mapped in uploads records.json file
    2. upload_dir: Folder where the downloaded uploads are saved
    """
    def get_uploads(upload: List[str]) -> None:
        # upload is [upload_url, upload_s3_path].
        upload_url = upload[0]
        upload_path = upload[1]
        upload_path = os.path.join(upload_dir, upload_path)

        # NOTE(review): failed downloads are only logged (by
        # run_parallel_wrapper), not retried.
        response = requests.get(upload_url, stream=True)
        os.makedirs(os.path.dirname(upload_path), exist_ok=True)
        with open(upload_path, 'wb') as upload_file:
            shutil.copyfileobj(response.raw, upload_file)

    logging.info('######### GETTING ATTACHMENTS #########\n')
    logging.info('DOWNLOADING ATTACHMENTS .......\n')
    upload_url_list = []
    for upload in upload_list:
        upload_url = upload['path']
        upload_s3_path = upload['s3_path']
        upload_url_list.append([upload_url, upload_s3_path])
        # Rewrite 'path' from the source URL to the relative path the
        # import side expects.
        upload['path'] = upload_s3_path

    # Run downloads parallely
    output = []
    for (status, job) in run_parallel_wrapper(get_uploads, upload_url_list, threads=threads):
        output.append(job)

    logging.info('######### GETTING ATTACHMENTS FINISHED #########\n')
    return upload_list
def build_realm_emoji(realm_id: int,
                      name: str,
                      id: int,
                      file_name: str) -> ZerverFieldsT:
    """Serialize a RealmEmoji row as a plain dict."""
    emoji = RealmEmoji(
        realm_id=realm_id,
        name=name,
        id=id,
        file_name=file_name,
    )
    return model_to_dict(emoji)
def process_emojis(zerver_realmemoji: List[ZerverFieldsT], emoji_dir: str,
                   emoji_url_map: ZerverFieldsT, threads: int) -> List[ZerverFieldsT]:
    """
    This function downloads the custom emojis and saves in the output emoji folder.
    Required parameters:

    1. zerver_realmemoji: List of all RealmEmoji objects to be imported
    2. emoji_dir: Folder where the downloaded emojis are saved
    3. emoji_url_map: Maps emoji name to its url
    """
    def get_emojis(upload: List[str]) -> None:
        # upload is [emoji_url, emoji_path].
        emoji_url = upload[0]
        emoji_path = upload[1]
        upload_emoji_path = os.path.join(emoji_dir, emoji_path)

        # NOTE(review): failed downloads are only logged (by
        # run_parallel_wrapper), not retried.
        response = requests.get(emoji_url, stream=True)
        os.makedirs(os.path.dirname(upload_emoji_path), exist_ok=True)
        with open(upload_emoji_path, 'wb') as emoji_file:
            shutil.copyfileobj(response.raw, emoji_file)

    emoji_records = []
    upload_emoji_list = []
    logging.info('######### GETTING EMOJIS #########\n')
    logging.info('DOWNLOADING EMOJIS .......\n')
    for emoji in zerver_realmemoji:
        emoji_url = emoji_url_map[emoji['name']]
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=emoji['realm'],
            emoji_file_name=emoji['name'])

        upload_emoji_list.append([emoji_url, emoji_path])

        # The records.json entry uses 'realm_id' rather than the model's
        # 'realm' foreign-key name, so the key is renamed on a copy.
        emoji_record = dict(emoji)
        emoji_record['path'] = emoji_path
        emoji_record['s3_path'] = emoji_path
        emoji_record['realm_id'] = emoji_record['realm']
        emoji_record.pop('realm')

        emoji_records.append(emoji_record)

    # Run downloads parallely
    output = []
    for (status, job) in run_parallel_wrapper(get_emojis, upload_emoji_list, threads=threads):
        output.append(job)

    logging.info('######### GETTING EMOJIS FINISHED #########\n')
    return emoji_records
def create_converted_data_files(data: Any, output_dir: str, file_path: str) -> None:
    """Write `data` as indented JSON under output_dir + file_path.

    Parent directories are created on demand.  Note that `file_path` is
    concatenated (not os.path.join-ed), so callers pass it with a
    leading separator.
    """
    destination = output_dir + file_path
    os.makedirs(os.path.dirname(destination), exist_ok=True)
    with open(destination, 'w') as output:
        ujson.dump(data, output, indent=4)
|
jackrzhang/zulip
|
zerver/data_import/import_util.py
|
Python
|
apache-2.0
| 21,272
|
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import testtools
from designate import context
from designate import exceptions
from designate import policy
import designate.tests
class TestDesignateContext(designate.tests.TestCase):
    """Unit tests for DesignateContext copying, elevation and policy checks."""

    def test_deepcopy(self):
        original = context.DesignateContext(
            user_id='12345', project_id='54321'
        )
        duplicate = original.deepcopy()
        self.assertEqual(original.to_dict(), duplicate.to_dict())

    def test_tsigkey_id_override(self):
        original = context.DesignateContext(
            tsigkey_id='12345', project_id='54321'
        )
        serialized = original.to_dict()
        self.assertEqual('TSIG:12345 54321 - - -',
                         serialized['user_identity'])

    def test_elevated(self):
        plain = context.DesignateContext(
            user_id='12345', project_id='54321'
        )
        elevated = plain.elevated()
        # Elevation must not mutate the source context.
        self.assertFalse(plain.is_admin)
        self.assertTrue(elevated.is_admin)
        self.assertEqual(0, len(plain.roles))

    def test_elevated_with_show_deleted(self):
        plain = context.DesignateContext(
            user_id='12345', project_id='54321'
        )
        elevated = plain.elevated(show_deleted=True)
        self.assertTrue(elevated.show_deleted)

    def test_all_tenants(self):
        plain = context.DesignateContext(
            user_id='12345', project_id='54321'
        )
        elevated = plain.elevated()
        elevated.all_tenants = True
        self.assertFalse(plain.is_admin)
        self.assertTrue(elevated.is_admin)
        self.assertTrue(elevated.all_tenants)

    def test_all_tenants_policy_failure(self):
        plain = context.DesignateContext(
            user_id='12345', project_id='54321'
        )
        # Without elevation, policy must reject the all_tenants flag.
        with testtools.ExpectedException(exceptions.Forbidden):
            plain.all_tenants = True

    def test_edit_managed_records(self):
        plain = context.DesignateContext(
            user_id='12345', project_id='54321'
        )
        elevated = plain.elevated()
        elevated.edit_managed_records = True
        self.assertFalse(plain.is_admin)
        self.assertTrue(elevated.is_admin)
        self.assertTrue(elevated.edit_managed_records)

    def test_edit_managed_records_failure(self):
        plain = context.DesignateContext(
            user_id='12345', project_id='54321'
        )
        with testtools.ExpectedException(exceptions.Forbidden):
            plain.edit_managed_records = True

    @mock.patch.object(policy, 'check')
    def test_sudo(self, mock_policy_check):
        ctx = context.DesignateContext(
            user_id='12345', project_id='old_project'
        )
        ctx.sudo('new_project')
        self.assertTrue(mock_policy_check.called)
        self.assertEqual('new_project', ctx.project_id)
        self.assertEqual('old_project', ctx.original_project_id)
|
openstack/designate
|
designate/tests/unit/test_context.py
|
Python
|
apache-2.0
| 3,468
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains convenience wrappers for typical Neural Network TensorFlow layers.
Additionally it maintains a collection with update_ops that need to be
updated after the ops have been computed, for exmaple to update moving means
and moving variances of batch_norm.
Ops that have different behavior during training or eval have an is_training
parameter. Additionally Ops that contain variables.variable have a trainable
parameter, which control if the ops variables are trainable or not.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.training import moving_averages
from inception.slim import losses
from inception.slim import scopes
from inception.slim import variables
# Used to keep the update ops done by batch_norm.
UPDATE_OPS_COLLECTION = '_update_ops_'
@scopes.add_arg_scope
def batch_norm(inputs,
               decay=0.999,
               center=True,
               scale=False,
               epsilon=0.001,
               moving_vars='moving_vars',
               activation=None,
               is_training=True,
               trainable=True,
               restore=True,
               scope=None,
               reuse=None):
  """Adds a Batch Normalization layer.

  Args:
    inputs: a tensor of size [batch_size, height, width, channels]
            or [batch_size, channels].
    decay: decay for the moving average.
    center: If True, subtract beta. If False, beta is not created and ignored.
    scale: If True, multiply by gamma. If False, gamma is
      not used. When the next layer is linear (also e.g. ReLU), this can be
      disabled since the scaling can be done by the next layer.
    epsilon: small float added to variance to avoid dividing by zero.
    moving_vars: collection to store the moving_mean and moving_variance.
    activation: activation function.
    is_training: whether or not the model is in training mode.
    trainable: whether or not the variables should be trainable or not.
    restore: whether or not the variables should be marked for restore.
    scope: Optional scope for variable_scope.
    reuse: whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.

  Returns:
    a tensor representing the output of the operation.
  """
  inputs_shape = inputs.get_shape()
  with tf.variable_scope(scope, 'BatchNorm', [inputs], reuse=reuse):
    # Normalize over every axis except the last (the channel axis).
    axis = list(range(len(inputs_shape) - 1))
    params_shape = inputs_shape[-1:]
    # Allocate parameters for the beta and gamma of the normalization.
    beta, gamma = None, None
    if center:
      beta = variables.variable('beta',
                                params_shape,
                                initializer=tf.zeros_initializer(),
                                trainable=trainable,
                                restore=restore)
    if scale:
      gamma = variables.variable('gamma',
                                 params_shape,
                                 initializer=tf.ones_initializer(),
                                 trainable=trainable,
                                 restore=restore)
    # Create moving_mean and moving_variance add them to
    # GraphKeys.MOVING_AVERAGE_VARIABLES collections.
    moving_collections = [moving_vars, tf.GraphKeys.MOVING_AVERAGE_VARIABLES]
    moving_mean = variables.variable('moving_mean',
                                     params_shape,
                                     initializer=tf.zeros_initializer(),
                                     trainable=False,
                                     restore=restore,
                                     collections=moving_collections)
    moving_variance = variables.variable('moving_variance',
                                         params_shape,
                                         initializer=tf.ones_initializer(),
                                         trainable=False,
                                         restore=restore,
                                         collections=moving_collections)
    if is_training:
      # Calculate the moments based on the individual batch.
      mean, variance = tf.nn.moments(inputs, axis)

      # The moving averages are not updated inline: the assign ops go on
      # UPDATE_OPS_COLLECTION and must be run by the training loop
      # alongside the train op.
      update_moving_mean = moving_averages.assign_moving_average(
          moving_mean, mean, decay)
      tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_mean)
      update_moving_variance = moving_averages.assign_moving_average(
          moving_variance, variance, decay)
      tf.add_to_collection(UPDATE_OPS_COLLECTION, update_moving_variance)
    else:
      # Just use the moving_mean and moving_variance.
      mean = moving_mean
      variance = moving_variance
    # Normalize the activations.
    outputs = tf.nn.batch_normalization(
        inputs, mean, variance, beta, gamma, epsilon)
    outputs.set_shape(inputs.get_shape())
    if activation:
      outputs = activation(outputs)
    return outputs
def _two_element_tuple(int_or_tuple):
"""Converts `int_or_tuple` to height, width.
Several of the functions that follow accept arguments as either
a tuple of 2 integers or a single integer. A single integer
indicates that the 2 values of the tuple are the same.
This functions normalizes the input value by always returning a tuple.
Args:
int_or_tuple: A list of 2 ints, a single int or a tf.TensorShape.
Returns:
A tuple with 2 values.
Raises:
ValueError: If `int_or_tuple` it not well formed.
"""
if isinstance(int_or_tuple, (list, tuple)):
if len(int_or_tuple) != 2:
raise ValueError('Must be a list with 2 elements: %s' % int_or_tuple)
return int(int_or_tuple[0]), int(int_or_tuple[1])
if isinstance(int_or_tuple, int):
return int(int_or_tuple), int(int_or_tuple)
if isinstance(int_or_tuple, tf.TensorShape):
if len(int_or_tuple) == 2:
return int_or_tuple[0], int_or_tuple[1]
raise ValueError('Must be an int, a list with 2 elements or a TensorShape of '
'length 2')
@scopes.add_arg_scope
def conv2d(inputs,
           num_filters_out,
           kernel_size,
           stride=1,
           padding='SAME',
           activation=tf.nn.relu,
           stddev=0.01,
           bias=0.0,
           weight_decay=0,
           batch_norm_params=None,
           is_training=True,
           trainable=True,
           restore=True,
           scope=None,
           reuse=None):
  """Adds a 2D convolution followed by an optional batch_norm layer.

  conv2d creates a variable called 'weights', representing the convolutional
  kernel, that is convolved with the input. If `batch_norm_params` is None, a
  second variable called 'biases' is added to the result of the convolution
  operation.

  Args:
    inputs: a tensor of size [batch_size, height, width, channels].
    num_filters_out: the number of output filters.
    kernel_size: a list of length 2: [kernel_height, kernel_width] of
      of the filters. Can be an int if both values are the same.
    stride: a list of length 2: [stride_height, stride_width].
      Can be an int if both strides are the same. Note that presently
      both strides must have the same value.
    padding: one of 'VALID' or 'SAME'.
    activation: activation function.
    stddev: standard deviation of the truncated guassian weight distribution.
    bias: the initial value of the biases.
    weight_decay: the weight decay.
    batch_norm_params: parameters for the batch_norm. If is None don't use it.
    is_training: whether or not the model is in training mode.
    trainable: whether or not the variables should be trainable or not.
    restore: whether or not the variables should be marked for restore.
    scope: Optional scope for variable_scope.
    reuse: whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.

  Returns:
    a tensor representing the output of the operation.
  """
  with tf.variable_scope(scope, 'Conv', [inputs], reuse=reuse):
    kernel_h, kernel_w = _two_element_tuple(kernel_size)
    stride_h, stride_w = _two_element_tuple(stride)
    num_filters_in = inputs.get_shape()[-1]
    weights_shape = [kernel_h, kernel_w,
                     num_filters_in, num_filters_out]
    weights_initializer = tf.truncated_normal_initializer(stddev=stddev)
    # Optional L2 weight decay, registered through the losses helper.
    l2_regularizer = None
    if weight_decay and weight_decay > 0:
      l2_regularizer = losses.l2_regularizer(weight_decay)
    weights = variables.variable('weights',
                                 shape=weights_shape,
                                 initializer=weights_initializer,
                                 regularizer=l2_regularizer,
                                 trainable=trainable,
                                 restore=restore)
    conv = tf.nn.conv2d(inputs, weights, [1, stride_h, stride_w, 1],
                        padding=padding)
    if batch_norm_params is not None:
      # batch_norm provides the shift (beta), so no 'biases' variable
      # is created on this path.
      with scopes.arg_scope([batch_norm], is_training=is_training,
                            trainable=trainable, restore=restore):
        outputs = batch_norm(conv, **batch_norm_params)
    else:
      bias_shape = [num_filters_out,]
      bias_initializer = tf.constant_initializer(bias)
      biases = variables.variable('biases',
                                  shape=bias_shape,
                                  initializer=bias_initializer,
                                  trainable=trainable,
                                  restore=restore)
      outputs = tf.nn.bias_add(conv, biases)
    if activation:
      outputs = activation(outputs)
    return outputs
@scopes.add_arg_scope
def fc(inputs,
       num_units_out,
       activation=tf.nn.relu,
       stddev=0.01,
       bias=0.0,
       weight_decay=0,
       batch_norm_params=None,
       is_training=True,
       trainable=True,
       restore=True,
       scope=None,
       reuse=None):
  """Adds a fully connected layer followed by an optional batch_norm layer.

  A 'weights' variable of shape [num_units_in, num_units_out] multiplies the
  input. When `batch_norm_params` is None a 'biases' variable is added to the
  product; otherwise batch_norm is applied to the product instead.

  Args:
    inputs: a [B x N] tensor; B is the batch size, N the number of input units.
    num_units_out: the number of output units in the layer.
    activation: activation function (None disables it).
    stddev: stddev of the truncated-normal weight initializer.
    bias: the initial value of the biases.
    weight_decay: L2 weight-decay coefficient; <= 0 disables regularization.
    batch_norm_params: dict of batch_norm parameters, or None to skip it.
    is_training: whether the model is in training mode (forwarded to
      batch_norm).
    trainable: whether the created variables should be trainable.
    restore: whether the variables should be marked for restore.
    scope: Optional scope for variable_scope.
    reuse: whether to reuse the layer's scope; requires `scope` to be set.

  Returns:
    the tensor variable representing the result of the series of operations.
  """
  with tf.variable_scope(scope, 'FC', [inputs], reuse=reuse):
    num_units_in = inputs.get_shape()[1]
    regularizer = losses.l2_regularizer(weight_decay) if (
        weight_decay and weight_decay > 0) else None
    weights = variables.variable(
        'weights',
        shape=[num_units_in, num_units_out],
        initializer=tf.truncated_normal_initializer(stddev=stddev),
        regularizer=regularizer,
        trainable=trainable,
        restore=restore)
    if batch_norm_params is None:
      # Plain affine layer: x @ W + b.
      biases = variables.variable(
          'biases',
          shape=[num_units_out],
          initializer=tf.constant_initializer(bias),
          trainable=trainable,
          restore=restore)
      outputs = tf.nn.xw_plus_b(inputs, weights, biases)
    else:
      # Batch norm supplies the shift, so no bias variable is created.
      with scopes.arg_scope([batch_norm], is_training=is_training,
                            trainable=trainable, restore=restore):
        outputs = batch_norm(tf.matmul(inputs, weights), **batch_norm_params)
    return activation(outputs) if activation else outputs
def one_hot_encoding(labels, num_classes, scope=None):
  """Transform numeric labels into onehot_labels.

  Args:
    labels: [batch_size] target labels.
    num_classes: total number of classes.
    scope: Optional scope for name_scope.
  Returns:
    one hot encoding of the labels, shape [batch_size, num_classes].
  """
  with tf.name_scope(scope, 'OneHotEncoding', [labels]):
    batch_size = labels.get_shape()[0]
    # Pair each row index with its label to form sparse (row, class) coords.
    row_ids = tf.expand_dims(tf.range(0, batch_size), 1)
    label_col = tf.cast(tf.expand_dims(labels, 1), row_ids.dtype)
    coords = tf.concat(axis=1, values=[row_ids, label_col])
    # Scatter 1.0 at each coordinate on a background of 0.0.
    onehot_labels = tf.sparse_to_dense(
        coords, tf.stack([batch_size, num_classes]), 1.0, 0.0)
    onehot_labels.set_shape([batch_size, num_classes])
    return onehot_labels
@scopes.add_arg_scope
def max_pool(inputs, kernel_size, stride=2, padding='VALID', scope=None):
  """Adds a Max Pooling layer.

  Pooling is applied spatially only (never over batch or depth).

  Args:
    inputs: a tensor of size [batch_size, height, width, depth].
    kernel_size: [kernel_height, kernel_width], or a single int for both.
    stride: [stride_height, stride_width], or a single int for both.
    padding: the padding method, either 'VALID' or 'SAME'.
    scope: Optional scope for name_scope.
  Returns:
    a tensor representing the results of the pooling operation.
  Raises:
    ValueError: if 'kernel_size' is not a 2-D list
  """
  with tf.name_scope(scope, 'MaxPool', [inputs]):
    k_h, k_w = _two_element_tuple(kernel_size)
    s_h, s_w = _two_element_tuple(stride)
    return tf.nn.max_pool(inputs,
                          ksize=[1, k_h, k_w, 1],
                          strides=[1, s_h, s_w, 1],
                          padding=padding)
@scopes.add_arg_scope
def avg_pool(inputs, kernel_size, stride=2, padding='VALID', scope=None):
  """Adds a Avg Pooling layer.

  Pooling is applied spatially only (never over batch or depth).

  Args:
    inputs: a tensor of size [batch_size, height, width, depth].
    kernel_size: [kernel_height, kernel_width], or a single int for both.
    stride: [stride_height, stride_width], or a single int for both.
    padding: the padding method, either 'VALID' or 'SAME'.
    scope: Optional scope for name_scope.
  Returns:
    a tensor representing the results of the pooling operation.
  """
  with tf.name_scope(scope, 'AvgPool', [inputs]):
    k_h, k_w = _two_element_tuple(kernel_size)
    s_h, s_w = _two_element_tuple(stride)
    return tf.nn.avg_pool(inputs,
                          ksize=[1, k_h, k_w, 1],
                          strides=[1, s_h, s_w, 1],
                          padding=padding)
@scopes.add_arg_scope
def dropout(inputs, keep_prob=0.5, is_training=True, scope=None):
  """Returns a dropout layer applied to the input.

  Args:
    inputs: the tensor to pass to the Dropout layer.
    keep_prob: the probability of keeping each input unit.
    is_training: whether or not the model is in training mode. If so, dropout
      is applied and values scaled. Otherwise, inputs is returned unchanged.
    scope: Optional scope for name_scope.
  Returns:
    a tensor representing the output of the operation.
  """
  # Dropout is a no-op at inference time (and for a degenerate keep_prob).
  if not (is_training and keep_prob > 0):
    return inputs
  with tf.name_scope(scope, 'Dropout', [inputs]):
    return tf.nn.dropout(inputs, keep_prob)
def flatten(inputs, scope=None):
  """Flattens the input while maintaining the batch_size.

  Assumes that the first dimension represents the batch.

  Args:
    inputs: a tensor of size [batch_size, ...].
    scope: Optional scope for name_scope.
  Returns:
    a flattened tensor with shape [batch_size, k].
  Raises:
    ValueError: if inputs.shape is wrong.
  """
  if len(inputs.get_shape()) < 2:
    # Fixed the garbled error message ("must be have a least").
    raise ValueError('Inputs must have at least 2 dimensions')
  # Number of elements per example; static shape must be fully defined
  # past the batch dimension for num_elements() to be usable here.
  dims = inputs.get_shape()[1:]
  k = dims.num_elements()
  with tf.name_scope(scope, 'Flatten', [inputs]):
    return tf.reshape(inputs, [-1, k])
def repeat_op(repetitions, inputs, op, *args, **kwargs):
  """Build a sequential Tower starting from inputs by using an op repeatedly.

  Each application runs in its own auto-numbered scope; e.g.
  repeat_op(3, _, ops.conv2d, 64, [3, 3], scope='conv1') creates
  conv1/Conv, conv1/Conv_1 and conv1/Conv_2.

  Args:
    repetitions: number or repetitions.
    inputs: a tensor of size [batch_size, height, width, channels].
    op: an operation.
    *args: args for the op.
    **kwargs: kwargs for the op.
  Returns:
    a tensor result of applying the operation op, num times.
  Raises:
    ValueError: if the op is unknown or wrong.
  """
  # Pop 'scope' so it is not forwarded to every repetition of `op`.
  scope = kwargs.pop('scope', None)
  with tf.variable_scope(scope, 'RepeatOp', [inputs]):
    net = inputs
    for _ in range(repetitions):
      net = op(net, *args, **kwargs)
    return net
|
beomyeol/models
|
inception/inception/slim/ops.py
|
Python
|
apache-2.0
| 18,781
|
#!/usr/bin/python
__version__='0.2.1'
import argparse
import logging
import os
import os.path
import sys
import requests
import getpass
import ConfigParser
def _persist( export, rcfile ):
f = open( rcfile, 'w' )
logging.debug( "writing to {}".format( rcfile ) )
f.write( "\n".join( export ) )
f.close()
os.chmod( rcfile, 0600 )
logging.info( "saved Swift credentials" )
def sh(creds, auth_version, savepw=False, persist=False ):
    """Print Bourne-shell (sh/ksh/bash/zsh) statements that set Swift env vars.

    Unsets the variables of the *other* auth flavor first, then exports the
    ones matching auth_version. Output goes to stdout so the caller can
    eval it; optionally the same lines are persisted to ~/.swiftrc.

    Args:
        creds: dict with 'account', 'password' and (for v2) 'user' keys.
        auth_version: 'v1' for ST_* variables, anything else for OS_*.
        savepw: v2 only -- also export OS_PASSWORD (insecure).
        persist: write the statements to $HOME/.swiftrc as well.
    """
    export = []
    if auth_version == 'v1':
        # Clear any v2 (OS_*) state before setting v1 (ST_*) variables.
        export.append(
            "unset OS_USERNAME OS_PASSWORD OS_TENANT_NAME OS_AUTH_URL" )
        export.append(
            "unset OS_AUTH_TOKEN OS_STORAGE_URL" )
        export.append( "export ST_USER='{}'".format( creds['account'] ) )
        export.append( "export ST_KEY='{}'".format( creds['password'] ) )
        # v1AuthUrl is a module-level global loaded from the config file
        # in the __main__ block.
        export.append( "export ST_AUTH='{}'".format( v1AuthUrl ) )
    else:
        # Clear v1 state before setting v2 variables.
        export.append(
            "unset ST_USER ST_KEY ST_AUTH" )
        export.append( "export OS_USERNAME='{}'".format( creds['user'] ) )
        export.append(
            "export OS_TENANT_NAME='AUTH_Swift_{}'".format( creds['account'] ) )
        if savepw:
            # Only embedded on explicit request -- see --save-password.
            export.append(
                "export OS_PASSWORD='{}'".format( creds['password'] ) )
        export.append( "export OS_AUTH_URL='{}'".format( v2AuthUrl ) )
    # Python 2 print statement: emit everything on one eval-able line.
    print ";".join( export )
    if persist:
        rcfile = os.environ[ 'HOME' ] + "/.swiftrc"
        logging.debug( "persisting environment variables" )
        _persist( export, rcfile )
def csh(creds, auth_version, savepw=False, persist=False ):
    """Print csh/tcsh statements that set Swift env vars.

    Mirror image of sh() using setenv/unsetenv syntax; persists to
    ~/.swift.cshrc instead of ~/.swiftrc. See sh() for argument details.
    """
    export = []
    if auth_version == 'v1':
        # Clear any v2 (OS_*) state before setting v1 (ST_*) variables.
        export.append(
            "unsetenv OS_USERNAME OS_PASSWORD OS_TENANT_NAME OS_AUTH_URL" )
        export.append(
            "unsetenv OS_AUTH_TOKEN OS_STORAGE_URL" )
        export.append( "setenv ST_USER '{}'".format( creds['account'] ) )
        export.append( "setenv ST_KEY '{}'".format( creds['password'] ) )
        # v1AuthUrl / v2AuthUrl are module globals set in the __main__ block.
        export.append( "setenv ST_AUTH '{}'".format( v1AuthUrl ) )
    else:
        # Clear v1 state before setting v2 variables.
        export.append(
            "unsetenv ST_USER ST_KEY ST_AUTH" )
        export.append( "setenv OS_USERNAME '{}'".format( creds['user'] ) )
        export.append(
            "setenv OS_TENANT_NAME 'AUTH_Swift_{}'".format( creds['account'] ) )
        if savepw:
            # Only embedded on explicit request -- see --save-password.
            export.append(
                "setenv OS_PASSWORD '{}'".format( creds['password'] ) )
        export.append( "setenv OS_AUTH_URL '{}'".format( v2AuthUrl ) )
    # Python 2 print statement: emit everything on one eval-able line.
    print ";".join( export )
    if persist:
        rcfile = os.environ[ 'HOME' ] + "/.swift.cshrc"
        logging.debug( "persisting environment variables" )
        _persist( export, rcfile )
# Dispatch table: maps the shell named on the command line to the function
# that emits environment-variable statements in that shell's syntax
# (Bourne-family -> sh(), csh-family -> csh()). Also serves as the set of
# valid choices for the 'shell' positional argument.
shell_output = {
    'sh': sh,
    'ksh': sh,
    'bash': sh,
    'zsh': sh,
    'csh': csh,
    'tcsh': csh
}
class LocalParser( argparse.ArgumentParser ):
    """ArgumentParser variant with a fixed, short error hint.

    Both the error text and the help output are written to stderr so that
    stdout stays clean for the eval-able shell statements this tool prints.
    """

    def error( self, message ):
        # Ignore argparse's generated message; print a fixed usage hint.
        for line in ( "Error: too few arguments\n",
                      "usage: sw2account lastname_f\n",
                      "use \"sw2account --help\" for full help information\n" ):
            sys.stderr.write( line )
        sys.exit(1)

    def print_help( self ):
        # Route the full help to stderr and terminate successfully.
        self._print_message( self.format_help(), sys.stderr )
        sys.exit(0)
def return_v1_auth( args ):
    """Fetch v1 Swift credentials from the account server and emit them.

    Prompts for the current user's password, GETs <server_url>/<account>
    with HTTP basic auth, and on success hands the returned credentials to
    the shell formatter selected by args.shell. Errors are logged per HTTP
    status; nothing is returned.
    """
    # If server URL is unspecified, look for "SW2_URL" in current environment
    account = args.account
    server_url = args.server_url
    logging.debug(
        'asking {} for credentials for {}'.format( server_url, account )
    )
    if not server_url:
        try:
            server_url = os.environ[ 'SW2_URL' ]
        except KeyError:
            logging.error( "Server URL is unset (not in arguments or SW2_URL)" )
            sys.exit(1)
    # Add account name to URL
    server_url = '/'.join( [ server_url, account ] )
    logging.debug( 'final url is {}'.format( server_url ) )
    # Get user authentication credentials
    user = getpass.getuser()
    passwd = getpass.getpass( 'Enter password for {}: '.format(user) )
    # Get account credentials from server_url
    r = requests.get( server_url, verify = args.verify_ssl, auth=( user, passwd ) )
    if r.status_code == 200:
        creds = r.json()
        logging.debug(
            "got credentials for account {}".format( creds['account'] )
        )
        # NOTE(review): hard-coded placeholder URL overwrites whatever the
        # server returned; looks like leftover debug code -- confirm intent.
        creds['url'] = 'https://tin/some/crap'
        shell_output[ args.shell ](
            creds=creds,
            persist=args.persist,
            auth_version=args.auth_version
        )
    elif r.status_code == 401:
        # Basic-auth credentials rejected by the account server.
        logging.error(
            "invalid username/password supplied to server"
        )
    elif r.status_code == 403:
        # Authenticated, but not allowed to read this account's credentials.
        logging.error(
            "user {} is not permitted to use {} ({})".format(
                user, account, r.status_code
            )
        )
    elif r.status_code == 404:
        # Prefer the server-supplied message; exit(1) if the body has none.
        try:
            message = r.json()['message']
        except KeyError:
            logging.error( "404 returned from server with no message" )
            sys.exit(1)
        logging.error("{} (HTTP{})".format(
            message, r.status_code
        )
        )
    else:
        logging.error(
            "error {} retrieving credentials from server".format(
                r.status_code
            )
        )
def return_v2_auth( args ):
    """Collect v2 (Keystone-style) credentials locally and emit them.

    No server round-trip: the account comes from the command line, the
    username from the login session, and the password from an interactive
    prompt. The assembled dict is handed to the shell formatter selected
    by args.shell.
    """
    creds = {}
    # authentication is done using Swiftstack version 2 authentication
    # requires additional "tenant name" in addition to username and password
    creds['account'] = args.account
    # take username password from currently logged in user
    creds['user'] = getpass.getuser()
    if args.savepw:
        # Warn *before* prompting so the user can still Ctrl-C out.
        logging.warning( "Saving passwords is insecure and not recommended." )
    creds['password'] = getpass.getpass(
        'Enter password for {}: '.format( creds['user'] ) )
    logging.debug(
        "got credentials for account {}".format( creds['account'] )
    )
    if args.savepw:
        logging.debug( 'saving password in rc and environment' )
    shell_output[ args.shell ](
        creds=creds,
        persist=args.persist,
        savepw=args.savepw,
        auth_version=args.auth_version
    )
def add_common_args( aparser ):
    """Attach the arguments shared by the v1 and v2 subcommands.

    Args:
        aparser: the (sub)parser to extend in place.
    """
    # Positional arguments: which shell syntax to emit, and which account.
    aparser.add_argument( 'shell',
                          help = "format output for shell <shell>",
                          choices = shell_output.keys() )
    aparser.add_argument( 'account',
                          help = "retrieve credentials for account <account>" )
    # Configuration selection.
    aparser.add_argument( '--config',
                          default = "/etc/sw2account.cfg",
                          help = "configuration file to use (default=/etc/sw2account.cfg)" )
    aparser.add_argument( '--stack',
                          default = "default",
                          help = "stack name to authentication against (see configfile)" )
    # Paired flags writing the same dest: whichever appears last wins.
    aparser.add_argument( '--save', '--persist',
                          dest = 'persist',
                          action = 'store_true',
                          help = "write credentials to $HOME/.swiftrc" )
    aparser.add_argument( '--no-save', '--no-persist',
                          dest = 'persist',
                          action = 'store_false',
                          help = "do not write credentials to $HOME/.swiftrc" )
    aparser.add_argument( '--version', '-v',
                          help = "show script version",
                          action = 'version',
                          version = "sw2account version {}".format( __version__) )
    aparser.add_argument( '--debug',
                          action = "store_true",
                          help = "log level for client" )
if __name__ == "__main__":
# Get the config first
# Need to prime the pump to find defaults
tparse = argparse.ArgumentParser()
tparse.add_argument(
'--config',
default = "/etc/sw2account.cfg",
help = "configuration file to use (default=/etc/sw2account.cfg)"
)
tparse.add_argument(
'--stack',
default = "default",
help = "stack name to authenticate against (see configfile)"
)
args, unknown = tparse.parse_known_args()
# Read config file with defaults
if not os.path.isfile( args.config ):
logging.error( "missing config file %s", args.config )
sys.exit(1)
appdefaults = ConfigParser.ConfigParser()
try:
appdefaults.read( args.config )
logging.debug( "reading config from %s", args.config )
except ConfigParser.ParsingError:
logging.error(
"error reading configuration file %s - check format", args.config
)
sys.exit(1)
try:
v1AuthUrl = appdefaults.get( args.stack, 'v1AuthUrl' )
v2AuthUrl = appdefaults.get( args.stack, 'v2AuthUrl' )
auth_version_default = appdefaults.get(
args.stack, 'auth_version_default' )
except ConfigParser.NoSectionError:
logging.error( "Stack '%s' not configured in configfile %s",
args.stack, args.config )
sys.exit(1)
except ConfigParser.NoOptionError:
logging.error(
"Configfile %s does not contain correct entries for stack '%s'",
args.config, args.stack
)
sys.exit(1)
# Fix argument order so that v1/v2 is first argument
try:
if sys.argv[1] not in ['v1','v2'] and (
'-h' not in sys.argv or '--help' not in sys.argv ):
if 'v1' in sys.argv:
logging.debug( "reordering arguments to put v1 arg at head" )
sys.argv.remove('v1')
sys.argv.insert(1, 'v1')
elif 'v2' in sys.argv:
logging.debug( "reordering arguments to put v2 arg at head" )
sys.argv.remove('v2')
sys.argv.insert(1, 'v2')
else:
logging.debug( "setting default version" )
sys.argv.insert(1, auth_version_default)
except IndexError:
pass
#parser = LocalParser()
parser = argparse.ArgumentParser()
subparser = parser.add_subparsers(
dest = "auth_version", help='authentication version to use'
)
v1parser = subparser.add_parser('v1', help='use version 1 authentication')
add_common_args( v1parser )
v1parser.add_argument(
'--server-url',
help = "URL of server with account data"
)
v1parser.add_argument(
'--no-verify-ssl',
action = "store_false",
dest = "verify_ssl",
help = "verify ssl certification",
)
v1parser.add_argument(
'--verify-ssl',
action = "store_true",
dest = "verify_ssl",
help = "verify ssl certification",
)
v2parser = subparser.add_parser('v2', help='use version 2 authentication')
add_common_args( v2parser )
v2parser.add_argument(
'--save-password',
action = "store_true",
dest = "savepw",
help = "save password in rc and environment- not recommended",
)
v2parser.add_argument(
'--no-save-password',
action = "store_false",
dest = "savepw",
help = "(default) do not save password in rc and environment- recommended",
)
args = parser.parse_args()
if args.debug:
logging.basicConfig( level=logging.DEBUG )
logging.debug( 'arguments: %s', args )
if args.shell in [ 'bash', 'ksh', 'sh', 'zsh' ]:
rcfile = os.environ[ 'HOME' ] + "/.swiftrc"
elif args.shell in [ 'tcsh', 'csh' ]:
rcfile = os.environ[ 'HOME' ] + "/.swift.cshrc"
logging.debug( 'checking for rcfile: {}'.format(rcfile))
if not os.path.isfile( rcfile ):
parser.set_defaults( persist=True )
logging.debug( "no rcfile: set persist default to true" )
else:
parser.set_defaults( persist=False )
logging.debug( "found rcfile: set persist default to false" )
args = parser.parse_args()
logging.debug( "persist is set to {} after second wash".format(
args.persist
))
if args.auth_version == 'v1':
return_v1_auth( args )
elif args.auth_version == 'v2':
return_v2_auth( args )
|
FredHutch/swift-switch-account
|
sw2account/sw2account.py
|
Python
|
apache-2.0
| 11,887
|
# Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ResNet model family."""
import functools
import haiku as hk
import jax
import jax.numpy as jnp
from nfnets import base
class ResNet(hk.Module):
  """ResNetv2 Models."""

  # Bottleneck-block counts for the four stages of each supported variant.
  variant_dict = {'ResNet50': {'depth': [3, 4, 6, 3]},
                  'ResNet101': {'depth': [3, 4, 23, 3]},
                  'ResNet152': {'depth': [3, 8, 36, 3]},
                  'ResNet200': {'depth': [3, 24, 36, 3]},
                  'ResNet288': {'depth': [24, 24, 24, 24]},
                  'ResNet600': {'depth': [50, 50, 50, 50]},
                  }

  def __init__(self, width, num_classes,
               variant='ResNet50',
               which_norm='BatchNorm', norm_kwargs=None,
               activation='relu', drop_rate=0.0,
               fc_init=jnp.zeros, conv_kwargs=None,
               preactivation=True, use_se=False, se_ratio=0.25,
               name='ResNet'):
    """Builds the network.

    Args:
      width: channel multiplier; stage widths are [64,128,256,512] * width
        (stem is 16 * width).
      num_classes: size of the final linear classifier.
      variant: key into variant_dict selecting the depth pattern.
      which_norm: name of a haiku normalizer class (e.g. 'BatchNorm').
      norm_kwargs: optional kwargs partially applied to the normalizer.
      activation: name of a jax.nn activation function.
      drop_rate: dropout rate applied to the pooled features at train time.
      fc_init: initializer for the classifier weights (zeros by default).
      conv_kwargs: optional kwargs partially applied to hk.Conv2D.
      preactivation: if True use pre-activation (v2) blocks, else v1.
      use_se: whether blocks apply Squeeze-and-Excite gating.
      se_ratio: SE bottleneck ratio forwarded to the blocks.
      name: haiku module name.
    """
    super().__init__(name=name)
    self.width = width
    self.num_classes = num_classes
    self.variant = variant
    self.depth_pattern = self.variant_dict[variant]['depth']
    self.activation = getattr(jax.nn, activation)
    self.drop_rate = drop_rate
    # Resolve the normalizer/conv constructors, baking in any fixed kwargs.
    self.which_norm = getattr(hk, which_norm)
    if norm_kwargs is not None:
      self.which_norm = functools.partial(self.which_norm, **norm_kwargs)
    if conv_kwargs is not None:
      self.which_conv = functools.partial(hk.Conv2D, **conv_kwargs)
    else:
      self.which_conv = hk.Conv2D
    self.preactivation = preactivation
    # Stem
    self.initial_conv = self.which_conv(16 * self.width, kernel_shape=7,
                                        stride=2, padding='SAME',
                                        with_bias=False, name='initial_conv')
    # v1 (post-activation) networks normalize right after the stem conv.
    if not self.preactivation:
      self.initial_bn = self.which_norm(name='initial_bn')
    which_block = ResBlockV2 if self.preactivation else ResBlockV1
    # Body
    # Only the first block of each stage downsamples / projects.
    self.blocks = []
    for multiplier, blocks_per_stage, stride in zip([64, 128, 256, 512],
                                                    self.depth_pattern,
                                                    [1, 2, 2, 2]):
      for block_index in range(blocks_per_stage):
        self.blocks += [which_block(multiplier * self.width,
                                    use_projection=block_index == 0,
                                    stride=stride if block_index == 0 else 1,
                                    activation=self.activation,
                                    which_norm=self.which_norm,
                                    which_conv=self.which_conv,
                                    use_se=use_se,
                                    se_ratio=se_ratio)]
    # Head
    self.final_bn = self.which_norm(name='final_bn')
    self.fc = hk.Linear(self.num_classes, w_init=fc_init, with_bias=True)

  def __call__(self, x, is_training, test_local_stats=False,
               return_metrics=False):
    """Return the output of the final layer without any [log-]softmax."""
    # Accumulates 'pool', 'logits', and optionally per-block signal metrics.
    outputs = {}
    # Stem
    out = self.initial_conv(x)
    if not self.preactivation:
      out = self.activation(self.initial_bn(out, is_training, test_local_stats))
    out = hk.max_pool(out, window_shape=(1, 3, 3, 1),
                      strides=(1, 2, 2, 1), padding='SAME')
    if return_metrics:
      outputs.update(base.signal_metrics(out, 0))
    # Blocks
    for i, block in enumerate(self.blocks):
      out, res_var = block(out, is_training, test_local_stats)
      if return_metrics:
        outputs.update(base.signal_metrics(out, i + 1))
        outputs[f'res_avg_var_{i}'] = res_var
    # v2 networks defer the final norm + activation to after the last block.
    if self.preactivation:
      out = self.activation(self.final_bn(out, is_training, test_local_stats))
    # Pool, dropout, classify
    pool = jnp.mean(out, axis=[1, 2])
    # Return pool before dropout in case we want to regularize it separately.
    outputs['pool'] = pool
    # Optionally apply dropout
    if self.drop_rate > 0.0 and is_training:
      pool = hk.dropout(hk.next_rng_key(), self.drop_rate, pool)
    outputs['logits'] = self.fc(pool)
    return outputs
class ResBlockV2(hk.Module):
  """ResNet preac block, 1x1->3x3->1x1 with strides and shortcut downsample."""

  def __init__(self, out_ch, stride=1, use_projection=False,
               activation=jax.nn.relu, which_norm=hk.BatchNorm,
               which_conv=hk.Conv2D, use_se=False, se_ratio=0.25,
               name=None):
    """Builds a pre-activation bottleneck block.

    Args:
      out_ch: number of output channels; the bottleneck width is out_ch // 4.
      stride: stride of the middle 3x3 conv and of the shortcut projection.
      use_projection: if True, a 1x1 conv projects the shortcut (first block
        of a stage, where channels and/or resolution change).
      activation: elementwise nonlinearity.
      which_norm: normalizer constructor.
      which_conv: convolution constructor.
      use_se: whether to gate the residual with Squeeze-and-Excite.
      se_ratio: SE bottleneck ratio.
      name: haiku module name.
    """
    super().__init__(name=name)
    self.out_ch = out_ch
    self.stride = stride
    self.use_projection = use_projection
    self.activation = activation
    self.which_norm = which_norm
    self.which_conv = which_conv
    self.use_se = use_se
    self.se_ratio = se_ratio
    # Bottleneck: squeeze to out_ch/4, 3x3 conv, expand back to out_ch.
    self.width = self.out_ch // 4
    self.bn0 = which_norm(name='bn0')
    self.conv0 = which_conv(self.width, kernel_shape=1, with_bias=False,
                            padding='SAME', name='conv0')
    self.bn1 = which_norm(name='bn1')
    self.conv1 = which_conv(self.width, stride=self.stride,
                            kernel_shape=3, with_bias=False,
                            padding='SAME', name='conv1')
    self.bn2 = which_norm(name='bn2')
    self.conv2 = which_conv(self.out_ch, kernel_shape=1, with_bias=False,
                            padding='SAME', name='conv2')
    if self.use_projection:
      self.conv_shortcut = which_conv(self.out_ch, stride=stride,
                                      kernel_shape=1, with_bias=False,
                                      padding='SAME', name='conv_shortcut')
    if self.use_se:
      self.se = base.SqueezeExcite(self.out_ch, self.out_ch, self.se_ratio)

  def __call__(self, x, is_training, test_local_stats):
    bn_args = (is_training, test_local_stats)
    # Pre-activation: norm + nonlinearity precede each conv, and the
    # projected shortcut branches off the *pre-activated* input.
    out = self.activation(self.bn0(x, *bn_args))
    if self.use_projection:
      shortcut = self.conv_shortcut(out)
    else:
      shortcut = x
    out = self.conv0(out)
    out = self.conv1(self.activation(self.bn1(out, *bn_args)))
    out = self.conv2(self.activation(self.bn2(out, *bn_args)))
    if self.use_se:
      out = self.se(out) * out
    # Average variance of the residual branch (over batch and space),
    # reported alongside the output for signal-propagation metrics.
    res_avg_var = jnp.mean(jnp.var(out, axis=[0, 1, 2]))
    return out + shortcut, res_avg_var
class ResBlockV1(ResBlockV2):
  """Post-Ac Residual Block."""

  def __call__(self, x, is_training, test_local_stats):
    # Post-activation ordering: conv -> norm -> activation, with the final
    # activation applied after the residual addition (classic ResNet v1).
    bn_args = (is_training, test_local_stats)
    if self.use_projection:
      shortcut = self.conv_shortcut(x)
      # Unlike V2, the projected shortcut is normalized too.
      shortcut = self.which_norm(name='shortcut_bn')(shortcut, *bn_args)
    else:
      shortcut = x
    out = self.activation(self.bn0(self.conv0(x), *bn_args))
    out = self.activation(self.bn1(self.conv1(out), *bn_args))
    # No activation on the last conv's output before the addition.
    out = self.bn2(self.conv2(out), *bn_args)
    if self.use_se:
      out = self.se(out) * out
    # Average variance of the residual branch, for signal metrics.
    res_avg_var = jnp.mean(jnp.var(out, axis=[0, 1, 2]))
    return self.activation(out + shortcut), res_avg_var
|
deepmind/deepmind-research
|
nfnets/resnet.py
|
Python
|
apache-2.0
| 7,567
|
# Copyright 2019 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from kubernetes import client
from kfserving import (
constants,
KFServingClient,
V1beta1InferenceService,
V1beta1InferenceServiceSpec,
V1beta1PredictorSpec,
V1beta1TorchServeSpec,
)
from kubernetes.client import V1ResourceRequirements
from ..common.utils import predict
from ..common.utils import KFSERVING_TEST_NAMESPACE
KFServing = KFServingClient(config_file=os.environ.get("KUBECONFIG", "~/.kube/config"))
def test_torchserve_kfserving():
    """E2E: deploy a TorchServe MNIST InferenceService, predict, delete."""
    service_name = "mnist"
    # v1-protocol TorchServe predictor serving the example MNIST classifier.
    predictor = V1beta1PredictorSpec(
        min_replicas=1,
        pytorch=V1beta1TorchServeSpec(
            storage_uri="gs://kfserving-examples/models/torchserve/image_classifier",
            protocol_version="v1",
            resources=V1ResourceRequirements(
                requests={"cpu": "1", "memory": "4Gi"},
                limits={"cpu": "1", "memory": "4Gi"},
            ),
        ),
    )
    isvc = V1beta1InferenceService(
        api_version=constants.KFSERVING_V1BETA1,
        kind=constants.KFSERVING_KIND,
        metadata=client.V1ObjectMeta(
            name=service_name, namespace=KFSERVING_TEST_NAMESPACE
        ),
        spec=V1beta1InferenceServiceSpec(predictor=predictor),
    )
    KFServing.create(isvc)
    # Block until the service reports Ready before sending traffic.
    KFServing.wait_isvc_ready(service_name, namespace=KFSERVING_TEST_NAMESPACE)
    # The sample payload is a handwritten digit '2'.
    res = predict(service_name, "./data/torchserve_input.json")
    assert(res.get("predictions")[0] == 2)
    # NOTE(review): delete is not in try/finally, so a failed assert/predict
    # leaks the InferenceService -- confirm cleanup is handled elsewhere.
    KFServing.delete(service_name, KFSERVING_TEST_NAMESPACE)
|
kubeflow/kfserving-lts
|
test/e2e/predictor/test_torchserve.py
|
Python
|
apache-2.0
| 2,082
|
# -*- coding: utf-8 -*-
# Flask application configuration.
import os
basedir=os.path.abspath(os.path.dirname(__file__))  # absolute path of the project directory

# Enable CSRF protection for WTForms.
WTF_CSRF_ENABLED = True
# NOTE(review): hard-coded placeholder secret -- load from an environment
# variable (and keep it out of source control) before deploying.
SECRET_KEY = 'you-will-guess'
#for database
# SQLALCHEMY_DATABASE_URI = 'mysql:///' + os.path.join(basedir, 'app.db')
# NOTE(review): placeholder DSN with inline credentials; supply the real
# value from the environment, not from the repository.
SQLALCHEMY_DATABASE_URI = "mysql://username:password@server_ip:port/database_name"
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
#for upload pic
UPLOAD_FOLDER = basedir+'/uploads/' #absolute path built from basedir
MAX_CONTENT_LENGTH=2*1024*1024  # reject request bodies larger than 2 MiB
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])#TODO: surface the restriction to the user
#for upload excel
UPLOAD_EXCEL = basedir+'/app/static/add_info/' #absolute path built from basedir
|
SpeedMe/leihuang.org
|
config.py
|
Python
|
apache-2.0
| 665
|
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class ObjectACLsNegativeTest(base.BaseObjectTest):
"""Negative tests of object ACLs"""
credentials = [['operator', CONF.object_storage.operator_role],
['operator_alt', CONF.object_storage.operator_role]]
    @classmethod
    def setup_credentials(cls):
        # Alias the second ('operator_alt') credential set; its token plays
        # the "other, non-authorized user" in these negative ACL tests.
        super(ObjectACLsNegativeTest, cls).setup_credentials()
        cls.os_operator = cls.os_roles_operator_alt
    @classmethod
    def resource_setup(cls):
        # Capture the alt user's auth data once; individual tests inject it
        # into requests to simulate an unauthorized caller.
        super(ObjectACLsNegativeTest, cls).resource_setup()
        cls.test_auth_data = cls.os_operator.auth_provider.auth_data
    def setUp(self):
        # Fresh, randomly named container per test so ACL changes cannot
        # leak between test cases. update_container issues a PUT, which
        # presumably also creates the container -- standard Swift semantics.
        super(ObjectACLsNegativeTest, self).setUp()
        self.container_name = data_utils.rand_name(name='TestContainer')
        self.container_client.update_container(self.container_name)
    @classmethod
    def resource_cleanup(cls):
        # Remove containers created by the tests before the base cleanup
        # tears down credentials/clients.
        cls.delete_containers()
        super(ObjectACLsNegativeTest, cls).resource_cleanup()
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('af587587-0c24-4e15-9822-8352ce711013')
    def test_write_object_without_using_creds(self):
        """Test writing object without using credentials"""
        # trying to create object with empty headers
        # X-Auth-Token is not provided
        object_name = data_utils.rand_name(name='Object')
        # auth_data=None strips the token from the next request only.
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=None
        )
        self.assertRaises(lib_exc.Unauthorized,
                          self.object_client.create_object,
                          self.container_name, object_name, 'data', headers={})
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('af85af0b-a025-4e72-a90e-121babf55720')
    def test_delete_object_without_using_creds(self):
        """Test deleting object without using credentials"""
        # create object (with valid credentials) so there is something
        # to attempt deleting.
        object_name = data_utils.rand_name(name='Object')
        self.object_client.create_object(self.container_name, object_name,
                                         'data')
        # trying to delete object with empty headers
        # X-Auth-Token is not provided
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=None
        )
        self.assertRaises(lib_exc.Unauthorized,
                          self.object_client.delete_object,
                          self.container_name, object_name)
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('63d84e37-55a6-42e2-9e5f-276e60e26a00')
    def test_write_object_with_non_authorized_user(self):
        """Test writing object with non-authorized user"""
        # User provided token is forbidden. ACL are not set
        object_name = data_utils.rand_name(name='Object')
        # trying to create object with non-authorized user
        # (valid token for a different tenant -> 403, not 401)
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=self.test_auth_data
        )
        self.assertRaises(lib_exc.Forbidden,
                          self.object_client.create_object,
                          self.container_name, object_name, 'data', headers={})
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('abf63359-be52-4feb-87dd-447689fc77fd')
    def test_read_object_with_non_authorized_user(self):
        """Test reading object with non-authorized user"""
        # User provided token is forbidden. ACL are not set
        object_name = data_utils.rand_name(name='Object')
        # Create the object as the authorized user first.
        resp, _ = self.object_client.create_object(
            self.container_name, object_name, 'data')
        self.assertHeaders(resp, 'Object', 'PUT')
        # trying to get object with non authorized user token
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=self.test_auth_data
        )
        self.assertRaises(lib_exc.Forbidden,
                          self.object_client.get_object,
                          self.container_name, object_name)
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('7343ac3d-cfed-4198-9bb0-00149741a492')
    def test_delete_object_with_non_authorized_user(self):
        """Test deleting object with non-authorized user"""
        # User provided token is forbidden. ACL are not set
        object_name = data_utils.rand_name(name='Object')
        # Create the object as the authorized user first.
        resp, _ = self.object_client.create_object(
            self.container_name, object_name, 'data')
        self.assertHeaders(resp, 'Object', 'PUT')
        # trying to delete object with non-authorized user token
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=self.test_auth_data
        )
        self.assertRaises(lib_exc.Forbidden,
                          self.object_client.delete_object,
                          self.container_name, object_name)
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('9ed01334-01e9-41ea-87ea-e6f465582823')
    def test_read_object_without_rights(self):
        """Test reading object without rights"""
        # update X-Container-Read metadata ACL: grant read only to a
        # tenant:user pair that matches nobody.
        cont_headers = {'X-Container-Read': 'badtenant:baduser'}
        resp_meta, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                self.container_name, create_update_metadata=cont_headers,
                create_update_metadata_prefix=''))
        self.assertHeaders(resp_meta, 'Container', 'POST')
        # create object
        object_name = data_utils.rand_name(name='Object')
        resp, _ = self.object_client.create_object(self.container_name,
                                                   object_name, 'data')
        self.assertHeaders(resp, 'Object', 'PUT')
        # Trying to read the object without rights
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=self.test_auth_data
        )
        self.assertRaises(lib_exc.Forbidden,
                          self.object_client.get_object,
                          self.container_name, object_name)
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('a3a585a7-d8cf-4b65-a1a0-edc2b1204f85')
    def test_write_object_without_rights(self):
        """Test writing object without rights"""
        # update X-Container-Write metadata ACL: grant write only to a
        # tenant:user pair that matches nobody.
        cont_headers = {'X-Container-Write': 'badtenant:baduser'}
        resp_meta, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                self.container_name, create_update_metadata=cont_headers,
                create_update_metadata_prefix=''))
        self.assertHeaders(resp_meta, 'Container', 'POST')
        # Trying to write the object without rights
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=self.test_auth_data
        )
        object_name = data_utils.rand_name(name='Object')
        self.assertRaises(lib_exc.Forbidden,
                          self.object_client.create_object,
                          self.container_name,
                          object_name, 'data', headers={})
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('8ba512ad-aa6e-444e-b882-2906a0ea2052')
    def test_write_object_without_write_rights(self):
        """Test writing object without write rights

        The alternate user is granted read access but the write ACL is
        empty, so a PUT by that user must fail with 403 Forbidden.
        """
        # update X-Container-Read and X-Container-Write metadata ACL
        tenant_name = self.os_operator.credentials.tenant_name
        username = self.os_operator.credentials.username
        cont_headers = {'X-Container-Read':
                        tenant_name + ':' + username,
                        'X-Container-Write': ''}
        resp_meta, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                self.container_name, create_update_metadata=cont_headers,
                create_update_metadata_prefix=''))
        self.assertHeaders(resp_meta, 'Container', 'POST')
        # Trying to write the object without write rights
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=self.test_auth_data
        )
        object_name = data_utils.rand_name(name='Object')
        self.assertRaises(lib_exc.Forbidden,
                          self.object_client.create_object,
                          self.container_name,
                          object_name, 'data', headers={})
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('b4e366f8-f185-47ab-b789-df4416f9ecdb')
    def test_delete_object_without_write_rights(self):
        """Test deleting object without write rights

        The alternate user is granted read access but the write ACL is
        empty, so a DELETE by that user must fail with 403 Forbidden.
        """
        # update X-Container-Read and X-Container-Write metadata ACL
        tenant_name = self.os_operator.credentials.tenant_name
        username = self.os_operator.credentials.username
        cont_headers = {'X-Container-Read':
                        tenant_name + ':' + username,
                        'X-Container-Write': ''}
        resp_meta, _ = (
            self.container_client.create_update_or_delete_container_metadata(
                self.container_name, create_update_metadata=cont_headers,
                create_update_metadata_prefix=''))
        self.assertHeaders(resp_meta, 'Container', 'POST')
        # create object
        object_name = data_utils.rand_name(name='Object')
        resp, _ = self.object_client.create_object(self.container_name,
                                                   object_name, 'data')
        self.assertHeaders(resp, 'Object', 'PUT')
        # Trying to delete the object without write rights
        self.object_client.auth_provider.set_alt_auth_data(
            request_part='headers',
            auth_data=self.test_auth_data
        )
        self.assertRaises(lib_exc.Forbidden,
                          self.object_client.delete_object,
                          self.container_name,
                          object_name)
|
openstack/tempest
|
tempest/api/object_storage/test_container_acl_negative.py
|
Python
|
apache-2.0
| 10,958
|
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the data reference resolver."""
import base64
import responses
from drydock_provisioner.statemgmt.design.resolver import ReferenceResolver
class TestClass(object):
    """Exercise ReferenceResolver for file:// and http:// design references."""

    def test_resolve_file_url(self, input_files):
        """Test that the resolver will resolve file URLs."""
        # input_files is a pytest fixture (defined elsewhere) providing a
        # directory of sample design documents -- presumably a py.path
        # object, given the .join() usage; verify in conftest.
        input_file = input_files.join("fullsite.yaml")
        url = 'file://%s' % str(input_file)
        content = ReferenceResolver.resolve_reference(url)
        # Only assert that something was read; content format is opaque here.
        assert len(content) > 0

    @responses.activate
    def test_resolve_http_url(self):
        """Test that the resolver will resolve http URLs."""
        url = 'http://foo.com/test.yaml'
        # Register a mocked GET so no real network traffic occurs.
        responses.add(responses.GET, url)
        ReferenceResolver.resolve_reference(url)
        # Exactly one outbound request, to the exact URL we supplied.
        assert len(responses.calls) == 1
        assert responses.calls[0].request.url == url

    @responses.activate
    def test_resolve_http_basicauth_url(self):
        """Test the resolver will resolve http URLs w/ basic auth."""
        url = 'http://user:pass@foo.com/test.yaml'
        # Expected header value for userinfo-in-URL credentials.
        auth_header = "Basic %s" % base64.b64encode(
            "user:pass".encode('utf-8')).decode('utf-8')
        responses.add(responses.GET, url)
        ReferenceResolver.resolve_reference(url)
        assert len(responses.calls) == 1
        # The resolver must translate URL userinfo into an Authorization
        # header on the outgoing request.
        assert 'Authorization' in responses.calls[0].request.headers
        assert responses.calls[0].request.headers.get(
            'Authorization') == auth_header
|
att-comdev/drydock
|
tests/unit/test_reference_resolver.py
|
Python
|
apache-2.0
| 2,055
|
__author__ = 'ray'
import wave
import numpy as np

# Mix two WAV files sample-by-sample into a third file. The inputs are
# assumed to share sample width, channel count and frame rate -- TODO
# confirm; mixing files with different formats would produce garbage.
wav_1_path = "origin.wav"
wav_2_path = "clap.wav"
wav_out_path = "mixed.wav"
wav_1 = wave.open(wav_1_path, 'rb')
wav_2 = wave.open(wav_2_path, 'rb')
wav_out = wave.open(wav_out_path, 'wb')
len_1 = wav_1.getnframes()
len_2 = wav_2.getnframes()
# The longer input dictates the output parameters (and its tail is kept).
if len_1 > len_2:
    wav_out.setparams(wav_1.getparams())
else:
    wav_out.setparams(wav_2.getparams())
# np.frombuffer replaces the removed np.fromstring; data is 16-bit
# signed PCM.
signal_1 = np.frombuffer(wav_1.readframes(-1), dtype=np.int16)
signal_2 = np.frombuffer(wav_2.readframes(-1), dtype=np.int16)
# BUG FIX: getnframes() counts frames, but the arrays hold one sample
# per channel per frame. Slice by sample counts so multichannel files
# are split at the correct position (the original sliced by frames).
samples_1 = len_1 * wav_1.getnchannels()
samples_2 = len_2 * wav_2.getnchannels()
# Mix the overlapping region by int16 addition (wraps on overflow, as
# before) and append the longer signal's tail unchanged.
if len_1 > len_2:
    signal_out = np.append(signal_1[:samples_2] + signal_2,
                           signal_1[samples_2:]).tobytes()
elif len_2 > len_1:
    signal_out = np.append(signal_1 + signal_2[:samples_1],
                           signal_2[samples_1:]).tobytes()
else:
    signal_out = (signal_1 + signal_2).tobytes()
wav_out.writeframes(signal_out)
wav_1.close()
wav_2.close()
wav_out.close()
print('done!')
|
raybrshen/pattern_recognition
|
noise_detection/tools/mix_wav.py
|
Python
|
apache-2.0
| 864
|
# Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import pytest
from google.rpc import status_pb2
from google.cloud import videointelligence_v1beta2
from google.cloud.videointelligence_v1beta2.proto import video_intelligence_pb2
from google.longrunning import operations_pb2
class MultiCallableStub(object):
    """Stub standing in for the grpc.UnaryUnaryMultiCallable interface.

    Each call is recorded on the owning channel stub; canned responses
    are popped from the channel stub's list. A response that is an
    Exception instance is raised instead of returned.
    """

    def __init__(self, method, channel_stub):
        self.method = method
        self.channel_stub = channel_stub

    def __call__(self, request, timeout=None, metadata=None, credentials=None):
        stub = self.channel_stub
        # Record the call for later assertions.
        stub.requests.append((self.method, request))
        canned = stub.responses.pop() if stub.responses else None
        if isinstance(canned, Exception):
            raise canned
        # Falsy responses are collapsed to None, matching the original.
        return canned or None
class ChannelStub(object):
    """Stub for the grpc.Channel interface.

    Records every request made through unary_unary callables and hands
    out canned responses (popped in LIFO order by the callable stub).
    """

    def __init__(self, responses=None):
        # BUG FIX: the original used a mutable default argument
        # (responses=[]), so every ChannelStub built without an explicit
        # list shared -- and mutated -- the same object across instances.
        # A caller-supplied list is kept by identity, as before.
        self.responses = [] if responses is None else responses
        self.requests = []

    def unary_unary(self,
                    method,
                    request_serializer=None,
                    response_deserializer=None):
        # Serializers are accepted for interface compatibility but unused.
        return MultiCallableStub(method, self)
class CustomException(Exception):
    # Marker exception type available for stubbed error-path responses.
    pass
class TestVideoIntelligenceServiceClient(object):
    """Unit tests driving the client against a stubbed gRPC channel."""

    def test_annotate_video(self):
        # Setup Expected Response: a completed long-running operation
        # wrapping an empty AnnotateVideoResponse.
        expected_response = {}
        expected_response = video_intelligence_pb2.AnnotateVideoResponse(
            **expected_response)
        operation = operations_pb2.Operation(
            name='operations/test_annotate_video', done=True)
        operation.response.Pack(expected_response)

        # Mock the API response
        channel = ChannelStub(responses=[operation])
        client = videointelligence_v1beta2.VideoIntelligenceServiceClient(
            channel=channel)

        response = client.annotate_video()
        # result() unpacks the packed response from the finished operation.
        result = response.result()
        assert expected_response == result

        # Exactly one RPC was issued, with a default (empty) request.
        assert len(channel.requests) == 1
        expected_request = video_intelligence_pb2.AnnotateVideoRequest()
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request

    def test_annotate_video_exception(self):
        # Setup Response: a finished operation carrying an error status.
        error = status_pb2.Status()
        operation = operations_pb2.Operation(
            name='operations/test_annotate_video_exception', done=True)
        operation.error.CopyFrom(error)

        # Mock the API response
        channel = ChannelStub(responses=[operation])
        client = videointelligence_v1beta2.VideoIntelligenceServiceClient(
            channel=channel)

        response = client.annotate_video()
        # The operation error surfaces via exception() rather than result().
        exception = response.exception()
        assert exception.errors[0] == error
|
tseaver/gcloud-python
|
videointelligence/tests/unit/gapic/v1beta2/test_video_intelligence_service_client_v1beta2.py
|
Python
|
apache-2.0
| 3,394
|
import asyncio
from functools import partial
class AsyncWrapper:
    """Proxy that runs a wrapped instance's blocking methods in an executor.

    Attribute lookups that miss on the wrapper fall through to
    ``target_instance``; callables fetched that way come back as coroutine
    functions that run the underlying method via ``loop.run_in_executor``
    and return its result.
    """

    def __init__(self, target_instance, executor=None):
        # executor=None uses the event loop's default thread pool.
        self._target_inst = target_instance
        self._executor = executor

    def __getattribute__(self, name):
        # The wrapper's own attributes win; anything else is resolved on
        # the wrapped instance and returned as an async callable.
        try:
            return super().__getattribute__(name)
        except AttributeError:
            method = self._target_inst.__getattribute__(name)
            return partial(self._async_wrapper, method)

    async def _async_wrapper(self, method, *args, **kwargs):
        # BUG FIX: the original returned the run_in_executor() future
        # without awaiting it, so callers received a pending Future
        # rather than the method's result. It also captured the loop
        # eagerly via the deprecated get_event_loop() at construction
        # time, which can bind the wrong loop; resolve the running loop
        # here instead, where a loop is guaranteed to exist.
        call = partial(method, *args, **kwargs)
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(self._executor, call)
|
KeepSafe/translation-real-time-validaton
|
notifier/executor.py
|
Python
|
apache-2.0
| 708
|
# Copyright 2018 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Module containing methods needed to load skill
data such as dialogs, intents and regular expressions.
"""
from os import walk
from os.path import splitext, join
import re
from mycroft.messagebus.message import Message
def load_vocab_from_file(path, vocab_type, bus):
    """Load Mycroft vocabulary from file
    The vocab is sent to the intent handler using the message bus

    Each non-comment line has the form ``entity|alias1|alias2...``; the
    first field registers as the keyword, the rest as its aliases.

    Args:
        path: path to vocabulary file (*.voc); other extensions are ignored
        vocab_type: keyword name
        bus: Mycroft messagebus connection
    """
    if path.endswith('.voc'):
        with open(path, 'r') as voc_file:
            for line in voc_file.readlines():
                # '#' lines are comments in .voc files.
                if line.startswith("#"):
                    continue
                parts = line.strip().split("|")
                entity = parts[0]
                bus.emit(Message("register_vocab", {
                    'start': entity, 'end': vocab_type
                }))
                # Remaining fields are registered as aliases of the entity.
                for alias in parts[1:]:
                    bus.emit(Message("register_vocab", {
                        'start': alias, 'end': vocab_type, 'alias_of': entity
                    }))
def load_regex_from_file(path, bus, skill_id):
    """Load regex from file
    The regex is sent to the intent handler using the message bus

    Args:
        path: path to regex file (*.rx); other extensions are ignored
        bus: Mycroft messagebus connection
        skill_id (str): skill identifier, inserted into named match groups
    """
    if path.endswith('.rx'):
        with open(path, 'r') as reg_file:
            for line in reg_file.readlines():
                # '#' lines are comments in .rx files.
                if line.startswith("#"):
                    continue
                # Munge once and reuse it (the original munged the line
                # twice). re.compile() validates the pattern and raises
                # re.error before a broken regex is put on the bus.
                munged = munge_regex(line.strip(), skill_id)
                re.compile(munged)
                bus.emit(
                    Message("register_vocab", {'regex': munged}))
def load_vocabulary(basedir, bus, skill_id):
    """Load vocabulary from all files in the specified directory.

    Args:
        basedir (str): path of directory to load from (will recurse)
        bus (messagebus emitter): messagebus instance used to send the vocab to
                                  the intent service
        skill_id: skill the data belongs to
    """
    # The skill id prefix is constant for the whole walk; compute it once.
    prefix = to_alnum(skill_id)
    for dirpath, _, filenames in walk(basedir):
        for filename in filenames:
            if not filename.endswith(".voc"):
                continue
            vocab_type = prefix + splitext(filename)[0]
            load_vocab_from_file(join(dirpath, filename), vocab_type, bus)
def load_regex(basedir, bus, skill_id):
    """Load regex from all files in the specified directory.

    Args:
        basedir (str): path of directory to load from
        bus (messagebus emitter): messagebus instance used to send the vocab to
                                  the intent service
        skill_id (str): skill identifier
    """
    for dirpath, _, filenames in walk(basedir):
        # Only .rx files carry regex data; everything else is skipped.
        rx_files = (name for name in filenames if name.endswith(".rx"))
        for name in rx_files:
            load_regex_from_file(join(dirpath, name), bus, skill_id)
def to_alnum(skill_id):
    """Convert a skill id to only alphanumeric characters

    Non alpha-numeric characters are converted to "_"

    Args:
        skill_id (str): identifier to be converted
    Returns:
        (str) String of letters
    """
    safe_chars = []
    for char in str(skill_id):
        safe_chars.append(char if char.isalnum() else '_')
    return ''.join(safe_chars)
def munge_regex(regex, skill_id):
    """Insert skill id as letters into match groups.

    Args:
        regex (str): regex string
        skill_id (str): skill identifier
    Returns:
        (str) munged regex
    """
    # Rewriting every named-group opener "(?P<" to "(?P<<skill letters>"
    # is exactly what the original split/join accomplished.
    return regex.replace('(?P<', '(?P<' + to_alnum(skill_id))
def _munge_keyword_pairs(keywords, skill_id):
    """Prefix each (keyword, keyword) pair with skill_id unless already munged."""
    munged = []
    for kw in keywords:
        if skill_id not in kw[0]:
            munged.append((skill_id + kw[0], skill_id + kw[0]))
        else:
            munged.append(kw)
    return munged


def munge_intent_parser(intent_parser, name, skill_id):
    """Rename intent keywords to make them skill exclusive
    This gives the intent parser an exclusive name in the
    format <skill_id>:<name>. The keywords are given unique
    names in the format <Skill id as letters><Intent name>.

    The function will not munge instances that's already been
    munged

    Args:
        intent_parser: (IntentParser) object to update
        name: (str) Skill name
        skill_id: (int) skill identifier
    """
    # Munge parser name
    if str(skill_id) + ':' not in name:
        intent_parser.name = str(skill_id) + ':' + name
    else:
        intent_parser.name = name

    # Munge keywords: required and optional lists share the same rule,
    # previously duplicated inline; the helper applies it to both.
    skill_id = to_alnum(skill_id)
    intent_parser.requires = _munge_keyword_pairs(intent_parser.requires,
                                                  skill_id)
    intent_parser.optional = _munge_keyword_pairs(intent_parser.optional,
                                                  skill_id)

    # Munge at_least_one keywords: strip any existing prefix first so the
    # operation is idempotent, then re-apply it.
    at_least_one = []
    for i in intent_parser.at_least_one:
        element = [skill_id + e.replace(skill_id, '') for e in i]
        at_least_one.append(tuple(element))
    intent_parser.at_least_one = at_least_one
|
linuxipho/mycroft-core
|
mycroft/skills/skill_data.py
|
Python
|
apache-2.0
| 5,706
|
#!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
from a_sync import block
from paasta_tools.mesos.exceptions import MasterNotAvailableException
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.metrics.metastatus_lib import assert_frameworks_exist
def parse_args():
    """Build and evaluate the command line interface for this check."""
    help_text = (
        "Comma separated list of frameworks to expect.\n"
        "Will fail if any of these are not found"
    )
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--expected",
        "-e",
        dest="expected",
        type=str,
        default="",
        help=help_text,
    )
    options = parser.parse_args()
    return options
def check_mesos_active_frameworks() -> None:
    """Nagios-style check that the expected Mesos frameworks are registered.

    Exits 0 (OK) when every framework named via --expected is present in
    the master state, 2 (CRITICAL) when any is missing or the master is
    unreachable.
    """
    options = parse_args()
    expected = options.expected.split(",")
    master = get_mesos_master()
    try:
        # block() resolves the async master.state coroutine synchronously.
        state = block(master.state)
    except MasterNotAvailableException as e:
        print("CRITICAL: %s" % e.args[0])
        sys.exit(2)
    result = assert_frameworks_exist(state, expected)
    if result.healthy:
        print("OK: " + result.message)
        sys.exit(0)
    else:
        print(result.message)
        sys.exit(2)


if __name__ == "__main__":
    check_mesos_active_frameworks()
|
Yelp/paasta
|
paasta_tools/monitoring/check_mesos_active_frameworks.py
|
Python
|
apache-2.0
| 1,748
|
"""
@author waziz
"""
import logging
import sys
import traceback
import os
import argparse
import numpy as np
from functools import partial
from multiprocessing import Pool
from chisel.decoder import MBR, MAP, consensus
from chisel.util.iotools import read_sampled_derivations, read_block, list_numbered_files
from chisel.decoder.estimates import EmpiricalDistribution
from chisel.smt import groupby, KBestSolution
from chisel.util import scaled_fmap, dict2str
from chisel.util.config import configure
from chisel.util.wmap import WMap
from chisel.util.iotools import smart_ropen, smart_wopen
from chisel.learning.newestimates import py
from chisel.mteval.fast_bleu import DecodingBLEU
import chisel.mteval as mteval
def cmpYLPQ(lhs, rhs):
    """Python 2 cmp-style comparator for (yield, loss, posterior, proxy) tuples.

    Sorts by ascending loss, then descending posterior, then descending
    proxy, with ascending yield as the final tie-break. NOTE: relies on
    the builtin cmp(), so this module is Python 2 only.
    """
    if lhs[1] != rhs[1]:  # loss
        return cmp(lhs[1], rhs[1])
    elif lhs[2] != rhs[2]:  # posterior
        return cmp(rhs[2], lhs[2])
    elif lhs[3] != rhs[3]:  # proxy
        return cmp(rhs[3], lhs[3])
    else:  # yield
        return cmp(lhs[0], rhs[0])
def create_decision_rule_dir(workspace, decision_rule, metric_name=None):
    """Return the output directory for a decision rule, creating it if needed.

    The directory is $workspace/decisions/<rule> or, when a metric name is
    given, $workspace/decisions/<rule>-<metric>.
    """
    if metric_name is None:
        suffix = decision_rule
    else:
        suffix = '{0}-{1}'.format(decision_rule, metric_name)
    output_dir = '{0}/decisions/{1}'.format(workspace, suffix)
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    return output_dir
def make_decisions(job_desc, headers, options):
    """Rank candidate translations for one job by consensus BLEU loss.

    Args:
        job_desc: (jid, path) pair -- job id and path to its samples file
        headers: column-name mapping for the samples file (unused here;
            kept for interface parity with decide_and_save)
        options: parsed CLI options

    Returns:
        list of (yield, loss, posterior, proxy) tuples sorted by cmpYLPQ
        (ascending loss first).
    """
    # this code runs in a Pool, thus we wrap in try/except in order to have more informative exceptions
    jid, path = job_desc
    try:
        derivations, q_wmap, p_wmap = read_sampled_derivations(smart_ropen(path))
        logging.debug('job=%d derivations=%d empdist...', jid, len(derivations))
        # Collapse derivations into a distribution over yields (strings).
        support, posterior, proxy = py(derivations, q_wmap, p_wmap,
                                       get_yield=lambda d: d.tree.projection,
                                       empirical_q=True,
                                       alpha=1.0, beta=1.0)  # TODO: make option
        logging.info('job=%d derivations=%d strings=%d', jid, len(derivations), len(support))
        logging.debug('job=%s consensus...', jid)
        # Consensus decoding: loss of each candidate against the posterior.
        scorer = DecodingBLEU([Dy.leaves for Dy in support], posterior)
        losses = np.array([scorer.loss(Dy.leaves) for Dy in support], float)
        return sorted(zip((Dy.projection for Dy in support), losses, posterior, proxy), cmp=cmpYLPQ)
    except:
        raise Exception(''.join(traceback.format_exception(*sys.exc_info())))
def decide_and_save(job_desc, headers, options, output_dir):
    """Rank one job's candidates and write the ranking to $output_dir/<jid>.gz.

    Returns the top-ranked (yield, loss, posterior, proxy) tuple.
    """
    # this code runs in a Pool, thus we wrap in try/except in order to have more informative exceptions
    jid, path = job_desc
    try:
        # make decisions
        ranking = make_decisions(job_desc, headers, options)
        # write to file if necessary
        with smart_wopen('{0}/{1}.gz'.format(output_dir, jid)) as out:  # TODO: save nbest
            # Header row, then one tab-separated row per candidate:
            # loss, posterior, proxy, yield (truncated to --nbest if set).
            out.write('{0}\n'.format('\t'.join(['#target', '#p', '#q', '#yield'])))
            if options.nbest > 0:
                for y, l, p, q in ranking[0:options.nbest]:
                    out.write('{0}\n'.format('\t'.join(str(x) for x in [l, p, q, y])))
            else:
                for y, l, p, q in ranking:
                    out.write('{0}\n'.format('\t'.join(str(x) for x in [l, p, q, y])))
        return ranking[0]
    except:
        raise Exception('job={0} exception={1}'.format(jid, ''.join(traceback.format_exception(*sys.exc_info()))))
def argparse_and_config():
    """Parse CLI arguments and the chisel configuration file.

    Exits the process with status 1 if configuration fails.

    Returns:
        (args, config) -- parsed argparse namespace and config object.
    """
    parser = argparse.ArgumentParser(description='Applies a decision rule to a sample.',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('config',
                        type=str,
                        help="configuration file")
    parser.add_argument("workspace",
                        type=str, default=None,
                        help="where samples can be found and where decisions are placed")
    parser.add_argument("--nbest", '-k',
                        type=int, default=1,
                        help="number of solutions")
    parser.add_argument("--jobs", '-j',
                        type=int, default=2,
                        help="number of processes")
    parser.add_argument('--verbose', '-v',
                        action='store_true',
                        help='increases verbosity')
    # configure() merges CLI args with the config file and sets up logging.
    args, config, failed = configure(parser,
                                     set_defaults=['chisel:model', 'chisel:decision'],
                                     required_sections=['proxy', 'target'],
                                     configure_logging=True)
    logging.debug('arguments: %s', vars(args))
    if failed:
        sys.exit(1)
    return args, config
def main():
    """Run consensus-BLEU decoding over every sample file in the workspace.

    Reads $workspace/samples/*, decides each job in a process pool, writes
    per-job rankings under $workspace/decisions/consensus-bleu/ and the
    1-best yields to $workspace/output/consensus-bleu.
    """
    options, config = argparse_and_config()
    # check for input folder
    samples_dir = '{0}/samples'.format(options.workspace)
    if not os.path.isdir(samples_dir):
        raise Exception('If a workspace is set, samples are expected to be found under $workspace/samples')
    logging.info('Reading samples from %s', samples_dir)
    # create output folders
    if not os.path.isdir('{0}/output'.format(options.workspace)):
        os.makedirs('{0}/output'.format(options.workspace))
    output_dir = create_decision_rule_dir(options.workspace, 'consensus', 'bleu')
    one_best_file = '{0}/output/{1}-{2}'.format(options.workspace, 'consensus', 'bleu')
    logging.info("Writing '%s' solutions to %s", 'consensus', output_dir)
    logging.info("Writing 1-best '%s' yields to %s", 'consensus', one_best_file)
    # TODO: generalise this
    headers = {'derivation': 'd', 'vector': 'v', 'count': 'n', 'log_ur': 'log_ur', 'importance': 'importance'}
    # read jobs from workspace
    input_files = list_numbered_files(samples_dir)
    jobs = [(fid, input_file) for fid, input_file in input_files]
    logging.info('%d jobs', len(jobs))
    # run jobs in parallel
    pool = Pool(options.jobs)
    # run decision rules and save them to files
    results = pool.map(partial(decide_and_save,
                               headers=headers,
                               options=options,
                               output_dir=output_dir),
                       jobs)
    # save the 1-best solution for each decision rule in a separate file
    with smart_wopen(one_best_file) as fout:
        for y, l, p, q in results:
            fout.write('{0}\n'.format(y))


if __name__ == '__main__':
    main()
|
wilkeraziz/chisel
|
python/chisel/fast_consensus.py
|
Python
|
apache-2.0
| 6,459
|
#!/usr/bin/env python
#
# Copyright 2011 Google Inc. All Rights Reserved.
import unittest
from google.appengine.api import files
from google.appengine.ext import db
from mapreduce import control
from mapreduce import model
from mapreduce import output_writers
from mapreduce import test_support
from testlib import testutil
BLOBSTORE_WRITER_NAME = (output_writers.__name__ + "." +
output_writers.BlobstoreOutputWriter.__name__)
FILE_WRITER_NAME = (output_writers.__name__ + "." +
output_writers.FileOutputWriter.__name__)
class TestEntity(db.Model):
  """Test entity class."""
  # No properties: these end-to-end tests only consume entity keys.
def test_handler_yield_key_str(entity):
  """Test handler which yields entity key."""
  # One newline-terminated line per mapped entity.
  key_text = str(entity.key())
  yield key_text + "\n"
class FileOutputWriterEndToEndTest(testutil.HandlerTestBase):
  """End-to-end tests for FileOutputWriter using googlestore."""

  def testSingleShard(self):
    # Single output file expected even with 4 map shards (no
    # output_sharding parameter -> one combined file).
    entity_count = 1000
    for _ in range(entity_count):
      TestEntity().put()

    mapreduce_id = control.start_map(
        "test_map",
        __name__ + ".test_handler_yield_key_str",
        "mapreduce.input_readers.DatastoreInputReader",
        {
            "entity_kind": __name__ + "." + TestEntity.__name__,
            "filesystem": "gs",
            "gs_bucket_name": "bucket"
        },
        shard_count=4,
        base_path="/mapreduce_base_path",
        output_writer_spec=FILE_WRITER_NAME)

    # Drain the task queue so the whole job runs to completion.
    test_support.execute_until_empty(self.taskqueue)

    mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
    filenames = output_writers.FileOutputWriter.get_filenames(mapreduce_state)
    self.assertEqual(1, len(filenames))
    self.assertTrue(filenames[0].startswith("/gs/bucket/"))

    with files.open(filenames[0], "r") as f:
      data = f.read(10000000)
      # One output line per entity.
      self.assertEquals(1000, len(data.strip().split("\n")))

  def testDedicatedParams(self):
    # Same as testSingleShard but with reader/writer params nested under
    # dedicated "input_reader"/"output_writer" keys instead of flat.
    entity_count = 1000
    for _ in range(entity_count):
      TestEntity().put()

    mapreduce_id = control.start_map(
        "test_map",
        __name__ + ".test_handler_yield_key_str",
        "mapreduce.input_readers.DatastoreInputReader",
        {
            "input_reader": {
                "entity_kind": __name__ + "." + TestEntity.__name__,
            },
            "output_writer": {
                "filesystem": "gs",
                "gs_bucket_name": "bucket",
            },
        },
        shard_count=4,
        base_path="/mapreduce_base_path",
        output_writer_spec=FILE_WRITER_NAME)

    test_support.execute_until_empty(self.taskqueue)

    mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
    filenames = output_writers.FileOutputWriter.get_filenames(mapreduce_state)
    self.assertEqual(1, len(filenames))
    self.assertTrue(filenames[0].startswith("/gs/bucket/"))

    with files.open(filenames[0], "r") as f:
      data = f.read(10000000)
      self.assertEquals(1000, len(data.strip().split("\n")))

  def testMultipleShards(self):
    # With output_sharding=input each of the 4 shards writes its own file.
    entity_count = 1000
    for _ in range(entity_count):
      TestEntity().put()

    mapreduce_id = control.start_map(
        "test_map",
        __name__ + ".test_handler_yield_key_str",
        "mapreduce.input_readers.DatastoreInputReader",
        {
            "entity_kind": __name__ + "." + TestEntity.__name__,
            "output_sharding": "input",
            "filesystem": "gs",
        },
        shard_count=4,
        base_path="/mapreduce_base_path",
        output_writer_spec=BLOBSTORE_WRITER_NAME)

    test_support.execute_until_empty(self.taskqueue)

    mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
    filenames = output_writers.BlobstoreOutputWriter.get_filenames(
        mapreduce_state)
    self.assertEqual(4, len(filenames))

    file_lengths = []
    for filename in filenames:
      self.assertTrue(filename.startswith("/blobstore/"))
      # The writable: prefix marks an unfinalized file; all must be final.
      self.assertFalse(filename.startswith("/blobstore/writable:"))

      with files.open(filename, "r") as f:
        data = f.read(10000000)
        file_lengths.append(len(data.strip().split("\n")))

    # these numbers are totally random and depend on our sharding,
    # which is quite deterministic.
    expected_lengths = [199, 210, 275, 316]
    self.assertEqual(1000, sum(expected_lengths))
    self.assertEquals(expected_lengths, file_lengths)
class BlobstoreOutputWriterEndToEndTest(testutil.HandlerTestBase):
  """End-to-end tests for BlobstoreOutputWriter.

  BlobstoreOutputWriter isn't complex enough yet to do extensive
  unit tests. Do end-to-end tests just to check that it works.
  """

  def testSingleShard(self):
    # Default output sharding: all 4 map shards feed one blob.
    entity_count = 1000
    for _ in range(entity_count):
      TestEntity().put()

    mapreduce_id = control.start_map(
        "test_map",
        __name__ + ".test_handler_yield_key_str",
        "mapreduce.input_readers.DatastoreInputReader",
        {
            "entity_kind": __name__ + "." + TestEntity.__name__,
        },
        shard_count=4,
        base_path="/mapreduce_base_path",
        output_writer_spec=BLOBSTORE_WRITER_NAME)

    test_support.execute_until_empty(self.taskqueue)

    mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
    filenames = output_writers.BlobstoreOutputWriter.get_filenames(
        mapreduce_state)
    self.assertEqual(1, len(filenames))
    blob_name = filenames[0]
    self.assertTrue(blob_name.startswith("/blobstore/"))
    # A writable: prefix would mean the blob was never finalized.
    self.assertFalse(blob_name.startswith("/blobstore/writable:"))

    with files.open(blob_name, "r") as f:
      data = f.read(10000000)
      # One output line per entity.
      self.assertEquals(1000, len(data.strip().split("\n")))

  def testMultipleShards(self):
    # output_sharding=input: one finalized blob per map shard.
    entity_count = 1000
    for _ in range(entity_count):
      TestEntity().put()

    mapreduce_id = control.start_map(
        "test_map",
        __name__ + ".test_handler_yield_key_str",
        "mapreduce.input_readers.DatastoreInputReader",
        {
            "entity_kind": __name__ + "." + TestEntity.__name__,
            "output_sharding": "input",
        },
        shard_count=4,
        base_path="/mapreduce_base_path",
        output_writer_spec=BLOBSTORE_WRITER_NAME)

    test_support.execute_until_empty(self.taskqueue)

    mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
    filenames = output_writers.BlobstoreOutputWriter.get_filenames(
        mapreduce_state)
    self.assertEqual(4, len(filenames))

    file_lengths = []
    for filename in filenames:
      self.assertTrue(filename.startswith("/blobstore/"))
      self.assertFalse(filename.startswith("/blobstore/writable:"))

      with files.open(filename, "r") as f:
        data = f.read(10000000)
        file_lengths.append(len(data.strip().split("\n")))

    # these numbers are totally random and depend on our sharding,
    # which is quite deterministic.
    expected_lengths = [199, 210, 275, 316]
    self.assertEqual(1000, sum(expected_lengths))
    self.assertEquals(expected_lengths, file_lengths)
if __name__ == "__main__":
unittest.main()
|
bslatkin/8-bits
|
appengine-mapreduce/python/test/mapreduce/output_writers_end_to_end_test.py
|
Python
|
apache-2.0
| 7,023
|
# Copyright (c) 2015 Openstack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import binascii
import netaddr
from oslo_log import log as logging
from oslo_utils import excutils
from neutron.agent.l3 import dvr_fip_ns
from neutron.agent.l3 import router_info as router
from neutron.agent.linux import ip_lib
from neutron.common import constants as l3_constants
from neutron.common import exceptions
from neutron.common import utils as common_utils
from neutron.i18n import _LE
LOG = logging.getLogger(__name__)
# xor-folding mask used for IPv6 rule index
MASK_30 = 0x3fffffff
class DvrLocalRouter(router.RouterInfo):
    def __init__(self, agent, host, *args, **kwargs):
        """Initialize DVR-local router state.

        Args:
            agent: owning L3 agent instance
            host: hostname this agent runs on; used to filter floating IPs
        """
        super(DvrLocalRouter, self).__init__(*args, **kwargs)

        self.agent = agent
        self.host = host
        # Maps floating IP address -> allocated ip-rule priority.
        self.floating_ips_dict = {}
        self.snat_iptables_manager = None
        # Linklocal subnet for router and floating IP namespace link
        self.rtr_fip_subnet = None
        # Count of distributed FIPs hosted here. NOTE(review): starts as
        # None, not 0 -- presumably set elsewhere before the arithmetic in
        # floating_ip_added_dist/removed_dist runs; confirm.
        self.dist_fip_count = None
        self.fip_ns = None
def get_floating_ips(self):
"""Filter Floating IPs to be hosted on this agent."""
floating_ips = super(DvrLocalRouter, self).get_floating_ips()
return [i for i in floating_ips if i['host'] == self.host]
    def get_snat_interfaces(self):
        """Return the router's SNAT interface ports ([] when the key is absent)."""
        return self.router.get(l3_constants.SNAT_ROUTER_INTF_KEY, [])
    def _handle_fip_nat_rules(self, interface_name, action):
        """Configures NAT rules for Floating IPs for DVR.

        Remove all the rules. This is safe because if
        use_namespaces is set as False then the agent can
        only configure one router, otherwise each router's
        NAT rules will be in their own namespace.

        Args:
            interface_name: device the POSTROUTING accept rule is tied to
            action: only 'add_rules' re-adds rules after the flush;
                any other value leaves the chains emptied
        """
        # Always flush both chains first; rules are rebuilt below.
        self.iptables_manager.ipv4['nat'].empty_chain('POSTROUTING')
        self.iptables_manager.ipv4['nat'].empty_chain('snat')

        # Add back the jump to float-snat
        self.iptables_manager.ipv4['nat'].add_rule('snat', '-j $float-snat')

        # And add them back if the action is add_rules
        if action == 'add_rules' and interface_name:
            # Accept traffic not traversing the external device unless it
            # was DNATed (i.e. floating-IP traffic keeps its NAT handling).
            rule = ('POSTROUTING', '! -i %(interface_name)s '
                    '! -o %(interface_name)s -m conntrack ! '
                    '--ctstate DNAT -j ACCEPT' %
                    {'interface_name': interface_name})
            self.iptables_manager.ipv4['nat'].add_rule(*rule)
        self.iptables_manager.apply()
    def floating_ip_added_dist(self, fip, fip_cidr):
        """Add floating IP to FIP namespace.

        Installs the source-based ip rule in the router namespace, the
        return route in the FIP namespace, and sends a gratuitous ARP
        advertisement for the new address.
        """
        floating_ip = fip['floating_ip_address']
        fixed_ip = fip['fixed_ip_address']
        # Track the allocated rule priority so it can be released on removal.
        rule_pr = self.fip_ns.allocate_rule_priority()
        self.floating_ips_dict[floating_ip] = rule_pr
        fip_2_rtr_name = self.fip_ns.get_int_device_name(self.router_id)
        ip_rule = ip_lib.IPRule(namespace=self.ns_name)
        # Traffic from the fixed IP is steered to the FIP routing table.
        ip_rule.rule.add(ip=fixed_ip,
                         table=dvr_fip_ns.FIP_RT_TBL,
                         priority=rule_pr)
        # Add routing rule in fip namespace
        fip_ns_name = self.fip_ns.get_name()
        rtr_2_fip, _ = self.rtr_fip_subnet.get_pair()
        device = ip_lib.IPDevice(fip_2_rtr_name, namespace=fip_ns_name)
        device.route.add_route(fip_cidr, str(rtr_2_fip.ip))
        interface_name = (
            self.fip_ns.get_ext_device_name(
                self.fip_ns.agent_gateway_port['id']))
        # Advertise the new floating IP on the external device (GARP).
        ip_lib.send_ip_addr_adv_notif(fip_ns_name,
                                      interface_name,
                                      floating_ip,
                                      self.agent_conf)
        # update internal structures
        self.dist_fip_count = self.dist_fip_count + 1
    def floating_ip_removed_dist(self, fip_cidr):
        """Remove floating IP from FIP namespace.

        Reverses floating_ip_added_dist: deletes the ip rule and the FIP
        namespace route; when this was the last FIP on the router it also
        tears down the rtr<->fip link and, if no other router uses it,
        the whole FIP namespace.
        """
        floating_ip = fip_cidr.split('/')[0]
        rtr_2_fip_name = self.fip_ns.get_rtr_ext_device_name(self.router_id)
        fip_2_rtr_name = self.fip_ns.get_int_device_name(self.router_id)
        if self.rtr_fip_subnet is None:
            # Re-derive the linklocal pair if it was never cached (e.g.
            # after an agent restart).
            self.rtr_fip_subnet = self.fip_ns.local_subnets.allocate(
                self.router_id)

        rtr_2_fip, fip_2_rtr = self.rtr_fip_subnet.get_pair()
        fip_ns_name = self.fip_ns.get_name()
        if floating_ip in self.floating_ips_dict:
            rule_pr = self.floating_ips_dict[floating_ip]
            ip_rule = ip_lib.IPRule(namespace=self.ns_name)
            ip_rule.rule.delete(ip=floating_ip,
                                table=dvr_fip_ns.FIP_RT_TBL,
                                priority=rule_pr)
            self.fip_ns.deallocate_rule_priority(rule_pr)
            # TODO(rajeev): Handle else case - exception/log?

        device = ip_lib.IPDevice(fip_2_rtr_name, namespace=fip_ns_name)

        device.route.delete_route(fip_cidr, str(rtr_2_fip.ip))
        # check if this is the last FIP for this router
        self.dist_fip_count = self.dist_fip_count - 1
        if self.dist_fip_count == 0:
            # remove default route entry
            device = ip_lib.IPDevice(rtr_2_fip_name, namespace=self.ns_name)
            ns_ip = ip_lib.IPWrapper(namespace=fip_ns_name)
            device.route.delete_gateway(str(fip_2_rtr.ip),
                                        table=dvr_fip_ns.FIP_RT_TBL)
            self.fip_ns.local_subnets.release(self.router_id)
            self.rtr_fip_subnet = None
            # Deleting one end of the veth pair removes both ends.
            ns_ip.del_veth(fip_2_rtr_name)
            is_last = self.fip_ns.unsubscribe(self.router_id)
            if is_last:
                # TODO(Carl) I can't help but think that another router could
                # come in and want to start using this namespace while this is
                # destroying it.  The two could end up conflicting on
                # creating/destroying interfaces and such.  I think I'd like a
                # semaphore to sync creation/deletion of this namespace.
                self.fip_ns.delete()
                self.fip_ns = None
def add_floating_ip(self, fip, interface_name, device):
if not self._add_fip_addr_to_device(fip, device):
return l3_constants.FLOATINGIP_STATUS_ERROR
# Special Handling for DVR - update FIP namespace
ip_cidr = common_utils.ip_to_cidr(fip['floating_ip_address'])
self.floating_ip_added_dist(fip, ip_cidr)
return l3_constants.FLOATINGIP_STATUS_ACTIVE
    def remove_floating_ip(self, device, ip_cidr):
        # Base-class cleanup first, then DVR-specific FIP namespace teardown.
        super(DvrLocalRouter, self).remove_floating_ip(device, ip_cidr)
        self.floating_ip_removed_dist(ip_cidr)
def _get_internal_port(self, subnet_id):
"""Return internal router port based on subnet_id."""
router_ports = self.router.get(l3_constants.INTERFACE_KEY, [])
for port in router_ports:
fips = port['fixed_ips']
for f in fips:
if f['subnet_id'] == subnet_id:
return port
    def _update_arp_entry(self, ip, mac, subnet_id, operation):
        """Add or delete arp entry into router namespace for the subnet.

        :param ip: IP address of the neighbor entry.
        :param mac: MAC address of the neighbor entry.
        :param subnet_id: subnet used to locate the internal device.
        :param operation: 'add' or 'delete'; anything else is a no-op.
        """
        port = self._get_internal_port(subnet_id)
        # update arp entry only if the subnet is attached to the router
        if not port:
            return
        try:
            # TODO(mrsmith): optimize the calls below for bulk calls
            interface_name = self.get_internal_device_name(port['id'])
            device = ip_lib.IPDevice(interface_name, namespace=self.ns_name)
            if operation == 'add':
                device.neigh.add(ip, mac)
            elif operation == 'delete':
                device.neigh.delete(ip, mac)
        except Exception:
            # Log and re-raise so the caller still sees the failure.
            with excutils.save_and_reraise_exception():
                LOG.exception(_LE("DVR: Failed updating arp entry"))
def _set_subnet_arp_info(self, subnet_id):
"""Set ARP info retrieved from Plugin for existing ports."""
# TODO(Carl) Can we eliminate the need to make this RPC while
# processing a router.
subnet_ports = self.agent.get_ports_by_subnet(subnet_id)
for p in subnet_ports:
if p['device_owner'] not in l3_constants.ROUTER_INTERFACE_OWNERS:
for fixed_ip in p['fixed_ips']:
self._update_arp_entry(fixed_ip['ip_address'],
p['mac_address'],
subnet_id,
'add')
def _map_internal_interfaces(self, int_port, snat_ports):
"""Return the SNAT port for the given internal interface port."""
fixed_ip = int_port['fixed_ips'][0]
subnet_id = fixed_ip['subnet_id']
match_port = [p for p in snat_ports if
p['fixed_ips'][0]['subnet_id'] == subnet_id]
if match_port:
return match_port[0]
else:
LOG.error(_LE('DVR: no map match_port found!'))
@staticmethod
def _get_snat_idx(ip_cidr):
"""Generate index for DVR snat rules and route tables.
The index value has to be 32 bits or less but more than the system
generated entries i.e. 32768. For IPv4 use the numeric value of the
cidr. For IPv6 generate a crc32 bit hash and xor-fold to 30 bits.
Use the freed range to extend smaller values so that they become
greater than system generated entries.
"""
net = netaddr.IPNetwork(ip_cidr)
if net.version == 6:
# the crc32 & 0xffffffff is for Python 2.6 and 3.0 compatibility
snat_idx = binascii.crc32(ip_cidr) & 0xffffffff
# xor-fold the hash to reserve upper range to extend smaller values
snat_idx = (snat_idx >> 30) ^ (snat_idx & MASK_30)
if snat_idx < 32768:
snat_idx = snat_idx + MASK_30
else:
snat_idx = net.value
return snat_idx
    def _delete_gateway_device_if_exists(self, ns_ip_device, gw_ip_addr,
                                         snat_idx):
        # Best-effort removal: the device may already be gone, in which
        # case there is nothing to clean up.
        try:
            ns_ip_device.route.delete_gateway(gw_ip_addr,
                                              table=snat_idx)
        except exceptions.DeviceNotFoundError:
            pass
    def _snat_redirect_modify(self, gateway, sn_port, sn_int, is_add):
        """Adds or removes rules and routes for SNAT redirection.

        For every fixed IP on sn_port, the first gateway fixed IP of the
        same IP version is used as the next hop in a dedicated routing
        table keyed by _get_snat_idx(). Failures are logged, not raised.
        """
        try:
            ns_ipr = ip_lib.IPRule(namespace=self.ns_name)
            ns_ipd = ip_lib.IPDevice(sn_int, namespace=self.ns_name)
            if is_add:
                ns_ipwrapr = ip_lib.IPWrapper(namespace=self.ns_name)
            for port_fixed_ip in sn_port['fixed_ips']:
                # Find the first gateway IP address matching this IP version
                port_ip_addr = port_fixed_ip['ip_address']
                port_ip_vers = netaddr.IPAddress(port_ip_addr).version
                for gw_fixed_ip in gateway['fixed_ips']:
                    gw_ip_addr = gw_fixed_ip['ip_address']
                    if netaddr.IPAddress(gw_ip_addr).version == port_ip_vers:
                        sn_port_cidr = common_utils.ip_to_cidr(
                            port_ip_addr, port_fixed_ip['prefixlen'])
                        # Same value is used for both the table id and the
                        # rule priority.
                        snat_idx = self._get_snat_idx(sn_port_cidr)
                        if is_add:
                            ns_ipd.route.add_gateway(gw_ip_addr,
                                                     table=snat_idx)
                            ns_ipr.rule.add(ip=sn_port_cidr,
                                            table=snat_idx,
                                            priority=snat_idx)
                            # Suppress ICMP redirects on the internal device.
                            ns_ipwrapr.netns.execute(
                                ['sysctl', '-w',
                                 'net.ipv4.conf.%s.send_redirects=0' % sn_int])
                        else:
                            self._delete_gateway_device_if_exists(ns_ipd,
                                                                  gw_ip_addr,
                                                                  snat_idx)
                            ns_ipr.rule.delete(ip=sn_port_cidr,
                                               table=snat_idx,
                                               priority=snat_idx)
                        break
        except Exception:
            if is_add:
                exc = _LE('DVR: error adding redirection logic')
            else:
                exc = _LE('DVR: removed snat failed')
            LOG.exception(exc)
    def _snat_redirect_add(self, gateway, sn_port, sn_int):
        """Adds rules and routes for SNAT redirection."""
        self._snat_redirect_modify(gateway, sn_port, sn_int, is_add=True)
    def _snat_redirect_remove(self, gateway, sn_port, sn_int):
        """Removes rules and routes for SNAT redirection."""
        self._snat_redirect_modify(gateway, sn_port, sn_int, is_add=False)
def get_gw_port_host(self):
host = self.router.get('gw_port_host')
if not host:
LOG.debug("gw_port_host missing from router: %s",
self.router['id'])
return host
    def internal_network_added(self, port):
        super(DvrLocalRouter, self).internal_network_added(port)
        # NOTE: The following function _set_subnet_arp_info
        # should be called to dynamically populate the arp
        # entries for the dvr services ports into the router
        # namespace. This does not have dependency on the
        # external_gateway port or the agent_mode.
        for subnet in port['subnets']:
            self._set_subnet_arp_info(subnet['id'])
        # SNAT redirection is only needed when an external gateway exists
        # and a matching SNAT port was found for this internal port.
        ex_gw_port = self.get_ex_gw_port()
        if not ex_gw_port:
            return
        snat_ports = self.get_snat_interfaces()
        sn_port = self._map_internal_interfaces(port, snat_ports)
        if not sn_port:
            return
        interface_name = self.get_internal_device_name(port['id'])
        self._snat_redirect_add(sn_port, port, interface_name)
    def _dvr_internal_network_removed(self, port):
        """Tear down SNAT redirection for a removed internal port."""
        if not self.ex_gw_port:
            return
        sn_port = self._map_internal_interfaces(port, self.snat_ports)
        if not sn_port:
            return
        # DVR handling code for SNAT
        interface_name = self.get_internal_device_name(port['id'])
        self._snat_redirect_remove(sn_port, port, interface_name)
    def internal_network_removed(self, port):
        # DVR-specific teardown first, then the base-class device removal.
        self._dvr_internal_network_removed(port)
        super(DvrLocalRouter, self).internal_network_removed(port)
def get_floating_agent_gw_interface(self, ext_net_id):
"""Filter Floating Agent GW port for the external network."""
fip_ports = self.router.get(l3_constants.FLOATINGIP_AGENT_INTF_KEY, [])
return next(
(p for p in fip_ports if p['network_id'] == ext_net_id), None)
    def get_external_device_interface_name(self, ex_gw_port):
        """Return the rtr-to-fip device name if the FIP namespace link
        exists, otherwise None.
        """
        fip_int = self.fip_ns.get_int_device_name(self.router_id)
        if ip_lib.device_exists(fip_int, namespace=self.fip_ns.get_name()):
            return self.fip_ns.get_rtr_ext_device_name(self.router_id)
    def external_gateway_added(self, ex_gw_port, interface_name):
        # TODO(Carl) Refactor external_gateway_added/updated/removed to use
        # super class implementation where possible.  Looks like preserve_ips,
        # and ns_name are the key differences.
        # Disable ICMP redirects in the router namespace, then set up SNAT
        # redirection for every internal port and seed ARP entries for the
        # SNAT ports.
        ip_wrapr = ip_lib.IPWrapper(namespace=self.ns_name)
        ip_wrapr.netns.execute(['sysctl', '-w',
                               'net.ipv4.conf.all.send_redirects=0'])
        snat_ports = self.get_snat_interfaces()
        for p in self.internal_ports:
            gateway = self._map_internal_interfaces(p, snat_ports)
            id_name = self.get_internal_device_name(p['id'])
            if gateway:
                self._snat_redirect_add(gateway, p, id_name)
        for port in snat_ports:
            for ip in port['fixed_ips']:
                self._update_arp_entry(ip['ip_address'],
                                       port['mac_address'],
                                       ip['subnet_id'],
                                       'add')
    def external_gateway_updated(self, ex_gw_port, interface_name):
        # Intentionally a no-op for the DVR local router -- presumably the
        # update is handled by other components; confirm before changing.
        pass
    def external_gateway_removed(self, ex_gw_port, interface_name):
        # TODO(Carl) Should this be calling process_snat_dnat_for_fip?
        self.process_floating_ip_nat_rules()
        if self.fip_ns:
            to_fip_interface_name = (
                self.get_external_device_interface_name(ex_gw_port))
            self.process_floating_ip_addresses(to_fip_interface_name)
        snat_ports = self.get_snat_interfaces()
        for p in self.internal_ports:
            # NOTE(review): unlike internal_network_added there is no guard
            # for gateway being None here; _snat_redirect_modify's broad
            # except will log the failure in that case -- confirm intended.
            gateway = self._map_internal_interfaces(p, snat_ports)
            internal_interface = self.get_internal_device_name(p['id'])
            self._snat_redirect_remove(gateway, p, internal_interface)
    def _handle_router_snat_rules(self, ex_gw_port,
                                  interface_name, action):
        """Rebuild the SNAT iptables rules via the snat iptables manager."""
        if not self.snat_iptables_manager:
            LOG.debug("DVR router: no snat rules to be handled")
            return
        with self.snat_iptables_manager.defer_apply():
            self._empty_snat_chains(self.snat_iptables_manager)
            # NOTE DVR doesn't add the jump to float snat like the super class.
            self._add_snat_rules(ex_gw_port, self.snat_iptables_manager,
                                 interface_name, action)
    def perform_snat_action(self, snat_callback, *args):
        # NOTE DVR skips this step in a few cases...
        # No external gateway: nothing to SNAT.
        if not self.get_ex_gw_port():
            return
        # Only the agent hosting the gateway port performs SNAT actions.
        if self.get_gw_port_host() != self.host:
            return
        super(DvrLocalRouter,
              self).perform_snat_action(snat_callback, *args)
    def process_external(self, agent):
        # Ensure the FIP namespace interfaces exist before the base class
        # processes external connectivity.
        ex_gw_port = self.get_ex_gw_port()
        if ex_gw_port:
            self.create_dvr_fip_interfaces(ex_gw_port)
        super(DvrLocalRouter, self).process_external(agent)
    def create_dvr_fip_interfaces(self, ex_gw_port):
        """Ensure the FIP-namespace plumbing exists for this router.

        Subscribes the router to the shared FIP namespace when it has
        floating IPs, creates the agent gateway port (requesting one from
        the plugin if necessary) on first subscription, and wires up the
        rtr<->fip link.
        """
        floating_ips = self.get_floating_ips()
        fip_agent_port = self.get_floating_agent_gw_interface(
            ex_gw_port['network_id'])
        LOG.debug("FloatingIP agent gateway port received from the plugin: "
                  "%s", fip_agent_port)
        is_first = False
        if floating_ips:
            is_first = self.fip_ns.subscribe(self.router_id)
            if is_first and not fip_agent_port:
                LOG.debug("No FloatingIP agent gateway port possibly due to "
                          "late binding of the private port to the host, "
                          "requesting agent gateway port for 'network-id' :"
                          "%s", ex_gw_port['network_id'])
                # Fall back to an RPC request for the agent gateway port.
                fip_agent_port = self.agent.plugin_rpc.get_agent_gateway_port(
                    self.agent.context, ex_gw_port['network_id'])
                if not fip_agent_port:
                    LOG.error(_LE("No FloatingIP agent gateway port "
                                  "returned from server for 'network-id': "
                                  "%s"), ex_gw_port['network_id'])
        if is_first and fip_agent_port:
            if 'subnets' not in fip_agent_port:
                LOG.error(_LE('Missing subnet/agent_gateway_port'))
            else:
                self.fip_ns.create_gateway_port(fip_agent_port)
        if self.fip_ns.agent_gateway_port and floating_ips:
            if self.dist_fip_count == 0 or is_first:
                self.fip_ns.create_rtr_2_fip_link(self)
                # kicks the FW Agent to add rules for the IR namespace if
                # configured
                self.agent.process_router_add(self)
    def process(self, agent):
        # Bind this router to the FIP namespace of its external network
        # before the base class runs the normal processing pipeline.
        ex_gw_port = self.get_ex_gw_port()
        if ex_gw_port:
            self.fip_ns = agent.get_fip_ns(ex_gw_port['network_id'])
            self.fip_ns.scan_fip_ports(self)
        super(DvrLocalRouter, self).process(agent)
|
mandeepdhami/neutron
|
neutron/agent/l3/dvr_local_router.py
|
Python
|
apache-2.0
| 20,535
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class CGpm(object):
    """Interface for composable generative population models.

    Composable generative population models (CGpms) provide a computational
    abstraction for multivariate probability densities and stochastic
    samplers. Every method here raises NotImplementedError; concrete models
    implement this interface.
    """
    def __init__(self, outputs, inputs, schema, rng):
        """Initialize the CGpm.

        Parameters
        ----------
        outputs : list<int>
            List of endogenous variables whose joint distribution is modeled
            by the CGpm. The CGpm is required to simulate and evaluate the
            log density of an arbitrary subset of output variables, by
            marginalizing and/or conditioning on another (disjoint) subset
            of output variables.
        inputs : list<int>, optional
            List of exogenous variables unmodeled by the CGpm which are
            needed on a per-row basis. A full realization of all inputs
            (if any) is required for each simulate and logpdf query.
        schema : **kwargs
            An arbitrary data structure used by the CGpm to initialize
            itself. Often contains information about hyperparameters,
            parameters, sufficient statistics, configuration settings, or
            metadata about the input variables.
        rng : numpy.random.RandomState
            Source of entropy.
        """
        raise NotImplementedError
    def incorporate(self, rowid, observation, inputs=None):
        """Record an observation for `rowid` into the dataset.

        rowid : int
            A unique integer identifying the member.
        observation : dict{int:value}
            The observed values. The keys of `observation` must be a subset
            of the `output` variables, and `value` must be type-matched
            based on the statistical data type of that variable. Missing
            values may be either omitted, or specified as float(nan).
        inputs : dict{int:value}, optional
            Values of all required `input` variables for the `rowid`.
        """
        raise NotImplementedError
    def unincorporate(self, rowid):
        """Remove all incorporated observations of `rowid` from the dataset."""
        raise NotImplementedError
    def logpdf(self, rowid, targets, constraints=None, inputs=None):
        """Return the density of `targets` given `constraints` and `inputs`.

            Pr[targets | constraints; inputs]

        rowid : int, or None to indicate a hypothetical row
            Specifies the identity of the population member against which to
            evaluate the log density.
        targets : dict{int:value}
            The keys of `targets` must be a subset of the `output` variables.
            If `rowid` corresponds to an existing member, it is an error for
            `targets` to contain any output variable for that `rowid` which
            has already been incorporated.
        constraints : dict{int:value}, optional
            The keys of `constraints` must be a subset of the `output`
            variables, and disjoint from the keys of `targets`. These
            constraints serve as probabilistic conditions on the
            multivariate output distribution. If `rowid` corresponds to an
            existing member, it is an error for `constraints` to contain any
            output variable for that `rowid` which has already been
            incorporated.
        inputs : dict{int:value}, optional
            The keys of `inputs` must contain all the cgpm's `input`
            variables, if any. These values comprise a full realization of
            all exogenous variables required by the cgpm. If `rowid`
            corresponds to an existing member, then `inputs` is expected to
            be None.
        """
        raise NotImplementedError
    def simulate(self, rowid, query, constraints=None, inputs=None, N=None):
        """Return N iid samples of `query` given `constraints` and `inputs`.

            (X_1, X_2, ... X_N) ~iid Pr[query | constraints; inputs]

        rowid : int, or None to indicate a hypothetical row
            Specifies the identity of the population member whose posterior
            distribution over unobserved outputs to simulate from.
        query : list<int>
            List of `output` variables to simulate. If `rowid` corresponds
            to an existing member, it is an error for `query` to contain any
            output variable for that `rowid` which has already been
            incorporated.
        constraints : dict{int:value}, optional
            The keys of `constraints` must be a subset of the `output`
            variables, and disjoint from `query`. These constraints serve as
            probabilistic conditions on the multivariate output
            distribution. If `rowid` corresponds to an existing member, it
            is an error for `constraints` to contain any output variable for
            that `rowid` which has already been incorporated.
        inputs : dict{int:value}, optional
            The keys of `inputs` must contain all the cgpm's `input`
            variables, if any. These values comprise a full realization of
            all exogenous variables required by the cgpm. If `rowid`
            corresponds to an existing member, then `inputs` is expected to
            be None.
        N : int, (optional, default None)
            Number of samples to return. If None, returns a single sample as
            a dictionary with size len(query), where each key is an `output`
            variable and each value the sample for that dimension. If `N` is
            not None, a size N list of dictionaries will be returned, each
            corresponding to a single sample.
        """
        raise NotImplementedError
    def logpdf_score(self):
        """Return joint density of all observations and current latent state."""
        raise NotImplementedError
    def transition(self, **kwargs):
        """Apply an inference operator transitioning the internal state of CGpm.

        **kwargs : arbitrary keyword arguments
            Opaque binary parsed by the CGpm to apply inference over its
            latents. There are no restrictions on the learning mechanism,
            which may be based on optimization (variational inference,
            maximum likelihood, EM, etc), Markov chain Monte Carlo sampling
            (SMC, MH, etc), arbitrary heuristics, or others.
        """
        raise NotImplementedError
    def to_metadata(self):
        """Return the binary (json-friendly) representation of the CGpm.

        The returned B is expected to contain an entry ['factory'] which can
        be used to deserialize the binary in the following way:

        >> B = C.to_metadata()
        >> modname, attrname = B['factory']
        >> mod = importlib.import_module(modname)
        >> builder = getattr(mod, attrname)
        >> C = builder.from_metadata(B)
        """
        raise NotImplementedError
    @staticmethod
    # NOTE(review): declared @staticmethod yet takes a `cls` parameter --
    # this looks like it was meant to be @classmethod; confirm against
    # callers before changing.
    def from_metadata(cls, metadata, rng=None):
        """Load CGpm from its binary representation.

        Refer to the usage example in `to_metadata`.
        """
        raise NotImplementedError
|
probcomp/cgpm
|
src/cgpm.py
|
Python
|
apache-2.0
| 7,812
|
#!/usr/bin/env python
# Copyright 2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import unittest
import os
import tempfile
# internal modules:
from yotta.lib.fsutils import mkDirP, rmRf
from yotta.lib.detect import systemDefaultTarget
from yotta.lib import component
from .cli import cli
# Fixture tree exercising .yotta_ignore patterns. Note: the ignore file
# lists 'ignoredbyfname.c', so that file (containing #error) must be
# skipped by the build, while anothertest.c supplies the test's main().
Test_Files = {
'.yotta_ignore': '''
#comment
/moo
b/c/d
b/c/*.txt
/a/b/test.txt
b/*.c
/source/a/b/test.txt
/test/foo
sometest/a
someothertest
ignoredbyfname.c
''',
'module.json': '''
{
"name": "test-testdep-f",
"version": "0.0.6",
"description": "Module to test test-dependencies and ignoring things",
"author": "autopulated",
"licenses": [
{
"url": "https://spdx.org/licenses/Apache-2.0",
"type": "Apache-2.0"
}
],
"dependencies": {},
"testDependencies": {}
}
''',
'a/b/c/d/e/f/test.txt': '',
'a/b/c/d/e/test.c': '#error should be ignored',
'a/b/c/d/e/test.txt': '',
'a/b/c/d/test.c': '#error should be ignored',
'a/b/c/d/test.txt': '',
'a/b/c/d/z/test.c':'#error should be ignored',
'a/b/c/test.txt': '',
'a/b/test.txt':'',
'a/test.txt':'',
'comment':'# should not be ignored',
'f/f.h':'''
#ifndef __F_H__
#define __F_H__
int f();
#endif
''',
'source/moo/test.txt':'',
'source/a/b/c/d/e/f/test.txt': '',
'source/a/b/c/d/e/test.c': '#error should be ignored',
'source/a/b/c/d/e/test.txt': '',
'source/a/b/c/d/test.c': '#error should be ignored',
'source/a/b/c/d/test.txt': '',
'source/a/b/c/d/z/test.c':'#error should be ignored',
'source/a/b/c/test.txt': '',
'source/a/b/test.txt':'',
'source/a/test.txt':'',
'source/f.c':'''
int f(){
return 6;
}
''',
'test/anothertest/ignoredbyfname.c':'#error should be ignored',
# Fix: this key duplicated 'test/anothertest/ignoredbyfname.c' above, so
# the #error fixture was silently overwritten by the main() source.
'test/anothertest/anothertest.c':'''
#include <stdio.h>
#include "f/f.h"
int main(){
int result = f();
printf("%d\n", result);
return !(result == 6);
}
''',
'test/foo/ignored.c':'''
#error should be ignored
''',
'test/someothertest/alsoignored.c':'''
#error should be ignored
''',
'test/sometest/a/ignored.c':'''
#error should be ignored
'''
}
# Minimal module fixture: has no .yotta_ignore, so only the built-in
# default ignore rules apply (see test_default_ignores).
Default_Test_Files = {
'module.json': '''
{
"name": "test-testdep-f",
"version": "0.0.6",
"license": "Apache-2.0"
}'''
}
def isWindows():
    """Return True on a native Windows Python (tests can't build there yet)."""
    return os.name == 'nt'
def writeTestFiles(files):
    """Write {relative path: contents} into a fresh temp dir; return its path."""
    root = tempfile.mkdtemp()
    for relpath, contents in files.items():
        subdir, fname = os.path.split(relpath)
        subdir = os.path.join(root, subdir)
        mkDirP(subdir)
        with open(os.path.join(subdir, fname), 'w') as fh:
            fh.write(contents)
    return root
class TestPackIgnores(unittest.TestCase):
    """Checks .yotta_ignore and default ignore handling in Component."""
    @classmethod
    def setUpClass(cls):
        cls.test_dir = writeTestFiles(Test_Files)
    @classmethod
    def tearDownClass(cls):
        rmRf(cls.test_dir)
    def test_absolute_ignores(self):
        c = component.Component(self.test_dir)
        self.assertTrue(c.ignores('moo'))
        self.assertTrue(c.ignores('test/foo/ignored.c'))
    def test_glob_ignores(self):
        c = component.Component(self.test_dir)
        self.assertTrue(c.ignores('a/b/c/test.txt'))
        self.assertTrue(c.ignores('a/b/test.txt'))
        self.assertTrue(c.ignores('a/b/test.c'))
        self.assertTrue(c.ignores('source/a/b/c/test.txt'))
        self.assertTrue(c.ignores('source/a/b/test.txt'))
        self.assertTrue(c.ignores('source/a/b/test.c'))
    def test_relative_ignores(self):
        c = component.Component(self.test_dir)
        self.assertTrue(c.ignores('a/b/c/d/e/f/test.txt'))
        self.assertTrue(c.ignores('a/b/test.txt'))
        self.assertTrue(c.ignores('source/a/b/c/d/e/f/test.txt'))
        self.assertTrue(c.ignores('source/a/b/test.txt'))
        self.assertTrue(c.ignores('test/anothertest/ignoredbyfname.c'))
        self.assertTrue(c.ignores('test/someothertest/alsoignored.c'))
    def test_default_ignores(self):
        default_test_dir = writeTestFiles(Default_Test_Files)
        c = component.Component(default_test_dir)
        self.assertTrue(c.ignores('.something.c.swp'))
        self.assertTrue(c.ignores('.something.c~'))
        self.assertTrue(c.ignores('path/to/.something.c.swm'))
        self.assertTrue(c.ignores('path/to/.something.c~'))
        self.assertTrue(c.ignores('.DS_Store'))
        self.assertTrue(c.ignores('.git'))
        self.assertTrue(c.ignores('.hg'))
        self.assertTrue(c.ignores('.svn'))
        self.assertTrue(c.ignores('yotta_modules'))
        self.assertTrue(c.ignores('yotta_targets'))
        self.assertTrue(c.ignores('build'))
        self.assertTrue(c.ignores('.yotta.json'))
        rmRf(default_test_dir)
    def test_comments(self):
        c = component.Component(self.test_dir)
        self.assertFalse(c.ignores('comment'))
    @unittest.skipIf(isWindows(), "can't build natively on windows yet")
    def test_build(self):
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'build'], self.test_dir)
        self.assertNotIn('ignoredbyfname', stdout)
        self.assertNotIn('someothertest', stdout)
        self.assertNotIn('sometest', stdout)
    @unittest.skipIf(isWindows(), "can't build natively on windows yet")
    def test_test(self):
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], self.test_dir)
        self.assertNotIn('ignoredbyfname', stdout)
        self.assertNotIn('someothertest', stdout)
        self.assertNotIn('sometest', stdout)
    def runCheckCommand(self, args, test_dir):
        """Run a yotta CLI command in test_dir and assert it succeeded."""
        # Fix: honor the test_dir argument instead of always using
        # self.test_dir (the parameter was previously ignored).
        stdout, stderr, statuscode = cli.run(args, cwd=test_dir)
        if statuscode != 0:
            print('command failed with status %s' % statuscode)
            print(stdout)
            print(stderr)
        self.assertEqual(statuscode, 0)
        return stdout or stderr
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
BlackstoneEngineering/yotta
|
yotta/test/ignores.py
|
Python
|
apache-2.0
| 6,131
|
"""Gauge watcher configuration."""
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2017 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
from faucet.conf import Conf
class WatcherConf(Conf):
    """Gauge watcher configuration.

    Merges the watcher's own settings with its database config
    (via add_db) and records the datapath it monitors (via add_dp).
    """
    db = None  # pylint: disable=invalid-name
    dp = None  # pylint: disable=invalid-name
    prom_client = None
    defaults = {
        'name': None,
        'type': None,
        'dps': None,
        'interval': 30,
        'db': None,
        'db_type': 'text',
        'file': None,
        # influx database name
        'influx_db': 'faucet',
        # influx database location
        'influx_host': 'localhost',
        'influx_port': 8086,
        # influx username
        'influx_user': '',
        # influx password
        'influx_pwd': '',
        # timeout on influx requests
        'influx_timeout': 10,
        # attempts to retry influx request
        'influx_retries': 3,
        # prometheus config
        'prometheus_port': 9303,
        'prometheus_addr': '127.0.0.1',
        'views': {},
        'db_update_counter': 0,
        'nosql_db': '',
        'db_password': '',
        'flows_doc': '',
        'db_ip': '',
        'db_port': 0,
        'gdb_type': '',
        'driver': '',
        'db_username': '',
        'switches_doc': '',
        }
    def __init__(self, _id, conf, prom_client):
        """Initialize from config dict; watcher name defaults to its id."""
        super(WatcherConf, self).__init__(_id, conf)
        self.prom_client = prom_client
        self.name = str(self._id)
    def add_db(self, db_conf):
        """Add database config to this watcher."""
        # Copy so the shared db config is not mutated by the pop below.
        db_conf = deepcopy(db_conf)
        db_type = db_conf.pop('type')
        db_conf['db_type'] = db_type
        self.update(db_conf)
    def add_dp(self, dp):  # pylint: disable=invalid-name
        """Add a datapath to this watcher."""
        self.dp = dp  # pylint: disable=invalid-name
|
byllyfish/faucet
|
faucet/watcher_conf.py
|
Python
|
apache-2.0
| 2,593
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-07-10 07:31
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the Patient model's `photo` field to `picture`."""
    dependencies = [
        ('person', '0049_patient_photo'),
    ]
    operations = [
        migrations.RenameField(
            model_name='patient',
            old_name='photo',
            new_name='picture',
        ),
    ]
|
nanomolina/JP
|
src/odontology/person/migrations/0050_auto_20160710_0431.py
|
Python
|
apache-2.0
| 422
|
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Tests for grr.lib.flows.general.services."""
from grr.lib import aff4
from grr.lib import rdfvalue
from grr.lib import test_lib
class ServicesTest(test_lib.FlowTestsBaseclass):

  def testEnumerateRunningServices(self):
    """EnumerateRunningServices results land in analysis/Services."""

    class ClientMock(object):
      """Client stub returning a single canned OS X launchd service."""

      def EnumerateRunningServices(self, _):
        svc = rdfvalue.Service(label="org.openbsd.ssh-agent",
                               args="/usr/bin/ssh-agent -l")
        svc.osx_launchd.sessiontype = "Aqua"
        svc.osx_launchd.lastexitstatus = 0
        svc.osx_launchd.timeout = 30
        svc.osx_launchd.ondemand = 1
        return [svc]

    # Run the flow in the emulated way.
    for _ in test_lib.TestFlowHelper(
        "EnumerateRunningServices", ClientMock(), client_id=self.client_id,
        token=self.token):
      pass

    # Check the output collection was created and holds exactly our service.
    fd = aff4.FACTORY.Open(rdfvalue.RDFURN(self.client_id)
                           .Add("analysis/Services"),
                           token=self.token)
    self.assertEqual(fd.__class__.__name__, "RDFValueCollection")
    services = list(fd)
    self.assertEqual(len(fd), 1)
    first = services[0]
    self.assertIsInstance(first, rdfvalue.Service)
    self.assertEqual(first.label, "org.openbsd.ssh-agent")
    self.assertEqual(first.args, "/usr/bin/ssh-agent -l")
|
MiniSEC/GRR_clone
|
lib/flows/general/services_test.py
|
Python
|
apache-2.0
| 1,398
|
"""Converting BAM to BEDPE and normalized BigWig files."""
import os
from resolwe.process import (
Cmd,
DataField,
FileField,
FloatField,
Process,
SchedulingClass,
StringField,
)
class BamToBedpe(Process):
    """Takes in a BAM file and calculates a normalization factor in BEDPE format.

    Done by sorting with Samtools and transformed with Bedtools.
    """
    slug = "bedtools-bamtobed"
    name = "Bedtools bamtobed"
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/genialis/resolwebio/rnaseq:6.0.0"}
        },
        "resources": {"cores": 1, "memory": 8192},
    }
    data_name = "Bedtools bamtobed ({{alignment|sample_name|default('?')}})"
    version = "1.2.0"
    process_type = "data:bedpe"
    category = "Other"
    entity = {"type": "sample"}
    scheduling_class = SchedulingClass.BATCH
    class Input:
        """Input fields."""
        alignment = DataField("alignment:bam", label="Alignment BAM file")
    class Output:
        """Output fields."""
        bedpe = FileField(label="BEDPE file")
        species = StringField(label="Species")
        build = StringField(label="Build")
    def run(self, inputs, outputs):
        """Run the analysis."""
        path = inputs.alignment.output.bam.path
        basename = os.path.basename(path)
        assert basename.endswith(".bam")
        name = basename[:-4]
        bedpe_file = f"{name}.bedpe"
        # -n: sort by read name, which bamtobed -bedpe expects so that
        # mate pairs are adjacent.
        samtools_param = ["-n", path]
        bedtools_param = ["-bedpe", "-i"]
        # Pipe name-sorted BAM straight into bamtobed; '>' redirects the
        # pipeline's stdout into the BEDPE file.
        (
            Cmd["samtools"]["sort"][samtools_param]
            | Cmd["bedtools"]["bamtobed"][bedtools_param]
            > bedpe_file
        )()
        if not os.path.exists(bedpe_file):
            self.error("Converting BAM to BEDPE with Bedtools bamtobed failed.")
        outputs.bedpe = bedpe_file
        outputs.species = inputs.alignment.output.species
        outputs.build = inputs.alignment.output.build
class ScaleBigWig(Process):
    """Creates a scaled BigWig file.

    The scale factor is `scale / (number of BEDPE features)`, passed to
    deepTools bamCoverage via --scaleFactor.
    """
    slug = "scale-bigwig"
    name = "Deeptools bamCoverage"
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/genialis/resolwebio/rnaseq:6.0.0"}
        },
        "resources": {"cores": 1, "memory": 16384},
    }
    data_name = "Scale BigWig ({{alignment|sample_name|default('?')}})"
    version = "1.2.0"
    process_type = "data:coverage:bigwig"
    category = "Other"
    entity = {"type": "sample"}
    scheduling_class = SchedulingClass.BATCH
    class Input:
        """Input fields."""
        alignment = DataField("alignment:bam", label="Alignment BAM file")
        bedpe = DataField(
            "bedpe",
            label="BEDPE Normalization factor",
            description="The BEDPE file describes disjoint genome features, "
            "such as structural variations or paired-end sequence alignments. "
            "It is used to estimate the scale factor.",
        )
        scale = FloatField(
            label="Scale for the normalization factor",
            description="Magnitude of the scale factor. "
            "The scaling factor is calculated by dividing the scale "
            "with the number of features in BEDPE "
            "(scale/(number of features)).",
            default=10000,
        )
    class Output:
        """Output fields."""
        bigwig = FileField(label="bigwig file")
        species = StringField(label="Species")
        build = StringField(label="Build")
    def run(self, inputs, outputs):
        """Run the analysis."""
        path = inputs.alignment.output.bam.path
        basename = os.path.basename(path)
        assert basename.endswith(".bam")
        name = basename[:-4]
        out_file = f"{name}.SInorm.bigwig"
        out_index = f"{name}.bai"
        # Stream-count the BEDPE features instead of materializing the
        # whole file in memory with readlines().
        with open(inputs.bedpe.output.bedpe.path) as f:
            spike_count = sum(1 for _ in f)
        if spike_count == 0:
            # Guard against ZeroDivisionError on an empty BEDPE file.
            self.error("BEDPE file contains no features; cannot compute a scale factor.")
        scale_factor = inputs.scale / spike_count
        bam_coverage_param = [
            "--bam",
            path,
            "--scaleFactor",
            scale_factor,
            "--outFileName",
            out_file,
            "--numberOfProcessors",
            self.requirements.resources.cores,
            "--outFileFormat",
            "bigwig",
        ]
        # Index the BAM first; bamCoverage reads the .bai alongside the BAM.
        (Cmd["samtools"]["index"][path][out_index])()
        self.progress(0.5)
        (Cmd["bamCoverage"][bam_coverage_param])()
        if not os.path.exists(out_file):
            self.error("Generation of a scaled BigWig file with bamCoverage failed.")
        outputs.bigwig = out_file
        outputs.species = inputs.alignment.output.species
        outputs.build = inputs.alignment.output.build
|
genialis/resolwe-bio
|
resolwe_bio/processes/support_processors/bam_conversion.py
|
Python
|
apache-2.0
| 4,807
|
# The content of this file was generated using the Python profile of libCellML 0.2.0.
from enum import Enum
from math import *
# Presumably the version of the libCellML Python generator profile used
# to emit this file (header says libCellML 0.2.0) -- confirm upstream.
__version__ = "0.3.0"
LIBCELLML_VERSION = "0.2.0"
# Sizes of the state and algebraic-variable arrays used below.
STATE_COUNT = 1
VARIABLE_COUNT = 2
class VariableType(Enum):
    # Role of each model variable, referenced by the *_INFO tables below.
    VARIABLE_OF_INTEGRATION = 1
    STATE = 2
    CONSTANT = 3
    COMPUTED_CONSTANT = 4
    ALGEBRAIC = 5
# Metadata for the variable of integration (time, in seconds).
VOI_INFO = {"name": "time", "units": "second", "component": "my_component", "type": VariableType.VARIABLE_OF_INTEGRATION}
# Metadata for each entry of the state array (index-aligned).
STATE_INFO = [
    {"name": "x", "units": "dimensionless", "component": "my_component", "type": VariableType.STATE}
]
# Metadata for each entry of the variables array (index-aligned).
VARIABLE_INFO = [
    {"name": "b", "units": "second", "component": "my_component", "type": VariableType.ALGEBRAIC},
    {"name": "a", "units": "second", "component": "my_component", "type": VariableType.ALGEBRAIC}
]
def create_states_array():
    """Return a fresh states array of length STATE_COUNT, filled with NaN."""
    return [nan for _ in range(STATE_COUNT)]
def create_variables_array():
    """Return a fresh variables array of length VARIABLE_COUNT, filled with NaN."""
    return [nan for _ in range(VARIABLE_COUNT)]
def initialise_states_and_constants(states, variables):
    """Set initial state values; this model has no constants to initialise."""
    # x starts at 0.
    states[0] = 0.0
def compute_computed_constants(variables):
    """No computed constants in this model; intentionally a no-op."""
    pass
def compute_rates(voi, states, rates, variables):
    """Evaluate the rate equations: dx/dt = 1."""
    rates[0] = 1.0
def compute_variables(voi, states, rates, variables):
    """Evaluate the algebraic variables: b = 2*time, a = 3*b."""
    b = 2.0 * voi
    variables[0] = b
    variables[1] = 3.0 * b
|
nickerso/libcellml
|
tests/resources/generator/dependent_eqns/model.py
|
Python
|
apache-2.0
| 1,256
|
#!/usr/bin/env python
# BEGIN ALL
import rospy
import math
from std_msgs.msg import String
from geometry_msgs.msg import Twist
# Maps a pressed key to [angular, linear] velocity multipliers.
key_mapping = { 'w': [ 0, 1], 'x': [ 0, -1],
                'a': [ 1, 0], 'd': [-1, 0],
                's': [ 0, 0] }

g_twist_pub = None       # cmd_vel publisher, created in __main__
g_target_twist = None    # velocity the operator is asking for
g_last_twist = None      # velocity most recently published
# Fix: send_twist() and __main__ use the global name g_last_twist_send_time;
# the original module-level default was the unused misnamed g_last_send_time,
# leaving the real name undefined until __main__ assigned it.
g_last_twist_send_time = None
g_vel_scales = [0.1, 0.1]  # default to very slow
g_vel_ramps = [1, 1]  # units: meters per second^2
# BEGIN RAMP
def ramped_vel(v_prev, v_target, t_prev, t_now, ramp_rate):
    """Move v_prev toward v_target, limited by ramp_rate (units/second)."""
    # Largest velocity change allowed in the elapsed time.
    max_step = ramp_rate * (t_now - t_prev).to_sec()
    gap = math.fabs(v_target - v_prev)
    if gap < max_step:
        # Close enough to reach the target within this timestep.
        return v_target
    direction = 1.0 if v_target > v_prev else -1.0
    return v_prev + direction * max_step
# END RAMP
def ramped_twist(prev, target, t_prev, t_now, ramps):
    """Return a new Twist ramped from prev toward target.

    ramps[0] limits angular acceleration, ramps[1] linear acceleration.
    """
    ramped = Twist()
    ramped.angular.z = ramped_vel(prev.angular.z, target.angular.z,
                                  t_prev, t_now, ramps[0])
    ramped.linear.x = ramped_vel(prev.linear.x, target.linear.x,
                                 t_prev, t_now, ramps[1])
    return ramped
def send_twist():
    # Publish a velocity ramped from the last published value toward the
    # operator's target, and record when we did so.
    # NOTE(review): relies on g_last_twist_send_time, which is assigned in
    # the __main__ block (the module-level default is named g_last_send_time)
    # — confirm send_twist is never invoked before that assignment.
    global g_last_twist_send_time, g_target_twist, g_last_twist,\
        g_vel_scales, g_vel_ramps, g_twist_pub
    t_now = rospy.Time.now()
    g_last_twist = ramped_twist(g_last_twist, g_target_twist,
                                g_last_twist_send_time, t_now, g_vel_ramps)
    g_last_twist_send_time = t_now
    g_twist_pub.publish(g_last_twist)
def keys_cb(msg):
    """Translate a single-character key message into the target Twist.

    Empty messages and unmapped keys are ignored.
    """
    global g_target_twist, g_last_twist, g_vel_scales
    # dict.has_key() was removed in Python 3; `in` works on both 2 and 3.
    if len(msg.data) == 0 or msg.data[0] not in key_mapping:
        return  # unknown key.
    vels = key_mapping[msg.data[0]]
    g_target_twist.angular.z = vels[0] * g_vel_scales[0]
    g_target_twist.linear.x = vels[1] * g_vel_scales[1]
def fetch_param(name, default):
    """Return the ROS parameter `name`, or `default` if it is not set."""
    if rospy.has_param(name):
        return rospy.get_param(name)
    # print() as a function call keeps the file valid under Python 3
    # (the original used the Python-2-only print statement).
    print("parameter [%s] not defined. Defaulting to %.3f" % (name, default))
    return default
if __name__ == '__main__':
    rospy.init_node('keys_to_twist')
    g_last_twist_send_time = rospy.Time.now()
    g_twist_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
    rospy.Subscriber('keys', String, keys_cb)
    g_target_twist = Twist()  # initializes to zero
    g_last_twist = Twist()
    # Scales and ramp limits come from private ROS parameters, with
    # deliberately slow defaults.
    g_vel_scales[0] = fetch_param('~angular_scale', 0.1)
    g_vel_scales[1] = fetch_param('~linear_scale', 0.1)
    g_vel_ramps[0] = fetch_param('~angular_accel', 1.0)
    g_vel_ramps[1] = fetch_param('~linear_accel', 1.0)
    # Re-publish at 20 Hz so ramping stays smooth even with no new keys.
    rate = rospy.Rate(20)
    while not rospy.is_shutdown():
        send_twist()
        rate.sleep()
# END ALL
|
osrf/rosbook
|
teleop_bot/keys_to_twist_with_ramps.py
|
Python
|
apache-2.0
| 2,634
|
# -*- coding: utf-8 -*-
from tm import TuringMachine
class TuringMachineBuilder:
    """
    Creates a turing machine step by step by retrieving all the necessary
    information.

    The halt state and the blank symbol start unset and must be provided
    through setHaltState() / setBlankSymbol() before create() is called
    (the conventional values are 'HALT' and '#').
    """

    def __init__(self):
        """Initialize an empty TuringMachineBuilder."""
        self._states = set()        # every state mentioned so far
        self._in_alphabet = set()   # input alphabet (excludes the blank)
        self._trans_function = {}   # (state, symbol) -> (state, symbol, move)
        self._istate = None         # initial state
        self._fstates = set()       # final (accepting) states
        self._blank = None          # blank symbol, one char
        self._haltstate = None      # halt state

    def clean(self):
        """
        Clear all the previous stored data
        """
        self._states = set()
        self._in_alphabet = set()
        self._trans_function = {}
        self._istate = None
        self._fstates = set()
        self._blank = None
        self._haltstate = None

    def addTransition(self, state, symbol, new_state, new_symbol, movement):
        """
        addTransition(state, symbol, new_state, new_symbol, movement)

        Adds the transition:
            From state,symbol To new_state writing new_symbol at the current
            position and moving the head in movement direction

        - state: something that represents a state, must be hashable
        - symbol: something that represents a symbol, must be hashable
        - new_state: something that represents a state, must be hashable
        - new_symbol: something that represents a symbol, must be hashable
        - movement: TuringMachine.MOVE_LEFT or TuringMachine.MOVE_RIGHT or
                    TuringMachine.NON_MOVEMENT

        Raise Exception if symbols have more than one char length
        """
        if movement not in TuringMachine.HEAD_MOVEMENTS:
            raise Exception('Invalid movement')
        # Fix: the original tested hasattr(symbol, 'len'), which is never
        # true for strings, so the documented length check never fired.
        if (hasattr(symbol, '__len__') and len(symbol) > 1) or \
           (hasattr(new_symbol, '__len__') and len(new_symbol) > 1):
            raise Exception('Symbol length > 1')

        # Register states and symbols on first sight (set.add is
        # idempotent); the blank symbol stays out of the input alphabet.
        self._states.add(state)
        self._states.add(new_state)
        if symbol != self._blank:
            self._in_alphabet.add(symbol)
        if new_symbol != self._blank:
            self._in_alphabet.add(new_symbol)

        self._trans_function[(state, symbol)] = (new_state, new_symbol,
                                                 movement)

    def addFinalState(self, state):
        """
        Adds the specified state to the set of final states
        """
        self._states.add(state)
        self._fstates.add(state)

    def setInitialState(self, state):
        """
        Set the specified state as the initial. Mandatory operation
        """
        self._states.add(state)
        self._istate = state

    def hasInitialState(self):
        """
        Returns True if the initial state was specified on a previous call
        to setInitialState
        """
        return self._istate is not None

    def hasHaltState(self):
        """
        Returns True if the halt state was specified on a previous call to
        setHaltState
        """
        return self._haltstate is not None

    def hasBlankSymbol(self):
        """
        Returns True if the blank symbol was specified on a previous call to
        setBlankSymbol
        """
        return self._blank is not None

    def setBlankSymbol(self, blank_sym):
        """
        Specifies a new blank symbol
        - The blank symbol must be one char length

        Raise Exception if blank_sym has more than one char length
        """
        if not blank_sym or len(blank_sym) > 1:
            raise Exception('Symbol must be one char length')
        self._blank = blank_sym

    def setHaltState(self, haltstate):
        """
        Specifies a new halt state
        """
        # If there is a previous halt state, check whether it appears in
        # some transition; otherwise drop it from the set of states.
        # (dict.items() replaces the Python-2-only iteritems().)
        if self.hasHaltState():
            old_remains = False
            for k, v in self._trans_function.items():
                if k[0] == self._haltstate or v[0] == self._haltstate:
                    old_remains = True
                    break
            if not old_remains:
                self._states.remove(self._haltstate)
        self._haltstate = haltstate
        self._states.add(self._haltstate)

    def create(self):
        """
        Creates a turing machine instance with the collected information.

        Raises an Exception if:
            The initial state remains unset
            The halt state remains unset
            The blank symbol remains unset

        At this point the tape_alphabet is set to be: in_alphabet U {blank}
        """
        if not self.hasInitialState():
            raise Exception('It is necessary to specify an initial state')
        if not self.hasBlankSymbol():
            raise Exception('It is necessary to specify the blank symbol')
        if not self.hasHaltState():
            raise Exception('It is necessary to specify the halt state')

        tape_alphabet = set(self._in_alphabet)
        tape_alphabet.add(self._blank)
        return TuringMachine(self._states, self._in_alphabet, tape_alphabet,
                             self._trans_function, self._istate,
                             self._fstates, self._haltstate, self._blank)

    def getHaltState(self):
        """
        Returns the halt state specified or assigned by default on the
        initialization of this Builder
        """
        return self._haltstate
if __name__ == '__main__':
    # Demo: build a small 3-state machine and print it. Runs only when the
    # module is executed directly, not on import.
    tmb = TuringMachineBuilder()
    tmb.setBlankSymbol('#')
    tmb.setHaltState('HALT')
    tmb.addTransition(1, 0, 2, 1, TuringMachine.MOVE_RIGHT)
    tmb.addTransition(1, 1, 2, 0, TuringMachine.MOVE_RIGHT)
    tmb.addTransition(2, 0, 1, 0, TuringMachine.NON_MOVEMENT)
    tmb.addTransition(2, 1, 3, 1, TuringMachine.MOVE_RIGHT)
    tmb.addTransition(3, 0, 'HALT', 0, TuringMachine.NON_MOVEMENT)
    tmb.addTransition(3, 1, 'HALT', 1, TuringMachine.NON_MOVEMENT)
    tmb.addTransition(3, '#', 'HALT', '#', TuringMachine.NON_MOVEMENT)
    tmb.setInitialState(1)
    tmb.addFinalState(2)
    # print() as a function call keeps the script valid under Python 3
    # (the original used the Python-2-only print statement).
    print(tmb.create())
|
jainpranav/Turing_Machine_Simulator
|
Simulator/tmbuilder.py
|
Python
|
apache-2.0
| 7,293
|
#
# Copyright 2015 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
import yaml
from oslo_config import fixture as fixture_config
from oslo_utils import fileutils
from oslotest import mockpatch
from ceilometer import declarative
from ceilometer.hardware.inspector import base as inspector_base
from ceilometer.hardware.pollsters import generic
from ceilometer import sample
from ceilometer.tests import base as test_base
class TestMeterDefinition(test_base.BaseTestCase):
    """Unit tests for generic.MeterDefinition config validation."""

    def test_config_definition(self):
        # A complete, valid definition exposes its fields as attributes.
        cfg = dict(name='test',
                   type='gauge',
                   unit='B',
                   snmp_inspector={})
        definition = generic.MeterDefinition(cfg)
        self.assertEqual('test', definition.name)
        self.assertEqual('gauge', definition.type)
        self.assertEqual('B', definition.unit)
        self.assertEqual({}, definition.snmp_inspector)

    def test_config_missing_field(self):
        # assertRaises also fails the test when no exception is raised;
        # the original try/except passed silently in that case.
        cfg = dict(name='test', type='gauge')
        with self.assertRaises(declarative.MeterDefinitionException) as exc_ctx:
            generic.MeterDefinition(cfg)
        self.assertEqual("Missing field unit", exc_ctx.exception.brief_message)

    def test_config_invalid_field(self):
        # Unknown fields are dropped rather than stored on the definition.
        cfg = dict(name='test',
                   type='gauge',
                   unit='B',
                   invalid={})
        definition = generic.MeterDefinition(cfg)
        self.assertEqual("foobar", getattr(definition, 'invalid', 'foobar'))

    def test_config_invalid_type_field(self):
        # Same assertRaises hardening as test_config_missing_field.
        cfg = dict(name='test',
                   type='invalid',
                   unit='B',
                   snmp_inspector={})
        with self.assertRaises(declarative.MeterDefinitionException) as exc_ctx:
            generic.MeterDefinition(cfg)
        self.assertEqual("Unrecognized type value invalid",
                         exc_ctx.exception.brief_message)

    @mock.patch('ceilometer.hardware.pollsters.generic.LOG')
    def test_bad_metric_skip(self, LOG):
        # A definition with a bad type is skipped and logged, while the
        # valid definitions around it still load.
        cfg = {'metric': [dict(name='test1',
                               type='gauge',
                               unit='B',
                               snmp_inspector={}),
                          dict(name='test_bad',
                               type='invalid',
                               unit='B',
                               snmp_inspector={}),
                          dict(name='test2',
                               type='gauge',
                               unit='B',
                               snmp_inspector={})]}
        data = generic.load_definition(cfg)
        self.assertEqual(2, len(data))
        LOG.error.assert_called_with(
            "Error loading meter definition: %s",
            "Unrecognized type value invalid")
class FakeInspector(inspector_base.Inspector):
    """Stub inspector that serves canned readings keyed by hostname."""

    # Metadata attached to the 'test2' reading (network-style resource).
    net_metadata = dict(name='test.teest',
                        mac='001122334455',
                        ip='10.0.0.2',
                        speed=1000)
    # hostname -> (value, metadata, extra) tuple yielded by inspect_generic.
    DATA = {
        'test': (0.99, {}, {}),
        'test2': (90, net_metadata, {}),
    }

    def inspect_generic(self, host, cache,
                        extra_metadata=None, param=None):
        # Yield the single canned reading for the polled host.
        yield self.DATA[host.hostname]
class TestGenericPollsters(test_base.BaseTestCase):
    """Exercise GenericHardwareDeclarativePollster against FakeInspector."""

    @staticmethod
    def faux_get_inspector(url, namespace=None):
        # Hand back the canned inspector regardless of the resource URL.
        return FakeInspector()

    def setUp(self):
        super(TestGenericPollsters, self).setUp()
        self.conf = self.useFixture(fixture_config.Config()).conf
        # Two fake SNMP resources; FakeInspector serves data per hostname.
        self.resources = ["snmp://test", "snmp://test2"]
        self.useFixture(mockpatch.Patch(
            'ceilometer.hardware.inspector.get_inspector',
            self.faux_get_inspector))
        self.conf(args=[])
        self.pollster = generic.GenericHardwareDeclarativePollster()

    def _setup_meter_def_file(self, cfg):
        # Write the YAML meter definitions to a temp file and point the
        # hardware.meter_definitions_file option at it, then load them.
        if six.PY3:
            cfg = cfg.encode('utf-8')
        meter_cfg_file = fileutils.write_to_tempfile(content=cfg,
                                                     prefix="snmp",
                                                     suffix="yaml")
        self.conf.set_override(
            'meter_definitions_file',
            meter_cfg_file, group='hardware')
        cfg = declarative.load_definitions(
            self.conf, {}, self.conf.hardware.meter_definitions_file)
        return cfg

    def _check_get_samples(self, name, definition,
                           expected_value, expected_type, expected_unit=None):
        # Poll both resources with the given definition and verify the
        # produced samples' name/value/type (and unit when given).
        self.pollster._update_meter_definition(definition)
        cache = {}
        samples = list(self.pollster.get_samples(None, cache,
                                                 self.resources))
        self.assertTrue(samples)
        self.assertIn(self.pollster.CACHE_KEY, cache)
        for resource in self.resources:
            self.assertIn(resource, cache[self.pollster.CACHE_KEY])
        self.assertEqual(set([name]),
                         set([s.name for s in samples]))
        match = [s for s in samples if s.name == name]
        self.assertEqual(expected_value, match[0].volume)
        self.assertEqual(expected_type, match[0].type)
        if expected_unit:
            self.assertEqual(expected_unit, match[0].unit)

    def test_get_samples(self):
        param = dict(matching_type='type_exact',
                     oid='1.3.6.1.4.1.2021.10.1.3.1',
                     type='lambda x: float(str(x))')
        meter_def = generic.MeterDefinition(dict(type='gauge',
                                                 name='hardware.test1',
                                                 unit='process',
                                                 snmp_inspector=param))
        # 0.99 is the canned FakeInspector value for host 'test'.
        self._check_get_samples('hardware.test1',
                                meter_def,
                                0.99, sample.TYPE_GAUGE,
                                expected_unit='process')

    def test_get_pollsters_extensions(self):
        param = dict(matching_type='type_exact',
                     oid='1.3.6.1.4.1.2021.10.1.3.1',
                     type='lambda x: float(str(x))')
        meter_cfg = yaml.dump(
            {'metric': [dict(type='gauge',
                             name='hardware.test1',
                             unit='process',
                             snmp_inspector=param),
                        dict(type='gauge',
                             name='hardware.test2.abc',
                             unit='process',
                             snmp_inspector=param)]})
        self._setup_meter_def_file(meter_cfg)
        pollster = generic.GenericHardwareDeclarativePollster
        # Clear cached mapping
        pollster.mapping = None
        # One extension is created per configured meter definition.
        exts = pollster.get_pollsters_extensions()
        self.assertEqual(2, len(exts))
        self.assertIn(exts[0].name, ['hardware.test1', 'hardware.test2.abc'])
        self.assertIn(exts[1].name, ['hardware.test1', 'hardware.test2.abc'])
|
ityaptin/ceilometer
|
ceilometer/tests/unit/hardware/pollsters/test_generic.py
|
Python
|
apache-2.0
| 7,408
|
from django.contrib import admin
from django.core.urlresolvers import NoReverseMatch
from . import models
class WPSiteAdmin(admin.ModelAdmin):
    """Admin for WordPress sites; name/description are synced, not edited."""

    list_display = ('name', 'url', 'hook')
    readonly_fields = ('name', 'description')

    def save_model(self, request, obj, form, change):
        # TODO do this sync async (give celery another shot?)
        # Saving also pulls all remote content from the WordPress site.
        obj.save()
        obj.fetch_all()

    # CUSTOM METHODS #

    def hook(self, obj):
        """
        This is where an admin can find what url to point the webhook to.
        Doing it as an absolute url lets us cheat and make the browser figure
        out the host for us.
        Requires HookPress: http://wordpress.org/plugins/hookpress/
        """
        try:
            return (u'<a href="{}" title="Add a save_post hook with the ID">'
                    'Webhook</a>'.format(obj.hook_url))
        except NoReverseMatch:
            # Site not routable yet; render nothing rather than crash.
            return ''
    # Render the link as raw HTML in the change list
    # (allow_tags is the pre-Django-2.0 API, matching this codebase).
    hook.allow_tags = True

admin.site.register(models.WPSite, WPSiteAdmin)
class WPUserAdmin(admin.ModelAdmin):
    # synced_at is maintained by the sync machinery, never hand-edited.
    readonly_fields = ('synced_at', )
admin.site.register(models.WPUser, WPUserAdmin)

class WPCategoryAdmin(admin.ModelAdmin):
    readonly_fields = ('synced_at', )
admin.site.register(models.WPCategory, WPCategoryAdmin)

class WPTagAdmin(admin.ModelAdmin):
    readonly_fields = ('synced_at', )
admin.site.register(models.WPTag, WPTagAdmin)

class WPPostAdmin(admin.ModelAdmin):
    list_display = ('title', 'date', 'type', 'status', )
    list_filter = ('type', 'status', )
    readonly_fields = ('synced_at', )
admin.site.register(models.WPPost, WPPostAdmin)

class WPLogAdmin(admin.ModelAdmin):
    # Log entries are immutable records of webhook/sync activity.
    list_display = ('timestamp', 'wp', 'action', )
    list_filter = ('wp', 'action', )
    readonly_fields = ('wp', 'timestamp', 'action', 'body', )
admin.site.register(models.WPLog, WPLogAdmin)
|
texastribune/wjordpress
|
wjordpress/admin.py
|
Python
|
apache-2.0
| 1,830
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
from transformer_layers import TransformerBlock
import tensorflow as tf
def mean_pool(x, m):
    """Average x over axis 1, counting only positions where mask m is set."""
    mask = tf.cast(m, tf.float32)
    masked = tf.multiply(x, tf.expand_dims(mask, 2))
    totals = tf.reduce_sum(masked, 1)
    counts = tf.reduce_sum(mask, 1, keepdims=True)
    return totals / counts
class RNN(object):
    """Bidirectional LSTM text encoder.

    Two independent CuDNN LSTMs (each num_units // 2 wide) run over the
    sequence; their last-valid-step outputs are concatenated to num_units.
    """

    def __init__(self, num_units):
        self.rnn_fw = tf.keras.layers.CuDNNLSTM(units=num_units // 2,
                                                return_sequences=True,
                                                go_backwards=False,
                                                name='rnn_fw')
        self.rnn_bw = tf.keras.layers.CuDNNLSTM(units=num_units // 2,
                                                return_sequences=True,
                                                go_backwards=False,
                                                name='rnn_bw')

    def forward(self, inputs, masks):
        # inputs: (batch, time, dim) features; masks: (batch, time) 0/1
        # validity mask — assumed contiguous from t=0 (TODO confirm).
        def rnn_fn(x, m, rnn):
            x = rnn(x)
            # x = tf.reduce_max(x, 1)  # max pooling
            # x = mean_pool(x, m)  # mean pooling
            # Pick the output at the last valid timestep of each sequence.
            indices = tf.reduce_sum(m, 1, keepdims=True) - 1
            x = tf.gather_nd(x, tf.cast(indices, tf.int32), batch_dims=1)
            return x

        lengths = tf.reduce_sum(tf.cast(masks, tf.int32), axis=1)
        masks = tf.cast(masks, tf.float32)
        # Zero out padded positions before feeding the RNNs.
        inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
        # The backward LSTM consumes each sequence reversed within its length.
        inputs_bw = tf.reverse_sequence(inputs, lengths, 1, 0)
        outputs_fw = rnn_fn(inputs, masks, self.rnn_fw)
        outputs_bw = rnn_fn(inputs_bw, masks, self.rnn_bw)
        outputs = tf.concat([outputs_fw, outputs_bw], axis=1)
        return outputs
class Transformer(object):
    """Text encoder: dense projection followed by a 2-layer TransformerBlock."""

    def __init__(self, num_units):
        self.hidden = tf.keras.layers.Dense(num_units)
        # Inner (feed-forward) width is 4x the model width.
        self.transformer = TransformerBlock(num_units, num_units * 4,
                                            num_layer=2)

    def forward(self, inputs, masks):
        masks = tf.cast(masks, tf.float32)
        # Zero out padded positions before projecting.
        inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
        inputs = self.hidden(inputs)
        return self.transformer.forward(inputs, masks)
class DAN(object):
    """Deep averaging network: masked mean pooling, then one ReLU layer."""

    def __init__(self, num_units):
        self.hidden = tf.keras.layers.Dense(num_units, activation=tf.nn.relu)

    def forward(self, inputs, masks):
        mask = tf.cast(masks, tf.float32)
        masked = tf.multiply(inputs, tf.expand_dims(mask, 2))
        pooled = tf.reduce_sum(masked, 1) / tf.reduce_sum(mask, 1, keepdims=True)
        return self.hidden(pooled)
def get_text_encoder(encoder_type='rnn'):
    """Return the encoder class for encoder_type: 'rnn', 'trans', or 'dan'.

    Raises ValueError for any other encoder_type.
    """
    encoders = {'rnn': RNN, 'trans': Transformer, 'dan': DAN}
    if encoder_type not in encoders:
        raise ValueError(encoder_type)
    return encoders[encoder_type]
class ImageTextEmbedding(object):
    """Joint image-text embedding model.

    Projects image features and encoded text into a shared space and
    trains with one of three losses, selected by the constructor flags:
    contrastive, internal (softmax), or the triplet-style embedding_loss.
    """

    def __init__(self, word_emb, encoder_dim, encoder_type='rnn', norm=True,
                 drop_p=0.25, contrastive=False, margin=0.5, num_neg_sample=10,
                 lambda1=1.0, lambda2=1.0, internal=True):
        # Trainable word embedding matrix initialised from word_emb.
        self.word_emb = tf.Variable(tf.convert_to_tensor(word_emb), name="emb",
                                    trainable=True)
        self.text_encoder = get_text_encoder(encoder_type)(encoder_dim)
        # Linear projections into the shared embedding space.
        self.text_feat_proj = tf.keras.layers.Dense(encoder_dim)
        self.img_feat_proj = tf.keras.layers.Dense(encoder_dim)
        self.dropout = tf.keras.layers.Dropout(drop_p)
        self.margin = margin
        self.num_neg_sample = num_neg_sample
        self.lambda1 = lambda1
        self.lambda2 = lambda2
        self.contrastive = contrastive
        self.internal = internal
        self.norm = norm  # normalize the embedding
        # Intermediate text representations, appended during forward_text.
        self.text_outputs = []

    def forward_img(self, img_inputs, training):
        # Project (and optionally L2-normalize) image features; dropout is
        # active only while training.
        x = self.img_feat_proj(img_inputs)
        if self.norm:
            x = tf.nn.l2_normalize(x, axis=-1)
        return self.dropout(x, training=training)

    def forward_text(self, text_inputs, text_masks, training):
        # Rank-2 inputs are token ids to embed; otherwise they are assumed
        # to already be dense features — TODO confirm with callers.
        if len(text_inputs.get_shape()) == 2:
            x = tf.nn.embedding_lookup(self.word_emb, text_inputs)
        else:
            x = text_inputs
        self.text_outputs.append(mean_pool(x, text_masks))
        x = self.text_encoder.forward(x, text_masks)
        self.text_outputs.append(x)
        x = self.text_feat_proj(x)
        if self.norm:
            x = tf.nn.l2_normalize(x, axis=-1)
        return self.dropout(x, training=training)

    def encode(self, img_inputs, text_inputs, text_masks, training):
        # Embed both modalities into the shared space.
        img_feats = self.forward_img(img_inputs, training)
        text_feats = self.forward_text(text_inputs, text_masks, training)
        return img_feats, text_feats

    def forward(self, img_inputs, text_inputs, text_masks, labels, training):
        """Return (loss, recall@{1,5,10} metrics) for one batch."""
        img_feats, text_feats = self.encode(img_inputs, text_inputs,
                                            text_masks, training)
        if self.contrastive:
            loss = contrastive_loss(img_feats, text_feats, self.margin)
            sent_im_dist = - similarity_fn(text_feats, img_feats)
        elif self.internal:
            loss = internal_loss(img_feats, text_feats, labels)
            sent_im_dist = - similarity_fn(text_feats, img_feats)
        else:
            loss = embedding_loss(img_feats, text_feats, labels, self.margin,
                                  self.num_neg_sample, self.lambda1, self.lambda2)
            sent_im_dist = pdist(text_feats, img_feats)
        rec = recall_k(sent_im_dist, labels, ks=[1, 5, 10])
        return loss, rec
def order_sim(im, s):
    """Order-violation similarity: penalizes coordinates where s exceeds im.

    Returns a (num_im, num_s) score matrix; larger is more similar.
    """
    im_row = tf.expand_dims(im, 0)
    s_col = tf.expand_dims(s, 1)
    violation = tf.clip_by_value(s_col - im_row, 0, 1e6)
    dist = tf.sqrt(tf.reduce_sum(violation ** 2, 2))
    return -tf.transpose(dist)
def similarity_fn(im, s, order=False):
    """Similarity matrix between image and sentence embeddings.

    Order-violation similarity when order=True, otherwise a plain dot
    product (cosine similarity if the embeddings are L2-normalized).
    """
    return order_sim(im, s) if order else tf.matmul(im, s, transpose_b=True)
def internal_loss(im_embeds, sent_embeds, im_labels):
    # Symmetric softmax cross-entropy: classify the matching image for each
    # sentence, and the matching sentence(s) for each image, using dot
    # products as logits.
    logits_s = tf.matmul(sent_embeds, im_embeds, transpose_b=True)
    cost_s = tf.nn.softmax_cross_entropy_with_logits_v2(im_labels, logits_s)
    logits_im = tf.matmul(im_embeds, sent_embeds, transpose_b=True)
    cost_im = tf.nn.softmax_cross_entropy_with_logits_v2(tf.transpose(im_labels),
                                                         logits_im)
    return tf.reduce_mean(cost_s) + tf.reduce_mean(cost_im)
def contrastive_loss(im_embeds, sent_embeds, margin, max_violation=True):
    """ modified https://github.com/fartashf/vsepp/blob/master/model.py#L260 """
    # Pairwise similarities; the diagonal holds the positive pairs.
    scores = similarity_fn(im_embeds, sent_embeds)
    batch_size = tf.shape(im_embeds)[0]
    diagonal = tf.diag_part(scores)
    d1 = tf.reshape(diagonal, (batch_size, 1))
    d2 = tf.reshape(diagonal, (1, batch_size))
    # Hinge: each negative should score at least `margin` below its positive.
    cost_s = tf.clip_by_value(margin + scores - d1, 0, 1e6)
    cost_im = tf.clip_by_value(margin + scores - d2, 0, 1e6)
    # Positives themselves must not contribute to the cost.
    zeros = tf.zeros(batch_size)
    cost_s = tf.matrix_set_diag(cost_s, zeros)
    cost_im = tf.matrix_set_diag(cost_im, zeros)
    if max_violation:
        # Keep only the hardest negative per row/column.
        cost_s = tf.reduce_max(cost_s, 1)
        cost_im = tf.reduce_max(cost_im, 0)
    return tf.reduce_sum(cost_s) + tf.reduce_sum(cost_im)
def pdist(x1, x2):
    """
    x1: Tensor of shape (h1, w)
    x2: Tensor of shape (h2, w)
    Return pairwise distance for each row vector in x1, x2 as
    a Tensor of shape (h1, h2)
    """
    # ||a - b||^2 = ||a||^2 - 2ab + ||b||^2, batched via one matmul.
    x1_square = tf.reshape(tf.reduce_sum(x1 * x1, axis=1), [-1, 1])
    x2_square = tf.reshape(tf.reduce_sum(x2 * x2, axis=1), [1, -1])
    # The 1e-4 keeps sqrt away from tiny negative values caused by rounding.
    return tf.sqrt(x1_square - 2 * tf.matmul(x1, tf.transpose(x2)) + x2_square +
                   1e-4)
def embedding_loss(im_embeds, sent_embeds, im_labels, margin, num_neg_sample,
                   lambda1, lambda2):
    """
    im_embeds: (b, 512) image embedding tensors
    sent_embeds: (sample_size * b, 512) sentence embedding tensors
        where the order of sentence corresponds to the order of images and
        sentences for the same image are next to each other
    im_labels: (sample_size * b, b) boolean tensor, where (i, j) entry is
        True if and only if sentence[i], image[j] is a positive pair

    Returns lambda1 * image_loss + sentence_loss + lambda2 * sentence_only_loss,
    each a margin hinge averaged over the num_neg_sample hardest negatives.
    """
    im_labels = tf.cast(im_labels, tf.bool)
    # compute embedding loss
    num_img = tf.shape(im_embeds)[0]
    num_sent = tf.shape(sent_embeds)[0]
    sent_im_ratio = tf.div(num_sent, num_img)
    sent_im_dist = pdist(sent_embeds, im_embeds)
    # image loss: sentence, positive image, and negative image
    pos_pair_dist = tf.reshape(tf.boolean_mask(sent_im_dist, im_labels),
                               [num_sent, 1])
    neg_pair_dist = tf.reshape(tf.boolean_mask(sent_im_dist, ~im_labels),
                               [num_sent, -1])
    im_loss = tf.clip_by_value(margin + pos_pair_dist - neg_pair_dist,
                               0, 1e6)
    # Average over the num_neg_sample largest hinge violations only.
    im_loss = tf.reduce_mean(tf.nn.top_k(im_loss, k=num_neg_sample)[0])
    # sentence loss: image, positive sentence, and negative sentence
    neg_pair_dist = tf.reshape(
        tf.boolean_mask(tf.transpose(sent_im_dist), ~tf.transpose(im_labels)),
        [num_img, -1])
    # Tile per-image negatives so each of the sample_size sentences of an
    # image sees the same negative set.
    neg_pair_dist = tf.reshape(
        tf.tile(neg_pair_dist, [1, sent_im_ratio]), [num_sent, -1])
    sent_loss = tf.clip_by_value(margin + pos_pair_dist - neg_pair_dist, 0, 1e6)
    sent_loss = tf.reduce_mean(tf.nn.top_k(sent_loss, k=num_neg_sample)[0])
    # sentence only loss (neighborhood-preserving constraints)
    sent_sent_dist = pdist(sent_embeds, sent_embeds)
    sent_sent_mask = tf.reshape(tf.tile(tf.transpose(im_labels),
                                        [1, sent_im_ratio]),
                                [num_sent, num_sent])
    pos_pair_dist = tf.reshape(tf.boolean_mask(sent_sent_dist, sent_sent_mask),
                               [-1, sent_im_ratio])
    # Worst (farthest) positive sentence neighbor per sentence.
    pos_pair_dist = tf.reduce_max(pos_pair_dist, axis=1, keep_dims=True)
    neg_pair_dist = tf.reshape(tf.boolean_mask(sent_sent_dist, ~sent_sent_mask),
                               [num_sent, -1])
    sent_only_loss = tf.clip_by_value(margin + pos_pair_dist - neg_pair_dist,
                                      0, 1e6)
    sent_only_loss = tf.reduce_mean(tf.nn.top_k(sent_only_loss,
                                                k=num_neg_sample)[0])
    loss = im_loss * lambda1 + sent_loss + sent_only_loss * lambda2
    return loss
def recall_k(sent_im_dist, im_labels, ks=(1, 5, 10)):
    """
    Compute recall at given ks.

    Returns one scalar per k for image->sentence retrieval followed by one
    per k for sentence->image retrieval.
    """
    im_labels = tf.cast(im_labels, tf.bool)

    def retrieval_recall(dist, labels, k):
        # Use negative distance to find the index of
        # the smallest k elements in each row.
        pred = tf.nn.top_k(-dist, k=k)[1]
        # Create a boolean mask for each column (k value) in pred,
        # s.t. mask[i][j] is 1 iff pred[i][k] = j.
        pred_k_mask = lambda topk_idx: tf.one_hot(topk_idx, tf.shape(labels)[1],
                                                  on_value=True, off_value=False,
                                                  dtype=tf.bool)
        # Create a boolean mask for the predicted indices
        # by taking logical or of boolean masks for each column,
        # s.t. mask[i][j] is 1 iff j is in pred[i].
        pred_mask = tf.reduce_any(tf.map_fn(
            pred_k_mask, tf.transpose(pred), dtype=tf.bool), axis=0)
        # Entry (i, j) is matched iff pred_mask[i][j] and labels[i][j] are 1.
        matched = tf.cast(tf.logical_and(pred_mask, labels), dtype=tf.float32)
        # A row scores 1 if any of its top-k predictions is a true positive.
        return tf.reduce_mean(tf.reduce_max(matched, axis=1))

    img_sent_recall = [retrieval_recall(tf.transpose(sent_im_dist),
                                        tf.transpose(im_labels), k) for k in ks]
    sent_img_recall = [retrieval_recall(sent_im_dist, im_labels, k) for k in ks]
    return img_sent_recall + sent_img_recall
|
google/embedding-tests
|
thought/image_text_model.py
|
Python
|
apache-2.0
| 11,824
|
"""
pygments.styles.sas
~~~~~~~~~~~~~~~~~~~
Style inspired by SAS' enhanced program editor. Note This is not
meant to be a complete style. It's merely meant to mimic SAS'
program editor syntax highlighting.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Other, Whitespace, Generic
class SasStyle(Style):
    """
    Style inspired by SAS' enhanced program editor. Note This is not
    meant to be a complete style. It's merely meant to mimic SAS'
    program editor syntax highlighting.
    """

    default_style = ''

    # Token type -> style string (color / weight / background), mirroring
    # the SAS program editor palette.
    styles = {
        Whitespace: '#bbbbbb',
        Comment: 'italic #008800',
        String: '#800080',
        Number: 'bold #2e8b57',
        Other: 'bg:#ffffe0',
        Keyword: '#2c2cff',
        Keyword.Reserved: 'bold #353580',
        Keyword.Constant: 'bold',
        Name.Builtin: '#2c2cff',
        Name.Function: 'bold italic',
        Name.Variable: 'bold #2c2cff',
        Generic: '#2c2cff',
        Generic.Emph: '#008800',
        Generic.Error: '#d30202',
        Error: 'bg:#e3d2d2 #a61717'
    }
|
sonntagsgesicht/regtest
|
.aux/venv/lib/python3.9/site-packages/pygments/styles/sas.py
|
Python
|
apache-2.0
| 1,417
|
from JumpScale9 import j
redisFound = False
try:
    # Optional dependency block: when the redis libraries are missing the
    # factory degrades gracefully (get() raises a clear RuntimeError).
    from .Redis import Redis
    from .RedisQueue import RedisQueue
    from redis._compat import nativestr
    import socket
    redisFound = True
except ImportError:
    # Narrowed from a bare `except:` so only missing modules are tolerated
    # and genuine errors are not silently swallowed.
    pass
import os
import time
# import sys
from JumpScale9 import tcpPortConnectionTest
from JumpScale9.core.JSBase import JSBase as JSBASE
class RedisFactory(JSBASE):
"""
"""
def __init__(self):
self.__jslocation__ = "j.clients.redis"
JSBASE.__init__(self)
self.cache_clear()
self._running = None
def cache_clear(self):
"""
clear the cache formed by the functions get() and getQueue()
"""
self._redis = {}
self._redisq = {}
self._config = {}
def get(
self,
ipaddr="localhost",
port=6379,
password="",
fromcache=True,
unixsocket=None,
ardb_patch=False,
set_patch=False,
ssl=False,
ssl_certfile=None,
ssl_keyfile=None,
**args):
"""
get an instance of redis client, store it in cache so we could easily retrieve it later
:param ipaddr: used to form the key when no unixsocket
:param port: used to form the key when no unixsocket
:param fromcache: if False, will create a new one instead of checking cache
:param unixsocket: path of unixsocket to be used while creating Redis
other arguments to redis: ssl_cert_reqs=None, ssl_ca_certs=None
set_patch is needed when using the client for gedis
"""
if redisFound == False:
raise RuntimeError("redis libraries are not installed, please pip3 install them.")
if unixsocket is None:
key = "%s_%s" % (ipaddr, port)
else:
key = unixsocket
if key not in self._redis or not fromcache:
if unixsocket is None:
self._redis[key] = Redis(ipaddr, port, password=password, ssl=ssl, ssl_certfile=ssl_certfile, ssl_keyfile=ssl_keyfile, **args)
else:
self._redis[key] = Redis(unix_socket_path=unixsocket, password=password, ssl=ssl, ssl_certfile=ssl_certfile, ssl_keyfile=ssl_keyfile, **args)
if ardb_patch:
self._ardb_patch(self._redis[key])
if set_patch:
self._set_patch(self._redis[key])
return self._redis[key]
def _ardb_patch(self, client):
client.response_callbacks['HDEL'] = lambda r: r and nativestr(r) == 'OK'
def _set_patch(self, client):
client.response_callbacks['SET'] = lambda r: r
def getQueue(self, ipaddr, port, name, namespace="queues", fromcache=True):
"""
get an instance of redis queue, store it in cache so we can easily retrieve it later
:param ipaddr: used to form the key when no unixsocket
:param port: used to form the key when no unixsocket
:param name: name of the queue
:param namespace: value of namespace for the queue
:param fromcache: if False, will create a new one instead of checking cache
"""
if not fromcache:
return RedisQueue(self.get(ipaddr, port, fromcache=False), name, namespace=namespace)
key = "%s_%s_%s_%s" % (ipaddr, port, name, namespace)
if key not in self._redisq:
self._redisq[key] = RedisQueue(
self.get(ipaddr, port), name, namespace=namespace)
return self._redisq[key]
def core_get(self):
"""
will try to create redis connection to $tmpdir/redis.sock
if that doesn't work then will look for std redis port
if that does not work then will return None
j.clients.redis.core_get()
"""
unix_socket_path = '%s/redis.sock' % j.dirs.TMPDIR
db = None
if os.path.exists(path=unix_socket_path):
db = Redis(unix_socket_path=unix_socket_path)
else:
self.core_start()
db = Redis(unix_socket_path=unix_socket_path)
return db
def kill(self):
"""
kill all running redis instances
"""
j.sal.process.execute("redis-cli -s %s/redis.sock shutdown" %
j.dirs.TMPDIR, die=False, showout=False)
j.sal.process.execute("redis-cli shutdown", die=False, showout=False)
j.sal.process.killall("redis")
j.sal.process.killall("redis-server")
def core_running(self):
if self._running==None:
self._running=j.sal.nettools.tcpPortConnectionTest("localhost",6379)
return self._running
def core_check(self):
if not self.core_running():
self.core_start()
return self._running
def core_start(self, timeout=20):
    """
    starts a redis instance in a separate process,
    standard on $tmpdir/redis.sock (and TCP port 6379)

    :param timeout: seconds to wait for the server to accept connections
    :raises RuntimeError: when redis cannot be installed or the platform
        is not supported
    :raises j.exceptions.Timeout: when the server does not come up in time
    """
    if j.core.platformtype.myplatform.isMac:
        if not j.sal.process.checkInstalled("redis-server"):
            # prefab.system.package.install('redis')
            j.sal.process.execute("brew unlink redis", die=False)
            j.sal.process.execute("brew install redis;brew link redis")
        # re-check after the install attempt
        if not j.sal.process.checkInstalled("redis-server"):
            raise RuntimeError("Cannot find redis-server even after install")
        # stop any instance that may already be running (socket + TCP)
        j.sal.process.execute("redis-cli -s %s/redis.sock shutdown" %
                              j.dirs.TMPDIR, die=False, showout=False)
        j.sal.process.execute("redis-cli shutdown", die=False, showout=False)
    elif j.core.platformtype.myplatform.isLinux:
        # best-effort install through the distro package manager
        if j.core.platformtype.myplatform.isAlpine:
            os.system("apk add redis")
        elif j.core.platformtype.myplatform.isUbuntu:
            os.system("apt install redis-server -y")
    else:
        raise RuntimeError("platform not supported for start redis")
    # cmd = "redis-server --port 6379 --unixsocket %s/redis.sock --maxmemory 100000000 --daemonize yes" % tmpdir # 100MB
    # self.logger.info("start redis in background (osx)")
    # os.system(cmd)
    # self.logger.info("started")
    # time.sleep(1)
    # elif j.core.platformtype.myplatform.isCygwin:
    #     cmd = "redis-server --maxmemory 100000000 & "
    #     self.logger.info("start redis in background (win)")
    #     os.system(cmd)
    # kernel tuning recommended by redis (huge pages off, overcommit on)
    cmd = "echo never > /sys/kernel/mm/transparent_hugepage/enabled"
    os.system(cmd)
    if not j.core.platformtype.myplatform.isMac:
        cmd = "sysctl vm.overcommit_memory=1"
        os.system(cmd)
    # redis_bin = "redis-server"
    if "TMPDIR" in os.environ:
        tmpdir = os.environ["TMPDIR"]
    else:
        tmpdir = "/tmp"
    # 100MB memory cap, daemonized, listening on socket + TCP 6379
    cmd = "redis-server --port 6379 --unixsocket %s/redis.sock --maxmemory 100000000 --daemonize yes" % tmpdir
    self.logger.info(cmd)
    j.sal.process.execute(cmd)
    # poll until the server answers on TCP or the timeout expires
    # NOTE(review): bare tcpPortConnectionTest is used here, while
    # core_running() calls j.sal.nettools.tcpPortConnectionTest -- this
    # assumes a module-level import of the bare name; TODO confirm.
    limit_timeout = time.time() + timeout
    while time.time() < limit_timeout:
        if tcpPortConnectionTest("localhost", 6379):
            break
        time.sleep(2)
    else:
        raise j.exceptions.Timeout("Couldn't start redis server")
|
Jumpscale/core9
|
JumpScale9/clients/redis/RedisFactory.py
|
Python
|
apache-2.0
| 7,379
|
# Copyright 2016 Quora, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Enum implementation.
"""
__all__ = ["Enum", "EnumType", "EnumValueGenerator", "Flags", "IntEnum"]
import inspect
import sys
from . import helpers
from . import inspection
_no_default = helpers.MarkerObject("no_default @ enums")
class EnumType(type):
    """Metaclass for all enum types.

    Gives the enum *class itself* container behavior (membership over
    values, length, iteration over members) and makes calling the class
    parse a value into an existing member rather than minting arbitrary
    instances.
    """

    def __init__(cls, what, bases=None, dict=None):
        super().__init__(what, bases, dict)
        # collect the int attributes declared on the class into members
        cls.process()

    def __contains__(self, k):
        # NOTE: membership is tested against member *values*, not names
        return k in self._value_to_name

    def __len__(self):
        return len(self._members)

    def __iter__(self):
        return iter(self._members)

    def __call__(self, value, default=_no_default):
        """Instantiating an Enum always produces an existing value or throws an exception."""
        return self.parse(value, default=default)

    def process(self):
        """Scan the class for int attributes and register them as members.

        Builds the name/value lookup tables, the descending list of flag
        values (zero excluded), and the value-sorted member list, then
        replaces each raw int attribute with its member instance.
        """
        name_to_member = {}
        value_to_member = {}
        value_to_name = {}
        flag_values = []
        members = []
        for k, v in list(inspect.getmembers(self)):
            # ensure that names are unicode, even in py2
            if isinstance(k, bytes):
                k = k.decode("ascii")
            if isinstance(type(v), EnumType):
                v = v.value  # For inherited members
            if isinstance(v, int):
                assert (
                    v not in value_to_member
                ), "Duplicate enum value: %s (class: %s)." % (
                    v,
                    inspection.get_full_name(self),
                )
                member = self._make_value(v)
                name_to_member[k] = member
                value_to_member[v] = member
                value_to_name[v] = k
                # zero means "no flags", so it is not a flag bit
                if v != 0:
                    flag_values.append(v)
                members.append(member)
        self._name_to_member = name_to_member
        self._value_to_member = value_to_member
        self._value_to_name = value_to_name
        # descending order so Flags.short_name matches compound flags first
        self._flag_values = list(reversed(sorted(flag_values)))
        self._members = sorted(members, key=lambda m: m.value)
        for m in members:
            setattr(self, m.short_name, m)

    def _make_value(self, value):
        """Instantiates an enum with an arbitrary value."""
        member = self.__new__(self, value)
        member.__init__(value)
        return member

    # Needed bcz of a six bug: https://github.com/benjaminp/six/issues/252
    @classmethod
    def __prepare__(cls, name, bases, **kwargs):
        return {}
class EnumBase(metaclass=EnumType):
    """Common machinery shared by Enum and Flags.

    An instance wraps a single int in ``self.value``. The class-level
    tables below are placeholders; EnumType.process() rebuilds them for
    every concrete subclass.
    """

    _name_to_member = {}
    _value_to_member = {}
    _value_to_name = {}
    _flag_values = []
    _members = []

    def __init__(self, value):
        self.value = int(value)

    @property
    def short_name(self):
        """Returns the enum member's name, like "foo"."""
        raise NotImplementedError

    @property
    def long_name(self):
        """Returns the enum member's name including the class name, like "MyEnum.foo"."""
        return "%s.%s" % (self.__class__.__name__, self.short_name)

    @property
    def title(self):
        """Returns the enum member's name in title case, like "Foo Bar" for MyEnum.foo_bar."""
        return self.short_name.replace("_", " ").title()

    @property
    def full_name(self):
        """Returns the enum member's name including the module, like "mymodule.MyEnum.foo"."""
        return "%s.%s" % (self.__class__.__module__, self.long_name)

    def is_valid(self):
        raise NotImplementedError

    def assert_valid(self):
        """Raise KeyError when this instance's value is not a valid member."""
        if not self.is_valid():
            raise _create_invalid_value_error(self.__class__, self.value)

    def __int__(self):
        return self.value

    def __call__(self):
        return self.value

    def __eq__(self, other):
        # compares by underlying value, so member == raw int is True
        return self.value == other

    def __ne__(self, other):
        return self.value != other

    def __hash__(self):
        # hashes like the underlying int, consistent with __eq__
        return hash(self.value)

    def __str__(self):
        if self.is_valid():
            return self.short_name
        else:
            return "%s(%s)" % (self.__class__.__name__, self.value)

    def __repr__(self):
        if self.is_valid():
            return self.__class__.__name__ + "." + self.short_name
        else:
            return "%s(%s)" % (self.__class__.__name__, self.value)

    @classmethod
    def get_names(cls):
        """Returns the names of all members of this enum."""
        return [m.short_name for m in cls._members]

    @classmethod
    def get_members(cls):
        """Returns all members of this enum, sorted by value."""
        return cls._members

    @classmethod
    def create(cls, name, members):
        """Creates a new enum type based on this one (cls) and adds newly
        passed members to the newly created subclass of cls.

        This method helps to create enums having the same member values as
        values of other enum(s).

        :param name: name of the newly created type
        :param members: 1) a dict or 2) a list of (name, value) tuples
            and/or EnumBase instances describing new members
        :return: newly created enum type.
        """
        NewEnum = type(name, (cls,), {})
        if isinstance(members, dict):
            members = members.items()
        for member in members:
            if isinstance(member, tuple):
                name, value = member
                setattr(NewEnum, name, value)
            elif isinstance(member, EnumBase):
                setattr(NewEnum, member.short_name, member.value)
            else:
                assert False, (
                    "members must be either a dict, "
                    + "a list of (name, value) tuples, "
                    + "or a list of EnumBase instances."
                )
        # re-run the metaclass scan so the new attributes become members
        NewEnum.process()
        # needed for pickling to work (hopefully); taken from the namedtuple implementation in the
        # standard library
        try:
            NewEnum.__module__ = sys._getframe(1).f_globals.get("__name__", "__main__")
        except (AttributeError, ValueError):
            pass
        return NewEnum

    @classmethod
    def parse(cls, value, default=_no_default):
        """Parses a value into a member of this enum."""
        raise NotImplementedError

    def __reduce_ex__(self, proto):
        # pickle by value: unpickling re-parses through the constructor
        return self.__class__, (self.value,)
class Enum(EnumBase):
    """Plain enum: every member is exactly one named integer value."""

    def is_valid(self):
        """True when this instance's value is a declared member value."""
        return self.value in self._value_to_member

    @property
    def short_name(self):
        """The member's bare name, e.g. "foo"; raises for invalid values."""
        self.assert_valid()
        return self._value_to_name[self.value]

    @classmethod
    def parse(cls, value, default=_no_default):
        """Parses an enum member name or value into an enum member.

        Accepted inputs:
        - Members of this enum class. These are returned directly.
        - Integers. If there is an enum member with the integer as a value,
          that member is returned.
        - Strings. If there is an enum member with the string as its name,
          that member is returned.
        For integers and strings that don't correspond to an enum member,
        default is returned; if no default is given the function raises
        KeyError instead.

        Examples:
        >>> class Color(Enum):
        ...     red = 1
        ...     blue = 2
        >>> Color.parse(Color.red)
        Color.red
        >>> Color.parse(1)
        Color.red
        >>> Color.parse('blue')
        Color.blue
        """
        if isinstance(value, cls):
            return value
        if isinstance(value, int) and not isinstance(value, EnumBase):
            member = cls._value_to_member.get(value, _no_default)
        else:
            member = cls._name_to_member.get(value, _no_default)
        if member is not _no_default and member.is_valid():
            return member
        if default is _no_default:
            raise _create_invalid_value_error(cls, value)
        return default
class Flags(EnumBase):
    """Bitmask enum: a valid value is any OR-combination of member values."""

    def is_valid(self):
        # Peel off every declared flag contained in the value; the value
        # is valid iff nothing remains once all known bits are removed.
        value = self.value
        for v in self._flag_values:
            if (v | value) == value:
                value ^= v
        return value == 0

    @property
    def short_name(self):
        """Comma-separated names of the flags set in this value."""
        self.assert_valid()
        result = []
        l = self.value
        # _flag_values is sorted descending (see EnumType.process), so
        # compound flags are matched before the bits they contain.
        for v in self._flag_values:
            if (v | l) == l:
                l ^= v
                result.append(self._value_to_name[v])
        if not result:
            # zero: use its declared name when the enum defines one
            if 0 in self._value_to_name:
                return self._value_to_name[0]
            else:
                return ""
        return ",".join(result)

    @classmethod
    def parse(cls, value, default=_no_default):
        """Parses a flag integer or string into a Flags instance.
        Accepts the following types:
        - Members of this enum class. These are returned directly.
        - Integers. These are converted directly into a Flags instance with the given name.
        - Strings. The function accepts a comma-delimited list of flag names, corresponding to
          members of the enum. These are all ORed together.
        Examples:
        >>> class Car(Flags):
        ...     is_big = 1
        ...     has_wheels = 2
        >>> Car.parse(1)
        Car.is_big
        >>> Car.parse(3)
        Car.parse('has_wheels,is_big')
        >>> Car.parse('is_big,has_wheels')
        Car.parse('has_wheels,is_big')
        """
        if isinstance(value, cls):
            return value
        elif isinstance(value, int):
            e = cls._make_value(value)
        else:
            if not value:
                # empty string parses as the zero (no flags) value
                e = cls._make_value(0)
            else:
                r = 0
                for k in value.split(","):
                    v = cls._name_to_member.get(k, _no_default)
                    if v is _no_default:
                        if default is _no_default:
                            raise _create_invalid_value_error(cls, value)
                        else:
                            return default
                    r |= v.value
                e = cls._make_value(r)
        # the combined value must still be expressible in declared flags
        if not e.is_valid():
            if default is _no_default:
                raise _create_invalid_value_error(cls, value)
            return default
        return e

    def __contains__(self, item):
        # True when every bit of *item* is set in this value (0 always is)
        item = int(item)
        if item == 0:
            return True
        return item == (self.value & item)

    # bitwise combinators go through the class call (= parse), so an
    # invalid combination raises rather than producing a bogus member
    def __or__(self, other):
        return self.__class__(self.value | int(other))

    def __and__(self, other):
        return self.__class__(self.value & int(other))

    def __xor__(self, other):
        return self.__class__(self.value ^ int(other))

    def __repr__(self):
        if self.is_valid():
            name = self.short_name
            # multi-flag values have no attribute form, so show parse(...)
            if "," in name:
                return "%s.parse('%s')" % (self.__class__.__name__, self.short_name)
            else:
                return self.__class__.__name__ + "." + self.short_name
        else:
            return "%s(%s)" % (self.__class__.__name__, self.value)
class IntEnum(int, Enum):
    """Enum subclass that offers more compatibility with int.

    int comes first in the MRO, so instances behave as real ints
    (hashable, usable in arithmetic and as dict keys).
    """

    def __repr__(self):
        # delegate to Enum so repr shows the member name, not the int
        return Enum.__repr__(self)
class EnumValueGenerator(object):
    """Hands out consecutive integer values for declaring enum members."""

    def __init__(self, start=1):
        self._next_value = start

    def reset(self, start=1):
        """Restart the sequence at *start*."""
        self._next_value = start

    def next(self):
        """Return the current value and advance the sequence by one."""
        value = self._next_value
        self._next_value = value + 1
        return value

    def __call__(self):
        return self.next()

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._next_value)
# Private part
def _create_invalid_value_error(cls, value):
    """Build the KeyError raised when *value* is not a member of *cls*."""
    message = "Invalid %s value: %r" % (inspection.get_full_name(cls), value)
    return KeyError(message)
|
quora/qcore
|
qcore/enum.py
|
Python
|
apache-2.0
| 12,142
|
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Provider info for Azure
"""
from perfkitbenchmarker import provider_info
from perfkitbenchmarker import benchmark_spec
class AzureProviderInfo(provider_info.BaseProviderInfo):
    """Provider metadata for the Azure cloud.

    Declares which benchmarks cannot run on Azure and binds this class
    to the AZURE cloud identifier from the benchmark spec.
    """

    # NOTE(review): presumably excluded because no Azure implementation
    # exists for it -- confirm before relying on this list.
    UNSUPPORTED_BENCHMARKS = ['mysql_service']
    CLOUD = benchmark_spec.AZURE
|
mateusz-blaszkowski/PerfKitBenchmarker
|
perfkitbenchmarker/providers/azure/provider_info.py
|
Python
|
apache-2.0
| 872
|
#!python
import sys
import io
import re
import urllib
import urllib2
import urlparse
import lxml.etree
def get_outlinks(url):
    '''
    Fetch *url* and return all absolute http:// links found in its anchors.

    url: the url to the page to crawl

    Returns an empty list when url is None, the fetch fails, or the page
    cannot be parsed. Relative hrefs are resolved against *url*.
    '''
    result = []
    if url is None:
        return result
    html = None
    resp = None
    try:
        url = url.strip()
        resp = urllib2.urlopen(url)
        if resp.code == 200:
            html = resp.read()
    # NOTE(review): Exception already covers URLError, so this tuple is
    # redundant -- effectively every fetch error is swallowed and logged
    except (urllib2.URLError, Exception) as e:
        print "can't access {0}: {1}".format(url, e)
    finally:
        if resp is not None:
            resp.close()
    if html is None:
        return result
    html_parser = lxml.etree.HTMLParser()
    try:
        # decode leniently: undecodable bytes are dropped, not fatal
        uhtml = html.decode('utf-8', 'ignore')
        tree = lxml.etree.parse(io.StringIO(uhtml), html_parser)
        anchors = tree.xpath('//a')
        for anchor in anchors:
            href = anchor.attrib.get('href', None)
            if href is not None:
                href = href.strip()
                # resolve relative links against the page url
                dest = urlparse.urljoin(url, href)
                # NOTE(review): only http:// is kept; https links are
                # skipped -- confirm this exclusion is intended
                if dest.startswith('http://'):
                    result.append(dest)
    except Exception as e:
        print "can't parse {0}: {1}".format(url, e)
    return result
def crawl(urls,
          max_to_handle,
          handle_url,
          crawl_test=None,
          handle_test=None):
    """Breadth-style crawl over a growing URL work list.

    :param urls: seed list of URLs; it is extended IN PLACE with outlinks
        discovered along the way
    :param max_to_handle: stop once this many URLs have been handled
    :param handle_url: callback invoked as handle_url(url, ordinal,
        max_to_handle) for every handled URL
    :param crawl_test: predicate deciding whether a URL gets crawled;
        None means "crawl everything"
    :param handle_test: predicate deciding whether a URL gets handled;
        None means "handle everything"
    :return: list of handled URLs, in handling order
    """
    # BUGFIX: both predicates defaulted to None but were called
    # unconditionally, raising TypeError when omitted; None now means
    # "always True", keeping the documented defaults usable.
    if crawl_test is None:
        crawl_test = lambda url: True
    if handle_test is None:
        handle_test = lambda url: True
    handled = []
    visited = set()
    i = 0
    p = 0
    while len(handled) < max_to_handle and i < len(urls):
        url = urls[i]
        if url not in visited and crawl_test(url):
            outlinks = get_outlinks(url)
            visited.add(url)
            urls.extend(outlinks)
            if handle_test(url) and url not in handled:
                handle_url(url, p + 1, max_to_handle)
                handled.append(url)
                p += 1
        i += 1
    return handled
def call_semantics_service(url, i, max_to_handle):
    """Invoke the BigSemantics metadata service for *url* and log the result.

    i and max_to_handle are used only for progress reporting.
    """
    service_pattern = "http://ecology-service.cse.tamu.edu/BigSemanticsService/metadata.xml?url={0}"
    qurl = urllib.quote(url)
    surl = service_pattern.format(qurl)
    resp = urllib2.urlopen(surl)
    content = resp.read()
    # heuristics on the response body: download finished + page was
    # recognized as an amazon product
    is_downloaded = content.find('DOWNLOAD_DONE') >= 0
    is_typed = content.find('</amazon_product>') >= 0
    if resp.code == 200 and is_downloaded and is_typed:
        print "[{0}/{1}] service called on {2}".format(
            i, max_to_handle, url)
    else:
        print "[{0}/{1}] error calling service: {2}: c={3}, d={4}, t={5}".format(
            i, max_to_handle, surl, resp.code, is_downloaded, is_typed)
def call_downloader_service(url, i, max_to_handle):
    """Ask the DownloaderPool service to fetch *url*, logging the outcome.

    The agent value is a pre-URL-encoded Chrome user-agent string.
    """
    agent = "Mozilla%2F5.0%20(Windows%20NT%206.2%3B%20Win64%3B%20x64)%20AppleWebKit%2F537.36%20(KHTML%2C%20like%20Gecko)%20Chrome%2F32.0.1667.0%20Safari%2F537.36"
    service_pattern = "http://ecology-service.cse.tamu.edu/DownloaderPool/page/download.xml?url={0}&agent={1}"
    qurl = urllib.quote(url)
    resp = urllib2.urlopen(service_pattern.format(qurl, agent))
    if resp.code == 200:
        print "[{0}/{1}] successful downloading invocation on {2}".format(
            i, max_to_handle, url)
    else:
        print "[{0}/{1}] downloading error code {2} for {3}".format(
            i, max_to_handle, resp.code, url)
if __name__ == '__main__':
    # usage: crawl.py <url_lst_file> <max_to_handle>
    if len(sys.argv) < 3:
        print "usage: {0} <url_lst_file> <max_to_handle>".format(sys.argv[0])
        quit()
    f = open(sys.argv[1])
    urls = f.readlines()
    n = int(sys.argv[2])
    # crawl only within amazon.com; handle only product (/dp/) pages
    crawl_test = lambda(url): url.find('amazon.com') > 0;
    p_prod = r'^http://www.amazon.com/([^/]+/)?dp/[^/]+';
    handle_test = lambda(url): re.search(p_prod, url) is not None;
    handled = crawl(urls, n, call_semantics_service, crawl_test, handle_test);
    for url in handled:
        print url
|
ecologylab/BigSemanticsService
|
Scripts/utils/crawl.py
|
Python
|
apache-2.0
| 3,552
|
import logging
from datetime import timedelta
from core import Feed
import pandas as pd
from core.observables import Ip, Observable
from core.errors import ObservableValidationError
class ThreatFox(Feed):
    """Feed plugin that ingests recent IOCs from abuse.ch ThreatFox."""

    default_values = {
        "frequency": timedelta(hours=1),
        "name": "ThreatFox",
        "source": "https://threatfox.abuse.ch/export/json/recent/",
        "description": "Feed ThreatFox by Abuse.ch",
    }

    def update(self):
        """Fetch the feed and analyze every new row."""
        for index, line in self.update_json():
            self.analyze(line)

    def update_json(self):
        """Download the recent-IOC export and return (index, row) pairs.

        :return: an iterator of (index, pandas.Series) rows, filtered to
            entries newer than self.last_run when set; empty when the
            request fails.
        """
        r = self._make_request(sort=False)
        if not r:
            # BUGFIX: previously fell through and returned None here,
            # which made update() raise TypeError on a failed request.
            return iter([])
        res = r.json()
        # each JSON value is a single-element list wrapping the record
        values = [row[0] for row in res.values()]
        df = pd.DataFrame(values)
        df["first_seen_utc"] = pd.to_datetime(df["first_seen_utc"])
        df["last_seen_utc"] = pd.to_datetime(df["last_seen_utc"])
        if self.last_run:
            # only keep entries we have not seen in a previous run
            df = df[df["first_seen_utc"] > self.last_run]
        df.fillna("-", inplace=True)
        return df.iterrows()

    def analyze(self, item):
        """Turn one ThreatFox row into an observable with context and tags.

        :param item: mapping with the ThreatFox export fields
            (ioc_value, ioc_type, threat_type, tags, ...)
        """
        first_seen = item["first_seen_utc"]
        ioc_value = item["ioc_value"]
        ioc_type = item["ioc_type"]
        threat_type = item["threat_type"]
        malware_alias = item["malware_alias"]
        malware_printable = item["malware_printable"]
        last_seen_utc = item["last_seen_utc"]
        confidence_level = item["confidence_level"]
        reference = item["reference"]
        reporter = item["reporter"]
        tags = []
        context = {"source": self.name}
        context["first_seen"] = first_seen
        context["reference"] = reference if reference else "Unknown"
        context["reporter"] = reporter if reporter else "Unknown"
        if threat_type:
            context["threat_type"] = threat_type
        if item["tags"]:
            tags.extend(item["tags"].split(","))
        if malware_printable:
            tags.append(malware_printable)
        if malware_alias:
            context["malware_alias"] = malware_alias
        if last_seen_utc:
            context["last_seen_utc"] = last_seen_utc
        if confidence_level:
            context["confidence_level"] = confidence_level
        obs = None
        try:
            if "ip" in ioc_type:
                # ip IOCs arrive as "address:port"
                value, port = ioc_value.split(":")
                context["port"] = port
                obs = Ip.get_or_create(value=value)
            else:
                obs = Observable.add_text(ioc_value)
        except ObservableValidationError as e:
            logging.error(e)
            return
        if obs:
            obs.add_context(context)
            obs.add_source(self.name)
            if tags:
                obs.tag(tags)
            # BUGFIX: removed a trailing no-op `obs.tags` attribute
            # access; malware_printable is already part of `tags` above.
|
yeti-platform/yeti
|
plugins/feeds/public/threatfox.py
|
Python
|
apache-2.0
| 2,925
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
    # expose the builtins module under its PY2 name and alias the
    # removed `long` type so generated uint32/uint64 code keeps working
    import builtins as __builtin__
    long = int
elif six.PY2:
    import __builtin__
class yc_config_openconfig_macsec__macsec_mka_policies_policy_config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-macsec - based on the path /macsec/mka/policies/policy/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration of the MKA policy
"""
__slots__ = ('_path_helper', '_extmethods', '__name','__key_server_priority','__macsec_cipher_suite','__confidentiality_offset','__delay_protection','__include_icv_indicator','__sak_rekey_interval','__sak_rekey_on_live_peer_loss','__use_updated_eth_header',)
_yang_name = 'config'
_yang_namespace = 'http://openconfig.net/yang/macsec'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
    """Initialize every leaf of the config container to its schema default.

    Generated by pyangbind: one YANGDynClass-wrapped leaf per schema node
    of /macsec/mka/policies/policy/config. An optional single positional
    argument acts as a copy source whose changed elements are mirrored.
    """
    self._path_helper = False
    self._extmethods = False
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)
    self.__key_server_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=True)
    self.__macsec_cipher_suite = YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=True)
    self.__confidentiality_offset = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=True)
    self.__delay_protection = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
    self.__include_icv_indicator = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
    self.__sak_rekey_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=True)
    self.__sak_rekey_on_live_peer_loss = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
    self.__use_updated_eth_header = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
    load = kwargs.pop("load", None)
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        # copy-constructor path: the source object must expose every
        # pyangbind element of this container
        all_attr = True
        for e in self._pyangbind_elements:
            if not hasattr(args[0], e):
                all_attr = False
                break
        if not all_attr:
            raise ValueError("Supplied object did not have the correct attributes")
        # mirror only the elements that were actually changed on the source
        for e in self._pyangbind_elements:
            nobj = getattr(args[0], e)
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            if load is None:
                setmethod(getattr(args[0], e))
            else:
                setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['macsec', 'mka', 'policies', 'policy', 'config']
def _get_name(self):
    """
    Getter method for name, mapped from YANG variable /macsec/mka/policies/policy/config/name (string)

    YANG Description: Name of the MKA policy.
    """
    # returns the YANGDynClass wrapper, not a plain string
    return self.__name
def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /macsec/mka/policies/policy/config/name (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.

    YANG Description: Name of the MKA policy.
    """
    # unwrap values that carry their underlying type (generated pattern)
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # re-wrap so type restrictions are enforced on assignment
        t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """name must be of a type compatible with string""",
            'defined-type': "string",
            'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)""",
        })
    self.__name = t
    if hasattr(self, '_set'):
        self._set()
def _unset_name(self):
    # restore the leaf to a fresh, default-valued wrapper
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)
def _get_key_server_priority(self):
    """
    Getter method for key_server_priority, mapped from YANG variable /macsec/mka/policies/policy/config/key_server_priority (uint8)

    YANG Description: Specifies the key server priority used by the MACsec Key Agreement
    (MKA) protocol to select the key server when MACsec is enabled using
    static connectivity association key (CAK) security mode. The switch with
    the lower priority-number is selected as the key server. If the
    priority-number is identical on both sides of a point-to-point link, the
    MKA protocol selects the device with the lower MAC address as the key
    server
    """
    # returns the YANGDynClass-wrapped uint8 (default 16)
    return self.__key_server_priority
def _set_key_server_priority(self, v, load=False):
    """
    Setter method for key_server_priority, mapped from YANG variable /macsec/mka/policies/policy/config/key_server_priority (uint8)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_key_server_priority is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_key_server_priority() directly.

    YANG Description: Specifies the key server priority used by the MACsec Key Agreement
    (MKA) protocol to select the key server when MACsec is enabled using
    static connectivity association key (CAK) security mode. The switch with
    the lower priority-number is selected as the key server. If the
    priority-number is identical on both sides of a point-to-point link, the
    MKA protocol selects the device with the lower MAC address as the key
    server
    """
    # unwrap values that carry their underlying type (generated pattern)
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # re-wrap so the 0..255 range restriction is enforced
        t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """key_server_priority must be of a type compatible with uint8""",
            'defined-type': "uint8",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=True)""",
        })
    self.__key_server_priority = t
    if hasattr(self, '_set'):
        self._set()
def _unset_key_server_priority(self):
    # restore the leaf to a fresh wrapper with the schema default (16)
    self.__key_server_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=True)
def _get_macsec_cipher_suite(self):
    """
    Getter method for macsec_cipher_suite, mapped from YANG variable /macsec/mka/policies/policy/config/macsec_cipher_suite (macsec-types:macsec-cipher-suite)

    YANG Description: Set Cipher suite(s) for SAK derivation
    """
    # returns the wrapped unique list of cipher-suite enum strings
    return self.__macsec_cipher_suite
def _set_macsec_cipher_suite(self, v, load=False):
    """
    Setter method for macsec_cipher_suite, mapped from YANG variable /macsec/mka/policies/policy/config/macsec_cipher_suite (macsec-types:macsec-cipher-suite)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_macsec_cipher_suite is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_macsec_cipher_suite() directly.

    YANG Description: Set Cipher suite(s) for SAK derivation
    """
    # unwrap values that carry their underlying type (generated pattern)
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # re-wrap so only the declared cipher-suite names are accepted
        t = YANGDynClass(v,unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """macsec_cipher_suite must be of a type compatible with macsec-types:macsec-cipher-suite""",
            'defined-type': "macsec-types:macsec-cipher-suite",
            'generated-type': """YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=True)""",
        })
    self.__macsec_cipher_suite = t
    if hasattr(self, '_set'):
        self._set()
def _unset_macsec_cipher_suite(self):
    # restore the leaf to a fresh, empty unique list wrapper
    self.__macsec_cipher_suite = YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=True)
def _get_confidentiality_offset(self):
"""
Getter method for confidentiality_offset, mapped from YANG variable /macsec/mka/policies/policy/config/confidentiality_offset (macsec-types:confidentiality-offset)
YANG Description: The confidentiality offset specifies a number of octets in an Ethernet
frame that are sent in unencrypted plain-text
"""
return self.__confidentiality_offset
def _set_confidentiality_offset(self, v, load=False):
"""
Setter method for confidentiality_offset, mapped from YANG variable /macsec/mka/policies/policy/config/confidentiality_offset (macsec-types:confidentiality-offset)
If this variable is read-only (config: false) in the
source YANG file, then _set_confidentiality_offset is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_confidentiality_offset() directly.
YANG Description: The confidentiality offset specifies a number of octets in an Ethernet
frame that are sent in unencrypted plain-text
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """confidentiality_offset must be of a type compatible with macsec-types:confidentiality-offset""",
'defined-type': "macsec-types:confidentiality-offset",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=True)""",
})
self.__confidentiality_offset = t
if hasattr(self, '_set'):
self._set()
def _unset_confidentiality_offset(self):
self.__confidentiality_offset = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=True)
def _get_delay_protection(self):
"""
Getter method for delay_protection, mapped from YANG variable /macsec/mka/policies/policy/config/delay_protection (boolean)
YANG Description: Traffic delayed longer than 2 seconds is rejected by the interfaces
enabled with delay protection.
"""
return self.__delay_protection
def _set_delay_protection(self, v, load=False):
"""
Setter method for delay_protection, mapped from YANG variable /macsec/mka/policies/policy/config/delay_protection (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_delay_protection is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_delay_protection() directly.
YANG Description: Traffic delayed longer than 2 seconds is rejected by the interfaces
enabled with delay protection.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """delay_protection must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)""",
})
self.__delay_protection = t
if hasattr(self, '_set'):
self._set()
def _unset_delay_protection(self):
self.__delay_protection = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
def _get_include_icv_indicator(self):
"""
Getter method for include_icv_indicator, mapped from YANG variable /macsec/mka/policies/policy/config/include_icv_indicator (boolean)
YANG Description: Generate and include an Integrity Check Value (ICV) field in the MKPDU.
For compatibility with previous MACsec implementation that do not
require an ICV
"""
return self.__include_icv_indicator
def _set_include_icv_indicator(self, v, load=False):
"""
Setter method for include_icv_indicator, mapped from YANG variable /macsec/mka/policies/policy/config/include_icv_indicator (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_include_icv_indicator is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_include_icv_indicator() directly.
YANG Description: Generate and include an Integrity Check Value (ICV) field in the MKPDU.
For compatibility with previous MACsec implementation that do not
require an ICV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """include_icv_indicator must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)""",
})
self.__include_icv_indicator = t
if hasattr(self, '_set'):
self._set()
def _unset_include_icv_indicator(self):
self.__include_icv_indicator = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
def _get_sak_rekey_interval(self):
"""
Getter method for sak_rekey_interval, mapped from YANG variable /macsec/mka/policies/policy/config/sak_rekey_interval (uint32)
YANG Description: SAK Rekey interval in seconds. The default value is 0 where no rekey is
performed.
"""
return self.__sak_rekey_interval
def _set_sak_rekey_interval(self, v, load=False):
"""
Setter method for sak_rekey_interval, mapped from YANG variable /macsec/mka/policies/policy/config/sak_rekey_interval (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_sak_rekey_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sak_rekey_interval() directly.
YANG Description: SAK Rekey interval in seconds. The default value is 0 where no rekey is
performed.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sak_rekey_interval must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=True)""",
})
self.__sak_rekey_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_sak_rekey_interval(self):
self.__sak_rekey_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=True)
def _get_sak_rekey_on_live_peer_loss(self):
"""
Getter method for sak_rekey_on_live_peer_loss, mapped from YANG variable /macsec/mka/policies/policy/config/sak_rekey_on_live_peer_loss (boolean)
YANG Description: Rekey on peer loss
"""
return self.__sak_rekey_on_live_peer_loss
def _set_sak_rekey_on_live_peer_loss(self, v, load=False):
"""
Setter method for sak_rekey_on_live_peer_loss, mapped from YANG variable /macsec/mka/policies/policy/config/sak_rekey_on_live_peer_loss (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_sak_rekey_on_live_peer_loss is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sak_rekey_on_live_peer_loss() directly.
YANG Description: Rekey on peer loss
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sak_rekey_on_live_peer_loss must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)""",
})
self.__sak_rekey_on_live_peer_loss = t
if hasattr(self, '_set'):
self._set()
def _unset_sak_rekey_on_live_peer_loss(self):
self.__sak_rekey_on_live_peer_loss = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
def _get_use_updated_eth_header(self):
"""
Getter method for use_updated_eth_header, mapped from YANG variable /macsec/mka/policies/policy/config/use_updated_eth_header (boolean)
YANG Description: Use updated ethernet header for ICV calculation. In case the Ethernet
frame headers change, use the updated headers to calculate the ICV.
"""
return self.__use_updated_eth_header
def _set_use_updated_eth_header(self, v, load=False):
"""
Setter method for use_updated_eth_header, mapped from YANG variable /macsec/mka/policies/policy/config/use_updated_eth_header (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_use_updated_eth_header is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_use_updated_eth_header() directly.
YANG Description: Use updated ethernet header for ICV calculation. In case the Ethernet
frame headers change, use the updated headers to calculate the ICV.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """use_updated_eth_header must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)""",
})
self.__use_updated_eth_header = t
if hasattr(self, '_set'):
self._set()
def _unset_use_updated_eth_header(self):
self.__use_updated_eth_header = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
name = __builtin__.property(_get_name, _set_name)
key_server_priority = __builtin__.property(_get_key_server_priority, _set_key_server_priority)
macsec_cipher_suite = __builtin__.property(_get_macsec_cipher_suite, _set_macsec_cipher_suite)
confidentiality_offset = __builtin__.property(_get_confidentiality_offset, _set_confidentiality_offset)
delay_protection = __builtin__.property(_get_delay_protection, _set_delay_protection)
include_icv_indicator = __builtin__.property(_get_include_icv_indicator, _set_include_icv_indicator)
sak_rekey_interval = __builtin__.property(_get_sak_rekey_interval, _set_sak_rekey_interval)
sak_rekey_on_live_peer_loss = __builtin__.property(_get_sak_rekey_on_live_peer_loss, _set_sak_rekey_on_live_peer_loss)
use_updated_eth_header = __builtin__.property(_get_use_updated_eth_header, _set_use_updated_eth_header)
_pyangbind_elements = OrderedDict([('name', name), ('key_server_priority', key_server_priority), ('macsec_cipher_suite', macsec_cipher_suite), ('confidentiality_offset', confidentiality_offset), ('delay_protection', delay_protection), ('include_icv_indicator', include_icv_indicator), ('sak_rekey_interval', sak_rekey_interval), ('sak_rekey_on_live_peer_loss', sak_rekey_on_live_peer_loss), ('use_updated_eth_header', use_updated_eth_header), ])
class yc_state_openconfig_macsec__macsec_mka_policies_policy_state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/policies/policy/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Operational state data for MKA policy
  """
  # __slots__ restricts instances to the generated leaves (name-mangled)
  # plus the pybind plumbing attributes, saving per-instance memory.
  __slots__ = ('_path_helper', '_extmethods', '__name','__key_server_priority','__macsec_cipher_suite','__confidentiality_offset','__delay_protection','__include_icv_indicator','__sak_rekey_interval','__sak_rekey_on_live_peer_loss','__use_updated_eth_header',)
  # Schema identity used by PybindBase for path and namespace resolution.
  _yang_name = 'state'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    """Initialise every leaf of the state container to its schema default.

    Optionally accepts a single positional argument: an object exposing the
    same attributes as this container, whose changed values are copied in.
    The ``load`` keyword is forwarded to each setter when given.
    """
    self._path_helper = False
    self._extmethods = False
    # Default-construct each leaf wrapper (all is_config=False: state data).
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)
    self.__key_server_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=False)
    self.__macsec_cipher_suite = YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=False)
    self.__confidentiality_offset = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=False)
    self.__delay_protection = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    self.__include_icv_indicator = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    self.__sak_rekey_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=False)
    self.__sak_rekey_on_live_peer_loss = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    self.__use_updated_eth_header = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      # Copy-construct: the source object must expose every element.
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        # Only copy leaves whose value differs from the schema default.
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['macsec', 'mka', 'policies', 'policy', 'state']
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /macsec/mka/policies/policy/state/name (string)
YANG Description: Name of the MKA policy.
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /macsec/mka/policies/policy/state/name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: Name of the MKA policy.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)""",
})
self.__name = t
if hasattr(self, '_set'):
self._set()
def _unset_name(self):
self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)
def _get_key_server_priority(self):
"""
Getter method for key_server_priority, mapped from YANG variable /macsec/mka/policies/policy/state/key_server_priority (uint8)
YANG Description: Specifies the key server priority used by the MACsec Key Agreement
(MKA) protocol to select the key server when MACsec is enabled using
static connectivity association key (CAK) security mode. The switch with
the lower priority-number is selected as the key server. If the
priority-number is identical on both sides of a point-to-point link, the
MKA protocol selects the device with the lower MAC address as the key
server
"""
return self.__key_server_priority
def _set_key_server_priority(self, v, load=False):
"""
Setter method for key_server_priority, mapped from YANG variable /macsec/mka/policies/policy/state/key_server_priority (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_key_server_priority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_key_server_priority() directly.
YANG Description: Specifies the key server priority used by the MACsec Key Agreement
(MKA) protocol to select the key server when MACsec is enabled using
static connectivity association key (CAK) security mode. The switch with
the lower priority-number is selected as the key server. If the
priority-number is identical on both sides of a point-to-point link, the
MKA protocol selects the device with the lower MAC address as the key
server
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """key_server_priority must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=False)""",
})
self.__key_server_priority = t
if hasattr(self, '_set'):
self._set()
def _unset_key_server_priority(self):
self.__key_server_priority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(16), is_leaf=True, yang_name="key-server-priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint8', is_config=False)
def _get_macsec_cipher_suite(self):
"""
Getter method for macsec_cipher_suite, mapped from YANG variable /macsec/mka/policies/policy/state/macsec_cipher_suite (macsec-types:macsec-cipher-suite)
YANG Description: Set Cipher suite(s) for SAK derivation
"""
return self.__macsec_cipher_suite
def _set_macsec_cipher_suite(self, v, load=False):
"""
Setter method for macsec_cipher_suite, mapped from YANG variable /macsec/mka/policies/policy/state/macsec_cipher_suite (macsec-types:macsec-cipher-suite)
If this variable is read-only (config: false) in the
source YANG file, then _set_macsec_cipher_suite is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_macsec_cipher_suite() directly.
YANG Description: Set Cipher suite(s) for SAK derivation
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """macsec_cipher_suite must be of a type compatible with macsec-types:macsec-cipher-suite""",
'defined-type': "macsec-types:macsec-cipher-suite",
'generated-type': """YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=False)""",
})
self.__macsec_cipher_suite = t
if hasattr(self, '_set'):
self._set()
def _unset_macsec_cipher_suite(self):
self.__macsec_cipher_suite = YANGDynClass(unique=True, base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GCM_AES_128': {}, 'GCM_AES_256': {}, 'GCM_AES_XPN_128': {}, 'GCM_AES_XPN_256': {}},)), is_leaf=False, yang_name="macsec-cipher-suite", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:macsec-cipher-suite', is_config=False)
def _get_confidentiality_offset(self):
"""
Getter method for confidentiality_offset, mapped from YANG variable /macsec/mka/policies/policy/state/confidentiality_offset (macsec-types:confidentiality-offset)
YANG Description: The confidentiality offset specifies a number of octets in an Ethernet
frame that are sent in unencrypted plain-text
"""
return self.__confidentiality_offset
def _set_confidentiality_offset(self, v, load=False):
"""
Setter method for confidentiality_offset, mapped from YANG variable /macsec/mka/policies/policy/state/confidentiality_offset (macsec-types:confidentiality-offset)
If this variable is read-only (config: false) in the
source YANG file, then _set_confidentiality_offset is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_confidentiality_offset() directly.
YANG Description: The confidentiality offset specifies a number of octets in an Ethernet
frame that are sent in unencrypted plain-text
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """confidentiality_offset must be of a type compatible with macsec-types:confidentiality-offset""",
'defined-type': "macsec-types:confidentiality-offset",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=False)""",
})
self.__confidentiality_offset = t
if hasattr(self, '_set'):
self._set()
def _unset_confidentiality_offset(self):
self.__confidentiality_offset = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'0_BYTES': {}, '30_BYTES': {}, '50_BYTES': {}},), default=six.text_type("0_BYTES"), is_leaf=True, yang_name="confidentiality-offset", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='macsec-types:confidentiality-offset', is_config=False)
def _get_delay_protection(self):
"""
Getter method for delay_protection, mapped from YANG variable /macsec/mka/policies/policy/state/delay_protection (boolean)
YANG Description: Traffic delayed longer than 2 seconds is rejected by the interfaces
enabled with delay protection.
"""
return self.__delay_protection
  def _set_delay_protection(self, v, load=False):
    """
    Setter method for delay_protection, mapped from YANG variable
    /macsec/mka/policies/policy/state/delay_protection (boolean).

    This leaf is operational state (config: false) in the source YANG file,
    so _set_delay_protection is considered a private method; backends
    looking to populate this variable should do so by calling
    thisObj._set_delay_protection() directly.

    YANG Description: Traffic delayed longer than 2 seconds is rejected by
    the interfaces enabled with delay protection.
    """
    if hasattr(v, "_utype"):
      # Unwrap values carrying a pyangbind union-type helper before validation.
      v = v._utype(v)
    try:
      # YANGDynClass validates/coerces v against the leaf's YANG type;
      # incompatible values raise TypeError or ValueError.
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """delay_protection must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)""",
        })
    self.__delay_protection = t
    # Notify any container-level change hook if one is defined.
    if hasattr(self, '_set'):
      self._set()
  def _unset_delay_protection(self):
    # Reset delay-protection to a fresh wrapper holding its schema
    # default (false), discarding any previously populated value.
    self.__delay_protection = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="delay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
def _get_include_icv_indicator(self):
"""
Getter method for include_icv_indicator, mapped from YANG variable /macsec/mka/policies/policy/state/include_icv_indicator (boolean)
YANG Description: Generate and include an Integrity Check Value (ICV) field in the MKPDU.
For compatibility with previous MACsec implementation that do not
require an ICV
"""
return self.__include_icv_indicator
  def _set_include_icv_indicator(self, v, load=False):
    """
    Setter method for include_icv_indicator, mapped from YANG variable
    /macsec/mka/policies/policy/state/include_icv_indicator (boolean).

    This leaf is operational state (config: false) in the source YANG file,
    so _set_include_icv_indicator is considered a private method; backends
    looking to populate this variable should do so by calling
    thisObj._set_include_icv_indicator() directly.

    YANG Description: Generate and include an Integrity Check Value (ICV)
    field in the MKPDU. For compatibility with previous MACsec
    implementation that do not require an ICV.
    """
    if hasattr(v, "_utype"):
      # Unwrap values carrying a pyangbind union-type helper before validation.
      v = v._utype(v)
    try:
      # Validate/coerce v against the leaf's YANG type (default: true).
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """include_icv_indicator must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)""",
        })
    self.__include_icv_indicator = t
    # Notify any container-level change hook if one is defined.
    if hasattr(self, '_set'):
      self._set()
  def _unset_include_icv_indicator(self):
    # Reset include-icv-indicator to a fresh wrapper holding its schema
    # default (true), discarding any previously populated value.
    self.__include_icv_indicator = YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="include-icv-indicator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
def _get_sak_rekey_interval(self):
"""
Getter method for sak_rekey_interval, mapped from YANG variable /macsec/mka/policies/policy/state/sak_rekey_interval (uint32)
YANG Description: SAK Rekey interval in seconds. The default value is 0 where no rekey is
performed.
"""
return self.__sak_rekey_interval
  def _set_sak_rekey_interval(self, v, load=False):
    """
    Setter method for sak_rekey_interval, mapped from YANG variable
    /macsec/mka/policies/policy/state/sak_rekey_interval (uint32).

    This leaf is operational state (config: false) in the source YANG file,
    so _set_sak_rekey_interval is considered a private method; backends
    looking to populate this variable should do so by calling
    thisObj._set_sak_rekey_interval() directly.

    YANG Description: SAK Rekey interval in seconds. The default value is 0
    where no rekey is performed.
    """
    if hasattr(v, "_utype"):
      # Unwrap values carrying a pyangbind union-type helper before validation.
      v = v._utype(v)
    try:
      # Validate/coerce v: a uint32 further restricted to 0 (disabled)
      # or the range 30..65535 seconds, per the YANG model.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """sak_rekey_interval must be of a type compatible with uint32""",
          'defined-type': "uint32",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=False)""",
        })
    self.__sak_rekey_interval = t
    # Notify any container-level change hook if one is defined.
    if hasattr(self, '_set'):
      self._set()
  def _unset_sak_rekey_interval(self):
    # Reset sak-rekey-interval to a fresh wrapper holding its schema
    # default (0 = no rekey), discarding any previously populated value.
    self.__sak_rekey_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['0', '30..65535']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(0), is_leaf=True, yang_name="sak-rekey-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint32', is_config=False)
def _get_sak_rekey_on_live_peer_loss(self):
"""
Getter method for sak_rekey_on_live_peer_loss, mapped from YANG variable /macsec/mka/policies/policy/state/sak_rekey_on_live_peer_loss (boolean)
YANG Description: Rekey on peer loss
"""
return self.__sak_rekey_on_live_peer_loss
  def _set_sak_rekey_on_live_peer_loss(self, v, load=False):
    """
    Setter method for sak_rekey_on_live_peer_loss, mapped from YANG variable
    /macsec/mka/policies/policy/state/sak_rekey_on_live_peer_loss (boolean).

    This leaf is operational state (config: false) in the source YANG file,
    so _set_sak_rekey_on_live_peer_loss is considered a private method;
    backends looking to populate this variable should do so by calling
    thisObj._set_sak_rekey_on_live_peer_loss() directly.

    YANG Description: Rekey on peer loss.
    """
    if hasattr(v, "_utype"):
      # Unwrap values carrying a pyangbind union-type helper before validation.
      v = v._utype(v)
    try:
      # Validate/coerce v against the leaf's YANG type (default: false).
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """sak_rekey_on_live_peer_loss must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)""",
        })
    self.__sak_rekey_on_live_peer_loss = t
    # Notify any container-level change hook if one is defined.
    if hasattr(self, '_set'):
      self._set()
  def _unset_sak_rekey_on_live_peer_loss(self):
    # Reset sak-rekey-on-live-peer-loss to a fresh wrapper holding its
    # schema default (false), discarding any previously populated value.
    self.__sak_rekey_on_live_peer_loss = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="sak-rekey-on-live-peer-loss", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
def _get_use_updated_eth_header(self):
"""
Getter method for use_updated_eth_header, mapped from YANG variable /macsec/mka/policies/policy/state/use_updated_eth_header (boolean)
YANG Description: Use updated ethernet header for ICV calculation. In case the Ethernet
frame headers change, use the updated headers to calculate the ICV.
"""
return self.__use_updated_eth_header
  def _set_use_updated_eth_header(self, v, load=False):
    """
    Setter method for use_updated_eth_header, mapped from YANG variable
    /macsec/mka/policies/policy/state/use_updated_eth_header (boolean).

    This leaf is operational state (config: false) in the source YANG file,
    so _set_use_updated_eth_header is considered a private method; backends
    looking to populate this variable should do so by calling
    thisObj._set_use_updated_eth_header() directly.

    YANG Description: Use updated ethernet header for ICV calculation. In
    case the Ethernet frame headers change, use the updated headers to
    calculate the ICV.
    """
    if hasattr(v, "_utype"):
      # Unwrap values carrying a pyangbind union-type helper before validation.
      v = v._utype(v)
    try:
      # Validate/coerce v against the leaf's YANG type (default: false).
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """use_updated_eth_header must be of a type compatible with boolean""",
          'defined-type': "boolean",
          'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)""",
        })
    self.__use_updated_eth_header = t
    # Notify any container-level change hook if one is defined.
    if hasattr(self, '_set'):
      self._set()
  def _unset_use_updated_eth_header(self):
    # Reset use-updated-eth-header to a fresh wrapper holding its schema
    # default (false), discarding any previously populated value.
    self.__use_updated_eth_header = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="use-updated-eth-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
name = __builtin__.property(_get_name)
key_server_priority = __builtin__.property(_get_key_server_priority)
macsec_cipher_suite = __builtin__.property(_get_macsec_cipher_suite)
confidentiality_offset = __builtin__.property(_get_confidentiality_offset)
delay_protection = __builtin__.property(_get_delay_protection)
include_icv_indicator = __builtin__.property(_get_include_icv_indicator)
sak_rekey_interval = __builtin__.property(_get_sak_rekey_interval)
sak_rekey_on_live_peer_loss = __builtin__.property(_get_sak_rekey_on_live_peer_loss)
use_updated_eth_header = __builtin__.property(_get_use_updated_eth_header)
_pyangbind_elements = OrderedDict([('name', name), ('key_server_priority', key_server_priority), ('macsec_cipher_suite', macsec_cipher_suite), ('confidentiality_offset', confidentiality_offset), ('delay_protection', delay_protection), ('include_icv_indicator', include_icv_indicator), ('sak_rekey_interval', sak_rekey_interval), ('sak_rekey_on_live_peer_loss', sak_rekey_on_live_peer_loss), ('use_updated_eth_header', use_updated_eth_header), ])
class yc_policy_openconfig_macsec__macsec_mka_policies_policy(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/policies/policy. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: List of MKA policies
  """
  # Fixed per-instance storage; the double-underscore entries are the
  # name-mangled attributes backing the generated properties below.
  __slots__ = ('_path_helper', '_extmethods', '__name','__config','__state',)

  _yang_name = 'policy'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False

    self._extmethods = False
    # Each child element starts out as its generated default YANGDynClass wrapper.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_policies_policy_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_policies_policy_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose every
    # element in _pyangbind_elements; only elements reporting _changed()
    # are copied into this instance via the generated setters.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Instance path when attached to a parent; otherwise the schema path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'policies', 'policy']

  def _get_name(self):
    """
    Getter method for name, mapped from YANG variable /macsec/mka/policies/policy/name (leafref)

    YANG Description: Reference to MKA policy name
    """
    return self.__name

  def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /macsec/mka/policies/policy/name (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.

    YANG Description: Reference to MKA policy name
    """
    # 'name' is the list key: it cannot be changed on an entry that is
    # already inside an instantiated list, except during a load.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                             " within an instantiated list")

    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """name must be of a type compatible with leafref""",
          'defined-type': "leafref",
          'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)""",
        })
    self.__name = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_name(self):
    # Reset the key leaf to an empty default wrapper.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)


  def _get_config(self):
    """
    Getter method for config, mapped from YANG variable /macsec/mka/policies/policy/config (container)

    YANG Description: Configuration of the MKA policy
    """
    return self.__config

  def _set_config(self, v, load=False):
    """
    Setter method for config, mapped from YANG variable /macsec/mka/policies/policy/config (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config() directly.

    YANG Description: Configuration of the MKA policy
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_config_openconfig_macsec__macsec_mka_policies_policy_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """config must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_policies_policy_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__config = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_config(self):
    # Reset the config container to a fresh default instance.
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_policies_policy_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)


  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /macsec/mka/policies/policy/state (container)

    YANG Description: Operational state data for MKA policy
    """
    return self.__state

  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /macsec/mka/policies/policy/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.

    YANG Description: Operational state data for MKA policy
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_mka_policies_policy_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """state must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_policies_policy_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__state = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_state(self):
    # Reset the state container to a fresh default instance.
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_policies_policy_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

  # Public property bindings: 'name' (list key), 'config' and 'state' are
  # all settable since this list entry is config data (is_config=True).
  name = __builtin__.property(_get_name, _set_name)
  config = __builtin__.property(_get_config, _set_config)
  state = __builtin__.property(_get_state, _set_state)

  # Ordered registry of child elements used by the pyangbind machinery.
  _pyangbind_elements = OrderedDict([('name', name), ('config', config), ('state', state), ])
class yc_policies_openconfig_macsec__macsec_mka_policies(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/policies. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Enclosing container for the list of MKA policies
  """
  # Fixed per-instance storage; '__policy' backs the generated property below.
  __slots__ = ('_path_helper', '_extmethods', '__policy',)

  _yang_name = 'policies'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False

    self._extmethods = False
    # The 'policy' list is keyed on 'name' and holds generated policy entries.
    self.__policy = YANGDynClass(base=YANGListType("name",yc_policy_openconfig_macsec__macsec_mka_policies_policy, yang_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose every
    # element in _pyangbind_elements; only changed elements are copied.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Instance path when attached to a parent; otherwise the schema path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'policies']

  def _get_policy(self):
    """
    Getter method for policy, mapped from YANG variable /macsec/mka/policies/policy (list)

    YANG Description: List of MKA policies
    """
    return self.__policy

  def _set_policy(self, v, load=False):
    """
    Setter method for policy, mapped from YANG variable /macsec/mka/policies/policy (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_policy is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_policy() directly.

    YANG Description: List of MKA policies
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("name",yc_policy_openconfig_macsec__macsec_mka_policies_policy, yang_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """policy must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("name",yc_policy_openconfig_macsec__macsec_mka_policies_policy, yang_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)""",
        })
    self.__policy = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_policy(self):
    # Reset the policy list to a fresh empty keyed list.
    self.__policy = YANGDynClass(base=YANGListType("name",yc_policy_openconfig_macsec__macsec_mka_policies_policy, yang_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)

  # Public property binding and element registry for this container.
  policy = __builtin__.property(_get_policy, _set_policy)

  _pyangbind_elements = OrderedDict([('policy', policy), ])
class yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains/key-chain/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Configuration of the MKA key chain
  """
  # Fixed per-instance storage; '__name' backs the generated property below.
  __slots__ = ('_path_helper', '_extmethods', '__name',)

  _yang_name = 'config'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False

    self._extmethods = False
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose every
    # element in _pyangbind_elements; only changed elements are copied.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Instance path when attached to a parent; otherwise the schema path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'key-chains', 'key-chain', 'config']

  def _get_name(self):
    """
    Getter method for name, mapped from YANG variable /macsec/mka/key_chains/key_chain/config/name (string)

    YANG Description: MKA Key-chain name
    """
    return self.__name

  def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /macsec/mka/key_chains/key_chain/config/name (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.

    YANG Description: MKA Key-chain name
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """name must be of a type compatible with string""",
          'defined-type': "string",
          'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)""",
        })
    self.__name = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_name(self):
    # Reset the key-chain name to an empty default wrapper.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)

  # Public property binding (settable: this is config data) and registry.
  name = __builtin__.property(_get_name, _set_name)

  _pyangbind_elements = OrderedDict([('name', name), ])
class yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains/key-chain/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Operational state data for MKA key chain
  """
  # Fixed per-instance storage; '__name' backs the generated property below.
  __slots__ = ('_path_helper', '_extmethods', '__name',)

  _yang_name = 'state'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False

    self._extmethods = False
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose every
    # element in _pyangbind_elements; only changed elements are copied.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Instance path when attached to a parent; otherwise the schema path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'key-chains', 'key-chain', 'state']

  def _get_name(self):
    """
    Getter method for name, mapped from YANG variable /macsec/mka/key_chains/key_chain/state/name (string)

    YANG Description: MKA Key-chain name
    """
    return self.__name

  def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /macsec/mka/key_chains/key_chain/state/name (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.

    YANG Description: MKA Key-chain name
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """name must be of a type compatible with string""",
          'defined-type': "string",
          'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)""",
        })
    self.__name = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_name(self):
    # Reset the key-chain name to an empty default wrapper.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)

  # Read-only property binding (is_config=False: no setter exposed) and
  # the element registry used by the pyangbind machinery.
  name = __builtin__.property(_get_name)

  _pyangbind_elements = OrderedDict([('name', name), ])
class yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains/key-chain/mka-keys/mka-key/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Configuration of MKA key
  """
  # __slots__ pins the attribute set (and keeps per-instance memory low);
  # the double-underscore names are mangled per-class by CPython, so each
  # generated class owns its own private leaf storage.
  __slots__ = ('_path_helper', '_extmethods', '__id','__key_clear_text','__cryptographic_algorithm','__valid_date_time','__expiration_date_time',)
  _yang_name = 'config'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Each leaf is wrapped in YANGDynClass, which enforces the YANG-derived
    # restrictions on assignment (hex pattern + 1..64 length for id, enum
    # membership for the algorithm, date-time-pattern unions with defaults
    # for the validity window).  All five leaves are config:true.
    self.__id = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=True)
    self.__key_clear_text = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)
    self.__cryptographic_algorithm = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=True)
    self.__valid_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)
    self.__expiration_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)
    load = kwargs.pop("load", None)
    # Optional single positional argument: copy-construct from another
    # object that exposes the same pyangbind elements; only leaves flagged
    # as changed on the source object are copied across.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Absolute schema path: derived from the parent when attached, else the
    # static model location for this container.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'key-chains', 'key-chain', 'mka-keys', 'mka-key', 'config']
  # All _set_* methods below follow the same generated pattern: unwrap a
  # union helper via _utype if present, revalidate through YANGDynClass,
  # raise a structured ValueError on type mismatch, then invoke the
  # container-level _set hook when one is registered.
  def _get_id(self):
    """
    Getter method for id, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/id (oc-yang:hex-string)
    YANG Description: Key identifier is used as the
    Connectivity Association Key name (CKN)
    """
    return self.__id
  def _set_id(self, v, load=False):
    """
    Setter method for id, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/id (oc-yang:hex-string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_id() directly.
    YANG Description: Key identifier is used as the
    Connectivity Association Key name (CKN)
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """id must be of a type compatible with oc-yang:hex-string""",
        'defined-type': "oc-yang:hex-string",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=True)""",
      })
    self.__id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_id(self):
    # Reset to a fresh (unchanged) instance of the restricted hex type.
    self.__id = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=True)
  def _get_key_clear_text(self):
    """
    Getter method for key_clear_text, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/key_clear_text (string)
    YANG Description: The key, used for signing and encrypting. Supplied as a clear text
    string. When read, also returned as clear text string.
    """
    # NOTE(review): per the model this holds key material in clear text;
    # avoid logging or echoing the returned value.
    return self.__key_clear_text
  def _set_key_clear_text(self, v, load=False):
    """
    Setter method for key_clear_text, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/key_clear_text (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_key_clear_text is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_key_clear_text() directly.
    YANG Description: The key, used for signing and encrypting. Supplied as a clear text
    string. When read, also returned as clear text string.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """key_clear_text must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)""",
      })
    self.__key_clear_text = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_key_clear_text(self):
    self.__key_clear_text = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=True)
  def _get_cryptographic_algorithm(self):
    """
    Getter method for cryptographic_algorithm, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/cryptographic_algorithm (enumeration)
    YANG Description: MKA Cryptographic authentication algorithm to use
    """
    return self.__cryptographic_algorithm
  def _set_cryptographic_algorithm(self, v, load=False):
    """
    Setter method for cryptographic_algorithm, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/cryptographic_algorithm (enumeration)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_cryptographic_algorithm is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_cryptographic_algorithm() directly.
    YANG Description: MKA Cryptographic authentication algorithm to use
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """cryptographic_algorithm must be of a type compatible with enumeration""",
        'defined-type': "openconfig-macsec:enumeration",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=True)""",
      })
    self.__cryptographic_algorithm = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_cryptographic_algorithm(self):
    self.__cryptographic_algorithm = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=True)
  def _get_valid_date_time(self):
    """
    Getter method for valid_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/valid_date_time (union)
    YANG Description: Date and time the key starts being valid according to local date and
    time configuration.
    """
    return self.__valid_date_time
  def _set_valid_date_time(self, v, load=False):
    """
    Setter method for valid_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/valid_date_time (union)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_valid_date_time is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_valid_date_time() directly.
    YANG Description: Date and time the key starts being valid according to local date and
    time configuration.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """valid_date_time must be of a type compatible with union""",
        'defined-type': "openconfig-macsec:union",
        'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)""",
      })
    self.__valid_date_time = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_valid_date_time(self):
    self.__valid_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)
  def _get_expiration_date_time(self):
    """
    Getter method for expiration_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/expiration_date_time (union)
    YANG Description: Key date and time expiration according to local date and time
    configuration.
    """
    return self.__expiration_date_time
  def _set_expiration_date_time(self, v, load=False):
    """
    Setter method for expiration_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config/expiration_date_time (union)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_expiration_date_time is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_expiration_date_time() directly.
    YANG Description: Key date and time expiration according to local date and time
    configuration.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """expiration_date_time must be of a type compatible with union""",
        'defined-type': "openconfig-macsec:union",
        'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)""",
      })
    self.__expiration_date_time = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_expiration_date_time(self):
    self.__expiration_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=True)
  # config:true leaves: each property exposes both getter and setter.
  # ("id" intentionally shadows the builtin here - it is the generated API.)
  id = __builtin__.property(_get_id, _set_id)
  key_clear_text = __builtin__.property(_get_key_clear_text, _set_key_clear_text)
  cryptographic_algorithm = __builtin__.property(_get_cryptographic_algorithm, _set_cryptographic_algorithm)
  valid_date_time = __builtin__.property(_get_valid_date_time, _set_valid_date_time)
  expiration_date_time = __builtin__.property(_get_expiration_date_time, _set_expiration_date_time)
  _pyangbind_elements = OrderedDict([('id', id), ('key_clear_text', key_clear_text), ('cryptographic_algorithm', cryptographic_algorithm), ('valid_date_time', valid_date_time), ('expiration_date_time', expiration_date_time), ])
class yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains/key-chain/mka-keys/mka-key/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Operational state data for MKA key
  """
  # __slots__ pins the attribute set; the double-underscore names are
  # mangled per-class by CPython, giving this class its own leaf storage.
  # All leaves in this container are config:false (operational state).
  __slots__ = ('_path_helper', '_extmethods', '__id','__key_clear_text','__cryptographic_algorithm','__valid_date_time','__expiration_date_time',)
  _yang_name = 'state'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Each leaf is wrapped in YANGDynClass, which enforces the YANG-derived
    # restrictions on assignment; all five leaves here are is_config=False
    # (operational state).
    self.__id = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
    self.__key_clear_text = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)
    self.__cryptographic_algorithm = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=False)
    self.__valid_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)
    self.__expiration_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)
    load = kwargs.pop("load", None)
    # Optional single positional argument: copy-construct from another
    # object exposing the same pyangbind elements; only leaves flagged as
    # changed on the source object are copied across.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['macsec', 'mka', 'key-chains', 'key-chain', 'mka-keys', 'mka-key', 'state']
  def _get_id(self):
    """
    Getter method for id, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/id (oc-yang:hex-string)
    YANG Description: Key identifier is used as the
    Connectivity Association Key name (CKN)
    """
    # Returns the YANGDynClass-wrapped leaf, not a plain str.
    return self.__id
  def _set_id(self, v, load=False):
    """
    Setter method for id, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/id (oc-yang:hex-string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_id() directly.
    YANG Description: Key identifier is used as the
    Connectivity Association Key name (CKN)
    """
    # Union helper values expose _utype; unwrap before revalidation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap to enforce the hex pattern and 1..64 length restrictions.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """id must be of a type compatible with oc-yang:hex-string""",
        'defined-type': "oc-yang:hex-string",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)""",
      })
    self.__id = t
    # Notify a container-level change hook when one is registered.
    if hasattr(self, '_set'):
      self._set()
  def _unset_id(self):
    # Reset the leaf to a fresh (unchanged) instance of its type.
    self.__id = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['1..64']}), is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
  def _get_key_clear_text(self):
    """
    Getter method for key_clear_text, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/key_clear_text (string)
    YANG Description: The key, used for signing and encrypting. Supplied as a clear text
    string. When read, also returned as clear text string.
    """
    # NOTE(review): per the model this leaf carries key material in clear
    # text; avoid logging or echoing the returned value.
    return self.__key_clear_text
  def _set_key_clear_text(self, v, load=False):
    """
    Setter method for key_clear_text, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/key_clear_text (string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_key_clear_text is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_key_clear_text() directly.
    YANG Description: The key, used for signing and encrypting. Supplied as a clear text
    string. When read, also returned as clear text string.
    """
    # Union helper values expose _utype; unwrap before revalidation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """key_clear_text must be of a type compatible with string""",
        'defined-type': "string",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)""",
      })
    self.__key_clear_text = t
    # Notify a container-level change hook when one is registered.
    if hasattr(self, '_set'):
      self._set()
  def _unset_key_clear_text(self):
    # Reset the leaf to a fresh (unchanged) instance of its type.
    self.__key_clear_text = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-clear-text", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='string', is_config=False)
  def _get_cryptographic_algorithm(self):
    """
    Getter method for cryptographic_algorithm, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/cryptographic_algorithm (enumeration)
    YANG Description: MKA Cryptographic authentication algorithm to use
    """
    return self.__cryptographic_algorithm
  def _set_cryptographic_algorithm(self, v, load=False):
    """
    Setter method for cryptographic_algorithm, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/cryptographic_algorithm (enumeration)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_cryptographic_algorithm is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_cryptographic_algorithm() directly.
    YANG Description: MKA Cryptographic authentication algorithm to use
    """
    # Union helper values expose _utype; unwrap before revalidation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap to enforce enum membership (AES_128_CMAC / AES_256_CMAC).
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """cryptographic_algorithm must be of a type compatible with enumeration""",
        'defined-type': "openconfig-macsec:enumeration",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=False)""",
      })
    self.__cryptographic_algorithm = t
    # Notify a container-level change hook when one is registered.
    if hasattr(self, '_set'):
      self._set()
  def _unset_cryptographic_algorithm(self):
    # Reset the leaf to a fresh (unchanged) instance of its type.
    self.__cryptographic_algorithm = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'AES_128_CMAC': {}, 'AES_256_CMAC': {}},), is_leaf=True, yang_name="cryptographic-algorithm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='enumeration', is_config=False)
def _get_valid_date_time(self):
"""
Getter method for valid_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/valid_date_time (union)
YANG Description: Date and time the key starts being valid according to local date and
time configuration.
"""
return self.__valid_date_time
def _set_valid_date_time(self, v, load=False):
"""
Setter method for valid_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/valid_date_time (union)
If this variable is read-only (config: false) in the
source YANG file, then _set_valid_date_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_valid_date_time() directly.
YANG Description: Date and time the key starts being valid according to local date and
time configuration.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """valid_date_time must be of a type compatible with union""",
'defined-type': "openconfig-macsec:union",
'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)""",
})
self.__valid_date_time = t
if hasattr(self, '_set'):
self._set()
  def _unset_valid_date_time(self):
    # Restore valid-date-time to its generated default ("VALID_IMMEDIATELY") by
    # rebuilding a fresh, unchanged YANGDynClass instance, discarding any value
    # previously populated by a backend.
    self.__valid_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'VALID_IMMEDIATELY': {}},),], default=six.text_type("VALID_IMMEDIATELY"), is_leaf=True, yang_name="valid-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)
  def _get_expiration_date_time(self):
    """
    Getter method for expiration_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/expiration_date_time (union)
    YANG Description: Key date and time expiration according to local date and time
configuration.
    """
    # Returns the name-mangled backing YANGDynClass instance directly
    # (no copy); callers mutate state only via _set_/_unset_ methods.
    return self.__expiration_date_time
  def _set_expiration_date_time(self, v, load=False):
    """
    Setter method for expiration_date_time, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state/expiration_date_time (union)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_expiration_date_time is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_expiration_date_time() directly.
    YANG Description: Key date and time expiration according to local date and time
configuration.
    """
    # Values carrying a pyangbind union type hint are normalised through
    # their _utype helper before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap the supplied value in the generated union type; TypeError or
    # ValueError from the restricted types is re-raised as a structured
    # ValueError describing the expected generated type.
    try:
      t = YANGDynClass(v,base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """expiration_date_time must be of a type compatible with union""",
        'defined-type': "openconfig-macsec:union",
        'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)""",
      })
    self.__expiration_date_time = t
    # Notify the parent tree (if a _set hook exists) that this node changed.
    if hasattr(self, '_set'):
      self._set()
  def _unset_expiration_date_time(self):
    # Restore expiration-date-time to its generated default ("NO_EXPIRATION"),
    # discarding any previously populated operational value.
    self.__expiration_date_time = YANGDynClass(base=[RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9]{4}\\-(0[1-9]|1[0-2])\\-(0[1-9]|[12][0-9]|3[01])[Tt](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9]):(0[0-9]|[1-5][0-9]|60)(\\.[0-9]+)?([Zz]|([+-](0[0-9]|1[0-9]|2[0-3]):(0[0-9]|[1-5][0-9])))'}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'NO_EXPIRATION': {}},),], default=six.text_type("NO_EXPIRATION"), is_leaf=True, yang_name="expiration-date-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='union', is_config=False)
id = __builtin__.property(_get_id)
key_clear_text = __builtin__.property(_get_key_clear_text)
cryptographic_algorithm = __builtin__.property(_get_cryptographic_algorithm)
valid_date_time = __builtin__.property(_get_valid_date_time)
expiration_date_time = __builtin__.property(_get_expiration_date_time)
_pyangbind_elements = OrderedDict([('id', id), ('key_clear_text', key_clear_text), ('cryptographic_algorithm', cryptographic_algorithm), ('valid_date_time', valid_date_time), ('expiration_date_time', expiration_date_time), ])
class yc_mka_key_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains/key-chain/mka-keys/mka-key. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: List of MKA keys
  """
  # Restrict instances to pyangbind bookkeeping plus the (name-mangled)
  # backing storage of each child element.
  __slots__ = ('_path_helper', '_extmethods', '__id','__config','__state',)
  _yang_name = 'mka-key'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Child elements start as unset/default instances of their generated
    # types; 'id' is the list key (is_keyval=True).
    self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    # "load" is forwarded to the setters so that list keys may be populated
    # during deserialisation (see _set_id's parent guard).
    load = kwargs.pop("load", None)
    # A single positional argument acts as a copy source: it must expose all
    # of this class's elements, and each changed element is re-applied
    # through the corresponding setter.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Path of this node in the YANG data tree; falls back to the static
    # absolute path when this instance has no parent.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'key-chains', 'key-chain', 'mka-keys', 'mka-key']
  def _get_id(self):
    """
    Getter method for id, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/id (leafref)
    YANG Description: Reference to the MKA key id
    """
    return self.__id
  def _set_id(self, v, load=False):
    """
    Setter method for id, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/id (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_id() directly.
    YANG Description: Reference to the MKA key id
    """
    # 'id' is the list key: once this entry is inside an instantiated list
    # (has a parent), the key may only be changed via load (deserialisation).
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                            " within an instantiated list")
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """id must be of a type compatible with leafref""",
        'defined-type': "leafref",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)""",
      })
    self.__id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_id(self):
    # Reset the list key leafref to an unset default instance.
    self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
  def _get_config(self):
    """
    Getter method for config, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config (container)
    YANG Description: Configuration of MKA key
    """
    return self.__config
  def _set_config(self, v, load=False):
    """
    Setter method for config, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/config (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config() directly.
    YANG Description: Configuration of MKA key
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
      })
    self.__config = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_config(self):
    # Reset the config container to a fresh default instance.
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state (container)
    YANG Description: Operational state data for MKA key
    """
    return self.__state
  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.
    YANG Description: Operational state data for MKA key
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """state must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
      })
    self.__state = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_state(self):
    # Reset the state container to a fresh default instance.
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  # Public read/write properties (this list entry is configuration data).
  id = __builtin__.property(_get_id, _set_id)
  config = __builtin__.property(_get_config, _set_config)
  state = __builtin__.property(_get_state, _set_state)
  # Ordered mapping of YANG child element name -> property, in model order.
  _pyangbind_elements = OrderedDict([('id', id), ('config', config), ('state', state), ])
class yc_mka_keys_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains/key-chain/mka-keys. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Enclosing container for the list of MKA keys
  """
  # Restrict instances to pyangbind bookkeeping plus the (name-mangled)
  # backing storage of the single child list.
  __slots__ = ('_path_helper', '_extmethods', '__mka_key',)
  _yang_name = 'mka-keys'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # The mka-key list is keyed on 'id' and not user-ordered.
    self.__mka_key = YANGDynClass(base=YANGListType("id",yc_mka_key_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key, yang_name="mka-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='id', extensions=None), is_container='list', yang_name="mka-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
    # "load" is forwarded to the setters during copy-construction.
    load = kwargs.pop("load", None)
    # A single positional argument acts as a copy source: it must expose all
    # of this class's elements; each changed element is re-applied via its
    # setter.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Path of this node in the YANG data tree; falls back to the static
    # absolute path when this instance has no parent.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'key-chains', 'key-chain', 'mka-keys']
  def _get_mka_key(self):
    """
    Getter method for mka_key, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key (list)
    YANG Description: List of MKA keys
    """
    return self.__mka_key
  def _set_mka_key(self, v, load=False):
    """
    Setter method for mka_key, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys/mka_key (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mka_key is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mka_key() directly.
    YANG Description: List of MKA keys
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("id",yc_mka_key_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key, yang_name="mka-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='id', extensions=None), is_container='list', yang_name="mka-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """mka_key must be of a type compatible with list""",
        'defined-type': "list",
        'generated-type': """YANGDynClass(base=YANGListType("id",yc_mka_key_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key, yang_name="mka-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='id', extensions=None), is_container='list', yang_name="mka-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)""",
      })
    self.__mka_key = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_mka_key(self):
    # Reset the mka-key list to a fresh, empty default instance.
    self.__mka_key = YANGDynClass(base=YANGListType("id",yc_mka_key_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys_mka_key, yang_name="mka-key", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='id', extensions=None), is_container='list', yang_name="mka-key", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
  # Public read/write property for the child list.
  mka_key = __builtin__.property(_get_mka_key, _set_mka_key)
  # Ordered mapping of YANG child element name -> property.
  _pyangbind_elements = OrderedDict([('mka_key', mka_key), ])
class yc_key_chain_openconfig_macsec__macsec_mka_key_chains_key_chain(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains/key-chain. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: MKA Key chain name
  """
  # Restrict instances to pyangbind bookkeeping plus the (name-mangled)
  # backing storage of each child element.
  __slots__ = ('_path_helper', '_extmethods', '__name','__config','__state','__mka_keys',)
  _yang_name = 'key-chain'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Child elements start as unset/default instances; 'name' is the list
    # key (is_keyval=True).
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__mka_keys = YANGDynClass(base=yc_mka_keys_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys, is_container='container', yang_name="mka-keys", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    # "load" is forwarded to the setters so that the list key may be set
    # during deserialisation (see _set_name's parent guard).
    load = kwargs.pop("load", None)
    # A single positional argument acts as a copy source: it must expose all
    # of this class's elements; each changed element is re-applied via its
    # setter.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Path of this node in the YANG data tree; falls back to the static
    # absolute path when this instance has no parent.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'key-chains', 'key-chain']
  def _get_name(self):
    """
    Getter method for name, mapped from YANG variable /macsec/mka/key_chains/key_chain/name (leafref)
    YANG Description: Reference to the MKA Key chain name
    """
    return self.__name
  def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /macsec/mka/key_chains/key_chain/name (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.
    YANG Description: Reference to the MKA Key chain name
    """
    # 'name' is the list key: once this entry is inside an instantiated list
    # (has a parent), the key may only be changed via load (deserialisation).
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                            " within an instantiated list")
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """name must be of a type compatible with leafref""",
        'defined-type': "leafref",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)""",
      })
    self.__name = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_name(self):
    # Reset the list key leafref to an unset default instance.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
  def _get_config(self):
    """
    Getter method for config, mapped from YANG variable /macsec/mka/key_chains/key_chain/config (container)
    YANG Description: Configuration of the MKA key chain
    """
    return self.__config
  def _set_config(self, v, load=False):
    """
    Setter method for config, mapped from YANG variable /macsec/mka/key_chains/key_chain/config (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config() directly.
    YANG Description: Configuration of the MKA key chain
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
      })
    self.__config = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_config(self):
    # Reset the config container to a fresh default instance.
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_mka_key_chains_key_chain_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /macsec/mka/key_chains/key_chain/state (container)
    YANG Description: Operational state data for MKA key chain
    """
    return self.__state
  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /macsec/mka/key_chains/key_chain/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.
    YANG Description: Operational state data for MKA key chain
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """state must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
      })
    self.__state = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_state(self):
    # Reset the state container to a fresh default instance.
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_key_chains_key_chain_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_mka_keys(self):
    """
    Getter method for mka_keys, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys (container)
    YANG Description: Enclosing container for the list of MKA keys
    """
    return self.__mka_keys
  def _set_mka_keys(self, v, load=False):
    """
    Setter method for mka_keys, mapped from YANG variable /macsec/mka/key_chains/key_chain/mka_keys (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mka_keys is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mka_keys() directly.
    YANG Description: Enclosing container for the list of MKA keys
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_mka_keys_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys, is_container='container', yang_name="mka-keys", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """mka_keys must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_mka_keys_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys, is_container='container', yang_name="mka-keys", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
      })
    self.__mka_keys = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_mka_keys(self):
    # Reset the mka-keys container to a fresh default instance.
    self.__mka_keys = YANGDynClass(base=yc_mka_keys_openconfig_macsec__macsec_mka_key_chains_key_chain_mka_keys, is_container='container', yang_name="mka-keys", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  # Public read/write properties (this list entry is configuration data).
  name = __builtin__.property(_get_name, _set_name)
  config = __builtin__.property(_get_config, _set_config)
  state = __builtin__.property(_get_state, _set_state)
  mka_keys = __builtin__.property(_get_mka_keys, _set_mka_keys)
  # Ordered mapping of YANG child element name -> property, in model order.
  _pyangbind_elements = OrderedDict([('name', name), ('config', config), ('state', state), ('mka_keys', mka_keys), ])
class yc_key_chains_openconfig_macsec__macsec_mka_key_chains(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/mka/key-chains. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Enclosing container for the MKA key chains
  """
  # Restrict instances to pyangbind bookkeeping plus the (name-mangled)
  # backing storage of the single child list.
  __slots__ = ('_path_helper', '_extmethods', '__key_chain',)
  _yang_name = 'key-chains'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # The key-chain list is keyed on 'name' and not user-ordered.
    self.__key_chain = YANGDynClass(base=YANGListType("name",yc_key_chain_openconfig_macsec__macsec_mka_key_chains_key_chain, yang_name="key-chain", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
    # "load" is forwarded to the setters during copy-construction.
    load = kwargs.pop("load", None)
    # A single positional argument acts as a copy source: it must expose all
    # of this class's elements; each changed element is re-applied via its
    # setter.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Path of this node in the YANG data tree; falls back to the static
    # absolute path when this instance has no parent.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'mka', 'key-chains']
  def _get_key_chain(self):
    """
    Getter method for key_chain, mapped from YANG variable /macsec/mka/key_chains/key_chain (list)
    YANG Description: MKA Key chain name
    """
    # Returns the name-mangled backing YANGDynClass list directly (no copy).
    return self.__key_chain
def _set_key_chain(self, v, load=False):
"""
Setter method for key_chain, mapped from YANG variable /macsec/mka/key_chains/key_chain (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_key_chain is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_key_chain() directly.
YANG Description: MKA Key chain name
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("name",yc_key_chain_openconfig_macsec__macsec_mka_key_chains_key_chain, yang_name="key-chain", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """key_chain must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("name",yc_key_chain_openconfig_macsec__macsec_mka_key_chains_key_chain, yang_name="key-chain", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)""",
})
self.__key_chain = t
if hasattr(self, '_set'):
self._set()
def _unset_key_chain(self):
self.__key_chain = YANGDynClass(base=YANGListType("name",yc_key_chain_openconfig_macsec__macsec_mka_key_chains_key_chain, yang_name="key-chain", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
key_chain = __builtin__.property(_get_key_chain, _set_key_chain)
_pyangbind_elements = OrderedDict([('key_chain', key_chain), ])
class yc_counters_openconfig_macsec__macsec_mka_state_counters(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-macsec - based on the path /macsec/mka/state/counters. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: MKA global counters
"""
__slots__ = ('_path_helper', '_extmethods', '__out_mkpdu_errors','__in_mkpdu_icv_verification_errors','__in_mkpdu_validation_errors','__in_mkpdu_bad_peer_errors','__in_mkpdu_peer_list_errors','__sak_generation_errors','__sak_hash_errors','__sak_encryption_errors','__sak_decryption_errors','__sak_cipher_mismatch_errors',)
_yang_name = 'counters'
_yang_namespace = 'http://openconfig.net/yang/macsec'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__out_mkpdu_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__in_mkpdu_icv_verification_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-icv-verification-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__in_mkpdu_validation_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-validation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__in_mkpdu_bad_peer_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-bad-peer-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__in_mkpdu_peer_list_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-peer-list-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__sak_generation_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-generation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__sak_hash_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-hash-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__sak_encryption_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-encryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__sak_decryption_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-decryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__sak_cipher_mismatch_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-cipher-mismatch-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['macsec', 'mka', 'state', 'counters']
def _get_out_mkpdu_errors(self):
"""
Getter method for out_mkpdu_errors, mapped from YANG variable /macsec/mka/state/counters/out_mkpdu_errors (oc-yang:counter64)
YANG Description: MKPDU TX error count
"""
return self.__out_mkpdu_errors
def _set_out_mkpdu_errors(self, v, load=False):
"""
Setter method for out_mkpdu_errors, mapped from YANG variable /macsec/mka/state/counters/out_mkpdu_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_out_mkpdu_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_out_mkpdu_errors() directly.
YANG Description: MKPDU TX error count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """out_mkpdu_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__out_mkpdu_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_out_mkpdu_errors(self):
self.__out_mkpdu_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_in_mkpdu_icv_verification_errors(self):
"""
Getter method for in_mkpdu_icv_verification_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_icv_verification_errors (oc-yang:counter64)
YANG Description: MKPDU RX ICV verification error count
"""
return self.__in_mkpdu_icv_verification_errors
def _set_in_mkpdu_icv_verification_errors(self, v, load=False):
"""
Setter method for in_mkpdu_icv_verification_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_icv_verification_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_in_mkpdu_icv_verification_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_in_mkpdu_icv_verification_errors() directly.
YANG Description: MKPDU RX ICV verification error count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-icv-verification-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """in_mkpdu_icv_verification_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-icv-verification-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__in_mkpdu_icv_verification_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_in_mkpdu_icv_verification_errors(self):
self.__in_mkpdu_icv_verification_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-icv-verification-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_in_mkpdu_validation_errors(self):
"""
Getter method for in_mkpdu_validation_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_validation_errors (oc-yang:counter64)
YANG Description: MKPDU RX validation error count
"""
return self.__in_mkpdu_validation_errors
def _set_in_mkpdu_validation_errors(self, v, load=False):
"""
Setter method for in_mkpdu_validation_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_validation_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_in_mkpdu_validation_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_in_mkpdu_validation_errors() directly.
YANG Description: MKPDU RX validation error count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-validation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """in_mkpdu_validation_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-validation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__in_mkpdu_validation_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_in_mkpdu_validation_errors(self):
self.__in_mkpdu_validation_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-validation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_in_mkpdu_bad_peer_errors(self):
"""
Getter method for in_mkpdu_bad_peer_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_bad_peer_errors (oc-yang:counter64)
YANG Description: MKPDU RX bad peer message number error count
"""
return self.__in_mkpdu_bad_peer_errors
def _set_in_mkpdu_bad_peer_errors(self, v, load=False):
"""
Setter method for in_mkpdu_bad_peer_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_bad_peer_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_in_mkpdu_bad_peer_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_in_mkpdu_bad_peer_errors() directly.
YANG Description: MKPDU RX bad peer message number error count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-bad-peer-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """in_mkpdu_bad_peer_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-bad-peer-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__in_mkpdu_bad_peer_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_in_mkpdu_bad_peer_errors(self):
self.__in_mkpdu_bad_peer_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-bad-peer-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_in_mkpdu_peer_list_errors(self):
"""
Getter method for in_mkpdu_peer_list_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_peer_list_errors (oc-yang:counter64)
YANG Description: MKPDU RX non-recent peer list Message Number error count
"""
return self.__in_mkpdu_peer_list_errors
def _set_in_mkpdu_peer_list_errors(self, v, load=False):
"""
Setter method for in_mkpdu_peer_list_errors, mapped from YANG variable /macsec/mka/state/counters/in_mkpdu_peer_list_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_in_mkpdu_peer_list_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_in_mkpdu_peer_list_errors() directly.
YANG Description: MKPDU RX non-recent peer list Message Number error count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-peer-list-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """in_mkpdu_peer_list_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-peer-list-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__in_mkpdu_peer_list_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_in_mkpdu_peer_list_errors(self):
self.__in_mkpdu_peer_list_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu-peer-list-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sak_generation_errors(self):
"""
Getter method for sak_generation_errors, mapped from YANG variable /macsec/mka/state/counters/sak_generation_errors (oc-yang:counter64)
YANG Description: MKA error SAK generation count
"""
return self.__sak_generation_errors
def _set_sak_generation_errors(self, v, load=False):
"""
Setter method for sak_generation_errors, mapped from YANG variable /macsec/mka/state/counters/sak_generation_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_sak_generation_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sak_generation_errors() directly.
YANG Description: MKA error SAK generation count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-generation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sak_generation_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-generation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__sak_generation_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_sak_generation_errors(self):
self.__sak_generation_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-generation-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sak_hash_errors(self):
"""
Getter method for sak_hash_errors, mapped from YANG variable /macsec/mka/state/counters/sak_hash_errors (oc-yang:counter64)
YANG Description: MKA error Hash Key generation count
"""
return self.__sak_hash_errors
def _set_sak_hash_errors(self, v, load=False):
"""
Setter method for sak_hash_errors, mapped from YANG variable /macsec/mka/state/counters/sak_hash_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_sak_hash_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sak_hash_errors() directly.
YANG Description: MKA error Hash Key generation count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-hash-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sak_hash_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-hash-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__sak_hash_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_sak_hash_errors(self):
self.__sak_hash_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-hash-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sak_encryption_errors(self):
"""
Getter method for sak_encryption_errors, mapped from YANG variable /macsec/mka/state/counters/sak_encryption_errors (oc-yang:counter64)
YANG Description: MKA error SAK encryption/wrap count
"""
return self.__sak_encryption_errors
def _set_sak_encryption_errors(self, v, load=False):
"""
Setter method for sak_encryption_errors, mapped from YANG variable /macsec/mka/state/counters/sak_encryption_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_sak_encryption_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sak_encryption_errors() directly.
YANG Description: MKA error SAK encryption/wrap count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-encryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sak_encryption_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-encryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__sak_encryption_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_sak_encryption_errors(self):
self.__sak_encryption_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-encryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sak_decryption_errors(self):
"""
Getter method for sak_decryption_errors, mapped from YANG variable /macsec/mka/state/counters/sak_decryption_errors (oc-yang:counter64)
YANG Description: MKA error SAK decryption/unwrap count
"""
return self.__sak_decryption_errors
def _set_sak_decryption_errors(self, v, load=False):
"""
Setter method for sak_decryption_errors, mapped from YANG variable /macsec/mka/state/counters/sak_decryption_errors (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_sak_decryption_errors is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_sak_decryption_errors() directly.
YANG Description: MKA error SAK decryption/unwrap count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-decryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """sak_decryption_errors must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-decryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__sak_decryption_errors = t
if hasattr(self, '_set'):
self._set()
def _unset_sak_decryption_errors(self):
self.__sak_decryption_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-decryption-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
# NOTE: pyangbind-generated accessor trio for the read-only (config: false)
# leaf /macsec/mka/state/counters/sak-cipher-mismatch-errors. The embedded
# 'generated-type' string below mirrors the YANGDynClass call verbatim; keep
# code and string in sync (regenerate rather than hand-edit).
def _get_sak_cipher_mismatch_errors(self):
    """
    Getter method for sak_cipher_mismatch_errors, mapped from YANG variable /macsec/mka/state/counters/sak_cipher_mismatch_errors (oc-yang:counter64)

    YANG Description: MKA error SAK cipher mismatch count
    """
    return self.__sak_cipher_mismatch_errors

def _set_sak_cipher_mismatch_errors(self, v, load=False):
    """
    Setter method for sak_cipher_mismatch_errors, mapped from YANG variable /macsec/mka/state/counters/sak_cipher_mismatch_errors (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sak_cipher_mismatch_errors is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sak_cipher_mismatch_errors() directly.

    YANG Description: MKA error SAK cipher mismatch count
    """
    # Values wrapped in a pyangbind union type expose _utype; unwrap before
    # validating — presumably converts to the union's concrete member type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Type/range validation (counter64: 0..18446744073709551615) is
        # performed by YANGDynClass/RestrictedClassType at construction time.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-cipher-mismatch-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """sak_cipher_mismatch_errors must be of a type compatible with oc-yang:counter64""",
            'defined-type': "oc-yang:counter64",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-cipher-mismatch-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
        })

    self.__sak_cipher_mismatch_errors = t
    # Propagate the change notification when the parent wired in a _set hook.
    if hasattr(self, '_set'):
        self._set()

def _unset_sak_cipher_mismatch_errors(self):
    # Reset the leaf to a fresh default-constructed (unchanged) instance.
    self.__sak_cipher_mismatch_errors = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sak-cipher-mismatch-errors", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
# All leaves in this container are state data (config: false), so each is
# exposed as a getter-only property — no public setter is bound.
out_mkpdu_errors = __builtin__.property(_get_out_mkpdu_errors)
in_mkpdu_icv_verification_errors = __builtin__.property(_get_in_mkpdu_icv_verification_errors)
in_mkpdu_validation_errors = __builtin__.property(_get_in_mkpdu_validation_errors)
in_mkpdu_bad_peer_errors = __builtin__.property(_get_in_mkpdu_bad_peer_errors)
in_mkpdu_peer_list_errors = __builtin__.property(_get_in_mkpdu_peer_list_errors)
sak_generation_errors = __builtin__.property(_get_sak_generation_errors)
sak_hash_errors = __builtin__.property(_get_sak_hash_errors)
sak_encryption_errors = __builtin__.property(_get_sak_encryption_errors)
sak_decryption_errors = __builtin__.property(_get_sak_decryption_errors)
sak_cipher_mismatch_errors = __builtin__.property(_get_sak_cipher_mismatch_errors)

# Ordered element map consumed by PybindBase (e.g. for copy-construction in
# __init__); order follows the YANG module's leaf order.
_pyangbind_elements = OrderedDict([('out_mkpdu_errors', out_mkpdu_errors), ('in_mkpdu_icv_verification_errors', in_mkpdu_icv_verification_errors), ('in_mkpdu_validation_errors', in_mkpdu_validation_errors), ('in_mkpdu_bad_peer_errors', in_mkpdu_bad_peer_errors), ('in_mkpdu_peer_list_errors', in_mkpdu_peer_list_errors), ('sak_generation_errors', sak_generation_errors), ('sak_hash_errors', sak_hash_errors), ('sak_encryption_errors', sak_encryption_errors), ('sak_decryption_errors', sak_decryption_errors), ('sak_cipher_mismatch_errors', sak_cipher_mismatch_errors), ])
class yc_state_openconfig_macsec__macsec_mka_state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-macsec - based on the path /macsec/mka/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Operational state data for MKA
    """
    # __slots__ restricts instances to these attributes; '__counters' is
    # name-mangled, so the accessors below are the only supported access path.
    __slots__ = ('_path_helper', '_extmethods', '__counters',)

    _yang_name = 'state'
    _yang_namespace = 'http://openconfig.net/yang/macsec'

    _pybind_generated_by = 'container'

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

        load = kwargs.pop("load", None)
        # Copy-construction: a single positional argument must expose every
        # element in _pyangbind_elements; only changed elements are copied.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Delegate to the parent when attached to a tree; otherwise return the
        # absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path()+[self._yang_name]
        else:
            return ['macsec', 'mka', 'state']

    def _get_counters(self):
        """
        Getter method for counters, mapped from YANG variable /macsec/mka/state/counters (container)

        YANG Description: MKA global counters
        """
        return self.__counters

    def _set_counters(self, v, load=False):
        """
        Setter method for counters, mapped from YANG variable /macsec/mka/state/counters (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_counters is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_counters() directly.

        YANG Description: MKA global counters
        """
        # Unwrap pyangbind union-typed values before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Container-type validation is performed by YANGDynClass.
            t = YANGDynClass(v,base=yc_counters_openconfig_macsec__macsec_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
        except (TypeError, ValueError):
            raise ValueError({
                'error-string': """counters must be of a type compatible with container""",
                'defined-type': "container",
                'generated-type': """YANGDynClass(base=yc_counters_openconfig_macsec__macsec_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)""",
            })

        self.__counters = t
        # Propagate the change notification when a _set hook is present.
        if hasattr(self, '_set'):
            self._set()

    def _unset_counters(self):
        # Reset to a fresh default-constructed (unchanged) container.
        self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

    # State container: getter-only property (config: false).
    counters = __builtin__.property(_get_counters)

    _pyangbind_elements = OrderedDict([('counters', counters), ])
class yc_mka_openconfig_macsec__macsec_mka(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-macsec - based on the path /macsec/mka. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: The MKA
    """
    # __slots__ restricts instances to these attributes; the double-underscore
    # members are name-mangled, so access goes through the accessors below.
    __slots__ = ('_path_helper', '_extmethods', '__policies','__key_chains','__state',)

    _yang_name = 'mka'
    _yang_namespace = 'http://openconfig.net/yang/macsec'

    _pybind_generated_by = 'container'

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        self.__policies = YANGDynClass(base=yc_policies_openconfig_macsec__macsec_mka_policies, is_container='container', yang_name="policies", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
        self.__key_chains = YANGDynClass(base=yc_key_chains_openconfig_macsec__macsec_mka_key_chains, is_container='container', yang_name="key-chains", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
        self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

        load = kwargs.pop("load", None)
        # Copy-construction: a single positional argument must expose every
        # element in _pyangbind_elements; only changed elements are copied.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Delegate to the parent when attached to a tree; otherwise return the
        # absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path()+[self._yang_name]
        else:
            return ['macsec', 'mka']

    def _get_policies(self):
        """
        Getter method for policies, mapped from YANG variable /macsec/mka/policies (container)

        YANG Description: Enclosing container for the list of MKA policies
        """
        return self.__policies

    def _set_policies(self, v, load=False):
        """
        Setter method for policies, mapped from YANG variable /macsec/mka/policies (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_policies is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_policies() directly.

        YANG Description: Enclosing container for the list of MKA policies
        """
        # Unwrap pyangbind union-typed values before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(v,base=yc_policies_openconfig_macsec__macsec_mka_policies, is_container='container', yang_name="policies", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
        except (TypeError, ValueError):
            raise ValueError({
                'error-string': """policies must be of a type compatible with container""",
                'defined-type': "container",
                'generated-type': """YANGDynClass(base=yc_policies_openconfig_macsec__macsec_mka_policies, is_container='container', yang_name="policies", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
            })

        self.__policies = t
        # Propagate the change notification when a _set hook is present.
        if hasattr(self, '_set'):
            self._set()

    def _unset_policies(self):
        # Reset to a fresh default-constructed (unchanged) container.
        self.__policies = YANGDynClass(base=yc_policies_openconfig_macsec__macsec_mka_policies, is_container='container', yang_name="policies", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

    def _get_key_chains(self):
        """
        Getter method for key_chains, mapped from YANG variable /macsec/mka/key_chains (container)

        YANG Description: Enclosing container for the MKA key chains
        """
        return self.__key_chains

    def _set_key_chains(self, v, load=False):
        """
        Setter method for key_chains, mapped from YANG variable /macsec/mka/key_chains (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_key_chains is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_key_chains() directly.

        YANG Description: Enclosing container for the MKA key chains
        """
        # Unwrap pyangbind union-typed values before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(v,base=yc_key_chains_openconfig_macsec__macsec_mka_key_chains, is_container='container', yang_name="key-chains", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
        except (TypeError, ValueError):
            raise ValueError({
                'error-string': """key_chains must be of a type compatible with container""",
                'defined-type': "container",
                'generated-type': """YANGDynClass(base=yc_key_chains_openconfig_macsec__macsec_mka_key_chains, is_container='container', yang_name="key-chains", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
            })

        self.__key_chains = t
        # Propagate the change notification when a _set hook is present.
        if hasattr(self, '_set'):
            self._set()

    def _unset_key_chains(self):
        # Reset to a fresh default-constructed (unchanged) container.
        self.__key_chains = YANGDynClass(base=yc_key_chains_openconfig_macsec__macsec_mka_key_chains, is_container='container', yang_name="key-chains", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /macsec/mka/state (container)

        YANG Description: Operational state data for MKA
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /macsec/mka/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: Operational state data for MKA
        """
        # Unwrap pyangbind union-typed values before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
        except (TypeError, ValueError):
            raise ValueError({
                'error-string': """state must be of a type compatible with container""",
                'defined-type': "container",
                'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
            })

        self.__state = t
        # Propagate the change notification when a _set hook is present.
        if hasattr(self, '_set'):
            self._set()

    def _unset_state(self):
        # Reset to a fresh default-constructed (unchanged) container.
        self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

    # Config containers: read/write properties.
    policies = __builtin__.property(_get_policies, _set_policies)
    key_chains = __builtin__.property(_get_key_chains, _set_key_chains)
    state = __builtin__.property(_get_state, _set_state)

    # Ordered element map consumed by PybindBase (copy-construction above).
    _pyangbind_elements = OrderedDict([('policies', policies), ('key_chains', key_chains), ('state', state), ])
class yc_config_openconfig_macsec__macsec_interfaces_interface_config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Configuration data for MACsec on each interface
    """
    # __slots__ restricts instances to these attributes; the double-underscore
    # members are name-mangled, so access goes through the accessors below.
    __slots__ = ('_path_helper', '_extmethods', '__name','__enable','__replay_protection',)

    _yang_name = 'config'
    _yang_namespace = 'http://openconfig.net/yang/macsec'

    _pybind_generated_by = 'container'

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Leaf defaults per the YANG module: enable=false, replay-protection=0.
        self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=True)
        self.__enable = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
        self.__replay_protection = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=True)

        load = kwargs.pop("load", None)
        # Copy-construction: a single positional argument must expose every
        # element in _pyangbind_elements; only changed elements are copied.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Delegate to the parent when attached to a tree; otherwise return the
        # absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path()+[self._yang_name]
        else:
            return ['macsec', 'interfaces', 'interface', 'config']

    def _get_name(self):
        """
        Getter method for name, mapped from YANG variable /macsec/interfaces/interface/config/name (oc-if:base-interface-ref)

        YANG Description: Reference to the MACsec Ethernet interface
        """
        return self.__name

    def _set_name(self, v, load=False):
        """
        Setter method for name, mapped from YANG variable /macsec/interfaces/interface/config/name (oc-if:base-interface-ref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_name is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_name() directly.

        YANG Description: Reference to the MACsec Ethernet interface
        """
        # Unwrap pyangbind union-typed values before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=True)
        except (TypeError, ValueError):
            raise ValueError({
                'error-string': """name must be of a type compatible with oc-if:base-interface-ref""",
                'defined-type': "oc-if:base-interface-ref",
                'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=True)""",
            })

        self.__name = t
        # Propagate the change notification when a _set hook is present.
        if hasattr(self, '_set'):
            self._set()

    def _unset_name(self):
        # Reset to a fresh default-constructed (unchanged) leaf.
        self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=True)

    def _get_enable(self):
        """
        Getter method for enable, mapped from YANG variable /macsec/interfaces/interface/config/enable (boolean)

        YANG Description: Enable MACsec on an interface
        """
        return self.__enable

    def _set_enable(self, v, load=False):
        """
        Setter method for enable, mapped from YANG variable /macsec/interfaces/interface/config/enable (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_enable is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_enable() directly.

        YANG Description: Enable MACsec on an interface
        """
        # Unwrap pyangbind union-typed values before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)
        except (TypeError, ValueError):
            raise ValueError({
                'error-string': """enable must be of a type compatible with boolean""",
                'defined-type': "boolean",
                'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)""",
            })

        self.__enable = t
        # Propagate the change notification when a _set hook is present.
        if hasattr(self, '_set'):
            self._set()

    def _unset_enable(self):
        # Reset to a fresh default-constructed (unchanged) leaf.
        self.__enable = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=True)

    def _get_replay_protection(self):
        """
        Getter method for replay_protection, mapped from YANG variable /macsec/interfaces/interface/config/replay_protection (uint16)

        YANG Description: MACsec window size, as defined by the number of out-of-order frames
        that are accepted. A value of 0 means that frames are accepted only in
        the correct order.
        """
        return self.__replay_protection

    def _set_replay_protection(self, v, load=False):
        """
        Setter method for replay_protection, mapped from YANG variable /macsec/interfaces/interface/config/replay_protection (uint16)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_replay_protection is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_replay_protection() directly.

        YANG Description: MACsec window size, as defined by the number of out-of-order frames
        that are accepted. A value of 0 means that frames are accepted only in
        the correct order.
        """
        # Unwrap pyangbind union-typed values before validation.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Type/range validation (uint16: 0..65535) happens here.
            t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=True)
        except (TypeError, ValueError):
            raise ValueError({
                'error-string': """replay_protection must be of a type compatible with uint16""",
                'defined-type': "uint16",
                'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=True)""",
            })

        self.__replay_protection = t
        # Propagate the change notification when a _set hook is present.
        if hasattr(self, '_set'):
            self._set()

    def _unset_replay_protection(self):
        # Reset to a fresh default-constructed (unchanged) leaf.
        self.__replay_protection = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=True)

    # Config leaves: read/write properties.
    name = __builtin__.property(_get_name, _set_name)
    enable = __builtin__.property(_get_enable, _set_enable)
    replay_protection = __builtin__.property(_get_replay_protection, _set_replay_protection)

    # Ordered element map consumed by PybindBase (copy-construction above).
    _pyangbind_elements = OrderedDict([('name', name), ('enable', enable), ('replay_protection', replay_protection), ])
class yc_counters_openconfig_macsec__macsec_interfaces_interface_state_counters(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/state/counters. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: MACsec interface counters
"""
__slots__ = ('_path_helper', '_extmethods', '__tx_untagged_pkts','__rx_untagged_pkts','__rx_badtag_pkts','__rx_unknownsci_pkts','__rx_nosci_pkts',)
_yang_name = 'counters'
_yang_namespace = 'http://openconfig.net/yang/macsec'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # All five leaves are oc-yang:counter64 state counters (config: false).
    self.__tx_untagged_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="tx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    self.__rx_untagged_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    self.__rx_badtag_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-badtag-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    self.__rx_unknownsci_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-unknownsci-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    self.__rx_nosci_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-nosci-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)

    load = kwargs.pop("load", None)
    # Copy-construction: a single positional argument must expose every
    # element in _pyangbind_elements; only changed elements are copied.
    if args:
        if len(args) > 1:
            raise TypeError("cannot create a YANG container with >1 argument")
        all_attr = True
        for e in self._pyangbind_elements:
            if not hasattr(args[0], e):
                all_attr = False
                break
        if not all_attr:
            raise ValueError("Supplied object did not have the correct attributes")
        for e in self._pyangbind_elements:
            nobj = getattr(args[0], e)
            if nobj._changed() is False:
                continue
            setmethod = getattr(self, "_set_%s" % e)
            if load is None:
                setmethod(getattr(args[0], e))
            else:
                setmethod(getattr(args[0], e), load=load)

def _path(self):
    # Delegate to the parent when attached to a tree; otherwise return the
    # absolute schema path of this container.
    if hasattr(self, "_parent"):
        return self._parent._path()+[self._yang_name]
    else:
        return ['macsec', 'interfaces', 'interface', 'state', 'counters']
def _get_tx_untagged_pkts(self):
"""
Getter method for tx_untagged_pkts, mapped from YANG variable /macsec/interfaces/interface/state/counters/tx_untagged_pkts (oc-yang:counter64)
YANG Description: MACsec interface level Transmit untagged Packets counter.
This counter will increment if MACsec is enabled on interface and the
outgoing packet is not tagged with MACsec header.
"""
return self.__tx_untagged_pkts
def _set_tx_untagged_pkts(self, v, load=False):
"""
Setter method for tx_untagged_pkts, mapped from YANG variable /macsec/interfaces/interface/state/counters/tx_untagged_pkts (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_tx_untagged_pkts is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_tx_untagged_pkts() directly.
YANG Description: MACsec interface level Transmit untagged Packets counter.
This counter will increment if MACsec is enabled on interface and the
outgoing packet is not tagged with MACsec header.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="tx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """tx_untagged_pkts must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="tx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__tx_untagged_pkts = t
if hasattr(self, '_set'):
self._set()
def _unset_tx_untagged_pkts(self):
self.__tx_untagged_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="tx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_rx_untagged_pkts(self):
"""
Getter method for rx_untagged_pkts, mapped from YANG variable /macsec/interfaces/interface/state/counters/rx_untagged_pkts (oc-yang:counter64)
YANG Description: MACsec interface level Receive untagged Packets counter.
This counter will increment if MACsec is enabled on interface and the
incoming packet does not have MACsec tag.
"""
return self.__rx_untagged_pkts
def _set_rx_untagged_pkts(self, v, load=False):
"""
Setter method for rx_untagged_pkts, mapped from YANG variable /macsec/interfaces/interface/state/counters/rx_untagged_pkts (oc-yang:counter64)
If this variable is read-only (config: false) in the
source YANG file, then _set_rx_untagged_pkts is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rx_untagged_pkts() directly.
YANG Description: MACsec interface level Receive untagged Packets counter.
This counter will increment if MACsec is enabled on interface and the
incoming packet does not have MACsec tag.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """rx_untagged_pkts must be of a type compatible with oc-yang:counter64""",
'defined-type': "oc-yang:counter64",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
})
self.__rx_untagged_pkts = t
if hasattr(self, '_set'):
self._set()
def _unset_rx_untagged_pkts(self):
  # Reset the leaf by rebinding a fresh (unchanged) instance of its generated type.
  self.__rx_untagged_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-untagged-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_rx_badtag_pkts(self):
  """
  Getter method for rx_badtag_pkts, mapped from YANG variable /macsec/interfaces/interface/state/counters/rx_badtag_pkts (oc-yang:counter64)

  YANG Description: MACsec interface level Receive Bad Tag Packets counter.
  This counter will increment if MACsec is enabled on interface and
  incoming packet has incorrect MACsec tag.
  """
  # Returns the YANGDynClass-wrapped counter64 leaf (read-only state).
  return self.__rx_badtag_pkts
def _set_rx_badtag_pkts(self, v, load=False):
  """
  Setter for rx_badtag_pkts, mapped from the YANG variable
  /macsec/interfaces/interface/state/counters/rx_badtag_pkts (oc-yang:counter64).

  This leaf is operational state (config: false), so the setter is treated as
  private; backends populate it by calling thisObj._set_rx_badtag_pkts()
  directly.

  YANG Description: MACsec interface level Receive Bad Tag Packets counter.
  This counter will increment if MACsec is enabled on interface and
  incoming packet has incorrect MACsec tag.
  """
  # Unwrap an already-typed value to its underlying native type so it is
  # re-validated against this leaf's restrictions below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    wrapped = YANGDynClass(
      v,
      base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64),
      is_leaf=True,
      yang_name="rx-badtag-pkts",
      parent=self,
      path_helper=self._path_helper,
      extmethods=self._extmethods,
      register_paths=True,
      namespace='http://openconfig.net/yang/macsec',
      defining_module='openconfig-macsec',
      yang_type='oc-yang:counter64',
      is_config=False,
    )
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """rx_badtag_pkts must be of a type compatible with oc-yang:counter64""",
      'defined-type': "oc-yang:counter64",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-badtag-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
    })
  else:
    # Commit the validated value and notify any registered change hook.
    self.__rx_badtag_pkts = wrapped
    if hasattr(self, '_set'):
      self._set()
def _unset_rx_badtag_pkts(self):
  # Reset the leaf by rebinding a fresh (unchanged) instance of its generated type.
  self.__rx_badtag_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-badtag-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_rx_unknownsci_pkts(self):
  """
  Getter method for rx_unknownsci_pkts, mapped from YANG variable /macsec/interfaces/interface/state/counters/rx_unknownsci_pkts (oc-yang:counter64)

  YANG Description: MACsec interface level Receive Unknown SCI Packets counter.
  This counter will increment if MACsec is enabled on the interface and
  SCI present in the MACsec tag of the incoming packet does not match any
  SCI present in ingress SCI table.
  """
  # Returns the YANGDynClass-wrapped counter64 leaf (read-only state).
  return self.__rx_unknownsci_pkts
def _set_rx_unknownsci_pkts(self, v, load=False):
  """
  Setter for rx_unknownsci_pkts, mapped from the YANG variable
  /macsec/interfaces/interface/state/counters/rx_unknownsci_pkts (oc-yang:counter64).

  This leaf is operational state (config: false), so the setter is treated as
  private; backends populate it by calling thisObj._set_rx_unknownsci_pkts()
  directly.

  YANG Description: MACsec interface level Receive Unknown SCI Packets counter.
  This counter will increment if MACsec is enabled on the interface and
  SCI present in the MACsec tag of the incoming packet does not match any
  SCI present in ingress SCI table.
  """
  # Unwrap an already-typed value to its underlying native type so it is
  # re-validated against this leaf's restrictions below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    wrapped = YANGDynClass(
      v,
      base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64),
      is_leaf=True,
      yang_name="rx-unknownsci-pkts",
      parent=self,
      path_helper=self._path_helper,
      extmethods=self._extmethods,
      register_paths=True,
      namespace='http://openconfig.net/yang/macsec',
      defining_module='openconfig-macsec',
      yang_type='oc-yang:counter64',
      is_config=False,
    )
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """rx_unknownsci_pkts must be of a type compatible with oc-yang:counter64""",
      'defined-type': "oc-yang:counter64",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-unknownsci-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
    })
  else:
    # Commit the validated value and notify any registered change hook.
    self.__rx_unknownsci_pkts = wrapped
    if hasattr(self, '_set'):
      self._set()
def _unset_rx_unknownsci_pkts(self):
  # Reset the leaf by rebinding a fresh (unchanged) instance of its generated type.
  self.__rx_unknownsci_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-unknownsci-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_rx_nosci_pkts(self):
  """
  Getter method for rx_nosci_pkts, mapped from YANG variable /macsec/interfaces/interface/state/counters/rx_nosci_pkts (oc-yang:counter64)

  YANG Description: MACsec interface level Receive No SCI Packets counter.
  This counter will increment if MACsec is enabled on interface and
  incoming packet does not have SCI field in MACsec tag.
  """
  # Returns the YANGDynClass-wrapped counter64 leaf (read-only state).
  return self.__rx_nosci_pkts
def _set_rx_nosci_pkts(self, v, load=False):
  """
  Setter for rx_nosci_pkts, mapped from the YANG variable
  /macsec/interfaces/interface/state/counters/rx_nosci_pkts (oc-yang:counter64).

  This leaf is operational state (config: false), so the setter is treated as
  private; backends populate it by calling thisObj._set_rx_nosci_pkts()
  directly.

  YANG Description: MACsec interface level Receive No SCI Packets counter.
  This counter will increment if MACsec is enabled on interface and
  incoming packet does not have SCI field in MACsec tag.
  """
  # Unwrap an already-typed value to its underlying native type so it is
  # re-validated against this leaf's restrictions below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    wrapped = YANGDynClass(
      v,
      base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64),
      is_leaf=True,
      yang_name="rx-nosci-pkts",
      parent=self,
      path_helper=self._path_helper,
      extmethods=self._extmethods,
      register_paths=True,
      namespace='http://openconfig.net/yang/macsec',
      defining_module='openconfig-macsec',
      yang_type='oc-yang:counter64',
      is_config=False,
    )
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """rx_nosci_pkts must be of a type compatible with oc-yang:counter64""",
      'defined-type': "oc-yang:counter64",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-nosci-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
    })
  else:
    # Commit the validated value and notify any registered change hook.
    self.__rx_nosci_pkts = wrapped
    if hasattr(self, '_set'):
      self._set()
def _unset_rx_nosci_pkts(self):
  # Reset the leaf by rebinding a fresh (unchanged) instance of its generated type.
  self.__rx_nosci_pkts = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="rx-nosci-pkts", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
# Public read-only accessors: every leaf in this container is operational
# state (config: false), so only getters are bound via property().
tx_untagged_pkts = __builtin__.property(_get_tx_untagged_pkts)
rx_untagged_pkts = __builtin__.property(_get_rx_untagged_pkts)
rx_badtag_pkts = __builtin__.property(_get_rx_badtag_pkts)
rx_unknownsci_pkts = __builtin__.property(_get_rx_unknownsci_pkts)
rx_nosci_pkts = __builtin__.property(_get_rx_nosci_pkts)

# Ordered registry of this container's child elements, used by pyangbind's
# copy-constructor and serialisation logic.
_pyangbind_elements = OrderedDict([('tx_untagged_pkts', tx_untagged_pkts), ('rx_untagged_pkts', rx_untagged_pkts), ('rx_badtag_pkts', rx_badtag_pkts), ('rx_unknownsci_pkts', rx_unknownsci_pkts), ('rx_nosci_pkts', rx_nosci_pkts), ])
class yc_state_openconfig_macsec__macsec_interfaces_interface_state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Operational state data
  """
  # NOTE: generated container class; leaves are stored in name-mangled slots.
  __slots__ = ('_path_helper', '_extmethods', '__name','__enable','__replay_protection','__counters',)

  _yang_name = 'state'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Instantiate each child leaf/container with its generated YANG type.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=False)
    self.__enable = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    self.__replay_protection = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=False)
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      # Copy-constructor path: a single object with matching attributes may be
      # supplied, and its changed elements are copied into this instance.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Prefer the runtime path of the attached parent; fall back to the
    # schema-defined absolute path when the node is detached.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'state']

  def _get_name(self):
    """
    Getter method for name, mapped from YANG variable /macsec/interfaces/interface/state/name (oc-if:base-interface-ref)

    YANG Description: Reference to the MACsec Ethernet interface
    """
    return self.__name

  def _set_name(self, v, load=False):
    """
    Setter method for name, mapped from YANG variable /macsec/interfaces/interface/state/name (oc-if:base-interface-ref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_name() directly.

    YANG Description: Reference to the MACsec Ethernet interface
    """
    # Unwrap an already-typed value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """name must be of a type compatible with oc-if:base-interface-ref""",
        'defined-type': "oc-if:base-interface-ref",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=False)""",
        })
    self.__name = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_name(self):
    # Reset the leaf to a fresh (unchanged) instance of its generated type.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-if:base-interface-ref', is_config=False)

  def _get_enable(self):
    """
    Getter method for enable, mapped from YANG variable /macsec/interfaces/interface/state/enable (boolean)

    YANG Description: Enable MACsec on an interface
    """
    return self.__enable

  def _set_enable(self, v, load=False):
    """
    Setter method for enable, mapped from YANG variable /macsec/interfaces/interface/state/enable (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_enable is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_enable() directly.

    YANG Description: Enable MACsec on an interface
    """
    # Unwrap an already-typed value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """enable must be of a type compatible with boolean""",
        'defined-type': "boolean",
        'generated-type': """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)""",
        })
    self.__enable = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_enable(self):
    # Reset the leaf to its YANG default (false).
    self.__enable = YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='boolean', is_config=False)

  def _get_replay_protection(self):
    """
    Getter method for replay_protection, mapped from YANG variable /macsec/interfaces/interface/state/replay_protection (uint16)

    YANG Description: MACsec window size, as defined by the number of out-of-order frames
    that are accepted. A value of 0 means that frames are accepted only in
    the correct order.
    """
    return self.__replay_protection

  def _set_replay_protection(self, v, load=False):
    """
    Setter method for replay_protection, mapped from YANG variable /macsec/interfaces/interface/state/replay_protection (uint16)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_replay_protection is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_replay_protection() directly.

    YANG Description: MACsec window size, as defined by the number of out-of-order frames
    that are accepted. A value of 0 means that frames are accepted only in
    the correct order.
    """
    # Unwrap an already-typed value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """replay_protection must be of a type compatible with uint16""",
        'defined-type': "uint16",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=False)""",
        })
    self.__replay_protection = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_replay_protection(self):
    # Reset the leaf to its YANG default (0 = in-order only).
    self.__replay_protection = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16)(0), is_leaf=True, yang_name="replay-protection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='uint16', is_config=False)

  def _get_counters(self):
    """
    Getter method for counters, mapped from YANG variable /macsec/interfaces/interface/state/counters (container)

    YANG Description: MACsec interface counters
    """
    return self.__counters

  def _set_counters(self, v, load=False):
    """
    Setter method for counters, mapped from YANG variable /macsec/interfaces/interface/state/counters (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_counters is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_counters() directly.

    YANG Description: MACsec interface counters
    """
    # Unwrap an already-typed value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_counters_openconfig_macsec__macsec_interfaces_interface_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """counters must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)""",
        })
    self.__counters = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_counters(self):
    # Reset the child container to a fresh (unchanged) instance.
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

  # Public read-only accessors (all state, config: false).
  name = __builtin__.property(_get_name)
  enable = __builtin__.property(_get_enable)
  replay_protection = __builtin__.property(_get_replay_protection)
  counters = __builtin__.property(_get_counters)

  # Ordered registry of child elements, used by pyangbind's copy-constructor
  # and serialisation logic.
  _pyangbind_elements = OrderedDict([('name', name), ('enable', enable), ('replay_protection', replay_protection), ('counters', counters), ])
class yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state_counters(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-tx/scsa-tx/state/counters. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Counters container for macsec-scsa-tx-interface-stats
  """
  # NOTE: generated container holding per-SC/SA TX packet counters (state only).
  __slots__ = ('_path_helper', '_extmethods', '__sc_auth_only','__sc_encrypted','__sa_auth_only','__sa_encrypted',)

  _yang_name = 'counters'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
  self._path_helper = False
  self._extmethods = False
  # Instantiate each counter64 leaf with its generated YANG type.
  self.__sc_auth_only = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-auth-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
  self.__sc_encrypted = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-encrypted", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
  self.__sa_auth_only = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-auth-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
  self.__sa_encrypted = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-encrypted", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)

  load = kwargs.pop("load", None)
  if args:
    # Copy-constructor path: a single object with matching attributes may be
    # supplied, and its changed elements are copied into this instance.
    if len(args) > 1:
      raise TypeError("cannot create a YANG container with >1 argument")
    all_attr = True
    for e in self._pyangbind_elements:
      if not hasattr(args[0], e):
        all_attr = False
        break
    if not all_attr:
      raise ValueError("Supplied object did not have the correct attributes")
    for e in self._pyangbind_elements:
      nobj = getattr(args[0], e)
      if nobj._changed() is False:
        continue
      setmethod = getattr(self, "_set_%s" % e)
      if load is None:
        setmethod(getattr(args[0], e))
      else:
        setmethod(getattr(args[0], e), load=load)
def _path(self):
  """Return the effective YANG path of this container as a list of segments."""
  # Detached nodes fall back to the schema-defined absolute path; attached
  # nodes extend their parent's runtime path.
  if not hasattr(self, "_parent"):
    return ['macsec', 'interfaces', 'interface', 'scsa-tx', 'scsa-tx', 'state', 'counters']
  return self._parent._path() + [self._yang_name]
def _get_sc_auth_only(self):
  """
  Getter method for sc_auth_only, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sc_auth_only (oc-yang:counter64)

  YANG Description: Secure Channel Authenticated only TX Packets counter.
  This counter reflects the number of authenticated only transmitted
  packets in a secure channel.
  """
  # Returns the YANGDynClass-wrapped counter64 leaf (read-only state).
  return self.__sc_auth_only
def _set_sc_auth_only(self, v, load=False):
  """
  Setter for sc_auth_only, mapped from the YANG variable
  /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sc_auth_only (oc-yang:counter64).

  This leaf is operational state (config: false), so the setter is treated as
  private; backends populate it by calling thisObj._set_sc_auth_only()
  directly.

  YANG Description: Secure Channel Authenticated only TX Packets counter.
  This counter reflects the number of authenticated only transmitted
  packets in a secure channel.
  """
  # Unwrap an already-typed value to its underlying native type so it is
  # re-validated against this leaf's restrictions below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    wrapped = YANGDynClass(
      v,
      base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64),
      is_leaf=True,
      yang_name="sc-auth-only",
      parent=self,
      path_helper=self._path_helper,
      extmethods=self._extmethods,
      register_paths=True,
      namespace='http://openconfig.net/yang/macsec',
      defining_module='openconfig-macsec',
      yang_type='oc-yang:counter64',
      is_config=False,
    )
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """sc_auth_only must be of a type compatible with oc-yang:counter64""",
      'defined-type': "oc-yang:counter64",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-auth-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
    })
  else:
    # Commit the validated value and notify any registered change hook.
    self.__sc_auth_only = wrapped
    if hasattr(self, '_set'):
      self._set()
def _unset_sc_auth_only(self):
  # Reset the leaf by rebinding a fresh (unchanged) instance of its generated type.
  self.__sc_auth_only = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-auth-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sc_encrypted(self):
  """
  Getter method for sc_encrypted, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sc_encrypted (oc-yang:counter64)

  YANG Description: Secure Channel Encrypted TX Packets counter.
  This counter reflects the number of encrypted and authenticated
  transmitted packets in a secure channel.
  """
  # Returns the YANGDynClass-wrapped counter64 leaf (read-only state).
  return self.__sc_encrypted
def _set_sc_encrypted(self, v, load=False):
  """
  Setter for sc_encrypted, mapped from the YANG variable
  /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sc_encrypted (oc-yang:counter64).

  This leaf is operational state (config: false), so the setter is treated as
  private; backends populate it by calling thisObj._set_sc_encrypted()
  directly.

  YANG Description: Secure Channel Encrypted TX Packets counter.
  This counter reflects the number of encrypted and authenticated
  transmitted packets in a secure channel.
  """
  # Unwrap an already-typed value to its underlying native type so it is
  # re-validated against this leaf's restrictions below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    wrapped = YANGDynClass(
      v,
      base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64),
      is_leaf=True,
      yang_name="sc-encrypted",
      parent=self,
      path_helper=self._path_helper,
      extmethods=self._extmethods,
      register_paths=True,
      namespace='http://openconfig.net/yang/macsec',
      defining_module='openconfig-macsec',
      yang_type='oc-yang:counter64',
      is_config=False,
    )
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """sc_encrypted must be of a type compatible with oc-yang:counter64""",
      'defined-type': "oc-yang:counter64",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-encrypted", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
    })
  else:
    # Commit the validated value and notify any registered change hook.
    self.__sc_encrypted = wrapped
    if hasattr(self, '_set'):
      self._set()
def _unset_sc_encrypted(self):
  # Reset the leaf by rebinding a fresh (unchanged) instance of its generated type.
  self.__sc_encrypted = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-encrypted", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sa_auth_only(self):
  """
  Getter method for sa_auth_only, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sa_auth_only (oc-yang:counter64)

  YANG Description: Secure Association Authenticated only TX Packets counter.
  This counter reflects the number of authenticated only, transmitted
  packets in a secure association.
  """
  # Returns the YANGDynClass-wrapped counter64 leaf (read-only state).
  return self.__sa_auth_only
def _set_sa_auth_only(self, v, load=False):
  """
  Setter for sa_auth_only, mapped from the YANG variable
  /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sa_auth_only (oc-yang:counter64).

  This leaf is operational state (config: false), so the setter is treated as
  private; backends populate it by calling thisObj._set_sa_auth_only()
  directly.

  YANG Description: Secure Association Authenticated only TX Packets counter.
  This counter reflects the number of authenticated only, transmitted
  packets in a secure association.
  """
  # Unwrap an already-typed value to its underlying native type so it is
  # re-validated against this leaf's restrictions below.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    wrapped = YANGDynClass(
      v,
      base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64),
      is_leaf=True,
      yang_name="sa-auth-only",
      parent=self,
      path_helper=self._path_helper,
      extmethods=self._extmethods,
      register_paths=True,
      namespace='http://openconfig.net/yang/macsec',
      defining_module='openconfig-macsec',
      yang_type='oc-yang:counter64',
      is_config=False,
    )
  except (TypeError, ValueError):
    raise ValueError({
      'error-string': """sa_auth_only must be of a type compatible with oc-yang:counter64""",
      'defined-type': "oc-yang:counter64",
      'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-auth-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
    })
  else:
    # Commit the validated value and notify any registered change hook.
    self.__sa_auth_only = wrapped
    if hasattr(self, '_set'):
      self._set()
def _unset_sa_auth_only(self):
  # Reset the leaf by rebinding a fresh (unchanged) instance of its generated type.
  self.__sa_auth_only = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-auth-only", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sa_encrypted(self):
  """
  Getter method for sa_encrypted, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sa_encrypted (oc-yang:counter64)

  YANG Description: Secure Association Encrypted TX Packets counter.
  This counter reflects the number of encrypted and authenticated
  transmitted packets in a secure association.
  """
  # Returns the YANGDynClass-wrapped counter64 leaf (read-only state).
  return self.__sa_encrypted
  def _set_sa_encrypted(self, v, load=False):
    """
    Setter method for sa_encrypted, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters/sa_encrypted (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sa_encrypted is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sa_encrypted() directly.
    YANG Description: Secure Association Encrypted TX Packets counter.
    This counter reflects the number of encrypted and authenticated
    transmitted packets in a secure association.
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v against the uint64 counter range.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-encrypted", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """sa_encrypted must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-encrypted", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__sa_encrypted = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
def _unset_sa_encrypted(self):
self.__sa_encrypted = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-encrypted", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
sc_auth_only = __builtin__.property(_get_sc_auth_only)
sc_encrypted = __builtin__.property(_get_sc_encrypted)
sa_auth_only = __builtin__.property(_get_sa_auth_only)
sa_encrypted = __builtin__.property(_get_sa_encrypted)
_pyangbind_elements = OrderedDict([('sc_auth_only', sc_auth_only), ('sc_encrypted', sc_encrypted), ('sa_auth_only', sa_auth_only), ('sa_encrypted', sa_encrypted), ])
class yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-tx/scsa-tx/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: State container for macsec-scsa-tx-interface-stats
  """
  # Fixed storage: pyangbind plumbing slots plus one private slot per child.
  __slots__ = ('_path_helper', '_extmethods', '__sci_tx','__counters',)
  _yang_name = 'state'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Path helper / extmethods are disabled until a parent binding wires
    # them in; each child leaf/container starts in its unset default state.
    self._path_helper = False
    self._extmethods = False
    self.__sci_tx = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    load = kwargs.pop("load", None)
    # Copy-constructor protocol: a single positional argument must expose
    # every element in _pyangbind_elements; changed values are copied over.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Delegate to the parent when attached; otherwise return the static
    # schema path of this container.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'scsa-tx', 'scsa-tx', 'state']
  def _get_sci_tx(self):
    """
    Getter method for sci_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/sci_tx (oc-yang:hex-string)
    YANG Description: Secure Channel Identifier.
    Every Transmit Channel is uniquely identified using this field.
    """
    return self.__sci_tx
  def _set_sci_tx(self, v, load=False):
    """
    Setter method for sci_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/sci_tx (oc-yang:hex-string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sci_tx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sci_tx() directly.
    YANG Description: Secure Channel Identifier.
    Every Transmit Channel is uniquely identified using this field.
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v as a 16-character hex-string.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """sci_tx must be of a type compatible with oc-yang:hex-string""",
        'defined-type': "oc-yang:hex-string",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)""",
      })
    self.__sci_tx = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_sci_tx(self):
    # Restore the leaf to its unset default wrapper.
    self.__sci_tx = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
  def _get_counters(self):
    """
    Getter method for counters, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters (container)
    YANG Description: Counters container for macsec-scsa-tx-interface-stats
    """
    return self.__counters
  def _set_counters(self, v, load=False):
    """
    Setter method for counters, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state/counters (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_counters is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_counters() directly.
    YANG Description: Counters container for macsec-scsa-tx-interface-stats
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v as the generated counters container type.
      t = YANGDynClass(v,base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """counters must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)""",
      })
    self.__counters = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_counters(self):
    # Restore the container to its unset default wrapper.
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
  # Read-only properties: this subtree is config false.
  sci_tx = __builtin__.property(_get_sci_tx)
  counters = __builtin__.property(_get_counters)
  _pyangbind_elements = OrderedDict([('sci_tx', sci_tx), ('counters', counters), ])
class yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-tx/scsa-tx. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: TX Secure Channel and Secure Association Statistics
  """
  # Fixed storage: pyangbind plumbing slots plus one private slot per child.
  __slots__ = ('_path_helper', '_extmethods', '__sci_tx','__state',)
  _yang_name = 'scsa-tx'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Path helper / extmethods are disabled until a parent binding wires
    # them in; sci-tx is this list entry's key leaf (leafref).
    self._path_helper = False
    self._extmethods = False
    self.__sci_tx = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    load = kwargs.pop("load", None)
    # Copy-constructor protocol: a single positional argument must expose
    # every element in _pyangbind_elements; changed values are copied over.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Delegate to the parent when attached; otherwise return the static
    # schema path of this list entry.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'scsa-tx', 'scsa-tx']
  def _get_sci_tx(self):
    """
    Getter method for sci_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/sci_tx (leafref)
    YANG Description: TX Secure Channel and Secure Association Statistics
    """
    return self.__sci_tx
  def _set_sci_tx(self, v, load=False):
    """
    Setter method for sci_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/sci_tx (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sci_tx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sci_tx() directly.
    YANG Description: TX Secure Channel and Secure Association Statistics
    """
    # sci-tx is the list key: once this entry lives inside an instantiated
    # list, the key may only change via a load (not a direct set).
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                         " within an instantiated list")
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v as the leafref key value.
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """sci_tx must be of a type compatible with leafref""",
        'defined-type': "leafref",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)""",
      })
    self.__sci_tx = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_sci_tx(self):
    # Restore the key leaf to its unset default wrapper.
    self.__sci_tx = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="sci-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state (container)
    YANG Description: State container for macsec-scsa-tx-interface-stats
    """
    return self.__state
  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.
    YANG Description: State container for macsec-scsa-tx-interface-stats
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v as the generated state container type.
      t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """state must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)""",
      })
    self.__state = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_state(self):
    # Restore the container to its unset default wrapper.
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
  # Read-only properties: this subtree is config false.
  sci_tx = __builtin__.property(_get_sci_tx)
  state = __builtin__.property(_get_state)
  _pyangbind_elements = OrderedDict([('sci_tx', sci_tx), ('state', state), ])
class yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-tx. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Enclosing container for transmitted packets for Secure Channel and
  Secure Association
  """
  # Fixed storage: pyangbind plumbing slots plus one private slot per child.
  __slots__ = ('_path_helper', '_extmethods', '__scsa_tx',)
  _yang_name = 'scsa-tx'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Path helper / extmethods are disabled until a parent binding wires
    # them in; the scsa-tx list is keyed by sci-tx.
    self._path_helper = False
    self._extmethods = False
    self.__scsa_tx = YANGDynClass(base=YANGListType("sci_tx",yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx, yang_name="scsa-tx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-tx', extensions=None), is_container='list', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)
    load = kwargs.pop("load", None)
    # Copy-constructor protocol: a single positional argument must expose
    # every element in _pyangbind_elements; changed values are copied over.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Delegate to the parent when attached; otherwise return the static
    # schema path of this container.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'scsa-tx']
  def _get_scsa_tx(self):
    """
    Getter method for scsa_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx (list)
    YANG Description: TX Secure Channel and Secure Association Statistics
    """
    return self.__scsa_tx
  def _set_scsa_tx(self, v, load=False):
    """
    Setter method for scsa_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx/scsa_tx (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_scsa_tx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_scsa_tx() directly.
    YANG Description: TX Secure Channel and Secure Association Statistics
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v as a keyed list of scsa-tx entries.
      t = YANGDynClass(v,base=YANGListType("sci_tx",yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx, yang_name="scsa-tx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-tx', extensions=None), is_container='list', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """scsa_tx must be of a type compatible with list""",
        'defined-type': "list",
        'generated-type': """YANGDynClass(base=YANGListType("sci_tx",yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx, yang_name="scsa-tx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-tx', extensions=None), is_container='list', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)""",
      })
    self.__scsa_tx = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_scsa_tx(self):
    # Restore the list to an empty, unset default wrapper.
    self.__scsa_tx = YANGDynClass(base=YANGListType("sci_tx",yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx_scsa_tx, yang_name="scsa-tx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-tx', extensions=None), is_container='list', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)
  # Read-only property: this subtree is config false.
  scsa_tx = __builtin__.property(_get_scsa_tx)
  _pyangbind_elements = OrderedDict([('scsa_tx', scsa_tx), ])
class yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state_counters(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-rx/scsa-rx/state/counters. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Counters container for macsec-scsa-rx-interface-stats
"""
__slots__ = ('_path_helper', '_extmethods', '__sc_invalid','__sc_valid','__sa_invalid','__sa_valid',)
_yang_name = 'counters'
_yang_namespace = 'http://openconfig.net/yang/macsec'
_pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Path helper / extmethods are disabled until a parent binding wires
    # them in; each RX counter starts as an unset uint64 wrapper.
    self._path_helper = False
    self._extmethods = False
    self.__sc_invalid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    self.__sc_valid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    self.__sa_invalid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    self.__sa_valid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    load = kwargs.pop("load", None)
    # Copy-constructor protocol: a single positional argument must expose
    # every element in _pyangbind_elements; changed values are copied over.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['macsec', 'interfaces', 'interface', 'scsa-rx', 'scsa-rx', 'state', 'counters']
def _get_sc_invalid(self):
"""
Getter method for sc_invalid, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sc_invalid (oc-yang:counter64)
YANG Description: Invalid Secure Channel RX Packets counter.
This counter reflects the number of invalid received packets in a
secure channel.
"""
return self.__sc_invalid
  def _set_sc_invalid(self, v, load=False):
    """
    Setter method for sc_invalid, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sc_invalid (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sc_invalid is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sc_invalid() directly.
    YANG Description: Invalid Secure Channel RX Packets counter.
    This counter reflects the number of invalid received packets in a
    secure channel.
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v against the uint64 counter range.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """sc_invalid must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__sc_invalid = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
def _unset_sc_invalid(self):
self.__sc_invalid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sc_valid(self):
"""
Getter method for sc_valid, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sc_valid (oc-yang:counter64)
YANG Description: Valid Secure Channel RX Packets counter.
This counter reflects the number of valid received packets in a
secure channel.
"""
return self.__sc_valid
  def _set_sc_valid(self, v, load=False):
    """
    Setter method for sc_valid, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sc_valid (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sc_valid is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sc_valid() directly.
    YANG Description: Valid Secure Channel RX Packets counter.
    This counter reflects the number of valid received packets in a
    secure channel.
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v against the uint64 counter range.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """sc_valid must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__sc_valid = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
def _unset_sc_valid(self):
self.__sc_valid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sc-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sa_invalid(self):
"""
Getter method for sa_invalid, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sa_invalid (oc-yang:counter64)
YANG Description: Invalid Secure Association RX Packets counter.
This counter reflects the number of integrity check fails for received
packets in a secure association.
"""
return self.__sa_invalid
  def _set_sa_invalid(self, v, load=False):
    """
    Setter method for sa_invalid, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sa_invalid (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sa_invalid is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sa_invalid() directly.
    YANG Description: Invalid Secure Association RX Packets counter.
    This counter reflects the number of integrity check fails for received
    packets in a secure association.
    """
    # pyangbind-wrapped values carry a _utype hook that converts them
    # back to the underlying native type before re-validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap and validate v against the uint64 counter range.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
        'error-string': """sa_invalid must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__sa_invalid = t
    # Fire the change hook, if the parent binding registered one.
    if hasattr(self, '_set'):
      self._set()
def _unset_sa_invalid(self):
self.__sa_invalid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-invalid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
def _get_sa_valid(self):
"""
Getter method for sa_valid, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sa_valid (oc-yang:counter64)
YANG Description: Secure Association Valid RX Packets counter.
This counter reflects the number of packets in a secure association
that passed integrity check.
"""
return self.__sa_valid
  def _set_sa_valid(self, v, load=False):
    """
    Setter method for sa_valid, mapped from YANG variable
    /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters/sa_valid
    (oc-yang:counter64)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_sa_valid is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sa_valid() directly.

    YANG Description: Secure Association Valid RX Packets counter.
    This counter reflects the number of packets in a secure association
    that passed integrity check.
    """
    # If v is already a pyangbind-wrapped value, unwrap it to its
    # underlying type before re-wrapping it with this leaf's constraints.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      # Surface a structured error describing the expected YANG type.
      raise ValueError({
          'error-string': """sa_valid must be of a type compatible with oc-yang:counter64""",
          'defined-type': "oc-yang:counter64",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
        })

    self.__sa_valid = t
    # Propagate the change notification when the parent supports it.
    if hasattr(self, '_set'):
      self._set()
  def _unset_sa_valid(self):
    # Reset sa-valid to a fresh, default-valued counter64 instance; the
    # constructor arguments mirror those used in __init__ / _set_sa_valid.
    self.__sa_valid = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="sa-valid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
sc_invalid = __builtin__.property(_get_sc_invalid)
sc_valid = __builtin__.property(_get_sc_valid)
sa_invalid = __builtin__.property(_get_sa_invalid)
sa_valid = __builtin__.property(_get_sa_valid)
_pyangbind_elements = OrderedDict([('sc_invalid', sc_invalid), ('sc_valid', sc_valid), ('sa_invalid', sa_invalid), ('sa_valid', sa_valid), ])
class yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-rx/scsa-rx/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: State container for macsec-scsa-rx-interface-stats
  """
  # NOTE: machine-generated binding class. Do not hand-edit the logic;
  # regenerate from the openconfig-macsec YANG model instead.
  __slots__ = ('_path_helper', '_extmethods', '__sci_rx','__counters',)

  _yang_name = 'state'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):

    self._path_helper = False

    self._extmethods = False
    # Child elements, pre-initialised to their default-valued wrappers.
    self.__sci_rx = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose all
    # of this container's elements; changed values are copied across.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent when attached to a tree.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'scsa-rx', 'scsa-rx', 'state']

  def _get_sci_rx(self):
    """
    Getter method for sci_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/sci_rx (oc-yang:hex-string)

    YANG Description: Secure Channel Identifier.
    Every Receive Channel is uniquely identified using this field.
    """
    return self.__sci_rx

  def _set_sci_rx(self, v, load=False):
    """
    Setter method for sci_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/sci_rx (oc-yang:hex-string)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sci_rx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sci_rx() directly.

    YANG Description: Secure Channel Identifier.
    Every Receive Channel is uniquely identified using this field.
    """
    # Unwrap a pyangbind-wrapped value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """sci_rx must be of a type compatible with oc-yang:hex-string""",
          'defined-type': "oc-yang:hex-string",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)""",
        })

    self.__sci_rx = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_sci_rx(self):
    # Reset sci-rx to a fresh default-valued hex-string instance.
    self.__sci_rx = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]*'}), restriction_dict={'length': ['16']}), is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:hex-string', is_config=False)

  def _get_counters(self):
    """
    Getter method for counters, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters (container)

    YANG Description: Counters container for macsec-scsa-rx-interface-stats
    """
    return self.__counters

  def _set_counters(self, v, load=False):
    """
    Setter method for counters, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state/counters (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_counters is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_counters() directly.

    YANG Description: Counters container for macsec-scsa-rx-interface-stats
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """counters must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)""",
        })

    self.__counters = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_counters(self):
    # Reset counters to a fresh, default-initialised child container.
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

  # Read-only accessors (state data) and the ordered element map.
  sci_rx = __builtin__.property(_get_sci_rx)
  counters = __builtin__.property(_get_counters)

  _pyangbind_elements = OrderedDict([('sci_rx', sci_rx), ('counters', counters), ])
class yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-rx/scsa-rx. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: RX Secure Channel and Secure Association Statistics
  """
  # NOTE: machine-generated binding class. Do not hand-edit the logic;
  # regenerate from the openconfig-macsec YANG model instead.
  __slots__ = ('_path_helper', '_extmethods', '__sci_rx','__state',)

  _yang_name = 'scsa-rx'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):

    self._path_helper = False

    self._extmethods = False
    # sci-rx is the list key (is_keyval=True); state is the child container.
    self.__sci_rx = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose all
    # of this container's elements; changed values are copied across.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent when attached to a tree.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'scsa-rx', 'scsa-rx']

  def _get_sci_rx(self):
    """
    Getter method for sci_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/sci_rx (leafref)

    YANG Description: RX Secure Channel and Secure Association Statistics
    """
    return self.__sci_rx

  def _set_sci_rx(self, v, load=False):
    """
    Setter method for sci_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/sci_rx (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_sci_rx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_sci_rx() directly.

    YANG Description: RX Secure Channel and Secure Association Statistics
    """
    # List keys may not be rewritten once the entry lives inside a list;
    # they are only settable during load.
    parent = getattr(self, "_parent", None)
    if parent is not None and load is False:
      raise AttributeError("Cannot set keys directly when" +
                             " within an instantiated list")

    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """sci_rx must be of a type compatible with leafref""",
          'defined-type': "leafref",
          'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)""",
        })

    self.__sci_rx = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_sci_rx(self):
    # Reset the key leaf to a fresh default-valued leafref instance.
    self.__sci_rx = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="sci-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)

  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state (container)

    YANG Description: State container for macsec-scsa-rx-interface-stats
    """
    return self.__state

  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.

    YANG Description: State container for macsec-scsa-rx-interface-stats
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """state must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)""",
        })

    self.__state = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_state(self):
    # Reset state to a fresh, default-initialised child container.
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

  # Read-only accessors (state data) and the ordered element map.
  sci_rx = __builtin__.property(_get_sci_rx)
  state = __builtin__.property(_get_state)

  _pyangbind_elements = OrderedDict([('sci_rx', sci_rx), ('state', state), ])
class yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/scsa-rx. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Enclosing container for received packets for Secure Channel and
  Secure Association
  """
  # NOTE: machine-generated binding class. Do not hand-edit the logic;
  # regenerate from the openconfig-macsec YANG model instead.
  __slots__ = ('_path_helper', '_extmethods', '__scsa_rx',)

  _yang_name = 'scsa-rx'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):

    self._path_helper = False

    self._extmethods = False
    # scsa-rx is a YANG list keyed by sci-rx.
    self.__scsa_rx = YANGDynClass(base=YANGListType("sci_rx",yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx, yang_name="scsa-rx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-rx', extensions=None), is_container='list', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose all
    # of this container's elements; changed values are copied across.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent when attached to a tree.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'scsa-rx']

  def _get_scsa_rx(self):
    """
    Getter method for scsa_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx (list)

    YANG Description: RX Secure Channel and Secure Association Statistics
    """
    return self.__scsa_rx

  def _set_scsa_rx(self, v, load=False):
    """
    Setter method for scsa_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx/scsa_rx (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_scsa_rx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_scsa_rx() directly.

    YANG Description: RX Secure Channel and Secure Association Statistics
    """
    # Unwrap a pyangbind-wrapped value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("sci_rx",yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx, yang_name="scsa-rx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-rx', extensions=None), is_container='list', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """scsa_rx must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("sci_rx",yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx, yang_name="scsa-rx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-rx', extensions=None), is_container='list', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)""",
        })

    self.__scsa_rx = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_scsa_rx(self):
    # Reset the scsa-rx list to a fresh, empty keyed-list instance.
    self.__scsa_rx = YANGDynClass(base=YANGListType("sci_rx",yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx_scsa_rx, yang_name="scsa-rx", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='sci-rx', extensions=None), is_container='list', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=False)

  # Read-only accessor (state data) and the ordered element map.
  scsa_rx = __builtin__.property(_get_scsa_rx)

  _pyangbind_elements = OrderedDict([('scsa_rx', scsa_rx), ])
class yc_config_openconfig_macsec__macsec_interfaces_interface_mka_config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/mka/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Configuration data for MKA interface
  """
  # NOTE: machine-generated binding class. Do not hand-edit the logic;
  # regenerate from the openconfig-macsec YANG model instead.
  __slots__ = ('_path_helper', '_extmethods', '__mka_policy','__key_chain',)

  _yang_name = 'config'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):

    self._path_helper = False

    self._extmethods = False
    # Config (is_config=True) leafrefs into the MKA policy and key-chain lists.
    self.__mka_policy = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    self.__key_chain = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)

    load = kwargs.pop("load", None)
    # Copy-constructor path: a single positional argument must expose all
    # of this container's elements; changed values are copied across.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # Absolute YANG path; delegates to the parent when attached to a tree.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'mka', 'config']

  def _get_mka_policy(self):
    """
    Getter method for mka_policy, mapped from YANG variable /macsec/interfaces/interface/mka/config/mka_policy (leafref)

    YANG Description: Apply MKA policy on the interface
    """
    return self.__mka_policy

  def _set_mka_policy(self, v, load=False):
    """
    Setter method for mka_policy, mapped from YANG variable /macsec/interfaces/interface/mka/config/mka_policy (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mka_policy is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mka_policy() directly.

    YANG Description: Apply MKA policy on the interface
    """
    # Unwrap a pyangbind-wrapped value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """mka_policy must be of a type compatible with leafref""",
          'defined-type': "leafref",
          'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)""",
        })

    self.__mka_policy = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_mka_policy(self):
    # Reset mka-policy to a fresh default-valued leafref instance.
    self.__mka_policy = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)

  def _get_key_chain(self):
    """
    Getter method for key_chain, mapped from YANG variable /macsec/interfaces/interface/mka/config/key_chain (leafref)

    YANG Description: Configure Key Chain name
    """
    return self.__key_chain

  def _set_key_chain(self, v, load=False):
    """
    Setter method for key_chain, mapped from YANG variable /macsec/interfaces/interface/mka/config/key_chain (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_key_chain is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_key_chain() directly.

    YANG Description: Configure Key Chain name
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """key_chain must be of a type compatible with leafref""",
          'defined-type': "leafref",
          'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)""",
        })

    self.__key_chain = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_key_chain(self):
    # Reset key-chain to a fresh default-valued leafref instance.
    self.__key_chain = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)

  # Read-write accessors (config: true leaves) and the ordered element map.
  mka_policy = __builtin__.property(_get_mka_policy, _set_mka_policy)
  key_chain = __builtin__.property(_get_key_chain, _set_key_chain)

  _pyangbind_elements = OrderedDict([('mka_policy', mka_policy), ('key_chain', key_chain), ])
class yc_counters_openconfig_macsec__macsec_interfaces_interface_mka_state_counters(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/mka/state/counters. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: MKA interface counters
"""
__slots__ = ('_path_helper', '_extmethods', '__in_mkpdu','__in_sak_mkpdu','__in_cak_mkpdu','__out_mkpdu','__out_sak_mkpdu','__out_cak_mkpdu',)
_yang_name = 'counters'
_yang_namespace = 'http://openconfig.net/yang/macsec'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__in_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__in_sak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__in_cak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__out_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__out_sak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
self.__out_cak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['macsec', 'interfaces', 'interface', 'mka', 'state', 'counters']
  # NOTE: the three accessor trios below (_get/_set/_unset) follow the standard
  # pyangbind-generated pattern for a config-false uint64 counter leaf; only the
  # leaf name differs between them.
  def _get_in_mkpdu(self):
    """
    Getter method for in_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/in_mkpdu (oc-yang:counter64)

    YANG Description: Validated MKPDU received count
    """
    return self.__in_mkpdu

  def _set_in_mkpdu(self, v, load=False):
    """
    Setter method for in_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/in_mkpdu (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_in_mkpdu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_in_mkpdu() directly.

    YANG Description: Validated MKPDU received count
    """
    # Values carrying a _utype hook are first coerced through it before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap the value with the 64-bit unsigned range restriction; rejects
      # anything outside 0..18446744073709551615.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """in_mkpdu must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__in_mkpdu = t
    # Invoke the optional post-set hook when the class defines one.
    if hasattr(self, '_set'):
      self._set()

  def _unset_in_mkpdu(self):
    # Restore the leaf to a fresh, unset instance of its restricted type.
    self.__in_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)

  def _get_in_sak_mkpdu(self):
    """
    Getter method for in_sak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/in_sak_mkpdu (oc-yang:counter64)

    YANG Description: Validated MKPDU received SAK count
    """
    return self.__in_sak_mkpdu

  def _set_in_sak_mkpdu(self, v, load=False):
    """
    Setter method for in_sak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/in_sak_mkpdu (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_in_sak_mkpdu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_in_sak_mkpdu() directly.

    YANG Description: Validated MKPDU received SAK count
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """in_sak_mkpdu must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__in_sak_mkpdu = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_in_sak_mkpdu(self):
    self.__in_sak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)

  def _get_in_cak_mkpdu(self):
    """
    Getter method for in_cak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/in_cak_mkpdu (oc-yang:counter64)

    YANG Description: Validated MKPDU received CAK count
    """
    return self.__in_cak_mkpdu

  def _set_in_cak_mkpdu(self, v, load=False):
    """
    Setter method for in_cak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/in_cak_mkpdu (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_in_cak_mkpdu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_in_cak_mkpdu() directly.

    YANG Description: Validated MKPDU received CAK count
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """in_cak_mkpdu must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__in_cak_mkpdu = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_in_cak_mkpdu(self):
    self.__in_cak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="in-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
  # NOTE: the three accessor trios below (_get/_set/_unset) follow the standard
  # pyangbind-generated pattern for a config-false uint64 counter leaf; only the
  # leaf name differs between them.
  def _get_out_mkpdu(self):
    """
    Getter method for out_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/out_mkpdu (oc-yang:counter64)

    YANG Description: MKPDU sent count
    """
    return self.__out_mkpdu

  def _set_out_mkpdu(self, v, load=False):
    """
    Setter method for out_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/out_mkpdu (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_out_mkpdu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_out_mkpdu() directly.

    YANG Description: MKPDU sent count
    """
    # Values carrying a _utype hook are first coerced through it before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Re-wrap the value with the 64-bit unsigned range restriction.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """out_mkpdu must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__out_mkpdu = t
    # Invoke the optional post-set hook when the class defines one.
    if hasattr(self, '_set'):
      self._set()

  def _unset_out_mkpdu(self):
    # Restore the leaf to a fresh, unset instance of its restricted type.
    self.__out_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)

  def _get_out_sak_mkpdu(self):
    """
    Getter method for out_sak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/out_sak_mkpdu (oc-yang:counter64)

    YANG Description: MKPDU SAK sent count
    """
    return self.__out_sak_mkpdu

  def _set_out_sak_mkpdu(self, v, load=False):
    """
    Setter method for out_sak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/out_sak_mkpdu (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_out_sak_mkpdu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_out_sak_mkpdu() directly.

    YANG Description: MKPDU SAK sent count
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """out_sak_mkpdu must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__out_sak_mkpdu = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_out_sak_mkpdu(self):
    self.__out_sak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-sak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)

  def _get_out_cak_mkpdu(self):
    """
    Getter method for out_cak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/out_cak_mkpdu (oc-yang:counter64)

    YANG Description: MKPDU CAK sent count
    """
    return self.__out_cak_mkpdu

  def _set_out_cak_mkpdu(self, v, load=False):
    """
    Setter method for out_cak_mkpdu, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters/out_cak_mkpdu (oc-yang:counter64)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_out_cak_mkpdu is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_out_cak_mkpdu() directly.

    YANG Description: MKPDU CAK sent count
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """out_cak_mkpdu must be of a type compatible with oc-yang:counter64""",
        'defined-type': "oc-yang:counter64",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)""",
      })
    self.__out_cak_mkpdu = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_out_cak_mkpdu(self):
    self.__out_cak_mkpdu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="out-cak-mkpdu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='oc-yang:counter64', is_config=False)
in_mkpdu = __builtin__.property(_get_in_mkpdu)
in_sak_mkpdu = __builtin__.property(_get_in_sak_mkpdu)
in_cak_mkpdu = __builtin__.property(_get_in_cak_mkpdu)
out_mkpdu = __builtin__.property(_get_out_mkpdu)
out_sak_mkpdu = __builtin__.property(_get_out_sak_mkpdu)
out_cak_mkpdu = __builtin__.property(_get_out_cak_mkpdu)
_pyangbind_elements = OrderedDict([('in_mkpdu', in_mkpdu), ('in_sak_mkpdu', in_sak_mkpdu), ('in_cak_mkpdu', in_cak_mkpdu), ('out_mkpdu', out_mkpdu), ('out_sak_mkpdu', out_sak_mkpdu), ('out_cak_mkpdu', out_cak_mkpdu), ])
class yc_state_openconfig_macsec__macsec_interfaces_interface_mka_state(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/mka/state. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Operational state data for MKA interface
  """
  # Child elements: two leafrefs (mka-policy, key-chain) and the counters container.
  __slots__ = ('_path_helper', '_extmethods', '__mka_policy','__key_chain','__counters',)

  _yang_name = 'state'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False

    self._extmethods = False
    # Default (unset) instances for each child element.
    self.__mka_policy = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    self.__key_chain = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      # Copy-construct: the single positional argument must expose every
      # element of this container; only changed elements are copied over.
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'mka', 'state']

  def _get_mka_policy(self):
    """
    Getter method for mka_policy, mapped from YANG variable /macsec/interfaces/interface/mka/state/mka_policy (leafref)

    YANG Description: Apply MKA policy on the interface
    """
    return self.__mka_policy

  def _set_mka_policy(self, v, load=False):
    """
    Setter method for mka_policy, mapped from YANG variable /macsec/interfaces/interface/mka/state/mka_policy (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mka_policy is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mka_policy() directly.

    YANG Description: Apply MKA policy on the interface
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """mka_policy must be of a type compatible with leafref""",
        'defined-type': "leafref",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)""",
      })
    self.__mka_policy = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_mka_policy(self):
    self.__mka_policy = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mka-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)

  def _get_key_chain(self):
    """
    Getter method for key_chain, mapped from YANG variable /macsec/interfaces/interface/mka/state/key_chain (leafref)

    YANG Description: Configure Key Chain name
    """
    return self.__key_chain

  def _set_key_chain(self, v, load=False):
    """
    Setter method for key_chain, mapped from YANG variable /macsec/interfaces/interface/mka/state/key_chain (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_key_chain is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_key_chain() directly.

    YANG Description: Configure Key Chain name
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """key_chain must be of a type compatible with leafref""",
        'defined-type': "leafref",
        'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)""",
      })
    self.__key_chain = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_key_chain(self):
    self.__key_chain = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="key-chain", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=False)

  def _get_counters(self):
    """
    Getter method for counters, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters (container)

    YANG Description: MKA interface counters
    """
    return self.__counters

  def _set_counters(self, v, load=False):
    """
    Setter method for counters, mapped from YANG variable /macsec/interfaces/interface/mka/state/counters (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_counters is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_counters() directly.

    YANG Description: MKA interface counters
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_counters_openconfig_macsec__macsec_interfaces_interface_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """counters must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)""",
      })
    self.__counters = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_counters(self):
    self.__counters = YANGDynClass(base=yc_counters_openconfig_macsec__macsec_interfaces_interface_mka_state_counters, is_container='container', yang_name="counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=False)

  # Read-only properties (no setter): all children here are operational state.
  mka_policy = __builtin__.property(_get_mka_policy)
  key_chain = __builtin__.property(_get_key_chain)
  counters = __builtin__.property(_get_counters)

  # Ordered registry of this container's child elements, consumed by PybindBase.
  _pyangbind_elements = OrderedDict([('mka_policy', mka_policy), ('key_chain', key_chain), ('counters', counters), ])
class yc_mka_openconfig_macsec__macsec_interfaces_interface_mka(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface/mka. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Enclosing container for the MKA interface
  """
  # Child elements: the config and state sub-containers of /mka.
  __slots__ = ('_path_helper', '_extmethods', '__config','__state',)

  _yang_name = 'mka'
  _yang_namespace = 'http://openconfig.net/yang/macsec'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    self._path_helper = False

    self._extmethods = False
    # Default (unset) instances for each child container.
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_interfaces_interface_mka_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      # Copy-construct: the single positional argument must expose every
      # element of this container; only changed elements are copied over.
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces', 'interface', 'mka']

  def _get_config(self):
    """
    Getter method for config, mapped from YANG variable /macsec/interfaces/interface/mka/config (container)

    YANG Description: Configuration data for MKA interface
    """
    return self.__config

  def _set_config(self, v, load=False):
    """
    Setter method for config, mapped from YANG variable /macsec/interfaces/interface/mka/config (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config() directly.

    YANG Description: Configuration data for MKA interface
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_config_openconfig_macsec__macsec_interfaces_interface_mka_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_config_openconfig_macsec__macsec_interfaces_interface_mka_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
      })
    self.__config = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_config(self):
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_interfaces_interface_mka_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /macsec/interfaces/interface/mka/state (container)

    YANG Description: Operational state data for MKA interface
    """
    return self.__state

  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /macsec/interfaces/interface/mka/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.

    YANG Description: Operational state data for MKA interface
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_interfaces_interface_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """state must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
      })
    self.__state = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_state(self):
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_mka_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

  # Read/write properties: both sub-containers are settable on this node.
  config = __builtin__.property(_get_config, _set_config)
  state = __builtin__.property(_get_state, _set_state)

  # Ordered registry of this container's child elements, consumed by PybindBase.
  _pyangbind_elements = OrderedDict([('config', config), ('state', state), ])
class yc_interface_openconfig_macsec__macsec_interfaces_interface(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-macsec - based on the path /macsec/interfaces/interface. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: List of interfaces on which MACsec is enabled / available
"""
__slots__ = ('_path_helper', '_extmethods', '__name','__config','__state','__scsa_tx','__scsa_rx','__mka',)
_yang_name = 'interface'
_yang_namespace = 'http://openconfig.net/yang/macsec'
_pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False

    self._extmethods = False
    # Default (unset) instances for each child element of the interface list entry.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_interfaces_interface_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__scsa_tx = YANGDynClass(base=yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx, is_container='container', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__scsa_rx = YANGDynClass(base=yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx, is_container='container', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__mka = YANGDynClass(base=yc_mka_openconfig_macsec__macsec_interfaces_interface_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)

    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      # Copy-construct: the single positional argument must expose every
      # element of this container; only changed elements are copied over.
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['macsec', 'interfaces', 'interface']
def _get_name(self):
"""
Getter method for name, mapped from YANG variable /macsec/interfaces/interface/name (leafref)
YANG Description: Reference to the list key
"""
return self.__name
def _set_name(self, v, load=False):
"""
Setter method for name, mapped from YANG variable /macsec/interfaces/interface/name (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_name() directly.
YANG Description: Reference to the list key
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """name must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)""",
})
self.__name = t
if hasattr(self, '_set'):
self._set()
  def _unset_name(self):
    # Reset the leaf to a freshly built default YANGDynClass wrapper.
    self.__name = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='leafref', is_config=True)
  def _get_config(self):
    """
    Getter method for config, mapped from YANG variable /macsec/interfaces/interface/config (container)
    YANG Description: Configuration data for MACsec on each interface
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__config
  def _set_config(self, v, load=False):
    """
    Setter method for config, mapped from YANG variable /macsec/interfaces/interface/config (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config() directly.
    YANG Description: Configuration data for MACsec on each interface
    """
    # NOTE: `load` is unused here; it is accepted for signature symmetry
    # with the key-leaf setters.
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_config_openconfig_macsec__macsec_interfaces_interface_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """config must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_config_openconfig_macsec__macsec_interfaces_interface_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__config = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_config(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__config = YANGDynClass(base=yc_config_openconfig_macsec__macsec_interfaces_interface_config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_state(self):
    """
    Getter method for state, mapped from YANG variable /macsec/interfaces/interface/state (container)
    YANG Description: Operational state data
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__state
  def _set_state(self, v, load=False):
    """
    Setter method for state, mapped from YANG variable /macsec/interfaces/interface/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.
    YANG Description: Operational state data
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_state_openconfig_macsec__macsec_interfaces_interface_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """state must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__state = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_state(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__state = YANGDynClass(base=yc_state_openconfig_macsec__macsec_interfaces_interface_state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_scsa_tx(self):
    """
    Getter method for scsa_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx (container)
    YANG Description: Enclosing container for transmitted packets for Secure Channel and
    Secure Association
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__scsa_tx
  def _set_scsa_tx(self, v, load=False):
    """
    Setter method for scsa_tx, mapped from YANG variable /macsec/interfaces/interface/scsa_tx (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_scsa_tx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_scsa_tx() directly.
    YANG Description: Enclosing container for transmitted packets for Secure Channel and
    Secure Association
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx, is_container='container', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """scsa_tx must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx, is_container='container', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__scsa_tx = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_scsa_tx(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__scsa_tx = YANGDynClass(base=yc_scsa_tx_openconfig_macsec__macsec_interfaces_interface_scsa_tx, is_container='container', yang_name="scsa-tx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_scsa_rx(self):
    """
    Getter method for scsa_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx (container)
    YANG Description: Enclosing container for received packets for Secure Channel and
    Secure Association
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__scsa_rx
  def _set_scsa_rx(self, v, load=False):
    """
    Setter method for scsa_rx, mapped from YANG variable /macsec/interfaces/interface/scsa_rx (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_scsa_rx is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_scsa_rx() directly.
    YANG Description: Enclosing container for received packets for Secure Channel and
    Secure Association
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx, is_container='container', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """scsa_rx must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx, is_container='container', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__scsa_rx = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_scsa_rx(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__scsa_rx = YANGDynClass(base=yc_scsa_rx_openconfig_macsec__macsec_interfaces_interface_scsa_rx, is_container='container', yang_name="scsa-rx", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_mka(self):
    """
    Getter method for mka, mapped from YANG variable /macsec/interfaces/interface/mka (container)
    YANG Description: Enclosing container for the MKA interface
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__mka
  def _set_mka(self, v, load=False):
    """
    Setter method for mka, mapped from YANG variable /macsec/interfaces/interface/mka (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mka is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mka() directly.
    YANG Description: Enclosing container for the MKA interface
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_mka_openconfig_macsec__macsec_interfaces_interface_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """mka must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_mka_openconfig_macsec__macsec_interfaces_interface_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__mka = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_mka(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__mka = YANGDynClass(base=yc_mka_openconfig_macsec__macsec_interfaces_interface_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
name = __builtin__.property(_get_name, _set_name)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
scsa_tx = __builtin__.property(_get_scsa_tx, _set_scsa_tx)
scsa_rx = __builtin__.property(_get_scsa_rx, _set_scsa_rx)
mka = __builtin__.property(_get_mka, _set_mka)
_pyangbind_elements = OrderedDict([('name', name), ('config', config), ('state', state), ('scsa_tx', scsa_tx), ('scsa_rx', scsa_rx), ('mka', mka), ])
class yc_interfaces_openconfig_macsec__macsec_interfaces(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec/interfaces. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Enclosing container for the MACsec interfaces list
  """
  __slots__ = ('_path_helper', '_extmethods', '__interface',)
  _yang_name = 'interfaces'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Path helper / extension methods are disabled by default in this build.
    self._path_helper = False
    self._extmethods = False
    self.__interface = YANGDynClass(base=YANGListType("name",yc_interface_openconfig_macsec__macsec_interfaces_interface, yang_name="interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
    load = kwargs.pop("load", None)
    # Optional copy-constructor path: a single positional argument exposing
    # the same element attributes seeds this instance's changed elements.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Delegate to the parent when attached; otherwise the schema-rooted path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec', 'interfaces']
  def _get_interface(self):
    """
    Getter method for interface, mapped from YANG variable /macsec/interfaces/interface (list)
    YANG Description: List of interfaces on which MACsec is enabled / available
    """
    # Return the stored YANGDynClass-wrapped keyed list.
    return self.__interface
  def _set_interface(self, v, load=False):
    """
    Setter method for interface, mapped from YANG variable /macsec/interfaces/interface (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_interface is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_interface() directly.
    YANG Description: List of interfaces on which MACsec is enabled / available
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated list type is enforced.
    try:
      t = YANGDynClass(v,base=YANGListType("name",yc_interface_openconfig_macsec__macsec_interfaces_interface, yang_name="interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """interface must be of a type compatible with list""",
        'defined-type': "list",
        'generated-type': """YANGDynClass(base=YANGListType("name",yc_interface_openconfig_macsec__macsec_interfaces_interface, yang_name="interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)""",
        })
    self.__interface = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_interface(self):
    # Reset the list to a freshly built default YANGDynClass wrapper.
    self.__interface = YANGDynClass(base=YANGListType("name",yc_interface_openconfig_macsec__macsec_interfaces_interface, yang_name="interface", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions=None), is_container='list', yang_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='list', is_config=True)
  # Public attribute access routed through the generated accessors.
  interface = __builtin__.property(_get_interface, _set_interface)
  _pyangbind_elements = OrderedDict([('interface', interface), ])
class yc_macsec_openconfig_macsec__macsec(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /macsec. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: The MACsec
  """
  __slots__ = ('_path_helper', '_extmethods', '__mka','__interfaces',)
  _yang_name = 'macsec'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Path helper / extension methods are disabled by default in this build.
    self._path_helper = False
    self._extmethods = False
    self.__mka = YANGDynClass(base=yc_mka_openconfig_macsec__macsec_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    self.__interfaces = YANGDynClass(base=yc_interfaces_openconfig_macsec__macsec_interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    load = kwargs.pop("load", None)
    # Optional copy-constructor path: a single positional argument exposing
    # the same element attributes seeds this instance's changed elements.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Delegate to the parent when attached; otherwise the schema-rooted path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['macsec']
  def _get_mka(self):
    """
    Getter method for mka, mapped from YANG variable /macsec/mka (container)
    YANG Description: The MKA
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__mka
  def _set_mka(self, v, load=False):
    """
    Setter method for mka, mapped from YANG variable /macsec/mka (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mka is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mka() directly.
    YANG Description: The MKA
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_mka_openconfig_macsec__macsec_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """mka must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_mka_openconfig_macsec__macsec_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__mka = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_mka(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__mka = YANGDynClass(base=yc_mka_openconfig_macsec__macsec_mka, is_container='container', yang_name="mka", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  def _get_interfaces(self):
    """
    Getter method for interfaces, mapped from YANG variable /macsec/interfaces (container)
    YANG Description: Enclosing container for the MACsec interfaces list
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__interfaces
  def _set_interfaces(self, v, load=False):
    """
    Setter method for interfaces, mapped from YANG variable /macsec/interfaces (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_interfaces is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_interfaces() directly.
    YANG Description: Enclosing container for the MACsec interfaces list
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_interfaces_openconfig_macsec__macsec_interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """interfaces must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_interfaces_openconfig_macsec__macsec_interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__interfaces = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_interfaces(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__interfaces = YANGDynClass(base=yc_interfaces_openconfig_macsec__macsec_interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  # Public attribute access routed through the generated accessors.
  mka = __builtin__.property(_get_mka, _set_mka)
  interfaces = __builtin__.property(_get_interfaces, _set_interfaces)
  _pyangbind_elements = OrderedDict([('mka', mka), ('interfaces', interfaces), ])
class openconfig_macsec(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec - based on the path /openconfig-macsec. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: This module defines configuration and state data for
  MACsec IEEE Std 802.1AE-2018.
  """
  __slots__ = ('_path_helper', '_extmethods', '__macsec',)
  _yang_name = 'openconfig-macsec'
  _yang_namespace = 'http://openconfig.net/yang/macsec'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    # Path helper / extension methods are disabled by default in this build.
    self._path_helper = False
    self._extmethods = False
    self.__macsec = YANGDynClass(base=yc_macsec_openconfig_macsec__macsec, is_container='container', yang_name="macsec", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    load = kwargs.pop("load", None)
    # Optional copy-constructor path: a single positional argument exposing
    # the same element attributes seeds this instance's changed elements.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # The module root has an empty path when not attached to a parent tree.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return []
  def _get_macsec(self):
    """
    Getter method for macsec, mapped from YANG variable /macsec (container)
    YANG Description: The MACsec
    """
    # Return the stored YANGDynClass-wrapped container.
    return self.__macsec
  def _set_macsec(self, v, load=False):
    """
    Setter method for macsec, mapped from YANG variable /macsec (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_macsec is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_macsec() directly.
    YANG Description: The MACsec
    """
    # Values carrying a union helper type are first collapsed to their
    # native representation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    # Re-wrap through YANGDynClass so the generated container type is enforced.
    try:
      t = YANGDynClass(v,base=yc_macsec_openconfig_macsec__macsec, is_container='container', yang_name="macsec", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
        'error-string': """macsec must be of a type compatible with container""",
        'defined-type': "container",
        'generated-type': """YANGDynClass(base=yc_macsec_openconfig_macsec__macsec, is_container='container', yang_name="macsec", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)""",
        })
    self.__macsec = t
    # Run the optional post-set hook when the object defines one.
    if hasattr(self, '_set'):
      self._set()
  def _unset_macsec(self):
    # Reset the container to a freshly built default YANGDynClass wrapper.
    self.__macsec = YANGDynClass(base=yc_macsec_openconfig_macsec__macsec, is_container='container', yang_name="macsec", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/macsec', defining_module='openconfig-macsec', yang_type='container', is_config=True)
  # Public attribute access routed through the generated accessor.
  macsec = __builtin__.property(_get_macsec, _set_macsec)
  _pyangbind_elements = OrderedDict([('macsec', macsec), ])
class openconfig_macsec_types(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-macsec-types - based on the path /openconfig-macsec-types. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: This module defines types related to the MACsec configuration
  and operational state model.
  """
  # Empty element map: no data-node accessors were generated for this module.
  _pyangbind_elements = {}
|
google/gnxi
|
oc_config_validate/oc_config_validate/models/macsec.py
|
Python
|
apache-2.0
| 391,144
|
"""Module to train sequence model.
Vectorizes training and validation texts into sequences and uses that for
training a sequence model - a sepCNN model. We use sequence model for text
classification when the ratio of number of samples to number of words per
sample for the given dataset is very large (>~15K).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import time
import tensorflow as tf
import numpy as np
import build_model
import load_data
import vectorize_data
import explore_data
# Populated from the parsed CLI arguments in the __main__ block below.
FLAGS = None
# Limit on the number of features. We use the top 20K features.
TOP_K = 20000
def train_sequence_model(data,
                         learning_rate=1e-3,
                         epochs=1000,
                         batch_size=128,
                         blocks=2,
                         filters=64,
                         dropout_rate=0.2,
                         embedding_dim=200,
                         kernel_size=3,
                         pool_size=3):
    """Trains sequence model on the given dataset.

    # Arguments
        data: tuples of training and test texts and labels.
        learning_rate: float, learning rate for training model.
        epochs: int, number of epochs.
        batch_size: int, number of samples per batch.
        blocks: int, number of pairs of sepCNN and pooling blocks in the model.
        filters: int, output dimension of sepCNN layers in the model.
        dropout_rate: float, percentage of input to drop at Dropout layers.
        embedding_dim: int, dimension of the embedding vectors.
        kernel_size: int, length of the convolution window.
        pool_size: int, factor by which to downscale input at MaxPooling layer.

    # Returns
        Tuple of (validation accuracy, validation loss) from the last epoch.

    # Raises
        ValueError: If validation data has label values which were not seen
            in the training data.
    """
    # Get the data.
    (train_texts, train_labels), (val_texts, val_labels) = data

    # Verify that validation labels are in the same range as training labels.
    num_classes = explore_data.get_num_classes(train_labels)
    unexpected_labels = [v for v in val_labels if v not in range(num_classes)]
    if unexpected_labels:  # Pythonic truthiness instead of len(...)
        raise ValueError('Unexpected label values found in the validation set:'
                         ' {unexpected_labels}. Please make sure that the '
                         'labels in the validation set are in the same range '
                         'as training labels.'.format(
                             unexpected_labels=unexpected_labels))

    # Vectorize texts.
    x_train, x_val, word_index = vectorize_data.sequence_vectorize(
        train_texts, val_texts)

    # Number of features will be the embedding input dimension. Add 1 for the
    # reserved index 0.
    num_features = min(len(word_index) + 1, TOP_K)

    # Create model instance.
    model = build_model.sepcnn_model(blocks=blocks,
                                     filters=filters,
                                     kernel_size=kernel_size,
                                     embedding_dim=embedding_dim,
                                     dropout_rate=dropout_rate,
                                     pool_size=pool_size,
                                     input_shape=x_train.shape[1:],
                                     num_classes=num_classes,
                                     num_features=num_features)

    # Compile model with learning parameters. Binary classification uses a
    # sigmoid/binary loss; multi-class uses sparse categorical cross-entropy
    # (labels are integer-encoded, not one-hot).
    if num_classes == 2:
        loss = 'binary_crossentropy'
    else:
        loss = 'sparse_categorical_crossentropy'
    # Use the `learning_rate` keyword: the legacy `lr` alias is deprecated
    # and has been removed in recent Keras releases.
    optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
    model.compile(optimizer=optimizer, loss=loss, metrics=['acc'])

    # Create callback for early stopping on validation loss. If the loss does
    # not decrease in two consecutive tries, stop training.
    callbacks = [tf.keras.callbacks.EarlyStopping(
        monitor='val_loss', patience=2)]

    # Train and validate model.
    history = model.fit(
        x_train,
        train_labels,
        epochs=epochs,
        callbacks=callbacks,
        validation_data=(x_val, val_labels),
        verbose=2,  # Logs once per epoch.
        batch_size=batch_size)

    # Print results.
    history = history.history
    print('Validation accuracy: {acc}, loss: {loss}'.format(
        acc=history['val_acc'][-1], loss=history['val_loss'][-1]))

    # Save model.
    model.save('rotten_tomatoes_sepcnn_model.h5')
    return history['val_acc'][-1], history['val_loss'][-1]
if __name__ == '__main__':
    # parse_known_args tolerates extra, unrecognised CLI flags (e.g. when run
    # under a harness that injects its own arguments).
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', type=str, default='./data',
                        help='input data directory')
    FLAGS, unparsed = parser.parse_known_args()
    # Using the Rotten tomatoes movie reviews dataset to demonstrate
    # training sequence model.
    data = load_data.load_rotten_tomatoes_sentiment_analysis_dataset(
        FLAGS.data_dir)
    train_sequence_model(data)
|
google/eng-edu
|
ml/guides/text_classification/train_sequence_model.py
|
Python
|
apache-2.0
| 5,062
|
try:
from setuptools import setup
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
import sys
# Default test configuration; adjusted below for Python 2.6.
test_suite = "tests"
tests_require = ["mongo-orchestration >= 0.2, < 0.4", "requests >= 2.5.1"]
if sys.version_info[:2] == (2, 6):
    # Need unittest2 to run unittests in Python 2.6
    tests_require.append("unittest2")
    test_suite = "unittest2.collector"
try:
    # Use the README as the long description when it is present.
    with open("README.rst", "r") as fd:
        long_description = fd.read()
except IOError:
    long_description = None  # Install without README.rst
setup(name='hzkgelastic2-doc-manager',
      version='0.2.1.dev0',
      maintainer='mongodb',
      description='Elastic2 plugin for mongo-connector',
      long_description=long_description,
      platforms=['any'],
      author='anna herlihy',
      author_email='mongodb-user@googlegroups.com',
      url='https://github.com/mongodb-labs/hzkgelastic2-doc-manager',
      install_requires=['mongo-connector >= 2.3.0', "elasticsearch>=2.0.0,<3.0.0"],
      packages=["mongo_connector", "mongo_connector.doc_managers"],
      extras_require={'aws': ['boto3 >= 1.4.0', 'requests-aws-sign >= 0.1.1']},
      license="Apache License, Version 2.0",
      classifiers=[
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "License :: OSI Approved :: Apache Software License",
          "Programming Language :: Python :: 2.6",
          "Programming Language :: Python :: 2.7",
          "Programming Language :: Python :: 3.3",
          "Programming Language :: Python :: 3.4",
          "Topic :: Database",
          "Topic :: Software Development :: Libraries :: Python Modules",
          "Operating System :: Unix",
          "Operating System :: MacOS :: MacOS X",
          "Operating System :: Microsoft :: Windows",
          "Operating System :: POSIX"
      ],
      keywords=['mongo-connector', "mongodb", "elastic", "elasticsearch"],
      test_suite=test_suite,
      tests_require=tests_require
      )
|
LaoLiulaoliu/hzkgelastic2-doc-manager
|
setup.py
|
Python
|
apache-2.0
| 2,047
|
# -*- coding: utf-8 -*-
#
# scanpdf documentation build configuration file, created by
# sphinx-quickstart on Wed Oct 23 13:43:29 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import pkg_resources
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules: autodoc pulls API docs from docstrings,
# viewcode links each documented object to its highlighted source.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Scan PDF'
copyright = u'2014, Virantha N. Ekanayake'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''  # overwritten further down from the installed distribution metadata
# Derive the release/version strings from the installed package metadata so
# the documentation can never drift out of sync with setup.py.
try:
    release = pkg_resources.get_distribution('scanpdf').version
except pkg_resources.DistributionNotFound:
    # Parenthesized single-argument print is valid in both Python 2 and
    # Python 3, so this config no longer breaks under a Python 3 Sphinx.
    print('To build the documentation, The distribution information of scanpdf')
    print('Has to be available. Either install the package into your')
    print('development environment or run "setup.py develop" to setup the')
    print('metadata. A virtualenv is recommended!')
    sys.exit(1)
del pkg_resources  # keep the module namespace clean; Sphinx execfile()s this
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# The full version, including alpha/beta/rc tags.
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'scanpdfdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'scanpdf.tex', u'Scan PDF Documentation',
u'Virantha N. Ekanayake', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'scanpdf', u'Scan PDF Documentation',
[u'Author'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'scanpdf', u'Scan PDF Documentation',
u'Author', 'scanpdf', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'scanpdf'
epub_author = u'Author'
epub_publisher = u'Author'
epub_copyright = u'2013, Author'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'scanpdf'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
|
virantha/scanpdf
|
docs/conf.py
|
Python
|
apache-2.0
| 10,696
|
#!/usr/bin/python
# Copyright 2014 Google.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A base class for all codecs using encode-to-file."""
import encoder
import filecmp
import json
import os
import re
import subprocess
class FileCodec(encoder.Codec):
"""Base class for file-using codecs.
Subclasses MUST define:
- EncodeCommandLine
- DecodeCommandLine
- ResultData
"""
def __init__(self, name, formatter=None):
super(FileCodec, self).__init__(name, formatter=formatter)
self.extension = 'must-have-extension'
def _EncodeFile(self, parameters, bitrate, videofile, encodedfile):
commandline = self.EncodeCommandLine(
parameters, bitrate, videofile, encodedfile)
print commandline
with open(os.path.devnull, 'r') as nullinput:
times_start = os.times()
returncode = subprocess.call(commandline, shell=True, stdin=nullinput)
times_end = os.times()
subprocess_cpu = times_end[2] - times_start[2]
elapsed_clock = times_end[4] - times_start[4]
print "Encode took %f CPU seconds %f clock seconds" % (
subprocess_cpu, elapsed_clock)
if returncode:
raise Exception("Encode failed with returncode %d" % returncode)
return (subprocess_cpu, elapsed_clock)
def _DecodeFile(self, videofile, encodedfile, workdir):
tempyuvfile = os.path.join(workdir,
videofile.basename + 'tempyuvfile.yuv')
if os.path.isfile(tempyuvfile):
print "Removing tempfile before decode:", tempyuvfile
os.unlink(tempyuvfile)
commandline = self.DecodeCommandLine(videofile, encodedfile, tempyuvfile)
print commandline
with open(os.path.devnull, 'r') as nullinput:
subprocess_cpu_start = os.times()[2]
returncode = subprocess.call(commandline, shell=True,
stdin=nullinput)
if returncode:
raise Exception('Decode failed with returncode %d' % returncode)
subprocess_cpu = os.times()[2] - subprocess_cpu_start
print "Decode took %f seconds" % subprocess_cpu
commandline = encoder.Tool("psnr") + " %s %s %d %d 9999" % (
videofile.filename, tempyuvfile, videofile.width,
videofile.height)
print commandline
psnr = subprocess.check_output(commandline, shell=True, stdin=nullinput)
commandline = ['md5sum', tempyuvfile]
md5 = subprocess.check_output(commandline, shell=False)
yuv_md5 = md5.split(' ')[0]
os.unlink(tempyuvfile)
return psnr, subprocess_cpu, yuv_md5
def Execute(self, parameters, bitrate, videofile, workdir):
encodedfile = os.path.join(workdir,
'%s.%s' % (videofile.basename, self.extension))
subprocess_cpu, elapsed_clock = self._EncodeFile(parameters, bitrate,
videofile, encodedfile)
result = {}
result['encode_cputime'] = subprocess_cpu
result['encode_clocktime'] = elapsed_clock
result['encoder_version'] = self.EncoderVersion()
bitrate = videofile.MeasuredBitrate(os.path.getsize(encodedfile))
psnr, decode_cputime, yuv_md5 = self._DecodeFile(
videofile, encodedfile, workdir)
result['decode_cputime'] = decode_cputime
result['yuv_md5'] = yuv_md5
print "Bitrate", bitrate, "PSNR", psnr
result['bitrate'] = int(bitrate)
result['psnr'] = float(psnr)
result['cliptime'] = videofile.ClipTime()
result.update(self.ResultData(encodedfile))
return result
# Below are the fallback implementations of the interfaces
# that the subclasses have to implement.
def EncodeCommandLine(self, parameters, bitrate, videofile, encodedfile):
"""This function returns the command line that should be executed
in order to turn an YUV file into an encoded file."""
# pylint: disable=W0613,R0201
raise encoder.Error('EncodeCommandLine not defined')
def DecodeCommandLine(self, videofile, encodedfile, yuvfile):
"""This function returns the command line that should be executed
in order to turn an encoded file into an YUV file."""
# pylint: disable=W0613,R0201
raise encoder.Error('DecodeCommandLine not defined')
def ResultData(self, encodedfile):
"""Returns additional fields that the codec may know how to generate."""
# pylint: disable=W0613,R0201
return {}
def VerifyEncode(self, parameters, bitrate, videofile, workdir):
"""Returns true if a new encode of the file gives exactly the same file."""
old_encoded_file = '%s/%s.%s' % (workdir, videofile.basename,
self.extension)
if not os.path.isfile(old_encoded_file):
raise encoder.Error('Old encoded file missing: %s' % old_encoded_file)
new_encoded_file = '%s/%s_verify.%s' % (workdir, videofile.basename,
self.extension)
self._EncodeFile(parameters, bitrate, videofile,
new_encoded_file)
if not VideoFilesEqual(old_encoded_file, new_encoded_file, self.extension):
# If there is a difference, we leave the new encoded file so that
# they can be compared by hand if desired.
return False
os.unlink(new_encoded_file)
return True
def EncoderVersion(self):
raise encoder.Error('File codecs must define their own version')
# Tools that may be called upon by the codec implementation if needed.
def MatroskaFrameInfo(encodedfile):
# Run the mkvinfo tool across the file to get frame size info.
commandline = 'mkvinfo -v %s' % encodedfile
print commandline
mkvinfo = subprocess.check_output(commandline, shell=True)
frameinfo = []
for line in mkvinfo.splitlines():
match = re.search(r'Frame with size (\d+)', line)
if match:
# The mkvinfo tool gives frame size in bytes. We want bits.
frameinfo.append({'size': int(match.group(1))*8})
return frameinfo
def FfmpegFrameInfo(encodedfile):
  """Extract per-frame sizes (in bits) from a file using ffprobe's JSON output."""
  commandline = '%s -loglevel warning -show_frames -of json %s' % (
      encoder.Tool('ffprobe'), encodedfile)
  probe_output = subprocess.check_output(commandline, shell=True)
  parsed = json.loads(probe_output)
  # Each frame's size is the distance between consecutive packet positions;
  # the final frame runs to the end of the file.
  last_position = 0
  sizes = []
  for frame in parsed['frames']:
    position = int(frame['pkt_pos'])
    if last_position != 0:
      sizes.append({'size': 8 * (position - last_position)})
    last_position = position
  sizes.append({'size': 8 *
                (os.path.getsize(encodedfile) - last_position)})
  return sizes
def VideoFilesEqual(old_encoded_file, new_encoded_file, extension):
  """Return true if two encoded files carry the same video content."""
  if extension != 'webm':
    # For non-Matroska containers a byte-for-byte comparison suffices.
    return filecmp.cmp(old_encoded_file, new_encoded_file)
  # Matroska files contain UIDs that vary even if the video content
  # is the same. So we must use vpxdec --md5 instead.
  checksums = [subprocess.check_output((encoder.Tool('vpxdec'),
                                        '--md5',
                                        filename))
               for filename in (old_encoded_file, new_encoded_file)]
  return checksums[0] == checksums[1]
|
google/compare-codecs
|
lib/file_codec.py
|
Python
|
apache-2.0
| 7,774
|
import pytest
import logging
from dtest import Tester
from tools.data import rows_to_list
since = pytest.mark.since
logger = logging.getLogger(__name__)
@since('3.0')
class TestStressSparsenessRatio(Tester):
    """
    @jira_ticket CASSANDRA-9522

    Tests for the `row-population-ratio` parameter to `cassandra-stress`.
    """

    def test_uniform_ratio(self):
        """
        Tests that the ratio-specifying string 'uniform(5..15)/50' results in
        ~80% of the values written being non-null.
        """
        self.distribution_template(ratio_spec='uniform(5..15)/50',
                                   expected_ratio=.8,
                                   delta=.1)

    def test_fixed_ratio(self):
        """
        Tests that the string 'fixed(1)/3' results in ~1/3 of the values
        written being non-null.
        """
        self.distribution_template(ratio_spec='fixed(1)/3',
                                   expected_ratio=1 - 1 / 3,
                                   delta=.01)

    def distribution_template(self, ratio_spec, expected_ratio, delta):
        """
        @param ratio_spec the string passed to `row-population-ratio` in the call to `cassandra-stress`
        @param expected_ratio the expected ratio of null/non-null values in the values written
        @param delta the acceptable delta between the expected and actual ratios

        A parameterized test for the `row-population-ratio` parameter to
        `cassandra-stress`.
        """
        self.cluster.populate(1).start(wait_for_binary_proto=True)
        node = self.cluster.nodelist()[0]
        node.stress(['write', 'n=1000', 'no-warmup',
                     '-rate', 'threads=50',
                     '-col', 'n=FIXED(50)',
                     '-insert',
                     'row-population-ratio={ratio_spec}'.format(ratio_spec=ratio_spec)])
        session = self.patient_cql_connection(node)
        written = rows_to_list(session.execute('SELECT * FROM keyspace1.standard1;'))
        null_cells = sum(row.count(None) for row in written)
        total_cells = sum(len(row) for row in written)
        observed_ratio = float(null_cells) / total_cells
        assert abs(observed_ratio - expected_ratio) <= delta
|
beobal/cassandra-dtest
|
stress_tool_test.py
|
Python
|
apache-2.0
| 2,137
|
#!/usr/bin/env python
import os
import sys
import sqlite3
import re
import itertools
import collections
import json
import abc
import re
import numpy as np
# gemini imports
import gemini_utils as util
from gemini_constants import *
from gemini_utils import OrderedSet, OrderedDict, itersubclasses, partition
import compression
from sql_utils import ensure_columns, get_select_cols_and_rest
from gemini_subjects import get_subjects
class RowFormat:
    """A row formatter to output rows in a custom format. To provide
    a new output format 'foo', implement the class methods and set the
    name field to foo. This will automatically add support for 'foo' to
    anything accepting the --format option via --format foo.
    """
    # Python 2 style abstract-base-class declaration.
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def name(self):
        # Short identifier exposed through the --format command-line option.
        return

    @abc.abstractmethod
    def format(self, row):
        """ return a string representation of a GeminiRow object
        """
        return '\t'.join([str(row.row[c]) for c in row.row])

    @abc.abstractmethod
    def format_query(self, query):
        """ augment the query with columns necessary for the format or else just
        return the untouched query
        """
        return query

    @abc.abstractmethod
    def predicate(self, row):
        """ the row must pass this additional predicate to be output. Just
        return True if there is no additional predicate"""
        return True

    @abc.abstractmethod
    def header(self, fields):
        """ return a header for the row """
        return "\t".join(fields)
class DefaultRowFormat(RowFormat):
    """Plain tab-delimited output; the fallback when no --format is given."""

    name = "default"

    def __init__(self, args):
        # The default format needs no per-run configuration.
        pass

    def format(self, row):
        columns = row.row
        return '\t'.join([str(columns[c]) for c in columns])

    def format_query(self, query):
        # No extra columns are required for plain output.
        return query

    def predicate(self, row):
        # Every row is printable in the default format.
        return True

    def header(self, fields):
        """ return a header for the row """
        return "\t".join(fields)
class CarrierSummary(RowFormat):
    """
    Generates a count of the carrier/noncarrier status of each feature in a given
    column of the sample table

    Assumes None == unknown.
    """
    name = "carrier_summary"

    def __init__(self, args):
        subjects = get_subjects(args)
        # Name of the sample-table column whose values we summarize.
        self.carrier_summary = args.carrier_summary
        # get the list of all possible values in the column
        # but don't include None, since we are treating that as unknown.
        self.column_types = list(set([getattr(x, self.carrier_summary)
                                      for x in subjects.values()]))
        self.column_types = [i for i in self.column_types if i is not None]
        # Map each column value -> set of sample names carrying that value.
        # The None bucket collects samples with an unknown column value.
        self.column_counters = {None: set()}
        for ct in self.column_types:
            self.column_counters[ct] = set([k for (k, v) in subjects.items() if
                                            getattr(v, self.carrier_summary) == ct])

    def format(self, row):
        # Samples carrying the variant vs. homozygous-reference samples.
        have_variant = set(row.variant_samples)
        have_reference = set(row.HOM_REF_samples)
        # Unknown = unknown genotype for this row OR unknown column value.
        unknown = len(set(row.UNKNOWN_samples).union(self.column_counters[None]))
        carrier_counts = []
        # Carrier counts first, one per column value in self.column_types order...
        for ct in self.column_types:
            counts = len(self.column_counters[ct].intersection(have_variant))
            carrier_counts.append(counts)
        # ...then non-carrier counts in the same order, then the unknown count.
        for ct in self.column_types:
            counts = len(self.column_counters[ct].intersection(have_reference))
            carrier_counts.append(counts)
        carrier_counts.append(unknown)
        # NOTE(review): Python 2 map() returns a list here; under Python 3 this
        # would need list(map(...)) for the concatenation below -- confirm on port.
        carrier_counts = map(str, carrier_counts)
        return '\t'.join([str(row.row[c]) for c in row.row] + carrier_counts)

    def format_query(self, query):
        return query

    def predicate(self, row):
        return True

    def header(self, fields):
        """ return a header for the row """
        header_columns = self.column_types
        if self.carrier_summary == "affected":
            # Use readable labels instead of True/False for the affected column.
            header_columns = self._rename_affected()
        carriers = [x + "_carrier" for x in map(str, header_columns)]
        noncarriers = [ x + "_noncarrier" for x in map(str, header_columns)]
        fields += carriers
        fields += noncarriers
        fields += ["unknown"]
        return "\t".join(fields)

    def _rename_affected(self):
        # affected is stored as True/False; rename for readable column headers.
        header_columns = []
        for ct in self.column_types:
            if ct == True:
                header_columns.append("affected")
            elif ct == False:
                header_columns.append("unaffected")
        return header_columns
class TPEDRowFormat(RowFormat):
    """Outputs rows in PLINK transposed-PED (TPED) format.

    Every sample is emitted with exactly two alleles; _fix_genotype()
    normalizes haploid and missing calls so that invariant holds.
    """

    # Pseudo-autosomal regions where X/Y genotypes are legitimately diploid.
    X_PAR_REGIONS = [(60001, 2699520), (154931044, 155260560)]
    Y_PAR_REGIONS = [(10001, 2649520), (59034050, 59363566)]
    name = "tped"
    NULL_GENOTYPES = ["."]
    # TPED encodes a missing genotype as two '0' alleles.
    PED_MISSING = ["0", "0"]
    # List comprehension instead of map(): identical value on Python 2, and
    # still a (reusable) list rather than an iterator on Python 3.
    VALID_CHROMOSOMES = [str(c) for c in range(1, 23)] + ["X", "Y", "XY", "MT"]
    POSSIBLE_HAPLOID = ["X", "Y"]

    def __init__(self, args):
        gq = GeminiQuery(args.db)
        subjects = get_subjects(args)
        # get samples in order of genotypes
        self.samples = [gq.idx_to_sample_object[x] for x in range(len(subjects))]

    def format(self, row):
        # Use the class-level table rather than rebuilding (and shadowing)
        # it on every call, as the original code did.
        chrom = row['chrom'].split("chr")[1]
        chrom = chrom if chrom in self.VALID_CHROMOSOMES else "0"
        start = str(row.row['start'])
        end = str(row.row['end'])
        geno = [re.split(r'\||/', x) for x in row.row['gts'].split(",")]
        geno = [self._fix_genotype(chrom, start, genotype, self.samples[i].sex)
                for i, genotype in enumerate(geno)]
        genotypes = " ".join(list(flatten(geno)))
        alleles = "|".join(set(list(flatten(geno))).difference("0"))
        name = chrom + ":" + start + "-" + end + ":" + alleles + ":" + str(row['variant_id'])
        return " ".join([chrom, name, "0", start, genotypes])

    def format_query(self, query):
        NEED_COLUMNS = ["chrom", "rs_ids", "start", "gts", "type", "variant_id"]
        return ensure_columns(query, NEED_COLUMNS)

    def predicate(self, row):
        # Only bi-allelic, non-SV sites with at least one called allele
        # are representable in TPED.
        geno = [re.split(r"\||/", x) for x in row['gts']]
        geno = list(flatten(geno))
        num_alleles = len(set(geno).difference(self.NULL_GENOTYPES))
        return num_alleles > 0 and num_alleles <= 2 and row['type'] != "sv"

    def _is_haploid(self, genotype):
        return len(genotype) < 2

    def _has_missing(self, genotype):
        return any([allele in self.NULL_GENOTYPES for allele in genotype])

    def _is_heterozygote(self, genotype):
        return len(genotype) == 2 and (genotype[0] != genotype[1])

    def _in_PAR(self, chrom, start):
        # True if the coordinate falls in a pseudo-autosomal region.
        if chrom == "X":
            for region in self.X_PAR_REGIONS:
                if start > region[0] and start < region[1]:
                    return True
        elif chrom == "Y":
            for region in self.Y_PAR_REGIONS:
                if start > region[0] and start < region[1]:
                    return True
        return False

    def _fix_genotype(self, chrom, start, genotype, sex):
        """
        the TPED format has to have both alleles set, even if it is haploid.
        this fixes that setting Y calls on the female to missing,
        heterozygotic calls on the male non PAR regions to missing and haploid
        calls on non-PAR regions to be the haploid call for both alleles
        """
        if sex == "2":
            # set female Y calls and haploid calls to missing
            if self._is_haploid(genotype) or chrom == "Y" or self._has_missing(genotype):
                return self.PED_MISSING
            return genotype
        if chrom in self.POSSIBLE_HAPLOID and sex == "1":
            # remove the missing genotype calls
            genotype = [x for x in genotype if x not in self.NULL_GENOTYPES]
            # if all genotypes are missing skip
            if not genotype:
                return self.PED_MISSING
            # heterozygote males in non PAR regions are a mistake
            if self._is_heterozygote(genotype) and not self._in_PAR(chrom, start):
                return self.PED_MISSING
            # set haploid males to be homozygous for the allele
            if self._is_haploid(genotype):
                return [genotype[0], genotype[0]]
        # if a genotype is missing or is haploid set it to missing
        if self._has_missing(genotype) or self._is_haploid(genotype):
            return self.PED_MISSING
        else:
            return genotype

    def header(self, fields):
        # TPED output has no header line.
        return None
class JSONRowFormat(RowFormat):
    """Emits each result row as a JSON object."""

    name = "json"

    def __init__(self, args):
        # No per-run configuration is needed for JSON output.
        pass

    def format(self, row):
        """Emit a JSON representation of a given row
        """
        return json.dumps(row.row)

    def format_query(self, query):
        # JSON output requires no additional columns.
        return query

    def predicate(self, row):
        # Every row is representable as JSON.
        return True

    def header(self, fields):
        # JSON objects are self-describing; no header line is printed.
        return None
class GeminiRow(object):
    """One query result row plus its unpacked per-sample genotype data.

    Wraps the raw database row (`row`) together with the genotype arrays and
    the sample-name lists for each genotype class. Column access is dict-like
    via __getitem__; names listed in gt_cols are served from attributes,
    everything else from the underlying row.
    """

    def __init__(self, row, gts=None, gt_types=None,
                 gt_phases=None, gt_depths=None,
                 gt_ref_depths=None, gt_alt_depths=None,
                 gt_quals=None, variant_samples=None,
                 HET_samples=None, HOM_ALT_samples=None,
                 HOM_REF_samples=None, UNKNOWN_samples=None,
                 info=None, formatter=None):
        # Fix: the original default `formatter=DefaultRowFormat(None)` built a
        # single shared instance at class-definition time; construct the
        # default per instance instead (same observable behavior for callers).
        if formatter is None:
            formatter = DefaultRowFormat(None)
        self.row = row
        self.gts = gts
        self.info = info
        self.gt_types = gt_types
        self.gt_phases = gt_phases
        self.gt_depths = gt_depths
        self.gt_ref_depths = gt_ref_depths
        self.gt_alt_depths = gt_alt_depths
        self.gt_quals = gt_quals
        # Column names served from attributes rather than from self.row.
        self.gt_cols = ['gts', 'gt_types', 'gt_phases',
                        'gt_depths', 'gt_ref_depths', 'gt_alt_depths',
                        'gt_quals', "variant_samples", "HET_samples",
                        "HOM_ALT_samples", "HOM_REF_samples"]
        self.formatter = formatter
        self.variant_samples = variant_samples
        self.HET_samples = HET_samples
        self.HOM_ALT_samples = HOM_ALT_samples
        self.HOM_REF_samples = HOM_REF_samples
        self.UNKNOWN_samples = UNKNOWN_samples

    def __getitem__(self, val):
        """Dict-style column access; genotype columns come from attributes."""
        if val not in self.gt_cols:
            return self.row[val]
        else:
            return getattr(self, val)

    def __iter__(self):
        return self

    def __repr__(self):
        # Delegate string rendering to the configured output formatter.
        return self.formatter.format(self)

    def next(self):
        """Python 2 iterator protocol: yield the row's column names."""
        try:
            return self.row.keys()
        # Fix: was a bare `except:`; narrowed so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        except Exception:
            raise StopIteration
class GeminiQuery(object):
"""
An interface to submit queries to an existing Gemini database
and iterate over the results of the query.
We create a GeminiQuery object by specifying database to which to
connect::
from gemini import GeminiQuery
gq = GeminiQuery("my.db")
We can then issue a query against the database and iterate through
the results by using the ``run()`` method::
for row in gq:
print row
Instead of printing the entire row, one access print specific columns::
gq.run("select chrom, start, end from variants")
for row in gq:
print row['chrom']
Also, all of the underlying numpy genotype arrays are
always available::
gq.run("select chrom, start, end from variants")
for row in gq:
gts = row.gts
print row['chrom'], gts
# yields "chr1" ['A/G' 'G/G' ... 'A/G']
The ``run()`` methods also accepts genotype filter::
query = "select chrom, start, end" from variants"
gt_filter = "gt_types.NA20814 == HET"
gq.run(query)
for row in gq:
print row
Lastly, one can use the ``sample_to_idx`` and ``idx_to_sample``
dictionaries to gain access to sample-level genotype information
either by sample name or by sample index::
# grab dict mapping sample to genotype array indices
smp2idx = gq.sample_to_idx
query = "select chrom, start, end from variants"
gt_filter = "gt_types.NA20814 == HET"
gq.run(query, gt_filter)
# print a header listing the selected columns
print gq.header
for row in gq:
# access a NUMPY array of the sample genotypes.
gts = row['gts']
# use the smp2idx dict to access sample genotypes
idx = smp2idx['NA20814']
print row, gts[idx]
"""
    def __init__(self, db, include_gt_cols=False,
                 out_format=DefaultRowFormat(None)):
        # NOTE(review): the default formatter instance is created once, when
        # the class is defined, and shared by every GeminiQuery built without
        # an explicit out_format; it appears stateless, so this looks safe --
        # confirm before adding state to DefaultRowFormat.
        assert os.path.exists(db), "%s does not exist." % db
        self.db = db
        self.query_executed = False
        self.for_browser = False
        self.include_gt_cols = include_gt_cols
        # try to connect to the provided database
        self._connect_to_database()
        # extract the column names from the sample table.
        # needed for gt-filter wildcard support.
        self._collect_sample_table_columns()
        # list of samples ids for each clause in the --gt-filter
        self.sample_info = collections.defaultdict(list)
        # map sample names to indices. e.g. self.sample_to_idx[NA20814] -> 323
        self.sample_to_idx = util.map_samples_to_indices(self.c)
        # and vice versa. e.g., self.idx_to_sample[323] -> NA20814
        self.idx_to_sample = util.map_indices_to_samples(self.c)
        self.idx_to_sample_object = util.map_indices_to_sample_objects(self.c)
        self.formatter = out_format
        # Output predicates; every row must satisfy all of them to be emitted.
        self.predicates = [self.formatter.predicate]
    def _set_gemini_browser(self, for_browser):
        # When True, downstream output is tailored for the built-in browser UI.
        self.for_browser = for_browser
    def run(self, query, gt_filter=None, show_variant_samples=False,
            variant_samples_delim=',', predicates=None,
            needs_genotypes=False, needs_genes=False,
            show_families=False):
        """
        Execute a query against a Gemini database. The user may
        specify:

            1. (reqd.) an SQL `query`.
            2. (opt.) a genotype filter.
        """
        # Let the output formatter add any columns it requires.
        self.query = self.formatter.format_query(query)
        self.gt_filter = gt_filter
        if self._is_gt_filter_safe() is False:
            sys.exit("ERROR: invalid --gt-filter command.")
        self.show_variant_samples = show_variant_samples
        self.variant_samples_delim = variant_samples_delim
        self.needs_genotypes = needs_genotypes
        self.needs_genes = needs_genes
        self.show_families = show_families
        if predicates:
            self.predicates += predicates
        # make sure the SELECT columns are separated by a
        # comma and a space. then tokenize by spaces.
        self.query = self.query.replace(',', ', ')
        self.query_pieces = self.query.split()
        # Classify the query by whether it selects genotype columns and/or
        # filters on genotypes; the classification drives how results are
        # unpacked later.
        if not any(s.startswith("gt") for s in self.query_pieces) and \
           not any(s.startswith("(gt") for s in self.query_pieces) and \
           not any(".gt" in s for s in self.query_pieces):
            if self.gt_filter is None:
                self.query_type = "no-genotypes"
            else:
                self.gt_filter = self._correct_genotype_filter()
                self.query_type = "filter-genotypes"
        else:
            if self.gt_filter is None:
                self.query_type = "select-genotypes"
            else:
                self.gt_filter = self._correct_genotype_filter()
                self.query_type = "filter-genotypes"
        self._apply_query()
        self.query_executed = True
    def __iter__(self):
        # The query object is its own iterator (see next()).
        return self
    @property
    def header(self):
        """
        Return a header describing the columns that
        were selected in the query issued to a GeminiQuery object.
        """
        if self.query_type == "no-genotypes":
            h = [col for col in self.all_query_cols]
        else:
            # Selected columns plus any genotype columns requested beyond
            # the original SELECT list.
            h = [col for col in self.all_query_cols] + \
                [col for col in OrderedSet(self.all_columns_orig)
                 - OrderedSet(self.select_columns)]
        if self.show_variant_samples:
            h += ["variant_samples", "HET_samples", "HOM_ALT_samples"]
        if self.show_families:
            h += ["families"]
        # Let the output formatter render the header in its own style.
        return self.formatter.header(h)
    @property
    def sample2index(self):
        """
        Return a dictionary mapping sample names to
        genotype array offsets::

            gq = GeminiQuery("my.db")
            s2i = gq.sample2index
            print s2i['NA20814']
            # yields 1088
        """
        # Thin alias for the mapping built in __init__.
        return self.sample_to_idx
    @property
    def index2sample(self):
        """
        Return a dictionary mapping genotype array offsets to
        sample names (the inverse of sample2index)::

            gq = GeminiQuery("my.db")
            i2s = gq.index2sample
            print i2s[1088]
            # yields "NA20814"
        """
        return self.idx_to_sample
    def next(self):
        """
        Return the GeminiRow object for the next query result.

        (Python 2 iterator protocol; see __iter__.)
        """
        # we use a while loop since we may skip records based upon
        # genotype filters. if we need to skip a record, we just
        # throw a continue and keep trying. the alternative is to just
        # recursively call self.next() if we need to skip, but this
        # can quickly exceed the stack.
        while (1):
            try:
                row = self.c.next()
            except Exception as e:
                # NOTE(review): any cursor error -- not just exhaustion --
                # is treated as end-of-results and closes the connection.
                self.conn.close()
                raise StopIteration
            gts = None
            gt_types = None
            gt_phases = None
            gt_depths = None
            gt_ref_depths = None
            gt_alt_depths = None
            gt_quals = None
            variant_names = []
            het_names = []
            hom_alt_names = []
            hom_ref_names = []
            unknown_names = []
            info = None
            if 'info' in self.report_cols:
                info = compression.unpack_ordereddict_blob(row['info'])
            # Unpack the compressed per-sample genotype arrays only when the
            # query (or a genotype filter) actually needs them.
            if self._query_needs_genotype_info():
                gts = compression.unpack_genotype_blob(row['gts'])
                gt_types = \
                    compression.unpack_genotype_blob(row['gt_types'])
                gt_phases = \
                    compression.unpack_genotype_blob(row['gt_phases'])
                gt_depths = \
                    compression.unpack_genotype_blob(row['gt_depths'])
                gt_ref_depths = \
                    compression.unpack_genotype_blob(row['gt_ref_depths'])
                gt_alt_depths = \
                    compression.unpack_genotype_blob(row['gt_alt_depths'])
                gt_quals = \
                    compression.unpack_genotype_blob(row['gt_quals'])
                # Partition the samples by genotype class for reporting.
                variant_samples = [x for x, y in enumerate(gt_types) if y == HET or
                                   y == HOM_ALT]
                variant_names = [self.idx_to_sample[x] for x in variant_samples]
                het_samples = [x for x, y in enumerate(gt_types) if y == HET]
                het_names = [self.idx_to_sample[x] for x in het_samples]
                hom_alt_samples = [x for x, y in enumerate(gt_types) if y == HOM_ALT]
                hom_alt_names = [self.idx_to_sample[x] for x in hom_alt_samples]
                hom_ref_samples = [x for x, y in enumerate(gt_types) if y == HOM_REF]
                hom_ref_names = [self.idx_to_sample[x] for x in hom_ref_samples]
                unknown_samples = [x for x, y in enumerate(gt_types) if y == UNKNOWN]
                unknown_names = [self.idx_to_sample[x] for x in unknown_samples]
                families = map(str, list(set([self.idx_to_sample_object[x].family_id
                                              for x in variant_samples])))
                # skip the record if it does not meet the user's genotype filter.
                # the corrected filter references the local gt_* arrays (and
                # self.sample_info) by name, hence eval() over locals().
                if self.gt_filter and not eval(self.gt_filter, locals()):
                    continue
            fields = OrderedDict()
            for idx, col in enumerate(self.report_cols):
                if col == "*":
                    continue
                if not col.startswith("gt") and not col.startswith("GT") and not col == "info":
                    fields[col] = row[col]
                elif col == "info":
                    fields[col] = self._info_dict_to_string(info)
                else:
                    # reuse the original column name user requested
                    # e.g. replace gts[1085] with gts.NA20814
                    if '[' in col:
                        orig_col = self.gt_idx_to_name_map[col]
                        # e.g. eval("gts[1085]") against the unpacked arrays
                        val = eval(col.strip())
                        # convert numpy scalars to plain Python types
                        if type(val) in [np.int8, np.int32, np.bool_]:
                            fields[orig_col] = int(val)
                        elif type(val) in [np.float32]:
                            fields[orig_col] = float(val)
                        else:
                            fields[orig_col] = val
                    else:
                        # asked for "gts" or "gt_types", e.g.
                        if col == "gts":
                            fields[col] = ','.join(gts)
                        elif col == "gt_types":
                            fields[col] = \
                                ','.join(str(t) for t in gt_types)
                        elif col == "gt_phases":
                            fields[col] = \
                                ','.join(str(p) for p in gt_phases)
                        elif col == "gt_depths":
                            fields[col] = \
                                ','.join(str(d) for d in gt_depths)
                        elif col == "gt_quals":
                            fields[col] = \
                                ','.join(str(d) for d in gt_quals)
                        elif col == "gt_ref_depths":
                            fields[col] = \
                                ','.join(str(d) for d in gt_ref_depths)
                        elif col == "gt_alt_depths":
                            fields[col] = \
                                ','.join(str(d) for d in gt_alt_depths)
            if self.show_variant_samples:
                fields["variant_samples"] = \
                    self.variant_samples_delim.join(variant_names)
                fields["HET_samples"] = \
                    self.variant_samples_delim.join(het_names)
                fields["HOM_ALT_samples"] = \
                    self.variant_samples_delim.join(hom_alt_names)
            if self.show_families:
                fields["families"] = self.variant_samples_delim.join(families)
            gemini_row = GeminiRow(fields, gts, gt_types, gt_phases,
                                   gt_depths, gt_ref_depths, gt_alt_depths,
                                   gt_quals, variant_names, het_names, hom_alt_names,
                                   hom_ref_names, unknown_names, info,
                                   formatter=self.formatter)
            # user-supplied predicates may also reject the row
            if not all([predicate(gemini_row) for predicate in self.predicates]):
                continue
            if not self.for_browser:
                return gemini_row
            else:
                # browser mode wants the plain OrderedDict of fields
                return fields
def _connect_to_database(self):
"""
Establish a connection to the requested Gemini database.
"""
# open up a new database
if os.path.exists(self.db):
self.conn = sqlite3.connect(self.db)
self.conn.isolation_level = None
# allow us to refer to columns by name
self.conn.row_factory = sqlite3.Row
self.c = self.conn.cursor()
def _collect_sample_table_columns(self):
"""
extract the column names in the samples table into a list
"""
self.c.execute('select * from samples limit 1')
self.sample_column_names = [tup[0] for tup in self.c.description]
def _is_gt_filter_safe(self):
"""
Test to see if the gt_filter string is potentially malicious.
A future improvement would be to use pyparsing to
traverse and directly validate the string.
"""
if self.gt_filter is None:
return True
# avoid builtins
# http://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html
if "__" in self.gt_filter:
return False
# avoid malicious commands
evil = [" rm ", "os.system"]
if any(s in self.gt_filter for s in evil):
return False
# make sure a "gt" col is in the string
valid_cols = ["gts.", "gt_types.", "gt_phases.", "gt_quals.",
"gt_depths.", "gt_ref_depths.", "gt_alt_depths.",
"(gts).", "(gt_types).", "(gt_phases).", "(gt_quals).",
"(gt_depths).", "(gt_ref_depths).", "(gt_alt_depths)."]
if any(s in self.gt_filter for s in valid_cols):
return True
# assume the worst
return False
    def _execute_query(self):
        """Run self.query on the open cursor; exit the process on SQL errors."""
        try:
            self.c.execute(self.query)
        except sqlite3.OperationalError as e:
            # Report the underlying SQLite error, then abort -- callers do
            # not expect to continue after a malformed query.
            print "SQLite error: {0}".format(e)
            sys.exit("The query issued (%s) has a syntax error." % self.query)
def _apply_query(self):
"""
Execute a query. Intercept gt* columns and
replace sample names with indices where necessary.
"""
if self.needs_genes:
self.query = self._add_gene_col_to_query()
if self._query_needs_genotype_info():
# break up the select statement into individual
# pieces and replace genotype columns using sample
# names with sample indices
self._split_select()
# we only need genotype information if the user is
# querying the variants table
self.query = self._add_gt_cols_to_query()
self._execute_query()
self.all_query_cols = [str(tuple[0]) for tuple in self.c.description
if not tuple[0].startswith("gt") \
and ".gt" not in tuple[0]]
if "*" in self.select_columns:
self.select_columns.remove("*")
self.all_columns_orig.remove("*")
self.all_columns_new.remove("*")
self.select_columns += self.all_query_cols
self.report_cols = self.all_query_cols + \
list(OrderedSet(self.all_columns_new) - OrderedSet(self.select_columns))
# the query does not involve the variants table
# and as such, we don't need to do anything fancy.
else:
self._execute_query()
self.all_query_cols = [str(tuple[0]) for tuple in self.c.description
if not tuple[0].startswith("gt")]
self.report_cols = self.all_query_cols
def _correct_genotype_col(self, raw_col):
"""
Convert a _named_ genotype index to a _numerical_
genotype index so that the appropriate value can be
extracted for the sample from the genotype numpy arrays.
These lookups will be eval()'ed on the resuting rows to
extract the appropriate information.
For example, convert gt_types.1478PC0011 to gt_types[11]
"""
if raw_col == "*":
return raw_col.lower()
# e.g., "gts.NA12878"
elif '.' in raw_col:
(column, sample) = raw_col.split('.', 1)
corrected = column.lower() + "[" + str(self.sample_to_idx[sample]).lower() + "]"
else:
# e.g. "gts" - do nothing
corrected = raw_col
return corrected
def _get_matching_sample_ids(self, wildcard):
"""
Helper function to convert a sample wildcard
to a list of tuples reflecting the sample indices
and sample names so that the wildcard
query can be applied to the gt_* columns.
"""
query = 'SELECT sample_id, name FROM samples '
if wildcard.strip() != "*":
query += ' WHERE ' + wildcard
sample_info = [] # list of sample_id/name tuples
self.c.execute(query)
for row in self.c:
# sample_ids are 1-based but gt_* indices are 0-based
sample_info.append((int(row['sample_id']) - 1, str(row['name'])))
return sample_info
    def _correct_genotype_filter(self):
        """
        This converts a raw genotype filter that contains
        'wildcard' statements into a filter that can be eval()'ed.
        Specifically, we must convert a _named_ genotype index
        to a _numerical_ genotype index so that the appropriate
        value can be extracted for the sample from the genotype
        numpy arrays.

        For example, without WILDCARDS, this converts:
        --gt-filter "(gt_types.1478PC0011 == 1)"

        to:
        (gt_types[11] == 1)

        With WILDCARDS, this converts things like:
        "(gt_types).(phenotype==1).(==HET)"

        to:
        "gt_types[2] == HET and gt_types[5] == HET"
        """
        def _swap_genotype_for_number(token):
            """
            This is a bit of a hack to get around the fact that eval()
            doesn't handle the imported constants well when also having to
            find local variables. This requires some eval/globals()/locals() fu
            that has evaded me thus far. Just replacing HET, etc. with 1, etc. works.
            """
            if any(g in token for g in ['HET', 'HOM_ALT', 'HOM_REF', 'UNKNOWN']):
                token = token.replace('HET', str(HET))
                token = token.replace('HOM_ALT', str(HOM_ALT))
                token = token.replace('HOM_REF', str(HOM_REF))
                token = token.replace('UNKNOWN', str(UNKNOWN))
            return token

        corrected_gt_filter = []
        # first try to identify wildcard rules.
        # (\s*gt\w+\) handles both
        # (gt_types).(*).(!=HOM_REF).(all)
        # and
        # ( gt_types).(*).(!=HOM_REF).(all)
        wildcard_tokens = re.split(r'(\(\s*gt\w+\s*\)\.\(.+?\)\.\(.+?\)\.\(.+?\))', str(self.gt_filter))
        for token_idx, token in enumerate(wildcard_tokens):
            # NOT a WILDCARD
            # We must then split on whitespace and
            # correct the gt_* columns:
            # e.g., "gts.NA12878" or "and gt_types.M10500 == HET"
            # NOTE(review): the `')self.'` check appears to skip tokens that
            # have already been rewritten into rules -- confirm.
            if (token.find("gt") >= 0 or token.find("GT") >= 0) \
               and not '.(' in token and not ')self.' in token:
                tokens = re.split(r'[\s+]+', str(token))
                for t in tokens:
                    if len(t) == 0:
                        continue
                    if (t.find("gt") >= 0 or t.find("GT") >= 0):
                        corrected = self._correct_genotype_col(t)
                        corrected_gt_filter.append(corrected)
                    else:
                        # operator/operand: just swap genotype constant
                        # names (HET, ...) for their numeric values.
                        t = _swap_genotype_for_number(t)
                        corrected_gt_filter.append(t)
            # IS a WILDCARD
            # e.g., "gt_types.(affected==1).(==HET)"
            elif (token.find("gt") >= 0 or token.find("GT") >= 0) \
                 and '.(' in token and ').' in token:
                # break the wildcard into its pieces. That is:
                # (COLUMN).(WILDCARD).(WILDCARD_RULE).(WILDCARD_OP)
                # e.g, (gts).(phenotype==2).(==HET).(any)
                if token.count('.') != 3 or \
                   token.count('(') != 4 or \
                   token.count(')') != 4:
                    sys.exit("Wildcard filter should consist of 4 elements. Exiting.")
                (column, wildcard, wildcard_rule, wildcard_op) = token.split('.')
                # remove the syntactic parentheses
                column = column.strip('(').strip(')').strip()
                wildcard = wildcard.strip('(').strip(')').strip()
                wildcard_rule = wildcard_rule.strip('(').strip(')').strip()
                wildcard_op = wildcard_op.strip('(').strip(')').strip()
                # collect and save all of the samples that meet the wildcard criteria
                # for each clause.
                # these will be used in the list comprehension for the eval expression
                # constructed below.
                self.sample_info[token_idx] = self._get_matching_sample_ids(wildcard)
                # Replace HET, etc. with 1, etc. to avoid eval() issues.
                wildcard_rule = _swap_genotype_for_number(wildcard_rule)
                # build the rule based on the wildcard the user has supplied.
                if wildcard_op in ["all", "any"]:
                    rule = wildcard_op + "(" + column + '[sample[0]]' + wildcard_rule + " for sample in self.sample_info[" + str(token_idx) + "])"
                elif wildcard_op == "none":
                    rule = "not any(" + column + '[sample[0]]' + wildcard_rule + " for sample in self.sample_info[" + str(token_idx) + "])"
                elif "count" in wildcard_op:
                    # break "count>=2" into ['', '>=2']
                    tokens = wildcard_op.split('count')
                    count_comp = tokens[len(tokens) - 1]
                    rule = "sum(" + column + '[sample[0]]' + wildcard_rule + " for sample in self.sample_info[" + str(token_idx) + "])" + count_comp
                else:
                    sys.exit("Unsupported wildcard operation: (%s). Exiting." % wildcard_op)
                corrected_gt_filter.append(rule)
            else:
                # no gt reference at all: pass the token through (lowercased)
                if len(token) > 0:
                    corrected_gt_filter.append(token.lower())
        return " ".join(corrected_gt_filter)
    def _add_gt_cols_to_query(self):
        """
        We have to modify the raw query to select the genotype
        columns in order to support the genotype filters. That is,
        if the user wants to limit the rows returned based upon, for example,
        "gts.joe == 1", then we need to select the full gts BLOB column in
        order to enforce that limit. The user wouldn't have selected gts as a
        columns, so therefore, we have to modify the select statement to add
        it.

        In essence, when a genotype filter has been requested, we always add
        the gts, gt_types and gt_phases columns.
        """
        if "from" not in self.query.lower():
            sys.exit("Malformed query: expected a FROM keyword.")
        (select_tokens, rest_of_query) = get_select_cols_and_rest(self.query)
        # remove any GT columns -- they are reported from the unpacked
        # genotype arrays, not straight from SQL.
        select_clause_list = []
        for token in select_tokens:
            if not token.startswith("gt") and \
               not token.startswith("GT") and \
               not ".gt" in token and \
               not ".GT" in token and \
               not token.startswith("(gt") and \
               not token.startswith("(GT"):
                select_clause_list.append(token)
        # reconstruct the query with the GT* columns added
        if len(select_clause_list) > 0:
            select_clause = ",".join(select_clause_list) + \
                ", gts, gt_types, gt_phases, gt_depths, \
                gt_ref_depths, gt_alt_depths, gt_quals "
        else:
            select_clause = ",".join(select_clause_list) + \
                " gts, gt_types, gt_phases, gt_depths, \
                gt_ref_depths, gt_alt_depths, gt_quals "
        self.query = "select " + select_clause + rest_of_query
        # extract the original select columns
        return self.query
def _add_gene_col_to_query(self):
"""
Add the gene column to the list of SELECT'ed columns
in a query.
"""
if "from" not in self.query.lower():
sys.exit("Malformed query: expected a FROM keyword.")
(select_tokens, rest_of_query) = get_select_cols_and_rest(self.query)
if not any("gene" in s for s in select_tokens):
select_clause = ",".join(select_tokens) + \
", gene "
self.query = "select " + select_clause + rest_of_query
return self.query
    def _split_select(self):
        """
        Build a list of _all_ columns in the SELECT statement
        and segregated the non-genotype specific SELECT columns.

        This is used to control how to report the results, as the
        genotype-specific columns need to be eval()'ed whereas others
        do not.

        For example: "SELECT chrom, start, end, gt_types.1478PC0011"
        will populate the lists as follows:

        select_columns = ['chrom', 'start', 'end']
        all_columns = ['chrom', 'start', 'end', 'gt_types[11]']
        """
        self.select_columns = []
        self.all_columns_new = []
        self.all_columns_orig = []
        self.gt_name_to_idx_map = {}
        self.gt_idx_to_name_map = {}
        # iterate through all of the select columns and
        # distinguish the genotype-specific columns from the base columns
        if "from" not in self.query.lower():
            sys.exit("Malformed query: expected a FROM keyword.")
        (select_tokens, rest_of_query) = get_select_cols_and_rest(self.query)
        for token in select_tokens:
            # it is a WILDCARD
            if (token.find("gt") >= 0 or token.find("GT") >= 0) \
               and '.(' in token and ').' in token:
                # break the wildcard into its pieces. That is:
                # (COLUMN).(WILDCARD)
                (column, wildcard) = token.split('.')
                # remove the syntactic parentheses
                wildcard = wildcard.strip('(').strip(')')
                column = column.strip('(').strip(')')
                # convert "gt_types.(affected==1)"
                # to: gt_types[3] == HET and gt_types[9] == HET
                sample_info = self._get_matching_sample_ids(wildcard)
                # maintain a list of the sample indices that should
                # be displayed as a result of the SELECT'ed wildcard
                wildcard_indices = []
                for (idx, sample) in enumerate(sample_info):
                    # display name keeps the sample name; mask name carries
                    # the numeric index used for eval() in next().
                    wildcard_display_col = column + '.' + str(sample[1])
                    wildcard_mask_col = column + '[' + str(sample[0]) + ']'
                    wildcard_indices.append(sample[0])
                    new_col = wildcard_mask_col
                    self.all_columns_new.append(new_col)
                    self.all_columns_orig.append(wildcard_display_col)
                    self.gt_name_to_idx_map[wildcard_display_col] = wildcard_mask_col
                    self.gt_idx_to_name_map[wildcard_mask_col] = wildcard_display_col
            # it is a basic genotype column
            elif (token.find("gt") >= 0 or token.find("GT") >= 0) \
                 and '.(' not in token and not ').' in token:
                new_col = self._correct_genotype_col(token)
                self.all_columns_new.append(new_col)
                self.all_columns_orig.append(token)
                self.gt_name_to_idx_map[token] = new_col
                self.gt_idx_to_name_map[new_col] = token
            # it is neither
            else:
                self.select_columns.append(token)
                self.all_columns_new.append(token)
                self.all_columns_orig.append(token)
def _info_dict_to_string(self, info):
"""
Flatten the VCF info-field OrderedDict into a string,
including all arrays for allelic-level info.
"""
if info is not None:
return ';'.join(['%s=%s' % (key, value) if not isinstance(value, list) \
else '%s=%s' % (key, ','.join([str(v) for v in value])) \
for (key, value) in info.items()])
else:
return None
def _tokenize_query(self):
tokens = list(flatten([x.split(",") for x in self.query.split(" ")]))
return tokens
def _query_needs_genotype_info(self):
tokens = self._tokenize_query()
requested_genotype = "variants" in tokens and \
(any([x.startswith("gt") for x in tokens]) or \
any([x.startswith("(gt") for x in tokens]) or \
any(".gt" in x for x in tokens))
return requested_genotype or \
self.include_gt_cols or \
self.show_variant_samples or \
self.needs_genotypes
def select_formatter(args):
    """
    Choose and instantiate a RowFormat subclass based on args.format.

    The carrier_summary flag, when present and truthy, takes precedence
    over the requested format name. Raises NotImplementedError for
    unknown format names.
    """
    supported_formats = {x.name.lower(): x for x in
                         itersubclasses(RowFormat)}
    if hasattr(args, 'carrier_summary') and args.carrier_summary:
        return supported_formats["carrier_summary"](args)
    # FIX: idiomatic `not in` (was `if not args.format in ...`).
    if args.format not in supported_formats:
        raise NotImplementedError("Conversion to %s not supported. Valid "
                                  "formats are %s."
                                  % (args.format, supported_formats))
    return supported_formats[args.format](args)
def flatten(l):
    """
    flatten an irregular list of lists
        example: flatten([[[1, 2, 3], [4, 5]], 6]) -> [1, 2, 3, 4, 5, 6]
        lifted from: http://stackoverflow.com/questions/2158395/
    """
    for el in l:
        # NOTE: Python 2 idioms -- `basestring` and `collections.Iterable`
        # do not exist on Python 3 (there: str/bytes and collections.abc).
        # Strings are treated as leaves, not iterables.
        if isinstance(el, collections.Iterable) and not isinstance(el,
                                                                   basestring):
            # recurse into nested iterables
            for sub in flatten(el):
                yield sub
        else:
            yield el
# Ad-hoc smoke tests (Python 2). Usage: python GeminiQuery.py my.db
if __name__ == "__main__":

    db = sys.argv[1]
    gq = GeminiQuery(db)

    print "test a basic query with no genotypes"
    query = "select chrom, start, end from variants limit 5"
    gq.run(query)
    for row in gq:
        print row

    print "test a basic query with no genotypes using a header"
    query = "select chrom, start, end from variants limit 5"
    gq.run(query)
    print gq.header
    for row in gq:
        print row

    print "test query that selects a sample genotype"
    query = "select chrom, start, end, gts.NA20814 from variants limit 5"
    gq.run(query)
    for row in gq:
        print row

    print "test query that selects a sample genotype and uses a header"
    query = "select chrom, start, end, gts.NA20814 from variants limit 5"
    gq.run(query)
    print gq.header
    for row in gq:
        print row

    print "test query that selects and _filters_ on a sample genotype"
    query = "select chrom, start, end, gts.NA20814 from variants limit 50"
    db_filter = "gt_types.NA20814 == HET"
    gq.run(query, db_filter)
    for row in gq:
        print row

    print "test query that selects and _filters_ on a sample genotype and uses a filter"
    query = "select chrom, start, end, gts.NA20814 from variants limit 50"
    db_filter = "gt_types.NA20814 == HET"
    gq.run(query, db_filter)
    print gq.header
    for row in gq:
        print row

    print "test query that selects and _filters_ on a sample genotype and uses a filter and a header"
    query = "select chrom, start, end, gts.NA20814 from variants limit 50"
    db_filter = "gt_types.NA20814 == HET"
    gq.run(query, db_filter)
    print gq.header
    for row in gq:
        print row

    # rows support dict-style access by (original) column name
    print "demonstrate accessing individual columns"
    query = "select chrom, start, end, gts.NA20814 from variants limit 50"
    db_filter = "gt_types.NA20814 == HET"
    gq.run(query, db_filter)
    for row in gq:
        print row['chrom'], row['start'], row['end'], row['gts.NA20814']
|
poojavade/Genomics_Docker
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/gemini-0.10.0-py2.7.egg/gemini/GeminiQuery.py
|
Python
|
apache-2.0
| 43,429
|
#!/usr/bin/env python
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Glance Image Cache Pre-fetcher
This is meant to be run from the command line after queueing
images to be prefetched.
"""
import os
import sys
# If ../glance/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
sys.path.insert(0, possible_topdir)
import glance_store
from oslo_log import log as logging
from glance.common import config
from glance.image_cache import prefetcher
CONF = config.CONF
# Register oslo.log's options and default logging to stderr so output is
# visible when the prefetcher is run directly from the command line.
logging.register_options(CONF)
CONF.set_default(name='use_stderr', default=True)
def main():
    """Entry point: parse cache config, set up stores, run the prefetcher."""
    try:
        config.parse_cache_args()
        logging.setup(CONF, 'glance')
        # Initialize the backing image stores before prefetching.
        glance_store.register_opts(config.CONF)
        glance_store.create_stores(config.CONF)
        glance_store.verify_default_store()
        app = prefetcher.Prefetcher()
        app.run()
    except RuntimeError as e:
        # Configuration/store failures surface here; exit non-zero.
        sys.exit("ERROR: %s" % e)


if __name__ == '__main__':
    main()
|
openstack/glance
|
glance/cmd/cache_prefetcher.py
|
Python
|
apache-2.0
| 1,880
|
from utils.header import MagicField, Field
from load_command import LoadCommandHeader, LoadCommandCommand
class PrebindCksumCommand(LoadCommandHeader):
    """LC_PREBIND_CKSUM load command: carries the prebind checksum."""
    ENDIAN = None
    FIELDS = (
        # FIX: this command's cmd value is LC_PREBIND_CKSUM; the previous
        # mapping to LC_DYSYMTAB was a copy-paste error from another
        # load-command class.
        MagicField('cmd', 'I', {LoadCommandCommand.COMMANDS['LC_PREBIND_CKSUM']: 'LC_PREBIND_CKSUM'}),
        Field('cmdsize', 'I'),  # total size of this command, in bytes
        Field('cksum', 'I'),    # checksum (or zero)
    )

    def __init__(self, bytes_=None, **kwargs):
        # populated when the header bytes are parsed
        self.cksum = None
        super(PrebindCksumCommand, self).__init__(bytes_, **kwargs)
|
hkkwok/MachOTool
|
mach_o/headers/prebind_cksum_command.py
|
Python
|
apache-2.0
| 488
|
#!/usr/bin/python
##############################################################################
# Copyright 2016-2017 Rigetti Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import pyquil.forest as qvm_endpoint
from pyquil.quil import Program
from pyquil.quilbase import DirectQubit
from pyquil.gates import I, X, Y, Z, H, T, S, RX, RY, RZ, CNOT, CCNOT, PHASE, CPHASE00, CPHASE01, \
CPHASE10, CPHASE, SWAP, CSWAP, ISWAP, PSWAP, MEASURE, HALT, WAIT, NOP, RESET, \
TRUE, FALSE, NOT, AND, OR, MOVE, EXCHANGE
from pyquil.quilbase import InstructionGroup, DefGate, Gate, reset_label_counter, RawInstr, Addr
import pytest
import numpy as np
from math import pi, sqrt
def test_make_connection():
    # Smoke test: a QVM Connection object can be constructed.
    qvm_endpoint.Connection()


def test_gate():
    # A Gate renders as "NAME <qubit indices>".
    tg = Gate("TEST", qubits=(DirectQubit(1), DirectQubit(2)), params=[])
    assert tg.out() == "TEST 1 2"


def test_defgate():
    dg = DefGate("TEST", np.array([[1., 0.],
                                   [0., 1.]]))
    assert dg.out() == "DEFGATE TEST:\n 1.0, 0.0\n 0.0, 1.0\n"
    # get_constructor() returns a callable that instantiates the gate.
    test = dg.get_constructor()
    tg = test(DirectQubit(1), DirectQubit(2))
    assert tg.out() == "TEST 1 2"


def test_defgate_non_square_should_throw_error():
    with pytest.raises(AssertionError) as error_info:
        DefGate("TEST", np.array([[0 + 0.5j, 0.5, 1],
                                  [0.5, 0 - 0.5j, 1]]))
    assert str(error_info.value) == "Matrix must be square."


def test_defgate_non_unitary_should_throw_error():
    with pytest.raises(AssertionError) as error_info:
        DefGate("TEST", np.array([[0, 1],
                                  [2, 3]]))
    assert str(error_info.value) == "Matrix must be unitary."


def test_defgate_param():
    # DefGate also accepts a plain nested list instead of an ndarray.
    dgp = DefGate("TEST", [[1., 0.], [0., 1.]])
    assert dgp.out() == "DEFGATE TEST:\n 1.0, 0.0\n 0.0, 1.0\n"
    test = dgp.get_constructor()
    tg = test(DirectQubit(1))
    assert tg.out() == "TEST 1"
def test_instruction_group_gates():
    # Gate objects can be added via inst().
    ig = InstructionGroup()
    ig.inst(H(0), X(1))
    assert len(ig.actions) == 2
    assert ig.out() == "H 0\nX 1\n"


def test_instruction_group_tuple():
    # ("NAME", qubit) tuples are accepted as instructions.
    ig = InstructionGroup()
    ig.inst(("Y", 0),
            ("X", 1))
    assert len(ig.actions) == 2
    assert ig.out() == "Y 0\nX 1\n"


def test_instruction_group_string():
    # Raw Quil strings are accepted as instructions.
    ig = InstructionGroup()
    ig.inst("Y 0",
            "X 1", )
    assert len(ig.actions) == 2
    assert ig.out() == "Y 0\nX 1\n"


def test_program_gates():
    ig = Program()
    ig.inst(H(0), X(1))
    assert len(ig.actions) == 2
    assert ig.out() == "H 0\nX 1\n"


def test_program_pop():
    # pop() removes and returns the last (index, instruction) pair.
    prog = Program(X(0), X(1))
    _, instruction = prog.pop()
    assert prog.out() == "X 0\n"
    assert Program(instruction).out() == "X 1\n"


def test_plus_operator():
    # += accepts both single instructions and lists of instructions.
    p = Program()
    p += H(0)
    p += [X(0), Y(0), Z(0)]
    assert len(p.actions) == 4
    assert p.out() == "H 0\nX 0\nY 0\nZ 0\n"


def test_program_plus_program():
    # + builds a new Program and leaves the operands untouched.
    p = Program().inst(X(0))
    q = Program().inst(Y(0))
    r = p + q
    assert len(p.actions) == 1
    assert len(q.actions) == 1
    assert len(r.actions) == 2
    assert p.out() == "X 0\n"
    assert q.out() == "Y 0\n"
    assert r.out() == "X 0\nY 0\n"


def test_program_tuple():
    ig = Program()
    ig.inst(("Y", 0),
            ("X", 1))
    assert len(ig.actions) == 2
    assert ig.out() == "Y 0\nX 1\n"


def test_program_string():
    # Raw strings become RawInstr actions.
    ig = Program()
    ig.inst("Y 0",
            "X 1", )
    assert len(ig.actions) == 2
    assert all(isinstance(i[1], RawInstr) for i in ig.actions)
    assert ig.out() == "Y 0\nX 1\n"
def test_prog_init():
    p = Program()
    p.inst(X(0)).measure(0, 0)
    assert p.out() == 'X 0\nMEASURE 0 [0]\n'


def test_classical_regs():
    # MEASURE can target an arbitrary classical register.
    p = Program()
    p.inst(X(0)).measure(0, 1)
    assert p.out() == 'X 0\nMEASURE 0 [1]\n'


def test_simple_instructions():
    p = Program().inst(HALT, WAIT, RESET, NOP)
    assert p.out() == 'HALT\nWAIT\nRESET\nNOP\n'


def test_unary_classicals():
    # Classical bit targets may be plain ints or Addr objects.
    p = Program()
    p.inst(TRUE(0),
           FALSE(Addr(1)),
           NOT(2))
    assert p.out() == 'TRUE [0]\n' \
                      'FALSE [1]\n' \
                      'NOT [2]\n'


def test_binary_classicals():
    p = Program()
    p.inst(AND(0, 1),
           OR(Addr(0), Addr(1)),
           MOVE(0, 1),
           EXCHANGE(0, Addr(1)))
    assert p.out() == 'AND [0] [1]\n' \
                      'OR [0] [1]\n' \
                      'MOVE [0] [1]\n' \
                      'EXCHANGE [0] [1]\n'


def test_measurement_calls():
    p = Program()
    p.inst(MEASURE(0, 1),
           MEASURE(0, Addr(1)))
    assert p.out() == 'MEASURE 0 [1]\n' * 2


def test_construction_syntax():
    # inst()/measure() chain fluently in any order.
    p = Program().inst(X(0), Y(1), Z(0)).measure(0, 1)
    assert p.out() == 'X 0\nY 1\nZ 0\nMEASURE 0 [1]\n'
    p = Program().inst(X(0)).inst(Y(1)).measure(0, 1).inst(MEASURE(1, 2))
    assert p.out() == 'X 0\nY 1\nMEASURE 0 [1]\nMEASURE 1 [2]\n'
    p = Program().inst(X(0)).measure(0, 1).inst(Y(1), X(0)).measure(0, 0)
    assert p.out() == 'X 0\nMEASURE 0 [1]\nY 1\nX 0\nMEASURE 0 [0]\n'
def test_singles():
    p = Program(I(0), X(0), Y(1), Z(1), H(2), T(2), S(1))
    assert p.out() == 'I 0\nX 0\nY 1\nZ 1\nH 2\nT 2\nS 1\n'


def test_rotations():
    # Parameterized gates are invoked as GATE(angle)(qubit).
    p = Program(RX(0.5)(0), RY(0.1)(1), RZ(1.4)(2))
    assert p.out() == 'RX(0.5) 0\nRY(0.1) 1\nRZ(1.4) 2\n'


def test_controlled_gates():
    p = Program(CNOT(0, 1), CCNOT(0, 1, 2))
    assert p.out() == 'CNOT 0 1\nCCNOT 0 1 2\n'


def test_phases():
    # Angles render at full float repr precision.
    p = Program(PHASE(np.pi)(1), CPHASE00(np.pi)(0, 1), CPHASE01(np.pi)(0, 1),
                CPHASE10(np.pi)(0, 1),
                CPHASE(np.pi)(0, 1))
    assert p.out() == 'PHASE(3.141592653589793) 1\nCPHASE00(3.141592653589793) 0 1\n' \
                      'CPHASE01(3.141592653589793) 0 1\nCPHASE10(3.141592653589793) 0 1\n' \
                      'CPHASE(3.141592653589793) 0 1\n'


def test_swaps():
    p = Program(SWAP(0, 1), CSWAP(0, 1, 2), ISWAP(0, 1), PSWAP(np.pi)(0, 1))
    assert p.out() == 'SWAP 0 1\nCSWAP 0 1 2\nISWAP 0 1\nPSWAP(3.141592653589793) 0 1\n'
def test_def_gate():
    # First we define the new gate from a matrix
    x_gate_matrix = np.array(([0.0, 1.0], [1.0, 0.0]))
    sqrt_x = np.array([[ 0.5+0.5j, 0.5-0.5j],
                       [ 0.5-0.5j, 0.5+0.5j]])
    p = Program().defgate("SQRT-X", sqrt_x)
    # Then we can use the new gate
    p.inst(("SQRT-X", 0))
    assert p.out() == 'DEFGATE SQRT-X:\n 0.5+0.5i, 0.5-0.5i\n 0.5-0.5i, 0.5+0.5i\n\nSQRT-X 0\n'


def test_multiqubit_gate():
    # A multi-qubit defgate example
    x_gate_matrix = np.array(([0.0, 1.0], [1.0, 0.0]))
    sqrt_x = np.array([[ 0.5+0.5j, 0.5-0.5j],
                       [ 0.5-0.5j, 0.5+0.5j]])
    # Kronecker product yields the 2-qubit SQRT-X (x) X matrix.
    x_sqrt_x = np.kron(sqrt_x, x_gate_matrix)
    p = Program().defgate("X-SQRT-X", x_sqrt_x)
    # Then we can use the new gate
    p.inst(("X-SQRT-X", 0, 1))
    assert p.out() == 'DEFGATE X-SQRT-X:\n 0.0+0.0i, 0.5+0.5i, 0.0+0.0i, 0.5-0.5i\n ' \
                      '0.5+0.5i, 0.0+0.0i, 0.5-0.5i, 0.0+0.0i\n ' \
                      '0.0+0.0i, 0.5-0.5i, 0.0+0.0i, 0.5+0.5i\n ' \
                      '0.5-0.5i, 0.0+0.0i, 0.5+0.5i, 0.0+0.0i\n\nX-SQRT-X 0 1\n'


def test_define_qft():
    def qft3(q0, q1, q2):
        p = Program()
        # NOTE(review): H(1) below hard-codes qubit 1 instead of H(q1);
        # the expected output only holds for qft3(0, 1, 2) -- confirm
        # whether this was intentional.
        p.inst(H(q2),
               CPHASE(pi / 2.0)(q1, q2),
               H(1),
               CPHASE(pi / 4.0)(q0, q2),
               CPHASE(pi / 2.0)(q0, q1),
               H(q0),
               SWAP(q0, q2))
        return p

    # I(2) is to force 3 qubits in state prep program.
    state_prep = Program().inst(X(0))
    prog = state_prep + qft3(0, 1, 2)
    output = prog.out()
    print output
    assert output == 'X 0\nH 2\nCPHASE(1.5707963267948966) 1 2\nH 1\nCPHASE(0.7853981633974483) 0 ' \
                     '2\nCPHASE(1.5707963267948966) 0 1\nH 0\nSWAP 0 2\n'
def test_control_flows():
    # Label counter must be reset so generated label names are stable.
    reset_label_counter()
    classical_flag_register = 2
    p = Program(X(0), H(0)).measure(0, classical_flag_register)

    # run p in a loop until classical_flag_register is 0
    loop_prog = Program(X(0)).measure(0, classical_flag_register)
    loop_prog.while_do(classical_flag_register, p)
    assert loop_prog.out() == 'X 0\nMEASURE 0 [2]\nLABEL @START1\nJUMP-UNLESS @END2 [2]\nX ' \
                              '0\nH 0\nMEASURE 0 [2]\nJUMP @START1\nLABEL @END2\n'

    # create a program that branches based on the value of a classical register
    x_prog = Program(X(0))
    z_prog = Program()
    branch = Program(H(1)).measure(1, 1).if_then(1, x_prog, z_prog).measure(0, 0)
    assert branch.out() == 'H 1\nMEASURE 1 [1]\nJUMP-WHEN @THEN3 [1]\nJUMP @END4\nLABEL ' \
                           '@THEN3\nX 0\nLABEL @END4\nMEASURE 0 [0]\n'


def test_if_option():
    # if_then() without an else-branch emits only the THEN block.
    reset_label_counter()
    p = Program(X(0)).measure(0, 0).if_then(0, Program(X(1)))
    assert p.out() == 'X 0\nMEASURE 0 [0]\nJUMP-WHEN @THEN1 [0]\nJUMP @END2\n' \
                      'LABEL @THEN1\nX 1\nLABEL @END2\n'
def test_alloc_free_simple():
    """A freed qubit index is reused by the next alloc().

    FIX: renamed from test_alloc_free -- a second test with that exact
    name is defined below and shadowed this one, so pytest never ran it.
    (A stray debug print of resource_manager.in_use was also removed.)
    """
    p = Program()
    q1 = p.alloc()
    p.inst(H(q1))
    p.free(q1)
    q2 = p.alloc()
    p.inst(H(q2))
    p.free(q2)
    assert p.resource_manager.live_qubits == []
    # Both allocations resolve to qubit 0 since q1 was freed first.
    assert p.out() == "H 0\nH 0\n"
def test_alloc_free():
    # Interleaved alloc/free: freed indices are recycled, unfreed ones
    # keep their assigned positions at synthesis time.
    p = Program()
    p.inst(H(0))  # H 0
    q1 = p.alloc()  # q1 = 1
    q2 = p.alloc()  # q2 = 3
    p.inst(CNOT(q1, q2))  # CNOT 1 3
    p.inst(H(2))
    q3 = p.alloc()  # q3 = 4
    p.inst(X(q3))  # X 4
    p.free(q1)  # remove 1
    q4 = p.alloc()  # q4 = 1
    p.inst(Y(q4))  # Y 1
    p.free(q2)
    p.free(q3)
    p.free(q4)
    assert p.resource_manager.live_qubits == []
    assert p.out() == "H 0\n" \
                      "CNOT 1 3\n" \
                      "H 2\n" \
                      "X 4\n" \
                      "Y 1\n"
def test_multiple_instantiate():
p = Program()
q = p.alloc()
p.inst(H(q))
p.free(q)
assert p.out() == 'H 0\n'
assert p.out() == 'H 0\n'
def test_alloc_no_free():
p = Program()
q1 = p.alloc()
q2 = p.alloc()
p.inst(H(q1))
p.inst(H(q2))
assert p.out() == 'H 0\nH 1\n'
assert p.out() == 'H 0\nH 1\n'
def test_extract_qubits():
    """extract_qubits() reports qubits from gates, branches, loops and allocs."""
    prog = Program(RX(0.5)(0), RY(0.1)(1), RZ(1.4)(2))
    assert prog.extract_qubits() == {0, 1, 2}
    prog.if_then(0, X(4), H(5)).measure(6, 2)
    assert prog.extract_qubits() == {0, 1, 2, 4, 5, 6}
    prog.while_do(0, Program(X(3)).measure(3, 0))
    assert prog.extract_qubits() == {0, 1, 2, 3, 4, 5, 6}
    fresh = prog.alloc()
    prog.inst(X(fresh))
    prog.synthesize()
    assert prog.extract_qubits() == {0, 1, 2, 3, 4, 5, 6, fresh.index()}
|
balopat/pyquil
|
pyquil/tests/test_quil.py
|
Python
|
apache-2.0
| 11,288
|
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Function call retry utility."""
import time
from typing import Any, Callable, Mapping, Optional, Sequence, Type, TypeVar
from gazoo_device import errors
def _default_is_successful(_: Any) -> bool:
return True
def not_func(val: Any) -> bool:
  """Returns True if bool(val) evaluates to False."""
  return False if val else True
def is_true(val: Any) -> bool:
  """Returns True if bool(val) evaluates to True."""
  return True if val else False
_FuncReturnType = TypeVar("_FuncReturnType")


def retry(
    func: Callable[..., _FuncReturnType],
    func_args: Sequence[Any] = (),
    func_kwargs: Optional[Mapping[str, Any]] = None,
    is_successful: Callable[[_FuncReturnType], bool] = _default_is_successful,
    timeout: float = 10,
    interval: float = 1,
    reraise: bool = True,
    exc_type: Type[Exception] = errors.CommunicationTimeoutError
) -> _FuncReturnType:
  """Retries func() until it succeeds or timeout is reached.

  Success of execution of func() is determined by is_successful() function,
  which should return True on successful execution of func().

  func() is always attempted at least once, even if timeout is <= 0.
  (Previously the deadline was checked *before* the first attempt, so a
  short timeout could raise without ever calling func(); the loop also
  slept a full interval after the final failed attempt, delaying the
  timeout error past the requested window.)

  Args:
    func: Function to execute.
    func_args: Positional arguments to the function.
    func_kwargs: Keyword arguments to the function.
    is_successful: Function which takes in the result of func() and returns
      whether function execution should be considered successful. To indicate
      success, return True. Defaults to always returning True.
    timeout: If no run of func() succeeds in this time period, raise an error.
    interval: How long to wait between attempts of func().
    reraise: Whether to re-raise exceptions in func() or not. If True, will
      re-raise any exceptions from func(). If False, considers execution of
      func() a failure if an Exception is raised. is_successful() will NOT be
      called if an Exception occurs.
    exc_type: Type of exception to raise when timeout is reached. Note that the
      class constructor will be called with just 1 argument.

  Returns:
    Return value of first successful func() call.

  Raises:
    Exception: if timeout is reached, or if an Exception occurs in func() with
      reraise=True.
  """
  if func_kwargs is None:
    func_kwargs = {}
  tried_times = 0
  start_time = time.time()
  deadline = start_time + timeout
  while True:
    exception_occurred = False
    tried_times += 1
    try:
      func_result = func(*func_args, **func_kwargs)
    except Exception:  # pylint: disable=broad-except
      if reraise:
        raise
      else:
        exception_occurred = True
    if not exception_occurred and is_successful(func_result):
      return func_result
    # Stop as soon as the deadline has passed; sleeping again here would
    # only delay the timeout error beyond the requested window.
    if time.time() >= deadline:
      break
    time.sleep(interval)
  time_elapsed = time.time() - start_time
  raise exc_type("Timeout in {}s. Tried calling {} {} times.".format(
      time_elapsed, func.__name__, tried_times))
|
google/gazoo-device
|
gazoo_device/utility/retry.py
|
Python
|
apache-2.0
| 3,404
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import namedtuple
import tvm
from tvm import relay
from tvm.relay import quantize as qtz
import mxnet as mx
from mxnet import gluon
import logging
import os
logging.basicConfig(level=logging.INFO)
# One row per benchmark: model name, quantization bit widths/dtypes for input
# and output, the calibration global scale and the accuracy floor to assert.
Config = namedtuple('Config', ['model', 'nbit_input', 'dtype_input', 'nbit_output', 'dtype_output', 'global_scale', 'expected_acc'])


def get_val_data(model_name,
                 rec_val,
                 batch_size,
                 num_workers=4):
    """Build the ImageNet validation iterator plus a batch-splitting closure.

    Returns (val_data, batch_fn) where batch_fn(batch, ctx) splits one
    recordio batch into per-context data and label lists.
    """
    rec_val = os.path.expanduser(rec_val)

    def batch_fn(batch, ctx):
        # Split data and labels along the batch axis across the contexts.
        data = gluon.utils.split_and_load(batch.data[0], ctx_list=ctx, batch_axis=0)
        label = gluon.utils.split_and_load(batch.label[0], ctx_list=ctx, batch_axis=0)
        return data, label

    # InceptionV3 expects 299x299 crops; every other model uses 224x224.
    img_size = 299 if model_name == 'inceptionv3' else 224
    rgb_mean = (123.68, 116.779, 103.939)
    rgb_std = (58.393, 57.12, 57.375)
    val_data = mx.io.ImageRecordIter(
        path_imgrec=rec_val,
        preprocess_threads=num_workers,
        shuffle=False,
        batch_size=batch_size,
        resize=256,
        data_shape=(3, img_size, img_size),
        mean_r=rgb_mean[0],
        mean_g=rgb_mean[1],
        mean_b=rgb_mean[2],
        std_r=rgb_std[0],
        std_g=rgb_std[1],
        std_b=rgb_std[2],
    )
    return val_data, batch_fn
def get_model(model_name, batch_size, qconfig, target=None, original=False, simulated=False):
    """Fetch a pretrained Gluon model, convert to Relay, optionally quantize.

    When `original` is True the pre-quantization (but optimized) function is
    returned; otherwise the function quantized under `qconfig` is returned.
    """
    gluon_model = gluon.model_zoo.vision.get_model(model_name, pretrained=True)
    # InceptionV3 expects 299x299 inputs; every other model uses 224x224.
    input_size = 299 if model_name == 'inceptionv3' else 224
    shape_dict = {"data": (batch_size, 3, input_size, input_size)}
    mod, params = relay.frontend.from_mxnet(gluon_model, shape_dict)
    net = mod['main']
    with relay.build_config(opt_level=3):
        qfunc = relay.quantize.prerequisite_optimize(net, params=params)
    logging.debug('original')
    logging.debug(qfunc.astext(show_meta_data=False))
    if original:
        return qfunc

    with qconfig:
        logging.debug('current quantize config')
        logging.debug(qtz.current_qconfig())
        qfunc = qtz.quantize(qfunc)
        logging.debug('after quantize')
        logging.debug(qfunc.astext(show_meta_data=False))
    return qfunc
def eval_acc(model, dataset, batch_fn, target=tvm.target.cuda(), ctx=tvm.gpu(), log_interval=100):
    """Compile `model` with Relay and measure top-1/top-5 accuracy on `dataset`.

    Returns the final top-1 accuracy as a float.
    """
    with relay.build_config(opt_level=3):
        graph, lib, params = relay.build(model, target)
    # create runtime module
    m = tvm.contrib.graph_runtime.create(graph, lib, ctx)
    m.set_input(**params)
    # setup evaluation metrics
    dataset.reset()
    batch_size = dataset.batch_size
    acc_top1 = mx.metric.Accuracy()
    acc_top5 = mx.metric.TopKAccuracy(5)
    acc_top1.reset()
    acc_top5.reset()
    # Execute
    for i, batch in enumerate(dataset):
        data, label = batch_fn(batch, [mx.cpu(0)])
        m.run(data=data[0].asnumpy())
        out_arr = m.get_output(0)
        acc_top1.update(label, [mx.nd.array(out_arr.asnumpy())])
        acc_top5.update(label, [mx.nd.array(out_arr.asnumpy())])
        if not (i + 1) % log_interval:
            _, top1 = acc_top1.get()
            _, top5 = acc_top5.get()
            nsamples = (i + 1) * batch_size
            logging.info('[%d samples] validation: acc-top1=%f acc-top5=%f', nsamples, top1, top5)
    # BUG FIX: compute the final metrics from the accumulators instead of
    # reusing the values from the last periodic log. Previously `top1`/`top5`
    # were unbound (NameError) when the dataset had fewer than `log_interval`
    # batches, and otherwise the "final" line reported stale interval values.
    _, top1 = acc_top1.get()
    _, top5 = acc_top5.get()
    logging.info('[final] validation: acc-top1=%f acc-top5=%f', top1, top5)
    return top1
def test_quantize_acc(cfg, rec_val):
    """Quantize cfg.model per cfg, evaluate on rec_val, assert accuracy floor."""
    batch_size = 32
    qconfig = qtz.qconfig(
        skip_conv_layers=[0],
        nbit_input=cfg.nbit_input,
        nbit_weight=cfg.nbit_input,
        global_scale=cfg.global_scale,
        dtype_input=cfg.dtype_input,
        dtype_weight=cfg.dtype_input,
        dtype_activation=cfg.dtype_output,
        debug_enabled_ops=None)
    model = get_model(cfg.model, batch_size, qconfig, tvm.target.cuda())
    val_data, batch_fn = get_val_data(cfg.model, rec_val=rec_val, batch_size=batch_size)

    acc = eval_acc(model, val_data, batch_fn)
    assert acc > cfg.expected_acc
    return acc
if __name__ == "__main__":
    #TODO(for user): replace the line with the path to imagenet validation dataset
    rec_val = "/scratch/tqchen/imagenet/val.rec"
    results = []
    # Each Config pins a model, its quantization settings and the accuracy
    # floor that test_quantize_acc asserts against.
    configs = [
        Config('mobilenetv2_1.0', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=4.0, expected_acc=0.666),
        Config('resnet18_v1', nbit_input=8, dtype_input='int8', nbit_output=16, dtype_output='int16', global_scale=8.0, expected_acc=0.692),
        Config('resnet18_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.692),
        Config('resnet34_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.733),
        Config('resnet50_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.747),
        Config('resnet101_v1', nbit_input=8, dtype_input='int8', nbit_output=32, dtype_output='int32', global_scale=8.0, expected_acc=0.756),
        # TODO: need to fix accuracy
        # Config('mobilenetv2_1.0', nbit_input=8, dtype_input='int8', nbit_output=16, dtype_output='int16', global_scale=4.0),
    ]
    # Run every benchmark, then print the (config, accuracy) pairs at the end.
    for config in configs:
        acc = test_quantize_acc(config, rec_val)
        results.append((config, acc))
    for res in results:
        print(res)
|
Huyuwei/tvm
|
tests/python/nightly/quantization/test_quantization_accuracy.py
|
Python
|
apache-2.0
| 6,399
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pymongo
from pymongo import MongoClient
from pymongo import errors
import re
class Database(object):
'''Database creation'''
def __init__(self, database_name):
self.client = MongoClient('mongodb://localhost,localhost:27017')
self.db_name = database_name
self.db = self.client[self.db_name]
#self.jobs = self.client[self.db_name].jobs
#self.results = self.db['results']
#self.queue = self.db['queue']
#self.log = self.db['log']
#self.sources = self.db['sources']
#self.jobs = self.db['jobs']
#self.db.x = self.db[x]
# def __repr__(self, database_name):
# print "Using database: %s" %self.client[database_name]
# return self.db
def use_db(self, database_name):
return self.client[str(name)]
def show_dbs(self):
return self.client.database_names()
def create_coll(self, coll_name):
setattr(self, str(coll_name), self.db[str(coll_name)])
#print "coll : %s has been created in db:%s " %(self.__dict__[str(coll_name)], self.db_name)
return self.__dict__[str(coll_name)]
def create_colls(self, coll_names=["results","sources", "logs", "queue"]):
for n in coll_names:
setattr(self, n, self.db[str(n)])
# self.queue = self.db['queue']
# self.log = self.db['log']
# self.sources = self.db['sources']
# #print "Creating coll", [n for n in self.db.collection_names()]
return [n for n in self.db.collection_names()]
def show_coll(self):
try:
print "using collection %s in DB : %s" %(self.coll_name, self.db_name)
return self.coll_name
except AttributeError:
return False
#return self.db.collection_names()
def show_coll_items(self, coll_name):
return [n for n in self.db[str(coll_name)].find()]
# def count(self, coll_name):
# self.db_coll = self.db[str(coll_name)]
# return self.db_coll.count()
def drop(self, type, name):
if type == "collection":
return self.db[str(name)].drop()
elif type == "database":
return self.client.drop_database(str(name))
else:
print "Unknown Type"
return False
def drop_all_dbs():
'''remove EVERY SINGLE MONGO DATABASE'''
for n in self.show_dbs():
self.use_db(n)
self.drop("database", n)
def stats(self):
'''Output the current stats of database in Terminal'''
title = "===STATS===\n"
name ="Stored results in Mongo Database: %s \n" %(self.db_name)
res = "\t-Nombre de resultats dans la base: %d\n" % (self.db.results.count())
sources = "\t-Nombre de sources: %d\n" % len(self.db.sources.distinct('url'))
url = "\t-urls en cours de traitement: %d\n" % (self.db.queue.count())
url2 = "\t-urls traitees: %d\n" % (self.db.results.count()+ self.db.log.count())
url3 = "\t-urls erronées: %d\n" % (self.db.log.count())
size = "\t-Size of the database %s: %d MB\n" % (self.db_name, (self.db.command('dbStats', 1024)['storageSize'])/1024/1024.)
result = [title, name, res, sources, url, url2, size]
return "".join(result)
def report(self):
''' Output the currents of database for Email Report'''
res = "<li>Nombre de resultats dans la base: %d</li>" % (self.db.results.count())
sources = "<li>Nombre de sources: %d</li>" % len(self.db.sources.distinct('url'))
url = "<li>urls en cours de traitement: %d\n</li>" % (self.db.queue.count())
url2 = "<li>urls traitees: %d</li>" % (self.db.results.count()+ self.db.log.count())
size = "<li>Size of the database %s: %d MB</li>" % (self.db_name, (self.db.command('dbStats', 1024)['storageSize'])/1024/1024.)
result = [res, sources, url, url2, size]
return "".join(result)
# Define export gephi inside report option
# def create_node(self):
# label = ["url", "outlink", "backlink"]
# urllist = [n for n in self.db.results.distinct("url")]
# # outlist = [u for u in n['outlinks'] for n in self.db.results.find() if u not in outlist]
# # backlist = [u["url"] for u in n['backlinks'] for n in self.db.results.find() if u["url"] not in backlist]
# outlist = []
# backlist = []
# print len(urllist)
# for n in self.db.results.find():
# if n["outlinks"] is None:
# pass
# for o in n["outlinks"]:
# if o is not None:
# outlist.append([o["url"], "backlink"])
# for n in self.db.results.find():
# if n != []:
# for o in n["backlinks"]:
# if o is not None:
# backlist.append([o["url"], "backlink"])
# return
# def export_outlinks(self):
# '''Output url : outlink'''
# print "source; target"
# for n in self.db.results.find():
# for o in n["outlinks"]:
# if o is not None:
# print n['url']+";"+o
# else:
# print n["url"]+";None"
# return
# def export_backlinks(self):
# print "source;target"
# for n in self.db.results.find():
# if n != []:
# for u in n["backlinks"]:
# print n["url"]+";"+u["url"]
# # for o in n["backlinks"]:
# # if o is not None:
# # print n['url']+";"+o
# # else:
# # print n["url"]+";None"
# return
if __name__ == "__main__":
db = Database('RRI')
db.create_node()
|
c24b/mango
|
database.py
|
Python
|
apache-2.0
| 5,004
|
# Django settings for ssssss project.
# NOTE(review): the header names the project "ssssss" but ROOT_URLCONF and
# WSGI_APPLICATION below point at "contactsync" — presumably the project was
# renamed at some point; confirm which name is current.

# NOTE(review): DEBUG must be False in production; it leaks stack traces
# and settings to end users.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

import os

# On Sina App Engine (SAE) the MySQL credentials are provided by the
# platform; elsewhere fall back to a local development database.
if 'SERVER_SOFTWARE' in os.environ:
    from sae.const import (
        MYSQL_HOST, MYSQL_PORT, MYSQL_USER, MYSQL_PASS, MYSQL_DB
    )
else:
    # Make `python manage.py syncdb` works happy!
    # NOTE(review): hard-coded root/root credentials are for local
    # development only — never reuse them on a reachable server.
    MYSQL_HOST = 'localhost'
    MYSQL_PORT = '3306'
    MYSQL_USER = 'root'
    MYSQL_PASS = 'root'
    MYSQL_DB = 'app_pylabs'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': MYSQL_DB,
        'USER': MYSQL_USER,
        'PASSWORD': MYSQL_PASS,
        'HOST': MYSQL_HOST,
        'PORT': MYSQL_PORT,
    }
}

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.4/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): a SECRET_KEY committed to source control is compromised —
# move it to an environment variable and rotate it.
SECRET_KEY = 'pg95(hjk#kjll4p%g)5+%4=$ra_%+9kf7@)8co=7=)%7t$$6%f'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'contactsync.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'contactsync.wsgi.application'

TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), '..', 'templates').replace('\\', '/'),)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
|
huaijiefeng/contact-sync-server
|
contactsync/settings.py
|
Python
|
apache-2.0
| 5,270
|
import os
import subprocess
import sys

# Map the host platform to its shell delete/copy command names.
# BUG FIX: copytext now gets a safe default alongside deltext — previously it
# was only assigned inside the platform branches, so on an unrecognized
# platform the first use of copytext raised a NameError.
deltext = ""
copytext = ""
if sys.platform.startswith("linux"):
    deltext = "rm"
    copytext = "cp"
if sys.platform.startswith("darwin"):
    deltext = "rm"
    copytext = "cp"
if sys.platform.startswith("win"):
    deltext = "del"
    copytext = "copy"
def run_in_shell(cmd):
    """Run cmd through the system shell; raise CalledProcessError on failure."""
    subprocess.check_call(cmd, shell=True)
def replace(namefile, oldtext, newtext):
    """Replace every occurrence of oldtext with newtext in file namefile.

    Fix: file handles are now managed with context managers, so they are
    closed even if read/write raises (the old open/close pairs leaked the
    handle on error).
    """
    with open(namefile, 'r') as f:
        filedata = f.read()
    with open(namefile, 'w') as f:
        f.write(filedata.replace(oldtext, newtext))
def rsaset(tb, tff, nb, base, ml):
    """Instantiate and compile the big/ff/rsa C sources for one RSA size.

    tb and tff name the big-number and ring types, nb is the byte length of
    "big", base the 2^n number base and ml the ring multiplier 2^m.
    """
    bd = tb + "_" + base

    def apply_subs(fname, subs):
        # Apply the placeholder substitutions, in order, to one file.
        for placeholder, value in subs:
            replace(fname, placeholder, value)

    def instantiate(stem, tag, subs):
        # Copy stem.c/.h to stem_tag.c/.h, substitute, compile the C file.
        cfile = stem + "_" + tag + ".c"
        hfile = stem + "_" + tag + ".h"
        run_in_shell(copytext + " " + stem + ".c " + cfile)
        run_in_shell(copytext + " " + stem + ".h " + hfile)
        apply_subs(cfile, subs)
        apply_subs(hfile, subs)
        run_in_shell("gcc -O3 -std=c99 -c " + cfile)

    hdr = "config_big_" + bd + ".h"
    run_in_shell(copytext + " config_big.h " + hdr)
    apply_subs(hdr, [("XXX", bd), ("@NB@", nb), ("@BASE@", base)])

    hdr = "config_ff_" + tff + ".h"
    run_in_shell(copytext + " config_ff.h " + hdr)
    apply_subs(hdr, [("XXX", bd), ("WWW", tff), ("@ML@", ml)])

    instantiate("big", bd, [("XXX", bd)])
    instantiate("ff", tff, [("WWW", tff), ("XXX", bd)])
    instantiate("rsa", tff, [("WWW", tff), ("XXX", bd)])
def curveset(tb, tf, tc, nb, base, nbt, m8, mt, ct, pf, stw, sx, ab, cs):
    """Instantiate and compile every C module needed for one elliptic curve.

    tb/tf/tc name the big/field/curve types, nb and base fix the number
    representation, nbt/m8/mt describe the modulus, ct is the curve type,
    pf the pairing family ("NOT" for non-pairing curves), stw/sx/ab the
    twist type, sign of x and Ate bits, and cs the AES-equivalent security
    level ("128", "192" or "256"). See the driver's option table for the
    concrete values.
    """
    bd = tb + "_" + base

    def apply_subs(fname, subs):
        # Apply the placeholder substitutions, in order, to one file.
        for placeholder, value in subs:
            replace(fname, placeholder, value)

    def instantiate(stem, tag, subs):
        # Copy stem.c/.h to stem_tag.c/.h, substitute, compile the C file.
        cfile = stem + "_" + tag + ".c"
        hfile = stem + "_" + tag + ".h"
        run_in_shell(copytext + " " + stem + ".c " + cfile)
        run_in_shell(copytext + " " + stem + ".h " + hfile)
        apply_subs(cfile, subs)
        apply_subs(hfile, subs)
        run_in_shell("gcc -O3 -std=c99 -c " + cfile)

    # Common substitution sets (order matches the original expansion order).
    x_only = [("XXX", bd)]
    yx = [("YYY", tf), ("XXX", bd)]
    yxz = [("YYY", tf), ("XXX", bd), ("ZZZ", tc)]
    zyx = [("ZZZ", tc), ("YYY", tf), ("XXX", bd)]

    hdr = "config_big_" + bd + ".h"
    run_in_shell(copytext + " config_big.h " + hdr)
    apply_subs(hdr, [("XXX", bd), ("@NB@", nb), ("@BASE@", base)])

    hdr = "config_field_" + tf + ".h"
    run_in_shell(copytext + " config_field.h " + hdr)
    apply_subs(hdr, [("XXX", bd), ("YYY", tf),
                     ("@NBT@", nbt), ("@M8@", m8), ("@MT@", mt)])
    # Field excess: spare bits left in the top word, capped at 30.
    ib = int(base)
    inb = int(nb)
    inbt = int(nbt)
    sh = ib * (1 + ((8 * inb - 1) // ib)) - inbt
    if sh > 30:
        sh = 30
    replace(hdr, "@SH@", str(sh))

    hdr = "config_curve_" + tc + ".h"
    run_in_shell(copytext + " config_curve.h " + hdr)
    apply_subs(hdr, [("XXX", bd), ("YYY", tf), ("ZZZ", tc),
                     ("@CT@", ct), ("@PF@", pf), ("@ST@", stw),
                     ("@SX@", sx), ("@CS@", cs), ("@AB@", ab)])

    instantiate("big", bd, x_only)
    instantiate("fp", tf, yx)
    run_in_shell("gcc -O3 -std=c99 -c rom_field_" + tf + ".c")
    instantiate("ecp", tc, zyx)
    instantiate("ecdh", tc, zyx)
    run_in_shell("gcc -O3 -std=c99 -c rom_curve_" + tc + ".c")

    if pf != "NOT":
        # Pairing-friendly curves additionally need the extension-field
        # towers plus the pairing/MPIN/BLS modules for their security level.
        instantiate("fp2", tf, yx)
        instantiate("fp4", tf, yxz)
        if cs == "128":
            instantiate("fp12", tf, yxz)
            instantiate("ecp2", tc, zyx)
            for stem in ("pair", "mpin", "bls"):
                instantiate(stem, tc, zyx)
        if cs == "192":
            instantiate("fp8", tf, yxz)
            instantiate("fp24", tf, yxz)
            instantiate("ecp4", tc, zyx)
            for stem in ("pair192", "mpin192", "bls192"):
                instantiate(stem, tc, zyx)
        if cs == "256":
            instantiate("fp8", tf, yxz)
            instantiate("ecp8", tc, zyx)
            instantiate("fp16", tf, yxz)
            instantiate("fp48", tf, yxz)
            for stem in ("pair256", "mpin256", "bls256"):
                instantiate(stem, tc, zyx)
replace("arch.h","@WL@","64")
print("Elliptic Curves")
print("1. ED25519")
print("2. C25519")
print("3. NIST256")
print("4. BRAINPOOL")
print("5. ANSSI")
print("6. HIFIVE")
print("7. GOLDILOCKS")
print("8. NIST384")
print("9. C41417")
print("10. NIST521\n")
print("11. NUMS256W")
print("12. NUMS256E")
print("13. NUMS384W")
print("14. NUMS384E")
print("15. NUMS512W")
print("16. NUMS512E")
print("17. SECP256K1\n")
print("Pairing-Friendly Elliptic Curves")
print("18. BN254")
print("19. BN254CX")
print("20. BLS383")
print("21. BLS381")
print("22. FP256BN")
print("23. FP512BN")
print("24. BLS461\n")
print("25. BLS24")
print("26. BLS48\n")
print("RSA")
print("27. RSA2048")
print("28. RSA3072")
print("29. RSA4096")
selection=[]
ptr=0
max=30
curve_selected=False
pfcurve_selected=False
rsa_selected=False
while ptr<max:
x=int(input("Choose a Scheme to support - 0 to finish: "))
if x == 0:
break
# print("Choice= ",x)
already=False
for i in range(0,ptr):
if x==selection[i]:
already=True
break
if already:
continue
selection.append(x)
ptr=ptr+1
# curveset(big,field,curve,big_length_bytes,bits_in_base,modulus_bits,modulus_mod_8,modulus_type,curve_type,pairing_friendly,sextic twist,sign of x,ate bits,curve security)
# for each curve give names for big, field and curve. In many cases the latter two will be the same.
# Typically "big" is the size in bits, always a multiple of 8, "field" describes the modulus, and "curve" is the common name for the elliptic curve
# big_length_bytes is "big" divided by 8
# Next give the number base used for 64 bit architectures, as n where the base is 2^n (note that these must be fixed for the same "big" name, if is ever re-used for another curve)
# modulus_bits is the bit length of the modulus, typically the same or slightly smaller than "big"
# modulus_mod_8 is the remainder when the modulus is divided by 8
# modulus_type is NOT_SPECIAL, or PSEUDO_MERSENNE, or MONTGOMERY_Friendly, or GENERALISED_MERSENNE (supported for GOLDILOCKS only)
# curve_type is WEIERSTRASS, EDWARDS or MONTGOMERY
# pairing_friendly is BN, BLS or NOT (if not pairing friendly)
# if pairing friendly. M or D type twist, and sign of the family parameter x
# ate bits is number of bits in Ate parameter (from romgen program)
# curve security is AES equiavlent, rounded up.
if x==1:
curveset("256","25519","ED25519","32","56","255","5","PSEUDO_MERSENNE","EDWARDS","NOT","","","","128")
curve_selected=True
if x==2:
curveset("256","25519","C25519","32","56","255","5","PSEUDO_MERSENNE","MONTGOMERY","NOT","","","","128")
curve_selected=True
if x==3:
curveset("256","NIST256","NIST256","32","56","256","7","NOT_SPECIAL","WEIERSTRASS","NOT","","","","128")
curve_selected=True
if x==4:
curveset("256","BRAINPOOL","BRAINPOOL","32","56","256","7","NOT_SPECIAL","WEIERSTRASS","NOT","","","","128")
curve_selected=True
if x==5:
curveset("256","ANSSI","ANSSI","32","56","256","7","NOT_SPECIAL","WEIERSTRASS","NOT","","","","128")
curve_selected=True
if x==6:
curveset("336","HIFIVE","HIFIVE","42","60","336","5","PSEUDO_MERSENNE","EDWARDS","NOT","","","","192")
curve_selected=True
if x==7:
curveset("448","GOLDILOCKS","GOLDILOCKS","56","58","448","7","GENERALISED_MERSENNE","EDWARDS","NOT","","","","256")
curve_selected=True
if x==8:
curveset("384","NIST384","NIST384","48","56","384","7","NOT_SPECIAL","WEIERSTRASS","NOT","","","","192")
curve_selected=True
if x==9:
curveset("416","C41417","C41417","52","60","414","7","PSEUDO_MERSENNE","EDWARDS","NOT","","","","256")
curve_selected=True
if x==10:
curveset("528","NIST521","NIST521","66","60","521","7","PSEUDO_MERSENNE","WEIERSTRASS","NOT","","","","256")
curve_selected=True
if x==11:
curveset("256","256PMW","NUMS256W","32","56","256","3","PSEUDO_MERSENNE","WEIERSTRASS","NOT","","","","128")
curve_selected=True
if x==12:
curveset("256","256PME","NUMS256E","32","56","256","3","PSEUDO_MERSENNE","EDWARDS","NOT","","","","128")
curve_selected=True
if x==13:
curveset("384","384PM","NUMS384W","48","56","384","3","PSEUDO_MERSENNE","WEIERSTRASS","NOT","","","","192")
curve_selected=True
if x==14:
curveset("384","384PM","NUMS384E","48","56","384","3","PSEUDO_MERSENNE","EDWARDS","NOT","","","","192")
curve_selected=True
if x==15:
curveset("512","512PM","NUMS512W","64","56","512","7","PSEUDO_MERSENNE","WEIERSTRASS","NOT","","","","256")
curve_selected=True
if x==16:
curveset("512","512PM","NUMS512E","64","56","512","7","PSEUDO_MERSENNE","EDWARDS","NOT","","","","256")
curve_selected=True
if x==17:
# Tail of an interactive curve/RSA selection script (indentation lost in
# extraction). Each `if x==N:` branch configures one named elliptic curve via
# curveset(...) or one RSA modulus size via rsaset(...), then flags which
# support files must be compiled below.
# curveset args (per the header comments elsewhere in this file appear to be):
# modulus bits, field name, curve name, big length bytes, bits-per-limb,
# field bits, curve B/constant, modulus form, curve type, pairing family,
# twist type, sign of x, ate bits, security level — TODO confirm against
# the curveset definition, which is outside this chunk.
curveset("256","SECP256K1","SECP256K1","32","56","256","7","NOT_SPECIAL","WEIERSTRASS","NOT","","","","128")
curve_selected=True
if x==18:
curveset("256","BN254","BN254","32","56","254","3","NOT_SPECIAL","WEIERSTRASS","BN","D_TYPE","NEGATIVEX","66","128")
pfcurve_selected=True
if x==19:
curveset("256","BN254CX","BN254CX","32","56","254","3","NOT_SPECIAL","WEIERSTRASS","BN","D_TYPE","NEGATIVEX","66","128")
pfcurve_selected=True
if x==20:
curveset("384","BLS383","BLS383","48","58","383","3","NOT_SPECIAL","WEIERSTRASS","BLS","M_TYPE","POSITIVEX","65","128")
pfcurve_selected=True
if x==21:
curveset("384","BLS381","BLS381","48","58","381","3","NOT_SPECIAL","WEIERSTRASS","BLS","M_TYPE","NEGATIVEX","65","128")
pfcurve_selected=True
if x==22:
curveset("256","FP256BN","FP256BN","32","56","256","3","NOT_SPECIAL","WEIERSTRASS","BN","M_TYPE","NEGATIVEX","66","128")
pfcurve_selected=True
if x==23:
curveset("512","FP512BN","FP512BN","64","60","512","3","NOT_SPECIAL","WEIERSTRASS","BN","M_TYPE","POSITIVEX","130","128")
pfcurve_selected=True
# https://eprint.iacr.org/2017/334.pdf
if x==24:
curveset("464","BLS461","BLS461","58","60","461","3","NOT_SPECIAL","WEIERSTRASS","BLS","M_TYPE","NEGATIVEX","78","128")
pfcurve_selected=True
if x==25:
curveset("480","BLS24","BLS24","60","56","479","3","NOT_SPECIAL","WEIERSTRASS","BLS","M_TYPE","POSITIVEX","49","192")
pfcurve_selected=True
if x==26:
curveset("560","BLS48","BLS48","70","58","556","3","NOT_SPECIAL","WEIERSTRASS","BLS","M_TYPE","POSITIVEX","32","256")
pfcurve_selected=True
# rsaset(big,ring,big_length_bytes,bits_in_base,multiplier)
# for each choice give distinct names for "big" and "ring".
# Typically "big" is the length in bits of the underlying big number type
# "ring" is the RSA modulus size = "big" times 2^m
# big_length_bytes is "big" divided by 8
# Next give the number base used for 64 bit architecture, as n where the base is 2^n
# multiplier is 2^m (see above)
# There are choices here, different ways of getting the same result, but some faster than others
if x==27:
#256 is slower but may allow reuse of 256-bit BIGs used for elliptic curve
#512 is faster.. but best is 1024
rsaset("1024","2048","128","58","2")
#rsaset("512","2048","64","60","4")
#rsaset("256","2048","32","56","8")
rsa_selected=True
if x==28:
rsaset("384","3072","48","56","8")
rsa_selected=True
if x==29:
#rsaset("256","4096","32","56","16")
rsaset("512","4096","64","60","8")
rsa_selected=True
# Clean up the per-configuration generated sources/headers so the next run
# (or the library build below) only sees the files for the chosen options.
# `deltext` is presumably the platform delete command ("del" or "rm") —
# defined earlier in the file, outside this chunk.
run_in_shell(deltext+" big.*")
run_in_shell(deltext+" fp.*")
run_in_shell(deltext+" ecp.*")
run_in_shell(deltext+" ecdh.*")
run_in_shell(deltext+" ff.*")
run_in_shell(deltext+" rsa.*")
run_in_shell(deltext+" config_big.h")
run_in_shell(deltext+" config_field.h")
run_in_shell(deltext+" config_curve.h")
run_in_shell(deltext+" config_ff.h")
run_in_shell(deltext+" fp2.*")
run_in_shell(deltext+" fp4.*")
run_in_shell(deltext+" fp8.*")
run_in_shell(deltext+" fp16.*")
run_in_shell(deltext+" fp12.*")
run_in_shell(deltext+" fp24.*")
run_in_shell(deltext+" fp48.*")
run_in_shell(deltext+" ecp2.*")
run_in_shell(deltext+" ecp4.*")
run_in_shell(deltext+" ecp8.*")
run_in_shell(deltext+" pair.*")
run_in_shell(deltext+" mpin.*")
run_in_shell(deltext+" bls.*")
run_in_shell(deltext+" pair192.*")
run_in_shell(deltext+" mpin192.*")
run_in_shell(deltext+" bls192.*")
run_in_shell(deltext+" pair256.*")
run_in_shell(deltext+" mpin256.*")
run_in_shell(deltext+" bls256.*")
# create library
# Compile the common support sources, plus the ECDH/RSA/pairing support
# files only if a matching scheme was selected above.
run_in_shell("gcc -O3 -std=c99 -c randapi.c")
if curve_selected :
run_in_shell("gcc -O3 -std=c99 -c ecdh_support.c")
if rsa_selected :
run_in_shell("gcc -O3 -std=c99 -c rsa_support.c")
if pfcurve_selected :
run_in_shell("gcc -O3 -std=c99 -c pbc_support.c")
run_in_shell("gcc -O3 -std=c99 -c hash.c")
run_in_shell("gcc -O3 -std=c99 -c rand.c")
run_in_shell("gcc -O3 -std=c99 -c oct.c")
run_in_shell("gcc -O3 -std=c99 -c aes.c")
run_in_shell("gcc -O3 -std=c99 -c gcm.c")
run_in_shell("gcc -O3 -std=c99 -c newhope.c")
# Archive all objects into amcl.a. Windows `ar` cannot expand *.o, so a
# response file (f.list) of object names is built first.
if sys.platform.startswith("win") :
run_in_shell("for %i in (*.o) do @echo %~nxi >> f.list")
run_in_shell("ar rc amcl.a @f.list")
run_in_shell(deltext+" f.list")
else :
run_in_shell("ar rc amcl.a *.o")
run_in_shell(deltext+" *.o")
#print("Your section was ")
#for i in range(0,ptr):
#    print (selection[i])
|
miracl/amcl
|
version3/c/config64.py
|
Python
|
apache-2.0
| 19,998
|
###################################################
# This is a basic script to carry on a conversation
# with ghost
###################################################
# create service
# Each Runtime.start(name, type) spins up a named MyRobotLab service:
# WebGui front end, speech recognition, ProgramAB chatbot, HTML stripper,
# and a text-to-speech mouth.
ghost = Runtime.start("ghost", "WebGui")
ear = Runtime.start("ear", "WebkitSpeechRecognition")
ghostchat = Runtime.start("ghostchat", "ProgramAB")
htmlfilter = Runtime.start("htmlfilter", "HtmlFilter")
mouth = Runtime.start("mouth", "NaturalReaderSpeech")
# creating the connections and routes
# - I'll need to check on these - might
# need to just "attach" some services together
# Pipeline: ear -> chatbot -> html filter -> mouth.
ear.addTextListener(ghostchat)
ghostchat.addTextListener(htmlfilter)
htmlfilter.addTextListener(mouth)
# start a chatbot session
ghostchat.startSession("ProgramAB/bots", "ghostchat")
voices = mouth.getVoices()
# I've also tried removing this because I got an iteration error for this line
# for voice in voices:
# NaturalReaderSpeech.setVoice("Ryan")
# NOTE(review): the commented-out call uses the class name
# NaturalReaderSpeech rather than the `mouth` instance — if re-enabled it
# should probably be mouth.setVoice("Ryan"); confirm against the
# NaturalReaderSpeech service API.
|
MyRobotLab/pyrobotlab
|
home/Humanoid/ghost.py
|
Python
|
apache-2.0
| 934
|
from __future__ import unicode_literals
import json
import unittest
from mopidy.models import (
Album, Artist, ModelJSONEncoder, Playlist, Ref, SearchResult, TlTrack,
Track, model_json_decoder)
class GenericCopyTest(unittest.TestCase):

    """Verify that model.copy() yields an equal but distinct instance."""

    def compare(self, orig, other):
        # A copy must compare equal without being the same object.
        self.assertEqual(orig, other)
        self.assertNotEqual(id(orig), id(other))

    def test_copying_track(self):
        track = Track()
        self.compare(track, track.copy())

    def test_copying_artist(self):
        artist = Artist()
        self.compare(artist, artist.copy())

    def test_copying_album(self):
        album = Album()
        self.compare(album, album.copy())

    def test_copying_playlist(self):
        playlist = Playlist()
        self.compare(playlist, playlist.copy())

    def test_copying_track_with_basic_values(self):
        track = Track(name='foo', uri='bar')
        copy = track.copy(name='baz')
        self.assertEqual('baz', copy.name)
        self.assertEqual('bar', copy.uri)

    def test_copying_track_with_missing_values(self):
        track = Track(uri='bar')
        copy = track.copy(name='baz')
        self.assertEqual('baz', copy.name)
        self.assertEqual('bar', copy.uri)

    def test_copying_track_with_private_internal_value(self):
        artist1 = Artist(name='foo')
        artist2 = Artist(name='bar')
        track = Track(artists=[artist1])
        copy = track.copy(artists=[artist2])
        self.assertIn(artist2, copy.artists)

    def test_copying_track_with_invalid_key(self):
        with self.assertRaises(TypeError):
            Track().copy(invalid_key=True)

    def test_copying_track_to_remove(self):
        track = Track(name='foo').copy(name=None)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(track.__dict__, Track().__dict__)
class RefTest(unittest.TestCase):

    """Tests for the Ref model: immutability, serialization, constructors."""

    def test_uri(self):
        uri = 'an_uri'
        ref = Ref(uri=uri)
        self.assertEqual(ref.uri, uri)
        with self.assertRaises(AttributeError):
            ref.uri = None

    def test_name(self):
        name = 'a name'
        ref = Ref(name=name)
        self.assertEqual(ref.name, name)
        with self.assertRaises(AttributeError):
            ref.name = None

    def test_invalid_kwarg(self):
        with self.assertRaises(TypeError):
            SearchResult(foo='baz')

    def test_repr_without_results(self):
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(
            "Ref(name=u'foo', type=u'artist', uri=u'uri')",
            repr(Ref(uri='uri', name='foo', type='artist')))

    def test_serialize_without_results(self):
        self.assertDictEqual(
            {'__model__': 'Ref', 'uri': 'uri'},
            Ref(uri='uri').serialize())

    def test_to_json_and_back(self):
        ref1 = Ref(uri='uri')
        serialized = json.dumps(ref1, cls=ModelJSONEncoder)
        ref2 = json.loads(serialized, object_hook=model_json_decoder)
        self.assertEqual(ref1, ref2)

    def test_type_constants(self):
        self.assertEqual(Ref.ALBUM, 'album')
        self.assertEqual(Ref.ARTIST, 'artist')
        self.assertEqual(Ref.DIRECTORY, 'directory')
        self.assertEqual(Ref.PLAYLIST, 'playlist')
        self.assertEqual(Ref.TRACK, 'track')

    def test_album_constructor(self):
        ref = Ref.album(uri='foo', name='bar')
        self.assertEqual(ref.uri, 'foo')
        self.assertEqual(ref.name, 'bar')
        self.assertEqual(ref.type, Ref.ALBUM)

    def test_artist_constructor(self):
        ref = Ref.artist(uri='foo', name='bar')
        self.assertEqual(ref.uri, 'foo')
        self.assertEqual(ref.name, 'bar')
        self.assertEqual(ref.type, Ref.ARTIST)

    def test_directory_constructor(self):
        ref = Ref.directory(uri='foo', name='bar')
        self.assertEqual(ref.uri, 'foo')
        self.assertEqual(ref.name, 'bar')
        self.assertEqual(ref.type, Ref.DIRECTORY)

    def test_playlist_constructor(self):
        ref = Ref.playlist(uri='foo', name='bar')
        self.assertEqual(ref.uri, 'foo')
        self.assertEqual(ref.name, 'bar')
        self.assertEqual(ref.type, Ref.PLAYLIST)

    def test_track_constructor(self):
        ref = Ref.track(uri='foo', name='bar')
        self.assertEqual(ref.uri, 'foo')
        self.assertEqual(ref.name, 'bar')
        self.assertEqual(ref.type, Ref.TRACK)
class ArtistTest(unittest.TestCase):

    """Tests for the Artist model: fields, equality/hash, JSON round-trip."""

    def test_uri(self):
        uri = 'an_uri'
        artist = Artist(uri=uri)
        self.assertEqual(artist.uri, uri)
        with self.assertRaises(AttributeError):
            artist.uri = None

    def test_name(self):
        name = 'a name'
        artist = Artist(name=name)
        self.assertEqual(artist.name, name)
        with self.assertRaises(AttributeError):
            artist.name = None

    def test_musicbrainz_id(self):
        mb_id = 'mb-id'
        artist = Artist(musicbrainz_id=mb_id)
        self.assertEqual(artist.musicbrainz_id, mb_id)
        with self.assertRaises(AttributeError):
            artist.musicbrainz_id = None

    def test_invalid_kwarg(self):
        with self.assertRaises(TypeError):
            Artist(foo='baz')

    def test_invalid_kwarg_with_name_matching_method(self):
        with self.assertRaises(TypeError):
            Artist(copy='baz')
        with self.assertRaises(TypeError):
            Artist(serialize='baz')

    def test_repr(self):
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(
            "Artist(name=u'name', uri=u'uri')",
            repr(Artist(uri='uri', name='name')))

    def test_serialize(self):
        self.assertDictEqual(
            {'__model__': 'Artist', 'uri': 'uri', 'name': 'name'},
            Artist(uri='uri', name='name').serialize())

    def test_serialize_falsy_values(self):
        self.assertDictEqual(
            {'__model__': 'Artist', 'uri': '', 'name': ''},
            Artist(uri='', name='').serialize())

    def test_to_json_and_back(self):
        artist1 = Artist(uri='uri', name='name')
        serialized = json.dumps(artist1, cls=ModelJSONEncoder)
        artist2 = json.loads(serialized, object_hook=model_json_decoder)
        self.assertEqual(artist1, artist2)

    def test_to_json_and_back_with_unknown_field(self):
        artist = Artist(uri='uri', name='name').serialize()
        artist['foo'] = 'foo'
        serialized = json.dumps(artist)
        with self.assertRaises(TypeError):
            json.loads(serialized, object_hook=model_json_decoder)

    def test_to_json_and_back_with_field_matching_method(self):
        artist = Artist(uri='uri', name='name').serialize()
        artist['copy'] = 'foo'
        serialized = json.dumps(artist)
        with self.assertRaises(TypeError):
            json.loads(serialized, object_hook=model_json_decoder)

    def test_to_json_and_back_with_field_matching_internal_field(self):
        artist = Artist(uri='uri', name='name').serialize()
        artist['__mro__'] = 'foo'
        serialized = json.dumps(artist)
        with self.assertRaises(TypeError):
            json.loads(serialized, object_hook=model_json_decoder)

    def test_eq_name(self):
        artist1 = Artist(name='name')
        artist2 = Artist(name='name')
        self.assertEqual(artist1, artist2)
        self.assertEqual(hash(artist1), hash(artist2))

    def test_eq_uri(self):
        artist1 = Artist(uri='uri')
        artist2 = Artist(uri='uri')
        self.assertEqual(artist1, artist2)
        self.assertEqual(hash(artist1), hash(artist2))

    def test_eq_musibrainz_id(self):
        artist1 = Artist(musicbrainz_id='id')
        artist2 = Artist(musicbrainz_id='id')
        self.assertEqual(artist1, artist2)
        self.assertEqual(hash(artist1), hash(artist2))

    def test_eq(self):
        artist1 = Artist(uri='uri', name='name', musicbrainz_id='id')
        artist2 = Artist(uri='uri', name='name', musicbrainz_id='id')
        self.assertEqual(artist1, artist2)
        self.assertEqual(hash(artist1), hash(artist2))

    def test_eq_none(self):
        self.assertNotEqual(Artist(), None)

    def test_eq_other(self):
        self.assertNotEqual(Artist(), 'other')

    def test_ne_name(self):
        artist1 = Artist(name='name1')
        artist2 = Artist(name='name2')
        self.assertNotEqual(artist1, artist2)
        self.assertNotEqual(hash(artist1), hash(artist2))

    def test_ne_uri(self):
        artist1 = Artist(uri='uri1')
        artist2 = Artist(uri='uri2')
        self.assertNotEqual(artist1, artist2)
        self.assertNotEqual(hash(artist1), hash(artist2))

    def test_ne_musicbrainz_id(self):
        artist1 = Artist(musicbrainz_id='id1')
        artist2 = Artist(musicbrainz_id='id2')
        self.assertNotEqual(artist1, artist2)
        self.assertNotEqual(hash(artist1), hash(artist2))

    def test_ne(self):
        artist1 = Artist(uri='uri1', name='name1', musicbrainz_id='id1')
        artist2 = Artist(uri='uri2', name='name2', musicbrainz_id='id2')
        self.assertNotEqual(artist1, artist2)
        self.assertNotEqual(hash(artist1), hash(artist2))
class AlbumTest(unittest.TestCase):

    """Tests for the Album model: fields, serialization, equality/hash."""

    def test_uri(self):
        uri = 'an_uri'
        album = Album(uri=uri)
        self.assertEqual(album.uri, uri)
        with self.assertRaises(AttributeError):
            album.uri = None

    def test_name(self):
        name = 'a name'
        album = Album(name=name)
        self.assertEqual(album.name, name)
        with self.assertRaises(AttributeError):
            album.name = None

    def test_artists(self):
        artist = Artist()
        album = Album(artists=[artist])
        self.assertIn(artist, album.artists)
        with self.assertRaises(AttributeError):
            album.artists = None

    def test_artists_none(self):
        self.assertEqual(set(), Album(artists=None).artists)

    def test_num_tracks(self):
        num_tracks = 11
        album = Album(num_tracks=num_tracks)
        self.assertEqual(album.num_tracks, num_tracks)
        with self.assertRaises(AttributeError):
            album.num_tracks = None

    def test_num_discs(self):
        num_discs = 2
        album = Album(num_discs=num_discs)
        self.assertEqual(album.num_discs, num_discs)
        with self.assertRaises(AttributeError):
            album.num_discs = None

    def test_date(self):
        date = '1977-01-01'
        album = Album(date=date)
        self.assertEqual(album.date, date)
        with self.assertRaises(AttributeError):
            album.date = None

    def test_musicbrainz_id(self):
        mb_id = 'mb-id'
        album = Album(musicbrainz_id=mb_id)
        self.assertEqual(album.musicbrainz_id, mb_id)
        with self.assertRaises(AttributeError):
            album.musicbrainz_id = None

    def test_images(self):
        image = 'data:foobar'
        album = Album(images=[image])
        self.assertIn(image, album.images)
        with self.assertRaises(AttributeError):
            album.images = None

    def test_images_none(self):
        self.assertEqual(set(), Album(images=None).images)

    def test_invalid_kwarg(self):
        with self.assertRaises(TypeError):
            Album(foo='baz')

    def test_repr_without_artists(self):
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(
            "Album(name=u'name', uri=u'uri')",
            repr(Album(uri='uri', name='name')))

    def test_repr_with_artists(self):
        self.assertEqual(
            "Album(artists=[Artist(name=u'foo')], name=u'name', uri=u'uri')",
            repr(Album(uri='uri', name='name', artists=[Artist(name='foo')])))

    def test_serialize_without_artists(self):
        self.assertDictEqual(
            {'__model__': 'Album', 'uri': 'uri', 'name': 'name'},
            Album(uri='uri', name='name').serialize())

    def test_serialize_with_artists(self):
        artist = Artist(name='foo')
        self.assertDictEqual(
            {'__model__': 'Album', 'uri': 'uri', 'name': 'name',
             'artists': [artist.serialize()]},
            Album(uri='uri', name='name', artists=[artist]).serialize())

    def test_serialize_with_images(self):
        image = 'data:foobar'
        self.assertDictEqual(
            {'__model__': 'Album', 'uri': 'uri', 'name': 'name',
             'images': [image]},
            Album(uri='uri', name='name', images=[image]).serialize())

    def test_to_json_and_back(self):
        album1 = Album(uri='uri', name='name', artists=[Artist(name='foo')])
        serialized = json.dumps(album1, cls=ModelJSONEncoder)
        album2 = json.loads(serialized, object_hook=model_json_decoder)
        self.assertEqual(album1, album2)

    def test_eq_name(self):
        album1 = Album(name='name')
        album2 = Album(name='name')
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq_uri(self):
        album1 = Album(uri='uri')
        album2 = Album(uri='uri')
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq_artists(self):
        artists = [Artist()]
        album1 = Album(artists=artists)
        album2 = Album(artists=artists)
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq_artists_order(self):
        # Artist order must not matter for album equality.
        artist1 = Artist(name='name1')
        artist2 = Artist(name='name2')
        album1 = Album(artists=[artist1, artist2])
        album2 = Album(artists=[artist2, artist1])
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq_num_tracks(self):
        album1 = Album(num_tracks=2)
        album2 = Album(num_tracks=2)
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq_date(self):
        date = '1977-01-01'
        album1 = Album(date=date)
        album2 = Album(date=date)
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq_musibrainz_id(self):
        album1 = Album(musicbrainz_id='id')
        album2 = Album(musicbrainz_id='id')
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq(self):
        artists = [Artist()]
        album1 = Album(
            name='name', uri='uri', artists=artists, num_tracks=2,
            musicbrainz_id='id')
        album2 = Album(
            name='name', uri='uri', artists=artists, num_tracks=2,
            musicbrainz_id='id')
        self.assertEqual(album1, album2)
        self.assertEqual(hash(album1), hash(album2))

    def test_eq_none(self):
        self.assertNotEqual(Album(), None)

    def test_eq_other(self):
        self.assertNotEqual(Album(), 'other')

    def test_ne_name(self):
        album1 = Album(name='name1')
        album2 = Album(name='name2')
        self.assertNotEqual(album1, album2)
        self.assertNotEqual(hash(album1), hash(album2))

    def test_ne_uri(self):
        album1 = Album(uri='uri1')
        album2 = Album(uri='uri2')
        self.assertNotEqual(album1, album2)
        self.assertNotEqual(hash(album1), hash(album2))

    def test_ne_artists(self):
        album1 = Album(artists=[Artist(name='name1')])
        album2 = Album(artists=[Artist(name='name2')])
        self.assertNotEqual(album1, album2)
        self.assertNotEqual(hash(album1), hash(album2))

    def test_ne_num_tracks(self):
        album1 = Album(num_tracks=1)
        album2 = Album(num_tracks=2)
        self.assertNotEqual(album1, album2)
        self.assertNotEqual(hash(album1), hash(album2))

    def test_ne_date(self):
        album1 = Album(date='1977-01-01')
        album2 = Album(date='1977-01-02')
        self.assertNotEqual(album1, album2)
        self.assertNotEqual(hash(album1), hash(album2))

    def test_ne_musicbrainz_id(self):
        album1 = Album(musicbrainz_id='id1')
        album2 = Album(musicbrainz_id='id2')
        self.assertNotEqual(album1, album2)
        self.assertNotEqual(hash(album1), hash(album2))

    def test_ne(self):
        album1 = Album(
            name='name1', uri='uri1', artists=[Artist(name='name1')],
            num_tracks=1, musicbrainz_id='id1')
        album2 = Album(
            name='name2', uri='uri2', artists=[Artist(name='name2')],
            num_tracks=2, musicbrainz_id='id2')
        self.assertNotEqual(album1, album2)
        self.assertNotEqual(hash(album1), hash(album2))
class TrackTest(unittest.TestCase):

    """Tests for the Track model: fields, copy semantics, equality/hash."""

    def test_uri(self):
        uri = 'an_uri'
        track = Track(uri=uri)
        self.assertEqual(track.uri, uri)
        with self.assertRaises(AttributeError):
            track.uri = None

    def test_name(self):
        name = 'a name'
        track = Track(name=name)
        self.assertEqual(track.name, name)
        with self.assertRaises(AttributeError):
            track.name = None

    def test_artists(self):
        artists = [Artist(name='name1'), Artist(name='name2')]
        track = Track(artists=artists)
        self.assertEqual(set(track.artists), set(artists))
        with self.assertRaises(AttributeError):
            track.artists = None

    def test_artists_none(self):
        self.assertEqual(set(), Track(artists=None).artists)

    def test_composers(self):
        artists = [Artist(name='name1'), Artist(name='name2')]
        track = Track(composers=artists)
        self.assertEqual(set(track.composers), set(artists))
        with self.assertRaises(AttributeError):
            track.composers = None

    def test_composers_none(self):
        self.assertEqual(set(), Track(composers=None).composers)

    def test_performers(self):
        artists = [Artist(name='name1'), Artist(name='name2')]
        track = Track(performers=artists)
        self.assertEqual(set(track.performers), set(artists))
        with self.assertRaises(AttributeError):
            track.performers = None

    def test_performers_none(self):
        self.assertEqual(set(), Track(performers=None).performers)

    def test_album(self):
        album = Album()
        track = Track(album=album)
        self.assertEqual(track.album, album)
        with self.assertRaises(AttributeError):
            track.album = None

    def test_track_no(self):
        track_no = 7
        track = Track(track_no=track_no)
        self.assertEqual(track.track_no, track_no)
        with self.assertRaises(AttributeError):
            track.track_no = None

    def test_disc_no(self):
        disc_no = 2
        track = Track(disc_no=disc_no)
        self.assertEqual(track.disc_no, disc_no)
        with self.assertRaises(AttributeError):
            track.disc_no = None

    def test_date(self):
        date = '1977-01-01'
        track = Track(date=date)
        self.assertEqual(track.date, date)
        with self.assertRaises(AttributeError):
            track.date = None

    def test_length(self):
        length = 137000
        track = Track(length=length)
        self.assertEqual(track.length, length)
        with self.assertRaises(AttributeError):
            track.length = None

    def test_bitrate(self):
        bitrate = 160
        track = Track(bitrate=bitrate)
        self.assertEqual(track.bitrate, bitrate)
        with self.assertRaises(AttributeError):
            track.bitrate = None

    def test_musicbrainz_id(self):
        mb_id = 'mb-id'
        track = Track(musicbrainz_id=mb_id)
        self.assertEqual(track.musicbrainz_id, mb_id)
        with self.assertRaises(AttributeError):
            track.musicbrainz_id = None

    def test_invalid_kwarg(self):
        with self.assertRaises(TypeError):
            Track(foo='baz')

    def test_repr_without_artists(self):
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(
            "Track(name=u'name', uri=u'uri')",
            repr(Track(uri='uri', name='name')))

    def test_repr_with_artists(self):
        self.assertEqual(
            "Track(artists=[Artist(name=u'foo')], name=u'name', uri=u'uri')",
            repr(Track(uri='uri', name='name', artists=[Artist(name='foo')])))

    def test_serialize_without_artists(self):
        self.assertDictEqual(
            {'__model__': 'Track', 'uri': 'uri', 'name': 'name'},
            Track(uri='uri', name='name').serialize())

    def test_serialize_with_artists(self):
        artist = Artist(name='foo')
        self.assertDictEqual(
            {'__model__': 'Track', 'uri': 'uri', 'name': 'name',
             'artists': [artist.serialize()]},
            Track(uri='uri', name='name', artists=[artist]).serialize())

    def test_serialize_with_album(self):
        album = Album(name='foo')
        self.assertDictEqual(
            {'__model__': 'Track', 'uri': 'uri', 'name': 'name',
             'album': album.serialize()},
            Track(uri='uri', name='name', album=album).serialize())

    def test_to_json_and_back(self):
        track1 = Track(
            uri='uri', name='name', album=Album(name='foo'),
            artists=[Artist(name='foo')])
        serialized = json.dumps(track1, cls=ModelJSONEncoder)
        track2 = json.loads(serialized, object_hook=model_json_decoder)
        self.assertEqual(track1, track2)

    def test_eq_uri(self):
        track1 = Track(uri='uri1')
        track2 = Track(uri='uri1')
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_name(self):
        track1 = Track(name='name1')
        track2 = Track(name='name1')
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_artists(self):
        artists = [Artist()]
        track1 = Track(artists=artists)
        track2 = Track(artists=artists)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_artists_order(self):
        # Artist order must not matter for track equality.
        artist1 = Artist(name='name1')
        artist2 = Artist(name='name2')
        track1 = Track(artists=[artist1, artist2])
        track2 = Track(artists=[artist2, artist1])
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_album(self):
        album = Album()
        track1 = Track(album=album)
        track2 = Track(album=album)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_track_no(self):
        track1 = Track(track_no=1)
        track2 = Track(track_no=1)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_date(self):
        date = '1977-01-01'
        track1 = Track(date=date)
        track2 = Track(date=date)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_length(self):
        track1 = Track(length=100)
        track2 = Track(length=100)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_bitrate(self):
        track1 = Track(bitrate=100)
        track2 = Track(bitrate=100)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_musibrainz_id(self):
        track1 = Track(musicbrainz_id='id')
        track2 = Track(musicbrainz_id='id')
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq(self):
        date = '1977-01-01'
        artists = [Artist()]
        album = Album()
        track1 = Track(
            uri='uri', name='name', artists=artists, album=album, track_no=1,
            date=date, length=100, bitrate=100, musicbrainz_id='id')
        track2 = Track(
            uri='uri', name='name', artists=artists, album=album, track_no=1,
            date=date, length=100, bitrate=100, musicbrainz_id='id')
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_eq_none(self):
        self.assertNotEqual(Track(), None)

    def test_eq_other(self):
        self.assertNotEqual(Track(), 'other')

    def test_ne_uri(self):
        track1 = Track(uri='uri1')
        track2 = Track(uri='uri2')
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_name(self):
        track1 = Track(name='name1')
        track2 = Track(name='name2')
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_artists(self):
        track1 = Track(artists=[Artist(name='name1')])
        track2 = Track(artists=[Artist(name='name2')])
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_album(self):
        track1 = Track(album=Album(name='name1'))
        track2 = Track(album=Album(name='name2'))
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_track_no(self):
        track1 = Track(track_no=1)
        track2 = Track(track_no=2)
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_date(self):
        track1 = Track(date='1977-01-01')
        track2 = Track(date='1977-01-02')
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_length(self):
        track1 = Track(length=100)
        track2 = Track(length=200)
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_bitrate(self):
        track1 = Track(bitrate=100)
        track2 = Track(bitrate=200)
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne_musicbrainz_id(self):
        track1 = Track(musicbrainz_id='id1')
        track2 = Track(musicbrainz_id='id2')
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ne(self):
        track1 = Track(
            uri='uri1', name='name1', artists=[Artist(name='name1')],
            album=Album(name='name1'), track_no=1, date='1977-01-01',
            length=100, bitrate=100, musicbrainz_id='id1')
        track2 = Track(
            uri='uri2', name='name2', artists=[Artist(name='name2')],
            album=Album(name='name2'), track_no=2, date='1977-01-02',
            length=200, bitrate=200, musicbrainz_id='id2')
        self.assertNotEqual(track1, track2)
        self.assertNotEqual(hash(track1), hash(track2))

    def test_ignores_values_with_default_value_none(self):
        track1 = Track(name='name1')
        track2 = Track(name='name1', album=None)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))

    def test_copy_can_reset_to_default_value(self):
        track1 = Track(name='name1')
        track2 = Track(name='name1', album=Album()).copy(album=None)
        self.assertEqual(track1, track2)
        self.assertEqual(hash(track1), hash(track2))
class TlTrackTest(unittest.TestCase):

    """Tests for the TlTrack model: tlid/track fields, iteration, JSON."""

    def test_tlid(self):
        tlid = 123
        tl_track = TlTrack(tlid=tlid)
        self.assertEqual(tl_track.tlid, tlid)
        with self.assertRaises(AttributeError):
            tl_track.tlid = None

    def test_track(self):
        track = Track()
        tl_track = TlTrack(track=track)
        self.assertEqual(tl_track.track, track)
        with self.assertRaises(AttributeError):
            tl_track.track = None

    def test_invalid_kwarg(self):
        with self.assertRaises(TypeError):
            TlTrack(foo='baz')

    def test_positional_args(self):
        tlid = 123
        track = Track()
        tl_track = TlTrack(tlid, track)
        self.assertEqual(tl_track.tlid, tlid)
        self.assertEqual(tl_track.track, track)

    def test_iteration(self):
        # TlTrack supports tuple-style unpacking into (tlid, track).
        tlid = 123
        track = Track()
        tl_track = TlTrack(tlid, track)
        (tlid2, track2) = tl_track
        self.assertEqual(tlid2, tlid)
        self.assertEqual(track2, track)

    def test_repr(self):
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(
            "TlTrack(tlid=123, track=Track(uri=u'uri'))",
            repr(TlTrack(tlid=123, track=Track(uri='uri'))))

    def test_serialize(self):
        track = Track(uri='uri', name='name')
        self.assertDictEqual(
            {'__model__': 'TlTrack', 'tlid': 123, 'track': track.serialize()},
            TlTrack(tlid=123, track=track).serialize())

    def test_to_json_and_back(self):
        tl_track1 = TlTrack(tlid=123, track=Track(uri='uri', name='name'))
        serialized = json.dumps(tl_track1, cls=ModelJSONEncoder)
        tl_track2 = json.loads(serialized, object_hook=model_json_decoder)
        self.assertEqual(tl_track1, tl_track2)

    def test_eq(self):
        tlid = 123
        track = Track()
        tl_track1 = TlTrack(tlid=tlid, track=track)
        tl_track2 = TlTrack(tlid=tlid, track=track)
        self.assertEqual(tl_track1, tl_track2)
        self.assertEqual(hash(tl_track1), hash(tl_track2))

    def test_eq_none(self):
        self.assertNotEqual(TlTrack(), None)

    def test_eq_other(self):
        self.assertNotEqual(TlTrack(), 'other')

    def test_ne_tlid(self):
        tl_track1 = TlTrack(tlid=123)
        tl_track2 = TlTrack(tlid=321)
        self.assertNotEqual(tl_track1, tl_track2)
        self.assertNotEqual(hash(tl_track1), hash(tl_track2))

    def test_ne_track(self):
        tl_track1 = TlTrack(track=Track(uri='a'))
        tl_track2 = TlTrack(track=Track(uri='b'))
        self.assertNotEqual(tl_track1, tl_track2)
        self.assertNotEqual(hash(tl_track1), hash(tl_track2))
class PlaylistTest(unittest.TestCase):

    """Tests for the Playlist model: fields, copy, equality/hash, JSON."""

    def test_uri(self):
        uri = 'an_uri'
        playlist = Playlist(uri=uri)
        self.assertEqual(playlist.uri, uri)
        with self.assertRaises(AttributeError):
            playlist.uri = None

    def test_name(self):
        name = 'a name'
        playlist = Playlist(name=name)
        self.assertEqual(playlist.name, name)
        with self.assertRaises(AttributeError):
            playlist.name = None

    def test_tracks(self):
        tracks = [Track(), Track(), Track()]
        playlist = Playlist(tracks=tracks)
        self.assertEqual(list(playlist.tracks), tracks)
        with self.assertRaises(AttributeError):
            playlist.tracks = None

    def test_length(self):
        tracks = [Track(), Track(), Track()]
        playlist = Playlist(tracks=tracks)
        self.assertEqual(playlist.length, 3)

    def test_last_modified(self):
        last_modified = 1390942873000
        playlist = Playlist(last_modified=last_modified)
        self.assertEqual(playlist.last_modified, last_modified)
        with self.assertRaises(AttributeError):
            playlist.last_modified = None

    def test_with_new_uri(self):
        tracks = [Track()]
        last_modified = 1390942873000
        playlist = Playlist(
            uri='an uri', name='a name', tracks=tracks,
            last_modified=last_modified)
        new_playlist = playlist.copy(uri='another uri')
        self.assertEqual(new_playlist.uri, 'another uri')
        self.assertEqual(new_playlist.name, 'a name')
        self.assertEqual(list(new_playlist.tracks), tracks)
        self.assertEqual(new_playlist.last_modified, last_modified)

    def test_with_new_name(self):
        tracks = [Track()]
        last_modified = 1390942873000
        playlist = Playlist(
            uri='an uri', name='a name', tracks=tracks,
            last_modified=last_modified)
        new_playlist = playlist.copy(name='another name')
        self.assertEqual(new_playlist.uri, 'an uri')
        self.assertEqual(new_playlist.name, 'another name')
        self.assertEqual(list(new_playlist.tracks), tracks)
        self.assertEqual(new_playlist.last_modified, last_modified)

    def test_with_new_tracks(self):
        tracks = [Track()]
        last_modified = 1390942873000
        playlist = Playlist(
            uri='an uri', name='a name', tracks=tracks,
            last_modified=last_modified)
        new_tracks = [Track(), Track()]
        new_playlist = playlist.copy(tracks=new_tracks)
        self.assertEqual(new_playlist.uri, 'an uri')
        self.assertEqual(new_playlist.name, 'a name')
        self.assertEqual(list(new_playlist.tracks), new_tracks)
        self.assertEqual(new_playlist.last_modified, last_modified)

    def test_with_new_last_modified(self):
        tracks = [Track()]
        last_modified = 1390942873000
        new_last_modified = last_modified + 1000
        playlist = Playlist(
            uri='an uri', name='a name', tracks=tracks,
            last_modified=last_modified)
        new_playlist = playlist.copy(last_modified=new_last_modified)
        self.assertEqual(new_playlist.uri, 'an uri')
        self.assertEqual(new_playlist.name, 'a name')
        self.assertEqual(list(new_playlist.tracks), tracks)
        self.assertEqual(new_playlist.last_modified, new_last_modified)

    def test_invalid_kwarg(self):
        with self.assertRaises(TypeError):
            Playlist(foo='baz')

    def test_repr_without_tracks(self):
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(
            "Playlist(name=u'name', uri=u'uri')",
            repr(Playlist(uri='uri', name='name')))

    def test_repr_with_tracks(self):
        self.assertEqual(
            "Playlist(name=u'name', tracks=[Track(name=u'foo')], uri=u'uri')",
            repr(Playlist(uri='uri', name='name', tracks=[Track(name='foo')])))

    def test_serialize_without_tracks(self):
        self.assertDictEqual(
            {'__model__': 'Playlist', 'uri': 'uri', 'name': 'name'},
            Playlist(uri='uri', name='name').serialize())

    def test_serialize_with_tracks(self):
        track = Track(name='foo')
        self.assertDictEqual(
            {'__model__': 'Playlist', 'uri': 'uri', 'name': 'name',
             'tracks': [track.serialize()]},
            Playlist(uri='uri', name='name', tracks=[track]).serialize())

    def test_to_json_and_back(self):
        playlist1 = Playlist(uri='uri', name='name')
        serialized = json.dumps(playlist1, cls=ModelJSONEncoder)
        playlist2 = json.loads(serialized, object_hook=model_json_decoder)
        self.assertEqual(playlist1, playlist2)

    def test_eq_name(self):
        playlist1 = Playlist(name='name')
        playlist2 = Playlist(name='name')
        self.assertEqual(playlist1, playlist2)
        self.assertEqual(hash(playlist1), hash(playlist2))

    def test_eq_uri(self):
        playlist1 = Playlist(uri='uri')
        playlist2 = Playlist(uri='uri')
        self.assertEqual(playlist1, playlist2)
        self.assertEqual(hash(playlist1), hash(playlist2))

    def test_eq_tracks(self):
        tracks = [Track()]
        playlist1 = Playlist(tracks=tracks)
        playlist2 = Playlist(tracks=tracks)
        self.assertEqual(playlist1, playlist2)
        self.assertEqual(hash(playlist1), hash(playlist2))

    def test_eq_last_modified(self):
        playlist1 = Playlist(last_modified=1)
        playlist2 = Playlist(last_modified=1)
        self.assertEqual(playlist1, playlist2)
        self.assertEqual(hash(playlist1), hash(playlist2))

    def test_eq(self):
        tracks = [Track()]
        playlist1 = Playlist(
            uri='uri', name='name', tracks=tracks, last_modified=1)
        playlist2 = Playlist(
            uri='uri', name='name', tracks=tracks, last_modified=1)
        self.assertEqual(playlist1, playlist2)
        self.assertEqual(hash(playlist1), hash(playlist2))

    def test_eq_none(self):
        self.assertNotEqual(Playlist(), None)

    def test_eq_other(self):
        self.assertNotEqual(Playlist(), 'other')

    def test_ne_name(self):
        playlist1 = Playlist(name='name1')
        playlist2 = Playlist(name='name2')
        self.assertNotEqual(playlist1, playlist2)
        self.assertNotEqual(hash(playlist1), hash(playlist2))

    def test_ne_uri(self):
        playlist1 = Playlist(uri='uri1')
        playlist2 = Playlist(uri='uri2')
        self.assertNotEqual(playlist1, playlist2)
        self.assertNotEqual(hash(playlist1), hash(playlist2))

    def test_ne_tracks(self):
        playlist1 = Playlist(tracks=[Track(uri='uri1')])
        playlist2 = Playlist(tracks=[Track(uri='uri2')])
        self.assertNotEqual(playlist1, playlist2)
        self.assertNotEqual(hash(playlist1), hash(playlist2))

    def test_ne_last_modified(self):
        playlist1 = Playlist(last_modified=1)
        playlist2 = Playlist(last_modified=2)
        self.assertNotEqual(playlist1, playlist2)
        self.assertNotEqual(hash(playlist1), hash(playlist2))

    def test_ne(self):
        playlist1 = Playlist(
            uri='uri1', name='name1', tracks=[Track(uri='uri1')],
            last_modified=1)
        playlist2 = Playlist(
            uri='uri2', name='name2', tracks=[Track(uri='uri2')],
            last_modified=2)
        self.assertNotEqual(playlist1, playlist2)
        self.assertNotEqual(hash(playlist1), hash(playlist2))
class SearchResultTest(unittest.TestCase):

    """Tests for the immutable SearchResult model."""

    def test_uri(self):
        """uri is stored and read-only."""
        uri = 'an_uri'
        result = SearchResult(uri=uri)
        self.assertEqual(result.uri, uri)
        with self.assertRaises(AttributeError):
            result.uri = None

    def test_tracks(self):
        """tracks are stored and read-only."""
        tracks = [Track(), Track(), Track()]
        result = SearchResult(tracks=tracks)
        self.assertEqual(list(result.tracks), tracks)
        with self.assertRaises(AttributeError):
            result.tracks = None

    def test_artists(self):
        """artists are stored and read-only."""
        artists = [Artist(), Artist(), Artist()]
        result = SearchResult(artists=artists)
        self.assertEqual(list(result.artists), artists)
        with self.assertRaises(AttributeError):
            result.artists = None

    def test_albums(self):
        """albums are stored and read-only."""
        albums = [Album(), Album(), Album()]
        result = SearchResult(albums=albums)
        self.assertEqual(list(result.albums), albums)
        with self.assertRaises(AttributeError):
            result.albums = None

    def test_invalid_kwarg(self):
        """Unknown constructor kwargs are rejected."""
        with self.assertRaises(TypeError):
            SearchResult(foo='baz')

    def test_repr_without_results(self):
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(
            "SearchResult(uri=u'uri')",
            repr(SearchResult(uri='uri')))

    def test_serialize_without_results(self):
        self.assertDictEqual(
            {'__model__': 'SearchResult', 'uri': 'uri'},
            SearchResult(uri='uri').serialize())

    def test_to_json_and_back(self):
        """Round-tripping through JSON must preserve equality."""
        result1 = SearchResult(uri='uri')
        serialized = json.dumps(result1, cls=ModelJSONEncoder)
        result2 = json.loads(serialized, object_hook=model_json_decoder)
        self.assertEqual(result1, result2)
|
woutervanwijk/mopidy
|
tests/test_models.py
|
Python
|
apache-2.0
| 38,697
|
from oauth2client.service_account import ServiceAccountCredentials
from googleapiclient.discovery import build
from google.oauth2 import service_account
from common.methods import set_progress
from infrastructure.models import CustomField, Environment
from pathlib import Path
import json, tempfile
import os
import zipfile
import time
import io
from django.conf import settings
from googleapiclient.http import MediaIoBaseUpload
def generate_custom_fields():
    """Ensure every CustomField this blueprint writes to exists."""
    field_specs = [
        ('function_name',
         {'label': 'function name', 'type': 'STR', 'show_as_attribute': True,
          'description': 'Name given to the Google Cloud function'}),
        ('available_memory_mb',
         {'label': 'Memory', 'type': 'INT', 'show_as_attribute': True,
          'description': 'Memory allocated to the Google Cloud function'}),
        ('entry_point',
         {'label': 'EntryPoint', 'type': 'STR', 'show_as_attribute': True,
          'description': 'Name of a function exported by the module specified in '
                         'directory with source code'}),
        ('runtime',
         {'label': 'Runtime', 'type': 'STR', 'show_as_attribute': True}),
        ('service_account_email',
         {'label': 'serviceAccountEmail', 'type': 'STR',
          'show_as_attribute': False,
          'description': 'Service account that the function will assume as its identity.'}),
        ('https_trigger',
         {'label': 'HttpsTrigger', 'type': 'STR', 'show_as_attribute': True,
          'description': 'Url to trigger the google function'}),
        ('source_archive_url',
         {'label': 'sourceArchiveUrl', 'type': 'STR', 'show_as_attribute': True,
          'description': 'Url to where the source code of the function is located.'}),
        ('google_rh_id',
         {'label': 'Resource Handler', 'type': 'STR', 'show_as_attribute': False}),
    ]
    # get_or_create is idempotent, so re-running this is safe.
    for field_name, defaults in field_specs:
        CustomField.objects.get_or_create(name=field_name, defaults=defaults)
# Regions where Google Cloud Functions can be deployed.
# NOTE(review): not referenced by any code visible in this file — confirm
# whether external callers use it before removing.
FUNCTIONS_VALID_REGIONS = ['us-central1', 'us-east1',
                           'asia-east2', 'asia-northeast1', 'europe-west1', 'europe-west2']
def generate_options_for_env_id(server=None, **kwargs):
    """Return (id, name) choices for every GCP environment in CloudBolt."""
    gcp_envs = Environment.objects.filter(
        resource_handler__resource_technology__name="Google Cloud Platform")
    options = [(env.id, env.name) for env in gcp_envs]
    if not options:
        raise RuntimeError("No valid Google Cloud Platform resource handlers in CloudBolt")
    return options
def generate_options_for_runtime(**kwargs):
    """Return (value, label) choices for the Cloud Functions runtime."""
    # Bug fix: the go111 entry was mislabeled "Node JS 8" (copy-paste).
    return [("nodejs8", "Node JS 8"),
            ("nodejs10", "Node JS 10"),
            ("python37", "Python 3.7"),
            ("go111", "Go 1.11"), ]
def generate_options_for_bucket_to_store_sourcecode(control_value=None, **kwargs):
    """List GCS bucket names in the environment selected by *control_value*.

    Returns an empty list until an environment is chosen.
    """
    buckets = []
    if control_value:
        environment = Environment.objects.get(id=control_value)
        project_id = environment.gcp_project
        rh = environment.resource_handler.cast()
        project = rh.gcp_projects.get(id=project_id).name
        storage_client = create_build_client(rh, project_id, 'storage')
        listing = storage_client.buckets().list(project=project).execute()
        # Bug fix: 'items' is absent when the project has no buckets, and
        # iterating None raised TypeError; default to an empty list.
        buckets = [bucket.get('name') for bucket in listing.get('items', [])]
    return buckets
def generate_options_for_enter_sourcecode_or_bucket_url(**kwargs):
    """Choices: paste source code inline, or point at an existing bucket."""
    choices = ['SourceCode', 'BucketUrl']
    return choices
def generate_options_for_available_memory_mb(**kwargs):
    """Memory-size choices as (value-in-MB, human-readable label) pairs."""
    sizes_mb = (128, 256, 512, 1024, 2048)
    labels = ('128 MB', '256 MB', '512 MB', '1 GB', '2 GB')
    return list(zip(sizes_mb, labels))
def generate_options_for_gcp_region(control_value=None, **kwargs):
    """List Cloud Functions location IDs for the selected environment."""
    if control_value is None:
        return []
    environment = Environment.objects.get(id=control_value)
    project_id = environment.gcp_project
    handler = environment.resource_handler.cast()
    project = handler.gcp_projects.get(id=environment.gcp_project).name
    functions_client = create_build_client(handler, project_id, 'cloudfunctions')
    response = functions_client.projects().locations().list(
        name=f'projects/{project}').execute()
    return [location.get('locationId') for location in response['locations']]
def create_build_client(rh, project_id, servicename):
    '''Build an authenticated v1 Google API client for *servicename*,
    using the service-account JSON stored on the resource handler.'''
    raw_info = rh.gcp_projects.get(id=project_id).service_account_info
    credentials = service_account.Credentials.from_service_account_info(
        json.loads(raw_info))
    return build(servicename, "v1", credentials=credentials, cache_discovery=False)
def validate_file_name(runtime, filename):
    """
    Return True when *filename* is the entry file Cloud Functions expects
    for *runtime* (e.g. main.py for python37); False for unknown runtimes.
    """
    expected_entry_files = {
        'python37': 'main.py',
        'nodejs8': 'index.js',
        'nodejs10': 'index.js',
        'go111': 'function.go',
    }
    return expected_entry_files.get(runtime) == filename
def create_file_with_sourcecode(sourcecode):
    """Zip the file at *sourcecode* in memory and return a MediaIoBaseUpload.

    NOTE(review): despite the parameter name, *sourcecode* is used as a
    file path (it is opened below) — confirm against callers.
    """
    path=sourcecode
    filename=Path(sourcecode).name
    if path.startswith(settings.MEDIA_URL):
        set_progress("Converting relative URL to filesystem path")
        # Map the MEDIA_URL prefix onto MEDIA_ROOT so the file can be
        # opened from disk.
        path = path.replace(settings.MEDIA_URL, settings.MEDIA_ROOT)
        path = os.path.join(settings.MEDIA_ROOT, path)
    archive=io.BytesIO()
    # Cloud Functions deployment expects a zip archive with the entry file.
    with zipfile.ZipFile(archive, 'w') as zip_archive:
        with open(path, 'r') as file:
            zip_file = zipfile.ZipInfo(filename)
            zip_archive.writestr(zip_file, file.read())
    archive.seek(0)
    media=MediaIoBaseUpload(archive, mimetype='application/zip')
    return media
def upload_file_to_s3(storage_client, bucket_name, file, func_name):
    '''Upload *file* into *bucket_name* as object *func_name* and return
    "bucket/object".

    NOTE: despite the historical name, this uploads to Google Cloud
    Storage via the storage API client, not to S3.
    '''
    body = {'name': func_name}
    # insert() performs the upload; the API response is not needed here.
    # (Renamed the local that previously shadowed the builtin `object`.)
    storage_client.objects().insert(
        bucket=bucket_name, body=body, media_body=file).execute()
    return bucket_name + '/' + func_name
def run(resource, logger=None, **kwargs):
    """CloudBolt plugin entry point: create a Google Cloud Function.

    The '{{ ... }}' placeholders are substituted by CloudBolt with the
    blueprint action-input values before this code runs.  Returns the
    CloudBolt (status, output, error) triple.
    """
    environment = Environment.objects.get(id='{{ env_id }}')
    function_name = '{{ function_name }}'
    source_code = """{{ source_code }}"""
    entry_point = '{{ entry_point }}'
    available_memory_mb = '{{ available_memory_mb }}'
    runtime = '{{ runtime }}'
    bucket = '{{ bucket_to_store_sourcecode }}'
    cloud_storage_location = '{{ cloud_storage_location }}'
    enter_sourcecode_or_bucket_url = "{{enter_sourcecode_or_bucket_url}}"
    region = "{{gcp_region}}"
    rh = environment.resource_handler.cast()
    project = environment.gcp_project
    account_info = json.loads(rh.gcp_projects.get(id=project).service_account_info)
    project_name=account_info['project_id']
    service_name = 'cloudfunctions'
    client = create_build_client(rh,project,service_name)
    set_progress("Connection to google cloud established")
    # validate a file with an extension corresponding to the runtime selected
    storage_client = create_build_client(rh,project,'storage')
    if not cloud_storage_location:
        # No pre-uploaded archive: zip the provided source and upload it
        # to the chosen bucket.
        filename=Path(source_code).name
        if validate_file_name(runtime,filename):
            sourcecode_location = create_file_with_sourcecode(source_code)
        else:
            return "FAILURE","Please provide valid file.",""
        file_location = upload_file_to_s3(storage_client, bucket, sourcecode_location,function_name)
    else:
        file_location = cloud_storage_location
    # Need a way to be sure upload has completed
    time.sleep(5)
    # Request body for cloudfunctions projects.locations.functions.create.
    body = {
        "name": f"projects/{project_name}/locations/{region}/functions/{function_name}",
        "httpsTrigger": {
            "url": f"https://{region}-{project_name}.cloudfunctions.net/{function_name}"
        },
        "status": "ACTIVE",
        "entryPoint": f"{entry_point}",
        "timeout": "60s",
        "availableMemoryMb": int(available_memory_mb),
        "serviceAccountEmail": account_info.get('client_email'),
        "runtime": f"{runtime}",
        "sourceArchiveUrl": f"gs://{file_location}",
    }
    set_progress("Writing file to google cloud function")
    result = client.projects().locations().functions().create(
        location=f"projects/{project_name}/locations/{region}", body=body).execute()
    if result.get('name'):
        # Persist the function's metadata onto the CloudBolt resource.
        generate_custom_fields()
        resource.name = function_name
        resource.google_rh_id = rh.id
        resource.function_name = f"projects/{project_name}/locations/{region}/functions/{function_name}"
        resource.available_memory_mb = available_memory_mb
        resource.entry_point = entry_point
        resource.runtime = runtime
        resource.service_account_email = rh.serviceaccount
        resource.https_trigger = result.get('metadata').get('request').get('httpsTrigger').get('url')
        resource.source_archive_url = result.get('metadata').get('request').get('sourceArchiveUrl')
        resource.save()
        return "SUCCESS", "", ""
    return "FAILURE", "", ""
|
CloudBoltSoftware/cloudbolt-forge
|
blueprints/google_cloud_function/create.py
|
Python
|
apache-2.0
| 9,886
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 27 13:35:59 2017
@author: mkammoun.lct
"""
import numpy as np
import matplotlib.pyplot as pl
from bisect import bisect
import math
# n: mean permutation size (Poisson parameter); n2: number of Monte Carlo samples.
n=200
n2=10000
def per(theta, n):
    """Sample a random permutation of 1..n, element by element.

    At step i, with probability theta/(theta+i-1) the value i is appended
    as-is; otherwise i displaces a uniformly chosen earlier entry, which
    is moved to the end (Ewens-type sampling).
    """
    perm = []
    for i in range(1, n + 1):
        if np.random.binomial(1, theta / (float(theta) + i - 1)) == 1:
            perm.append(i)
            continue
        j = np.random.randint(i - 1)
        # i takes slot j; the displaced value goes to the end.
        perm.append(perm[j])
        perm[j] = i
    return perm
# Smoke-check the sampler at import time; the result is discarded.
per(0.1,1000)
def RSK(p):
    '''Row-insert the permutation p (RSK correspondence) and return the
    shape of the resulting Young tableau as a list of row lengths.

    Changes vs. the original: the recording tableau Q was built but never
    used, so it is removed, and the result is an explicit list (the
    original returned map(len, P), which is a list only on Python 2).
    '''
    P = []
    def insert(m):
        '''Schensted row insertion of m into P.'''
        for r in range(len(P)):
            if m > P[r][-1]:
                P[r].append(m)
                return
            # m bumps the smallest entry greater than it into the next row.
            c = bisect(P[r], m)
            P[r][c], m = m, P[r][c]
        P.append([m])
    for value in p:
        insert(int(value))
    return [len(row) for row in P]
def pointspos(per):
    """Return the nonnegative values of rsk[i] - i - 1 over the RSK shape
    of permutation *per* (the 'positive point' positions)."""
    shape = list(RSK(per))
    positions = []
    for i, row_len in enumerate(shape):
        offset = row_len - i - 1
        if offset >= 0:
            positions.append(offset)
    return positions
# Smoke-check; result discarded.
pointspos([1,2,3])
## seulement les points entre [-3 rac(n) et 3 rac(n)]
# Histogram counters for point positions in [0, 3*sqrt(n)), for the
# uniform and Ewens permutation models respectively.
alea1={}
alea2={}
for i in range(int(3*n**0.5)+1):
    alea1[i]=0
    alea2[i]=0
# Monte Carlo: n2 samples, each of Poisson(n) size (Python 2 script).
for j in range(n2):
    per_unif=np.random.permutation(range(1,np.random.poisson(n)+1))
    per_ewens=per(0.1,np.random.poisson(n))
    print j
    p1=pointspos(per_unif)
    p2=pointspos(per_ewens)
    for i in p1 :
        if i<3*n**0.5:
            alea1[i]+=1
    for i in p2 :
        if i<3*n**0.5:
            alea2[i]+=1
# Empirical frequencies and the arccos ("sinus") approximation curve.
x=range(int(3*n**0.5+1))
a1=np.array([alea1[i]for i in x])/float(n2)
a2=np.array([alea2[i]for i in x])/float(n2)
x2=np.array(range(int(1000*2*n**0.5+1)))/1000
a3=np.array(np.arccos(np.array(x2)/(2*n**0.5)))/math.pi
pl.plot(x,a1,"*",label="uniform")
pl.plot(x,a2,"+",label="Ewens")
pl.plot(x2,a3,label="approximation sinus")
pl.legend()
|
kammmoun/PFE
|
codes/Ewens&uniform+RSK_rho_1.py
|
Python
|
apache-2.0
| 2,011
|
"""
This code was Ported from CPython's sha512module.c
"""
import _struct as struct
# SHA-512 processes 128-byte blocks and emits a 64-byte digest.
SHA_BLOCKSIZE = 128
SHA_DIGESTSIZE = 64
def new_shaobject():
    """Return a fresh, zeroed SHA-512 state dict.

    Keys: 'digest' (8 working words), 'count_lo'/'count_hi' (bit count),
    'data' (pending block bytes), 'local' (bytes pending in 'data'),
    'digestsize' (output length, set by sha_init/sha384_init).
    """
    state = {
        'digest': [0] * 8,
        'count_lo': 0,
        'count_hi': 0,
        'data': [0] * SHA_BLOCKSIZE,
        'local': 0,
        'digestsize': 0,
    }
    return state
# 64-bit rotate-right: shift down y bits and wrap the displaced bits to the top.
ROR64 = lambda x, y: (((x & 0xffffffffffffffff) >> (y & 63)) | (x << (64 - (y & 63)))) & 0xffffffffffffffff
# SHA-512 logical functions (FIPS 180-4 section 4.1.3): choose and majority.
Ch = lambda x, y, z: (z ^ (x & (y ^ z)))
Maj = lambda x, y, z: (((x | y) & z) | (x & y))
# S = 64-bit rotate-right, R = logical shift-right.
S = lambda x, n: ROR64(x, n)
R = lambda x, n: (x & 0xffffffffffffffff) >> n
# Big-sigma functions used in each round; little-gamma (here Gamma) functions
# used to extend the message schedule.
Sigma0 = lambda x: (S(x, 28) ^ S(x, 34) ^ S(x, 39))
Sigma1 = lambda x: (S(x, 14) ^ S(x, 18) ^ S(x, 41))
Gamma0 = lambda x: (S(x, 1) ^ S(x, 8) ^ R(x, 7))
Gamma1 = lambda x: (S(x, 19) ^ S(x, 61) ^ R(x, 6))
def sha_transform(sha_info):
    """Run the SHA-512 compression function over the 128-byte block in
    sha_info['data'], folding the result into sha_info['digest']."""
    # Message schedule W: the first 16 words are the block bytes assembled
    # big-endian into 64-bit integers; the rest are derived per FIPS 180-4.
    W = []
    d = sha_info['data']
    for i in xrange(0, 16):
        W.append((d[8*i] << 56) + (d[8*i+1] << 48) + (d[8*i+2] << 40) +
                 (d[8*i+3] << 32) + (d[8*i+4] << 24) + (d[8*i+5] << 16) +
                 (d[8*i+6] << 8) + d[8*i+7])
    for i in xrange(16, 80):
        W.append((Gamma1(W[i - 2]) + W[i - 7] + Gamma0(W[i - 15]) + W[i - 16]) & 0xffffffffffffffff)
    ss = sha_info['digest'][:]
    def RND(a, b, c, d, e, f, g, h, i, ki):
        """One SHA-512 round; returns the updated d and h values."""
        t0 = (h + Sigma1(e) + Ch(e, f, g) + ki + W[i]) & 0xffffffffffffffff
        t1 = (Sigma0(a) + Maj(a, b, c)) & 0xffffffffffffffff
        d = (d + t0) & 0xffffffffffffffff
        h = (t0 + t1) & 0xffffffffffffffff
        return d & 0xffffffffffffffff, h & 0xffffffffffffffff
    # SHA-512 round constants K[0..79] (FIPS 180-4 section 4.2.3).
    K = (
        0x428a2f98d728ae22, 0x7137449123ef65cd, 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc,
        0x3956c25bf348b538, 0x59f111f1b605d019, 0x923f82a4af194f9b, 0xab1c5ed5da6d8118,
        0xd807aa98a3030242, 0x12835b0145706fbe, 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2,
        0x72be5d74f27b896f, 0x80deb1fe3b1696b1, 0x9bdc06a725c71235, 0xc19bf174cf692694,
        0xe49b69c19ef14ad2, 0xefbe4786384f25e3, 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65,
        0x2de92c6f592b0275, 0x4a7484aa6ea6e483, 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5,
        0x983e5152ee66dfab, 0xa831c66d2db43210, 0xb00327c898fb213f, 0xbf597fc7beef0ee4,
        0xc6e00bf33da88fc2, 0xd5a79147930aa725, 0x06ca6351e003826f, 0x142929670a0e6e70,
        0x27b70a8546d22ffc, 0x2e1b21385c26c926, 0x4d2c6dfc5ac42aed, 0x53380d139d95b3df,
        0x650a73548baf63de, 0x766a0abb3c77b2a8, 0x81c2c92e47edaee6, 0x92722c851482353b,
        0xa2bfe8a14cf10364, 0xa81a664bbc423001, 0xc24b8b70d0f89791, 0xc76c51a30654be30,
        0xd192e819d6ef5218, 0xd69906245565a910, 0xf40e35855771202a, 0x106aa07032bbd1b8,
        0x19a4c116b8d2d0c8, 0x1e376c085141ab53, 0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8,
        0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb, 0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3,
        0x748f82ee5defb2fc, 0x78a5636f43172f60, 0x84c87814a1f0ab72, 0x8cc702081a6439ec,
        0x90befffa23631e28, 0xa4506cebde82bde9, 0xbef9a3f7b2c67915, 0xc67178f2e372532b,
        0xca273eceea26619c, 0xd186b8c721c0c207, 0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178,
        0x06f067aa72176fba, 0x0a637dc5a2c898a6, 0x113f9804bef90dae, 0x1b710b35131c471b,
        0x28db77f523047d84, 0x32caab7b40c72493, 0x3c9ebe0a15c9bebc, 0x431d67c49c100d4c,
        0x4cc5d4becb3e42b6, 0x597f299cfc657e2a, 0x5fcb6fab3ad6faec, 0x6c44198c4a475817,
    )
    # The original hand-unrolled 80 rounds, rotating the 8 working
    # variables one slot per round.  The equivalent closed form: round i
    # reads ss[(j - i) % 8] for j = 0..7 as (a..h) and writes the new d/h
    # back to ss[(3 - i) % 8] and ss[(7 - i) % 8].
    for i in xrange(80):
        args = [ss[(j - i) % 8] for j in xrange(8)]
        ss[(3 - i) % 8], ss[(7 - i) % 8] = RND(*(args + [i, K[i]]))
    dig = []
    for i, x in enumerate(sha_info['digest']):
        dig.append((x + ss[i]) & 0xffffffffffffffff)
    sha_info['digest'] = dig
def sha_init():
    """Return SHA-512 state seeded with the standard initial hash values."""
    state = new_shaobject()
    state['digest'] = [
        0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
        0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
        0x510e527fade682d1, 0x9b05688c2b3e6c1f,
        0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
    ]
    state['count_lo'] = 0
    state['count_hi'] = 0
    state['local'] = 0
    # Full 64-byte (512-bit) output.
    state['digestsize'] = 64
    return state
def sha384_init():
    """Return SHA-384 state: SHA-512 with its own IV and a 48-byte output."""
    state = new_shaobject()
    state['digest'] = [
        0xcbbb9d5dc1059ed8, 0x629a292a367cd507,
        0x9159015a3070dd17, 0x152fecd8f70e5939,
        0x67332667ffc00b31, 0x8eb44a8768581511,
        0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4,
    ]
    state['count_lo'] = 0
    state['count_hi'] = 0
    state['local'] = 0
    # Truncated 48-byte (384-bit) output.
    state['digestsize'] = 48
    return state
def getbuf(s):
    """Coerce *s* to an indexable byte sequence for sha_update:
    str passes through, unicode is encoded via str(), anything else is
    wrapped in a (Python 2) buffer view."""
    if isinstance(s, str):
        return s
    if isinstance(s, unicode):
        return str(s)
    return buffer(s)
def sha_update(sha_info, buffer):
    """Feed *buffer* (a byte string / buffer) into the hash state,
    transforming each complete 128-byte block and stashing the remainder
    in sha_info['data'] / sha_info['local']."""
    count = len(buffer)
    buffer_idx = 0
    # Track the message length in bits as two 32-bit words; overflow of
    # count_lo carries into count_hi.
    clo = (sha_info['count_lo'] + (count << 3)) & 0xffffffff
    if clo < sha_info['count_lo']:
        sha_info['count_hi'] += 1
    sha_info['count_lo'] = clo
    sha_info['count_hi'] += (count >> 29)
    if sha_info['local']:
        # A partial block is pending from a previous call: top it up first.
        i = SHA_BLOCKSIZE - sha_info['local']
        if i > count:
            i = count
        # Copy the new bytes in after the pending ones.
        for x in enumerate(buffer[buffer_idx:buffer_idx+i]):
            sha_info['data'][sha_info['local']+x[0]] = struct.unpack('B', x[1])[0]
        count -= i
        buffer_idx += i
        sha_info['local'] += i
        if sha_info['local'] == SHA_BLOCKSIZE:
            sha_transform(sha_info)
            sha_info['local'] = 0
        else:
            # Still short of a full block; wait for more input.
            return
    while count >= SHA_BLOCKSIZE:
        # Process full blocks straight out of the input buffer.
        sha_info['data'] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + SHA_BLOCKSIZE]]
        count -= SHA_BLOCKSIZE
        buffer_idx += SHA_BLOCKSIZE
        sha_transform(sha_info)
    # Stash the trailing partial block for the next update/final call.
    pos = sha_info['local']
    sha_info['data'][pos:pos+count] = [struct.unpack('B',c)[0] for c in buffer[buffer_idx:buffer_idx + count]]
    sha_info['local'] = count
def sha_final(sha_info):
    """Apply SHA padding, run the final transform(s), and return the full
    64-byte digest as a byte string.  Mutates sha_info (callers in this
    file pass a copy)."""
    lo_bit_count = sha_info['count_lo']
    hi_bit_count = sha_info['count_hi']
    count = (lo_bit_count >> 3) & 0x7f
    # Mandatory 0x80 pad byte immediately after the message.
    sha_info['data'][count] = 0x80;
    count += 1
    if count > SHA_BLOCKSIZE - 16:
        # No room left for the 16-byte length field: pad out this block,
        # transform it, then continue with a fresh zeroed block.
        sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
        sha_transform(sha_info)
        # zero bytes in data
        sha_info['data'] = [0] * SHA_BLOCKSIZE
    else:
        sha_info['data'] = sha_info['data'][:count] + ([0] * (SHA_BLOCKSIZE - count))
    # Bytes 112-119: high 64 bits of the 128-bit length (always zero here,
    # since only 64 bits of bit-count are tracked).
    sha_info['data'][112] = 0;
    sha_info['data'][113] = 0;
    sha_info['data'][114] = 0;
    sha_info['data'][115] = 0;
    sha_info['data'][116] = 0;
    sha_info['data'][117] = 0;
    sha_info['data'][118] = 0;
    sha_info['data'][119] = 0;
    # Bytes 120-127: the message bit count, big-endian.
    sha_info['data'][120] = (hi_bit_count >> 24) & 0xff
    sha_info['data'][121] = (hi_bit_count >> 16) & 0xff
    sha_info['data'][122] = (hi_bit_count >> 8) & 0xff
    sha_info['data'][123] = (hi_bit_count >> 0) & 0xff
    sha_info['data'][124] = (lo_bit_count >> 24) & 0xff
    sha_info['data'][125] = (lo_bit_count >> 16) & 0xff
    sha_info['data'][126] = (lo_bit_count >> 8) & 0xff
    sha_info['data'][127] = (lo_bit_count >> 0) & 0xff
    sha_transform(sha_info)
    # Serialize the eight 64-bit digest words big-endian into bytes.
    dig = []
    for i in sha_info['digest']:
        dig.extend([ ((i>>56) & 0xff), ((i>>48) & 0xff), ((i>>40) & 0xff), ((i>>32) & 0xff), ((i>>24) & 0xff), ((i>>16) & 0xff), ((i>>8) & 0xff), (i & 0xff) ])
    return ''.join([chr(i) for i in dig])
class sha512(object):
    """Pure-Python SHA-512 with a hashlib-like interface."""
    digest_size = digestsize = SHA_DIGESTSIZE
    block_size = SHA_BLOCKSIZE

    def __init__(self, s=None):
        self._sha = sha_init()
        if s:
            self.update(s)

    def update(self, s):
        """Feed more data into the hash."""
        sha_update(self._sha, getbuf(s))

    def digest(self):
        """Return the binary digest; internal state is left untouched
        (finalization runs on a copy)."""
        final_state = self._sha.copy()
        return sha_final(final_state)[:self._sha['digestsize']]

    def hexdigest(self):
        """Return the digest as a lowercase hex string."""
        return ''.join('%02x' % ord(ch) for ch in self.digest())

    def copy(self):
        """Return an independent clone of this hash object."""
        clone = sha512.__new__(sha512)
        clone._sha = self._sha.copy()
        return clone
class sha384(sha512):
    """Pure-Python SHA-384: SHA-512 with a different IV, truncated to 48 bytes."""
    digest_size = digestsize = 48

    def __init__(self, s=None):
        self._sha = sha384_init()
        if s:
            sha_update(self._sha, getbuf(s))

    def copy(self):
        """Return an independent clone of this hash object."""
        clone = sha384.__new__(sha384)
        clone._sha = self._sha.copy()
        return clone
def test():
    """Self-test against the C-implemented `_sha512` module (Python 2)."""
    import _sha512
    a_str = "just a test string"
    # Empty input, one-block input, and multi-block input must all match.
    assert _sha512.sha512().hexdigest() == sha512().hexdigest()
    assert _sha512.sha512(a_str).hexdigest() == sha512(a_str).hexdigest()
    assert _sha512.sha512(a_str*7).hexdigest() == sha512(a_str*7).hexdigest()
    # Incremental updates must be equivalent to one-shot hashing.
    s = sha512(a_str)
    s.update(a_str)
    assert _sha512.sha512(a_str+a_str).hexdigest() == s.hexdigest()
# Run the self-test when executed as a script.
if __name__ == "__main__":
    test()
|
aisk/grumpy
|
third_party/pypy/_sha512.py
|
Python
|
apache-2.0
| 14,181
|
# Copyright 2015 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.watchers.elasticsearch_service
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Mike Grima <mgrima@netflix.com>
"""
import json
from security_monkey.decorators import record_exception
from security_monkey.decorators import iter_account_region
from security_monkey.watcher import Watcher, ChangeItem
from security_monkey.datastore import Account
from security_monkey import app, ARN_PREFIX
class ElasticSearchService(Watcher):
    # Watcher registry metadata used by security_monkey for naming/indexing.
    index = 'elasticsearchservice'
    i_am_singular = 'ElasticSearch Service Access Policy'
    i_am_plural = 'ElasticSearch Service Access Policies'
    def __init__(self, accounts=None, debug=False):
        super(ElasticSearchService, self).__init__(accounts=accounts, debug=debug)
    def slurp(self):
        """
        Collect the access policy of every ES domain in every configured
        account/region.

        :returns: item_list - list of ElasticSearchService Items
        :return: exception_map - A dict where the keys are a tuple containing the
            location of the exception and the value is the actual exception
        """
        self.prep_for_slurp()
        # iter_account_region invokes slurp_items once per (account, region)
        # pair and aggregates the (items, exceptions) results.
        @iter_account_region(index=self.index, accounts=self.accounts, service_name='es')
        def slurp_items(**kwargs):
            item_list = []
            exception_map = {}
            kwargs['exception_map'] = exception_map
            account_db = Account.query.filter(Account.name == kwargs['account_name']).first()
            account_num = account_db.identifier
            es_info = self.get_all_es_domains_in_region(**kwargs)
            # record_exception-decorated calls return None on failure; the
            # exception has already been recorded in exception_map.
            if es_info is None:
                return item_list, exception_map
            (client, domains) = es_info
            app.logger.debug("Found {} {}".format(len(domains), ElasticSearchService.i_am_plural))
            for domain in domains:
                # Skip domains the operator explicitly ignores.
                if self.check_ignore_list(domain["DomainName"]):
                    continue
                # Fetch the policy:
                item = self.build_item(domain["DomainName"], client, account_num, **kwargs)
                if item:
                    item_list.append(item)
            return item_list, exception_map
        return slurp_items()
    @record_exception(source='{index}-watcher'.format(index=index), pop_exception_fields=False)
    def get_all_es_domains_in_region(self, **kwargs):
        """Return (boto3 es client, list of domain dicts) for one region."""
        from security_monkey.common.sts_connect import connect
        client = connect(kwargs['account_name'], "boto3.es.client", region=kwargs['region'])
        app.logger.debug("Checking {}/{}/{}".format(ElasticSearchService.index, kwargs['account_name'], kwargs['region']))
        # No need to paginate according to: client.can_paginate("list_domain_names")
        domains = self.wrap_aws_rate_limited_call(client.list_domain_names)["DomainNames"]
        return client, domains
    @record_exception(source='{index}-watcher'.format(index=index), pop_exception_fields=False)
    def build_item(self, domain, client, account_num, **kwargs):
        """Build an ElasticSearchServiceItem holding the domain's ARN and
        parsed access policy (empty dict when no policy is set)."""
        arn = ARN_PREFIX + ':es:{region}:{account_number}:domain/{domain_name}'.format(
            region=kwargs['region'],
            account_number=account_num,
            domain_name=domain)
        config = {
            'arn': arn
        }
        domain_config = self.wrap_aws_rate_limited_call(client.describe_elasticsearch_domain_config,
                                                        DomainName=domain)
        # Does the cluster have a policy?
        if domain_config["DomainConfig"]["AccessPolicies"]["Options"] == "":
            config['policy'] = {}
        else:
            config['policy'] = json.loads(domain_config["DomainConfig"]["AccessPolicies"]["Options"])
        config['name'] = domain
        return ElasticSearchServiceItem(region=kwargs['region'], account=kwargs['account_name'], name=domain, arn=arn, config=config)
class ElasticSearchServiceItem(ChangeItem):
    """ChangeItem wrapping one ES domain's access-policy configuration."""
    def __init__(self, region=None, account=None, name=None, arn=None, config=None):
        # Bug fix: the default was a mutable `config={}`, which Python
        # shares across every call; use None and substitute a fresh dict.
        super(ElasticSearchServiceItem, self).__init__(
            index=ElasticSearchService.index,
            region=region,
            account=account,
            name=name,
            arn=arn,
            new_config=config if config is not None else {})
|
stackArmor/security_monkey
|
security_monkey/watchers/elasticsearch_service.py
|
Python
|
apache-2.0
| 4,759
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the scheduler manager RPC API.
"""
from oslo.config import cfg
from nova.openstack.common import jsonutils
import nova.openstack.common.rpc.proxy
# Configurable RPC topic that scheduler services consume messages from.
rpcapi_opts = [
    cfg.StrOpt('scheduler_topic',
               default='scheduler',
               help='the topic scheduler nodes listen on'),
]
CONF = cfg.CONF
CONF.register_opts(rpcapi_opts)
class SchedulerAPI(nova.openstack.common.rpc.proxy.RpcProxy):
    '''Client side of the scheduler rpc API.
    API version history:
        1.0 - Initial version.
        1.1 - Changes to prep_resize():
            - remove instance_uuid, add instance
            - remove instance_type_id, add instance_type
            - remove topic, it was unused
        1.2 - Remove topic from run_instance, it was unused
        1.3 - Remove instance_id, add instance to live_migration
        1.4 - Remove update_db from prep_resize
        1.5 - Add reservations argument to prep_resize()
        1.6 - Remove reservations argument to run_instance()
        1.7 - Add create_volume() method, remove topic from live_migration()
        2.0 - Remove 1.x backwards compat
        2.1 - Add image_id to create_volume()
        2.2 - Remove reservations argument to create_volume()
        2.3 - Remove create_volume()
        2.4 - Change update_service_capabilities()
            - accepts a list of capabilities
        2.5 - Add get_backdoor_port()
        2.6 - Add select_hosts()
    '''
    #
    # NOTE(russellb): This is the default minimum version that the server
    # (manager) side must implement unless otherwise specified using a version
    # argument to self.call()/cast()/etc. here. It should be left as X.0 where
    # X is the current major API version (1.0, 2.0, ...). For more information
    # about rpc API versioning, see the docs in
    # openstack/common/rpc/dispatcher.py.
    #
    BASE_RPC_API_VERSION = '2.0'
    def __init__(self):
        super(SchedulerAPI, self).__init__(topic=CONF.scheduler_topic,
                default_version=self.BASE_RPC_API_VERSION)
    def run_instance(self, ctxt, request_spec, admin_password,
            injected_files, requested_networks, is_first_time,
            filter_properties):
        """Cast (fire-and-forget) a run_instance message to the scheduler."""
        return self.cast(ctxt, self.make_msg('run_instance',
                request_spec=request_spec, admin_password=admin_password,
                injected_files=injected_files,
                requested_networks=requested_networks,
                is_first_time=is_first_time,
                filter_properties=filter_properties))
    def prep_resize(self, ctxt, instance, instance_type, image,
                    request_spec, filter_properties, reservations):
        """Cast prep_resize; complex arguments are converted to primitives
        for rpc serialization."""
        instance_p = jsonutils.to_primitive(instance)
        instance_type_p = jsonutils.to_primitive(instance_type)
        reservations_p = jsonutils.to_primitive(reservations)
        image_p = jsonutils.to_primitive(image)
        self.cast(ctxt, self.make_msg('prep_resize',
                instance=instance_p, instance_type=instance_type_p,
                image=image_p, request_spec=request_spec,
                filter_properties=filter_properties,
                reservations=reservations_p))
    def live_migration(self, ctxt, block_migration, disk_over_commit,
            instance, dest):
        """Synchronously ask the scheduler to start a live migration."""
        # NOTE(comstud): Call vs cast so we can get exceptions back, otherwise
        # this call in the scheduler driver doesn't return anything.
        instance_p = jsonutils.to_primitive(instance)
        return self.call(ctxt, self.make_msg('live_migration',
                block_migration=block_migration,
                disk_over_commit=disk_over_commit, instance=instance_p,
                dest=dest))
    def update_service_capabilities(self, ctxt, service_name, host,
            capabilities):
        """Fanout-cast updated capabilities to all schedulers (rpc 2.4)."""
        self.fanout_cast(ctxt, self.make_msg('update_service_capabilities',
                service_name=service_name, host=host,
                capabilities=capabilities),
                version='2.4')
    def select_hosts(self, ctxt, request_spec, filter_properties):
        """Synchronously ask the scheduler for suitable hosts (rpc 2.6)."""
        return self.call(ctxt, self.make_msg('select_hosts',
                request_spec=request_spec,
                filter_properties=filter_properties),
                version='2.6')
|
sridevikoushik31/nova
|
nova/scheduler/rpcapi.py
|
Python
|
apache-2.0
| 4,921
|
# -*- encoding: utf-8 -*-
"""
Django settings for login project.
"""
from django.core.urlresolvers import reverse_lazy
# Development defaults: never ship DEBUG=True or insecure cookies to prod.
DEBUG = True
TESTING = False
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False

# We use the 'SITE_NAME' for the name of the database and the name of the
# cloud files container.
SITE_NAME = 'app_login'

# People who get code error notifications when DEBUG=False.
ADMINS = (
    ('admin', 'code@pkimber.net'),
)
MANAGERS = ADMINS

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/London'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    #'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# SECURITY NOTE: a hard-coded SECRET_KEY is only acceptable for local
# development/example settings; production must load it from the environment.
SECRET_KEY = '#9x3dk(nl82sihl7c^u_#--yp((!g2ehd_1pmp)fpgx=h9(l9='

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    # django-reversion: wrap each request in a revision.
    'reversion.middleware.RevisionMiddleware',
)

ROOT_URLCONF = 'example_login.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'example_login.wsgi.application'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            # Make broken template variables loud instead of silently empty.
            'string_if_invalid': '**** INVALID EXPRESSION: %s ****',
        },
    },
]

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'example_login',
    'base',
    'login',
    'mail',
    'reversion',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}

# Celery
BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
# https://kfalck.net/2013/02/21/run-multiple-celeries-on-a-single-redis
CELERY_DEFAULT_QUEUE = '{}'.format(SITE_NAME)
# http://celery.readthedocs.org/en/latest/userguide/tasks.html#disable-rate-limits-if-they-re-not-used
CELERY_DISABLE_RATE_LIMITS = True

# django-compressor
COMPRESS_ENABLED = False  # defaults to the opposite of DEBUG

# See the list of constants at the top of 'mail.models'
MAIL_TEMPLATE_TYPE = 'django'
DEFAULT_FROM_EMAIL = 'notify@pkimber.net'
# mandrill
#EMAIL_BACKEND = 'djrill.mail.backends.djrill.DjrillBackend'
#MANDRILL_API_KEY = get_env_variable('MANDRILL_API_KEY')
#MANDRILL_USER_NAME = get_env_variable('MANDRILL_USER_NAME')
# Development: dump outgoing mail to the console instead of sending it.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

FTP_STATIC_DIR = None
FTP_STATIC_URL = None

# URL where requests are redirected after login when the contrib.auth.login
# view gets no next parameter.
LOGIN_REDIRECT_URL = reverse_lazy('project.dash')

# django-sendfile: serve private media directly in development.
SENDFILE_BACKEND = 'sendfile.backends.development'
SENDFILE_ROOT = 'media-private'
|
pkimber/login
|
example_login/base.py
|
Python
|
apache-2.0
| 6,137
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
from unittest import TestCase
from unittest.mock import patch, MagicMock
import sqlalchemy as sa
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base
from crate.client.cursor import Cursor
# Shared cursor test double: every Cursor the crate client constructs is
# replaced (via @patch below) by FakeCursor, which always hands back the
# single fake_cursor instance so tests can inspect executemany() calls.
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
class SqlAlchemyBulkTest(TestCase):
    """Verify SQLAlchemy bulk inserts are funnelled into Cursor.executemany()."""

    def setUp(self):
        # Engine bound to the crate dialect; the connection's cursor is mocked
        # out in the test via @patch, so no server is contacted.
        self.engine = sa.create_engine('crate://')
        Base = declarative_base(bind=self.engine)

        class Character(Base):
            __tablename__ = 'characters'
            name = sa.Column(sa.String, primary_key=True)
            age = sa.Column(sa.Integer)

        self.character = Character
        self.session = Session()

    @patch('crate.client.connection.Cursor', FakeCursor)
    def test_bulk_save(self):
        """bulk_save_objects emits one INSERT plus one parameter tuple per row."""
        chars = [
            self.character(name='Arthur', age=35),
            self.character(name='Banshee', age=26),
            self.character(name='Callisto', age=37),
        ]

        # Pretend the server acknowledged every row.
        fake_cursor.description = ()
        fake_cursor.rowcount = len(chars)
        fake_cursor.executemany.return_value = [
            {'rowcount': 1},
            {'rowcount': 1},
            {'rowcount': 1},
        ]
        self.session.bulk_save_objects(chars)

        # Inspect the statement and bulk parameters handed to the cursor.
        (stmt, bulk_args), _kwargs = fake_cursor.executemany.call_args

        expected_stmt = "INSERT INTO characters (name, age) VALUES (?, ?)"
        self.assertEqual(expected_stmt, stmt)

        expected_bulk_args = (
            ('Arthur', 35),
            ('Banshee', 26),
            ('Callisto', 37)
        )
        self.assertEqual(expected_bulk_args, bulk_args)
|
crate/crate-python
|
src/crate/client/sqlalchemy/tests/bulk_test.py
|
Python
|
apache-2.0
| 2,714
|
from __future__ import unicode_literals
import boto
import copy
import itertools
import re
import six
from collections import defaultdict
from datetime import datetime
from boto.ec2.instance import Instance as BotoInstance, Reservation
from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType
from boto.ec2.spotinstancerequest import SpotInstanceRequest as BotoSpotRequest
from boto.ec2.launchspecification import LaunchSpecification
from moto.core import BaseBackend
from moto.core.models import Model
from moto.core.utils import iso_8601_datetime_with_milliseconds
from .exceptions import (
EC2ClientError,
DependencyViolationError,
MissingParameterError,
InvalidParameterValueError,
InvalidParameterValueErrorTagNull,
InvalidDHCPOptionsIdError,
MalformedDHCPOptionsIdError,
InvalidKeyPairNameError,
InvalidKeyPairDuplicateError,
InvalidInternetGatewayIdError,
GatewayNotAttachedError,
ResourceAlreadyAssociatedError,
InvalidVPCIdError,
InvalidSubnetIdError,
InvalidNetworkInterfaceIdError,
InvalidNetworkAttachmentIdError,
InvalidSecurityGroupDuplicateError,
InvalidSecurityGroupNotFoundError,
InvalidPermissionNotFoundError,
InvalidRouteTableIdError,
InvalidRouteError,
InvalidInstanceIdError,
MalformedAMIIdError,
InvalidAMIIdError,
InvalidAMIAttributeItemValueError,
InvalidSnapshotIdError,
InvalidVolumeIdError,
InvalidVolumeAttachmentError,
InvalidDomainError,
InvalidAddressError,
InvalidAllocationIdError,
InvalidAssociationIdError,
InvalidVPCPeeringConnectionIdError,
InvalidVPCPeeringConnectionStateTransitionError,
TagLimitExceeded,
InvalidID,
InvalidCIDRSubnetError,
InvalidNetworkAclIdError,
InvalidVpnGatewayIdError,
InvalidVpnConnectionIdError,
InvalidCustomerGatewayIdError,
)
from .utils import (
EC2_RESOURCE_TO_PREFIX,
EC2_PREFIX_TO_RESOURCE,
random_ami_id,
random_dhcp_option_id,
random_eip_allocation_id,
random_eip_association_id,
random_eni_attach_id,
random_eni_id,
random_instance_id,
random_internet_gateway_id,
random_ip,
random_nat_gateway_id,
random_key_pair,
random_private_ip,
random_public_ip,
random_reservation_id,
random_route_table_id,
generate_route_id,
split_route_id,
random_security_group_id,
random_snapshot_id,
random_spot_request_id,
random_subnet_id,
random_subnet_association_id,
random_volume_id,
random_vpc_id,
random_vpc_peering_connection_id,
generic_filter,
is_valid_resource_id,
get_prefix,
simple_aws_filter_to_re,
is_valid_cidr,
filter_internet_gateways,
filter_reservations,
random_network_acl_id,
random_network_acl_subnet_association_id,
random_vpn_gateway_id,
random_vpn_connection_id,
random_customer_gateway_id,
is_tag_filter,
)
def utc_date_and_time():
    """Return the current UTC time as an EC2-style ISO-8601 timestamp.

    The millisecond part is always the literal '.000Z', matching the
    original format string '%Y-%m-%dT%H:%M:%S.000Z'.
    """
    now = datetime.utcnow()
    return '{0}.000Z'.format(now.strftime('%Y-%m-%dT%H:%M:%S'))
def validate_resource_ids(resource_ids):
    """Check every id; raise InvalidID on the first malformed one.

    Returns True when all ids (including none at all) are valid.
    """
    for candidate in resource_ids:
        if is_valid_resource_id(candidate):
            continue
        raise InvalidID(resource_id=candidate)
    return True
class InstanceState(object):
    """Value object mirroring EC2's instance state: a name plus numeric code."""

    def __init__(self, name='pending', code=0):
        self.name = name
        self.code = code
class StateReason(object):
    """Value object for EC2's stateReason: human message plus machine code."""

    def __init__(self, message="", code=""):
        self.message = message
        self.code = code
class TaggedEC2Resource(object):
def get_tags(self, *args, **kwargs):
tags = self.ec2_backend.describe_tags(filters={'resource-id': [self.id]})
return tags
def add_tag(self, key, value):
self.ec2_backend.create_tags([self.id], {key: value})
def get_filter_value(self, filter_name):
tags = self.get_tags()
if filter_name.startswith('tag:'):
tagname = filter_name.replace('tag:', '', 1)
for tag in tags:
if tag['key'] == tagname:
return tag['value']
return ''
if filter_name == 'tag-key':
return [tag['key'] for tag in tags]
if filter_name == 'tag-value':
return [tag['value'] for tag in tags]
class NetworkInterface(TaggedEC2Resource):
    """In-memory model of an EC2 elastic network interface (ENI)."""

    def __init__(self, ec2_backend, subnet, private_ip_address, device_index=0,
                 public_ip_auto_assign=True, group_ids=None):
        self.ec2_backend = ec2_backend
        self.id = random_eni_id()
        self.device_index = device_index
        self.private_ip_address = private_ip_address
        self.subnet = subnet
        self.instance = None          # set by Instance.attach_eni()
        self.attachment_id = None     # set by Instance.attach_eni()

        self.public_ip = None
        self.public_ip_auto_assign = public_ip_auto_assign
        # start() may assign a public IP, so it must run after the
        # public_ip* attributes above are initialised.
        self.start()

        self.attachments = []

        # Local set to the ENI. When attached to an instance, @property group_set
        # returns groups for both self and the attached instance.
        self._group_set = []

        group = None
        if group_ids:
            for group_id in group_ids:
                group = self.ec2_backend.get_security_group_from_id(group_id)
                if not group:
                    # Create with specific group ID.
                    group = SecurityGroup(self.ec2_backend, group_id, group_id, group_id, vpc_id=subnet.vpc_id)
                    self.ec2_backend.groups[subnet.vpc_id][group_id] = group
                if group:
                    self._group_set.append(group)

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Build an ENI from an AWS::EC2::NetworkInterface CloudFormation blob."""
        properties = cloudformation_json['Properties']

        security_group_ids = properties.get('SecurityGroups', [])

        ec2_backend = ec2_backends[region_name]
        subnet_id = properties.get('SubnetId')
        if subnet_id:
            subnet = ec2_backend.get_subnet(subnet_id)
        else:
            subnet = None

        private_ip_address = properties.get('PrivateIpAddress', None)

        network_interface = ec2_backend.create_network_interface(
            subnet,
            private_ip_address,
            group_ids=security_group_ids
        )
        return network_interface

    def stop(self):
        # Auto-assigned public IPs are released while the instance is stopped.
        if self.public_ip_auto_assign:
            self.public_ip = None

    def start(self):
        self.check_auto_public_ip()

    def check_auto_public_ip(self):
        # (Re)issue a random public IP when auto-assignment is enabled.
        if self.public_ip_auto_assign:
            self.public_ip = random_public_ip()

    @property
    def group_set(self):
        """Our own security groups plus the attached instance's, if any."""
        if self.instance and self.instance.security_groups:
            return set(self._group_set) | set(self.instance.security_groups)
        else:
            return self._group_set

    def get_cfn_attribute(self, attribute_name):
        """Support CloudFormation Fn::GetAtt lookups on this ENI."""
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
        if attribute_name == 'PrimaryPrivateIpAddress':
            return self.private_ip_address
        elif attribute_name == 'SecondaryPrivateIpAddresses':
            raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "SecondaryPrivateIpAddresses" ]"')
        raise UnformattedGetAttTemplateException()

    @property
    def physical_resource_id(self):
        return self.id

    def get_filter_value(self, filter_name):
        """Resolve DescribeNetworkInterfaces filters, falling back to tags."""
        if filter_name == 'network-interface-id':
            return self.id
        elif filter_name in ('addresses.private-ip-address', 'private-ip-address'):
            return self.private_ip_address
        elif filter_name == 'subnet-id':
            return self.subnet.id
        elif filter_name == 'vpc-id':
            return self.subnet.vpc_id
        elif filter_name == 'group-id':
            return [group.id for group in self._group_set]

        # Unknown names may still be tag filters handled by the mixin.
        filter_value = super(NetworkInterface, self).get_filter_value(filter_name)

        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error(
                "The filter '{0}' for DescribeNetworkInterfaces".format(filter_name)
            )

        return filter_value
class NetworkInterfaceBackend(object):
    """Backend mixin storing and querying ENIs, keyed by ENI id."""

    def __init__(self):
        self.enis = {}
        super(NetworkInterfaceBackend, self).__init__()

    def create_network_interface(self, subnet, private_ip_address, group_ids=None, **kwargs):
        eni = NetworkInterface(self, subnet, private_ip_address, group_ids=group_ids, **kwargs)
        self.enis[eni.id] = eni
        return eni

    def get_network_interface(self, eni_id):
        """Return the ENI with the given id or raise InvalidNetworkInterfaceIdError."""
        for eni in self.enis.values():
            if eni_id == eni.id:
                return eni
        raise InvalidNetworkInterfaceIdError(eni_id)

    def delete_network_interface(self, eni_id):
        deleted = self.enis.pop(eni_id, None)
        if not deleted:
            raise InvalidNetworkInterfaceIdError(eni_id)
        return deleted

    def describe_network_interfaces(self, filters=None):
        """Return ENIs matching the supported filters.

        Only 'network-interface-id' and 'group-id' are implemented; any
        other filter name raises via raise_not_implemented_error.
        """
        enis = self.enis.values()

        if filters:
            for (_filter, _filter_value) in filters.items():
                if _filter == 'network-interface-id':
                    _filter = 'id'
                    enis = [eni for eni in enis if getattr(eni, _filter) in _filter_value]
                elif _filter == 'group-id':
                    original_enis = enis
                    enis = []
                    for eni in original_enis:
                        for group in eni.group_set:
                            if group.id in _filter_value:
                                enis.append(eni)
                                break
                else:
                    self.raise_not_implemented_error("The filter '{0}' for DescribeNetworkInterfaces".format(_filter))

        return enis

    def attach_network_interface(self, eni_id, instance_id, device_index):
        eni = self.get_network_interface(eni_id)
        instance = self.get_instance(instance_id)
        return instance.attach_eni(eni, device_index)

    def detach_network_interface(self, attachment_id):
        found_eni = None

        # for/else: raise only when the loop exhausts without a break.
        for eni in self.enis.values():
            if eni.attachment_id == attachment_id:
                found_eni = eni
                break
        else:
            raise InvalidNetworkAttachmentIdError(attachment_id)

        found_eni.instance.detach_eni(found_eni)

    def modify_network_interface_attribute(self, eni_id, group_id):
        # Replaces the ENI's whole local group set with the single group.
        eni = self.get_network_interface(eni_id)
        group = self.get_security_group_from_id(group_id)
        eni._group_set = [group]

    def get_all_network_interfaces(self, eni_ids=None, filters=None):
        enis = self.enis.values()

        if eni_ids:
            enis = [eni for eni in enis if eni.id in eni_ids]
            if len(enis) != len(eni_ids):
                # Report one of the ids that did not resolve.
                invalid_id = list(set(eni_ids).difference(set([eni.id for eni in enis])))[0]
                raise InvalidNetworkInterfaceIdError(invalid_id)

        return generic_filter(filters, enis)
class Instance(BotoInstance, TaggedEC2Resource):
    """In-memory model of an EC2 instance, layered on boto's Instance."""

    def __init__(self, ec2_backend, image_id, user_data, security_groups, **kwargs):
        super(Instance, self).__init__()
        self.ec2_backend = ec2_backend
        self.id = random_instance_id()
        self.image_id = image_id
        self._state = InstanceState("running", 16)
        self._reason = ""
        self._state_reason = StateReason()
        self.user_data = user_data
        self.security_groups = security_groups
        self.instance_type = kwargs.get("instance_type", "m1.small")
        placement = kwargs.get("placement", None)
        self.vpc_id = None
        self.subnet_id = kwargs.get("subnet_id")
        # No subnet means the classic (pre-VPC) networking model.
        in_ec2_classic = not bool(self.subnet_id)
        self.key_name = kwargs.get("key_name")
        self.source_dest_check = "true"
        self.launch_time = utc_date_and_time()
        associate_public_ip = kwargs.get("associate_public_ip", False)
        if in_ec2_classic:
            # If we are in EC2-Classic, autoassign a public IP
            associate_public_ip = True

        # Derive platform/virtualization/architecture from the AMI if known.
        amis = self.ec2_backend.describe_images(filters={'image-id': image_id})
        ami = amis[0] if amis else None

        self.platform = ami.platform if ami else None
        self.virtualization_type = ami.virtualization_type if ami else 'paravirtual'
        self.architecture = ami.architecture if ami else 'x86_64'

        # handle weird bug around user_data -- something grabs the repr(), so it must be clean
        if isinstance(self.user_data, list) and len(self.user_data) > 0:
            if six.PY3 and isinstance(self.user_data[0], six.binary_type):
                # string will have a "b" prefix -- need to get rid of it
                self.user_data[0] = self.user_data[0].decode('utf-8')
            elif six.PY2 and isinstance(self.user_data[0], six.text_type):
                # string will have a "u" prefix -- need to get rid of it
                self.user_data[0] = self.user_data[0].encode('utf-8')

        # Availability zone: subnet's AZ wins, then explicit placement,
        # then the region default zone ('<region>a').
        if self.subnet_id:
            subnet = ec2_backend.get_subnet(self.subnet_id)
            self.vpc_id = subnet.vpc_id
            self._placement.zone = subnet.availability_zone
        elif placement:
            self._placement.zone = placement
        else:
            self._placement.zone = ec2_backend.region_name + 'a'

        self.block_device_mapping = BlockDeviceMapping()

        self.prep_nics(kwargs.get("nics", {}),
                       subnet_id=self.subnet_id,
                       private_ip=kwargs.get("private_ip"),
                       associate_public_ip=associate_public_ip)

    def setup_defaults(self):
        # Default have an instance with root volume should you not wish to override with attach volume cmd.
        volume = self.ec2_backend.create_volume(8, 'us-east-1a')
        self.ec2_backend.attach_volume(volume.id, self.id, '/dev/sda1')

    def teardown_defaults(self):
        # Detach and delete the default root volume created in setup_defaults().
        volume_id = self.block_device_mapping['/dev/sda1'].volume_id
        self.ec2_backend.detach_volume(volume_id, self.id, '/dev/sda1')
        self.ec2_backend.delete_volume(volume_id)

    @property
    def get_block_device_mapping(self):
        return self.block_device_mapping.items()

    @property
    def private_ip(self):
        # NIC 0 is always the primary interface (see prep_nics).
        return self.nics[0].private_ip_address

    @property
    def private_dns(self):
        return "ip-{0}.ec2.internal".format(self.private_ip)

    @property
    def public_ip(self):
        return self.nics[0].public_ip

    @property
    def public_dns(self):
        # Returns None (implicitly) when no public IP is assigned.
        if self.public_ip:
            return "ec2-{0}.compute-1.amazonaws.com".format(self.public_ip)

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Build an instance from an AWS::EC2::Instance CloudFormation blob."""
        properties = cloudformation_json['Properties']

        ec2_backend = ec2_backends[region_name]
        security_group_ids = properties.get('SecurityGroups', [])
        group_names = [ec2_backend.get_security_group_from_id(group_id).name for group_id in security_group_ids]

        reservation = ec2_backend.add_instances(
            image_id=properties['ImageId'],
            user_data=properties.get('UserData'),
            count=1,
            security_group_names=group_names,
            instance_type=properties.get("InstanceType", "m1.small"),
            subnet_id=properties.get("SubnetId"),
            key_name=properties.get("KeyName"),
            private_ip=properties.get('PrivateIpAddress'),
        )
        return reservation.instances[0]

    @property
    def physical_resource_id(self):
        return self.id

    def start(self, *args, **kwargs):
        # Re-enable NICs (may re-issue auto-assigned public IPs).
        for nic in self.nics.values():
            nic.start()

        self._state.name = "running"
        self._state.code = 16

        self._reason = ""
        self._state_reason = StateReason()

    def stop(self, *args, **kwargs):
        for nic in self.nics.values():
            nic.stop()

        self._state.name = "stopped"
        self._state.code = 80

        self._reason = "User initiated ({0})".format(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC'))
        self._state_reason = StateReason("Client.UserInitiatedShutdown: User initiated shutdown",
                                         "Client.UserInitiatedShutdown")

    def delete(self, region):
        # CloudFormation delete hook; the region argument is unused here.
        self.terminate()

    def terminate(self, *args, **kwargs):
        for nic in self.nics.values():
            nic.stop()

        # Termination also removes the default root volume.
        self.teardown_defaults()

        self._state.name = "terminated"
        self._state.code = 48

        self._reason = "User initiated ({0})".format(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC'))
        self._state_reason = StateReason("Client.UserInitiatedShutdown: User initiated shutdown",
                                         "Client.UserInitiatedShutdown")

    def reboot(self, *args, **kwargs):
        self._state.name = "running"
        self._state.code = 16

        self._reason = ""
        self._state_reason = StateReason()

    @property
    def dynamic_group_list(self):
        """Security groups resolved through NICs when present, else our own."""
        if self.nics:
            groups = []
            for nic in self.nics.values():
                for group in nic.group_set:
                    groups.append(group)
            return groups
        else:
            return self.security_groups

    def prep_nics(self, nic_spec, subnet_id=None, private_ip=None, associate_public_ip=None):
        """Create/attach NICs from a spec dict, synthesizing the primary NIC."""
        self.nics = {}

        if not private_ip:
            private_ip = random_private_ip()

        # Primary NIC defaults
        primary_nic = {'SubnetId': subnet_id,
                       'PrivateIpAddress': private_ip,
                       'AssociatePublicIpAddress': associate_public_ip}
        primary_nic = dict((k, v) for k, v in primary_nic.items() if v)

        # If empty NIC spec but primary NIC values provided, create NIC from them.
        if primary_nic and not nic_spec:
            nic_spec[0] = primary_nic
            nic_spec[0]['DeviceIndex'] = 0

        # Flesh out data structures and associations
        for nic in nic_spec.values():
            device_index = int(nic.get('DeviceIndex'))

            nic_id = nic.get('NetworkInterfaceId')
            if nic_id:
                # If existing NIC found, use it.
                use_nic = self.ec2_backend.get_network_interface(nic_id)
                use_nic.device_index = device_index
                use_nic.public_ip_auto_assign = False

            else:
                # If primary NIC values provided, use them for the primary NIC.
                if device_index == 0 and primary_nic:
                    nic.update(primary_nic)

                if 'SubnetId' in nic:
                    subnet = self.ec2_backend.get_subnet(nic['SubnetId'])
                else:
                    subnet = None

                group_id = nic.get('SecurityGroupId')
                group_ids = [group_id] if group_id else []

                use_nic = self.ec2_backend.create_network_interface(subnet,
                                                                    nic.get('PrivateIpAddress'),
                                                                    device_index=device_index,
                                                                    public_ip_auto_assign=nic.get('AssociatePublicIpAddress', False),
                                                                    group_ids=group_ids)

            self.attach_eni(use_nic, device_index)

    def attach_eni(self, eni, device_index):
        device_index = int(device_index)
        self.nics[device_index] = eni

        eni.instance = self  # This is used upon associate/disassociate public IP.
        eni.attachment_id = random_eni_attach_id()
        eni.device_index = device_index

        return eni.attachment_id

    def detach_eni(self, eni):
        self.nics.pop(eni.device_index, None)
        eni.instance = None
        eni.attachment_id = None
        eni.device_index = None

    def get_cfn_attribute(self, attribute_name):
        """Support CloudFormation Fn::GetAtt lookups on this instance."""
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
        if attribute_name == 'AvailabilityZone':
            return self.placement
        elif attribute_name == 'PrivateDnsName':
            return self.private_dns
        elif attribute_name == 'PublicDnsName':
            return self.public_dns
        elif attribute_name == 'PrivateIp':
            return self.private_ip
        elif attribute_name == 'PublicIp':
            return self.public_ip
        raise UnformattedGetAttTemplateException()
class InstanceBackend(object):
    """Backend mixin storing reservations (each holding one or more instances)."""

    def __init__(self):
        self.reservations = {}
        super(InstanceBackend, self).__init__()

    def get_instance(self, instance_id):
        """Return the instance with the given id or raise InvalidInstanceIdError."""
        for instance in self.all_instances():
            if instance.id == instance_id:
                return instance
        raise InvalidInstanceIdError(instance_id)

    def add_instances(self, image_id, count, user_data, security_group_names,
                      **kwargs):
        """Launch ``count`` instances into a single new reservation."""
        new_reservation = Reservation()
        new_reservation.id = random_reservation_id()

        security_groups = [self.get_security_group_from_name(name)
                           for name in security_group_names]
        security_groups.extend(self.get_security_group_from_id(sg_id)
                               for sg_id in kwargs.pop("security_group_ids", []))
        self.reservations[new_reservation.id] = new_reservation
        for index in range(count):
            new_instance = Instance(
                self,
                image_id,
                user_data,
                security_groups,
                **kwargs
            )
            new_reservation.instances.append(new_instance)
            # Give each instance its default root volume.
            new_instance.setup_defaults()
        return new_reservation

    def start_instances(self, instance_ids):
        started_instances = []
        for instance in self.get_multi_instances_by_id(instance_ids):
            instance.start()
            started_instances.append(instance)

        return started_instances

    def stop_instances(self, instance_ids):
        stopped_instances = []
        for instance in self.get_multi_instances_by_id(instance_ids):
            instance.stop()
            stopped_instances.append(instance)

        return stopped_instances

    def terminate_instances(self, instance_ids):
        terminated_instances = []
        for instance in self.get_multi_instances_by_id(instance_ids):
            instance.terminate()
            terminated_instances.append(instance)

        return terminated_instances

    def reboot_instances(self, instance_ids):
        rebooted_instances = []
        for instance in self.get_multi_instances_by_id(instance_ids):
            instance.reboot()
            rebooted_instances.append(instance)

        return rebooted_instances

    def modify_instance_attribute(self, instance_id, key, value):
        instance = self.get_instance(instance_id)
        setattr(instance, key, value)
        return instance

    def modify_instance_security_groups(self, instance_id, new_group_list):
        instance = self.get_instance(instance_id)
        setattr(instance, 'security_groups', new_group_list)
        return instance

    def describe_instance_attribute(self, instance_id, key):
        # The EC2 API calls the security-group attribute 'group_set'.
        if key == 'group_set':
            key = 'security_groups'
        instance = self.get_instance(instance_id)
        value = getattr(instance, key)
        return instance, value

    def all_instances(self):
        instances = []
        for reservation in self.all_reservations():
            for instance in reservation.instances:
                instances.append(instance)
        return instances

    def all_running_instances(self):
        # State code 16 == 'running'.
        instances = []
        for reservation in self.all_reservations():
            for instance in reservation.instances:
                if instance.state_code == 16:
                    instances.append(instance)
        return instances

    def get_multi_instances_by_id(self, instance_ids):
        """
        :param instance_ids: A string list with instance ids
        :return: A list with instance objects
        """
        result = []

        for reservation in self.all_reservations():
            for instance in reservation.instances:
                if instance.id in instance_ids:
                    result.append(instance)

        # TODO: Trim error message down to specific invalid id.
        if instance_ids and len(instance_ids) > len(result):
            raise InvalidInstanceIdError(instance_ids)

        return result

    def get_instance_by_id(self, instance_id):
        # Returns None (implicitly) when not found; contrast get_instance().
        for reservation in self.all_reservations():
            for instance in reservation.instances:
                if instance.id == instance_id:
                    return instance

    def get_reservations_by_instance_ids(self, instance_ids, filters=None):
        """ Go through all of the reservations and filter to only return those
        associated with the given instance_ids.
        """
        reservations = []
        for reservation in self.all_reservations(make_copy=True):
            reservation_instance_ids = [instance.id for instance in reservation.instances]
            matching_reservation = any(instance_id in reservation_instance_ids for instance_id in instance_ids)
            if matching_reservation:
                # We need to make a copy of the reservation because we have to modify the
                # instances to limit to those requested
                reservation.instances = [instance for instance in reservation.instances if instance.id in instance_ids]
                reservations.append(reservation)
        found_instance_ids = [instance.id for reservation in reservations for instance in reservation.instances]
        if len(found_instance_ids) != len(instance_ids):
            invalid_id = list(set(instance_ids).difference(set(found_instance_ids)))[0]
            raise InvalidInstanceIdError(invalid_id)
        if filters is not None:
            reservations = filter_reservations(reservations, filters)
        return reservations

    def all_reservations(self, make_copy=False, filters=None):
        if make_copy:
            # Return copies so that other functions can modify them without changing
            # the originals
            reservations = [copy.deepcopy(reservation) for reservation in self.reservations.values()]
        else:
            reservations = [reservation for reservation in self.reservations.values()]
        if filters is not None:
            reservations = filter_reservations(reservations, filters)
        return reservations
class KeyPairBackend(object):
    """Backend mixin: an in-memory store of SSH key pairs keyed by name."""

    def __init__(self):
        # name -> keypair dict (material, fingerprint, ...)
        self.keypairs = defaultdict(dict)
        super(KeyPairBackend, self).__init__()

    def create_key_pair(self, name):
        """Generate and store a new key pair; duplicate names are rejected."""
        if name in self.keypairs:
            raise InvalidKeyPairDuplicateError(name)
        keypair = random_key_pair()
        keypair['name'] = name
        self.keypairs[name] = keypair
        return keypair

    def delete_key_pair(self, name):
        """Remove the named key pair; always reports success (EC2 semantics)."""
        self.keypairs.pop(name, None)
        return True

    def describe_key_pairs(self, filter_names=None):
        """List key pairs, optionally restricted to ``filter_names``.

        Raises InvalidKeyPairNameError when a requested name is missing.
        """
        results = []
        for name, keypair in self.keypairs.items():
            if filter_names and name not in filter_names:
                continue
            keypair['name'] = name
            results.append(keypair)

        # TODO: Trim error message down to specific invalid name.
        if filter_names and len(filter_names) > len(results):
            raise InvalidKeyPairNameError(filter_names)

        return results

    def import_key_pair(self, key_name, public_key_material):
        """Register an imported key pair under ``key_name``.

        NOTE(review): public_key_material is currently ignored and random
        key material is stored instead — confirm this simplification.
        """
        if key_name in self.keypairs:
            raise InvalidKeyPairDuplicateError(key_name)
        keypair = random_key_pair()
        keypair['name'] = key_name
        self.keypairs[key_name] = keypair
        return keypair
class TagBackend(object):
    """In-memory store of EC2 tags: ``{resource_id: {key: value}}``."""

    VALID_TAG_FILTERS = ['key',
                         'resource-id',
                         'resource-type',
                         'value']

    # BUG FIX: 'vpc-peering-connection' previously had no trailing comma,
    # so implicit string concatenation fused it with 'vpn-connection' into
    # a single bogus entry 'vpc-peering-connectionvpn-connection', making
    # both real resource types fail the resource-type filter whitelist.
    VALID_TAG_RESOURCE_FILTER_TYPES = ['customer-gateway',
                                       'dhcp-options',
                                       'image',
                                       'instance',
                                       'internet-gateway',
                                       'network-acl',
                                       'network-interface',
                                       'reserved-instances',
                                       'route-table',
                                       'security-group',
                                       'snapshot',
                                       'spot-instances-request',
                                       'subnet',
                                       'volume',
                                       'vpc',
                                       'vpc-peering-connection',
                                       'vpn-connection',
                                       'vpn-gateway']

    def __init__(self):
        self.tags = defaultdict(dict)
        super(TagBackend, self).__init__()

    def create_tags(self, resource_ids, tags):
        """Apply every key/value in *tags* to every resource in *resource_ids*.

        Raises InvalidParameterValueErrorTagNull when any value is None,
        and TagLimitExceeded when a resource would exceed 10 user tags
        (keys starting with "aws:" are not counted toward the limit).
        """
        if None in set([tags[tag] for tag in tags]):
            raise InvalidParameterValueErrorTagNull()
        # Validate the limit for every resource before mutating anything,
        # so a failing request never partially succeeds.
        for resource_id in resource_ids:
            if resource_id in self.tags:
                if len(self.tags[resource_id]) + len([tag for tag in tags if not tag.startswith("aws:")]) > 10:
                    raise TagLimitExceeded()
            elif len([tag for tag in tags if not tag.startswith("aws:")]) > 10:
                raise TagLimitExceeded()
        for resource_id in resource_ids:
            for tag in tags:
                self.tags[resource_id][tag] = tags[tag]
        return True

    def delete_tags(self, resource_ids, tags):
        """Delete tags from each resource.

        A None value deletes the key unconditionally; a non-None value
        deletes the key only when the stored value matches exactly.
        """
        for resource_id in resource_ids:
            for tag in tags:
                if tag in self.tags[resource_id]:
                    if tags[tag] is None:
                        self.tags[resource_id].pop(tag)
                    elif tags[tag] == self.tags[resource_id][tag]:
                        self.tags[resource_id].pop(tag)
        return True

    def describe_tags(self, filters=None):
        """Return tag records, optionally narrowed by the DescribeTags
        filters 'key', 'resource-id', 'resource-type' and 'value'.

        Each filter category that is present must match (AND across
        categories); within a category any one pattern may match (OR).
        An absent category passes automatically.
        """
        import re
        results = []
        key_filters = []
        resource_id_filters = []
        resource_type_filters = []
        value_filters = []
        if filters is not None:
            # Compile each recognised filter's values up front; key,
            # resource-id and value accept AWS-style wildcards.
            for tag_filter in filters:
                if tag_filter in self.VALID_TAG_FILTERS:
                    if tag_filter == 'key':
                        for value in filters[tag_filter]:
                            key_filters.append(re.compile(simple_aws_filter_to_re(value)))
                    if tag_filter == 'resource-id':
                        for value in filters[tag_filter]:
                            resource_id_filters.append(re.compile(simple_aws_filter_to_re(value)))
                    if tag_filter == 'resource-type':
                        for value in filters[tag_filter]:
                            if value in self.VALID_TAG_RESOURCE_FILTER_TYPES:
                                resource_type_filters.append(value)
                    if tag_filter == 'value':
                        for value in filters[tag_filter]:
                            value_filters.append(re.compile(simple_aws_filter_to_re(value)))
        for resource_id, tags in self.tags.items():
            for key, value in tags.items():
                add_result = False
                if filters is None:
                    add_result = True
                else:
                    # A category with no patterns passes by default.
                    key_pass = False
                    id_pass = False
                    type_pass = False
                    value_pass = False
                    if key_filters:
                        for pattern in key_filters:
                            if pattern.match(key) is not None:
                                key_pass = True
                    else:
                        key_pass = True
                    if resource_id_filters:
                        for pattern in resource_id_filters:
                            if pattern.match(resource_id) is not None:
                                id_pass = True
                    else:
                        id_pass = True
                    if resource_type_filters:
                        for resource_type in resource_type_filters:
                            if EC2_PREFIX_TO_RESOURCE[get_prefix(resource_id)] == resource_type:
                                type_pass = True
                    else:
                        type_pass = True
                    if value_filters:
                        for pattern in value_filters:
                            if pattern.match(value) is not None:
                                value_pass = True
                    else:
                        value_pass = True
                    if key_pass and id_pass and type_pass and value_pass:
                        add_result = True
                # If we're not filtering, or we are filtering and this
                # tag passed every filter category, emit the record.
                if add_result:
                    result = {
                        'resource_id': resource_id,
                        'key': key,
                        'value': value,
                        'resource_type': EC2_PREFIX_TO_RESOURCE[get_prefix(resource_id)],
                    }
                    results.append(result)
        return results
class Ami(TaggedEC2Resource):
    """An Amazon Machine Image, either registered from an instance or
    copied from another AMI."""

    def __init__(self, ec2_backend, ami_id, instance=None, source_ami=None,
                 name=None, description=None):
        self.ec2_backend = ec2_backend
        self.id = ami_id
        self.state = "available"
        self.name = name
        self.description = description
        self.virtualization_type = None
        self.architecture = None
        self.kernel_id = None
        self.platform = None
        if instance:
            # Registered from a running instance: inherit its traits.
            self.instance = instance
            self.instance_id = instance.id
            self.virtualization_type = instance.virtualization_type
            self.architecture = instance.architecture
            self.kernel_id = instance.kernel
            self.platform = instance.platform
        elif source_ami:
            # Copied from another AMI.  Per the AWS docs (~2014.09.29,
            # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/CopyingAMIs.html):
            # "We don't copy launch permissions, user-defined tags, or
            # Amazon S3 bucket permissions from the source AMI to the
            # new AMI."
            self.virtualization_type = source_ami.virtualization_type
            self.architecture = source_ami.architecture
            self.kernel_id = source_ami.kernel_id
            self.platform = source_ami.platform
            self.name = name or source_ami.name
            self.description = description or source_ami.description
        self.launch_permission_groups = set()
        self.launch_permission_users = set()
        # AWS auto-creates a backing snapshot; mirror that behaviour.
        volume = self.ec2_backend.create_volume(15, "us-east-1a")
        self.ebs_snapshot = self.ec2_backend.create_snapshot(
            volume.id, "Auto-created snapshot for AMI %s" % self.id)

    @property
    def is_public(self):
        return 'all' in self.launch_permission_groups

    @property
    def is_public_string(self):
        return str(self.is_public).lower()

    def get_filter_value(self, filter_name):
        """Resolve a DescribeImages filter name to this AMI's value."""
        if filter_name == 'virtualization-type':
            return self.virtualization_type
        if filter_name == 'kernel-id':
            return self.kernel_id
        if filter_name in ('architecture', 'platform'):
            return getattr(self, filter_name)
        if filter_name == 'image-id':
            return self.id
        if filter_name == 'is-public':
            return str(self.is_public)
        if filter_name == 'state':
            return self.state
        if filter_name == 'name':
            return self.name
        # Fall through to tag-based filters handled by the base class.
        filter_value = super(Ami, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error(
                "The filter '{0}' for DescribeImages".format(filter_name))
        return filter_value
class AmiBackend(object):
    """In-memory store of AMIs, keyed by AMI id."""

    def __init__(self):
        self.amis = {}
        super(AmiBackend, self).__init__()

    def create_image(self, instance_id, name=None, description=None):
        """Register a new AMI from a running instance."""
        # TODO: check that instance exists and pull info from it.
        instance = self.get_instance(instance_id)
        ami_id = random_ami_id()
        ami = Ami(self, ami_id, instance=instance, source_ami=None,
                  name=name, description=description)
        self.amis[ami_id] = ami
        return ami

    def copy_image(self, source_image_id, source_region, name=None, description=None):
        """Copy an AMI from another region's backend into this one."""
        source_ami = ec2_backends[source_region].describe_images(
            ami_ids=[source_image_id])[0]
        ami_id = random_ami_id()
        ami = Ami(self, ami_id, instance=None, source_ami=source_ami,
                  name=name, description=description)
        self.amis[ami_id] = ami
        return ami

    def describe_images(self, ami_ids=(), filters=None):
        """List AMIs by filter, by explicit ids, or all of them."""
        if filters:
            return generic_filter(filters, self.amis.values())
        images = []
        for ami_id in ami_ids:
            if ami_id in self.amis:
                images.append(self.amis[ami_id])
            elif not ami_id.startswith("ami-"):
                raise MalformedAMIIdError(ami_id)
            else:
                raise InvalidAMIIdError(ami_id)
        # No explicit ids means "describe everything".
        return images or self.amis.values()

    def deregister_image(self, ami_id):
        """Remove the AMI; raises when the id is unknown."""
        if ami_id not in self.amis:
            raise InvalidAMIIdError(ami_id)
        self.amis.pop(ami_id)
        return True

    def get_launch_permission_groups(self, ami_id):
        return self.describe_images(ami_ids=[ami_id])[0].launch_permission_groups

    def get_launch_permission_users(self, ami_id):
        return self.describe_images(ami_ids=[ami_id])[0].launch_permission_users

    def validate_permission_targets(self, user_ids=None, group=None):
        """Validate everything before mutating anything, so a bad
        request never partially succeeds."""
        if user_ids:
            # AWS account IDs are 12-digit numbers
            # (http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html).
            for user_id in user_ids:
                if len(user_id) != 12 or not user_id.isdigit():
                    raise InvalidAMIAttributeItemValueError("userId", user_id)
        if group and group != 'all':
            raise InvalidAMIAttributeItemValueError("UserGroup", group)

    def add_launch_permission(self, ami_id, user_ids=None, group=None):
        """Grant launch permission to users and/or the 'all' group."""
        ami = self.describe_images(ami_ids=[ami_id])[0]
        self.validate_permission_targets(user_ids=user_ids, group=group)
        for user_id in user_ids or []:
            ami.launch_permission_users.add(user_id)
        if group:
            ami.launch_permission_groups.add(group)
        return True

    def remove_launch_permission(self, ami_id, user_ids=None, group=None):
        """Revoke launch permission from users and/or the 'all' group."""
        ami = self.describe_images(ami_ids=[ami_id])[0]
        self.validate_permission_targets(user_ids=user_ids, group=group)
        for user_id in user_ids or []:
            ami.launch_permission_users.discard(user_id)
        if group:
            ami.launch_permission_groups.discard(group)
        return True
class Region(object):
    """A named EC2 region together with its API endpoint."""

    def __init__(self, name, endpoint):
        self.name = name
        self.endpoint = endpoint
class Zone(object):
    """An availability zone inside a region."""

    def __init__(self, name, region_name):
        self.name = name
        self.region_name = region_name
class RegionsAndZonesBackend(object):
    """Static catalogue backing DescribeRegions and
    DescribeAvailabilityZones.

    Every endpoint follows the ``ec2.<region>.amazonaws.com`` pattern,
    so the region list is generated from the names.
    """

    regions = [Region(region_name, "ec2.{0}.amazonaws.com".format(region_name))
               for region_name in ["eu-west-1",
                                   "sa-east-1",
                                   "us-east-1",
                                   "ap-northeast-1",
                                   "us-west-2",
                                   "us-west-1",
                                   "ap-southeast-1",
                                   "ap-southeast-2"]]

    # TODO: cleanup. For now, pretend everything is us-east-1. 'merica.
    zones = [Zone("us-east-1" + letter, "us-east-1") for letter in "abcde"]

    def describe_regions(self):
        return self.regions

    def describe_availability_zones(self):
        return self.zones

    def get_zone_by_name(self, name):
        """Return the Zone named *name*, or None if it is unknown."""
        return next((zone for zone in self.zones if zone.name == name), None)
class SecurityRule(object):
    """A single ingress/egress rule: protocol, port range, CIDR ranges
    and source security groups."""

    def __init__(self, ip_protocol, from_port, to_port, ip_ranges, source_groups):
        self.ip_protocol = ip_protocol
        self.from_port = from_port
        self.to_port = to_port
        # Normalise falsy ip_ranges (None) to an empty list.
        self.ip_ranges = ip_ranges if ip_ranges else []
        self.source_groups = source_groups

    @property
    def unique_representation(self):
        """String key identifying this rule, used for equality tests."""
        parts = (self.ip_protocol, self.from_port, self.to_port,
                 self.ip_ranges, self.source_groups)
        return "{0}-{1}-{2}-{3}-{4}".format(*parts)

    def __eq__(self, other):
        return self.unique_representation == other.unique_representation
class SecurityGroup(TaggedEC2Resource):
    """An EC2 security group (classic when vpc_id is None, VPC otherwise)."""

    def __init__(self, ec2_backend, group_id, name, description, vpc_id=None):
        self.ec2_backend = ec2_backend
        self.id = group_id
        self.name = name
        self.description = description
        self.ingress_rules = []
        # AWS creates every group with an allow-all egress rule.
        self.egress_rules = [SecurityRule(-1, -1, -1, ['0.0.0.0/0'], [])]
        self.enis = {}
        self.vpc_id = vpc_id
        self.owner_id = "123456789012"

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a group (with tags and ingress rules) from an
        AWS::EC2::SecurityGroup CloudFormation resource."""
        properties = cloudformation_json['Properties']
        ec2_backend = ec2_backends[region_name]
        vpc_id = properties.get('VpcId')
        security_group = ec2_backend.create_security_group(
            name=resource_name,
            description=properties.get('GroupDescription'),
            vpc_id=vpc_id,
        )
        for tag in properties.get("Tags", []):
            tag_key = tag["Key"]
            tag_value = tag["Value"]
            security_group.add_tag(tag_key, tag_value)
        for ingress_rule in properties.get('SecurityGroupIngress', []):
            source_group_id = ingress_rule.get('SourceSecurityGroupId')
            ec2_backend.authorize_security_group_ingress(
                group_name_or_id=security_group.id,
                ip_protocol=ingress_rule['IpProtocol'],
                from_port=ingress_rule['FromPort'],
                to_port=ingress_rule['ToPort'],
                ip_ranges=ingress_rule.get('CidrIp'),
                source_group_ids=[source_group_id],
                vpc_id=vpc_id,
            )
        return security_group

    @classmethod
    def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name):
        """CloudFormation update = delete the old group, create anew."""
        cls._delete_security_group_given_vpc_id(original_resource.name, original_resource.vpc_id, region_name)
        return cls.create_from_cloudformation_json(new_resource_name, cloudformation_json, region_name)

    @classmethod
    def delete_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        properties = cloudformation_json['Properties']
        vpc_id = properties.get('VpcId')
        cls._delete_security_group_given_vpc_id(resource_name, vpc_id, region_name)

    @classmethod
    def _delete_security_group_given_vpc_id(cls, resource_name, vpc_id, region_name):
        ec2_backend = ec2_backends[region_name]
        security_group = ec2_backend.get_security_group_from_name(resource_name, vpc_id)
        if security_group:
            security_group.delete(region_name)

    def delete(self, region_name):
        ''' Not exposed as part of the ELB API - used for CloudFormation. '''
        self.ec2_backend.delete_security_group(group_id=self.id)

    @property
    def physical_resource_id(self):
        return self.id

    def matches_filter(self, key, filter_value):
        """Return True when this group matches a single DescribeSecurityGroups
        filter (*key* -> iterable of acceptable values)."""
        def to_attr(filter_name):
            # Map the EC2 filter name onto this object's attribute name.
            attr = None
            if filter_name == 'group-name':
                attr = 'name'
            elif filter_name == 'group-id':
                attr = 'id'
            elif filter_name == 'vpc-id':
                attr = 'vpc_id'
            else:
                attr = filter_name.replace('-', '_')
            return attr

        if key.startswith('ip-permission'):
            # BUG FIX: the pattern used to be r"ip-permission.(*)", which
            # is an invalid regex ('*' with nothing to repeat) and raised
            # re.error at runtime; "(.*)" captures the attribute suffix
            # as intended.
            match = re.search(r"ip-permission.(.*)", key)
            ingress_attr = to_attr(match.groups()[0])
            for ingress in self.ingress_rules:
                if getattr(ingress, ingress_attr) in filter_value:
                    return True
        elif is_tag_filter(key):
            tag_value = self.get_filter_value(key)
            return tag_value in filter_value
        else:
            attr_name = to_attr(key)
            return getattr(self, attr_name) in filter_value
        return False

    def matches_filters(self, filters):
        """True only when every filter in *filters* matches (AND)."""
        for key, value in filters.items():
            if not self.matches_filter(key, value):
                return False
        return True

    def get_cfn_attribute(self, attribute_name):
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
        if attribute_name == 'GroupId':
            return self.id
        raise UnformattedGetAttTemplateException()
class SecurityGroupBackend(object):
    """In-memory store of security groups, keyed first by vpc_id
    (None for EC2-classic) and then by group id."""

    def __init__(self):
        # the key in the dict group is the vpc_id or None (non-vpc)
        self.groups = defaultdict(dict)
        # Create the default security group
        self.create_security_group("default", "default group")
        super(SecurityGroupBackend, self).__init__()

    def create_security_group(self, name, description, vpc_id=None, force=False):
        """Create a group; *force* bypasses the duplicate-name check
        (used internally when auto-creating default groups)."""
        if not description:
            raise MissingParameterError('GroupDescription')
        group_id = random_security_group_id()
        if not force:
            existing_group = self.get_security_group_from_name(name, vpc_id)
            if existing_group:
                raise InvalidSecurityGroupDuplicateError(name)
        group = SecurityGroup(self, group_id, name, description, vpc_id=vpc_id)
        self.groups[vpc_id][group_id] = group
        return group

    def describe_security_groups(self, group_ids=None, groupnames=None, filters=None):
        """List groups matching any of the given ids, names or filters;
        with no criteria, return every group."""
        all_groups = itertools.chain(*[x.values() for x in self.groups.values()])
        if not (group_ids or groupnames or filters):
            return all_groups
        groups = []
        for group in all_groups:
            if ((group_ids and group.id in group_ids) or
                    (groupnames and group.name in groupnames) or
                    (filters and group.matches_filters(filters))):
                groups.append(group)
        return groups

    def _delete_security_group(self, vpc_id, group_id):
        # Refuse deletion while any ENI still references the group.
        if self.groups[vpc_id][group_id].enis:
            raise DependencyViolationError(
                "{0} is being utilized by {1}".format(group_id, 'ENIs'))
        return self.groups[vpc_id].pop(group_id)

    def delete_security_group(self, name=None, group_id=None):
        """Delete by id (searches every VPC) or by classic group name."""
        if group_id:
            # loop over all the SGs, find the right one
            for vpc_id, groups in self.groups.items():
                if group_id in groups:
                    return self._delete_security_group(vpc_id, group_id)
            raise InvalidSecurityGroupNotFoundError(group_id)
        elif name:
            # Group name lookups only apply to EC2-classic; VPC groups
            # must be identified by group_id.
            group = self.get_security_group_from_name(name)
            if group:
                return self._delete_security_group(None, group.id)
            raise InvalidSecurityGroupNotFoundError(name)

    def get_security_group_from_id(self, group_id):
        # 2 levels of chaining necessary since it's a complex structure
        all_groups = itertools.chain.from_iterable(
            [x.values() for x in self.groups.values()])
        for group in all_groups:
            if group.id == group_id:
                return group

    def get_security_group_from_name(self, name, vpc_id=None):
        for group_id, group in self.groups[vpc_id].items():
            if group.name == name:
                return group

    def get_security_group_by_name_or_id(self, group_name_or_id, vpc_id):
        # try searching by id, fallbacks to name search
        group = self.get_security_group_from_id(group_name_or_id)
        if group is None:
            group = self.get_security_group_from_name(group_name_or_id, vpc_id)
        return group

    def _resolve_source_groups(self, source_group_names, source_group_ids, vpc_id):
        """Resolve classic group names and VPC group ids to group
        objects, silently skipping ones that do not exist.

        Accepts None for either list.  BUG FIX: the revoke_* methods
        previously iterated the raw arguments and crashed with a
        TypeError whenever they were left at their None defaults.
        """
        source_groups = []
        for source_group_name in source_group_names or []:
            source_group = self.get_security_group_from_name(source_group_name, vpc_id)
            if source_group:
                source_groups.append(source_group)
        # for VPCs
        for source_group_id in source_group_ids or []:
            source_group = self.get_security_group_from_id(source_group_id)
            if source_group:
                source_groups.append(source_group)
        return source_groups

    @staticmethod
    def _validated_ip_ranges(ip_ranges):
        """Coerce a single CIDR string to a list and validate every entry."""
        if ip_ranges and not isinstance(ip_ranges, list):
            ip_ranges = [ip_ranges]
        if ip_ranges:
            for cidr in ip_ranges:
                if not is_valid_cidr(cidr):
                    raise InvalidCIDRSubnetError(cidr=cidr)
        return ip_ranges

    def authorize_security_group_ingress(self,
                                         group_name_or_id,
                                         ip_protocol,
                                         from_port,
                                         to_port,
                                         ip_ranges,
                                         source_group_names=None,
                                         source_group_ids=None,
                                         vpc_id=None):
        """Add an ingress rule to the group."""
        group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
        ip_ranges = self._validated_ip_ranges(ip_ranges)
        source_groups = self._resolve_source_groups(
            source_group_names, source_group_ids, vpc_id)
        security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
        group.ingress_rules.append(security_rule)

    def revoke_security_group_ingress(self,
                                      group_name_or_id,
                                      ip_protocol,
                                      from_port,
                                      to_port,
                                      ip_ranges,
                                      source_group_names=None,
                                      source_group_ids=None,
                                      vpc_id=None):
        """Remove a matching ingress rule or raise if none exists."""
        group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
        source_groups = self._resolve_source_groups(
            source_group_names, source_group_ids, vpc_id)
        security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
        if security_rule in group.ingress_rules:
            group.ingress_rules.remove(security_rule)
            return security_rule
        raise InvalidPermissionNotFoundError()

    def authorize_security_group_egress(self,
                                        group_name_or_id,
                                        ip_protocol,
                                        from_port,
                                        to_port,
                                        ip_ranges,
                                        source_group_names=None,
                                        source_group_ids=None,
                                        vpc_id=None):
        """Add an egress rule to the group."""
        group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
        ip_ranges = self._validated_ip_ranges(ip_ranges)
        source_groups = self._resolve_source_groups(
            source_group_names, source_group_ids, vpc_id)
        security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
        group.egress_rules.append(security_rule)

    def revoke_security_group_egress(self,
                                     group_name_or_id,
                                     ip_protocol,
                                     from_port,
                                     to_port,
                                     ip_ranges,
                                     source_group_names=None,
                                     source_group_ids=None,
                                     vpc_id=None):
        """Remove a matching egress rule or raise if none exists."""
        group = self.get_security_group_by_name_or_id(group_name_or_id, vpc_id)
        source_groups = self._resolve_source_groups(
            source_group_names, source_group_ids, vpc_id)
        security_rule = SecurityRule(ip_protocol, from_port, to_port, ip_ranges, source_groups)
        if security_rule in group.egress_rules:
            group.egress_rules.remove(security_rule)
            return security_rule
        raise InvalidPermissionNotFoundError()
class SecurityGroupIngress(object):
    """CloudFormation AWS::EC2::SecurityGroupIngress resource."""

    def __init__(self, security_group, properties):
        self.security_group = security_group
        self.properties = properties

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Authorize the ingress rule described by the CloudFormation
        resource on the target group."""
        properties = cloudformation_json['Properties']
        ec2_backend = ec2_backends[region_name]
        group_name = properties.get('GroupName')
        group_id = properties.get('GroupId')
        ip_protocol = properties.get("IpProtocol")
        cidr_ip = properties.get("CidrIp")
        from_port = properties.get("FromPort")
        to_port = properties.get("ToPort")
        source_security_group_id = properties.get("SourceSecurityGroupId")
        source_security_group_name = properties.get("SourceSecurityGroupName")
        # SourceSecurityGroupOwnerId is ignored at the moment.
        assert group_id or group_name
        assert source_security_group_name or cidr_ip or source_security_group_id
        assert ip_protocol
        source_security_group_ids = (
            [source_security_group_id] if source_security_group_id else None)
        source_security_group_names = (
            [source_security_group_name] if source_security_group_name else None)
        ip_ranges = [cidr_ip] if cidr_ip else []
        if group_id:
            security_group = ec2_backend.describe_security_groups(group_ids=[group_id])[0]
        else:
            security_group = ec2_backend.describe_security_groups(groupnames=[group_name])[0]
        ec2_backend.authorize_security_group_ingress(
            group_name_or_id=security_group.id,
            ip_protocol=ip_protocol,
            from_port=from_port,
            to_port=to_port,
            ip_ranges=ip_ranges,
            source_group_ids=source_security_group_ids,
            source_group_names=source_security_group_names,
        )
        return cls(security_group, properties)
class VolumeAttachment(object):
    """Association between an EBS volume and the instance it is
    mounted on."""

    def __init__(self, volume, instance, device):
        self.volume = volume
        self.instance = instance
        self.device = device
        # Timestamp recorded when the attachment is created.
        self.attach_time = utc_date_and_time()

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Attach a volume per an AWS::EC2::VolumeAttachment resource."""
        properties = cloudformation_json['Properties']
        backend = ec2_backends[region_name]
        return backend.attach_volume(
            volume_id=properties['VolumeId'],
            instance_id=properties['InstanceId'],
            device_path=properties['Device'],
        )
class Volume(TaggedEC2Resource):
    """An EBS volume."""

    def __init__(self, ec2_backend, volume_id, size, zone, snapshot_id=None):
        self.id = volume_id
        self.size = size
        self.zone = zone
        self.create_time = utc_date_and_time()
        # Holds a VolumeAttachment while mounted on an instance.
        self.attachment = None
        self.snapshot_id = snapshot_id
        self.ec2_backend = ec2_backend

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a volume from an AWS::EC2::Volume resource."""
        properties = cloudformation_json['Properties']
        backend = ec2_backends[region_name]
        return backend.create_volume(
            size=properties.get('Size'),
            zone_name=properties.get('AvailabilityZone'),
        )

    @property
    def physical_resource_id(self):
        return self.id

    @property
    def status(self):
        """'in-use' while attached, 'available' otherwise."""
        return 'in-use' if self.attachment else 'available'

    def get_filter_value(self, filter_name):
        """Resolve a DescribeVolumes filter name to this volume's value."""
        if filter_name.startswith('attachment'):
            # Attachment filters can never match an unattached volume.
            if not self.attachment:
                return None
            if filter_name == 'attachment.attach-time':
                return self.attachment.attach_time
            if filter_name == 'attachment.device':
                return self.attachment.device
            if filter_name == 'attachment.instance-id':
                return self.attachment.instance.id
        if filter_name == 'create-time':
            return self.create_time
        if filter_name == 'size':
            return self.size
        if filter_name == 'snapshot-id':
            return self.snapshot_id
        if filter_name == 'status':
            return self.status
        if filter_name == 'volume-id':
            return self.id
        # Fall through to tag-based filters handled by the base class.
        filter_value = super(Volume, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error(
                "The filter '{0}' for DescribeVolumes".format(filter_name))
        return filter_value
class Snapshot(TaggedEC2Resource):
    """A point-in-time copy of an EBS volume."""

    def __init__(self, ec2_backend, snapshot_id, volume, description):
        self.id = snapshot_id
        self.volume = volume
        self.description = description
        self.start_time = utc_date_and_time()
        self.create_volume_permission_groups = set()
        self.ec2_backend = ec2_backend
        # Snapshots complete instantly in this in-memory backend.
        self.status = 'completed'

    def get_filter_value(self, filter_name):
        """Resolve a DescribeSnapshots filter name to this snapshot's value."""
        if filter_name == 'description':
            return self.description
        if filter_name == 'snapshot-id':
            return self.id
        if filter_name == 'start-time':
            return self.start_time
        # volume-* filters delegate to the source volume.
        if filter_name == 'volume-id':
            return self.volume.id
        if filter_name == 'volume-size':
            return self.volume.size
        filter_value = super(Snapshot, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error(
                "The filter '{0}' for DescribeSnapshots".format(filter_name))
        return filter_value
class EBSBackend(object):
    """In-memory store of EBS volumes, attachments and snapshots."""

    def __init__(self):
        self.volumes = {}
        self.attachments = {}
        self.snapshots = {}
        super(EBSBackend, self).__init__()

    def create_volume(self, size, zone_name, snapshot_id=None):
        """Create a volume; when built from a snapshot and *size* is
        None, inherit the source volume's size."""
        volume_id = random_volume_id()
        zone = self.get_zone_by_name(zone_name)
        if snapshot_id:
            snapshot = self.get_snapshot(snapshot_id)
            if size is None:
                size = snapshot.volume.size
        volume = Volume(self, volume_id, size, zone, snapshot_id)
        self.volumes[volume_id] = volume
        return volume

    def describe_volumes(self, filters=None):
        volumes = self.volumes.values()
        if filters:
            return generic_filter(filters, volumes)
        return volumes

    def get_volume(self, volume_id):
        """Return the volume or raise InvalidVolumeIdError."""
        volume = self.volumes.get(volume_id)
        if not volume:
            raise InvalidVolumeIdError(volume_id)
        return volume

    def delete_volume(self, volume_id):
        if volume_id not in self.volumes:
            raise InvalidVolumeIdError(volume_id)
        return self.volumes.pop(volume_id)

    def attach_volume(self, volume_id, instance_id, device_path):
        volume = self.get_volume(volume_id)
        instance = self.get_instance(instance_id)
        if not volume or not instance:
            # get_volume/get_instance raise on unknown ids, so this is
            # a defensive guard only.
            return False
        volume.attachment = VolumeAttachment(volume, instance, device_path)
        # Mirror the mount in the instance's block device mapping.
        bdt = BlockDeviceType(volume_id=volume_id, status=volume.status,
                              size=volume.size, attach_time=utc_date_and_time())
        instance.block_device_mapping[device_path] = bdt
        return volume.attachment

    def detach_volume(self, volume_id, instance_id, device_path):
        volume = self.get_volume(volume_id)
        self.get_instance(instance_id)  # validates the instance id
        old_attachment = volume.attachment
        if not old_attachment:
            raise InvalidVolumeAttachmentError(volume_id, instance_id)
        volume.attachment = None
        return old_attachment

    def create_snapshot(self, volume_id, description):
        snapshot_id = random_snapshot_id()
        volume = self.get_volume(volume_id)
        snapshot = Snapshot(self, snapshot_id, volume, description)
        self.snapshots[snapshot_id] = snapshot
        return snapshot

    def describe_snapshots(self, filters=None):
        snapshots = self.snapshots.values()
        if filters:
            return generic_filter(filters, snapshots)
        return snapshots

    def get_snapshot(self, snapshot_id):
        """Return the snapshot or raise InvalidSnapshotIdError."""
        snapshot = self.snapshots.get(snapshot_id)
        if not snapshot:
            raise InvalidSnapshotIdError(snapshot_id)
        return snapshot

    def delete_snapshot(self, snapshot_id):
        if snapshot_id not in self.snapshots:
            raise InvalidSnapshotIdError(snapshot_id)
        return self.snapshots.pop(snapshot_id)

    def get_create_volume_permission_groups(self, snapshot_id):
        return self.get_snapshot(snapshot_id).create_volume_permission_groups

    def add_create_volume_permission(self, snapshot_id, user_id=None, group=None):
        """Only group='all' is supported; per-user grants are unimplemented."""
        if user_id:
            self.raise_not_implemented_error("The UserId parameter for ModifySnapshotAttribute")
        if group != 'all':
            raise InvalidAMIAttributeItemValueError("UserGroup", group)
        self.get_snapshot(snapshot_id).create_volume_permission_groups.add(group)
        return True

    def remove_create_volume_permission(self, snapshot_id, user_id=None, group=None):
        """Only group='all' is supported; per-user grants are unimplemented."""
        if user_id:
            self.raise_not_implemented_error("The UserId parameter for ModifySnapshotAttribute")
        if group != 'all':
            raise InvalidAMIAttributeItemValueError("UserGroup", group)
        self.get_snapshot(snapshot_id).create_volume_permission_groups.discard(group)
        return True
class VPC(TaggedEC2Resource):
    """A virtual private cloud."""

    def __init__(self, ec2_backend, vpc_id, cidr_block, is_default):
        self.ec2_backend = ec2_backend
        self.id = vpc_id
        self.cidr_block = cidr_block
        # Set later when a DHCP options set is associated.
        self.dhcp_options = None
        self.state = 'available'
        self.is_default = is_default

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a VPC from an AWS::EC2::VPC CloudFormation resource."""
        properties = cloudformation_json['Properties']
        backend = ec2_backends[region_name]
        return backend.create_vpc(cidr_block=properties['CidrBlock'])

    @property
    def physical_resource_id(self):
        return self.id

    def get_filter_value(self, filter_name):
        """Resolve a DescribeVpcs filter name to this VPC's value."""
        if filter_name == 'vpc-id':
            return self.id
        if filter_name == 'cidr':
            return self.cidr_block
        if filter_name == 'dhcp-options-id':
            return self.dhcp_options.id if self.dhcp_options else None
        filter_value = super(VPC, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error(
                "The filter '{0}' for DescribeVPCs".format(filter_name))
        return filter_value
class VPCBackend(object):
    """In-memory store of VPCs plus their auto-created companions
    (main route table, default NACL, default security group)."""

    def __init__(self):
        self.vpcs = {}
        super(VPCBackend, self).__init__()

    def create_vpc(self, cidr_block):
        vpc_id = random_vpc_id()
        # The first VPC created in the backend acts as the default VPC.
        is_default = len(self.vpcs) == 0
        vpc = VPC(self, vpc_id, cidr_block, is_default)
        self.vpcs[vpc_id] = vpc
        # AWS creates a default main route table and security group.
        self.create_route_table(vpc_id, main=True)
        # AWS creates a default Network ACL.
        self.create_network_acl(vpc_id, default=True)
        if not self.get_security_group_from_name('default', vpc_id=vpc_id):
            self.create_security_group('default', 'default VPC security group', vpc_id=vpc_id)
        return vpc

    def get_vpc(self, vpc_id):
        """Return the VPC or raise InvalidVPCIdError."""
        if vpc_id not in self.vpcs:
            raise InvalidVPCIdError(vpc_id)
        return self.vpcs[vpc_id]

    def get_all_vpcs(self, vpc_ids=None, filters=None):
        if vpc_ids:
            candidates = [vpc for vpc in self.vpcs.values() if vpc.id in vpc_ids]
        else:
            candidates = self.vpcs.values()
        return generic_filter(filters, candidates)

    def delete_vpc(self, vpc_id):
        # Anything beyond the main route table counts as a dependency.
        route_tables = self.get_all_route_tables(filters={'vpc-id': vpc_id})
        if len(route_tables) > 1:
            raise DependencyViolationError(
                "The vpc {0} has dependencies and cannot be deleted."
                .format(vpc_id)
            )
        for route_table in route_tables:
            self.delete_route_table(route_table.id)
        # Drop the auto-created default security group, if still present.
        default = self.get_security_group_from_name('default', vpc_id=vpc_id)
        if default:
            self.delete_security_group(group_id=default.id)
        # Finally remove the VPC itself.
        vpc = self.vpcs.pop(vpc_id, None)
        if not vpc:
            raise InvalidVPCIdError(vpc_id)
        if vpc.dhcp_options:
            # Detach and delete the associated DHCP options set.
            vpc.dhcp_options.vpc = None
            self.delete_dhcp_options_set(vpc.dhcp_options.id)
            vpc.dhcp_options = None
        return vpc
class VPCPeeringConnectionStatus(object):
    """Mutable status of a VPC peering connection."""

    def __init__(self, code='initiating-request', message=''):
        self.code = code
        self.message = message

    def _set(self, code, message):
        # Shared transition helper.
        self.code = code
        self.message = message

    def initiating(self):
        self._set('initiating-request', 'Initiating Request to {accepter ID}')

    def pending(self):
        self._set('pending-acceptance', 'Pending Acceptance by {accepter ID}')

    def accept(self):
        self._set('active', 'Active')

    def reject(self):
        self._set('rejected', 'Inactive')
class VPCPeeringConnection(TaggedEC2Resource):
    """A peering link between a requester VPC and an accepter VPC."""

    def __init__(self, vpc_pcx_id, vpc, peer_vpc):
        self.id = vpc_pcx_id
        self.vpc = vpc
        self.peer_vpc = peer_vpc
        self._status = VPCPeeringConnectionStatus()

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a connection from an AWS::EC2::VPCPeeringConnection
        CloudFormation resource."""
        properties = cloudformation_json['Properties']
        backend = ec2_backends[region_name]
        requester = backend.get_vpc(properties['VpcId'])
        accepter = backend.get_vpc(properties['PeerVpcId'])
        return backend.create_vpc_peering_connection(requester, accepter)

    @property
    def physical_resource_id(self):
        return self.id
class VPCPeeringConnectionBackend(object):
    """In-memory store of VPC peering connections."""

    def __init__(self):
        self.vpc_pcxs = {}
        super(VPCPeeringConnectionBackend, self).__init__()

    def create_vpc_peering_connection(self, vpc, peer_vpc):
        vpc_pcx_id = random_vpc_peering_connection_id()
        vpc_pcx = VPCPeeringConnection(vpc_pcx_id, vpc, peer_vpc)
        # New connections immediately await acceptance by the peer.
        vpc_pcx._status.pending()
        self.vpc_pcxs[vpc_pcx_id] = vpc_pcx
        return vpc_pcx

    def get_all_vpc_peering_connections(self):
        return self.vpc_pcxs.values()

    def get_vpc_peering_connection(self, vpc_pcx_id):
        """Return the connection or raise if the id is unknown."""
        if vpc_pcx_id not in self.vpc_pcxs:
            raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id)
        return self.vpc_pcxs[vpc_pcx_id]

    def delete_vpc_peering_connection(self, vpc_pcx_id):
        deleted = self.vpc_pcxs.pop(vpc_pcx_id, None)
        if deleted is None:
            raise InvalidVPCPeeringConnectionIdError(vpc_pcx_id)
        return deleted

    def accept_vpc_peering_connection(self, vpc_pcx_id):
        """Accept a request; only valid from 'pending-acceptance'."""
        vpc_pcx = self.get_vpc_peering_connection(vpc_pcx_id)
        if vpc_pcx._status.code != 'pending-acceptance':
            raise InvalidVPCPeeringConnectionStateTransitionError(vpc_pcx.id)
        vpc_pcx._status.accept()
        return vpc_pcx

    def reject_vpc_peering_connection(self, vpc_pcx_id):
        """Reject a request; only valid from 'pending-acceptance'."""
        vpc_pcx = self.get_vpc_peering_connection(vpc_pcx_id)
        if vpc_pcx._status.code != 'pending-acceptance':
            raise InvalidVPCPeeringConnectionStateTransitionError(vpc_pcx.id)
        vpc_pcx._status.reject()
        return vpc_pcx
class Subnet(TaggedEC2Resource):
    """In-memory model of an EC2 subnet within a VPC."""
    def __init__(self, ec2_backend, subnet_id, vpc_id, cidr_block, availability_zone, defaultForAz,
                 map_public_ip_on_launch):
        self.ec2_backend = ec2_backend
        self.id = subnet_id
        self.vpc_id = vpc_id
        self.cidr_block = cidr_block
        # May be None; the availability_zone property below falls back to
        # "<region>a" in that case.
        self._availability_zone = availability_zone
        # Callers pass these as the strings "true"/"false" (EC2 wire format)
        # — see SubnetBackend.create_subnet.
        self.defaultForAz = defaultForAz
        self.map_public_ip_on_launch = map_public_ip_on_launch
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a subnet from an AWS::EC2::Subnet CloudFormation resource."""
        properties = cloudformation_json['Properties']
        vpc_id = properties['VpcId']
        cidr_block = properties['CidrBlock']
        availability_zone = properties.get('AvailabilityZone')
        ec2_backend = ec2_backends[region_name]
        subnet = ec2_backend.create_subnet(
            vpc_id=vpc_id,
            cidr_block=cidr_block,
            availability_zone=availability_zone,
        )
        # Propagate any CloudFormation tags onto the new subnet.
        for tag in properties.get("Tags", []):
            tag_key = tag["Key"]
            tag_value = tag["Value"]
            subnet.add_tag(tag_key, tag_value)
        return subnet
    @property
    def availability_zone(self):
        if self._availability_zone is None:
            # This could probably be smarter, but there doesn't appear to be a
            # way to pull AZs for a region in boto
            return self.ec2_backend.region_name + "a"
        else:
            return self._availability_zone
    @property
    def physical_resource_id(self):
        return self.id
    def get_filter_value(self, filter_name):
        """
        API Version 2014-10-01 defines the following filters for DescribeSubnets:
        * availabilityZone
        * available-ip-address-count
        * cidrBlock
        * defaultForAz
        * state
        * subnet-id
        * tag:key=value
        * tag-key
        * tag-value
        * vpc-id
        Taken from: http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html
        """
        if filter_name in ['cidr', 'cidrBlock', 'cidr-block']:
            return self.cidr_block
        elif filter_name == 'vpc-id':
            return self.vpc_id
        elif filter_name == 'subnet-id':
            return self.id
        elif filter_name == 'availabilityZone':
            return self.availability_zone
        elif filter_name == 'defaultForAz':
            return self.defaultForAz
        # Anything else is delegated to the base class (e.g. tag filters);
        # None means no handler exists, which is reported as unimplemented.
        filter_value = super(Subnet, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeSubnets".format(filter_name))
        return filter_value
    def get_cfn_attribute(self, attribute_name):
        """Resolve a CloudFormation Fn::GetAtt on this subnet."""
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
        if attribute_name == 'AvailabilityZone':
            # AvailabilityZone is a documented attribute but is not implemented.
            raise NotImplementedError('"Fn::GetAtt" : [ "{0}" , "AvailabilityZone" ]"')
        raise UnformattedGetAttTemplateException()
class SubnetBackend(object):
    """Backend mixin responsible for subnet CRUD operations."""

    def __init__(self):
        self.subnets = {}
        super(SubnetBackend, self).__init__()

    def get_subnet(self, subnet_id):
        """Return the subnet for the given id or raise InvalidSubnetIdError."""
        found = self.subnets.get(subnet_id, None)
        if not found:
            raise InvalidSubnetIdError(subnet_id)
        return found

    def create_subnet(self, vpc_id, cidr_block, availability_zone=None):
        """Create a subnet in an existing VPC and wire up the default ACL."""
        new_id = random_subnet_id()
        vpc = self.get_vpc(vpc_id)  # also validates that the VPC exists
        # Default-VPC subnets are AZ defaults and auto-assign public IPs;
        # both flags travel as the strings "true"/"false".
        flag = "true" if vpc.is_default else "false"
        subnet = Subnet(self, new_id, vpc_id, cidr_block, availability_zone,
                        flag, flag)
        # AWS associates every new subnet with the default Network ACL.
        self.associate_default_network_acl_with_subnet(new_id)
        self.subnets[new_id] = subnet
        return subnet

    def get_all_subnets(self, filters=None):
        return generic_filter(filters, self.subnets.values())

    def delete_subnet(self, subnet_id):
        removed = self.subnets.pop(subnet_id, None)
        if not removed:
            raise InvalidSubnetIdError(subnet_id)
        return removed

    def modify_subnet_attribute(self, subnet_id, map_public_ip):
        subnet = self.get_subnet(subnet_id)
        # Attribute values are the wire-format strings "true"/"false".
        if map_public_ip not in ('true', 'false'):
            raise InvalidParameterValueError(map_public_ip)
        subnet.map_public_ip_on_launch = map_public_ip
class SubnetRouteTableAssociation(object):
    """Associates one subnet with one route table."""

    def __init__(self, route_table_id, subnet_id):
        self.route_table_id = route_table_id
        self.subnet_id = subnet_id

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create the association from an AWS::EC2::SubnetRouteTableAssociation resource."""
        properties = cloudformation_json['Properties']
        backend = ec2_backends[region_name]
        return backend.create_subnet_association(
            route_table_id=properties['RouteTableId'],
            subnet_id=properties['SubnetId'],
        )
class SubnetRouteTableAssociationBackend(object):
    """Backend mixin that records subnet <-> route-table associations."""

    def __init__(self):
        self.subnet_associations = {}
        super(SubnetRouteTableAssociationBackend, self).__init__()

    def create_subnet_association(self, route_table_id, subnet_id):
        # Keyed by "<route_table_id>:<subnet_id>", so re-creating the same
        # association simply overwrites the previous record.
        association = SubnetRouteTableAssociation(route_table_id, subnet_id)
        key = "{0}:{1}".format(route_table_id, subnet_id)
        self.subnet_associations[key] = association
        return association
class RouteTable(TaggedEC2Resource):
    """A VPC route table holding routes and subnet associations."""
    def __init__(self, ec2_backend, route_table_id, vpc_id, main=False):
        self.ec2_backend = ec2_backend
        self.id = route_table_id
        self.vpc_id = vpc_id
        # True for the VPC's main route table.
        self.main = main
        # association_id -> subnet_id
        self.associations = {}
        # route_id -> Route
        self.routes = {}
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a route table from an AWS::EC2::RouteTable resource."""
        properties = cloudformation_json['Properties']
        vpc_id = properties['VpcId']
        ec2_backend = ec2_backends[region_name]
        route_table = ec2_backend.create_route_table(
            vpc_id=vpc_id,
        )
        return route_table
    @property
    def physical_resource_id(self):
        return self.id
    def get_filter_value(self, filter_name):
        """Resolve a DescribeRouteTables filter name to this table's value."""
        if filter_name == "association.main":
            # Note: Boto only supports 'true'.
            # https://github.com/boto/boto/issues/1742
            if self.main:
                return 'true'
            else:
                return 'false'
        elif filter_name == "route-table-id":
            return self.id
        elif filter_name == "vpc-id":
            return self.vpc_id
        elif filter_name == "association.route-table-id":
            return self.id
        elif filter_name == "association.route-table-association-id":
            # Dict view of association ids (not a list).
            return self.associations.keys()
        elif filter_name == "association.subnet-id":
            # Dict view of associated subnet ids (not a list).
            return self.associations.values()
        # Delegate anything else (e.g. tag filters) to the base class; None
        # means no handler exists, which is reported as unimplemented.
        filter_value = super(RouteTable, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeRouteTables".format(filter_name))
        return filter_value
class RouteTableBackend(object):
    """Backend mixin for route tables and their subnet associations."""
    def __init__(self):
        self.route_tables = {}
        super(RouteTableBackend, self).__init__()
    def create_route_table(self, vpc_id, main=False):
        """Create a route table in a VPC, seeding it with the local route."""
        route_table_id = random_route_table_id()
        vpc = self.get_vpc(vpc_id)  # Validate VPC exists
        route_table = RouteTable(self, route_table_id, vpc_id, main=main)
        self.route_tables[route_table_id] = route_table
        # AWS creates a default local route.
        self.create_route(route_table_id, vpc.cidr_block, local=True)
        return route_table
    def get_route_table(self, route_table_id):
        """Return the route table for the given id or raise."""
        route_table = self.route_tables.get(route_table_id, None)
        if not route_table:
            raise InvalidRouteTableIdError(route_table_id)
        return route_table
    def get_all_route_tables(self, route_table_ids=None, filters=None):
        """List route tables, optionally restricted by id, then filtered."""
        route_tables = self.route_tables.values()
        if route_table_ids:
            route_tables = [route_table for route_table in route_tables if route_table.id in route_table_ids]
            # If any requested id matched nothing, report the first such id.
            if len(route_tables) != len(route_table_ids):
                invalid_id = list(set(route_table_ids).difference(set([route_table.id for route_table in route_tables])))[0]
                raise InvalidRouteTableIdError(invalid_id)
        return generic_filter(filters, route_tables)
    def delete_route_table(self, route_table_id):
        """Delete a route table; it must have no subnet associations."""
        route_table = self.get_route_table(route_table_id)
        if route_table.associations:
            raise DependencyViolationError(
                "The routeTable '{0}' has dependencies and cannot be deleted."
                .format(route_table_id)
            )
        self.route_tables.pop(route_table_id)
        return True
    def associate_route_table(self, route_table_id, subnet_id):
        """Associate a subnet with a route table; idempotent per subnet."""
        # Idempotent if association already exists.
        route_tables_by_subnet = self.get_all_route_tables(filters={'association.subnet-id': [subnet_id]})
        if route_tables_by_subnet:
            for association_id, check_subnet_id in route_tables_by_subnet[0].associations.items():
                if subnet_id == check_subnet_id:
                    return association_id
        # Association does not yet exist, so create it.
        route_table = self.get_route_table(route_table_id)
        self.get_subnet(subnet_id)  # Validate subnet exists
        association_id = random_subnet_association_id()
        route_table.associations[association_id] = subnet_id
        return association_id
    def disassociate_route_table(self, association_id):
        """Remove an association by id, returning its subnet id."""
        for route_table in self.route_tables.values():
            if association_id in route_table.associations:
                return route_table.associations.pop(association_id, None)
        raise InvalidAssociationIdError(association_id)
    def replace_route_table_association(self, association_id, route_table_id):
        """Move an existing association onto a different route table."""
        # Idempotent if association already exists.
        new_route_table = self.get_route_table(route_table_id)
        if association_id in new_route_table.associations:
            return association_id
        # Find route table which currently has the association, error if none.
        route_tables_by_association_id = self.get_all_route_tables(filters={'association.route-table-association-id': [association_id]})
        if not route_tables_by_association_id:
            raise InvalidAssociationIdError(association_id)
        # Remove existing association, create new one.
        previous_route_table = route_tables_by_association_id[0]
        subnet_id = previous_route_table.associations.pop(association_id, None)
        return self.associate_route_table(route_table_id, subnet_id)
class Route(object):
    """A single route entry (destination CIDR -> target) in a route table."""

    def __init__(self, route_table, destination_cidr_block, local=False,
                 gateway=None, instance=None, interface=None, vpc_pcx=None):
        # Route ids are composite: "<route_table_id>~<cidr>"-style via helper.
        self.id = generate_route_id(route_table.id, destination_cidr_block)
        self.route_table = route_table
        self.destination_cidr_block = destination_cidr_block
        self.local = local
        self.gateway = gateway
        self.instance = instance
        self.interface = interface
        self.vpc_pcx = vpc_pcx

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a route from an AWS::EC2::Route CloudFormation resource."""
        properties = cloudformation_json['Properties']
        backend = ec2_backends[region_name]
        return backend.create_route(
            route_table_id=properties['RouteTableId'],
            destination_cidr_block=properties['DestinationCidrBlock'],
            gateway_id=properties.get('GatewayId'),
            instance_id=properties.get('InstanceId'),
            interface_id=properties.get('NetworkInterfaceId'),
            vpc_peering_connection_id=properties.get('VpcPeeringConnectionId'),
        )
class RouteBackend(object):
    """Backend mixin for creating, replacing, looking up, and deleting routes."""

    def __init__(self):
        super(RouteBackend, self).__init__()

    def create_route(self, route_table_id, destination_cidr_block, local=False,
                     gateway_id=None, instance_id=None, interface_id=None,
                     vpc_peering_connection_id=None):
        """Add a route to a route table.

        Only one target (gateway, instance, or peering connection) is
        honored; NetworkInterfaceId targets are not implemented.
        """
        route_table = self.get_route_table(route_table_id)
        if interface_id:
            self.raise_not_implemented_error("CreateRoute to NetworkInterfaceId")
        gateway = None
        if gateway_id:
            # The id prefix distinguishes VPN gateways from internet gateways.
            if EC2_RESOURCE_TO_PREFIX['vpn-gateway'] in gateway_id:
                gateway = self.get_vpn_gateway(gateway_id)
            elif EC2_RESOURCE_TO_PREFIX['internet-gateway'] in gateway_id:
                gateway = self.get_internet_gateway(gateway_id)
        route = Route(route_table, destination_cidr_block, local=local,
                      gateway=gateway,
                      instance=self.get_instance(instance_id) if instance_id else None,
                      interface=None,
                      vpc_pcx=self.get_vpc_peering_connection(vpc_peering_connection_id) if vpc_peering_connection_id else None)
        route_table.routes[route.id] = route
        return route

    def replace_route(self, route_table_id, destination_cidr_block,
                      gateway_id=None, instance_id=None, interface_id=None,
                      vpc_peering_connection_id=None):
        """Replace the target of the existing route for the given CIDR.

        Raises KeyError if no route exists for that CIDR in the table.
        """
        route_table = self.get_route_table(route_table_id)
        route_id = generate_route_id(route_table.id, destination_cidr_block)
        route = route_table.routes[route_id]
        if interface_id:
            self.raise_not_implemented_error("ReplaceRoute to NetworkInterfaceId")
        # Reset and re-resolve every target field from the new arguments.
        route.gateway = None
        if gateway_id:
            if EC2_RESOURCE_TO_PREFIX['vpn-gateway'] in gateway_id:
                route.gateway = self.get_vpn_gateway(gateway_id)
            elif EC2_RESOURCE_TO_PREFIX['internet-gateway'] in gateway_id:
                route.gateway = self.get_internet_gateway(gateway_id)
        route.instance = self.get_instance(instance_id) if instance_id else None
        route.interface = None
        route.vpc_pcx = self.get_vpc_peering_connection(vpc_peering_connection_id) if vpc_peering_connection_id else None
        route_table.routes[route.id] = route
        return route

    def get_route(self, route_id):
        """Return the route for a composite route id, or None if absent."""
        route_table_id, destination_cidr_block = split_route_id(route_id)
        route_table = self.get_route_table(route_table_id)
        # Fixed: routes live in route_table.routes; RouteTable itself has no
        # `get` method, so the previous `route_table.get(route_id)` raised
        # AttributeError for every lookup.
        return route_table.routes.get(route_id)

    def delete_route(self, route_table_id, destination_cidr_block):
        """Remove the route for the given CIDR, raising if it doesn't exist."""
        route_table = self.get_route_table(route_table_id)
        route_id = generate_route_id(route_table_id, destination_cidr_block)
        deleted = route_table.routes.pop(route_id, None)
        if not deleted:
            raise InvalidRouteError(route_table_id, destination_cidr_block)
        return deleted
class InternetGateway(TaggedEC2Resource):
    """An internet gateway, attachable to at most one VPC."""

    def __init__(self, ec2_backend):
        self.ec2_backend = ec2_backend
        self.id = random_internet_gateway_id()
        self.vpc = None  # set by the backend when attached

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        return ec2_backends[region_name].create_internet_gateway()

    @property
    def physical_resource_id(self):
        return self.id

    @property
    def attachment_state(self):
        # Mirrors the EC2 API: "available" while attached, else "detached".
        return "available" if self.vpc else "detached"
class InternetGatewayBackend(object):
    """Backend mixin for internet gateway lifecycle and VPC attachment."""

    def __init__(self):
        self.internet_gateways = {}
        super(InternetGatewayBackend, self).__init__()

    def create_internet_gateway(self):
        gateway = InternetGateway(self)
        self.internet_gateways[gateway.id] = gateway
        return gateway

    def describe_internet_gateways(self, internet_gateway_ids=None, filters=None):
        """List gateways, optionally restricted by ids and/or filters."""
        if internet_gateway_ids is None:
            result = self.internet_gateways.values()
        else:
            result = []
            for igw_id in internet_gateway_ids:
                if igw_id not in self.internet_gateways:
                    raise InvalidInternetGatewayIdError(igw_id)
                result.append(self.internet_gateways[igw_id])
        if filters is not None:
            result = filter_internet_gateways(result, filters)
        return result

    def delete_internet_gateway(self, internet_gateway_id):
        """Delete a gateway; it must not be attached to a VPC."""
        gateway = self.get_internet_gateway(internet_gateway_id)
        if gateway.vpc:
            raise DependencyViolationError(
                "{0} is being utilized by {1}"
                .format(internet_gateway_id, gateway.vpc.id)
            )
        self.internet_gateways.pop(internet_gateway_id)
        return True

    def detach_internet_gateway(self, internet_gateway_id, vpc_id):
        """Detach a gateway from the given VPC; raises if not attached to it."""
        gateway = self.get_internet_gateway(internet_gateway_id)
        attached_here = bool(gateway.vpc) and gateway.vpc.id == vpc_id
        if not attached_here:
            raise GatewayNotAttachedError(internet_gateway_id, vpc_id)
        gateway.vpc = None
        return True

    def attach_internet_gateway(self, internet_gateway_id, vpc_id):
        """Attach a gateway to a VPC; raises if it is already attached."""
        gateway = self.get_internet_gateway(internet_gateway_id)
        if gateway.vpc:
            raise ResourceAlreadyAssociatedError(internet_gateway_id)
        gateway.vpc = self.get_vpc(vpc_id)
        return True

    def get_internet_gateway(self, internet_gateway_id):
        # Route through describe so unknown ids raise the proper error.
        return self.describe_internet_gateways(
            internet_gateway_ids=[internet_gateway_id])[0]
class VPCGatewayAttachment(object):
    """Models an AWS::EC2::VPCGatewayAttachment (gateway <-> VPC link)."""
    def __init__(self, gateway_id, vpc_id):
        # NOTE(review): parameter order is (gateway_id, vpc_id) — check any
        # positional call sites carefully; confusing the order silently swaps
        # the two stored ids.
        self.gateway_id = gateway_id
        self.vpc_id = vpc_id
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create the attachment record and attach the internet gateway."""
        properties = cloudformation_json['Properties']
        ec2_backend = ec2_backends[region_name]
        attachment = ec2_backend.create_vpc_gateway_attachment(
            gateway_id=properties['InternetGatewayId'],
            vpc_id=properties['VpcId'],
        )
        # Recording the attachment and actually attaching the gateway are two
        # separate backend calls.
        ec2_backend.attach_internet_gateway(properties['InternetGatewayId'], properties['VpcId'])
        return attachment
    @property
    def physical_resource_id(self):
        # NOTE(review): no `id` attribute is ever assigned on this class, so
        # reading this property raises AttributeError — confirm intended.
        return self.id
class VPCGatewayAttachmentBackend(object):
    """Backend mixin that records gateway-to-VPC attachments, keyed by gateway id."""

    def __init__(self):
        self.gateway_attachments = {}
        super(VPCGatewayAttachmentBackend, self).__init__()

    def create_vpc_gateway_attachment(self, vpc_id, gateway_id):
        """Record an attachment between a gateway and a VPC.

        Fixed: VPCGatewayAttachment.__init__ takes (gateway_id, vpc_id), so
        the previous positional call ``VPCGatewayAttachment(vpc_id,
        gateway_id)`` swapped the two ids on the stored attachment. Keyword
        arguments make the mapping explicit and correct.
        """
        attachment = VPCGatewayAttachment(gateway_id=gateway_id, vpc_id=vpc_id)
        self.gateway_attachments[gateway_id] = attachment
        return attachment
class SpotInstanceRequest(BotoSpotRequest, TaggedEC2Resource):
    """A spot instance request together with the launch specification it carries."""
    def __init__(self, ec2_backend, spot_request_id, price, image_id, type,
                 valid_from, valid_until, launch_group, availability_zone_group,
                 key_name, security_groups, user_data, instance_type, placement,
                 kernel_id, ramdisk_id, monitoring_enabled, subnet_id,
                 **kwargs):
        super(SpotInstanceRequest, self).__init__(**kwargs)
        ls = LaunchSpecification()
        self.ec2_backend = ec2_backend
        self.launch_specification = ls
        self.id = spot_request_id
        # New requests always start in the 'open' state.
        self.state = "open"
        self.price = price
        self.type = type
        self.valid_from = valid_from
        self.valid_until = valid_until
        self.launch_group = launch_group
        self.availability_zone_group = availability_zone_group
        self.user_data = user_data  # NOT stored on the launch specification; kept on the request itself
        # Everything below is stored on the LaunchSpecification.
        ls.kernel = kernel_id
        ls.ramdisk = ramdisk_id
        ls.image_id = image_id
        ls.key_name = key_name
        ls.instance_type = instance_type
        ls.placement = placement
        ls.monitored = monitoring_enabled
        ls.subnet_id = subnet_id
        if security_groups:
            # Resolve each named group; unknown names are silently skipped.
            for group_name in security_groups:
                group = self.ec2_backend.get_security_group_from_name(group_name)
                if group:
                    ls.groups.append(group)
        else:
            # If not security groups, add the default
            default_group = self.ec2_backend.get_security_group_from_name("default")
            ls.groups.append(default_group)
    def get_filter_value(self, filter_name):
        """Resolve a DescribeSpotInstanceRequests filter name to a value."""
        if filter_name == 'state':
            return self.state
        if filter_name == 'spot-instance-request-id':
            return self.id
        # Delegate anything else (e.g. tag filters) to the base class.
        filter_value = super(SpotInstanceRequest, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error("The filter '{0}' for DescribeSpotInstanceRequests".format(filter_name))
        return filter_value
@six.add_metaclass(Model)
class SpotRequestBackend(object):
    """Backend mixin that stores spot instance requests."""
    def __init__(self):
        self.spot_instance_requests = {}
        super(SpotRequestBackend, self).__init__()
    def request_spot_instances(self, price, image_id, count, type, valid_from,
                               valid_until, launch_group, availability_zone_group,
                               key_name, security_groups, user_data,
                               instance_type, placement, kernel_id, ramdisk_id,
                               monitoring_enabled, subnet_id):
        """Create `count` spot requests that share the same launch parameters."""
        requests = []
        for _ in range(count):
            spot_request_id = random_spot_request_id()
            request = SpotInstanceRequest(self,
                                          spot_request_id, price, image_id, type, valid_from, valid_until,
                                          launch_group, availability_zone_group, key_name, security_groups,
                                          user_data, instance_type, placement, kernel_id, ramdisk_id,
                                          monitoring_enabled, subnet_id)
            self.spot_instance_requests[spot_request_id] = request
            requests.append(request)
        return requests
    @Model.prop('SpotInstanceRequest')
    def describe_spot_instance_requests(self, filters=None):
        """Return all spot requests matching the given filters."""
        requests = self.spot_instance_requests.values()
        return generic_filter(filters, requests)
    def cancel_spot_instance_requests(self, request_ids):
        """Remove and return the given requests; an unknown id raises KeyError."""
        requests = []
        for request_id in request_ids:
            requests.append(self.spot_instance_requests.pop(request_id))
        return requests
class ElasticAddress(object):
    """An Elastic IP address in either the 'standard' or 'vpc' domain."""

    def __init__(self, domain):
        self.public_ip = random_ip()
        # Only VPC-domain addresses carry an allocation id.
        self.allocation_id = random_eip_allocation_id() if domain == "vpc" else None
        self.domain = domain
        self.instance = None
        self.eni = None
        self.association_id = None

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create (and optionally associate) an EIP from an AWS::EC2::EIP resource."""
        backend = ec2_backends[region_name]
        properties = cloudformation_json.get('Properties')
        instance_id = None
        if properties:
            requested_domain = properties.get('Domain')
            eip = backend.allocate_address(
                domain=requested_domain if requested_domain else 'standard')
            instance_id = properties.get('InstanceId')
        else:
            eip = backend.allocate_address(domain='standard')
        if instance_id:
            instance = backend.get_instance_by_id(instance_id)
            backend.associate_address(instance, address=eip.public_ip)
        return eip

    @property
    def physical_resource_id(self):
        return self.allocation_id if self.allocation_id else self.public_ip

    def get_cfn_attribute(self, attribute_name):
        """Resolve a CloudFormation Fn::GetAtt on this address."""
        from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
        if attribute_name == 'AllocationId':
            return self.allocation_id
        raise UnformattedGetAttTemplateException()
class ElasticAddressBackend(object):
    """Backend mixin for Elastic IP allocation, association, and release."""
    def __init__(self):
        self.addresses = []
        super(ElasticAddressBackend, self).__init__()
    def allocate_address(self, domain):
        """Allocate a new EIP; domain must be 'standard' or 'vpc'."""
        if domain not in ['standard', 'vpc']:
            raise InvalidDomainError(domain)
        address = ElasticAddress(domain)
        self.addresses.append(address)
        return address
    def address_by_ip(self, ips):
        """Return addresses whose public IP is in `ips`; raise if any is missing."""
        eips = [address for address in self.addresses
                if address.public_ip in ips]
        # TODO: Trim error message down to specific invalid address.
        if not eips or len(ips) > len(eips):
            raise InvalidAddressError(ips)
        return eips
    def address_by_allocation(self, allocation_ids):
        """Return addresses by allocation id; raise if any id is unknown."""
        eips = [address for address in self.addresses
                if address.allocation_id in allocation_ids]
        # TODO: Trim error message down to specific invalid id.
        if not eips or len(allocation_ids) > len(eips):
            raise InvalidAllocationIdError(allocation_ids)
        return eips
    def address_by_association(self, association_ids):
        """Return addresses by association id; raise if any id is unknown."""
        eips = [address for address in self.addresses
                if address.association_id in association_ids]
        # TODO: Trim error message down to specific invalid id.
        if not eips or len(association_ids) > len(eips):
            raise InvalidAssociationIdError(association_ids)
        return eips
    def associate_address(self, instance=None, eni=None, address=None, allocation_id=None, reassociate=False):
        """Associate an EIP (looked up by IP or allocation id) with an instance or ENI."""
        eips = []
        if address:
            eips = self.address_by_ip([address])
        elif allocation_id:
            eips = self.address_by_allocation([allocation_id])
        eip = eips[0]
        # Association proceeds when the EIP is free, already bound to the same
        # target, or when reassociation is explicitly requested.
        new_instance_association = bool(instance and (not eip.instance or eip.instance.id == instance.id))
        new_eni_association = bool(eni and (not eip.eni or eni.id == eip.eni.id))
        if new_instance_association or new_eni_association or reassociate:
            eip.instance = instance
            eip.eni = eni
            if eip.eni:
                eip.eni.public_ip = eip.public_ip
            if eip.domain == "vpc":
                # VPC-domain EIPs get a fresh association id on every association.
                eip.association_id = random_eip_association_id()
            return eip
        raise ResourceAlreadyAssociatedError(eip.public_ip)
    def describe_addresses(self):
        return self.addresses
    def disassociate_address(self, address=None, association_id=None):
        """Detach an EIP from whatever instance/ENI it is associated with."""
        eips = []
        if address:
            eips = self.address_by_ip([address])
        elif association_id:
            eips = self.address_by_association([association_id])
        eip = eips[0]
        if eip.eni:
            # A running instance's ENI may fall back to an auto-assigned
            # public IP; otherwise the public IP is simply cleared.
            if eip.eni.instance and eip.eni.instance._state.name == "running":
                eip.eni.check_auto_public_ip()
            else:
                eip.eni.public_ip = None
            eip.eni = None
        eip.instance = None
        eip.association_id = None
        return True
    def release_address(self, address=None, allocation_id=None):
        """Disassociate (if needed) and remove an EIP from the pool."""
        eips = []
        if address:
            eips = self.address_by_ip([address])
        elif allocation_id:
            eips = self.address_by_allocation([allocation_id])
        eip = eips[0]
        self.disassociate_address(address=eip.public_ip)
        eip.allocation_id = None
        self.addresses.remove(eip)
        return True
class DHCPOptionsSet(TaggedEC2Resource):
    """A DHCP options set, storing its options as an option-name -> values map."""

    def __init__(self, ec2_backend, domain_name_servers=None, domain_name=None,
                 ntp_servers=None, netbios_name_servers=None,
                 netbios_node_type=None):
        self.ec2_backend = ec2_backend
        self._options = {
            "domain-name-servers": domain_name_servers,
            "domain-name": domain_name,
            "ntp-servers": ntp_servers,
            "netbios-name-servers": netbios_name_servers,
            "netbios-node-type": netbios_node_type,
        }
        self.id = random_dhcp_option_id()
        self.vpc = None

    def get_filter_value(self, filter_name):
        """Resolve a DescribeDhcpOptions filter name to a value.

        API Version 2015-10-01 defines these filters for DescribeDhcpOptions:
        dhcp-options-id, key, value, tag:key=value, tag-key, tag-value.
        See: http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeDhcpOptions.html
        """
        if filter_name == 'dhcp-options-id':
            return self.id
        if filter_name == 'key':
            return list(self._options.keys())
        if filter_name == 'value':
            # Flatten the non-empty option value lists into one iterable.
            populated = [entry for entry in list(self._options.values()) if entry]
            return itertools.chain(*populated)
        filter_value = super(DHCPOptionsSet, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error(
                "The filter '{0}' for DescribeDhcpOptions".format(filter_name))
        return filter_value

    @property
    def options(self):
        return self._options
class DHCPOptionsSetBackend(object):
    """Backend mixin for DHCP options sets and their VPC associations."""
    def __init__(self):
        self.dhcp_options_sets = {}
        super(DHCPOptionsSetBackend, self).__init__()
    def associate_dhcp_options(self, dhcp_options, vpc):
        """Bidirectionally link a DHCP options set with a VPC."""
        dhcp_options.vpc = vpc
        vpc.dhcp_options = dhcp_options
    def create_dhcp_options(
            self, domain_name_servers=None, domain_name=None,
            ntp_servers=None, netbios_name_servers=None,
            netbios_node_type=None):
        """Validate and create a DHCP options set.

        Raises InvalidParameterValueError when a server list has more than
        four entries or the NetBIOS node type is not one of 1, 2, 4, 8.
        """
        NETBIOS_NODE_TYPES = [1, 2, 4, 8]
        # EC2 allows at most four servers per server-list option.
        for field_value in domain_name_servers, ntp_servers, netbios_name_servers:
            if field_value and len(field_value) > 4:
                raise InvalidParameterValueError(",".join(field_value))
        # Only the first element of netbios_node_type is validated; assumes a
        # one-element sequence — TODO confirm against callers.
        if netbios_node_type and int(netbios_node_type[0]) not in NETBIOS_NODE_TYPES:
            raise InvalidParameterValueError(netbios_node_type)
        options = DHCPOptionsSet(
            self, domain_name_servers, domain_name, ntp_servers,
            netbios_name_servers, netbios_node_type
        )
        self.dhcp_options_sets[options.id] = options
        return options
    def describe_dhcp_options(self, options_ids=None):
        """Return the requested options sets, or all sets when no ids given."""
        options_sets = []
        for option_id in options_ids or []:
            if option_id in self.dhcp_options_sets:
                options_sets.append(self.dhcp_options_sets[option_id])
            else:
                raise InvalidDHCPOptionsIdError(option_id)
        return options_sets or self.dhcp_options_sets.values()
    def delete_dhcp_options_set(self, options_id):
        """Delete an options set; the id must be well-formed and unassigned."""
        if not (options_id and options_id.startswith('dopt-')):
            raise MalformedDHCPOptionsIdError(options_id)
        if options_id in self.dhcp_options_sets:
            # Cannot delete a set that is still assigned to a VPC.
            if self.dhcp_options_sets[options_id].vpc:
                raise DependencyViolationError("Cannot delete assigned DHCP options.")
            self.dhcp_options_sets.pop(options_id)
        else:
            raise InvalidDHCPOptionsIdError(options_id)
        return True
    def get_all_dhcp_options(self, dhcp_options_ids=None, filters=None):
        """List options sets, optionally restricted by id, then filtered."""
        dhcp_options_sets = self.dhcp_options_sets.values()
        if dhcp_options_ids:
            dhcp_options_sets = [dhcp_options_set for dhcp_options_set in dhcp_options_sets if dhcp_options_set.id in dhcp_options_ids]
            # If any requested id matched nothing, report the first such id.
            if len(dhcp_options_sets) != len(dhcp_options_ids):
                invalid_id = list(set(dhcp_options_ids).difference(set([dhcp_options_set.id for dhcp_options_set in dhcp_options_sets])))[0]
                raise InvalidDHCPOptionsIdError(invalid_id)
        return generic_filter(filters, dhcp_options_sets)
class VPNConnection(TaggedEC2Resource):
    """A VPN connection between a customer gateway and a VPN gateway."""
    def __init__(self, ec2_backend, id, type,
                 customer_gateway_id, vpn_gateway_id):
        self.ec2_backend = ec2_backend
        self.id = id
        # New connections are immediately 'available'.
        self.state = 'available'
        self.customer_gateway_configuration = {}
        self.type = type
        self.customer_gateway_id = customer_gateway_id
        self.vpn_gateway_id = vpn_gateway_id
        # Not modeled: these remain None.
        self.tunnels = None
        self.options = None
        self.static_routes = None
class VPNConnectionBackend(object):
    """Backend mixin for VPN connection CRUD."""

    def __init__(self):
        self.vpn_connections = {}
        super(VPNConnectionBackend, self).__init__()

    def create_vpn_connection(self, type, customer_gateway_id,
                              vpn_gateway_id,
                              static_routes_only=None):
        """Create a VPN connection; static_routes_only is accepted but ignored."""
        connection = VPNConnection(
            self, id=random_vpn_connection_id(), type=type,
            customer_gateway_id=customer_gateway_id,
            vpn_gateway_id=vpn_gateway_id
        )
        self.vpn_connections[connection.id] = connection
        return connection

    def delete_vpn_connection(self, vpn_connection_id):
        if vpn_connection_id not in self.vpn_connections:
            raise InvalidVpnConnectionIdError(vpn_connection_id)
        self.vpn_connections.pop(vpn_connection_id)
        return True

    def describe_vpn_connections(self, vpn_connection_ids=None):
        """Return the requested connections, or all when no ids are given."""
        matched = []
        for connection_id in vpn_connection_ids or []:
            if connection_id not in self.vpn_connections:
                raise InvalidVpnConnectionIdError(connection_id)
            matched.append(self.vpn_connections[connection_id])
        return matched or self.vpn_connections.values()

    def get_all_vpn_connections(self, vpn_connection_ids=None, filters=None):
        """List connections, optionally restricted by id, then filtered."""
        connections = self.vpn_connections.values()
        if vpn_connection_ids:
            connections = [conn for conn in connections
                           if conn.id in vpn_connection_ids]
            if len(connections) != len(vpn_connection_ids):
                # Report one id that matched nothing.
                known_ids = set(conn.id for conn in connections)
                invalid_id = list(set(vpn_connection_ids).difference(known_ids))[0]
                raise InvalidVpnConnectionIdError(invalid_id)
        return generic_filter(filters, connections)
class NetworkAclBackend(object):
    """Backend mixin for network ACLs, their entries, and subnet associations."""

    def __init__(self):
        self.network_acls = {}
        super(NetworkAclBackend, self).__init__()

    def get_network_acl(self, network_acl_id):
        """Return the ACL for the given id or raise InvalidNetworkAclIdError."""
        network_acl = self.network_acls.get(network_acl_id, None)
        if not network_acl:
            raise InvalidNetworkAclIdError(network_acl_id)
        return network_acl

    def create_network_acl(self, vpc_id, default=False):
        """Create a network ACL inside an existing VPC."""
        network_acl_id = random_network_acl_id()
        self.get_vpc(vpc_id)  # validates that the VPC exists
        network_acl = NetworkAcl(self, network_acl_id, vpc_id, default)
        self.network_acls[network_acl_id] = network_acl
        return network_acl

    def get_all_network_acls(self, network_acl_ids=None, filters=None):
        """List ACLs, optionally restricted by id, then apply filters."""
        network_acls = self.network_acls.values()
        if network_acl_ids:
            network_acls = [network_acl for network_acl in network_acls
                            if network_acl.id in network_acl_ids]
            if len(network_acls) != len(network_acl_ids):
                invalid_id = list(set(network_acl_ids).difference(set([network_acl.id for network_acl in network_acls])))[0]
                # Fixed: previously raised InvalidRouteTableIdError (copy-paste
                # from the route-table backend). An unknown network ACL id must
                # raise the ACL-specific error, matching delete_network_acl.
                raise InvalidNetworkAclIdError(invalid_id)
        return generic_filter(filters, network_acls)

    def delete_network_acl(self, network_acl_id):
        deleted = self.network_acls.pop(network_acl_id, None)
        if not deleted:
            raise InvalidNetworkAclIdError(network_acl_id)
        return deleted

    def create_network_acl_entry(self, network_acl_id, rule_number,
                                 protocol, rule_action, egress, cidr_block,
                                 icmp_code, icmp_type, port_range_from,
                                 port_range_to):
        """Create an ACL entry and append it to the target ACL."""
        network_acl_entry = NetworkAclEntry(self, network_acl_id, rule_number,
                                            protocol, rule_action, egress,
                                            cidr_block, icmp_code, icmp_type,
                                            port_range_from, port_range_to)
        network_acl = self.get_network_acl(network_acl_id)
        network_acl.network_acl_entries.append(network_acl_entry)
        return network_acl_entry

    def replace_network_acl_association(self, association_id,
                                        network_acl_id):
        """Move a subnet association from its current ACL to another ACL."""
        # Find the ACL currently holding the association and remove it,
        # remembering which subnet it pointed at.
        current_acl = next(value for key, value in self.network_acls.items()
                           if association_id in value.associations.keys())
        subnet_id = None
        for key in list(current_acl.associations.keys()):
            if key == association_id:
                subnet_id = current_acl.associations[key].subnet_id
                del current_acl.associations[key]
                break
        # Re-create the association under the new ACL with a fresh id.
        new_assoc_id = random_network_acl_subnet_association_id()
        association = NetworkAclAssociation(self,
                                            new_assoc_id,
                                            subnet_id,
                                            network_acl_id)
        new_acl = self.get_network_acl(network_acl_id)
        new_acl.associations[new_assoc_id] = association
        return association

    def associate_default_network_acl_with_subnet(self, subnet_id):
        """Attach a newly created subnet to the VPC's default network ACL."""
        association_id = random_network_acl_subnet_association_id()
        # Fixed: NetworkAcl.default holds the *string* 'true'/'false', and both
        # are truthy, so the old `if acl.default` matched the first ACL in
        # iteration order rather than the default one. Compare explicitly.
        acl = next(acl for acl in self.network_acls.values()
                   if acl.default == 'true')
        acl.associations[association_id] = NetworkAclAssociation(self, association_id,
                                                                 subnet_id, acl.id)
class NetworkAclAssociation(object):
    """Links a subnet to a network ACL via an association id."""

    def __init__(self, ec2_backend, new_association_id,
                 subnet_id, network_acl_id):
        super(NetworkAclAssociation, self).__init__()
        self.ec2_backend = ec2_backend
        # `id` and `new_association_id` deliberately hold the same value.
        self.id = new_association_id
        self.new_association_id = new_association_id
        self.subnet_id = subnet_id
        self.network_acl_id = network_acl_id
class NetworkAcl(TaggedEC2Resource):
    """A network ACL belonging to a VPC, with its entries and associations."""

    def __init__(self, ec2_backend, network_acl_id, vpc_id, default=False):
        self.ec2_backend = ec2_backend
        self.id = network_acl_id
        self.vpc_id = vpc_id
        self.network_acl_entries = []
        self.associations = {}
        # Stored as the wire-format strings "true"/"false".
        self.default = 'true' if default is True else 'false'

    def get_filter_value(self, filter_name):
        """Resolve a DescribeNetworkAcls filter name to this ACL's value."""
        simple_filters = {
            "default": self.default,
            "vpc-id": self.vpc_id,
            "association.network-acl-id": self.id,
        }
        if filter_name in simple_filters:
            return simple_filters[filter_name]
        if filter_name == "association.subnet-id":
            return [assoc.subnet_id for assoc in self.associations.values()]
        # Delegate anything else (e.g. tag filters) to the base class.
        filter_value = super(NetworkAcl, self).get_filter_value(filter_name)
        if filter_value is None:
            self.ec2_backend.raise_not_implemented_error(
                "The filter '{0}' for DescribeNetworkAcls".format(filter_name))
        return filter_value
class NetworkAclEntry(TaggedEC2Resource):
    """A single rule (entry) of a network ACL."""

    def __init__(self, ec2_backend, network_acl_id, rule_number,
                 protocol, rule_action, egress, cidr_block,
                 icmp_code, icmp_type, port_range_from,
                 port_range_to):
        # NOTE(review): unlike sibling resources this does not call
        # super().__init__() — confirm tag support is not expected here.
        self.ec2_backend = ec2_backend
        # Identity: owning ACL and ordering number within it.
        self.network_acl_id = network_acl_id
        self.rule_number = rule_number
        # Match criteria.
        self.protocol = protocol
        self.egress = egress
        self.cidr_block = cidr_block
        self.icmp_code = icmp_code
        self.icmp_type = icmp_type
        self.port_range_from = port_range_from
        self.port_range_to = port_range_to
        # Verdict applied when the rule matches.
        self.rule_action = rule_action
class VpnGateway(TaggedEC2Resource):
    """A virtual private gateway attachable to VPCs."""

    def __init__(self, ec2_backend, id, type):
        super(VpnGateway, self).__init__()
        self.ec2_backend = ec2_backend
        self.id = id
        self.type = type
        # vpc_id -> VpnGatewayAttachment
        self.attachments = {}
class VpnGatewayAttachment(object):
    """Records the attachment state of a VPN gateway to one VPC."""

    def __init__(self, vpc_id, state):
        super(VpnGatewayAttachment, self).__init__()
        self.vpc_id = vpc_id
        self.state = state
class VpnGatewayBackend(object):
    """Mixin implementing the VPN-gateway portion of the EC2 backend."""

    def __init__(self):
        # vpn_gateway_id -> VpnGateway
        self.vpn_gateways = {}
        super(VpnGatewayBackend, self).__init__()

    def create_vpn_gateway(self, type='ipsec.1'):
        """Create, register and return a new VPN gateway."""
        new_id = random_vpn_gateway_id()
        gateway = VpnGateway(self, new_id, type)
        self.vpn_gateways[new_id] = gateway
        return gateway

    def get_all_vpn_gateways(self, filters=None):
        """Return all VPN gateways matching *filters*."""
        return generic_filter(filters, self.vpn_gateways.values())

    def get_vpn_gateway(self, vpn_gateway_id):
        """Look up one gateway or raise InvalidVpnGatewayIdError."""
        try:
            return self.vpn_gateways[vpn_gateway_id]
        except KeyError:
            raise InvalidVpnGatewayIdError(vpn_gateway_id)

    def attach_vpn_gateway(self, vpn_gateway_id, vpc_id):
        """Attach the gateway to *vpc_id*; both ids are validated first."""
        gateway = self.get_vpn_gateway(vpn_gateway_id)
        self.get_vpc(vpc_id)  # raises if the VPC does not exist
        attachment = VpnGatewayAttachment(vpc_id, state='attached')
        gateway.attachments[vpc_id] = attachment
        return attachment

    def delete_vpn_gateway(self, vpn_gateway_id):
        """Remove and return the gateway, raising if it is unknown."""
        removed = self.vpn_gateways.pop(vpn_gateway_id, None)
        if removed is None:
            raise InvalidVpnGatewayIdError(vpn_gateway_id)
        return removed

    def detach_vpn_gateway(self, vpn_gateway_id, vpc_id):
        """Detach the gateway from *vpc_id* and return the old attachment."""
        gateway = self.get_vpn_gateway(vpn_gateway_id)
        self.get_vpc(vpc_id)  # raises if the VPC does not exist
        attachment = gateway.attachments.pop(vpc_id, None)
        if attachment is None:
            raise InvalidVPCIdError(vpc_id)
        return attachment
class CustomerGateway(TaggedEC2Resource):
    """The customer's side of a VPN connection."""

    def __init__(self, ec2_backend, id, type, ip_address, bgp_asn):
        super(CustomerGateway, self).__init__()
        self.ec2_backend = ec2_backend
        self.id = id
        self.type = type
        self.ip_address = ip_address
        self.bgp_asn = bgp_asn
        # vpc_id -> attachment record
        self.attachments = {}
class CustomerGatewayBackend(object):
    """Mixin implementing customer-gateway storage for the EC2 backend."""

    def __init__(self):
        # customer_gateway_id -> CustomerGateway
        self.customer_gateways = {}
        super(CustomerGatewayBackend, self).__init__()

    def create_customer_gateway(self, type='ipsec.1', ip_address=None, bgp_asn=None):
        """Create, register and return a customer gateway."""
        new_id = random_customer_gateway_id()
        gateway = CustomerGateway(self, new_id, type, ip_address, bgp_asn)
        self.customer_gateways[new_id] = gateway
        return gateway

    def get_all_customer_gateways(self, filters=None):
        """Return customer gateways matching *filters*."""
        return generic_filter(filters, self.customer_gateways.values())

    def get_customer_gateway(self, customer_gateway_id):
        """Look up one gateway or raise InvalidCustomerGatewayIdError."""
        try:
            return self.customer_gateways[customer_gateway_id]
        except KeyError:
            raise InvalidCustomerGatewayIdError(customer_gateway_id)

    def delete_customer_gateway(self, customer_gateway_id):
        """Remove and return the gateway, raising if it is unknown."""
        removed = self.customer_gateways.pop(customer_gateway_id, None)
        if removed is None:
            raise InvalidCustomerGatewayIdError(customer_gateway_id)
        return removed
class NatGateway(object):
    """Mock NAT gateway.

    Construction has side effects on the backend: it allocates a network
    interface in the subnet and associates the given Elastic IP allocation
    with that interface.
    """

    def __init__(self, backend, subnet_id, allocation_id):
        # public properties
        self.id = random_nat_gateway_id()
        self.subnet_id = subnet_id
        self.allocation_id = allocation_id
        self.state = 'available'
        self.private_ip = random_private_ip()

        # protected properties
        self._created_at = datetime.utcnow()
        self._backend = backend
        # NOTE: this is the core of NAT Gateways creation
        self._eni = self._backend.create_network_interface(backend.get_subnet(self.subnet_id), self.private_ip)

        # associate allocation with ENI
        self._backend.associate_address(eni=self._eni, allocation_id=self.allocation_id)

    @property
    def vpc_id(self):
        # The VPC is derived from the owning subnet rather than stored.
        subnet = self._backend.get_subnet(self.subnet_id)
        return subnet.vpc_id

    @property
    def create_time(self):
        # ISO-8601 with milliseconds, matching the EC2 API's format.
        return iso_8601_datetime_with_milliseconds(self._created_at)

    @property
    def network_interface_id(self):
        return self._eni.id

    @property
    def public_ip(self):
        # Public IP comes from the Elastic IP looked up by allocation id.
        eips = self._backend.address_by_allocation([self.allocation_id])
        return eips[0].public_ip

    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        # CloudFormation AWS::EC2::NatGateway support: delegate creation to
        # the region's backend using the template's Properties.
        ec2_backend = ec2_backends[region_name]
        nat_gateway = ec2_backend.create_nat_gateway(
            cloudformation_json['Properties']['SubnetId'],
            cloudformation_json['Properties']['AllocationId'],
        )
        return nat_gateway
class NatGatewayBackend(object):
    """Mixin tracking NAT gateways for the EC2 backend."""

    def __init__(self):
        # nat_gateway_id -> NatGateway
        self.nat_gateways = {}

    def get_all_nat_gateways(self, filters):
        """Return every known NAT gateway (*filters* is currently ignored)."""
        return self.nat_gateways.values()

    def create_nat_gateway(self, subnet_id, allocation_id):
        """Create, register and return a NAT gateway for *subnet_id*."""
        gateway = NatGateway(self, subnet_id, allocation_id)
        self.nat_gateways[gateway.id] = gateway
        return gateway

    def delete_nat_gateway(self, nat_gateway_id):
        """Remove and return the gateway; raises KeyError if unknown."""
        return self.nat_gateways.pop(nat_gateway_id)
class EC2Backend(BaseBackend, InstanceBackend, TagBackend, AmiBackend,
                 RegionsAndZonesBackend, SecurityGroupBackend, EBSBackend,
                 VPCBackend, SubnetBackend, SubnetRouteTableAssociationBackend,
                 NetworkInterfaceBackend, VPNConnectionBackend,
                 VPCPeeringConnectionBackend,
                 RouteTableBackend, RouteBackend, InternetGatewayBackend,
                 VPCGatewayAttachmentBackend, SpotRequestBackend,
                 ElasticAddressBackend, KeyPairBackend, DHCPOptionsSetBackend,
                 NetworkAclBackend, VpnGatewayBackend, CustomerGatewayBackend,
                 NatGatewayBackend):
    """Aggregate mock EC2 backend for one region, composed of per-resource
    mixin backends (each mixin initializes its own stores via the
    cooperative super().__init__() chain)."""

    def __init__(self, region_name):
        super(EC2Backend, self).__init__()
        self.region_name = region_name

    def reset(self):
        """Wipe all state and re-run __init__ for the same region."""
        region_name = self.region_name
        self.__dict__ = {}
        self.__init__(region_name)

    # Use this to generate a proper error template response when in a response handler.
    def raise_error(self, code, message):
        raise EC2ClientError(code, message)

    def raise_not_implemented_error(self, blurb):
        """Raise NotImplementedError with a standard 'open an issue' hint."""
        msg = "{0} has not been implemented in Moto yet." \
              " Feel free to open an issue at" \
              " https://github.com/spulec/moto/issues".format(blurb)
        raise NotImplementedError(msg)

    def do_resources_exist(self, resource_ids):
        """Validate each resource id by dispatching on its prefix to the
        matching lookup; each lookup raises if the resource is unknown.
        Returns True when every id resolved."""
        for resource_id in resource_ids:
            resource_prefix = get_prefix(resource_id)
            if resource_prefix == EC2_RESOURCE_TO_PREFIX['customer-gateway']:
                self.get_customer_gateway(customer_gateway_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['dhcp-options']:
                self.describe_dhcp_options(options_ids=[resource_id])
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['image']:
                self.describe_images(ami_ids=[resource_id])
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['instance']:
                self.get_instance_by_id(instance_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['internet-gateway']:
                self.describe_internet_gateways(internet_gateway_ids=[resource_id])
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-acl']:
                self.get_all_network_acls()
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['network-interface']:
                self.describe_network_interfaces(filters={'network-interface-id': resource_id})
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['reserved-instance']:
                self.raise_not_implemented_error('DescribeReservedInstances')
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['route-table']:
                self.get_route_table(route_table_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['security-group']:
                self.describe_security_groups(group_ids=[resource_id])
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['snapshot']:
                self.get_snapshot(snapshot_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['spot-instance-request']:
                self.describe_spot_instance_requests(filters={'spot-instance-request-id': resource_id})
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['subnet']:
                self.get_subnet(subnet_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['volume']:
                self.get_volume(volume_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpc']:
                self.get_vpc(vpc_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpc-peering-connection']:
                self.get_vpc_peering_connection(vpc_pcx_id=resource_id)
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpn-connection']:
                self.describe_vpn_connections(vpn_connection_ids=[resource_id])
            elif resource_prefix == EC2_RESOURCE_TO_PREFIX['vpn-gateway']:
                self.get_vpn_gateway(vpn_gateway_id=resource_id)
        return True
# One EC2Backend instance per region, keyed by region name (e.g. 'us-east-1').
ec2_backends = {}
for region in boto.ec2.regions():
    ec2_backends[region.name] = EC2Backend(region.name)
|
mrucci/moto
|
moto/ec2/models.py
|
Python
|
apache-2.0
| 121,494
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import weakref
from oslo.config import cfg
from neutron.agent.common import config
from neutron.agent import rpc as agent_rpc
from neutron.common import constants
from neutron import context
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import loopingcall
from neutron.openstack.common import periodic_task
from neutron.services.loadbalancer.drivers.haproxy import (
agent_api,
plugin_driver
)
LOG = logging.getLogger(__name__)

# NOTE(review): presumably the prefix for per-loadbalancer network
# namespaces (see the namespace driver) — confirm; it is unused here.
NS_PREFIX = 'qlbaas-'

# Agent-side configuration options for the LBaaS HAProxy agent.
OPTS = [
    cfg.StrOpt(
        'device_driver',
        default=('neutron.services.loadbalancer.drivers'
                 '.haproxy.namespace_driver.HaproxyNSDriver'),
        help=_('The driver used to manage the loadbalancing device'),
    ),
    cfg.StrOpt(
        'loadbalancer_state_path',
        default='$state_path/lbaas',
        help=_('Location to store config and state files'),
    ),
    cfg.StrOpt(
        'interface_driver',
        help=_('The driver used to manage the virtual interface')
    ),
    cfg.StrOpt(
        'user_group',
        default='nogroup',
        help=_('The user group'),
    ),
]
class LogicalDeviceCache(object):
    """Cache of known logical devices, indexed by VIP port id and pool id.

    The index dictionaries hold weak references; the ``devices`` set holds
    the only strong reference, so removing an entry from the set lets the
    index entries vanish automatically.
    """

    class Device(object):
        """Hashable value object so the weakref lookups have a referent."""

        def __init__(self, port_id, pool_id):
            self.port_id = port_id
            self.pool_id = pool_id

        def __eq__(self, other):
            return self.__dict__ == other.__dict__

        def __hash__(self):
            return hash((self.port_id, self.pool_id))

    def __init__(self):
        self.devices = set()
        self.port_lookup = weakref.WeakValueDictionary()
        self.pool_lookup = weakref.WeakValueDictionary()

    def put(self, device):
        """Index a logical-device dict by its VIP port id and pool id."""
        port_id = device['vip']['port_id']
        pool_id = device['pool']['id']
        entry = self.Device(port_id, pool_id)
        if entry in self.devices:
            return
        self.devices.add(entry)
        self.port_lookup[port_id] = entry
        self.pool_lookup[pool_id] = entry

    def remove(self, device):
        """Drop a device, given either a Device or a logical-device dict."""
        entry = device
        if not isinstance(entry, self.Device):
            entry = self.Device(
                device['vip']['port_id'], device['pool']['id']
            )
        self.devices.discard(entry)

    def remove_by_pool_id(self, pool_id):
        """Drop the cached device owning *pool_id*, if any."""
        entry = self.pool_lookup.get(pool_id)
        if entry:
            self.devices.remove(entry)

    def get_by_pool_id(self, pool_id):
        return self.pool_lookup.get(pool_id)

    def get_by_port_id(self, port_id):
        return self.port_lookup.get(port_id)

    def get_pool_ids(self):
        return self.pool_lookup.keys()
class LbaasAgentManager(periodic_task.PeriodicTasks):
    """Agent-side manager keeping haproxy devices in sync with the LBaaS
    plugin over RPC.

    Fixes applied: removed unreachable ``vif_driver = None`` (dead code
    after ``raise SystemExit``) together with its misleading "driver is
    optional" comment; corrected the "upating" typo in the stats log
    message; wrapped the report-state log message in ``_()`` for
    consistency with the file's other translated messages.
    """

    # history
    #   1.0 Initial version
    #   1.1 Support agent_updated call
    RPC_API_VERSION = '1.1'

    def __init__(self, conf):
        self.conf = conf
        try:
            vif_driver = importutils.import_object(conf.interface_driver, conf)
        except ImportError:
            # Without an interface driver the agent cannot plug VIP ports,
            # so startup is aborted.
            msg = _('Error importing interface driver: %s')
            raise SystemExit(msg % conf.interface_driver)

        try:
            self.driver = importutils.import_object(
                conf.device_driver,
                config.get_root_helper(self.conf),
                conf.loadbalancer_state_path,
                vif_driver,
                self._vip_plug_callback
            )
        except ImportError:
            msg = _('Error importing loadbalancer device driver: %s')
            raise SystemExit(msg % conf.device_driver)

        # State periodically reported to the plugin; 'start_flag' is only
        # present on the first report after startup.
        self.agent_state = {
            'binary': 'neutron-loadbalancer-agent',
            'host': conf.host,
            'topic': plugin_driver.TOPIC_LOADBALANCER_AGENT,
            'configurations': {'device_driver': conf.device_driver,
                               'interface_driver': conf.interface_driver},
            'agent_type': constants.AGENT_TYPE_LOADBALANCER,
            'start_flag': True}
        self.admin_state_up = True

        self.context = context.get_admin_context_without_session()
        self._setup_rpc()
        self.needs_resync = False
        self.cache = LogicalDeviceCache()

    def _setup_rpc(self):
        """Create the plugin/state RPC proxies and start the heartbeat."""
        self.plugin_rpc = agent_api.LbaasAgentApi(
            plugin_driver.TOPIC_PROCESS_ON_HOST,
            self.context,
            self.conf.host
        )
        self.state_rpc = agent_rpc.PluginReportStateAPI(
            plugin_driver.TOPIC_PROCESS_ON_HOST)
        report_interval = self.conf.AGENT.report_interval
        if report_interval:
            heartbeat = loopingcall.FixedIntervalLoopingCall(
                self._report_state)
            heartbeat.start(interval=report_interval)

    def _report_state(self):
        """Heartbeat: report the device count and agent state to the plugin."""
        try:
            device_count = len(self.cache.devices)
            self.agent_state['configurations']['devices'] = device_count
            self.state_rpc.report_state(self.context,
                                        self.agent_state)
            self.agent_state.pop('start_flag', None)
        except Exception:
            LOG.exception(_("Failed reporting state!"))

    def initialize_service_hook(self, started_by):
        """Service startup hook: populate local state from the plugin."""
        self.sync_state()

    @periodic_task.periodic_task
    def periodic_resync(self, context):
        if self.needs_resync:
            self.needs_resync = False
            self.sync_state()

    @periodic_task.periodic_task(spacing=6)
    def collect_stats(self, context):
        """Push per-pool statistics from the device driver to the plugin."""
        for pool_id in self.cache.get_pool_ids():
            try:
                stats = self.driver.get_stats(pool_id)
                if stats:
                    self.plugin_rpc.update_pool_stats(pool_id, stats)
            except Exception:
                LOG.exception(_('Error updating stats'))
                self.needs_resync = True

    def _vip_plug_callback(self, action, port):
        if action == 'plug':
            self.plugin_rpc.plug_vip_port(port['id'])
        elif action == 'unplug':
            self.plugin_rpc.unplug_vip_port(port['id'])

    def sync_state(self):
        """Reconcile local devices with the plugin's ready-device list."""
        known_devices = set(self.cache.get_pool_ids())
        try:
            ready_logical_devices = set(self.plugin_rpc.get_ready_devices())

            for deleted_id in known_devices - ready_logical_devices:
                self.destroy_device(deleted_id)

            for pool_id in ready_logical_devices:
                self.refresh_device(pool_id)

        except Exception:
            LOG.exception(_('Unable to retrieve ready devices'))
            self.needs_resync = True

        self.remove_orphans()

    def refresh_device(self, pool_id):
        """Create or update the device for *pool_id* and cache its config."""
        try:
            logical_config = self.plugin_rpc.get_logical_device(pool_id)

            if self.driver.exists(pool_id):
                self.driver.update(logical_config)
            else:
                self.driver.create(logical_config)
            self.cache.put(logical_config)
        except Exception:
            LOG.exception(_('Unable to refresh device for pool: %s'), pool_id)
            self.needs_resync = True

    def destroy_device(self, pool_id):
        """Tear down the device for *pool_id* and drop it from the cache."""
        device = self.cache.get_by_pool_id(pool_id)
        if not device:
            return
        try:
            self.driver.destroy(pool_id)
            self.plugin_rpc.pool_destroyed(pool_id)
        except Exception:
            LOG.exception(_('Unable to destroy device for pool: %s'), pool_id)
            self.needs_resync = True
        self.cache.remove(device)

    def remove_orphans(self):
        """Ask the driver to clean devices the agent no longer tracks."""
        try:
            self.driver.remove_orphans(self.cache.get_pool_ids())
        except NotImplementedError:
            pass  # Not all drivers will support this

    def reload_pool(self, context, pool_id=None, host=None):
        """Handle RPC cast from plugin to reload a pool."""
        if pool_id:
            self.refresh_device(pool_id)

    def modify_pool(self, context, pool_id=None, host=None):
        """Handle RPC cast from plugin to modify a pool if known to agent."""
        if self.cache.get_by_pool_id(pool_id):
            self.refresh_device(pool_id)

    def destroy_pool(self, context, pool_id=None, host=None):
        """Handle RPC cast from plugin to destroy a pool if known to agent."""
        if self.cache.get_by_pool_id(pool_id):
            self.destroy_device(pool_id)

    def agent_updated(self, context, payload):
        """Handle the agent_updated notification event."""
        if payload['admin_state_up'] != self.admin_state_up:
            self.admin_state_up = payload['admin_state_up']
            if self.admin_state_up:
                self.needs_resync = True
            else:
                for pool_id in self.cache.get_pool_ids():
                    self.destroy_device(pool_id)
            LOG.info(_("agent_updated by server side %s!"), payload)
|
armando-migliaccio/neutron
|
neutron/services/loadbalancer/drivers/haproxy/agent_manager.py
|
Python
|
apache-2.0
| 9,680
|
# Copyright 2016 Rudrajit Tapadar
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base import TestPscan
import errno
import mock
from StringIO import StringIO
import sys
class TestScan(TestPscan):
    """Tests for the TCP/UDP scanner and its table-style output.

    Fix applied: ``test_show_partially_open_port_range`` redirected
    ``sys.stdout`` to a StringIO but never restored it, leaking the
    redirect into every test that runs afterwards; it now restores
    stdout like its sibling tests.
    """

    @mock.patch('socket.socket.connect')
    def test_tcp_port_open(self, mock_connect):
        """A successful connect() marks the port Open."""
        hosts = "127.0.0.1"
        ports = "22"
        mock_connect.return_value = None
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.tcp()
        h = self.get_host_obj(hosts, [22])
        h[0].ports[0].status = "Open"
        self.assertPortsEqual(scanner.hosts[0].ports,
                              h[0].ports)

    @mock.patch('socket.socket.connect')
    def test_tcp_port_closed(self, mock_connect):
        """A connect() that raises IOError marks the port Closed."""
        hosts = "127.0.0.1"
        ports = "22"
        mock_connect.side_effect = IOError()
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.tcp()
        h = self.get_host_obj(hosts, [22])
        h[0].ports[0].status = "Closed"
        self.assertPortsEqual(scanner.hosts[0].ports,
                              h[0].ports)

    @mock.patch('socket.socket.connect')
    def test_tcp_port_range(self, mock_connect):
        """A port range yields a mix of Closed and Open statuses."""
        hosts = "127.0.0.1"
        ports = "21-22"
        mock_connect.return_value = None
        mock_connect.side_effect = [IOError(), None]
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.tcp()
        h = self.get_host_obj(hosts, [21, 22])
        h[0].ports[0].status = "Closed"
        h[0].ports[1].status = "Open"
        self.assertPortsEqual(scanner.hosts[0].ports,
                              h[0].ports)

    @mock.patch('socket.socket.connect')
    def test_show_open_port(self, mock_connect):
        """show() renders an open port as a table row with its service."""
        hosts = "127.0.0.1"
        ports = "5672"
        mock_connect.return_value = None
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.tcp()
        s = sys.stdout
        o = StringIO()
        sys.stdout = o
        output = (
            "Showing results for target: 127.0.0.1\n"
            "+------+----------+-------+---------+\n"
            "| Port | Protocol | State | Service |\n"
            "+------+----------+-------+---------+\n"
            "| 5672 | TCP      | Open  | amqp    |\n"
            "+------+----------+-------+---------+"
        )
        scanner.show()
        self.assertEqual(o.getvalue().strip(), output)
        sys.stdout = s

    @mock.patch('socket.socket.connect')
    def test_show_closed_port(self, mock_connect):
        """show() renders a closed port with an 'unknown' service."""
        hosts = "127.0.0.1"
        ports = "5673"
        mock_connect.side_effect = IOError()
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.tcp()
        s = sys.stdout
        o = StringIO()
        sys.stdout = o
        output = (
            "Showing results for target: 127.0.0.1\n"
            "+------+----------+--------+---------+\n"
            "| Port | Protocol | State  | Service |\n"
            "+------+----------+--------+---------+\n"
            "| 5673 | TCP      | Closed | unknown |\n"
            "+------+----------+--------+---------+"
        )
        scanner.show()
        self.assertEqual(o.getvalue().strip(), output)
        sys.stdout = s

    @mock.patch('socket.socket.connect')
    def test_show_closed_port_range(self, mock_connect):
        """show() prints a summary line when every scanned port is closed."""
        hosts = "127.0.0.1"
        ports = "5673-5674"
        mock_connect.side_effect = IOError(errno.ECONNREFUSED)
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.tcp()
        s = sys.stdout
        o = StringIO()
        sys.stdout = o
        output = (
            "Showing results for target: 127.0.0.1\n"
            "All 2 scanned ports are closed on the target."
        )
        scanner.show()
        self.assertEqual(o.getvalue().strip(), output)
        sys.stdout = s

    @mock.patch('socket.socket.connect')
    def test_show_partially_open_port_range(self, mock_connect):
        """Only the open port of a partially open range is shown."""
        hosts = "127.0.0.1"
        ports = "5671-5672"
        mock_connect.return_value = None
        mock_connect.side_effect = [IOError(), None]
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.tcp()
        s = sys.stdout
        o = StringIO()
        sys.stdout = o
        output = (
            "Showing results for target: 127.0.0.1\n"
            "+------+----------+-------+---------+\n"
            "| Port | Protocol | State | Service |\n"
            "+------+----------+-------+---------+\n"
            "| 5672 | TCP      | Open  | amqp    |\n"
            "+------+----------+-------+---------+"
        )
        scanner.show()
        self.assertEqual(o.getvalue().strip(), output)
        # Bug fix: stdout was previously left redirected to the StringIO.
        sys.stdout = s

    @mock.patch('socket.socket.connect')
    def test_udp_port_open(self, mock_connect):
        """Smoke-test udp(); TODO: assert port status once implemented."""
        hosts = "127.0.0.1"
        ports = "53"
        mock_connect.return_value = None
        scanner = self.get_scanner_obj(hosts, ports)
        scanner.udp()
        #h = self.get_host_obj(hosts, [22])
        #h[0].ports[0].status = "Open"
        #self.assertPortsEqual(scanner.hosts[0].ports,
        #                      h[0].ports)
|
rtapadar/pscan
|
pscan/tests/test_scan.py
|
Python
|
apache-2.0
| 5,489
|
# !/usr/bin/python
#
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import urllib2
import sys
import simplejson as json
import ConfigParser
import signal
import time
# Path of the INI file holding the [api] credentials section.
CONF_FILE = '/etc/check_api.conf'

# collectd metric identity for this check.
plugin_name = "check-api-contrail-9081"
plugin_instance = "lma-contrail-extension"
plugin_interval = 90  # seconds between read callbacks
plugin_type = 'gauge'
plugin_request = 'active'

# Default Contrail API endpoint; overridden by argv[1] when run standalone.
url = "http://127.0.0.1:9081"
class OSAPI(object):
    """Minimal OpenStack API client: obtains a keystone token on
    construction and times authenticated GET requests against a service
    endpoint. (Python 2 code: urllib2/ConfigParser/old except syntax.)"""

    def __init__(self, config):
        # *config* is a ConfigParser with an [api] section providing
        # user, password, tenant and comma-separated keystone_endpoints.
        self.config = config
        self.username = self.config.get('api', 'user')
        self.password = self.config.get('api', 'password')
        self.tenant_name = self.config.get('api', 'tenant')
        self.endpoint_keystone = self.config.get('api',
                                                 'keystone_endpoints'
                                                 ).split(',')
        self.token = None
        self.tenant_id = None
        self.get_token()

    def get_timeout(self, service):
        # Per-service '<service>_timeout' option; defaults to 1 second.
        try:
            return int(self.config.get('api', '%s_timeout' % service))
        except ConfigParser.NoOptionError:
            return 1

    def get_token(self):
        """Authenticate against keystone, storing token and tenant id.

        NOTE(review): any failure calls sys.exit(1) immediately, so the
        loop never actually falls through to the remaining keystone
        endpoints, and 'data' (the request body) is clobbered with the
        parsed response — confirm whether multi-endpoint retry was
        intended before relying on the endpoint list.
        """
        data = json.dumps({
            "auth":
            {
                'tenantName': self.tenant_name,
                'passwordCredentials':
                {
                    'username': self.username,
                    'password': self.password
                }
            }
        })
        for keystone in self.endpoint_keystone:
            try:
                request = urllib2.Request(
                    '%s/tokens' % keystone,
                    data=data,
                    headers={
                        'Content-type': 'application/json'
                    })
                data = json.loads(
                    urllib2.urlopen(
                        request, timeout=self.get_timeout('keystone')).read())
                self.token = data['access']['token']['id']
                self.tenant_id = data['access']['token']['tenant']['id']
                return
            except Exception as e:
                print("Got exception '%s'" % e)
                sys.exit(1)

    def check_api(self, url, service):
        """Time one authenticated GET of *url*.

        Returns the elapsed seconds formatted '%.3f'; returns None on
        HTTPError; exits the process on any other failure.
        """
        try:
            request = urllib2.Request(
                url,
                headers={
                    'X-Auth-Token': self.token,
                })
            start_time = time.time()
            p = urllib2.urlopen(request, timeout=self.get_timeout(service))
            end_time = time.time()
        except urllib2.HTTPError, e:
            # NOTE(review): implicitly returns None, which the standalone
            # main() will print verbatim — confirm that is acceptable.
            return
        except Exception as e:
            print e
            sys.exit(1)
        return "%.3f" % (end_time - start_time)
def configure_callback(conf):
    """collectd config callback: walks configured nodes (values unused)."""
    for cfg_node in conf.children:
        val = str(cfg_node.values[0])  # read but not currently acted upon
def restore_sigchld():
    """Reset SIGCHLD to the default disposition (registered as the
    collectd init callback below)."""
    signal.signal(signal.SIGCHLD, signal.SIG_DFL)
def log_verbose(msg):
    """Log *msg* through collectd's info channel, tagged with the plugin name."""
    collectd.info('%s plugin [verbose]: %s' % (plugin_name, msg))
def payload():
    """Read the config file, authenticate, and time one API request.

    Returns OSAPI.check_api's response-time string (or None on HTTP error).
    """
    config = ConfigParser.RawConfigParser()
    config.read(CONF_FILE)
    API = OSAPI(config)
    payload = API.check_api(url, "contrail")
    return payload
def payload_callback():
    """collectd read callback: measure the API and dispatch one gauge value."""
    log_verbose('Read callback called')
    value = payload()
    # log_verbose(
    # 'Sending value: %s.%s=%s' % (plugin_name, '-'.join([val.plugin, val.type]), value))
    val = collectd.Values(
        plugin=plugin_name,  # metric source
        plugin_instance=plugin_instance,
        type=plugin_type,
        type_instance=plugin_name,
        interval=plugin_interval,
        meta={'0': True},
        values=[value]
    )
    val.dispatch()
if __name__ == '__main__':
    # Standalone mode: check the URL given on the command line and print
    # the measured response time.
    # NOTE(review): with no argument, sys.argv[1] raises IndexError before
    # the intended "Please provide URL" message — confirm desired behavior.
    if sys.argv[1]:
        url = sys.argv[1]
    else:
        print "Please provide URL"
        sys.exit(1)
    print "Plugin: " + plugin_name
    payload = payload()
    print("%s" % (payload))
    sys.exit(0)
else:
    # Imported by the collectd python plugin: register the callbacks.
    import collectd
    collectd.register_init(restore_sigchld)
    collectd.register_config(configure_callback)
    collectd.register_read(payload_callback, plugin_interval)
|
TAJaroszewski/lma_contrail_monitoring
|
deployment_scripts/puppet/modules/lma_contrail_monitoring/files/scripts/check-api-contrail-9081.py
|
Python
|
apache-2.0
| 4,630
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the nullable 4-character 'response' column to the transaction
    # model.

    dependencies = [
        ('transactions', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='transaction',
            name='response',
            # NOTE(review): the default is a bytes literal (b'') on a
            # CharField; Django tolerates this in migrations but a str
            # default seems intended — do not edit this applied migration,
            # correct it in a follow-up migration if needed.
            field=models.CharField(default=b'', max_length=4, null=True, blank=True),
        ),
    ]
|
erickdom/restAndroid
|
transactions/migrations/0002_transaction_response.py
|
Python
|
apache-2.0
| 436
|
# -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Errors for the Python logging handlers."""
class RecoverableError(Exception):
    """Transient failure that the logging handlers deliberately ignore."""
|
GoogleCloudPlatform/cloud-pubsub-logging-python
|
pubsub_logging/errors.py
|
Python
|
apache-2.0
| 759
|
#
# Copyright 2015 VTT Technical Research Center of Finland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from scipy.integrate import cumtrapz
from scipy.interpolate import griddata
from scipy.stats import lognorm
from scipy.optimize import curve_fit
import numpy as np
def fitLogNormParticleDistribution(D10, D50, D90):
'''
Fitting function to get the mu and sigma -parameters of the Log-normal
distribution from cumulative particle distribution values D10, D50 and D90.
The DXX are values that the cumulative particle distribution function gets
at those points. For example D10 means that 10% of particle are smaller
than this size.
d10 = 7.3
d50 = 12.0
d90 = 18.3
(mu, sigma) = fitLogNormParticleDistribution(d10, d50, d90)
print(mu, sigma)
'''
mu = np.log(D50) # fixed by definition
def errfunc(mu_, sig_):
N = lognorm(sig_, scale=np.exp(mu_))
# minimize ze difference between D10 and D90 to cumulative function
# Weight the D10 more by 2*
zero = 2 * np.abs(0.1 - N.cdf(D10)) + np.abs(0.9 - N.cdf(D90))
return(zero)
sigma, pcov = curve_fit(errfunc, [mu], [0])
print(sigma)
return(mu, sigma[0])
def particlesInVolumeLogNormWeight(weight_frac, density_p, density_host,
                                   mu, sigma, particle_diameters):
    """
    Particle number densities per size bin for a given *weight* fraction.

    Converts the weight fraction to a volume fraction using the particle
    and host densities, then delegates to particlesInVolumeLogNorm.
    Assumes a log-normal size distribution.
    """
    print('Weight fraction is %.1f %%' % (weight_frac * 100))
    w = weight_frac
    density_ratio = density_host / density_p
    vol_frac = w * density_ratio / (1 + w * (density_ratio - 1))
    print('Volume fraction is %.1f %%' % (vol_frac * 100))
    return particlesInVolumeLogNorm(vol_frac, mu, sigma, particle_diameters)
def particlesInVolumeLogNormWeightTotal(weight_frac, density_p, density_host,
                                        mu, sigma, particle_diameters):
    """
    Total number of particles in the volume element for a weight fraction.

    Only valid when the silicone host contains a SINGLE particle type.
    Assumes a log-normal size distribution.
    """
    print('Weight fraction is %.1f %%' % (weight_frac * 100))
    w = weight_frac
    density_ratio = density_host / density_p
    vol_frac = w * density_ratio / (1 + w * (density_ratio - 1))
    print('Volume fraction is %.1f %%' % (vol_frac * 100))
    return particlesInVolumeLogNormTotal(vol_frac, mu, sigma,
                                         particle_diameters)
def particlesInVolumeLogNormWeightTotal2(weight_frac1, weight_frac2, dens_p1, dens_p2, dens_host, mu, sigma, particle_diameters):
    """
    Volume fraction and total particle count for particle type 1 when TWO
    particle types share the silicone host.

    Returns (vol_frac, total_particle_count). Assumes a log-normal size
    distribution for type 1.
    """
    print('Weight fraction is %.1f %%' % (weight_frac1 * 100))
    w_p1 = weight_frac1
    w_p2 = weight_frac2
    # Remaining weight is the host itself.
    w_host = 1.0 - w_p1 - w_p2
    denominator = (w_host * dens_p1 * dens_p2 +
                   w_p1 * dens_host * dens_p2 +
                   w_p2 * dens_host * dens_p1)
    vol_frac = (dens_host * dens_p2 * w_p1) / denominator
    print('Volume fraction is %.1f %%' % (vol_frac * 100))
    return (vol_frac,
            particlesInVolumeLogNormTotal(vol_frac, mu, sigma,
                                          particle_diameters))
def particlesInVolumeLogNorm(vol_frac, mu, sigma, particle_diameters):
    """
    Number of particles per size bin in a unit volume element.

    Diameters follow a log-normal distribution with parameters (mu, sigma)
    and the particles occupy the volume fraction *vol_frac*.
    *particle_diameters* is assumed equally spaced (bin width taken from
    its first two entries). Returns an array of particles per um^3 per bin.
    """
    D = particle_diameters
    dist = lognorm(sigma, scale=np.exp(mu))
    # Weight factor of each particle size.
    pdf = dist.pdf(D)
    # Volume of a sphere with diameter D.
    Vsph = 4.0 / 3.0 * np.pi * (D / 2.0) ** 3.0
    # pdf-weighted volumes; their integral is the mean particle volume.
    WV = pdf * Vsph
    Vtot = np.trapz(WV, D)
    # Particle count per cubic micrometre needed for the volume fraction.
    n_part = vol_frac / Vtot
    print('Number of particles in cubic micrometer = %.18f' % n_part)
    # Sanity check: should reproduce the requested volume fraction in %.
    print("Volume fraction was: %.1f %%" %
          (np.trapz(n_part * pdf * Vsph, D) * 100))
    bins = pdf * (D[1] - D[0])
    return (n_part * bins)
def particlesInVolumeLogNormTotal(vol_frac, mu, sigma, particle_diameters):
    """Total particle count per um^3 (sum over all size bins)."""
    per_bin = particlesInVolumeLogNorm(vol_frac, mu, sigma,
                                       particle_diameters)
    return per_bin.sum()
def rayleighScatteringCrossSection(wavelengths,
                                   particle_refractive_index,
                                   particle_diameter):
    """
    Rayleigh scattering cross-section of a spherical particle:
    sigma = (2 pi^5 d^6)/(3 lambda^4) * ((n^2-1)/(n^2+2))^2.
    Wavelengths and diameter must share the same length unit.
    """
    d = particle_diameter
    n = particle_refractive_index
    lam = wavelengths
    lorentz = ((n ** 2.0) - 1.0) / ((n ** 2.0) + 2.0)
    return (2.0 * (np.pi ** 5.0) * d ** 6.0) / (3 * lam ** 4.0) * lorentz ** 2.0
def rayleighScatteringPhaseFunction(cosTheta):
    """Unnormalized Rayleigh phase function: 3/4 * (1 + cos^2(theta))."""
    cos_sq = cosTheta ** 2
    return 0.75 * (1 + cos_sq)
def henyeyGreensteinPhaseFunction(cosTheta, asymmetry_factor):
    """Henyey-Greenstein phase function; asymmetry g in (-1, 1),
    g=0 reduces to isotropic scattering (constant 0.5)."""
    g = asymmetry_factor
    g_sq = g ** 2
    denominator = (1 + g_sq - 2 * g * cosTheta) ** 1.5
    return 0.5 * (1.0 - g_sq) / denominator
def cumulativeDistribution(phaseFunction, cosTheta):
    """Cumulative distribution of a phase function sampled over cos(theta).

    NOTE(review): the -0.5 factor assumes *cosTheta* is ordered descending
    (i.e. theta ascending, as in the example at the bottom of this module)
    — confirm with callers.
    """
    running_integral = cumtrapz(phaseFunction, cosTheta, initial=0)
    return -0.5 * running_integral
def cumulativeDistributionTheta(phaseFunction, theta):
    """Cumulative distribution over theta with the sin(theta) solid-angle
    weight applied."""
    weighted = phaseFunction * np.sin(theta)
    return cumtrapz(weighted, theta, initial=0)
def invertNiceFunction(x, y, yi):
    """
    Invert a monotonic sampled function y(x): return x-values for *yi*.

    Interpolates x as a function of y; endpoint queries that fall just
    outside [y[0], y[-1]] come back NaN from griddata, so the two ends
    are clamped to x's endpoints.
    """
    inverted = griddata(y, x, yi)
    if np.isnan(inverted[0]):
        inverted[0] = x[0]
    if np.isnan(inverted[-1]):
        inverted[-1] = x[-1]
    return inverted
'''
th = np.arange(0, 180, 0.5)
th = np.radians(th)
rv = np.linspace(0, 1, 1000)
phase = rayleighScatteringPhaseFunction(np.cos(th))
phase = henyeyGreensteinPhaseFunction(np.cos(th), -0.6)
cumul = cumulativeDistribution(phase, np.cos(th))
invers = invertNiceFunction(np.cos(th), cumul, rv)
plt.plot(rv, np.degrees(np.arccos(invers)))
plt.show()
print(np.degrees(np.arccos(invers)))
'''
'''
if __name__ == '__main__':
particlesInVolumeLogNormWeightTotal(weight_frac=0.24,
density_p=5.0,
density_host=1.1,
mu=1, sigma=1,
particle_diameters=np.array([1, 2]))
particlesInVolumeLogNormWeight(weight_frac=0.24,
density_p=5.0,
density_host=1.1,
mu=1, sigma=1,
particle_diameters=np.array([1, 2]))
'''
|
ollitapa/VTT-Raytracer
|
python_source/pyraytracer/scatteringTools.py
|
Python
|
apache-2.0
| 7,251
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Data access functions to read from and write to the SQLite backend.
"""
import sqlite3
import codecs
import os
import re
def setup_db():
    """(Re)initialize the gitdox SQLite database next to this package.

    WARNING: destructive. Drops the docs, users, metadata and validate
    tables if present, then recreates the schema, erasing all stored data.
    """
    # Database lives one directory above this module.
    dbpath = os.path.dirname(os.path.realpath(__file__)) + os.sep +".."+os.sep+"gitdox.db"
    conn = sqlite3.connect(dbpath)
    cur = conn.cursor()
    # Drop tables if they exist
    cur.execute("DROP TABLE IF EXISTS docs")
    cur.execute("DROP TABLE IF EXISTS users")
    cur.execute("DROP TABLE IF EXISTS metadata")
    cur.execute("DROP TABLE IF EXISTS validate")
    conn.commit()
    # Create tables
    #user table not used
    #cur.execute('''CREATE TABLE IF NOT EXISTS users
    #             (id INTEGER PRIMARY KEY AUTOINCREMENT, username text)''')
    #docs table
    cur.execute('''CREATE TABLE IF NOT EXISTS docs
                (id INTEGER PRIMARY KEY AUTOINCREMENT, name text, corpus text, status text,assignee_username text ,filename text, content text, mode text, schema text, validation text, timestamp text, cache text)''')
    #metadata table; UNIQUE(docid, key) means re-saving a key overwrites it
    cur.execute('''CREATE TABLE IF NOT EXISTS metadata
                (docid INTEGER, metaid INTEGER PRIMARY KEY AUTOINCREMENT, key text, value text, corpus_meta text, UNIQUE (docid, metaid) ON CONFLICT REPLACE, UNIQUE (docid, key) ON CONFLICT REPLACE)''')
    #validation table
    cur.execute('''CREATE TABLE IF NOT EXISTS validate
                (doc text, corpus text, domain text, name text, operator text, argument text, id INTEGER PRIMARY KEY AUTOINCREMENT)''')
    conn.commit()
    conn.close()
def create_document(doc_id, name, corpus, status, assigned_username, filename, content,mode="xml", schema='--none--'):
    """Insert a new row into the docs table.

    Args:
        doc_id: integer primary key for the new document.
        name: document name.
        corpus: corpus the document belongs to.
        status: editorial status string.
        assigned_username: user the document is assigned to.
        filename: original file name.
        content: serialized document body.
        mode: editing mode, e.g. "xml" or "ether" (default "xml").
        schema: validation schema name (default '--none--').

    Bug fix: the INSERT previously hard-coded 'xml' for the mode column,
    silently ignoring the *mode* argument; it is now bound like the rest.
    """
    generic_query("INSERT INTO docs(id, name,corpus,status,assignee_username,filename,content,mode,schema) VALUES(?,?,?,?,?,?,?,?,?)",
                  (int(doc_id), name, corpus, status, assigned_username, filename, content, mode, schema))
def generic_query(sql, params, return_new_id=False):
    """Run one SQL statement against gitdox.db and return its result.

    Args:
        sql: SQL text, with ? placeholders when *params* is given.
        params: parameter tuple, or None for a parameterless statement.
        return_new_id: if True, return the last inserted rowid instead of rows.

    Returns:
        The fetched rows, or the new rowid when return_new_id is True.
    """
    dbpath = os.path.dirname(os.path.realpath(__file__)) + os.sep + ".." + os.sep + "gitdox.db"
    conn = sqlite3.connect(dbpath)
    # "with conn" commits on success and rolls back on exception.
    with conn:
        cur = conn.cursor()
        if params is None:
            cur.execute(sql)
        else:
            cur.execute(sql, params)
        if return_new_id:
            return cur.lastrowid
        return cur.fetchall()
def invalidate_doc_by_name(doc, corpus):
    """Clear cached validation for docs matching *doc*/*corpus* (SQL LIKE patterns)."""
    generic_query("UPDATE docs SET validation=NULL WHERE name like ? and corpus like ?", (doc, corpus))
def invalidate_ether_docs(doc, corpus):
    """Clear cached validation for matching spreadsheet ('ether') docs only."""
    generic_query("UPDATE docs SET validation=NULL WHERE name like ? and corpus like ? and mode = 'ether'", (doc, corpus))
def invalidate_doc_by_id(id):
    """Clear cached validation for a single document by primary key."""
    generic_query("UPDATE docs SET validation=NULL WHERE id=?", (id,))
def doc_exists(doc, corpus):
    """Return True if a document named *doc* exists in *corpus*."""
    matches = generic_query("SELECT name from docs where name=? and corpus=?", (doc, corpus))
    return len(matches) > 0
def save_changes(id, content):
    """Persist editor *content* for document *id* and void its cached validation."""
    generic_query("UPDATE docs SET content=? WHERE id=?", (content, id))
    invalidate_doc_by_id(id)
def update_assignee(doc_id, user_name):
    """Reassign document *doc_id* to *user_name*."""
    generic_query("UPDATE docs SET assignee_username=? WHERE id=?", (user_name, doc_id))
def update_status(id, status):
    """Set the editorial status of document *id*."""
    generic_query("UPDATE docs SET status=? WHERE id=?", (status, id))
def update_docname(id, docname):
    """Rename document *id*; cached validation becomes stale and is voided."""
    generic_query("UPDATE docs SET name=? WHERE id=?", (docname, id))
    invalidate_doc_by_id(id)
def update_filename(id, filename):
    """Set the stored file name of document *id*."""
    generic_query("UPDATE docs SET filename=? WHERE id=?", (filename, id))
def update_corpus(id, corpusname):
    """Move document *id* to another corpus; voids cached validation."""
    generic_query("UPDATE docs SET corpus=? WHERE id=?", (corpusname, id))
    invalidate_doc_by_id(id)
def update_mode(id, mode):
    """Set the editing mode ('xml'/'ether') of document *id*."""
    generic_query("UPDATE docs SET mode=? WHERE id=?", (mode, id))
def update_schema(id, schema):
    """Set the validation schema of document *id*."""
    generic_query("UPDATE docs SET schema=? WHERE id=?", (schema, id))
def delete_doc(id):
    """Delete document *id* together with its metadata rows."""
    generic_query("DELETE FROM docs WHERE id=?", (id,))
    generic_query("DELETE FROM metadata WHERE docid=?", (id,))
def cell(text):
    """Wrap *text* in an HTML table cell, coercing ints to str first."""
    content = str(text) if isinstance(text, int) else text
    return "\n <td>" + content + "</td>"
def update_meta(meta_id, doc_id, key, value, corpus=False):
    """Overwrite an existing metadata row.

    With corpus=True the row is stored as corpus-level metadata (docid NULL,
    corpus_meta set to the document's corpus name); otherwise it is
    document-level and the document's cached validation is voided.
    """
    if corpus:
        _, corpus_name, _, _, _, _, _ = get_doc_info(doc_id)
        row = (meta_id, None, key, value, corpus_name)
    else:
        row = (meta_id, doc_id, key, value, None)
    generic_query("REPLACE INTO metadata(metaid,docid,key,value,corpus_meta) VALUES(?,?,?,?,?)", row)
    if not corpus:
        invalidate_doc_by_id(doc_id)
def save_meta(doc_id, key, value, corpus=False):
    """Insert (or replace) a metadata key/value pair and return the new row id.

    corpus=True stores the pair as corpus-level metadata keyed on the
    document's corpus name instead of the document id.
    """
    if corpus:
        _, corpus_name, _, _, _, _, _ = get_doc_info(doc_id)
        new_id = generic_query("REPLACE INTO metadata(docid,key,value,corpus_meta) VALUES(?,?,?,?)", (None, key, value, corpus_name), return_new_id=True)
    else:
        new_id = generic_query("INSERT OR REPLACE INTO metadata(docid,key,value,corpus_meta) VALUES(?,?,?,?)", (doc_id, key, value, None), return_new_id=True)
    invalidate_doc_by_id(doc_id)
    return new_id
def delete_meta(metaid, doc_id, corpus=False):
    """Delete one metadata row; voids the doc's cached validation unless corpus-level."""
    generic_query("DELETE FROM metadata WHERE metaid=?", (metaid,))
    if not corpus:
        invalidate_doc_by_id(doc_id)
def get_doc_info(doc_id):
    """Return (name, corpus, filename, status, assignee, mode, schema) for *doc_id*.

    Returns an empty list when the document does not exist.
    """
    rows = generic_query("SELECT name,corpus,filename,status,assignee_username,mode,schema FROM docs WHERE id=?", (int(doc_id),))
    return rows[0] if rows else rows
def get_doc_content(doc_id):
    """Return the stored content of document *doc_id* (raises IndexError if absent)."""
    rows = generic_query("SELECT content FROM docs WHERE id=?", (int(doc_id),))
    return rows[0][0]
def get_all_doc_ids_for_corpus(corpus):
    """Return the ids of all documents in *corpus* (as a map object, matching callers)."""
    rows = generic_query("SELECT id FROM docs WHERE corpus=?", (corpus,))
    return map(lambda row: row[0], rows)
def get_all_docs(corpus=None, status=None):
    """Return (id, name, corpus, mode, content) rows, optionally filtered.

    Either filter may be None, meaning "any value".
    """
    if corpus is None and status is None:
        return generic_query("SELECT id, name, corpus, mode, content FROM docs", None)
    if corpus is None:
        return generic_query("SELECT id, name, corpus, mode, content FROM docs where status=?", (status,))
    if status is None:
        return generic_query("SELECT id, name, corpus, mode, content FROM docs where corpus=?", (corpus,))
    return generic_query("SELECT id, name, corpus, mode, content FROM docs where corpus=? and status=?", (corpus, status))
def get_doc_meta(doc_id, corpus=False):
    """Return metadata rows for a document, or its corpus when corpus=True.

    Rows are ordered case-insensitively by key; corpus lookup for a
    nonexistent document yields [].
    """
    if not corpus:
        return generic_query("SELECT * FROM metadata WHERE docid=? ORDER BY key COLLATE NOCASE", (int(doc_id),))
    fields = get_doc_info(doc_id)
    if len(fields) == 0:
        return []
    _, corpus_name, _, _, _, _, _ = fields
    return generic_query("SELECT * FROM metadata WHERE corpus_meta=? ORDER BY key COLLATE NOCASE", (corpus_name,))
def get_corpora():
    """Return the distinct corpus names, sorted case-insensitively."""
    return generic_query("SELECT DISTINCT corpus FROM docs ORDER BY corpus COLLATE NOCASE", None)
def get_validate_rules(sort=None, domain=None):
    """Fetch validation rules, optionally filtered by *domain* and ordered by *sort*.

    SECURITY NOTE: *sort* is concatenated into the SQL (ORDER BY cannot be
    parameterized); callers must pass only trusted column names.
    """
    query = "SELECT corpus, doc, domain, name, operator, argument, id FROM validate"
    params = []
    if domain:
        query += " WHERE domain=? "
        params.append(domain)
    if sort:
        query += " ORDER BY " + sort
    return generic_query(query, params)
def get_rule_domain(id):
    """Return the domain string of validation rule *id*."""
    return generic_query("SELECT domain FROM validate WHERE id=?", (id,))[0][0]
def get_xml_rules():
    """Validation rules applying to XML documents."""
    return get_validate_rules(domain='xml')
def get_meta_rules():
    """Validation rules applying to metadata."""
    return get_validate_rules(domain='meta')
def get_ether_rules():
    """Validation rules applying to spreadsheet ('ether') documents."""
    return get_validate_rules(domain='ether')
def get_export_rules():
    """Validation rules applying to export."""
    return get_validate_rules(domain='export')
def create_validate_rule(doc, corpus, domain, name, operator, argument):
    """Insert a validation rule and void cached validation results.

    Meta rules affect every document; all other rule domains only affect
    spreadsheet ('ether') documents.
    """
    new_id = generic_query("INSERT INTO validate(doc,corpus,domain,name,operator,argument) VALUES(?,?,?,?,?,?)", (doc, corpus, domain, name, operator, argument), return_new_id=True)
    if domain == "meta":
        invalidate_doc_by_name("%", "%")
    else:
        invalidate_ether_docs("%", "%")
    return new_id
def delete_validate_rule(id):
    """Delete validation rule *id* and void all cached validation results."""
    generic_query("DELETE FROM validate WHERE id=?", (int(id),))
    invalidate_doc_by_name("%", "%")
def update_validate_rule(doc, corpus, domain, name, operator, argument, id):
    """Rewrite every field of validation rule *id*, then void stale validation."""
    generic_query("UPDATE validate SET doc = ?, corpus = ?, domain = ?, name = ?, operator = ?, argument = ? WHERE id = ?", (doc, corpus, domain, name, operator, argument, id))
    # Meta rules affect all docs; other domains only spreadsheet docs.
    if domain == "meta":
        invalidate_doc_by_name("%", "%")
    else:
        invalidate_ether_docs("%", "%")
def update_validation(doc_id, validation):
    """Store a validation report for document *doc_id*."""
    generic_query("UPDATE docs SET validation=? where id=?", (validation, doc_id))
def update_timestamp(doc_id, timestamp):
    """Store the last-modified timestamp for document *doc_id*."""
    generic_query("UPDATE docs SET timestamp=? where id=?", (timestamp, doc_id))
|
cligu/gitdox
|
modules/gitdox_sql.py
|
Python
|
apache-2.0
| 8,292
|
# -*- coding: utf-8 -*-
import collections
from pyramid.view import view_defaults, view_config
from kubb_match.data.models import Round
@view_defaults(request_method='GET', accept='text/html')
class HtmlView(object):
    """Pyramid HTML views for the kubb tournament site: public phase pages,
    admin variants, and final results."""
    def __init__(self, request):
        self.request = request
        # data_manager is provided by the app on the request object.
        self.data_manager = request.data_managers['data_manager']
    @view_config(route_name='home', renderer='home.jinja2')
    def home(self):
        """Static landing page."""
        return {}
    @view_config(route_name='phase1', renderer='phase1.jinja2')
    def phase1(self):
        """Current round of phase 1: games sorted by field plus a position map."""
        p = self.data_manager.get_phase(1)
        if len(p.rounds) > 0:
            # First not-yet-played round; next() raises StopIteration if all played.
            current_round = next((r for r in p.rounds if not r.played))
            current_games = current_round.games
            current_games.sort(key=lambda x: x.field, reverse=False)
            position_data = {}
            for position in current_round.positions:
                position_data[position.position] = position
        else:
            current_games = None
            position_data = None
        return {'current_games': current_games, 'position_data': position_data}
    @view_config(route_name='phase2', renderer='phase2.jinja2')
    def phase2(self):
        """All unplayed KO rounds of phase 2, with games and grouped positions per round label."""
        p = self.data_manager.get_phase(2)
        if len(p.rounds) > 0:
            ko_rounds = [r for r in p.rounds if not r.played]
            current_games ={}
            position_data ={}
            for r in ko_rounds:
                games = r.games
                games.sort(key=lambda x: x.field, reverse=False)
                current_games[r.label] = games
                positions = {}
                r.positions.sort(key=lambda x: x.position, reverse=False)
                for pos in r.positions:
                    if pos.position in positions:
                        positions[pos.position].append(pos)
                    else:
                        positions[pos.position] = [pos]
                # Keep positions in ascending order for the template.
                position_data[r.label] = collections.OrderedDict(sorted(positions.items()))
        else:
            current_games = None
            position_data = None
        return {'current_games': current_games, 'position_data': position_data}
    # ADMIN
    @view_config(route_name='phase1-admin', renderer='phase1-admin.jinja2', permission='admin')
    def admin1(self):
        """Admin view of the current phase-1 round (games only, no positions)."""
        p = self.data_manager.get_phase(1)
        if len(p.rounds) > 0:
            current_round = next((r for r in p.rounds if not r.played))
            current_games = current_round.games
            current_games.sort(key=lambda x: x.field, reverse=False)
        else:
            current_games = None
            current_round = None
        return {'current_games': current_games, 'current_round': current_round}
    @view_config(route_name='phase2-admin', renderer='phase2-admin.jinja2', permission='admin')
    def admin2(self):
        """Admin view of unplayed phase-2 rounds: games and round ids per label."""
        p = self.data_manager.get_phase(2)
        if len(p.rounds) > 0:
            ko_rounds = [r for r in p.rounds if not r.played]
            current_games ={}
            current_round ={}
            for r in ko_rounds:
                games = r.games
                games.sort(key=lambda x: x.field, reverse=False)
                current_games[r.label] = games
                current_round[r.label] = r.id
        else:
            current_games = None
            current_round = None
        return {'current_games': current_games, 'current_round': current_round}
    @view_config(route_name='results', renderer='results.jinja2')
    def results(self):
        """Final standings: positions of all rounds marked as finals."""
        p = self.data_manager.get_phase(2)
        if len(p.rounds) > 0:
            ko_rounds = [r for r in p.rounds if r.final]
            position_data ={}
            for r in ko_rounds:
                positions = {}
                for pos in r.positions:
                    if pos.position in positions:
                        positions[pos.position].append(pos)
                    else:
                        positions[pos.position] = [pos]
                position_data[r.label] = positions
        else:
            position_data = None
        return {'position_data': position_data}
|
BartSaelen/kubb_match
|
kubb_match/views/html.py
|
Python
|
apache-2.0
| 4,089
|
from requests import HTTPError
from database import Database
import simplejson as json
db = Database.getDatabaseConnection()["cras"]
from log_session import LogSession
import datetime
class DB:
    """Static helpers over the shared "cras" database handle (Python 2).

    NOTE(review): `db` appears to be a Cloudant/CouchDB-style database whose
    documents support item access and .save() -- confirm against the
    Database module. Also note the inconsistent pair get_user_by_ID
    (method) vs get_user_by_id (module-level function below).
    """
    def __init__(self):
        pass
    @staticmethod
    def add_user(user_id, user_name, mail,picture,fcm_token):
        """Create a new user document; returns the data dict regardless of success."""
        print ("DEBUG: adding user with data: " + user_id + " "+ user_name + " " + mail + " " + fcm_token)
        data = {
            "_id": user_id,
            "fcm_token" : fcm_token,
            "name": user_name,
            "mail": mail,
            "picture": picture,
            "supervise": [],
            "supervised_by" : [],
            "currently_monitoring" : [],
            "currently_monitored_by": "",
            "log_sessions": {}
        }
        try:
            db.create_document(data)
        except HTTPError:
            # Document id collision: user already registered.
            print "CloudantException: user already exists"
        return data
    @staticmethod
    def get_user_by_ID(user_ID):
        """Return the user document or None when it does not exist."""
        try:
            return db[user_ID]
        except Exception:
            print "DB exception : User does not exists"
            return None
    @staticmethod
    def add_supervisor(user_id, other_id):
        """Link two users: *other_id* becomes a supervisor of *user_id*."""
        # NOTE(review): calls the module-level get_user_by_id, not
        # DB.get_user_by_ID -- intentional? get_user_by_id raises on
        # missing ids instead of returning None.
        user = get_user_by_id(user_id)
        other_user = get_user_by_id(other_id)
        user["supervised_by"].append(other_id)
        other_user["supervise"].append(user_id)
        user.save()
        other_user.save()
    @staticmethod
    def get_user_supervise(user_id):
        """Return JSON list of supervised users, each annotated with a live 'status' flag."""
        currently_monitoring = db[user_id]["currently_monitoring"]
        user_arr = []
        for id in db[user_id]["supervise"]:
            current = False
            if id in currently_monitoring:
                current = True
            # user_arr.append({"user": get_user_by_id(id),
            #                 "status" : current})
            user = get_user_by_id(id).copy()
            user.update({"status":current})
            user_arr.append(user)
        return json.dumps(user_arr)
    @staticmethod
    def get_user_supervised_by(user_id):
        """Return JSON list of this user's supervisors."""
        user_arr = []
        for id in db[user_id]["supervised_by"]:
            user_arr.append(get_user_by_id(id))
        return json.dumps(user_arr)
    @staticmethod
    def get_user_name(id):
        """Return the display name stored for *id*."""
        return db[id]["name"]
    @staticmethod
    def update_monitoring_status(user_id, sup_id, monitoring,is_sup):
        """Start or stop a monitoring relationship between *user_id* and *sup_id*.

        When stopping, the most recent log session on the appropriate side
        is stamped with an end_time; *is_sup* selects which of the two
        documents holds the session log.
        """
        user = db[user_id]
        sup = db[sup_id]
        if monitoring:
            user["currently_monitored_by"] = sup_id
            sup["currently_monitoring"].append(user_id)
        else:
            if is_sup:
                if sup_id in user["log_sessions"]:
                    num_of_logs = len(user["log_sessions"][sup_id])
                    # Close the latest session for this supervisor.
                    user["log_sessions"][sup_id][num_of_logs-1].update({"end_time": str(datetime.datetime.now())})
                    user.save()
                user["currently_monitoring"].remove(sup_id)
                sup["currently_monitored_by"] = ""
            else:
                if user_id in sup["log_sessions"]:
                    num_of_logs = len(sup["log_sessions"][user_id])
                    sup["log_sessions"][user_id][num_of_logs - 1].update({"end_time": str(datetime.datetime.now())})
                    sup.save()
                sup["currently_monitoring"].remove(user_id)
                user["currently_monitored_by"] = ""
        user.save()
        sup.save()
    @staticmethod
    def add_log_session(user_id,to_monitor_id):
        """Append a fresh LogSession for *to_monitor_id* to the user's log."""
        user = db[user_id]
        if to_monitor_id not in user["log_sessions"]:
            user["log_sessions"].update({to_monitor_id: []})
        user["log_sessions"][to_monitor_id].append(json.loads(LogSession(datetime.datetime.now(), to_monitor_id).toJSON()))
        try:
            user.save()
        except Exception,e:
            print e
    @staticmethod
    def get_currently_monitored_by(user_id):
        """Return the id of the supervisor currently monitoring *user_id* ('' if none)."""
        return db[user_id]["currently_monitored_by"]
    @staticmethod
    def get_logs(user_id,sup_id):
        """Return the list of log sessions for *sup_id*, or None when absent."""
        log_sessions = db[user_id]["log_sessions"]
        if sup_id in log_sessions:
            return log_sessions[sup_id]
        else:
            print "There are no logs available for: " + sup_id
            return None
    @staticmethod
    def add_log_event(user_id, sup_id,event):
        """Append *event* to the latest log session for *sup_id*.

        NOTE(review): each `db[user_id]` access may return a fresh document
        copy; whether mutating it and saving `user` persists the event
        depends on the Database wrapper -- confirm.
        """
        user = get_user_by_id(user_id)
        log_sessions = db[user_id]["log_sessions"]
        if sup_id in log_sessions:
            try:
                last_session = db[user_id]["log_sessions"][sup_id][len(db[user_id]["log_sessions"][sup_id]) - 1]
                last_session["events"].append(event)
                db[user_id]["log_sessions"][sup_id][len(db[user_id]["log_sessions"][sup_id]) - 1] = last_session
                user.save()
            except Exception,e:
                print e
def get_user_by_id(user_id):
    """Fetch a user document by id (propagates the DB error when missing)."""
    user = db[user_id]
    return user
def db_exists(user_id):
    """Return the user document as JSON if *user_id* exists, else False."""
    try:
        user = db[user_id]
    except Exception,e:
        return False
    print "DEBUG: the name is : " + user["name"]
    return user.json()
def get_fcm_token(user_id):
    """Return the stored Firebase Cloud Messaging token for *user_id*."""
    return db[user_id]["fcm_token"]
|
tweiss1234/Cras
|
db_actions.py
|
Python
|
apache-2.0
| 5,095
|
import ConfigParser as configparser
import os
import sys
from pyspark import SparkContext, SparkConf, SparkFiles
from pyspark.sql import SQLContext, Row
from datetime import datetime
from os_utils import make_directory, preparing_path, time_execution_log, check_file_exists
from subprocess import Popen, PIPE
from vina_utils import get_files_pdb, get_name_model_pdb
if __name__ == '__main__':
    # Spark driver: converts every receptor PDB file to PDBQT in parallel
    # by shelling out to AutoDockTools' prepare_receptor4 script.
    sc = SparkContext()
    sqlCtx = SQLContext(sc)
    # Paths and script locations come from config.ini in the working dir.
    config = configparser.ConfigParser()
    config.read('config.ini')
    pythonsh = config.get('VINA', 'pythonsh')
    script_receptor4 = config.get('VINA', 'script_receptor4')
    pdb_path = config.get('DEFAULT', 'pdb_path')
    pdbqt_receptor_path = config.get('DEFAULT', 'pdbqt_receptor_path')
    path_spark_drugdesign = config.get('DRUGDESIGN', 'path_spark_drugdesign')
    make_directory(pdbqt_receptor_path)
    # Adding Python Source file
    sc.addPyFile(os.path.join(path_spark_drugdesign, "vina_utils.py"))
    sc.addPyFile(os.path.join(path_spark_drugdesign, "json_utils.py"))
    sc.addPyFile(os.path.join(path_spark_drugdesign, "os_utils.py"))
    # Broadcast read-only config values to the executors.
    pythonsh = sc.broadcast(pythonsh)
    script_receptor4 = sc.broadcast(script_receptor4)
    pdbqt_receptor_path = sc.broadcast(pdbqt_receptor_path)
    def run_prepare_receptor_spark(receptor):
        # Executed on workers; builds and runs the conversion command.
        # NOTE(review): shell=True with interpolated paths -- safe only for
        # trusted, whitespace-free file names.
        receptor_pdbqt = os.path.join(pdbqt_receptor_path.value,
                                      get_name_model_pdb(receptor))
        command = ''.join([pythonsh.value,
                           ' ',
                           script_receptor4.value,
                           ' -r ',
                           receptor,
                           ' -o ',
                           receptor_pdbqt,
                           '.pdbqt',
                           ' -v '])
        proc = Popen(command, shell=True, stdout=PIPE)
        proc.communicate()
    start_time = datetime.now()
    list_receptor = get_files_pdb(pdb_path)
    vina_dockingRDD = sc.parallelize(list_receptor)
    vina_dockingRDD.foreach(run_prepare_receptor_spark)
    finish_time = datetime.now()
    time_execution_log(finish_time, start_time, "prepare_receptor_spark.log")
|
rodrigofaccioli/drugdesign
|
virtualscreening/vina/spark/prepare_receptor.py
|
Python
|
apache-2.0
| 2,232
|
import copy
import datetime
import json
import os
from typing import Dict, List, Optional
import jinja2
import jsonschema
import yaml
from ray_release.anyscale_util import find_cloud_by_name
from ray_release.exception import ReleaseTestConfigError
from ray_release.logger import logger
from ray_release.util import deep_update
class Test(dict):
    """One release test definition: a plain dict carrying a marker type."""
    pass
# Timeouts are in seconds.
DEFAULT_WHEEL_WAIT_TIMEOUT = 7200  # Two hours
DEFAULT_COMMAND_TIMEOUT = 1800
DEFAULT_BUILD_TIMEOUT = 1800
DEFAULT_CLUSTER_TIMEOUT = 1800
DEFAULT_CLOUD_ID = "cld_4F7k8814aZzGG8TNUGPKnc"
# Values are frozen once at import time, not re-evaluated per test run.
DEFAULT_ENV = {
    "DATESTAMP": str(datetime.datetime.now().strftime("%Y%m%d")),
    "TIMESTAMP": str(int(datetime.datetime.now().timestamp())),
    "EXPIRATION_1D": str(
        (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
    ),
    "EXPIRATION_2D": str(
        (datetime.datetime.now() + datetime.timedelta(days=2)).strftime("%Y-%m-%d")
    ),
    "EXPIRATION_3D": str(
        (datetime.datetime.now() + datetime.timedelta(days=3)).strftime("%Y-%m-%d")
    ),
}
# Root of the release package and the JSON schema used to validate tests.
RELEASE_PACKAGE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
RELEASE_TEST_SCHEMA_FILE = os.path.join(
    RELEASE_PACKAGE_DIR, "ray_release", "schema.json"
)
class TestEnvironment(dict):
    """Mutable process-wide environment variables shared across a test run."""
    pass
# Lazily-created singleton; access only through get_test_environment().
_test_env = None
def get_test_environment():
    """Return the shared TestEnvironment, creating it from DEFAULT_ENV on first use."""
    global _test_env
    if not _test_env:
        _test_env = TestEnvironment(**DEFAULT_ENV)
    return _test_env
def set_test_env_var(key: str, value: str):
    """Set *key* in the shared test environment."""
    get_test_environment()[key] = value
def get_test_env_var(key: str, default: Optional[str] = None):
    """Read *key* from the shared test environment, falling back to *default*."""
    return get_test_environment().get(key, default)
def read_and_validate_release_test_collection(config_file: str) -> List[Test]:
    """Load the YAML test collection at *config_file* and validate it.

    Raises ReleaseTestConfigError (via validation) on schema violations.
    """
    with open(config_file, "rt") as infile:
        collection = yaml.safe_load(infile)
    validate_release_test_collection(collection)
    return collection
def load_schema_file(path: Optional[str] = None) -> Dict:
    """Load the release-test JSON schema (default: RELEASE_TEST_SCHEMA_FILE)."""
    schema_path = path or RELEASE_TEST_SCHEMA_FILE
    with open(schema_path, "rt") as fp:
        return json.load(fp)
def validate_release_test_collection(test_collection: List[Test]):
    """Validate every test against the schema, raising after logging all failures."""
    try:
        schema = load_schema_file()
    except Exception as e:
        raise ReleaseTestConfigError(
            f"Could not load release test validation schema: {e}"
        ) from e
    num_errors = 0
    for test in test_collection:
        error = validate_test(test, schema)
        if not error:
            continue
        logger.error(
            f"Failed to validate test {test.get('name', '(unnamed)')}: {error}"
        )
        num_errors += 1
    if num_errors:
        raise ReleaseTestConfigError(
            f"Release test configuration error: Found {num_errors} test "
            f"validation errors."
        )
def validate_test(test: Test, schema: Optional[Dict] = None) -> Optional[str]:
    """Validate one test against the JSON schema.

    Returns None on success, otherwise a human-readable error string.
    """
    schema = schema or load_schema_file()
    try:
        jsonschema.validate(test, schema=schema)
    except (jsonschema.ValidationError, jsonschema.SchemaError) as err:
        return str(err.message)
    except Exception as err:
        return str(err)
    return None
def find_test(test_collection: List[Test], test_name: str) -> Optional[Test]:
    """Return the test named *test_name* from the collection, or None."""
    return next(
        (candidate for candidate in test_collection
         if candidate["name"] == test_name),
        None,
    )
def as_smoke_test(test: Test) -> Test:
    """Return *test* with its smoke_test overrides merged in.

    Logs a warning and returns the test unchanged when no smoke test
    configuration exists. Note: pops "smoke_test" from the input dict.
    """
    if "smoke_test" not in test:
        logger.warning(
            f"Requested smoke test, but test with name {test['name']} does "
            f"not have any smoke test configuration."
        )
        return test
    overrides = test.pop("smoke_test")
    return deep_update(test, overrides)
def get_wheels_sanity_check(commit: Optional[str] = None):
    """Return a shell command that verifies the installed Ray wheel's commit.

    Without a commit, the command merely prints the wheel's commit hash.
    """
    if commit:
        return (
            f"python -c 'import ray; "
            f'assert ray.__commit__ == "{commit}", ray.__commit__\''
        )
    return (
        "python -c 'import ray; print("
        '"No commit sanity check available, but this is the '
        "Ray wheel commit:\", ray.__commit__)'"
    )
def load_and_render_yaml_template(
    template_path: str, env: Optional[Dict] = None
) -> Optional[Dict]:
    """Render the Jinja2 template at *template_path* and parse the result as YAML.

    Args:
        template_path: Path to the template; returns None when falsy.
        env: Extra variables, layered over a copy of os.environ.

    Returns:
        The parsed YAML document, or None when no path was given.

    Raises:
        ReleaseTestConfigError: missing file, render failure, or YAML error.
    """
    if not template_path:
        return None
    if not os.path.exists(template_path):
        raise ReleaseTestConfigError(
            f"Cannot load yaml template from {template_path}: Path not found."
        )
    with open(template_path, "rt") as f:
        content = f.read()
    render_env = copy.deepcopy(os.environ)
    if env:
        render_env.update(env)
    try:
        # BUG FIX: previously rendered with the bare `env`, silently dropping
        # the os.environ values merged into `render_env` above.
        content = jinja2.Template(content).render(env=render_env)
        return yaml.safe_load(content)
    except Exception as e:
        raise ReleaseTestConfigError(
            f"Error rendering/loading yaml template: {e}"
        ) from e
def load_test_cluster_env(test: Test, ray_wheels_url: str) -> Optional[Dict]:
    """Render the test's cluster environment YAML template.

    Injects the wheel URL and a commit sanity-check command into the
    shared test environment before rendering.
    """
    cluster_env_file = test["cluster"]["cluster_env"]
    template_path = os.path.join(
        RELEASE_PACKAGE_DIR, test.get("working_dir", ""), cluster_env_file
    )
    render_vars = get_test_environment()
    commit = render_vars.get("RAY_COMMIT", None)
    render_vars["RAY_WHEELS_SANITY_CHECK"] = get_wheels_sanity_check(commit)
    render_vars["RAY_WHEELS"] = ray_wheels_url
    return load_and_render_yaml_template(template_path, env=render_vars)
def load_test_cluster_compute(test: Test) -> Optional[Dict]:
    """Render the test's cluster compute YAML template.

    Resolves and injects the Anyscale cloud id before rendering.
    """
    cluster_compute_file = test["cluster"]["cluster_compute"]
    template_path = os.path.join(
        RELEASE_PACKAGE_DIR, test.get("working_dir", ""), cluster_compute_file
    )
    render_vars = get_test_environment()
    render_vars["ANYSCALE_CLOUD_ID"] = get_test_cloud_id(test)
    return load_and_render_yaml_template(template_path, env=render_vars)
def get_test_cloud_id(test: Test) -> str:
    """Resolve the Anyscale cloud ID for *test*.

    Accepts exactly one of cluster.cloud_id / cluster.cloud_name; falls
    back to DEFAULT_CLOUD_ID when neither is set.
    """
    cluster = test["cluster"]
    cloud_id = cluster.get("cloud_id", None)
    cloud_name = cluster.get("cloud_name", None)
    if cloud_id and cloud_name:
        raise RuntimeError(
            f"You can't supply both a `cloud_name` ({cloud_name}) and a "
            f"`cloud_id` ({cloud_id}) in the test cluster configuration. "
            f"Please provide only one."
        )
    if cloud_name and not cloud_id:
        cloud_id = find_cloud_by_name(cloud_name)
        if not cloud_id:
            raise RuntimeError(f"Couldn't find cloud with name `{cloud_name}`.")
    else:
        cloud_id = cloud_id or DEFAULT_CLOUD_ID
    return cloud_id
|
ray-project/ray
|
release/ray_release/config.py
|
Python
|
apache-2.0
| 6,617
|
#!/usr/bin/env python
"""
Create generic LPU and simple pulse input signal.
"""
from itertools import product
import sys
import numpy as np
import h5py
import networkx as nx
def create_lpu_graph(lpu_name, N_sensory, N_local, N_proj):
    """
    Create a generic LPU graph.
    Creates a graph containing the neuron and synapse parameters for an LPU
    containing the specified number of local and projection neurons. The graph
    also contains the parameters for a set of sensory neurons that accept
    external input. All neurons are either spiking or graded potential neurons;
    the Leaky Integrate-and-Fire model is used for the former, while the
    Morris-Lecar model is used for the latter (i.e., the neuron's membrane
    potential is deemed to be its output rather than the time when it emits an
    action potential). Synapses use either the alpha function model or a
    conductance-based model.
    Parameters
    ----------
    lpu_name : str
        Name of LPU. Used in port identifiers.
    N_sensory : int
        Number of sensory neurons.
    N_local : int
        Number of local neurons.
    N_proj : int
        Number of project neurons.
    Returns
    -------
    g : networkx.MultiDiGraph
        Generated graph.
    """
    # NOTE(review): G.add_node(id, {...}) passes the attribute dict
    # positionally -- networkx 1.x API, removed in networkx >= 2.0.
    # Set numbers of neurons:
    neu_type = ('sensory', 'local', 'proj')
    neu_num = (N_sensory, N_local, N_proj)
    # Neuron ids are between 0 and the total number of neurons:
    G = nx.MultiDiGraph()
    in_port_idx = 0
    spk_out_id = 0
    gpot_out_id = 0
    for (t, n) in zip(neu_type, neu_num):
        for i in range(n):
            id = t+"_"+str(i)
            name = t+"_"+str(i)
            # Half of the sensory neurons and projection neurons are
            # spiking neurons. The other half are graded potential neurons.
            # All local neurons are graded potential only.
            if t != 'local' and np.random.rand() < 0.5:
                G.add_node(id,
                           {'class': 'LeakyIAF',
                            'name': name+'_s',
                            'initV': np.random.uniform(-60.0,-25.0),
                            'reset_potential': -67.5489770451,
                            'resting_potential': 0.0,
                            'threshold': -25.1355161007,
                            'resistance': 1002.445570216,
                            'capacitance': 0.0669810502993,
                            'circuit': 'proj' if t == 'proj' else 'local'
                            })
                # Projection neurons are all assumed to be attached to output
                # ports (which are represented as separate nodes):
                if t == 'proj':
                    G.add_node(id+'_port',
                               {'class': 'Port',
                                'name': name+'port',
                                'port_type': 'spike',
                                'port_io': 'out',
                                'selector': '/%s/out/spk/%s' % (lpu_name, str(spk_out_id))
                                })
                    G.add_edge(id, id+'_port')
                    spk_out_id += 1
                else:
                    # An input port node is created for and attached to each non-projection
                    # neuron with a synapse; this assumes that data propagates from one LPU to
                    # another as follows:
                    # LPU0[projection neuron] -> LPU0[output port] -> LPU1[input port] ->
                    # LPU1[synapse] -> LPU1[non-projection neuron]
                    G.add_node('in_port'+str(in_port_idx),
                               {'class': 'Port',
                                'name': 'in_port'+str(in_port_idx),
                                'port_type': 'spike',
                                'port_io': 'in',
                                'selector': '/%s/in/spk/%s' % (lpu_name, in_port_idx)
                                })
                    G.add_node('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
                               {'class': 'AlphaSynapse',
                                'name': 'in_port'+str(in_port_idx)+'-'+name,
                                'ad': 0.19*1000,
                                'ar': 1.1*100,
                                'gmax': 0.003*1e-3,
                                'reverse': 65.0,
                                'circuit': 'local'
                                })
                    G.add_edge('in_port'+str(in_port_idx),
                               'synapse_'+'in_port'+str(in_port_idx)+'_to_'+id)
                    G.add_edge('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
                               id)
                    in_port_idx += 1
            else:
                G.add_node(id,
                           {'class': "MorrisLecar",
                            'name': name+'_g',
                            'V1': 30.,
                            'V2': 15.,
                            'V3': 0.,
                            'V4': 30.,
                            'phi': 0.025,
                            'offset': 0.,
                            'V_L': -50.,
                            'V_Ca': 100.0,
                            'V_K': -70.0,
                            'g_Ca': 1.1,
                            'g_K': 2.0,
                            'g_L': 0.5,
                            'initV': -52.14,
                            'initn': 0.02,
                            'circuit': 'proj' if t == 'proj' else 'local'
                            })
                # Projection neurons are all assumed to be attached to output
                # ports (which are not represented as separate nodes):
                if t == 'proj':
                    G.add_node(id+'_port',
                               {'class': 'Port',
                                'name': name+'port',
                                'port_type': 'gpot',
                                'port_io': 'out',
                                'selector': '/%s/out/gpot/%s' % (lpu_name, str(gpot_out_id))
                                })
                    G.add_edge(id, id+'_port')
                    gpot_out_id += 1
                else:
                    G.add_node('in_port'+str(in_port_idx),
                               {'class': 'Port',
                                'name': 'in_port'+str(in_port_idx),
                                'port_type': 'gpot',
                                'port_io': 'in',
                                'selector': '/%s/in/gpot/%s' % (lpu_name, in_port_idx)
                                })
                    G.add_node('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
                               {'class': 'PowerGPotGPot',
                                'name': 'in_port'+str(in_port_idx)+'-'+name,
                                'reverse': -80.0,
                                'saturation': 0.03*1e-3,
                                'slope': 0.8*1e-6,
                                'power': 1.0,
                                'threshold': -50.0,
                                'circuit': 'local'
                                })
                    G.add_edge('in_port'+str(in_port_idx),
                               'synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
                               delay = 0.001)
                    G.add_edge('synapse_'+'in_port'+str(in_port_idx)+'_to_'+id,
                               id)
                    in_port_idx += 1
    # Assume a probability of synapse existence for each group of synapses:
    # sensory -> local, sensory -> projection, local -> projection,
    # projection -> local:
    for r, (i, j) in zip((0.5, 0.1, 0.1, 0.3),
                         ((0, 1), (0, 2), (1, 2), (2, 1))):
        for src, tar in product(range(neu_num[i]), range(neu_num[j])):
            # Don't connect all neurons:
            if np.random.rand() > r: continue
            # Connections from the sensory neurons use the alpha function model;
            # all other connections use the power_gpot_gpot model:
            pre_id = neu_type[i]+"_"+str(src)
            post_id = neu_type[j]+"_"+str(tar)
            name = G.node[pre_id]['name'] + '-' + G.node[post_id]['name']
            synapse_id = 'synapse_' + name
            # NOTE(review): `is 'LeakyIAF'` compares object identity, not
            # equality -- it only works because CPython interns these short
            # literals; should be `==` (same below for the reverse/gmax picks).
            if G.node[pre_id]['class'] is 'LeakyIAF':
                G.add_node(synapse_id,
                           {'class' : 'AlphaSynapse',
                            'name' : name,
                            'ar' : 1.1*1e2,
                            'ad' : 1.9*1e3,
                            'reverse' : 65.0 if G.node[post_id]['class'] is 'LeakyIAF' else 10.0,
                            'gmax' : 3*1e-6 if G.node[post_id]['class'] is 'LeakyIAF' else 3.1e-7,
                            'circuit' : 'local'})
                G.add_edge(pre_id, synapse_id)
                G.add_edge(synapse_id, post_id)
            else:
                G.add_node(synapse_id,
                           {'class' : 'PowerGPotGPot',
                            'name' : name,
                            'slope' : 0.8*1e-6,
                            'threshold' : -50.0,
                            'power' : 1.0,
                            'saturation' : 0.03*1e-3,
                            'reverse' : -100.0,
                            'circuit' : 'local'})
                G.add_edge(pre_id, synapse_id, delay = 0.001)
                G.add_edge(synapse_id, post_id)
    return G
def create_lpu(file_name, lpu_name, N_sensory, N_local, N_proj):
    """Generate a generic LPU graph and serialize it to a GEXF file.

    Parameters
    ----------
    file_name : str
        Output GEXF file name.
    lpu_name : str
        Name of the LPU; used in port selectors.
    N_sensory : int
        Number of sensory neurons.
    N_local : int
        Number of local neurons.
    N_proj : int
        Number of projection neurons.
    """
    graph = create_lpu_graph(lpu_name, N_sensory, N_local, N_proj)
    nx.write_gexf(graph, file_name)
def create_input(file_name, N_sensory, dt=1e-4, dur=1.0, start=0.3, stop=0.6, I_max=0.6):
    """Write a rectangular-pulse stimulus for the sensory neurons to HDF5.

    Parameters
    ----------
    file_name : str
        Name of output HDF5 file.
    N_sensory : int
        Number of sensory neurons receiving the stimulus.
    dt : float
        Time resolution of the generated signal.
    dur : float
        Total duration of the signal.
    start, stop : float
        Pulse on/off times; the pulse is I_max for start < t < stop.
    I_max : float
        Pulse magnitude.
    """
    n_steps = int(dur/dt)
    t = np.arange(0, dt*n_steps, dt)
    uids = np.array(["sensory_"+str(i) for i in range(N_sensory)])
    stimulus = np.zeros((n_steps, N_sensory), dtype=np.float64)
    # Row mask selects the pulse window; broadcast across all neurons.
    stimulus[np.logical_and(t > start, t < stop)] = I_max
    with h5py.File(file_name, 'w') as f:
        f.create_dataset('I/uids', data=uids)
        f.create_dataset('I/data', (n_steps, N_sensory),
                         dtype=np.float64,
                         data=stimulus)
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('lpu_file_name', nargs='?', default='generic_lpu.gexf.gz',
                        help='LPU file name')
    parser.add_argument('in_file_name', nargs='?', default='generic_input.h5',
                        help='Input file name')
    parser.add_argument('-s', type=int,
                        help='Seed random number generator')
    parser.add_argument('-l', '--lpu', type=str, default='gen',
                        help='LPU name')
    args = parser.parse_args()
    if args.s is not None:
        np.random.seed(args.s)
    dt = 1e-4
    dur = 1.0
    start = 0.3
    stop = 0.6
    I_max = 0.6
    # NOTE(review): xrange is Python 2 only -- breaks under Python 3.
    neu_num = [np.random.randint(31, 40) for i in xrange(3)]
    create_lpu(args.lpu_file_name, args.lpu, *neu_num)
    g = nx.read_gexf(args.lpu_file_name)
    create_input(args.in_file_name, neu_num[0], dt, dur, start, stop, I_max)
    # NOTE(review): create_lpu is called a second time with the same counts,
    # regenerating (and overwriting) the graph with fresh random draws --
    # TODO confirm this duplication is intended.
    create_lpu(args.lpu_file_name, args.lpu, *neu_num)
|
AdamRTomkins/Neurokernel-singularity-container
|
examples/data/gen_generic_lpu.py
|
Python
|
apache-2.0
| 12,988
|
# -*- coding: utf-8 -*-
from pyramid.interfaces import IRequest
from openprocurement.api.interfaces import IContentConfigurator
from openprocurement.tender.belowthreshold.models import Tender, IBelowThresholdTender
from openprocurement.tender.belowthreshold.adapters import TenderBelowThersholdConfigurator
def includeme(config):
    """Pyramid ``includeme`` hook for the belowthreshold tender package.

    Registers the Tender model for its procurementMethodType, scans the
    package's views and subscribers, and registers the content-configurator
    adapter for belowthreshold tenders.
    """
    config.add_tender_procurementMethodType(Tender)
    config.scan("openprocurement.tender.belowthreshold.views")
    config.scan("openprocurement.tender.belowthreshold.subscribers")
    # Adapter that resolves IContentConfigurator for belowthreshold tenders.
    config.registry.registerAdapter(TenderBelowThersholdConfigurator,
                                    (IBelowThresholdTender, IRequest),
                                    IContentConfigurator)
|
openprocurement/openprocurement.tender.belowthreshold
|
openprocurement/tender/belowthreshold/includeme.py
|
Python
|
apache-2.0
| 716
|
"""
* Copyright (C) Caleb Marshall and others... - All Rights Reserved
* Written by Caleb Marshall <anythingtechpro@gmail.com>, May 27th, 2017
* Licensing information can found in 'LICENSE', which is part of this source code package.
"""
import struct
class Endianness(object):
    """
    An enum of the byte-order prefix characters understood by struct.
    """
    NATIVE = '='         # native byte order, standard sizes
    LITTLE_ENDIAN = '<'
    BIG_ENDIAN = '>'
    NETWORK = '!'        # network order (big-endian), standard sizes


class DataBufferError(IOError):
    """
    A data buffer specific io error.

    NOTE(review): defined for callers but not raised anywhere in this
    module itself.
    """


class DataBufferIO(object):
    """
    A class for manipulating (reading and/or writing) an array of bytes.

    All multi-byte values are packed/unpacked using the class-level
    BYTE_ORDER prefix (network/big-endian by default).
    """

    BYTE_ORDER = Endianness.NETWORK

    def __init__(self, data=bytes(), offset=0):
        self.data = data
        self.offset = offset

    @property
    def byte_order(self):
        # struct byte-order prefix applied to every format string
        return self.BYTE_ORDER

    @property
    def remaining(self):
        # bytes not yet consumed by read()/read_from()
        return self.data[self.offset:]

    def read(self, length):
        """Consume and return up to *length* bytes from the buffer."""
        data = self.remaining[:length]
        self.offset += length
        return data

    def write(self, data):
        """Append *data* to the buffer; empty input is a no-op."""
        if not data:
            return
        self.data += data

    def clear(self):
        """Drop all buffered data and reset the read offset."""
        self.data = bytes()
        self.offset = 0

    def read_from(self, fmt):
        """Unpack *fmt* at the current offset and advance past it.

        BUG FIX: the offset must advance by the *standard* size of the
        prefixed format.  The original used struct.calcsize(fmt), i.e.
        native sizes/alignment, which differ from the '!'-prefixed sizes
        for formats such as 'l'/'L' on LP64 platforms (8 native vs 4
        standard) and silently desynchronized the read offset.
        """
        data = struct.unpack_from(self.byte_order + fmt, self.data, self.offset)
        self.offset += struct.calcsize(self.byte_order + fmt)
        return data

    def write_to(self, fmt, *args):
        """Pack *args* with the prefixed *fmt* and append the bytes."""
        self.write(struct.pack(self.byte_order + fmt, *args))

    def read_byte(self):
        return self.read_from('b')[0]

    def write_byte(self, value):
        self.write_to('b', value)

    def read_ubyte(self):
        return self.read_from('B')[0]

    def write_ubyte(self, value):
        self.write_to('B', value)

    def read_bool(self):
        return self.read_from('?')[0]

    def write_bool(self, value):
        self.write_to('?', value)

    def read_short(self):
        return self.read_from('h')[0]

    def write_short(self, value):
        self.write_to('h', value)

    def read_ushort(self):
        return self.read_from('H')[0]

    def write_ushort(self, value):
        self.write_to('H', value)

    def read_int(self):
        return self.read_from('i')[0]

    def write_int(self, value):
        self.write_to('i', value)

    def read_uint(self):
        return self.read_from('I')[0]

    def write_uint(self, value):
        self.write_to('I', value)

    def read_long(self):
        return self.read_from('l')[0]

    def write_long(self, value):
        self.write_to('l', value)

    def read_ulong(self):
        return self.read_from('L')[0]

    def write_ulong(self, value):
        self.write_to('L', value)

    def read_long_long(self):
        return self.read_from('q')[0]

    def write_long_long(self, value):
        self.write_to('q', value)

    def read_ulong_long(self):
        return self.read_from('Q')[0]

    def write_ulong_long(self, value):
        self.write_to('Q', value)

    def read_float(self):
        return self.read_from('f')[0]

    def write_float(self, value):
        self.write_to('f', value)

    def read_double(self):
        return self.read_from('d')[0]

    def write_double(self, value):
        self.write_to('d', value)

    def read_char(self):
        return self.read_from('s')[0]

    def write_char(self, value):
        self.write_to('s', value)
|
AnythingTechPro/curionet
|
curionet/io.py
|
Python
|
apache-2.0
| 3,368
|
from element import BasePageElement
from locators import PageFooterLocators
from selenium.webdriver.common.action_chains import ActionChains
class BasePage(object):
    """Base page object; stores the shared Selenium WebDriver handle."""
    def __init__(self, driver):
        # WebDriver instance used by all page-object methods.
        self.driver = driver
class FooterPage(BasePage):
    """Page object for the site footer: verifies the copyright line and
    the social / attribution links."""

    def _click_and_verify(self, locator, url_fragment):
        # Shared helper: click the element found via *locator*, then report
        # whether *url_fragment* occurs in the resulting browser URL.
        # (Extracted from five near-identical click_* methods.)
        element = self.driver.find_element(*locator)
        element.click()
        return url_fragment in self.driver.current_url

    def is_copyright_matches(self):
        """Return True if the expected copyright text is on the page."""
        return "Lauren Nicole Smith 2015" in self.driver.page_source

    def click_github_button(self):
        return self._click_and_verify(PageFooterLocators.GET_GITHUB_BUTTON,
                                      "https://github.com/technolotrix")

    def click_linkedin_button(self):
        return self._click_and_verify(PageFooterLocators.GET_LINKEDIN_BUTTON,
                                      "https://www.linkedin.com/in/nicolelns")

    def click_twitter_button(self):
        # NOTE(review): "" is a substring of every string, so this check is
        # vacuous and always returns True after the click.  The expected
        # twitter URL appears to be missing — confirm and fill it in.
        return self._click_and_verify(PageFooterLocators.GET_TWITTER_BUTTON, "")

    def click_gplus_button(self):
        # NOTE(review): vacuous check, same issue as click_twitter_button.
        return self._click_and_verify(PageFooterLocators.GET_GPLUS_BUTTON, "")

    def click_html5up_link(self):
        return self._click_and_verify(PageFooterLocators.GET_HTML5UP_LINK,
                                      "http://html5up.net")
|
technolotrix/tests
|
tests/selenium/footer/page.py
|
Python
|
apache-2.0
| 1,389
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for UpdateTag
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datacatalog
# [START datacatalog_v1_generated_DataCatalog_UpdateTag_sync]
from google.cloud import datacatalog_v1
def sample_update_tag():
    """Demonstrate updating a Tag via the Data Catalog synchronous client."""
    # Create a client
    client = datacatalog_v1.DataCatalogClient()
    # Initialize request argument(s)
    tag = datacatalog_v1.Tag()
    tag.column = "column_value"
    tag.template = "template_value"
    request = datacatalog_v1.UpdateTagRequest(
        tag=tag,
    )
    # Make the request
    response = client.update_tag(request=request)
    # Handle the response
    print(response)
# [END datacatalog_v1_generated_DataCatalog_UpdateTag_sync]
|
googleapis/python-datacatalog
|
samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py
|
Python
|
apache-2.0
| 1,524
|
import re
import os, sys
import json
import csv
import shutil
import ctypes
import logging
import datetime
import fileinput
import subprocess
import xml.etree.ElementTree as etree
DEFAULT_HELPER_PATH = "helper"
class Logger(object):
    """Tee-style logger: duplicates everything written to it into both the
    real stdout and an append-mode log file.  Intended to be assigned to
    sys.stdout."""

    def __init__(self):
        """Capture the real stdout and open the append-mode log file."""
        self.terminal = sys.stdout
        self.log = open("image-gen-logfile.log", "a")

    def write(self, message):
        """Writes a log message to both the terminal and the log file.

        :param message: text to emit
        :return: None
        """
        # BUG FIX: the original computed a timestamp here
        # (datetime.datetime.now().strftime(...)) but never used it — the
        # dead call has been removed.  If timestamped log lines were the
        # intent, prefix `message` with it before the file write.
        self.terminal.write(message)
        self.log.write(message)

    def flush(self):
        """Flushes a log message
        :return:
        """
        # this flush method is needed for python 3 compatibility.
        # this handles the flush command by doing nothing.
        pass
class helper(object):
    """Static utility helpers for the VM image build scripts."""

    @staticmethod
    def executable_in_path(executable):
        '''Returns the full path to an executable according to PATH,
        otherwise None.'''
        if os.name == 'nt':
            # BUG FIX: the original hard-coded shutil.which('packer') here,
            # ignoring the *executable* argument.
            return shutil.which(executable)
        path = os.environ.get('PATH')
        if not path:
            # BUG FIX: this was Python 2 `print >> sys.stderr, ...` syntax,
            # after which the code crashed on None.split(':'); warn and
            # bail out instead.
            print("Warning: No PATH could be searched", file=sys.stderr)
            return None
        for directory in path.split(':'):
            fullpath = os.path.join(directory, executable)
            if os.path.isfile(fullpath) and os.access(fullpath, os.X_OK):
                return fullpath
        return None

    @staticmethod
    def validate_argtype(arg, argtype):
        """Validates argument against given type.

        :param arg: value to check
        :param argtype: expected type
        :return: *arg* unchanged
        :raises HelperException: if *arg* is not an instance of *argtype*
        """
        if not isinstance(arg, argtype):
            raise HelperException('{0} argument must be of type {1}'.format(
                arg, argtype))
        return arg

    @staticmethod
    def get_guestos(os_string, os_arch, vm_provider):
        """Returns guest os type for a specific provider.

        :param os_string: OS name string (matched against 'linux'/'windows')
        :param os_arch: architecture; the string '64' selects the 64-bit id
        :param vm_provider: provider name; selects <provider>-guestos.json
        :return: guest OS id from the provider mapping, or None if unknown
        """
        # BUG FIX: guestos was unbound when os_string matched neither
        # 'linux' nor 'windows'; default to the raw string instead.
        guestos = os_string
        if "linux" in os_string.lower():
            # Strip digits/punctuation, e.g. 'Ubuntu Linux 16' -> 'Ubuntu Linux'
            guestos = re.sub(r'\W+', ' ', re.sub(r'\d+', ' ', os_string)).strip()
        if "windows" in os_string.lower():
            guestos = os_string
        if os_arch == '64':
            guestos = guestos + "_" + str(os_arch)
        guestos = guestos.replace(" ", "_")
        data = ""
        try:
            guest_os_file = os.path.join(DEFAULT_HELPER_PATH, (vm_provider.lower() + '-guestos.json'))
            with open(guest_os_file) as data_file:
                data = json.load(data_file)
        except (OSError, IOError) as ex:
            print("error in opening packer template json file")
            # BUG FIX: exceptions have no .message attribute in Python 3;
            # the duplicate console print of the message was dropped too.
            logging.error(str(ex))
        assert isinstance(data, object)
        if guestos in data:
            return data[guestos]
        elif "windows" in guestos.lower():
            if os_arch == 32:
                return data['Windows']
            else:
                return data['Windows_64']
        elif "linux" in guestos.lower():
            if os_arch == 32:
                return data['Linux']
            else:
                return data['Linux_64']

    @staticmethod
    def run(cmd):
        """Runs a command, streaming combined stdout/stderr to the console.

        :param cmd: command line string (split on single spaces)
        :return: None
        """
        try:
            p = subprocess.Popen(cmd.split(' '), stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT, bufsize=1)
            for line in iter(p.stdout.readline, b''):
                print(line.rstrip().decode('utf-8'))
            p.stdout.close()
            p.wait()
        except (subprocess.CalledProcessError, KeyboardInterrupt):
            print("Received keyboard interrupt, terminating the build process...")

    @staticmethod
    def SearchReplaceInFile(file, searchpattern, replacewith):
        """Replaces *searchpattern* with *replacewith* in-place in *file*.

        :param file: path of the file to edit
        :param searchpattern: substring to search for
        :param replacewith: replacement text
        :return: None
        """
        # fileinput with inplace=1 redirects stdout into the file, so every
        # line must be written back, modified or not.
        for line in fileinput.input(file, inplace=1):
            if searchpattern in line:
                line = line.replace(searchpattern, replacewith)
            sys.stdout.write(line)
        fileinput.close()

    @staticmethod
    def get_productkey(_dbms_query):
        """Stub: returns a blank product key.

        :param _dbms_query: unused
        :return: single-space placeholder string
        """
        return " "
class HelperException(Exception):
    """Custom helper exception.

    Raised by helper.validate_argtype when an argument has the wrong type.
    """
    pass
|
skoli0/vmbuilder
|
helper/helper.py
|
Python
|
apache-2.0
| 5,599
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the TRT conversion for Windows platform."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import platform
from tensorflow.python.util.tf_export import tf_export
if platform.system() != "Windows":
raise RuntimeError(
"This module is expected to be loaded only on Windows platform.")
class TrtPrecisionMode(object):
  """Enumeration of the precision modes accepted by TF-TRT conversion."""
  FP32 = "FP32"
  FP16 = "FP16"
  INT8 = "INT8"
# Use a large enough number as the default max_workspace_size for TRT engines,
# so it can produce reasonable performance results with the default.
DEFAULT_TRT_MAX_WORKSPACE_SIZE_BYTES = 1 << 30
@tf_export("experimental.tensorrt.ConversionParams", v1=[])
class TrtConversionParams(object):
  """A class to encapsulate parameters that are used for TF-TRT conversion.

  This is the Windows stub: construction always raises, because TF-TRT is
  not available on Windows.
  """

  def __init__(self,
               rewriter_config_template=None,
               max_workspace_size_bytes=DEFAULT_TRT_MAX_WORKSPACE_SIZE_BYTES,
               precision_mode=TrtPrecisionMode.FP32,
               minimum_segment_size=3,
               is_dynamic_op=True,
               maximum_cached_engines=1,
               use_calibration=True,
               max_batch_size=1):
    """Initialize TrtConversionParams.

    Args:
      rewriter_config_template: a template RewriterConfig proto used to create a
        TRT-enabled RewriterConfig. If None, it will use a default one.
      max_workspace_size_bytes: the maximum GPU temporary memory which the TRT
        engine can use at execution time. This corresponds to the
        'workspaceSize' parameter of nvinfer1::IBuilder::setMaxWorkspaceSize().
      precision_mode: one of TrtPrecisionMode.supported_precision_modes().
      minimum_segment_size: the minimum number of nodes required for a subgraph
        to be replaced by TRTEngineOp.
      is_dynamic_op: whether to generate dynamic TRT ops which will build the
        TRT network and engine at run time. i.e. Since TensorRT version < 6.0
        does not support dynamic dimensions other than the batch dimension, when
        the TensorFlow graph has a non-batch dimension of dynamic size, we would
        need to enable this option. This option should be set to True in TF 2.0.
      maximum_cached_engines: max number of cached TRT engines for dynamic TRT
        ops. Created TRT engines for a dynamic dimension are cached. This is the
        maximum number of engines that can be cached. If the number of cached
        engines is already at max but none of them supports the input shapes,
        the TRTEngineOp will fall back to run the original TF subgraph that
        corresponds to the TRTEngineOp.
      use_calibration: this argument is ignored if precision_mode is not INT8.
        If set to True, a calibration graph will be created to calibrate the
        missing ranges. The calibration graph must be converted to an inference
        graph by running calibration with calibrate(). If set to False,
        quantization nodes will be expected for every tensor in the graph
        (excluding those which will be fused). If a range is missing, an error
        will occur. Please note that accuracy may be negatively affected if
        there is a mismatch between which tensors TRT quantizes and which
        tensors were trained with fake quantization.
      max_batch_size: max size for the input batch. This parameter is only
        effective when is_dynamic_op=False which is not supported in TF 2.0.

    Raises:
      NotImplementedError: TRT is not supported on Windows.
    """
    raise NotImplementedError(
        "TensorRT integration is not available on Windows.")
@tf_export("experimental.tensorrt.Converter", v1=[])
class TrtConverterWindows(object):
  """An offline converter for TF-TRT transformation for TF 2.0 SavedModels.

  Currently this is not available on Windows platform, so construction
  always raises NotImplementedError.
  """

  def __init__(self,
               input_saved_model_dir=None,
               input_saved_model_tags=None,
               input_saved_model_signature_key=None,
               conversion_params=None):
    """Initialize the converter.

    Args:
      input_saved_model_dir: the directory to load the SavedModel which contains
        the input graph to transform. Used only when input_graph_def is None.
      input_saved_model_tags: list of tags to load the SavedModel.
      input_saved_model_signature_key: the key of the signature to optimize the
        graph for.
      conversion_params: a TrtConversionParams instance.

    Raises:
      NotImplementedError: TRT is not supported on Windows.
    """
    raise NotImplementedError(
        "TensorRT integration is not available on Windows.")
|
jhseu/tensorflow
|
tensorflow/python/compiler/tensorrt/trt_convert_windows.py
|
Python
|
apache-2.0
| 5,368
|
from abc import ABC, abstractmethod
from .utils import FileUtils, LoggingUtils
import os
import subprocess
class Build(ABC):
    """Abstract build pipeline: subclasses implement before_build/build/after_build."""

    def __init__(self, configuration):
        super().__init__()
        # Build configuration object (source folder, build command, token maps...).
        self.configuration = configuration

    @abstractmethod
    def before_build(self):
        raise NotImplementedError()

    @abstractmethod
    def build(self):
        raise NotImplementedError()

    @abstractmethod
    def after_build(self):
        raise NotImplementedError()

    def build_application(self):
        """Run the configured build command inside the source folder."""
        assert self.configuration.source_folder is not None
        assert self.configuration.build_command is not None
        FileUtils.chdir(self.configuration.source_folder)
        # Accept either a pre-split argv list or a whitespace-joined string.
        if isinstance(self.configuration.build_command, list):
            _cmdline = self.configuration.build_command
        else:
            _cmdline = self.configuration.build_command.split(' ')
        print(_cmdline)
        subprocess.call(_cmdline)

    def _setup_tokens_before_build(self):
        # Apply the pre-build token substitutions from the configuration.
        assert self.configuration.tokens_before_build is not None
        self._tokens(self.configuration.tokens_before_build)

    def _setup_tokens_after_build(self):
        # Apply the post-build token substitutions from the configuration.
        assert self.configuration.tokens_after_build is not None
        self._tokens(self.configuration.tokens_after_build)

    def _create_build_version_folder(self):
        assert self.configuration.build_parent_path is not None
        FileUtils.make_folders(self.configuration.build_parent_path)

    def _tokens(self, tokens, replace_tokens=True):
        """Reset each tokenized file from git, then substitute token bodies.

        *tokens* maps filename -> {token_name: replacement_text}.
        """
        for _filename in tokens:
            _source_pathname = f'{self.configuration.source_folder}{os.sep}{_filename}'
            self._revert_git_file(_source_pathname)
            if replace_tokens and tokens[_filename]:
                for _token in tokens[_filename]:
                    self._replace_token_filename_content(_source_pathname, _token, tokens[_filename][_token])

    def _replace_token_filename_content(self, source_filename, token, text_to_insert):
        # Replace the region delimited by BUILD_START_TOKEN/BUILD_END_TOKEN
        # comment markers inside the source file.
        _start_token = f'// BUILD_START_TOKEN: {token}'
        _end_token = f'// BUILD_END_TOKEN: {token}'
        FileUtils.replace_text_between_tags(
            source_filename,
            _start_token,
            _end_token,
            text_to_insert)

    def _revert_git_file(self, git_filename):
        """Discard local modifications to *git_filename* via git checkout."""
        assert git_filename is not None
        FileUtils.chdir(os.path.dirname(git_filename))
        LoggingUtils.log(f'(Git Reset) {git_filename}')
        _cmd_line = 'git checkout --quiet ' + os.path.basename(git_filename)
        subprocess.call(_cmd_line.split(' '))

    """ Public Members """
    # NOTE(review): class-level default, shadowed by the instance attribute
    # assigned in __init__.
    configuration = None
|
PublicHealthEngland/animal-welfare-assessment-grid
|
prepare-build/uk_gov_phe_erdst_sc/build.py
|
Python
|
apache-2.0
| 2,643
|
#!/usr/bin/python
NWID=1
NR_NODES=20
#Controllers=[{"ip":'127.0.0.1', "port":6633}, {"ip":'10.0.1.28', "port":6633}]
Controllers=[{"ip":'10.0.1.28', "port":6633}]
"""
Start up a Simple topology
"""
from mininet.net import Mininet
from mininet.node import Controller, RemoteController
from mininet.log import setLogLevel, info, error, warn, debug
from mininet.cli import CLI
from mininet.topo import Topo
from mininet.util import quietRun
from mininet.moduledeps import pathCheck
from mininet.link import Link, TCLink
from sys import exit
import os.path
from subprocess import Popen, STDOUT, PIPE
import sys
#import argparse
class MyController( Controller ):
    """Remote-controller placeholder: start/stop are no-ops because the
    controller process runs outside of Mininet's control."""
    def __init__( self, name, ip='127.0.0.1', port=6633, **kwargs):
        """Init.
        name: name to give controller
        ip: the IP address where the remote controller is
        listening
        port: the port where the remote controller is listening"""
        Controller.__init__( self, name, ip=ip, port=port, **kwargs )
    def start( self ):
        "Overridden to do nothing."
        return
    def stop( self ):
        "Overridden to do nothing."
        return
    def checkListening( self ):
        "Warn if remote controller is not accessible"
        # Probe the controller port with telnet; 'Unable' in the output
        # means the TCP connection could not be established.
        listening = self.cmd( "echo A | telnet -e A %s %d" %
                              ( self.ip, self.port ) )
        if 'Unable' in listening:
            warn( "Unable to contact the remote controller"
                  " at %s:%d\n" % ( self.ip, self.port ) )
class SDNTopo( Topo ):
    "SDN Topology"
    def __init__( self, *args, **kwargs ):
        """Build a star topology: NR_NODES switches with switch 0 at the
        center, one host per switch, plus one out-of-namespace 'root'
        host per host (used for sshd access from the test machine)."""
        Topo.__init__( self, *args, **kwargs )
        switch = []
        host = []
        root = []
        # One switch per node; the DPID encodes network id and node index.
        for i in range (NR_NODES):
            name_suffix = '%02d' % NWID + "." + '%02d' % i
            dpid_suffix = '%02x' % NWID + '%02x' % i
            dpid = '0000' + '0000' + '0000' + dpid_suffix
            sw = self.addSwitch('sw'+name_suffix, dpid=dpid)
            switch.append(sw)
        for i in range (NR_NODES):
            host.append(self.addHost( 'host%d' % i ))
        # Root-namespace hosts (inNamespace=False) for external access.
        for i in range (NR_NODES):
            root.append(self.addHost( 'root%d' % i, inNamespace=False ))
        for i in range (NR_NODES):
            self.addLink(host[i], switch[i])
        # Star: every other switch links to switch 0.
        for i in range (1, NR_NODES):
            self.addLink(switch[0], switch[i])
        for i in range (NR_NODES):
            self.addLink(root[i], host[i])
def startsshd( host ):
    "Start sshd on host"
    info( '*** Starting sshd\n' )
    name, intf, ip = host.name, host.defaultIntf(), host.IP()
    banner = '/tmp/%s.banner' % name
    # Per-host banner file lets stopsshd() find and kill these sshd
    # instances later (pkill -f Banner).
    host.cmd( 'echo "Welcome to %s at %s" > %s' % ( name, ip, banner ) )
    host.cmd( '/usr/sbin/sshd -o "Banner %s"' % banner, '-o "UseDNS no"' )
    info( '***', host.name, 'is running sshd on', intf, 'at', ip, '\n' )
def startsshds ( hosts ):
    """Start sshd on every host in *hosts*."""
    for h in hosts:
        startsshd( h )
def stopsshd( ):
    "Stop *all* sshd processes with a custom banner"
    # Matches the '-o Banner ...' option set by startsshd().
    info( '*** Shutting down stale sshd/Banner processes ',
          quietRun( "pkill -9 -f Banner" ), '\n' )
def sdnnet(opt):
    """Build and start the test network; enter the Mininet CLI when
    opt == 'cli', then shut everything down.

    NOTE(review): the print statements below use Python 2 syntax — this
    script is Python 2 only.
    """
    topo = SDNTopo()
    info( '*** Creating network\n' )
    #net = Mininet( topo=topo, controller=MyController, link=TCLink)
    net = Mininet( topo=topo, link=TCLink, build=False)
    # Attach one RemoteController per entry in the module-level
    # Controllers list before building the network.
    controllers=[]
    for c in Controllers:
        rc = RemoteController('c%d' % Controllers.index(c), ip=c['ip'],port=c['port'])
        print "controller ip %s port %s" % (c['ip'], c['port'])
        controllers.append(rc)
    net.controllers=controllers
    net.build()
    host = []
    for i in range (NR_NODES):
        host.append(net.get( 'host%d' % i ))
    net.start()
    # Attach the external tap interface to the center switch.
    sw=net.get('sw01.00')
    print "center sw", sw
    sw.attach('tapa0')
    # Data-plane addresses for the hosts.
    for i in range (NR_NODES):
        host[i].defaultIntf().setIP('192.168.%d.%d/16' % (NWID,i))
    root = []
    for i in range (NR_NODES):
        root.append(net.get( 'root%d' % i ))
    # Point-to-point management links between each host and its
    # root-namespace peer (used for ssh access).
    for i in range (NR_NODES):
        host[i].intf('host%d-eth1' % i).setIP('1.1.%d.1/24' % i)
        root[i].intf('root%d-eth0' % i).setIP('1.1.%d.2/24' % i)
    stopsshd ()
    startsshds ( host )
    if opt=="cli":
        CLI(net)
    stopsshd()
    net.stop()
if __name__ == '__main__':
    setLogLevel( 'info' )
    # No args: interactive CLI; '-n': run without the CLI.
    if len(sys.argv) == 1:
        sdnnet("cli")
    elif len(sys.argv) == 2 and sys.argv[1] == "-n":
        sdnnet("nocli")
    else:
        # Python 2 print statement: usage message.
        print "%s [-n]" % sys.argv[0]
|
opennetworkinglab/spring-open
|
old-scripts/test-network/mininet/net.sprint5-templete.py
|
Python
|
apache-2.0
| 4,452
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Thomas Quintana <quintana.thomas@gmail.com>
# Error message templates for the actor registry; each takes the actor's
# name via a single %s placeholder.
CLASS_REGISTRATION_CONFLICT = "The actor %s has already been registered as " \
                              "a singleton actor."
INSTANCE_REGISTRATION_CONFLICT = "The actor %s has already been registered " \
                                 "as a non singleton actor."
INVALID_ACTOR_CLASS = "The class %s is not a subclass of " \
                      "'freepy.lib.actors.actor.Actor'."
|
thomasquintana/freepy
|
freepy/lib/actors/errors.py
|
Python
|
apache-2.0
| 1,220
|
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import json
import logging
from multiprocessing.dummy import Pool
from pip._vendor import six
from pip._vendor.requests.adapters import DEFAULT_POOLSIZE
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.exceptions import CommandError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.self_outdated_check import make_link_collector
from pip._internal.utils.misc import (
dist_is_editable,
get_installed_distributions,
tabulate,
write_output,
)
from pip._internal.utils.packaging import get_installer
logger = logging.getLogger(__name__)
class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    usage = """
      %prog [options]"""

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(cmdoptions.list_path())
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "or json",
        )
        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )
        # --exclude-editable / --include-editable share one dest; the last
        # flag on the command line wins (default is to include editables).
        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, session):
        """
        Create a package finder appropriate to this list command.
        """
        link_collector = make_link_collector(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
        )

    def run(self, options, args):
        """Entry point: validate options, gather installed distributions,
        apply the requested filters, and print the listing."""
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        cmdoptions.check_list_path_option(options)

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
            paths=options.path,
        )

        # get_not_required must be called firstly in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        """Return dists whose latest available version is newer than the
        installed one."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        """Return dists whose installed version equals the latest available."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        """Return the subset of *packages* that no other installed package
        depends on."""
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        """Yield dists annotated with .latest_version / .latest_filetype,
        querying the index concurrently via a thread pool."""
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            def latest_info(dist):
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.project_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    return None

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist

            # This is done for 2x speed up of requests to pypi.org
            # so that "real time" of this function
            # is almost equal to "user time"
            pool = Pool(DEFAULT_POOLSIZE)

            for dist in pool.imap_unordered(latest_info, packages):
                if dist is not None:
                    yield dist

            pool.close()
            pool.join()

    def output_package_listing(self, packages, options):
        """Print *packages* in the format selected by --format
        (columns, freeze, or json)."""
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    write_output("%s==%s (%s)", dist.project_name,
                                 dist.version, dist.location)
                else:
                    write_output("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            write_output(format_for_json(packages, options))

    def output_package_listing_columns(self, data, header):
        """Tabulate *data* and print it with a dashed separator under the
        header row."""
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            write_output(val)
def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.

    Returns a (rows, header) pair.  Extra columns are added for the
    outdated listing ("Latest", "Type"), for editable installs or verbose
    mode ("Location"), and for verbose mode ("Installer").
    """
    show_outdated = options.outdated
    verbose = options.verbose >= 1

    header = ["Package", "Version"]
    if show_outdated:
        # Adjust the header for the `pip list --outdated` case.
        header += ["Latest", "Type"]
    if verbose or any(dist_is_editable(pkg) for pkg in pkgs):
        header.append("Location")
    if verbose:
        header.append("Installer")

    rows = []
    for dist in pkgs:
        row = [dist.project_name, dist.version]
        if show_outdated:
            # Separate out the latest_version and type for outdated listings.
            row += [dist.latest_version, dist.latest_filetype]
        if verbose or dist_is_editable(dist):
            row.append(dist.location)
        if verbose:
            row.append(get_installer(dist))
        rows.append(row)

    return rows, header
def format_for_json(packages, options):
    """Serialize *packages* to the JSON string emitted by
    ``pip list --format=json``."""
    verbose = options.verbose >= 1
    outdated = options.outdated

    def entry(dist):
        # One JSON object per distribution; optional keys mirror the
        # verbose / outdated listing columns.
        item = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if verbose:
            item['location'] = dist.location
            item['installer'] = get_installer(dist)
        if outdated:
            item['latest_version'] = six.text_type(dist.latest_version)
            item['latest_filetype'] = dist.latest_filetype
        return item

    return json.dumps([entry(dist) for dist in packages])
|
mdworks2016/work_development
|
Python/20_Third_Certification/venv/lib/python3.7/site-packages/pip/_internal/commands/list.py
|
Python
|
apache-2.0
| 10,511
|
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
from streamalert.shared.artifact_extractor import Artifact
from streamalert.shared.firehose import FirehoseClient
from streamalert.shared.logger import get_logger
from streamalert.shared.alert import Alert
from streamalert_cli.helpers import record_to_schema
LOGGER = get_logger(__name__)
PARTITION_PARTS = re.compile(
    r'dt=(?P<year>\d{4})\-(?P<month>\d{2})\-(?P<day>\d{2})\-(?P<hour>\d{2})')
# The returned partition from the SHOW PARTITIONS command is dt=YYYY-MM-DD-HH,
# But when re-creating new partitions this value must be quoted
PARTITION_STMT = ("PARTITION (dt = '{year}-{month}-{day}-{hour}') "
                  "LOCATION 's3://{bucket}/{table_name}/{year}/{month}/{day}/{hour}'")
# How to map log schema types to Athena/Hive types
SCHEMA_TYPE_MAPPING = {
    'string': 'string',
    'integer': 'bigint',
    'boolean': 'boolean',
    'float': 'decimal(10,3)',
    dict: 'map<string,string>',
    list: 'array<string>'
}
# Athena query statement length limit
MAX_QUERY_LENGTH = 262144
def add_partition_statements(partitions, bucket, table_name):
    """Generate ALTER TABLE commands from existing partitions. It will yield
    Athena statement string(s); the length of each string is kept below the
    Athena query statement length limit, 262144 bytes.
    https://docs.aws.amazon.com/athena/latest/ug/service-limits.html
    Args:
        partitions (set): The unique set of partitions gathered from Athena
        bucket (str): The bucket name
        table_name (str): The name of the Athena table
    Yields:
        string: The ALTER TABLE statements to add the new partitions
    """
    # Each add partition statement starts with "ALTER TABLE"
    initial_statement = 'ALTER TABLE {} ADD IF NOT EXISTS'.format(table_name)
    initial_statement_len = len(initial_statement)
    # The statement is accumulated as a list of strings before being joined
    statement = [initial_statement]
    statement_len = initial_statement_len
    fmt_values = {
        'bucket': bucket,
        'table_name': table_name
    }
    for partition in sorted(partitions):
        parts = PARTITION_PARTS.match(partition)
        if not parts:
            # Skip anything that does not look like dt=YYYY-MM-DD-HH
            continue
        fmt_values.update(parts.groupdict())
        partition_stmt = PARTITION_STMT.format(**fmt_values)
        partition_stmt_len = len(partition_stmt)
        # One space is added between sub strings when joining the statement
        space_count = len(statement)
        # Monitor the length of the whole statement and make sure it won't
        # exceed the limit
        if statement_len + partition_stmt_len + space_count >= MAX_QUERY_LENGTH:
            # If the length of the whole statement is about to exceed the
            # limit, yield the statement and reset it for the rest of the
            # partitions
            yield ' '.join(statement)
            statement = [initial_statement]
            statement_len = initial_statement_len
        statement_len += partition_stmt_len
        statement.append(partition_stmt)
    yield ' '.join(statement)
def logs_schema_to_athena_schema(log_schema, ddl_statement=True):
    """Convert streamalert log schema to athena schema
    Args:
        log_schema (dict): StreamAlert log schema object.
        ddl_statement (bool): Indicate if the Athena table created by Athena
            DDL query or terraform aws_glue_catalog_table resource
    Returns:
        athena_schema (dict): Equivalent Athena schema used for generating create table statement
    """
    athena_schema = {}
    for name, value_type in log_schema.items():
        # Backticks keep column names Hive-compliant, but are only needed
        # (and only valid) when the table is created via an Athena DDL query.
        column = '`{}`'.format(name) if ddl_statement else name
        # Order matters: an empty dict/list must be caught before the
        # generic isinstance(dict) recursion below.
        if value_type == {}:
            athena_schema[column] = SCHEMA_TYPE_MAPPING[dict]
        elif value_type == []:
            athena_schema[column] = SCHEMA_TYPE_MAPPING[list]
        elif isinstance(value_type, dict):
            # Nested schema: recurse to build the sub-structure
            athena_schema[column] = logs_schema_to_athena_schema(value_type, ddl_statement)
        else:
            athena_schema[column] = SCHEMA_TYPE_MAPPING[value_type]
    return athena_schema
def unique_values_from_query(query_result):
    """Simplify Athena query results into a set of values.
    Useful for listing tables, partitions, databases, enable_metrics
    Args:
        query_result (dict): The result of run_athena_query
    Returns:
        set: Unique values from the query result
    """
    values = set()
    for row in query_result['ResultSet']['Rows']:
        for cell in row['Data']:
            # Each cell is a dict (e.g. {'VarCharValue': ...}); collect
            # every value regardless of the key name.
            values.update(cell.values())
    return values
def format_schema_tf(schema):
    """Format schema for an Athena table for terraform.
    Args:
        schema (dict): Equivalent Athena schema used for generating create table statement
    Returns:
        formatted_schema (list(tuple))
    """
    formatted_schema = []
    # Iterate in sorted key order so output is deterministic for terraform
    for column, column_type in sorted(schema.items()):
        if isinstance(column_type, dict):
            # Nested schema becomes a Hive struct<...> column
            inner = ','.join(
                '{0}:{1}'.format(sub_key.lower(), column_type[sub_key])
                for sub_key in sorted(column_type)
            )
            formatted_schema.append((column.lower(), 'struct<{}>'.format(inner)))
        elif isinstance(column_type, str):
            formatted_schema.append((column.lower(), column_type))
        # Anything else is silently skipped, matching prior behavior
    return formatted_schema
def generate_alerts_table_schema():
    """Generate the schema for alerts table in terraform by using a fake alert
    Returns:
        athena_schema (dict): Equivalent Athena schema used for generating create table statement
    """
    # A throwaway alert whose attribute layout is introspected into a schema;
    # the actual field values are irrelevant.
    fake_alert = Alert('temp_rule_name', {}, {})
    schema = record_to_schema(fake_alert.output_dict())
    # ddl_statement=False: terraform columns must not be backtick-quoted
    return format_schema_tf(logs_schema_to_athena_schema(schema, False))
def generate_data_table_schema(config, table, schema_override=None):
    """Generate the schema for data table in terraform
    Args:
        config (CLIConfig): Loaded StreamAlert config
        table (string): The name of data table
        schema_override (list): Optional 'column_name=column_type' strings
            applied on top of the log schema after conversion
    Returns:
        athena_schema (dict): Equivalent Athena schema used for generating
            create table statement, or None when the table is not enabled
    """
    enabled_logs = FirehoseClient.load_enabled_log_sources(
        config['global']['infrastructure']['firehose'],
        config['logs']
    )
    # Convert special characters in schema name to underscores
    sanitized_table_name = FirehoseClient.sanitized_value(table)
    # Check that the log type is enabled via Firehose
    if sanitized_table_name not in enabled_logs:
        LOGGER.error('Table name %s missing from configuration or '
                     'is not enabled.', sanitized_table_name)
        return None
    log_info = config['logs'][enabled_logs.get(sanitized_table_name)]
    # Copy so the loaded config's schema dict is not mutated downstream
    schema = dict(log_info['schema'])
    sanitized_schema = FirehoseClient.sanitize_keys(schema)
    # ddl_statement=False: terraform-bound columns are not backtick-quoted
    athena_schema = logs_schema_to_athena_schema(sanitized_schema, False)
    # Add envelope keys to Athena Schema
    configuration_options = log_info.get('configuration')
    if configuration_options:
        envelope_keys = configuration_options.get('envelope_keys')
        if envelope_keys:
            sanitized_envelope_key_schema = FirehoseClient.sanitize_keys(envelope_keys)
            # Note: this key is wrapped in backticks to be Hive compliant
            athena_schema['streamalert:envelope_keys'] = logs_schema_to_athena_schema(
                sanitized_envelope_key_schema, False
            )
    # Handle Schema overrides
    # This is useful when an Athena schema needs to differ from the normal log schema
    if schema_override:
        for override in schema_override:
            column_name, column_type = override.split('=')
            # Columns are escaped to avoid Hive issues with special characters
            # NOTE(review): '{}'.format(column_name) is a no-op, so no escaping
            # actually happens here — confirm whether backtick-wrapping was
            # intended (and whether keys in athena_schema would then match).
            column_name = '{}'.format(column_name)
            if column_name in athena_schema:
                athena_schema[column_name] = column_type
                LOGGER.info('Applied schema override: %s:%s', column_name, column_type)
            else:
                LOGGER.error(
                    'Schema override column %s not found in Athena Schema, skipping',
                    column_name
                )
    return format_schema_tf(athena_schema)
def generate_artifacts_table_schema():
    """Generate the schema for artifacts table in terraform by using a test artifact instance
    Returns:
        athena_schema (dict): Equivalent Athena schema used for generating create table statement
    """
    # Fix: the original had a duplicated assignment ("artifact = artifact = ...").
    # The instance exists only so its attribute layout can be introspected;
    # the placeholder field values themselves are irrelevant.
    artifact = Artifact(
        normalized_type='test_normalized_type',
        value='test_value',
        source_type='test_source_type',
        record_id='test_record_id',
        function=None
    )
    schema = record_to_schema(artifact.artifact)
    # ddl_statement=False: terraform columns must not be backtick-quoted
    athena_schema = logs_schema_to_athena_schema(schema, False)
    return format_schema_tf(athena_schema)
|
airbnb/streamalert
|
streamalert_cli/athena/helpers.py
|
Python
|
apache-2.0
| 9,745
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to extract a catalog of Windows Registry keys and values."""
import argparse
import logging
import sys
from dfwinreg import creg as dfwinreg_creg
from dfwinreg import regf as dfwinreg_regf
from dfwinreg import registry as dfwinreg_registry
from winregrc import catalog
from winregrc import output_writers
class StdoutWriter(output_writers.StdoutOutputWriter):
  """Stdout output writer for the Registry catalog."""

  def WriteKeyPath(self, key_path):
    """Writes a key path to the output.

    Args:
      key_path (str): key path.
    """
    self.WriteText('{0:s}\n'.format(key_path))

  def WriteValueDescriptor(self, value_name, value_data_type):
    """Writes a value descriptor to the output.

    Args:
      value_name (str): name of the value.
      value_data_type (str): data type of the value.
    """
    # Tab-separated so values line up under their parent key path.
    self.WriteText('\t{0:s}\t{1:s}\n'.format(value_name, value_data_type))
def Main():
  """The main program function.

  Parses the command line, opens the given Windows Registry file (trying
  the REGF format first, then CREG), and writes a catalog of its keys and
  values to stdout.

  Returns:
    bool: True if successful or False if not.
  """
  argument_parser = argparse.ArgumentParser(description=(
      'Extracts a catalog of Windows Registry keys and values.'))
  argument_parser.add_argument(
      'source', nargs='?', action='store', metavar='PATH', default=None,
      help='path of a Windows Registry file.')
  options = argument_parser.parse_args()
  if not options.source:
    print('Source value is missing.')
    print('')
    argument_parser.print_help()
    print('')
    return False
  logging.basicConfig(
      level=logging.INFO, format='[%(levelname)s] %(message)s')
  with open(options.source, 'rb') as file_object:
    # Try the newer REGF format first; fall back to the legacy CREG
    # (Windows 9x) format when that fails.
    try:
      registry_file = dfwinreg_regf.REGFWinRegistryFile()
      registry_file.Open(file_object)
    except IOError:
      registry_file = None
    if not registry_file:
      try:
        registry_file = dfwinreg_creg.CREGWinRegistryFile()
        registry_file.Open(file_object)
      except IOError:
        registry_file = None
    if not registry_file:
      print('Unable to open Windows Registry file.')
      return False
    # Using dfWinReg to determine Windows native key paths if available.
    registry = dfwinreg_registry.WinRegistry()
    key_path_prefix = registry.GetRegistryFileMapping(registry_file)
    registry_file.SetKeyPathPrefix(key_path_prefix)
    root_key = registry_file.GetRootKey()
    output_writer_object = StdoutWriter()
    if not output_writer_object.Open():
      print('Unable to open output writer.')
      print('')
      return False
    try:
      has_results = False
      if root_key:
        collector_object = catalog.CatalogCollector()
        has_results = collector_object.Collect(root_key, output_writer_object)
    finally:
      # Always close the writer, even when collection raises.
      output_writer_object.Close()
    if not has_results:
      print('No catalog keys and values found.')
  return True
if __name__ == '__main__':
  # Exit status 0 on success, 1 on failure.
  sys.exit(0 if Main() else 1)
|
libyal/winreg-kb
|
scripts/catalog.py
|
Python
|
apache-2.0
| 2,974
|
import sqlite3
import logging
from time import sleep
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import *
import sweetSecurityDB
dbPath="/opt/sweetsecurity/client/SweetSecurity.db"
def convertMAC(mac):
    """Insert colons into a bare 12-character MAC string (aabbcc... -> aa:bb:...)."""
    octets = [mac[pos:pos + 2] for pos in range(0, 12, 2)]
    return ":".join(octets)
def getMac():
    """Return the hardware address of the first interface with a real MAC.

    Skips the all-zero address reported for loopback/virtual interfaces;
    returns None implicitly when no interface qualifies.
    """
    for iface in get_if_list():
        hw_addr = get_if_hwaddr(iface)
        if hw_addr != "00:00:00:00:00:00":
            return hw_addr
def start():
    """Continuously ARP-spoof every active, non-ignored host in the database.

    Each second, re-reads the default gateway and the host list from the
    SQLite database, then poisons both directions: tells the gateway this
    machine owns each victim IP, and tells each victim this machine owns
    the gateway IP. Runs forever; errors are logged and the loop continues.
    """
    logger = logging.getLogger('SweetSecurityLogger')
    while 1:
        try:
            dfgwInfo = sweetSecurityDB.getDfgw()
            dfgw = dfgwInfo['dfgw']
            dfgwMAC = convertMAC(dfgwInfo['dfgwMAC'])
            conn = sqlite3.connect(dbPath)
            c = conn.cursor()
            for row in c.execute('SELECT * FROM hosts where active = 1 and ignore = 0'):
                # row[2] = IP address, row[3] = bare MAC string
                logger.info("Spoofing Device: ip=%s, mac=%s", row[2], row[3])
                # Spoof the things...
                victimMac = convertMAC(row[3])
                # Claim the victim's IP toward the gateway...
                packet = Ether()/ARP(op="who-has", hwdst=dfgwMAC, pdst=dfgw, psrc=row[2])
                sendp(packet)
                # ...and claim the gateway's IP toward the victim.
                packet = Ether()/ARP(op="who-has", hwdst=victimMac, pdst=row[2], psrc=dfgw)
                sendp(packet)
            conn.close()
            sleep(1)
        except Exception as e:  # fix: "except Exception,e" is Python-2-only syntax
            logger.info("Error spoofing device: %s", str(e))
|
TravisFSmith/SweetSecurity
|
sweetSecurity/client/spoof.py
|
Python
|
apache-2.0
| 1,267
|
import zstackwoodpecker.test_state as ts_header
import os
TestAction = ts_header.TestAction
def path():
    """Return the scripted test path for this snapshot scenario.

    The returned dict drives the woodpecker test runner: it names the
    initial formation template, the checking point, and the ordered list
    of [TestAction, args...] steps to execute (create/attach volumes,
    take VM snapshots/backups, clone, then delete the first snapshot).
    """
    return dict(initial_formation="template5", checking_point=8, path_list=[
        [TestAction.create_vm, 'vm1', ],
        [TestAction.create_volume, 'volume1', 'flag=scsi'],
        [TestAction.attach_volume, 'vm1', 'volume1'],
        [TestAction.create_volume, 'volume2', 'flag=scsi'],
        [TestAction.attach_volume, 'vm1', 'volume2'],
        [TestAction.create_volume, 'volume3', 'flag=scsi'],
        [TestAction.attach_volume, 'vm1', 'volume3'],
        [TestAction.create_vm_snapshot, 'vm1', 'vm1-snapshot1'],
        [TestAction.reboot_vm, 'vm1'],
        [TestAction.detach_volume, 'volume3'],
        [TestAction.create_vm_snapshot, 'vm1', 'vm1-snapshot5'],
        [TestAction.create_vm_backup, 'vm1', 'vm1-backup1'],
        [TestAction.create_vm_snapshot, 'vm1', 'vm1-snapshot8'],
        [TestAction.stop_vm, 'vm1'],
        [TestAction.change_vm_image, 'vm1'],
        [TestAction.attach_volume, 'vm1', 'volume3'],
        [TestAction.clone_vm, 'vm1', 'vm2'],
        [TestAction.delete_vm_snapshot, 'vm1-snapshot1'],
    ])
'''
The final status:
Running:['vm2']
Stopped:['vm1']
Enabled:['vm1-snapshot5', 'volume1-snapshot5', 'volume2-snapshot5', 'vm1-snapshot8', 'volume1-snapshot8', 'volume2-snapshot8', 'vm1-backup1', 'volume1-backup1', 'volume2-backup1']
attached:['volume1', 'volume2', 'volume3']
Detached:[]
Deleted:['vm1-snapshot1', 'volume1-snapshot1', 'volume2-snapshot1', 'volume3-snapshot1']
Expunged:[]
Ha:[]
Group:
vm_snap2:['vm1-snapshot5', 'volume1-snapshot5', 'volume2-snapshot5']---vm1volume1_volume2
vm_snap3:['vm1-snapshot8', 'volume1-snapshot8', 'volume2-snapshot8']---vm1volume1_volume2
vm_backup1:['vm1-backup1', 'volume1-backup1', 'volume2-backup1']---vm1_volume1_volume2
'''
|
zstackio/zstack-woodpecker
|
integrationtest/vm/multihosts/vm_snapshots/paths/xsky_path45.py
|
Python
|
apache-2.0
| 1,722
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Klaudiusz Staniek
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from fiblary.client import Client
logging.basicConfig(
format='%(asctime)-15s %(levelname)s: %(module)s:%(funcName)s'
':%(lineno)d: %(message)s',
level=logging.CRITICAL)
def main():
    """Exercise the fiblary v3 client against a local Fibaro HC2 gateway.

    Fetches gateway info, weather, and login state, then demonstrates the
    devices API: get by id, list by name, and list by id. Results are
    printed to stdout. Requires a reachable gateway at the hard-coded URL.
    """
    hc2 = Client(
        'v3',
        'http://192.168.1.230/api/',
        'admin',
        'admin'
    )
    # Fix: bare "print x" statements are Python-2-only; the parenthesized
    # form below is valid under both Python 2 and 3.
    info = hc2.info.get()
    print(info)
    weather = hc2.weather.get()
    print(weather)
    login = hc2.login.get()
    print(login)
    devices = hc2.devices.get(1)
    print(devices)
    devices = hc2.devices.list(name='Ceiling Lamp')
    print(devices)
    print(type(devices))
    for device in devices:
        print(device.name)
    devices = hc2.devices.list(id=1)
    for device in devices:
        print(device.name)
if __name__ == '__main__':
    main()
|
kstaniek/fiblary
|
examples/info.py
|
Python
|
apache-2.0
| 1,435
|
from nets import vgg
import tensorflow as tf
from preprocessing import vgg_preprocessing
from ..utils.upsampling import bilinear_upsample_weights
slim = tf.contrib.slim
# Mean values for VGG-16
from preprocessing.vgg_preprocessing import _R_MEAN, _G_MEAN, _B_MEAN
def extract_vgg_16_mapping_without_fc8(vgg_16_variables_mapping):
    """Removes the fc8 variable mapping from FCN-32s to VGG-16 model mapping dict.
    Given the FCN-32s to VGG-16 model mapping dict which is returned by FCN_32s()
    function, remove the mapping for the fc8 variable. This is done because this
    variable is responsible for final class prediction and is different for different
    tasks. Last layer usually has different size, depending on the number of classes
    to be predicted. This is why we omit it from the dict and those variables will
    be randomly initialized later.
    Parameters
    ----------
    vgg_16_variables_mapping : dict {string: variable}
        Dict which maps the FCN-32s model's variables to VGG-16 checkpoint variables
        names. Look at FCN-32s() function for more details.
    Returns
    -------
    updated_mapping : dict {string: variable}
        Dict which maps the FCN-32s model's variables to VGG-16 checkpoint variables
        names without fc8 layer mapping.
    """
    # Idiom: a single dict comprehension replaces the original
    # build-a-key-list-then-copy loop; behavior is unchanged.
    return {name: var for name, var in vgg_16_variables_mapping.items()
            if 'fc8' not in name}
def FCN_32s(image_batch_tensor,
            number_of_classes,
            is_training):
    """Returns the FCN-32s model definition.
    The function returns the model definition of a network that was described
    in 'Fully Convolutional Networks for Semantic Segmentation' by Long et al.
    The network subsamples the input by a factor of 32 and uses the bilinear
    upsampling kernel to upsample prediction by a factor of 32. This means that
    if the image size is not of the factor 32, the prediction of different size
    will be delivered. To adapt the network for an any size input use
    adapt_network_for_any_size_input(FCN_32s, 32). Note: the upsampling kernel
    is fixed in this model definition, because it didn't give significant
    improvements according to aforementioned paper.
    NOTE(review): this graph uses pre-TF-1.0 APIs (tf.pack, tf.to_float);
    confirm the pinned TensorFlow version before reuse (tf.pack was renamed
    tf.stack in TF 1.0).
    Parameters
    ----------
    image_batch_tensor : [batch_size, height, width, depth] Tensor
        Tensor specifying input image batch
    number_of_classes : int
        An argument specifying the number of classes to be predicted.
        For example, for PASCAL VOC it is 21.
    is_training : boolean
        An argument specifying if the network is being evaluated or trained.
        It affects the work of underlying dropout layer of VGG-16.
    Returns
    -------
    upsampled_logits : [batch_size, height, width, number_of_classes] Tensor
        Tensor with logits representing predictions for each class.
        Be careful, the output can be of different size compared to input,
        use adapt_network_for_any_size_input to adapt network for any input size.
        Otherwise, the input images sizes should be of multiple 32.
    vgg_16_variables_mapping : dict {string: variable}
        Dict which maps the FCN-32s model's variables to VGG-16 checkpoint variables
        names. We need this to initilize the weights of FCN-32s model with VGG-16 from
        checkpoint file. Look at ipython notebook for examples.
    """
    with tf.variable_scope("fcn_32s") as fcn_32s_scope:
        upsample_factor = 32
        # Convert image to float32 before subtracting the
        # mean pixel value
        image_batch_float = tf.to_float(image_batch_tensor)
        # Subtract the mean pixel value from each pixel
        mean_centered_image_batch = image_batch_float - [_R_MEAN, _G_MEAN, _B_MEAN]
        # Fixed (non-trainable) bilinear kernel for the x32 upsampling
        upsample_filter_np = bilinear_upsample_weights(upsample_factor,
                                                       number_of_classes)
        upsample_filter_tensor = tf.constant(upsample_filter_np)
        # TODO: make pull request to get this custom vgg feature accepted
        # to avoid using custom slim repo.
        with slim.arg_scope(vgg.vgg_arg_scope()):
            logits, end_points = vgg.vgg_16(mean_centered_image_batch,
                                            num_classes=number_of_classes,
                                            is_training=is_training,
                                            spatial_squeeze=False,
                                            fc_conv_padding='SAME')
        downsampled_logits_shape = tf.shape(logits)
        # Calculate the ouput size of the upsampled tensor
        upsampled_logits_shape = tf.pack([
                                          downsampled_logits_shape[0],
                                          downsampled_logits_shape[1] * upsample_factor,
                                          downsampled_logits_shape[2] * upsample_factor,
                                          downsampled_logits_shape[3]
                                         ])
        # Perform the upsampling
        upsampled_logits = tf.nn.conv2d_transpose(logits,
                                                  upsample_filter_tensor,
                                                  output_shape=upsampled_logits_shape,
                                                  strides=[1, upsample_factor, upsample_factor, 1])
        # Map the original vgg-16 variable names
        # to the variables in our model. This is done
        # to make it possible to use assign_from_checkpoint_fn()
        # while providing this mapping.
        # TODO: make it cleaner
        vgg_16_variables_mapping = {}
        vgg_16_variables = slim.get_variables(fcn_32s_scope)
        for variable in vgg_16_variables:
            # Here we remove the part of a name of the variable
            # that is responsible for the current variable scope
            original_vgg_16_checkpoint_string = variable.name[len(fcn_32s_scope.original_name_scope):-2]
            vgg_16_variables_mapping[original_vgg_16_checkpoint_string] = variable
    return upsampled_logits, vgg_16_variables_mapping
|
aakashsinha19/Aspectus
|
Image Segmentation/tf-image-segmentation/tf_image_segmentation/models/fcn_32s.py
|
Python
|
apache-2.0
| 6,382
|
import factory
from zeus import models
from zeus.utils import timezone
from .base import ModelFactory
from .types import GUIDFactory
class UserFactory(ModelFactory):
    """factory_boy factory producing ``models.User`` instances for tests."""
    # Random GUID primary key.
    id = GUIDFactory()
    # Faker-generated email address.
    email = factory.Faker("email")
    # Evaluated lazily at build time so each user gets the current timestamp.
    date_created = factory.LazyAttribute(lambda o: timezone.now())
    # New users are considered active as of their creation time.
    date_active = factory.LazyAttribute(lambda o: o.date_created)
    class Meta:
        model = models.User
|
getsentry/zeus
|
zeus/factories/user.py
|
Python
|
apache-2.0
| 406
|
"""Unit tests for result_form_functional_tests.py
Systems:
- indicators.views.ResultCreate
- bad indicator id 404
- get with good ids gives form
- initial form data is correct
- correct disaggregation values
- form valid returns appropriate response
- form invalid returns appropriate response
- indicators.views.ResultUpdate
- indicators.forms.ResultForm
"""
import datetime
from indicators.views import ResultCreate, ResultUpdate
from indicators.forms import ResultForm
from indicators.models import Indicator, Result
from factories import (
indicators_models as i_factories,
workflow_models as w_factories
)
from django.urls import reverse
from django.http import Http404
from django import test
class TestResultCreateUpdate404(test.TestCase):
    """ResultCreate / ResultUpdate views return 404 for unknown object ids."""
    def setUp(self):
        # Minimal program -> indicator -> result chain, plus a superuser
        # logged in through the Django test client.
        self.program = w_factories.ProgramFactory()
        self.indicator = i_factories.IndicatorFactory(
            program=self.program
        )
        self.result = i_factories.ResultFactory(
            indicator=self.indicator
        )
        self.user = w_factories.UserFactory(first_name="FN", last_name="LN", username="tester", is_superuser=True)
        self.user.set_password('password')
        self.user.save()
        self.tola_user = w_factories.TolaUserFactory(user=self.user)
        self.tola_user.save()
        self.client = test.Client(enforce_csrf_checks=False)
        self.client.login(username='tester', password='password')
    def test_create_view_raises_404_with_bad_indicator_id(self):
        """GET result_add with a nonexistent indicator id returns 404."""
        kwargs = {
            # id + 1 is assumed not to belong to any indicator in the
            # fresh test database
            'indicator': self.indicator.id + 1
        }
        bad_url = reverse('result_add', kwargs=kwargs)
        response = self.client.get(bad_url)
        self.assertEqual(response.status_code, 404)
    def test_update_view_raises_404_with_bad_result_id(self):
        """GET result_update with a nonexistent result pk returns 404."""
        kwargs = {
            'pk': self.result.id + 1
        }
        bad_url = reverse('result_update', kwargs=kwargs)
        response = self.client.get(bad_url)
        self.assertEqual(response.status_code, 404)
class TestUpdateFormInitialValues(test.TestCase):
    """ResultForm pre-populates its fields from the bound Result instance."""
    def setUp(self):
        self.program = w_factories.ProgramFactory()
        self.indicator = i_factories.IndicatorFactory(
            program=self.program,
            target_frequency=Indicator.ANNUAL
        )
        self.result = i_factories.ResultFactory(
            indicator=self.indicator,
        )
        # Set in memory only (not saved); the form reads the instance
        # directly, so no DB round-trip is needed for these attributes.
        self.result.record_name = 'record name'
        self.result.evidence_url = 'evidence url'
        # A result with no evidence fields set, for the no-evidence case.
        self.blank_result = i_factories.ResultFactory(
            indicator=self.indicator
        )
        self.tola_user = w_factories.TolaUserFactory()
        self.user = self.tola_user.user
        # Lightweight stand-in for an HttpRequest carrying only the
        # attributes ResultForm reads.
        self.request = type('Request', (object,), {'has_write_access': True, 'user': self.user})()
    def test_initial_values(self):
        """A form bound to a populated Result exposes that Result's values."""
        form = ResultForm(user=self.user, indicator=self.indicator, program=self.program, instance=self.result, request=self.request)
        self.assertEqual(form['achieved'].value(), self.result.achieved)
        self.assertEqual(form['target_frequency'].value(), Indicator.ANNUAL)
        self.assertEqual(form['indicator'].value(), self.indicator.id)
        self.assertEqual(form['date_collected'].value(), self.result.date_collected)
        self.assertEqual(form['record_name'].value(), 'record name')
        self.assertEqual(form['evidence_url'].value(), 'evidence url')
    def test_initial_values_no_evidence(self):
        """A form bound to a Result without evidence has empty evidence fields."""
        form = ResultForm(user=self.user, indicator=self.indicator, program=self.program, instance=self.blank_result, request=self.request)
        # NOTE(review): this compares against self.result.achieved even though
        # the form is bound to self.blank_result — presumably both factories
        # produce the same default achieved value; confirm intent.
        self.assertEqual(form['achieved'].value(), self.result.achieved)
        self.assertEqual(form['target_frequency'].value(), Indicator.ANNUAL)
        self.assertEqual(form['indicator'].value(), self.indicator.id)
        self.assertEqual(form['record_name'].value(), None)
        self.assertEqual(form['evidence_url'].value(), None)
    def test_create_form_initial_values(self):
        """An unbound (create) form starts with ids set and data fields empty."""
        form = ResultForm(user=self.user, indicator=self.indicator, program=self.program, request=self.request)
        self.assertEqual(form['indicator'].value(), self.indicator.id)
        self.assertEqual(form['program'].value(), self.program.id)
        self.assertEqual(form['achieved'].value(), None)
        self.assertEqual(form['record_name'].value(), None)
        self.assertEqual(form['evidence_url'].value(), None)
class TestCreateValidation(test.TestCase):
    """Validation rules for creating a Result through ResultForm."""
    def setUp(self):
        # Program with a fixed 2016 reporting period so date_collected
        # boundary tests have known limits.
        self.program = w_factories.ProgramFactory(
            reporting_period_start=datetime.date(2016, 1, 1),
            reporting_period_end=datetime.date(2016, 12, 31),
        )
        self.indicator = i_factories.IndicatorFactory(
            program=self.program,
            target_frequency=Indicator.LOP
        )
        self.tola_user = w_factories.TolaUserFactory()
        self.user = self.tola_user.user
        # Lightweight stand-in for an HttpRequest carrying only the
        # attributes ResultForm reads.
        self.request = type('Request', (object,), {'has_write_access': True, 'user': self.user})()
        # Shared keyword arguments for every ResultForm built below.
        self.form_kwargs = {
            'user': self.user,
            'indicator': self.indicator,
            'program': self.program,
            'request': self.request,
        }
    def test_good_data_validates(self):
        """Minimal valid data passes validation and saves a Result."""
        minimal_data = {
            'date_collected': '2016-01-01',
            'achieved': '30',
            'indicator': self.indicator.id,
            'program': self.program.id,
            'rationale': 'this is a rationale'
        }
        form = ResultForm(minimal_data, **self.form_kwargs)
        self.assertTrue(form.is_valid(), "errors {0}".format(form.errors))
        new_result = form.save()
        self.assertIsNotNone(new_result.id)
        db_result = Result.objects.get(pk=new_result.id)
        self.assertEqual(db_result.date_collected, datetime.date(2016, 1, 1))
        self.assertEqual(db_result.achieved, 30)
    def test_good_data_with_evidence_validates(self):
        """Evidence fields (record name + url) are accepted and persisted."""
        minimal_data = {
            'date_collected': '2016-03-31',
            'achieved': '30',
            'indicator': self.indicator.id,
            'program': self.program.id,
            'record_name': 'new record',
            'evidence_url': 'http://google.com',
            'rationale': 'this is a rationale'
        }
        form = ResultForm(minimal_data, **self.form_kwargs)
        self.assertTrue(form.is_valid(), "errors {0}".format(form.errors))
        new_result = form.save()
        self.assertIsNotNone(new_result.id)
        db_result = Result.objects.get(pk=new_result.id)
        self.assertEqual(db_result.record_name, 'new record')
    def test_good_data_updating_evidence_validates(self):
        """Submitting changed evidence values validates and persists them."""
        minimal_data = {
            'date_collected': '2016-03-31',
            'achieved': '30',
            'indicator': self.indicator.id,
            'program': self.program.id,
            'record_name': 'existing record',
            'evidence_url': 'http://google.com',
            'rationale': 'this is a rationale'
        }
        form = ResultForm(minimal_data, **self.form_kwargs)
        self.assertTrue(form.is_valid(), "errors {0}".format(form.errors))
        new_result = form.save()
        self.assertIsNotNone(new_result.id)
        db_result = Result.objects.get(pk=new_result.id)
        self.assertEqual(db_result.record_name, 'existing record')
        self.assertEqual(db_result.evidence_url, 'http://google.com')
    @test.tag('slow')
    def test_adding_record_without_name_passes_validation(self):
        """A record name is optional when an evidence url is supplied."""
        bad_data = {
            'date_collected': '2016-03-31',
            'achieved': '30',
            'indicator': self.indicator.id,
            'program': self.program.id,
            'evidence_url': 'http://google.com',
            'rationale': 'this is a rationale'
        }
        form = ResultForm(bad_data, **self.form_kwargs)
        self.assertTrue(form.is_valid())
    def test_adding_record_without_url_fails_validation(self):
        """A record name without an evidence url is rejected."""
        bad_data = {
            'date_collected': '2016-03-31',
            'achieved': '30',
            'indicator': self.indicator.id,
            'program': self.program.id,
            'record_name': 'new record',
        }
        form = ResultForm(bad_data, **self.form_kwargs)
        self.assertFalse(form.is_valid())
        self.assertIn('evidence_url', form.errors)
    # date_collected validation
    def test_collected_date_before_program_start(self):
        """date_collected before the program reporting period is rejected."""
        minimal_data = {
            'date_collected': '2015-12-31',
            'achieved': '30',
            'indicator': self.indicator.id,
            'program': self.program.id,
        }
        form = ResultForm(minimal_data, **self.form_kwargs)
        self.assertFalse(form.is_valid())
        self.assertIn('date_collected', form.errors)
    def test_collected_date_after_program_end(self):
        """date_collected after the program reporting period is rejected."""
        minimal_data = {
            'date_collected': '2017-1-1',
            'achieved': '30',
            'indicator': self.indicator.id,
            'program': self.program.id,
        }
        form = ResultForm(minimal_data, **self.form_kwargs)
        self.assertFalse(form.is_valid())
        self.assertIn('date_collected', form.errors)
|
mercycorps/TolaActivity
|
indicators/tests/form_tests/result_form_unittests.py
|
Python
|
apache-2.0
| 9,175
|
# Copyright (C) 2015 Universidad Politecnica de Madrid
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib
from keystone.tests import test_v3
from keystone.common import config as common_cfg
from keystone.contrib.two_factor_auth import controllers
from keystone.contrib.two_factor_auth import core
from keystone.openstack.common import log
from keystone import exception
import pyotp
import json
LOG = log.getLogger(__name__)
TWO_FACTOR_USER_URL = '/users/{user_id}'
TWO_FACTOR_BASE_URL = '/OS-TWO-FACTOR'
AUTH_ENDPOINT = '/two_factor_auth'
QUESTION_ENDPOINT = '/sec_question'
DATA_ENDPOINT = '/two_factor_data'
DEVICES_ENDPOINT = '/devices'
TWO_FACTOR_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + AUTH_ENDPOINT
TWO_FACTOR_QUESTION_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + QUESTION_ENDPOINT
TWO_FACTOR_DATA_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + DATA_ENDPOINT
TWO_FACTOR_DEVICES_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + DEVICES_ENDPOINT
class TwoFactorBaseTests(test_v3.RestfulTestCase):
    """Shared fixtures and HTTP helpers for two-factor auth extension tests."""
    EXTENSION_NAME = 'two_factor_auth'
    EXTENSION_TO_ADD = 'two_factor_auth_extension'
    SAMPLE_SECURITY_QUESTION = 'Sample question'
    SAMPLE_SECURITY_ANSWER = 'Sample answer'
    def setUp(self):
        super(TwoFactorBaseTests, self).setUp()
        # Now that the app has been served, we can query CONF values
        self.base_url = 'http://localhost/v3'
        self.controller = controllers.TwoFactorV3Controller()
        self.manager = core.TwoFactorAuthManager()
    def _create_two_factor_key(self, user_id, expected_status=None):
        """POST a new two-factor key with the sample security question/answer."""
        data = self.new_ref()
        data['security_question'] = self.SAMPLE_SECURITY_QUESTION
        data['security_answer'] = self.SAMPLE_SECURITY_ANSWER
        return self.post(
            TWO_FACTOR_URL.format(user_id=user_id),
            body={'two_factor_auth': data},
            expected_status=expected_status
        )
    def _create_two_factor_key_no_data(self, user_id, expected_status=None):
        """POST a new two-factor key without a request body (regeneration)."""
        return self.post(
            TWO_FACTOR_URL.format(user_id=user_id),
            expected_status=expected_status
        )
    def _delete_two_factor_key(self, user_id, expected_status=None):
        """DELETE the user's two-factor key (disables two-factor auth)."""
        return self.delete(TWO_FACTOR_URL.format(user_id=user_id), expected_status=expected_status)
    def _check_is_two_factor_enabled(self, expected_status=None, **kwargs):
        """HEAD the auth endpoint; kwargs become the query string filters."""
        return self.head(
            TWO_FACTOR_BASE_URL + AUTH_ENDPOINT + '?' +urllib.urlencode(kwargs),
            expected_status=expected_status)
    def _check_security_question(self, user_id, sec_answer, expected_status=None):
        """GET the security question endpoint, submitting an answer to verify."""
        body = {
            'two_factor_auth': {
                'security_answer': sec_answer
            }
        }
        return self.get(TWO_FACTOR_QUESTION_URL.format(user_id=user_id),
                        expected_status=expected_status,
                        body=body)
    def _get_two_factor_data(self, user_id, expected_status=None):
        """GET the stored two-factor data for a user."""
        return self.get(TWO_FACTOR_DATA_URL.format(user_id=user_id),
                        expected_status=expected_status)
    def _remember_device(self, user_id, expected_status=None, **kwargs):
        """Register a remembered device, enabling two-factor auth first if needed."""
        try:
            kwargs['user_id'] = user_id
            self.manager.is_two_factor_enabled(user_id=user_id)
        except exception.NotFound:
            # User has no two-factor key yet; create one so the device
            # registration below can succeed.
            self._create_two_factor_key(user_id=user_id)
        return json.loads(self.post(TWO_FACTOR_BASE_URL + DEVICES_ENDPOINT + '?' + urllib.urlencode(kwargs)).body)['two_factor_auth']
    def _check_for_device(self, expected_status=None, **kwargs):
        """HEAD the devices endpoint to check whether a device is remembered."""
        # NOTE(review): `response` is assigned but never used or returned.
        response = self.head(TWO_FACTOR_BASE_URL + DEVICES_ENDPOINT + '?' + urllib.urlencode(kwargs), expected_status=expected_status)
    def _delete_devices(self, user_id, expected_status=None):
        """DELETE all remembered devices for a user."""
        return self.delete(TWO_FACTOR_DEVICES_URL.format(user_id=user_id), expected_status=expected_status)
    def _create_user(self):
        """Create a fresh identity user, preserving the plaintext password."""
        user = self.new_user_ref(domain_id=self.domain_id)
        password = user['password']
        user = self.identity_api.create_user(user)
        # create_user returns the ref without the plaintext password; restore
        # it so tests can authenticate as this user.
        user['password'] = password
        return user
    def _delete_user(self, user_id):
        """DELETE a user via the two-factor user URL."""
        self.delete(TWO_FACTOR_USER_URL.format(user_id=user_id))
class TwoFactorCRUDTests(TwoFactorBaseTests):
    """CRUD behaviour of the two factor key endpoints."""

    def test_two_factor_enable(self):
        """Enabling two factor for an existing user succeeds."""
        self._create_two_factor_key(user_id=self.user_id)

    def test_two_factor_new_code(self):
        """Re-enabling generates a fresh key each time."""
        first = self._create_two_factor_key(user_id=self.user_id)
        second = self._create_two_factor_key(user_id=self.user_id)
        self.assertNotEqual(first, second)

    def test_two_factor_new_code_no_data_right(self):
        """A key can be refreshed without a body once one exists."""
        self._create_two_factor_key(user_id=self.user_id)
        self._create_two_factor_key_no_data(user_id=self.user_id)

    def test_two_factor_new_code_no_data_wrong(self):
        """Creating a key without a body fails (400) when none exists."""
        self._create_two_factor_key_no_data(user_id=self.user_id,
                                            expected_status=400)

    def test_two_factor_disable_after_enabling(self):
        """A key that was created can be deleted."""
        self._create_two_factor_key(user_id=self.user_id)
        self._delete_two_factor_key(user_id=self.user_id)

    def test_two_factor_disable_without_enabling(self):
        """Deleting a key that never existed returns 404."""
        self._delete_two_factor_key(user_id=self.user_id,
                                    expected_status=404)

    def test_two_factor_is_enabled(self):
        """The enabled check succeeds after enabling by user id."""
        self._create_two_factor_key(user_id=self.user_id)
        self._check_is_two_factor_enabled(user_id=self.user_id)

    def test_two_factor_is_enabled_name_and_domain(self):
        """The enabled check also works by user name plus domain."""
        self._create_two_factor_key(user_id=self.user_id)
        self._check_is_two_factor_enabled(
            user_name=self.user['name'],
            domain_id=self.user['domain_id'])

    def test_two_factor_is_disabled(self):
        """The enabled check returns 404 when two factor is off."""
        self._check_is_two_factor_enabled(user_id=self.user_id,
                                          expected_status=404)

    def test_two_factor_is_disabled_name_and_domain(self):
        """Name-and-domain check returns 404 when two factor is off."""
        self._check_is_two_factor_enabled(
            user_name=self.user['name'],
            domain_id=self.user['domain_id'],
            expected_status=404)

    def test_two_factor_check_no_params(self):
        """The enabled check without any identification returns 400."""
        self._check_is_two_factor_enabled(expected_status=400)

    def test_two_factor_check_no_domain(self):
        """A user name without its domain is not enough (400)."""
        self._check_is_two_factor_enabled(
            user_name=self.user['name'],
            expected_status=400)

    def test_two_factor_check_no_username(self):
        """A domain without a user name is not enough (400)."""
        self._check_is_two_factor_enabled(
            domain_id=self.user['domain_id'],
            expected_status=400)

    def test_two_factor_is_enabled_after_deleting(self):
        """Deleting the key flips the enabled check back to 404."""
        self._create_two_factor_key(user_id=self.user_id)
        self._check_is_two_factor_enabled(user_id=self.user_id)
        self._delete_two_factor_key(user_id=self.user_id)
        self._check_is_two_factor_enabled(user_id=self.user_id,
                                          expected_status=404)

    def test_two_factor_create_key_for_nonexistent_user(self):
        """Enabling two factor for a missing user returns 404."""
        self._create_two_factor_key(user_id='nonexistent_user',
                                    expected_status=404)

    def test_two_factor_delete_user(self):
        """Deleting a user removes their two factor key as well."""
        user = self._create_user()
        self._create_two_factor_key(user_id=user['id'])
        self._check_is_two_factor_enabled(user_id=user['id'])
        self._delete_user(user['id'])
        self._check_is_two_factor_enabled(user_id=user['id'],
                                          expected_status=404)
class TwoFactorSecQuestionTests(TwoFactorBaseTests):
    """Security question retrieval and verification."""

    def test_security_question_get(self):
        """The stored security question is returned with the data."""
        self._create_two_factor_key(user_id=self.user_id)
        data = self._get_two_factor_data(user_id=self.user_id)
        question = data.result['two_factor_auth']['security_question']
        self.assertEqual(question, self.SAMPLE_SECURITY_QUESTION)

    def test_security_question_correct(self):
        """The right answer passes the security check."""
        self._create_two_factor_key(user_id=self.user_id)
        self._check_security_question(
            user_id=self.user_id,
            sec_answer=self.SAMPLE_SECURITY_ANSWER)

    def test_security_question_wrong(self):
        """A wrong answer is rejected with 401."""
        self._create_two_factor_key(user_id=self.user_id)
        self._check_security_question(user_id=self.user_id,
                                      sec_answer='Wrong answer',
                                      expected_status=401)

    def test_security_question_nonexistent(self):
        """Checking the question is 404 when two factor is disabled."""
        self._check_security_question(user_id=self.user_id,
                                      sec_answer='Does not matter',
                                      expected_status=404)
class TwoFactorDevicesCRUDTests(TwoFactorBaseTests):
    """Remembered-device lifecycle: create, check, rotate, delete."""

    def test_remember_device(self):
        """A device can be remembered by user id."""
        self._remember_device(user_id=self.user_id)

    def test_remember_device_name_and_domain(self):
        """A device can be remembered by user name plus domain."""
        self._remember_device(user_id=self.user_id,
                              user_name=self.user['name'],
                              domain_id=self.user['domain_id'])

    def test_device_right_data(self):
        """A remembered device is found with its id and token."""
        device = self._remember_device(user_id=self.user_id)
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token=device['device_token'])

    def test_device_right_data_name_and_domain(self):
        """Device lookup also works by user name plus domain."""
        device = self._remember_device(user_id=self.user_id,
                                       user_name=self.user['name'],
                                       domain_id=self.user['domain_id'])
        self._check_for_device(user_name=self.user['name'],
                               domain_id=self.user['domain_id'],
                               device_id=device['device_id'],
                               device_token=device['device_token'])

    def test_device_updates_token(self):
        """Re-remembering keeps the device id but rotates the token."""
        device = self._remember_device(user_id=self.user_id)
        renewed = self._remember_device(user_id=self.user_id,
                                        device_id=device['device_id'],
                                        device_token=device['device_token'])
        self.assertEqual(renewed['device_id'], device['device_id'])
        self.assertNotEqual(renewed['device_token'], device['device_token'])

    def test_device_wrong_user(self):
        """Another user's id does not match the device (404)."""
        other = self._create_user()
        device = self._remember_device(user_id=self.user_id)
        self._check_for_device(user_id=other['id'],
                               device_id=device['device_id'],
                               device_token=device['device_token'],
                               expected_status=404)

    def test_device_wrong_device(self):
        """An unknown device id is not found (404)."""
        device = self._remember_device(user_id=self.user_id)
        self._check_for_device(user_id=self.user_id,
                               device_id='just_another_device',
                               device_token=device['device_token'],
                               expected_status=404)

    def test_device_fake_token(self):
        """A token that never existed is not found (404)."""
        device = self._remember_device(user_id=self.user_id)
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token='fake_token',
                               expected_status=404)

    def test_device_old_token(self):
        """A superseded token is rejected with 403."""
        device = self._remember_device(user_id=self.user_id)
        self._remember_device(user_id=self.user_id,
                              device_id=device['device_id'],
                              device_token=device['device_token'])
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token=device['device_token'],
                               expected_status=403)

    def test_device_delete_all(self):
        """Deleting all devices makes lookups return 404."""
        device = self._remember_device(user_id=self.user_id)
        self._delete_devices(user_id=self.user_id)
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token=device['device_token'],
                               expected_status=404)

    def test_device_does_not_delete_all_devices_when_fake_token(self):
        """A fake token does not wipe the valid device."""
        device = self._remember_device(user_id=self.user_id)
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token='fake_token',
                               expected_status=404)
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token=device['device_token'])

    def test_device_deletes_all_devices_when_old_token(self):
        """Presenting an old token invalidates every device."""
        device = self._remember_device(user_id=self.user_id)
        renewed = self._remember_device(user_id=self.user_id,
                                        device_id=device['device_id'],
                                        device_token=device['device_token'])
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token=device['device_token'],
                               expected_status=403)
        self._check_for_device(user_id=self.user_id,
                               device_id=renewed['device_id'],
                               device_token=renewed['device_token'],
                               expected_status=404)

    def test_device_delete_user(self):
        """Deleting the user removes their remembered devices."""
        other = self._create_user()
        device = self._remember_device(user_id=other['id'])
        self._delete_user(other['id'])
        self._check_for_device(user_id=other['id'],
                               device_id=device['device_id'],
                               device_token=device['device_token'],
                               expected_status=404)

    def test_device_disable_two_factor(self):
        """Disabling two factor removes remembered devices too."""
        device = self._remember_device(user_id=self.user_id)
        self._delete_two_factor_key(user_id=self.user_id)
        self._check_for_device(user_id=self.user_id,
                               device_id=device['device_id'],
                               device_token=device['device_token'],
                               expected_status=404)
class TwoFactorAuthTests(TwoFactorBaseTests):
    """Authentication flows through the two factor password plugin."""

    def auth_plugin_config_override(self, methods=None, **method_classes):
        """Install the two factor plugin as the password auth method."""
        if methods is None:
            methods = ['external', 'password', 'token', 'oauth1', 'saml2',
                       'oauth2']
        if not method_classes:
            method_classes = {
                'external': 'keystone.auth.plugins.external.DefaultDomain',
                'password': 'keystone.auth.plugins.two_factor.TwoFactor',
                'token': 'keystone.auth.plugins.token.Token',
                'oauth1': 'keystone.auth.plugins.oauth1.OAuth',
                'saml2': 'keystone.auth.plugins.saml2.Saml2',
                'oauth2': 'keystone.auth.plugins.oauth2.OAuth2',
            }
        self.config_fixture.config(group='auth', methods=methods)
        common_cfg.setup_authentication()
        if method_classes:
            self.config_fixture.config(group='auth', **method_classes)

    def _auth_body(self, **kwargs):
        """Build a v3 password-auth request body from the given fields."""
        user = {}
        if 'user_id' in kwargs:
            user['id'] = kwargs['user_id']
        if 'password' in kwargs:
            user['password'] = kwargs['password']
        if 'user_name' in kwargs:
            user['name'] = kwargs['user_name']
        if 'domain_id' in kwargs:
            user['domain'] = {'id': kwargs['domain_id']}
        if 'verification_code' in kwargs:
            user['verification_code'] = kwargs['verification_code']
        if 'device_data' in kwargs:
            user['device_data'] = kwargs['device_data']
        return {
            'auth': {
                'identity': {
                    'methods': ['password'],
                    'password': {'user': user},
                }
            }
        }

    def _authenticate(self, auth_body, expected_status=201):
        """POST the auth request and return the token response."""
        return self.post('/auth/tokens', body=auth_body,
                         expected_status=expected_status, noauth=True)

    def _get_current_code(self, user_id):
        """Compute the TOTP code currently valid for *user_id*."""
        info = self.manager.get_two_factor_info(user_id)
        return pyotp.TOTP(info.two_factor_key).now()

    def test_auth_correct(self):
        """Password plus a valid TOTP code authenticates."""
        self._create_two_factor_key(user_id=self.user_id)
        body = self._auth_body(
            user_id=self.user_id,
            password=self.user['password'],
            verification_code=self._get_current_code(self.user_id))
        self._authenticate(auth_body=body)

    def test_auth_correct_two_factor_disabled(self):
        """Password alone is enough when two factor is disabled."""
        body = self._auth_body(user_id=self.user_id,
                               password=self.user['password'])
        self._authenticate(auth_body=body)

    def test_auth_correct_name_and_domain(self):
        """Name-and-domain auth works with a valid code."""
        self._create_two_factor_key(user_id=self.user_id)
        body = self._auth_body(
            user_name=self.user['name'],
            domain_id=self.user['domain_id'],
            verification_code=self._get_current_code(self.user_id),
            password=self.user['password'])
        self._authenticate(auth_body=body)

    def test_auth_correct_two_factor_disabled_name_and_domain(self):
        """Name-and-domain auth needs no code when disabled."""
        body = self._auth_body(user_name=self.user['name'],
                               domain_id=self.user['domain_id'],
                               password=self.user['password'])
        self._authenticate(auth_body=body)

    def test_auth_no_code(self):
        """Missing verification code is a 400 when two factor is on."""
        self._create_two_factor_key(user_id=self.user_id)
        body = self._auth_body(user_id=self.user_id,
                               password=self.user['password'])
        self._authenticate(auth_body=body, expected_status=400)

    def test_auth_wrong_code(self):
        """A wrong verification code is rejected with 401."""
        self._create_two_factor_key(user_id=self.user_id)
        body = self._auth_body(user_id=self.user_id,
                               verification_code='123456',
                               password=self.user['password'])
        self._authenticate(auth_body=body, expected_status=401)

    def test_auth_right_device_data(self):
        """Valid remembered-device data replaces the TOTP code."""
        self._create_two_factor_key(user_id=self.user_id)
        device = self.manager.remember_device(user_id=self.user_id)
        body = self._auth_body(user_id=self.user_id,
                               device_data=device,
                               password=self.user['password'])
        self._authenticate(auth_body=body)

    def test_auth_device_data_from_another_user(self):
        """Device data belonging to another user is rejected (401)."""
        other = self._create_user()
        self._create_two_factor_key(user_id=other['id'])
        self._create_two_factor_key(user_id=self.user_id)
        # register a device for the primary user too (side effect only)
        self.manager.remember_device(user_id=self.user_id)
        other_device = self.manager.remember_device(user_id=other['id'])
        body = self._auth_body(user_id=self.user_id,
                               device_data=other_device,
                               password=self.user['password'])
        self._authenticate(auth_body=body, expected_status=401)
|
ging/keystone
|
keystone/tests/test_v3_two_factor_auth.py
|
Python
|
apache-2.0
| 19,566
|
import threading
from mock import patch
from uuid import uuid4
from changes_lxc_wrapper.cli.wrapper import WrapperCommand
def generate_jobstep_data():
    """Return a minimal jobstep payload that drives a full wrapper run.

    The dataset must be *valid*: a queued status plus a snapshot id so
    the command proceeds all the way through.
    """
    snapshot_id = 'a1028849e8cf4ff0a7d7fdfe3c4fe925'
    return {
        'status': {'id': 'queued'},
        'data': {},
        'expectedSnapshot': None,
        'snapshot': {'id': snapshot_id},
    }
def setup_function(function):
    """Pytest per-test setup: verify only the main thread is alive."""
    live_threads = threading.active_count()
    assert live_threads == 1
def teardown_function(function):
    """Pytest per-test teardown: verify no test left stray threads."""
    live_threads = threading.active_count()
    assert live_threads == 1
@patch.object(WrapperCommand, 'run_build_script')
def test_local_run(mock_run):
    """A bare command line (no jobstep) runs the script with defaults."""
    command = WrapperCommand(['--', 'echo 1'])
    command.run()
    expected = dict(
        release='precise',
        post_launch=None,
        snapshot=None,
        save_snapshot=False,
        s3_bucket=None,
        pre_launch=None,
        validate=True,
        user='ubuntu',
        cmd=['echo 1'],
        script=None,
        flush_cache=False,
        clean=False,
        keep=False,
    )
    mock_run.assert_called_once_with(**expected)
@patch('changes_lxc_wrapper.cli.wrapper.ChangesApi')
@patch.object(WrapperCommand, 'run_build_script')
def test_remote_run(mock_run, mock_api_cls):
    """Fetching a queued jobstep triggers a run wired to changes-client."""
    step_id = uuid4()
    mock_api_cls.return_value.get_jobstep.return_value = \
        generate_jobstep_data()
    command = WrapperCommand([
        '--jobstep-id', step_id.hex,
        '--api-url', 'http://changes.example.com',
    ])
    command.run()
    mock_run.assert_called_once_with(
        release='precise',
        post_launch=None,
        snapshot='a1028849-e8cf-4ff0-a7d7-fdfe3c4fe925',
        save_snapshot=False,
        s3_bucket=None,
        pre_launch=None,
        validate=True,
        user='ubuntu',
        cmd=['changes-client',
             '--server', 'http://changes.example.com',
             '--jobstep_id', step_id.hex],
        flush_cache=False,
        clean=False,
        keep=False,
    )
@patch('changes_lxc_wrapper.cli.wrapper.ChangesApi')
@patch.object(WrapperCommand, 'run_build_script')
def test_already_finished_job(mock_run, mock_api_cls):
    """A jobstep already marked finished must not be run again."""
    step_id = uuid4()
    payload = generate_jobstep_data()
    payload['status']['id'] = 'finished'
    mock_api_cls.return_value.get_jobstep.return_value = payload
    command = WrapperCommand([
        '--jobstep-id', step_id.hex,
        '--api-url', 'http://changes.example.com',
    ])
    command.run()
    assert mock_run.called is False
@patch('changes_lxc_wrapper.cli.wrapper.ChangesApi')
@patch.object(WrapperCommand, 'run_build_script')
def test_non_default_release(mock_run, mock_api_cls):
    """A release stored on the jobstep overrides the default."""
    step_id = uuid4()
    payload = generate_jobstep_data()
    payload['data']['release'] = 'fakerelease'
    mock_api_cls.return_value.get_jobstep.return_value = payload
    command = WrapperCommand([
        '--jobstep-id', step_id.hex,
        '--api-url', 'http://changes.example.com',
    ])
    command.run()
    mock_run.assert_called_once_with(
        release='fakerelease',
        post_launch=None,
        snapshot='a1028849-e8cf-4ff0-a7d7-fdfe3c4fe925',
        save_snapshot=False,
        s3_bucket=None,
        pre_launch=None,
        validate=True,
        user='ubuntu',
        cmd=['changes-client',
             '--server', 'http://changes.example.com',
             '--jobstep_id', step_id.hex],
        flush_cache=False,
        clean=False,
        keep=False,
    )
|
dropbox/changes-lxc-wrapper
|
tests/cli/test_wrapper.py
|
Python
|
apache-2.0
| 3,544
|