Dataset schema (one record per file):

| column | dtype | range / classes |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 | 1 – 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 | 1 – 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 | 1 – 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
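The records below pair this per-file metadata with the full file contents. As a rough illustration only (none of this code is part of the dataset itself), the sketch below assumes the records have been exported to a Parquet file with the hypothetical name `code_files.parquet` and filters them with pandas; the column names come from the schema above.

```python
import pandas as pd

# Hypothetical export of the records shown below; the filename is an assumption.
df = pd.read_parquet("code_files.parquet")

# Keep permissively licensed Python files from repositories with at least 10 stars.
permissive = {"MIT", "Apache-2.0", "CC0-1.0"}
mask = (
    (df["ext"] == "py")
    & (df["max_stars_count"].fillna(0) >= 10)
    & df["max_stars_repo_licenses"].apply(lambda ls: bool(permissive.intersection(ls)))
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size", "alphanum_fraction"]])
```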
hexsha: c3648f9f468a7cb72ab329dd30cb38afa148658b | size: 3,610 | ext: py | lang: Python
max_stars_repo: Backend/apps/user_operation/migrations/0001_initial.py in BeanWei/Dailyfresh-B2C @ 7c94e9a4428e5116c91bf27cf696e6eee430748a | licenses: ["Apache-2.0"] | stars: 84 (2019-02-22T08:19:52.000Z – 2022-02-08T03:36:32.000Z)
max_issues_repo: Backend/apps/user_operation/migrations/0001_initial.py in GinCho-Max/Dailyfresh-B2C @ 7c94e9a4428e5116c91bf27cf696e6eee430748a | licenses: ["Apache-2.0"] | issues: 16 (2019-09-06T10:25:40.000Z – 2022-02-12T06:37:41.000Z)
max_forks_repo: Backend/apps/user_operation/migrations/0001_initial.py in GinCho-Max/Dailyfresh-B2C @ 7c94e9a4428e5116c91bf27cf696e6eee430748a | licenses: ["Apache-2.0"] | forks: 61 (2019-03-20T02:29:23.000Z – 2021-07-09T08:14:25.000Z)
content:
# Generated by Django 2.0.2 on 2019-02-20 10:10

import datetime

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('goods', '__first__'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserAddress',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('province', models.CharField(default='', max_length=100, verbose_name='省份')),
                ('city', models.CharField(default='', max_length=100, verbose_name='城市')),
                ('district', models.CharField(default='', max_length=100, verbose_name='区域')),
                ('address', models.CharField(default='', max_length=100, verbose_name='详细地址')),
                ('signer_name', models.CharField(default='', max_length=100, verbose_name='签收人')),
                ('signer_mobile', models.CharField(default='', max_length=11, verbose_name='电话')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
                ('is_default', models.BooleanField(default=False, verbose_name='默认地址')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
            ],
            options={
                'verbose_name': '收货地址',
                'verbose_name_plural': '收货地址',
            },
        ),
        migrations.CreateModel(
            name='UserFav',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
                ('goods', models.ForeignKey(help_text='商品id', on_delete=django.db.models.deletion.CASCADE, to='goods.Goods', verbose_name='商品')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
            ],
            options={
                'verbose_name': '用户收藏',
                'verbose_name_plural': '用户收藏',
            },
        ),
        migrations.CreateModel(
            name='UserLeavingMessage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('message_type', models.IntegerField(choices=[(1, '留言'), (2, '投诉'), (3, '询问'), (4, '售后'), (5, '求购')], default=1, help_text='留言类型: 1(留言),2(投诉),3(询问),4(售后),5(求购)', verbose_name='留言类型')),
                ('subject', models.CharField(default='', max_length=100, verbose_name='主题')),
                ('message', models.TextField(default='', help_text='留言内容', verbose_name='留言内容')),
                ('file', models.FileField(help_text='上传的文件', upload_to='message/images/', verbose_name='上传的文件')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户')),
            ],
            options={
                'verbose_name': '用户留言',
                'verbose_name_plural': '用户留言',
            },
        ),
        migrations.AlterUniqueTogether(
            name='userfav',
            unique_together={('user', 'goods')},
        ),
    ]
avg_line_length: 50.138889 | max_line_length: 200 | alphanum_fraction: 0.586981

hexsha: 29760fe775460d88cc248cbca2ffd1316f6549a1 | size: 1,304 | ext: py | lang: Python
max_stars_repo: tests/cell_test.py in tmcclintock/PyDonJuan @ ab6d567b568c3e0dd976b10c2628ad99ca81b953 | licenses: ["CC0-1.0"] | stars: 2 (2020-12-14T20:50:57.000Z – 2021-05-26T04:32:24.000Z)
max_issues_repo: tests/cell_test.py in tmcclintock/PyDonJuan @ ab6d567b568c3e0dd976b10c2628ad99ca81b953 | licenses: ["CC0-1.0"] | issues: 29 (2020-12-18T15:56:14.000Z – 2021-01-12T01:17:48.000Z)
max_forks_repo: tests/cell_test.py in tmcclintock/donjuan @ ab6d567b568c3e0dd976b10c2628ad99ca81b953 | licenses: ["CC0-1.0"] | forks: null
content:
from unittest import TestCase

from donjuan import Edge, HexCell, SquareCell


class SquareCellTest(TestCase):
    def test_smoke(self):
        c = SquareCell()
        assert c is not None

    def test_space(self):
        c = SquareCell()
        assert c.space is None

    def test_edges(self):
        c = SquareCell()
        assert c.edges == [None] * c.n_sides
        edges = [Edge() for i in range(4)]
        c.set_edges(edges)
        assert c.edges is edges

    def test_filled(self):
        c = SquareCell()
        assert not c.filled
        c.filled = True
        assert c.filled

    def test_n_sides(self):
        c = SquareCell()
        assert c.n_sides == 4

    def test_coordinates(self):
        c = SquareCell()
        assert c.coordinates == (0, 0)
        c.set_coordinates(1, 2)
        assert c.coordinates == (1, 2)
        assert c.x == 2
        assert c.y == 1


class HexCellTest(TestCase):
    def test_smoke(self):
        c = HexCell()
        assert c is not None

    def test_n_sides(self):
        c = HexCell()
        assert c.n_sides == 6

    def test_coordinates(self):
        c = HexCell()
        assert c.coordinates == (0, 0)
        c.set_coordinates(1, 2)
        assert c.coordinates == (1, 2)
        assert c.x == 2
        assert c.y == 1
avg_line_length: 22.877193 | max_line_length: 45 | alphanum_fraction: 0.556748

hexsha: 9b489a046e7a04faadb5fe79ebfd7d32171bdbdb | size: 876 | ext: py | lang: Python
max_stars_repo: modules/dogpicture.py in AudioVisuaali/audiobot @ 324bb0988a8ce2b805199c363f385d5809126642 | licenses: ["MIT"] | stars: 3 (2018-01-27T22:58:47.000Z – 2018-02-01T21:06:56.000Z)
max_issues_repo: modules/dogpicture.py in AudioVisuaali/audiobot @ 324bb0988a8ce2b805199c363f385d5809126642 | licenses: ["MIT"] | issues: null
max_forks_repo: modules/dogpicture.py in AudioVisuaali/audiobot @ 324bb0988a8ce2b805199c363f385d5809126642 | licenses: ["MIT"] | forks: null
content:
from urllib.request import urlretrieve
from urllib.request import urlopen
from urllib.parse import quote
from audiovisuaali import send
from os import remove as osremove


# random_dog (gets a random dog)
async def random_dog(message, client, arguments):
    # Getting a picture, not a video or gif: retry until the URL ends in an image extension
    response = ""
    while True:
        response = str(urlopen("https://random.dog/woof").read())[2:-1]
        if response[-3:] in ("jpg", "png"):
            break
    urlretrieve("https://random.dog/{}".format(quote(response)), "./download/dogs/"+response)
    # Sending picture
    await client.send_file(message.channel, "./download/dogs/"+response, filename="./download/dogs/"+response, content="<@"+message.author.id+"> ", tts=False)
    osremove("./download/dogs/"+response)
    send(1, "Top Dog received!")
    return
avg_line_length: 36.5 | max_line_length: 158 | alphanum_fraction: 0.652968

hexsha: 5399da9601a24e98b2bd6efa56d67b567f1f947d | size: 4,028 | ext: py | lang: Python
max_stars_repo: stackdio/api/cloud/migrations/0004_0_8_0_migrations.py in hdmillerdr/stackdio @ 84be621705031d147e104369399b872d5093ef64 | licenses: ["Apache-2.0"] | stars: 9 (2015-12-18T22:44:55.000Z – 2022-02-07T19:34:44.000Z)
max_issues_repo: stackdio/api/cloud/migrations/0004_0_8_0_migrations.py in hdmillerdr/stackdio @ 84be621705031d147e104369399b872d5093ef64 | licenses: ["Apache-2.0"] | issues: 77 (2015-01-12T17:49:38.000Z – 2017-02-24T17:57:46.000Z)
max_forks_repo: stackdio/api/cloud/migrations/0004_0_8_0_migrations.py in hdmillerdr/stackdio @ 84be621705031d147e104369399b872d5093ef64 | licenses: ["Apache-2.0"] | forks: 11 (2015-01-23T15:50:19.000Z – 2022-02-07T19:34:45.000Z)
content:
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-08 18:19
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields


class Migration(migrations.Migration):

    dependencies = [
        ('cloud', '0003_0_8_initial_data'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cloudaccount',
            name='account_id',
            field=models.CharField(default='INVALID_ACCOUNT_ID', max_length=64, verbose_name='Account ID'),
        ),
        migrations.RemoveField(
            model_name='cloudaccount',
            name='account_id',
        ),
        migrations.AlterField(
            model_name='snapshot',
            name='size_in_gb',
            field=models.IntegerField(default=1),
        ),
        migrations.RemoveField(
            model_name='snapshot',
            name='size_in_gb',
        ),
        migrations.AlterField(
            model_name='cloudaccount',
            name='created',
            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
        ),
        migrations.AlterField(
            model_name='cloudaccount',
            name='modified',
            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
        ),
        migrations.AlterField(
            model_name='cloudimage',
            name='created',
            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
        ),
        migrations.AlterField(
            model_name='cloudimage',
            name='modified',
            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
        ),
        migrations.AlterField(
            model_name='securitygroup',
            name='created',
            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
        ),
        migrations.AlterField(
            model_name='securitygroup',
            name='modified',
            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
        ),
        migrations.AlterField(
            model_name='snapshot',
            name='created',
            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created'),
        ),
        migrations.AlterField(
            model_name='snapshot',
            name='modified',
            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified'),
        ),
        migrations.AlterField(
            model_name='cloudinstancesize',
            name='provider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='instance_sizes', to='cloud.CloudProvider', verbose_name='Cloud Provider'),
        ),
        migrations.AlterField(
            model_name='cloudregion',
            name='provider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='regions', to='cloud.CloudProvider', verbose_name='Cloud Provider'),
        ),
        migrations.AlterField(
            model_name='cloudzone',
            name='region',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='zones', to='cloud.CloudRegion', verbose_name='Cloud Region'),
        ),
        migrations.AlterField(
            model_name='snapshot',
            name='filesystem_type',
            field=models.CharField(choices=[('ext2', 'ext2'), ('ext3', 'ext3'), ('ext4', 'ext4'), ('fuse', 'fuse'), ('xfs', 'xfs')], max_length=16, verbose_name='Filesystem Type'),
        ),
        migrations.AlterField(
            model_name='snapshot',
            name='snapshot_id',
            field=models.CharField(max_length=32, verbose_name='Snapshot ID'),
        ),
    ]
avg_line_length: 39.881188 | max_line_length: 180 | alphanum_fraction: 0.617925

hexsha: f0f3ddb70a4885e894823323be49cd768cc9bb33 | size: 397 | ext: py | lang: Python
max_stars_repo: WebCrawler/wsgi.py in akum001/myUrlCrawler @ bd43a95264b507be002de6098778270cf5602067 | licenses: ["MIT"] | stars: null
max_issues_repo: WebCrawler/wsgi.py in akum001/myUrlCrawler @ bd43a95264b507be002de6098778270cf5602067 | licenses: ["MIT"] | issues: 9 (2020-02-12T00:00:15.000Z – 2022-03-11T23:43:45.000Z)
max_forks_repo: WebCrawler/wsgi.py in akum001/myUrlCrawler @ bd43a95264b507be002de6098778270cf5602067 | licenses: ["MIT"] | forks: null
content:
"""
WSGI config for WebCrawler project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'WebCrawler.settings')
application = get_wsgi_application()
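The module above only defines the `application` callable. As a rough sketch of how it gets used (not part of the file, and assuming Django and the `WebCrawler` package are importable), the standard library's wsgiref server can serve that callable locally:

```python
# Minimal local-serving sketch; wsgiref is in the standard library, but importing
# WebCrawler.wsgi assumes the project package and Django are on the Python path.
from wsgiref.simple_server import make_server

from WebCrawler.wsgi import application

with make_server("127.0.0.1", 8000, application) as httpd:
    httpd.serve_forever()
```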
avg_line_length: 23.352941 | max_line_length: 78 | alphanum_fraction: 0.788413

hexsha: 9688045ea6cd36c514155fa67a0c4ea45964c361 | size: 607 | ext: py | lang: Python
max_stars_repo: load_tests.py in lucashtm/django-old-records @ fd3df4f33292226110fda1e0a8121428f4eb4018 | licenses: ["MIT"] | stars: null
max_issues_repo: load_tests.py in lucashtm/django-old-records @ fd3df4f33292226110fda1e0a8121428f4eb4018 | licenses: ["MIT"] | issues: null
max_forks_repo: load_tests.py in lucashtm/django-old-records @ fd3df4f33292226110fda1e0a8121428f4eb4018 | licenses: ["MIT"] | forks: null
content:
#!/usr/bin/env python
import sys
from unittest import TestSuite

from boot_django import boot_django

boot_django()

default_labels = ["django_old_records.tests", ]


def get_suite(labels=default_labels):
    from django.test.runner import DiscoverRunner
    runner = DiscoverRunner(verbosity=1)
    failures = runner.run_tests(labels)
    if failures:
        sys.exit(failures)
    # In case this is called from setuptools, return a test suite
    return TestSuite()


if __name__ == "__main__":
    labels = default_labels
    if len(sys.argv[1:]) > 0:
        labels = sys.argv[1:]
    get_suite(labels)
avg_line_length: 23.346154 | max_line_length: 65 | alphanum_fraction: 0.710049

hexsha: a76390403d682bde1555dde80cb5926c97d1fccc | size: 5,504 | ext: py | lang: Python
max_stars_repo: app/fedgraphnn/moleculenet_graph_reg/data/datasets.py in ray-ruisun/FedML @ 24ff30d636bb70f64e94e9ca205375033597d3dd | licenses: ["Apache-2.0"] | stars: null
max_issues_repo: app/fedgraphnn/moleculenet_graph_reg/data/datasets.py in ray-ruisun/FedML @ 24ff30d636bb70f64e94e9ca205375033597d3dd | licenses: ["Apache-2.0"] | issues: null
max_forks_repo: app/fedgraphnn/moleculenet_graph_reg/data/datasets.py in ray-ruisun/FedML @ 24ff30d636bb70f64e94e9ca205375033597d3dd | licenses: ["Apache-2.0"] | forks: null
content:
import logging
import numpy as np
import scipy.sparse  # dok_matrix / load_npz are used below
import pickle
import gzip
import os
from tqdm import tqdm

import torch.utils.data as data


# From GTTF, need to cite once paper is officially accepted to ICLR 2021
class CompactAdjacency:
    def __init__(self, adj, precomputed=None, subset=None):
        """Constructs CompactAdjacency.

        Args:
            adj: scipy sparse matrix containing full adjacency.
            precomputed: If given, must be a tuple (compact_adj, degrees).
                In this case, adj must be None. If supplied, subset will be ignored.
        """
        if adj is None:
            return
        if precomputed:
            if adj is not None:
                raise ValueError("Both adj and precomputed are set.")
            if subset is not None:
                logging.info(
                    "WARNING: subset is provided. It is ignored, since precomputed is supplied."
                )
            self.compact_adj, self.degrees = precomputed
            self.num_nodes = len(self.degrees)
        else:
            self.adj = adj
            self.num_nodes = (
                len(self.adj) if isinstance(self.adj, dict) else self.adj.shape[0]
            )
            self.compact_adj = scipy.sparse.dok_matrix(
                (self.num_nodes, self.num_nodes), dtype="int32"
            )
            self.degrees = np.zeros(shape=[self.num_nodes], dtype="int32")
            self.node_set = set(subset) if subset is not None else None
            for v in range(self.num_nodes):
                if isinstance(self.adj, dict) and self.node_set is not None:
                    connection_ids = np.array(
                        list(self.adj[v].intersection(self.node_set))
                    )
                elif isinstance(self.adj, dict) and self.node_set is None:
                    connection_ids = np.array(list(self.adj[v]))
                else:
                    connection_ids = self.adj[v].nonzero()[1]
                self.degrees[v] = len(connection_ids)
                self.compact_adj[
                    v, np.arange(len(connection_ids), dtype="int32")
                ] = connection_ids
            self.compact_adj = self.compact_adj.tocsr()

    @staticmethod
    def from_file(filename):
        instance = CompactAdjacency(None, None)
        data = pickle.load(gzip.open(filename, "rb"))
        instance.compact_adj = data["compact_adj"]
        instance.adj = data["adj"]
        instance.degrees = data["degrees"] if "degrees" in data else data["lengths"]
        instance.num_nodes = data["num_nodes"]
        return instance

    @staticmethod
    def from_directory(directory):
        instance = CompactAdjacency(None, None)
        instance.degrees = np.load(os.path.join(directory, "degrees.npy"))
        instance.compact_adj = scipy.sparse.load_npz(
            os.path.join(directory, "cadj.npz")
        )
        logging.info("\n\ncompact_adj.py from_directory\n\n")
        # Make adj from cadj and save to adj.npz
        import IPython
        IPython.embed()
        instance.adj = scipy.sparse.load_npz(os.path.join(directory, "adj.npz"))
        instance.num_nodes = instance.adj.shape[0]
        return instance

    def save(self, filename):
        with gzip.open(filename, "wb") as fout:
            pickle.dump(
                {
                    "compact_adj": self.compact_adj,
                    "adj": self.adj,
                    "degrees": self.degrees,
                    "num_nodes": self.num_nodes,
                },
                fout,
            )

    def neighbors_of(self, node):
        neighbors = self.compact_adj[node, : self.degrees[node]].todense()
        return np.array(neighbors)[0]


class MoleculesDataset(data.Dataset):
    def __init__(
        self,
        adj_matrices,
        feature_matrices,
        labels,
        path,
        compact=True,
        fanouts=[2, 2],
        split="train",
    ):
        if compact:
            # filename = path + '/train_comp_adjs.pkl'
            # if split == 'val':
            #     filename = path + '/val_comp_adjs.pkl'
            # elif split == 'test':
            #     filename = path + '/test_comp_adjs.pkl'
            #
            # if os.path.isfile(filename):
            #     print('Loading saved compact adjacencies from disk!')
            #     with open(filename, 'rb') as f:
            #         self.adj_matrices = pickle.load(f)
            #
            # else:
            #     logging.info('Compacting adjacency matrices (GTTF)')
            #     self.adj_matrices = [CompactAdjacency(adj_matrix) for adj_matrix in tqdm(adj_matrices)]
            #     with open(filename, 'wb') as f:
            #         pickle.dump(self.adj_matrices, f)
            self.adj_matrices = [
                CompactAdjacency(adj_matrix) for adj_matrix in tqdm(adj_matrices)
            ]
        else:
            self.adj_matrices = adj_matrices
        self.feature_matrices = feature_matrices
        self.labels = labels
        self.fanouts = [fanouts] * len(adj_matrices)

    def __getitem__(self, index):
        return (
            self.adj_matrices[index],
            self.feature_matrices[index],
            self.labels[index],
            self.fanouts[index],
        )

    def __len__(self):
        return len(self.adj_matrices)
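The compact-adjacency layout above is easiest to see on a toy graph. This is a minimal usage sketch (not part of the original file), assuming the `CompactAdjacency` class defined above is available in the same session along with numpy and scipy:

```python
import numpy as np
import scipy.sparse

# 3-node path graph: 0 - 1 - 2
adj = scipy.sparse.csr_matrix(np.array([
    [0, 1, 0],
    [1, 0, 1],
    [0, 1, 0],
]))

cadj = CompactAdjacency(adj)
print(cadj.degrees)          # [1 2 1]
print(cadj.neighbors_of(1))  # [0 2]
```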
avg_line_length: 35.509677 | max_line_length: 106 | alphanum_fraction: 0.539608

hexsha: f5f008c6ab29ec2f11167369e4878a77a2ee576a | size: 100,542 | ext: py | lang: Python
max_stars_repo: core/domain/html_validation_service_test.py in kevjumba/oppia @ 2618d2d85a5320c8ff4dcd700ccbc67b4d36fdf4 | licenses: ["Apache-2.0"] | stars: 1 (2021-06-26T00:31:08.000Z – 2021-06-26T00:31:08.000Z)
max_issues_repo: core/domain/html_validation_service_test.py in kevjumba/oppia @ 2618d2d85a5320c8ff4dcd700ccbc67b4d36fdf4 | licenses: ["Apache-2.0"] | issues: null
max_forks_repo: core/domain/html_validation_service_test.py in kevjumba/oppia @ 2618d2d85a5320c8ff4dcd700ccbc67b4d36fdf4 | licenses: ["Apache-2.0"] | forks: null
content:
# coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the HTML validation."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import logging
import os
import re
import bs4
from core.domain import fs_domain
from core.domain import html_domain
from core.domain import html_validation_service
from core.tests import test_utils
import feconf
import python_utils
class ContentMigrationTests(test_utils.GenericTestBase):
"""Tests the function associated with the migration of html
strings to valid RTE format.
"""
def test_wrap_with_siblings(self):
test_cases = [{
'html_content': (
'<p><i>hello</i></p> this is<i>test case1</i> for '
'<ol><li><i>testing</i></li></ol>'
),
'expected_output': (
'<p><i>hello</i></p><p> this is<i>test case1</i> for </p>'
'<ol><li><i>testing</i></li></ol>'
)
}, {
'html_content': (
'<br/>hello this is<br/>test<p> case2<br/>'
'</p> for <p><br/>testing</p>'
),
'expected_output': (
'<p><br/>hello this is<br/>test</p>'
'<p> case2<br/></p> for <p><br/>testing</p>'
)
}, {
'html_content': (
'<p>hello</p>this is case <b>3</b> for <i>'
'testing</i> the <p>function</p>'
),
'expected_output': (
'<p>hello</p><p>this is case <b>3</b> for <i>'
'testing</i> the </p><p>function</p>'
)
}]
for index, test_case in enumerate(test_cases):
soup = bs4.BeautifulSoup(test_case['html_content'], 'html.parser')
if index == 0:
tag = soup.findAll(name='i')[1]
elif index == 1:
tag = soup.find(name='br')
elif index == 2:
tag = soup.find(name='b')
html_validation_service.wrap_with_siblings(tag, soup.new_tag('p'))
self.assertEqual(
python_utils.UNICODE(soup), test_case['expected_output'])
def test_convert_to_textangular(self):
test_cases = [{
'html_content': (
'<div><i>hello</i></div> this is<i>test case1</i> for '
'<span><i>testing</i></span>'
),
'expected_output': (
'<p><i>hello</i></p><p> this is<i>test case1</i> for '
'<i>testing</i></p>'
)
}, {
'html_content': (
'<div><br>hello</div> this is<br>test<pre> case2<br>'
'</pre> for <span><br>testing</span>'
),
'expected_output': (
'<p><br>hello</p><p> this is<br>test</p>'
'<pre> case2\n</pre><p> for <br>testing</p>'
)
}, {
'html_content': 'hello <p> this is case3 for </p> testing',
'expected_output': (
'<p>hello </p><p> this is case3 for </p><p> testing</p>'
)
}, {
'html_content': 'hello <i> this is case4 for </i> testing',
'expected_output': '<p>hello <i> this is case4 for </i> testing</p>'
}, {
'html_content': (
'<span>hello</span><code> this is </code><div>'
'test </div><div>case4</div> for testing'
),
'expected_output': (
'<p>hello this is </p><p>test </p><p>case4</p><p> for '
'testing</p>'
)
}, {
'html_content': (
'<p> Random test</p>case <b>is</b> <i>added</i> here<p>!</p>'
),
'expected_output': (
'<p> Random test</p><p>case <b>is</b> <i>added</i> '
'here</p><p>!</p>'
)
}, {
'html_content': (
'<blockquote> Here is another<b>example'
'</b></blockquote>'
),
'expected_output': (
'<blockquote><p> Here is another<b>example</b></p></blockquote>'
)
}, {
'html_content': (
'<table><tbody><tr><td>January</td><td>$100</td>'
'<td>200</td></tr><tr><td>February</td><td>$80</td><td>400'
'</td></tr></tbody></table>'
),
'expected_output': (
'<p>January $100 200</p><p>February $80 400</p>'
)
}, {
'html_content': (
'<p><p><p>Hello <br/> this<p> is <br> test case <p>'
'for </p> migration <b>testing</b> </p></p></p></p>'
),
'expected_output': (
'<p>Hello <br> this</p><p> is <br> test case </p><p>'
'for </p><p> migration <b>testing</b> </p>'
)
}, {
'html_content': (
'<p>Hello this is <p>test case </p> for <p> <p>migration</p>'
'testing </p> for <p> invalid cases </p></p>'
),
'expected_output': (
'<p>Hello this is </p><p>test case </p><p> for </p><p> </p><p>'
'migration</p><p>testing </p><p> for </p><p> invalid cases </p>'
)
}, {
'html_content': '',
'expected_output': ''
}, {
'html_content': (
'<table><tbody><tr><td><blockquote>Test Content1</blockquote>'
'</td></tr><tr><td><blockquote>Test Content2</blockquote></td>'
'</tr></tbody></table>'
),
'expected_output': (
'<blockquote><p>Test Content1</p></blockquote>'
'<blockquote><p>Test Content2</p></blockquote>'
)
}, {
'html_content': (
'<strong>Bold Text</strong><em>Italic Text</em>'
'<hr>Horizontal Rule'
),
'expected_output': (
'<p><b>Bold Text</b><i>Italic Text</i>'
'<br>Horizontal Rule</p>'
)
}, {
'html_content': (
'<a href=""></a><a>No href</a>'
'<a></a>'
),
'expected_output': '<p>No href</p>'
}, {
'html_content': (
'<a href="somelink">Test a tag</a>'
),
'expected_output': (
'<p><oppia-noninteractive-link text-with-value="&quot;Test '
'a tag&quot;" url-with-value="&quot;somelink&'
'quot;"></oppia-noninteractive-link></p>'
)
}, {
'html_content': (
'<div><blockquote>Test Content1</blockquote></div>'
'<blockquote>Test Content2</blockquote>'
),
'expected_output': (
'<blockquote>Test Content1</blockquote>'
'<blockquote>Test Content2</blockquote>'
)
}, {
'html_content': '<p><pre>Test Content</pre></p>',
'expected_output': '<pre>Test Content</pre>'
}, {
'html_content': (
'<p><ul><li>Test1</li><li>Test2</li></ul></p>'
'<p><ul><li>Test1</li><li>Test2</li></ul></p>'
),
'expected_output': (
'<ul><li>Test1</li><li>Test2</li></ul>'
'<ul><li>Test1</li><li>Test2</li></ul>'
)
}, {
'html_content': (
'<oppia-noninteractive-link text-with-value="&quot;Test '
'a tag&quot;" url-with-value="&quot;somelink&'
'quot;"><b>Test1</b>'
'</oppia-noninteractive-link>'
),
'expected_output': (
'<p><b><oppia-noninteractive-link text-with-value="&quot;'
'Test a tag&quot;" url-with-value="&quot;somelink&'
'quot;">Test1'
'</oppia-noninteractive-link></b></p>'
)
}, {
'html_content': (
'<b><b>Test 1</b></b>'
),
'expected_output': (
'<p><b>Test 1</b></p>'
)
}, {
'html_content': (
'<i><i>Test 2</i></i>'
),
'expected_output': (
'<p><i>Test 2</i></p>'
)
}, {
'html_content': (
'<oppia-noninteractive-link text-with-value="&quot;Test a'
' tag&quot;" url-with-value="&quot;somelink&quot;">'
'<oppia-noninteractive-link text-with-value="&quot;Test a'
' tag&quot;" url-with-value="&quot;somelink&quot;">'
'Test1</oppia-noninteractive-link>'
'</oppia-noninteractive-link>'
),
'expected_output': (
'<p><oppia-noninteractive-link text-with-value="&quot;Test '
'a tag&quot;" url-with-value="&quot;somelink&'
'quot;">Test1</oppia-noninteractive-link></p>'
)
}, {
'html_content': (
'<b><p>Test 1</p></b>'
),
'expected_output': (
'<p>Test 1</p>'
)
}, {
'html_content': (
'<i><p>Test 2</p></i>'
),
'expected_output': (
'<p>Test 2</p>'
)
}, {
'html_content': (
'<tr><td><p>Test 1</p></td>'
'<td><p>Test 2</p></td>'
'<td><p>Test 3</p></td></tr>'
),
'expected_output': (
'<p>Test 1 Test 2 Test 3</p>'
)
}, {
'html_content': (
'<a href="somelink">This is a tag with '
'<b>bold</b></a>'
),
'expected_output': (
'<p><b><oppia-noninteractive-link text-with-value="&quot;'
'This is a tag with bold&quot;"'
' url-with-value="&quot;somelink&quot;">'
'</oppia-noninteractive-link></b></p>'
)
}, {
'html_content': (
'<a href="somelink">This is a tag with '
'<i>Italic</i></a>'
),
'expected_output': (
'<p><i><oppia-noninteractive-link text-with-value="&quot;'
'This is a tag with Italic&quot;"'
' url-with-value="&quot;somelink&quot;">'
'</oppia-noninteractive-link></i></p>'
)
}, {
'html_content': (
'<blockquote><oppia-noninteractive-collapsible '
'content-with-value="&quot;&lt;pre&gt;&lt;'
'p&gt;lorem ipsum&lt;/p&gt;&lt;/pre&gt;'
'&quot;" heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible></blockquote>'
),
'expected_output': (
'<blockquote><p><oppia-noninteractive-collapsible '
'content-with-value="&quot;&lt;pre&gt;&lt;p'
'&gt;lorem ipsum&lt;/p&gt;&lt;/pre&gt;'
'&quot;" heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible></p></blockquote>'
)
}]
for test_case in test_cases:
self.assertEqual(
test_case['expected_output'],
html_validation_service.convert_to_textangular(
test_case['html_content']))
def test_validate_rte_format(self):
test_cases_for_textangular = [
(
'This is for <i>testing</i> the validate <b>text</b> '
'angular function.'
),
(
'This is the last test case <a href="https://github.com">hello'
'<oppia-noninteractive-link url-with-value="&quot;'
'here&quot;" text-with-value="validated">'
'</oppia-noninteractive-link></a><p> testing completed</p>'
)
]
actual_output_with_migration_for_textangular = (
html_validation_service.validate_rte_format(
test_cases_for_textangular,
feconf.RTE_FORMAT_TEXTANGULAR, run_migration=True))
actual_output_without_migration_for_textangular = (
html_validation_service.validate_rte_format(
test_cases_for_textangular, feconf.RTE_FORMAT_TEXTANGULAR))
expected_output_with_migration_for_textangular = {'strings': []}
expected_output_without_migration_for_textangular = {
'i': ['[document]'],
'invalidTags': ['a'],
'oppia-noninteractive-link': ['a'],
'b': ['[document]'],
'strings': [
(
'This is for <i>testing</i> the validate '
'<b>text</b> angular function.'
),
(
'This is the last test case <a href="https://github.com">'
'hello<oppia-noninteractive-link url-with-value="&'
'quot;here&quot;" text-with-value="validated">'
'</oppia-noninteractive-link></a><p> testing completed</p>'
),
]
}
self.assertEqual(
actual_output_with_migration_for_textangular,
expected_output_with_migration_for_textangular)
self.assertEqual(
actual_output_without_migration_for_textangular,
expected_output_without_migration_for_textangular)
test_cases_for_ckeditor = [
(
'<pre>Hello this is <b> testing '
'<oppia-noninteractive-image filepath-with-value="amp;quot;'
'random.png&quot;"></oppia-noninteractive-image> in '
'</b>progress</pre>'
),
(
'<oppia-noninteractive-collapsible content-with-value="&'
'quot;&lt;pre&gt;&lt;p&gt;lorem ipsum&'
'lt;/p&gt;&lt;/pre&gt;'
'&quot;" heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible>'
),
(
'<oppia-noninteractive-tabs tab_contents-with-value'
'=\"[{&quot;content&quot;:&quot;&lt;p&gt;'
'&lt;i&gt;lorem ipsum&lt;/i&gt;&lt;/p'
'&gt;&quot;,&quot;title&quot;:&'
'quot;hello&quot;}]\"></oppia-noninteractive-tabs>'
),
(
'<oppia-noninteractive-collapsible '
'heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible>'
)
]
actual_output_with_migration_for_ckeditor = (
html_validation_service.validate_rte_format(
test_cases_for_ckeditor, feconf.RTE_FORMAT_CKEDITOR,
run_migration=True))
actual_output_without_migration_for_ckeditor = (
html_validation_service.validate_rte_format(
test_cases_for_ckeditor, feconf.RTE_FORMAT_CKEDITOR))
expected_output_with_migration_for_ckeditor = {'strings': []}
expected_output_without_migration_for_ckeditor = {
'invalidTags': ['i', 'b'],
'oppia-noninteractive-image': ['b'],
'p': ['pre'],
'strings': [
(
'<oppia-noninteractive-collapsible '
'heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible>'
),
(
'<pre>Hello this is <b> testing '
'<oppia-noninteractive-image filepath-with-value="amp;quot;'
'random.png&quot;"></oppia-noninteractive-image> in '
'</b>progress</pre>'
),
(
'<oppia-noninteractive-collapsible content-with-value="&'
';quot;&lt;pre&gt;&lt;p&gt;lorem ipsum&'
'lt;/p&gt;&lt;/pre&gt;'
'&quot;" heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible>'
),
(
'<oppia-noninteractive-tabs tab_contents-with-value'
'=\"[{&quot;content&quot;:&quot;&lt;p&'
'gt;&lt;i&gt;lorem ipsum&lt;/i&gt;&lt;'
'/p&gt;&quot;,&quot;title&quot;:&'
'quot;hello&quot;}]\"></oppia-noninteractive-tabs>'
),
]
}
self.assertEqual(
actual_output_with_migration_for_ckeditor,
expected_output_with_migration_for_ckeditor)
self.assertEqual(
actual_output_without_migration_for_ckeditor,
expected_output_without_migration_for_ckeditor)
def test_validate_soup_for_rte(self):
test_cases_for_textangular = [
(
'<p>Hello <b>this </b>is </p><p><br></p><p>test <b>case '
'</b>for </p><p><oppia-noninteractive-collapsible '
'content-with-value=\"&quot;Hello oppia&quot;\" '
'heading-with-value=\"&quot;Learn more about APIs&'
'amp;quot;\"></oppia-noninteractive-collapsible><br></p><p>'
'for migration testing</p>'
),
'Hello<div>oppia</div>testing <i>in progess</i>!',
'<p>Hello</p><p>oppia</p><p>testing <i>in progress</i>!</p>',
'Text with no parent tag',
'<h1>This is not a allowed tag</h1>',
(
'<p><blockquote>Parent child relation not valid</blockquote>'
'</p><b><blockquote>Parent child relation not valid'
'</blockquote></b>'
)
]
expected_output_for_textangular = [False, True, False, True, True, True]
err_dict = {}
for index, test_case in enumerate(test_cases_for_textangular):
actual_output_for_textangular = (
html_validation_service.validate_soup_for_rte(
bs4.BeautifulSoup(test_case, 'html.parser'),
feconf.RTE_FORMAT_TEXTANGULAR, err_dict))
self.assertEqual(
actual_output_for_textangular,
expected_output_for_textangular[index])
test_cases_for_ckeditor = [
(
'<p>Lorem ipsum </p><p> Hello this is oppia </p>'
),
(
'<p>Lorem <span>ipsum </span></p> Hello this is '
'<code>oppia </code>'
),
(
'<p><oppia-noninteractive-image filepath-with-value="amp;quot;'
'random.png&quot;"></oppia-noninteractive-image>Hello this '
'is test case to check image tag inside p tag</p>'
)
]
expected_output_for_ckeditor = [False, True, True]
err_dict = {}
for index, test_case in enumerate(test_cases_for_ckeditor):
actual_output_for_ckeditor = (
html_validation_service.validate_soup_for_rte(
bs4.BeautifulSoup(test_case, 'html.parser'),
feconf.RTE_FORMAT_CKEDITOR, err_dict))
self.assertEqual(
actual_output_for_ckeditor,
expected_output_for_ckeditor[index])
def test_convert_tag_contents_to_rte_format(self):
test_cases_for_textangular = [{
'html_content': (
'<div>Hello <b>this </b>is </div><p><br></p><p>test <b>case '
'</b>for </p><p><oppia-noninteractive-collapsible '
'content-with-value=\"&quot;Hello oppia&quot;\" '
'heading-with-value=\"&quot;Learn more about APIs&'
'quot;\"></oppia-noninteractive-collapsible><br></p><p>'
'for migration testing</p>'
),
'expected_output': (
'<div>Hello <b>this </b>is </div><p><br/></p><p>test <b>case '
'</b>for </p><p><oppia-noninteractive-collapsible '
'content-with-value=\"&quot;&lt;p&gt;Hello oppia'
'&lt;/p&gt;&quot;\" heading-with-value=\"'
'&quot;Learn more about APIs&quot;\">'
'</oppia-noninteractive-collapsible><br/></p><p>'
'for migration testing</p>'
)
}, {
'html_content': (
'Hello<div>oppia</div>testing <i>in progess</i>!'
),
'expected_output': (
'Hello<div>oppia</div>testing <i>in progess</i>!'
)
}, {
'html_content': (
'<span><b>Hello </b></span><div><b><span>this is '
'test case</span></b></div><div><b><br></b></div>'
'<div><oppia-noninteractive-tabs tab_contents-with-value'
'=\"[{&quot;content&quot;:&quot;&lt;span '
'style=\\&quot;line-height: 21px; background-color: '
'rgb(255, 255, 255);\\&quot;&gt;lorem ipsum&lt;'
'/span&gt;&quot;,&quot;title&quot;:&'
'quot;hello&quot;},{&quot;content&quot;:&'
'quot;&lt;span style=\\&quot;color: rgb(0, 0, 0); '
'font-family: &#39;Times New Roman&#39;; font-size: '
'medium; line-height: normal;\\&quot;&gt;&lt;'
'font size=\\&quot;3\\&quot; face=\\&quot;Times '
'New Roman CE\\&quot;&gt;oppia&lt;/font&gt;'
'&lt;/span&gt;&quot;,&quot;title&quot;:'
'&quot;Savjet 1&quot;}]\"></oppia-noninteractive-tabs>'
'<b><br></b></div><div><span></span><b><br></b><div>'
'<span><b><br></b></span></div></div>'
),
'expected_output': (
'<span><b>Hello </b></span><div><b><span>this is '
'test case</span></b></div><div><b><br/></b></div>'
'<div><oppia-noninteractive-tabs tab_contents-with-value='
'\"[{&quot;content&quot;: &quot;&lt;'
'p&gt;lorem ipsum&lt;/p&gt;&quot;, '
'&quot;title&quot;: &quot;hello&quot;}, '
'{&quot;content&quot;: &quot;&lt;p&gt;'
'oppia&lt;/p&gt;&quot;, &quot;title&'
'quot;: &quot;Savjet 1&quot;}]\">'
'</oppia-noninteractive-tabs><b><br/></b></div>'
'<div><span></span><b><br/></b><div>'
'<span><b><br/></b></span></div></div>'
)
}, {
'html_content': (
'<oppia-noninteractive-collapsible '
'content-with-value=""></oppia-noninteractive-collapsible>'
),
'expected_output': (
'<oppia-noninteractive-collapsible content'
'-with-value="&quot;&quot;" heading-with-value='
'"&quot;&quot;"></oppia-noninteractive-collapsible>'
)
}]
for test_case in test_cases_for_textangular:
actual_output_for_textangular = (
html_validation_service.convert_tag_contents_to_rte_format(
test_case['html_content'],
html_validation_service.convert_to_textangular))
self.assertEqual(
actual_output_for_textangular,
test_case['expected_output'])
test_cases_for_ckeditor = [{
'html_content': (
'<oppia-noninteractive-collapsible '
'content-with-value=\"&quot;&lt;pre&gt;&'
'lt;p&gt;Hello oppia&lt;/p&gt;&lt;'
'/pre&gt;&quot;\" '
'heading-with-value=\"&quot;Learn more about APIs&'
'quot;\"></oppia-noninteractive-collapsible>'
),
'expected_output': (
'<oppia-noninteractive-collapsible '
'content-with-value=\"&quot;&lt;pre&gt;Hello oppia'
'&lt;/pre&gt;&quot;\" heading-with-value=\"'
'&quot;Learn more about APIs&quot;\">'
'</oppia-noninteractive-collapsible>'
)
}, {
'html_content': (
'Hello<div>oppia</div>testing <i>in progess</i>!'
),
'expected_output': (
'Hello<div>oppia</div>testing <i>in progess</i>!'
)
}, {
'html_content': (
'<oppia-noninteractive-tabs tab_contents-with-value'
'=\"[{&quot;content&quot;:&quot;&lt;p&gt;'
'&lt;i&gt;lorem ipsum&lt;/i&gt;&lt;/p'
'&gt;&quot;,&quot;title&quot;:&'
'quot;hello&quot;}]\"></oppia-noninteractive-tabs>'
),
'expected_output': (
'<oppia-noninteractive-tabs tab_contents-with-value'
'=\"[{&quot;content&quot;: &quot;&lt;p&gt;'
'&lt;em&gt;lorem ipsum&lt;/em&gt;&lt;/p'
'&gt;&quot;, &quot;title&quot;: &'
'quot;hello&quot;}]\"></oppia-noninteractive-tabs>'
)
}]
for test_case in test_cases_for_ckeditor:
actual_output_for_ckeditor = (
html_validation_service.convert_tag_contents_to_rte_format(
test_case['html_content'],
html_validation_service.convert_to_ckeditor))
self.assertEqual(
actual_output_for_ckeditor,
test_case['expected_output'])
def test_convert_to_ckeditor(self):
test_cases = [{
'html_content': (
'<p>Lorem <span>ipsum </span></p> Hello this is '
'<code>oppia </code>'
),
'expected_output': (
'<p>Lorem <span>ipsum </span></p><p> Hello this is </p>'
'<code>oppia </code>'
)
}, {
'html_content': (
'<p><oppia-noninteractive-image filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image>Hello this '
'is test case to check image tag inside p tag</p>'
),
'expected_output': (
'<oppia-noninteractive-image filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image><p>Hello '
'this is test case to check image tag inside p tag</p>'
)
}, {
'html_content': '<p>hello <i> this is case4 for </i> testing</p>',
'expected_output': (
'<p>hello <em> this is case4 for </em> testing</p>'
)
}, {
'html_content': (
'<oppia-noninteractive-collapsible content-with-value="&'
'quot;&lt;pre&gt;&lt;p&gt;lorem ipsum&'
'amp;lt;/p&gt;&lt;/pre&gt;'
'&quot;" heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible>'
),
'expected_output': (
'<oppia-noninteractive-collapsible content-with-value="&'
'quot;&lt;pre&gt;lorem ipsum'
'&lt;/pre&gt;'
'&quot;" heading-with-value="&quot;'
'lorem ipsum&quot;lorem ipsum&quot;?&quot;">'
'</oppia-noninteractive-collapsible>'
)
}, {
'html_content': (
'<pre>Hello this is <b> testing '
'<oppia-noninteractive-image filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image> in '
'</b>progress</pre>'
),
'expected_output': (
'<pre>Hello this is <strong> testing </strong></pre>'
'<oppia-noninteractive-image filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image><pre>'
'<strong> in </strong>progress</pre>'
)
}, {
'html_content': (
'<blockquote><p> Here is another<b>example'
'</b></p></blockquote>'
),
'expected_output': (
'<blockquote><p> Here is another<strong>example'
'</strong></p></blockquote>'
)
}, {
'html_content': (
'<p>Hello </p><p>this </p><p>is test case.</p>'
'<ul><li>Item1</li><li>Item2</li>'
'<ul><li>This is for <b>testing </b>migration.</li>'
'<li>Item3</li></ul></ul><p></p>'
),
'expected_output': (
'<p>Hello </p><p>this </p><p>is test case.</p>'
'<ul><li>Item1</li><li>Item2'
'<ul><li>This is for <strong>testing </strong>migration.</li>'
'<li>Item3</li></ul></li></ul><p></p>'
)
}, {
'html_content': (
'<ol><li>Item1</li><ol><ol><ol><li>Item2</li><li>Item3</li>'
'<li>Item4</li><ol><ol><ol><li>Item5</li><li>Item6</li></ol>'
'</ol></ol></ol></ol></ol><li>Item7</li><ol><li>Item8</li>'
'<li>Item9</li><ol><ol><li>Item10</li><li>Item11</li>'
'</ol></ol></ol></ol>'
),
'expected_output': (
'<ol><li>Item1<ol><li>Item2</li><li>Item3</li><li>Item4<ol>'
'<li>Item5</li><li>Item6</li></ol></li></ol></li><li>Item7'
'<ol><li>Item8</li><li>Item9<ol><li>Item10</li><li>Item11'
'</li></ol></li></ol></li></ol>'
)
}, {
'html_content': (
'<p><em><strong>this is </strong></em><br></p>'
'<oppia-noninteractive-collapsible content-with-value'
'="&quot;&lt;ul&gt;&lt;li&gt;&'
'lt;p&gt;&lt;li&gt;loremipsum&lt;/li&gt;'
'&lt;li&gt;loremipsum&lt;/li&gt;&lt;li&'
'gt;loremipsum&lt;/li&gt;&lt;/p&gt;&lt;'
'oppia-noninteractive-image alt-with-value=\\&quot;&'
'amp;amp;quot;loremipsum&amp;amp;quot;\\&quot; '
'caption-with-value=\\&quot;&amp;amp;quot;&amp;amp;'
'quot;\\&quot; filepath-with-value=\\&quot;&amp;amp'
';quot;loremipsum.png&amp;amp;quot;\\&quot;&gt;&'
';lt;/oppia-noninteractive-image&gt;&lt;p&gt;&'
'lt;br&gt;&lt;/p&gt;&lt;/li&gt;&lt;/ul'
'&gt;&quot;" heading-with-value="&quot;loremipusm'
'&quot;"></oppia-noninteractive-collapsible>'
),
'expected_output': (
'<p><em><strong>this is </strong></em><br></p>'
'<oppia-noninteractive-collapsible content-with-value='
'"&quot;&lt;ul&gt;&lt;li&gt;loremipsum&'
'lt;/li&gt;&lt;li&gt;loremipsum&lt;/li&gt;'
'&lt;li&gt;loremipsum&lt;/li&gt;&lt;'
'li&gt;&lt;oppia-noninteractive-image alt-with-value'
'=\\&quot;&amp;amp;quot;loremipsum&amp;amp;quot;'
'\\&quot; caption-with-value=\\&quot;&amp;amp;quot;'
'&amp;amp;quot;\\&quot; filepath-with-value=\\&quot'
';&amp;amp;quot;loremipsum.png&amp;amp;quot;\\&quot'
';&gt;&lt;/oppia-noninteractive-image&gt;&lt;'
'p&gt;\\u00a0&lt;/p&gt;&lt;/li&'
'gt;&lt;/ul&gt;&quot;" heading-with-value="&'
'quot;loremipusm&quot;"></oppia-noninteractive-collapsible>'
)
}, {
'html_content': (
'<pre><p>Hello this is test case for </p><p>br '
'in </p><p>pre </p><p>tag<br></p></pre>'
),
'expected_output': (
'<pre>Hello this is test case for br in pre tag\n</pre>'
)
}, {
'html_content': (
'<p><li> Hello this is test case for li in p which results '
'in </li><li> in document </li><li> after unwrapping </li></p>'
),
'expected_output': (
'<ul><li> Hello this is test case for li in p which results '
'in </li><li> in document </li><li> after unwrapping </li></ul>'
)
}, {
'html_content': '',
'expected_output': ''
}, {
'html_content': '<p><li>Test case to check li is in ul</li></p>',
'expected_output': (
'<ul><li>Test case to check li is in ul</li></ul>'
)
}, {
'html_content': '<pre><p>Test case1</p></pre>',
'expected_output': '<pre>Test case1</pre>'
}, {
'html_content': (
'<ul><p>Test case 1</p></ul>'
'<ol><p>Test case 2</p></ol>'
),
'expected_output': (
'<ul><li><p>Test case 1</p></li></ul>'
'<ol><li><p>Test case 2</p></li></ol>'
)
}, {
'html_content': (
'<li>This is Some <p>Test<li> ABC</li>Content</p></li>'
),
'expected_output': (
'<p>This is Some </p><p>Test</p><ul><li> ABC</li></ul><p>'
'Content</p>'
)
}, {
'html_content': (
'<ul><p>Test Content1</p><p>Test Content2</p><li>Test Content3'
'</li></ul>'
),
'expected_output': (
'<ul><li><p>Test Content1</p><p>Test Content2'
'</p></li><li>Test Content3</li></ul>'
)
}, {
'html_content': (
'<pre><p>This is a p in pre</p></pre>'
),
'expected_output': (
'<pre>This is a p in pre</pre>'
)
}, {
'html_content': (
'<ol><p>This is a p in ol</p><p> or ul</p></ol>'
),
'expected_output': (
'<ol><li><p>This is a p in ol</p><p> or ul</p></li></ol>'
)
}, {
'html_content': '<ul>\n<li>Item</li>\n</ul>',
'expected_output': '<ul><li>Item</li></ul>'
}, {
'html_content': '<p>Para1</p>\n<p>Para2</p>',
'expected_output': '<p>Para1</p><p>Para2</p>'
}]
for test_case in test_cases:
self.assertEqual(
test_case['expected_output'],
html_validation_service.convert_to_ckeditor(
test_case['html_content']))
def test_add_caption_to_image(self):
test_cases = [{
'html_content': (
'<p><oppia-noninteractive-image filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image>Hello this '
'is test case to check that caption attribute is added to '
'image tags if it is missing.</p>'
),
'expected_output': (
'<p><oppia-noninteractive-image caption-with-value="&quot;'
'&quot;" filepath-with-value="&quot;random.png&'
'quot;"></oppia-noninteractive-image>Hello this '
'is test case to check that caption attribute is added to '
'image tags if it is missing.</p>'
)
}, {
'html_content': (
'<p><oppia-noninteractive-image caption-with-value="&quot;'
'abc&quot;" filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image>Hello this '
'is test case to check that image tags that already have '
'caption attribute are not changed.</p>'
),
'expected_output': (
'<p><oppia-noninteractive-image caption-with-value="&quot;'
'abc&quot;" filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image>Hello this '
'is test case to check that image tags that already have '
'caption attribute are not changed.</p>'
)
}]
for test_case in test_cases:
self.assertEqual(
html_validation_service.add_caption_attr_to_image(
test_case['html_content']),
test_case['expected_output'])
def test_validate_customization_args(self):
test_cases = [(
'<p><oppia-noninteractive-link text-with-value="&quot;What is '
'a link?&quot;" url-with-value="&quot;htt://link.com&'
';quot;"></oppia-noninteractive-link></p>'
), (
'<p><oppia-noninteractive-link text-with-value="3456" '
'url-with-value="&quot;http://google.com&'
';quot;"></oppia-noninteractive-link></p>'
), (
'<p><oppia-noninteractive-link text-with-value="&quot;What is '
'a link?&quot;" url-with-value="&quot;https://link.com&'
';quot;"></oppia-noninteractive-link></p>'
), (
'<oppia-noninteractive-collapsible content-with-value="'
'&quot;&lt;p&gt;&lt;oppia-noninteractive-link '
'url-with-value=\\&quot;&amp;amp;quot;'
'https://www.example.com&amp;amp;quot;\\&quot;&gt;'
'&lt;/oppia-noninteractive-link&gt;&lt;/p&gt;'
'&quot;" heading-with-value="&quot;Hello&quot;">'
'</oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-image caption-with-value="&quot;'
'abc&quot;" filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image>'
), (
'<p><oppia-noninteractive-math math_content-with-value="'
'{&quot;raw_latex&quot;:&quot;abc&quot;'
',&quot;svg_filename&quot;:&quot;&quot;}">'
'</oppia-noninteractive-math></p>'
), (
'<p><oppia-noninteractive-math url-with-value="&quot;'
'http://link.com&quot;></oppia-noninteractive-math></p>'
), (
'<oppia-noninteractive-collapsible content-with-value='
'"&quot;&lt;p&gt;lorem ipsum&lt;/p&gt;&'
'quot;" heading-with-value="&quot;lorem ipsum&quot;">'
'</oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-collapsible content-with-value='
'"34454" heading-with-value="&quot;lorem ipsum&quot;">'
'</oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-collapsible content-with-value="'
'&quot;&lt;oppia-noninteractive-tabs tab_contents'
'-with-value=\\&quot;[{&amp;amp;quot;title&amp;amp;'
'quot;:&amp;amp;quot;Tab&amp;amp;quot;,&amp;amp;quot;'
'content&amp;amp;quot;:&amp;amp;quot;&amp;amp;lt;p&'
';amp;amp;gt;Hello&amp;amp;lt;/p&amp;amp;gt;&amp;'
'amp;quot;}]\\&quot;&gt;&lt;/oppia-noninteractive-tabs'
'&gt;&lt;p&gt;You have opened the collapsible block.'
'&lt;/p&gt;&quot;" heading-with-value="&quot;'
'Hello&quot;"></oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-collapsible content-with-value='
'"&quot;&lt;oppia-noninteractive-collapsible '
'content-with-value=\\&quot;&amp;amp;quot;&'
'amp;amp;lt;p&amp;amp;gt;Hello&amp;amp;lt;/p'
'&amp;amp;gt;&amp;amp;quot;\\&quot; '
'heading-with-value=\\&quot;&amp;amp;quot;'
'SubCollapsible&amp;amp;quot;\\&quot;&'
'gt;&lt;/oppia-noninteractive-collapsible&'
'gt;&lt;p&gt;&amp;nbsp;&lt;/p&gt;'
'&quot;" heading-with-value="&quot;Collapsible'
'&quot;"></oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value="'
'[{&quot;content&quot;: &quot;&lt;p&gt;lorem '
'ipsum&lt;/p&gt;&quot;, &quot;title&quot;: '
'&quot;hello&quot;}, {&quot;content&quot;: &'
'quot;&lt;p&gt;oppia&lt;/p&gt;&quot;, &'
'quot;title&quot;: &quot;Savjet 1&quot;}]">'
'</oppia-noninteractive-tabs>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value="'
'[{&quot;content&quot;: 1234, '
'&quot;title&quot;: &quot;hello&quot;}, '
'{&quot;content&quot;: &quot;&lt;p&gt;'
'oppia&lt;/p&gt;&quot;, &'
'quot;title&quot;: &quot;Savjet 1&quot;}]">'
'</oppia-noninteractive-tabs>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value="'
'[{&quot;content&quot;: &quot;&lt;p&gt;lorem '
'ipsum&lt;/p&gt;&quot;, &quot;url&quot;: '
'&quot;hello&quot;}, {&quot;content&quot;: &'
'quot;&lt;p&gt;oppia&lt;/p&gt;&quot;, &'
'quot;title&quot;: &quot;Savjet 1&quot;}]">'
'</oppia-noninteractive-tabs>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value="'
'[{&quot;content&quot;: &quot;&lt;p&gt;lorem '
'ipsum&lt;/p&gt;&quot;, &quot;title&quot;: '
'&quot;hello&quot;}, [1,2,3]]">'
'</oppia-noninteractive-tabs>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value="[{&quot;'
'title&quot;:&quot;Tab&quot;,&quot;content&'
'quot;:&quot;&lt;oppia-noninteractive-tabs '
'tab_contents-with-value=\\&quot;[{&amp;amp;quot;title'
'&amp;amp;quot;:&amp;amp;quot;Subtab&amp;amp;quot;'
',&amp;amp;quot;content&amp;amp;quot;:&amp;amp;quot;'
'&amp;amp;lt;p&amp;amp;gt;Hello&amp;amp;lt;/p&'
'amp;amp;gt;&amp;amp;quot;}]\\&quot;&gt;&lt;'
'/oppia-noninteractive-tabs&gt;&lt;p&gt;&amp;'
'nbsp;&lt;/p&gt;&quot;}]">'
'</oppia-noninteractive-tabs>'
), (
'<oppia-noninteractive-video autoplay-with-value="false" '
'end-with-value="0" start-with-value="0">'
'</oppia-noninteractive-video>'
), (
'<oppia-noninteractive-video autoplay-with-value="&quot;hello'
'&quot;" end-with-value="0" start-with-value="0" '
'video_id-with-value="&quot;loremipsum&quot;">'
'</oppia-noninteractive-video>'
), (
'<oppia-noninteractive-video autoplay-with-value="false" '
'end-with-value="0" start-with-value="&quot;Hello&quot;" '
'video_id-with-value="&quot;loremipsum&quot;">'
'</oppia-noninteractive-video>'
), (
'<oppia-noninteractive-video autoplay-with-value="false" '
'end-with-value="0" start-with-value="0" '
'video_id-with-value="&quot;lorem&quot;">'
'</oppia-noninteractive-video>'
), (
'<oppia-noninteractive-video autoplay-with-value="false" '
'end-with-value="0" start-with-value="0" '
'video_id-with-value="&quot;12345678901&quot;">'
'</oppia-noninteractive-video>'
), (
'<oppia-noninteractive-image alt-with-value="&quot;A '
'circle divided into equal fifths.&quot;" '
'caption-with-value="&quot;&quot;" '
'filepath-with-value="&quot;xyz.png&quot;">'
'</oppia-noninteractive-image>'
), (
'<oppia-noninteractive-image alt-with-value="&quot;A '
'circle divided into equal fifths.&quot;" '
'caption-with-value="&quot;Hello&quot;" '
'filepath-with-value="&quot;xy.z.png&quot;">'
'</oppia-noninteractive-image>'
), (
'<oppia-noninteractive-image alt-with-value="&quot;A '
'circle divided into equal fifths.&quot;" '
'caption-with-value="34454" '
'filepath-with-value="&quot;xyz.png&quot;">'
'</oppia-noninteractive-image>'
), (
'<oppia-noninteractive-image alt-with-value="&quot;A '
'circle divided into equal fifths.&quot;" '
'caption-with-value="&quot;5454&quot;" '
'filepath-with-value="&quot;xyz.jpg&quot;">'
'</oppia-noninteractive-image>'
), (
'<oppia-noninteractive-image alt-with-value="&quot;A '
'circle divided into equal fifths.&quot;" '
'caption-with-value="&quot;Hello&quot;" '
'filepath-with-value="&quot;46503*.jpg&quot;">'
'</oppia-noninteractive-image>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value="[{&quot;'
'title&quot;:&quot;Hint introduction&quot;,&quot;'
'content&quot;:&quot;&lt;p&gt;&lt;'
'oppia-noninteractive-link url-with-value=\\&quot;&amp;amp;'
'quot;https://www.oppia.org&amp;amp;quot;\\&quot;&gt;'
'&lt;/oppia-noninteractive-link&gt;&lt;/p&gt;&'
'quot;}]"></oppia-noninteractive-tabs>'
)]
actual_output = html_validation_service.validate_customization_args(
test_cases)
expected_output = {
'Invalid filepath': [(
'<oppia-noninteractive-image alt-with-value="&quot;'
'A circle divided into equal fifths.&quot;" caption-'
'with-value="&quot;Hello&quot;" filepath-with-value'
'="&quot;46503*.jpg&quot;">'
'</oppia-noninteractive-image>'
), (
'<oppia-noninteractive-image alt-with-value="&quot;A '
'circle divided into equal fifths.&quot;" caption-'
'with-value="&quot;Hello&quot;" filepath-with-value'
'="&quot;xy.z.png&quot;"></oppia-noninteractive-image>'
)],
'Expected dict, received [1, 2, 3]': [(
'<oppia-noninteractive-tabs tab_contents-with-value='
'"[{&quot;content&quot;: &quot;&lt;p&'
'gt;lorem ipsum&lt;/p&gt;&quot;, &quot;'
'title&quot;: &quot;hello&quot;}, [1,2,3]]">'
'</oppia-noninteractive-tabs>'
)],
'Nested tabs and collapsible': [(
'<oppia-noninteractive-collapsible content-with-value="&'
'quot;&lt;oppia-noninteractive-collapsible content-with-'
'value=\\&quot;&amp;amp;quot;&amp;amp;lt;p&'
'amp;amp;gt;Hello&amp;amp;lt;/p&amp;amp;gt;&amp;'
'amp;quot;\\&quot; heading-with-value=\\&quot;&'
'amp;amp;quot;SubCollapsible&amp;amp;quot;\\&quot;&'
'gt;&lt;/oppia-noninteractive-collapsible&gt;&lt;p'
'&gt;&amp;nbsp;&lt;/p&gt;&quot;" '
'heading-with-value="&quot;Collapsible&quot;">'
'</oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-collapsible content-with-value='
'"&quot;&lt;oppia-noninteractive-tabs tab_contents-'
'with-value=\\&quot;[{&amp;amp;quot;title&amp;'
'amp;quot;:&amp;amp;quot;Tab&amp;amp;quot;,&'
'amp;amp;quot;content&amp;amp;quot;:&amp;amp;quot;'
'&amp;amp;lt;p&amp;amp;gt;Hello&amp;amp;lt;/p'
'&amp;amp;gt;&amp;amp;quot;}]\\&quot;&gt;&'
'lt;/oppia-noninteractive-tabs&gt;&lt;p&gt;You '
'have opened the collapsible block.&lt;/p&gt;'
'&quot;" heading-with-value="&quot;Hello&quot;">'
'</oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value'
'="[{&quot;title&quot;:&quot;Tab&quot;,'
'&quot;content&quot;:&quot;&lt;oppia-'
'noninteractive-tabs tab_contents-with-value=\\&quot;'
'[{&amp;amp;quot;title&amp;amp;quot;:&amp;amp;'
'quot;Subtab&amp;amp;quot;,&amp;amp;quot;content&'
'amp;amp;quot;:&amp;amp;quot;&amp;amp;lt;p&amp;'
'amp;gt;Hello&amp;amp;lt;/p&amp;amp;gt;&amp;'
'amp;quot;}]\\&quot;&gt;&lt;/oppia-noninteractive'
'-tabs&gt;&lt;p&gt;&amp;nbsp;&lt;/p&'
'gt;&quot;}]"></oppia-noninteractive-tabs>'
)],
'Expected unicode HTML string, received 34454': [(
'<oppia-noninteractive-collapsible content-with-value="34454" '
'heading-with-value="&quot;lorem ipsum&quot;">'
'</oppia-noninteractive-collapsible>'
)],
'Missing attributes: text-with-value, Extra attributes: ':
[(
'<oppia-noninteractive-collapsible content-with-value'
'="&quot;&lt;p&gt;&lt;oppia-noninteractive-'
'link url-with-value=\\&quot;&amp;amp;quot;https://'
'www.example.com&amp;amp;quot;\\&quot;&gt;&lt;'
'/oppia-noninteractive-link&gt;&lt;/p&gt;&'
'quot;" heading-with-value="&quot;Hello&quot;">'
'</oppia-noninteractive-collapsible>'
), (
'<oppia-noninteractive-tabs tab_contents-with-value="[{&'
'quot;title&quot;:&quot;Hint introduction&quot;,'
'&quot;content&quot;:&quot;&lt;p&gt;&'
'lt;oppia-noninteractive-link url-with-value=\\&quot;&'
'amp;amp;quot;https://www.oppia.org&amp;amp;quot;\\&'
'quot;&gt;&lt;/oppia-noninteractive-link&gt;&'
'lt;/p&gt;&quot;}]"></oppia-noninteractive-tabs>'
)],
'Expected bool, received hello': [(
'<oppia-noninteractive-video autoplay-with-value="&quot;'
'hello&quot;" end-with-value="0" start-with-value="0" '
'video_id-with-value="&quot;loremipsum&quot;">'
'</oppia-noninteractive-video>'
)],
(
u'Invalid URL: Sanitized URL should start with \'http://\' or '
'\'https://\'; received htt://link.com'
): [(
'<p><oppia-noninteractive-link text-with-value="&quot;'
'What is a link?&quot;" url-with-value="&quot;htt://'
'link.com&quot;"></oppia-noninteractive-link></p>'
)],
(
'Missing attributes: video_id-with-value, '
'Extra attributes: '
): [(
'<oppia-noninteractive-video autoplay-with-value="false" '
'end-with-value="0" start-with-value="0">'
'</oppia-noninteractive-video>'
)],
'Expected unicode string, received 34454': [(
'<oppia-noninteractive-image alt-with-value="&quot;'
'A circle divided into equal fifths.&quot;" '
'caption-with-value="34454" filepath-with-value="&quot;'
'xyz.png&quot;"></oppia-noninteractive-image>'
)],
'Expected unicode string, received 3456': [(
'<p><oppia-noninteractive-link text-with-value="3456" '
'url-with-value="&quot;http://google.com&quot;">'
'</oppia-noninteractive-link></p>'
)],
'Missing keys: [u\'title\'], Extra keys: [u\'url\']': [(
'<oppia-noninteractive-tabs tab_contents-with-value="'
'[{&quot;content&quot;: &quot;&lt;p&'
'gt;lorem ipsum&lt;/p&gt;&quot;, &quot;url'
'&quot;: &quot;hello&quot;}, {&quot;'
'content&quot;: &quot;&lt;p&gt;oppia'
'&lt;/p&gt;&quot;, &quot;title&quot;: '
'&quot;Savjet 1&quot;}]"></oppia-noninteractive-tabs>'
)],
'Could not convert unicode to int: Hello': [(
'<oppia-noninteractive-video autoplay-with-value="false" '
'end-with-value="0" start-with-value="&quot;Hello&'
'quot;" video_id-with-value="&quot;loremipsum&quot;">'
'</oppia-noninteractive-video>'
)],
'Expected unicode HTML string, received 1234': [(
'<oppia-noninteractive-tabs tab_contents-with-value='
'"[{&quot;content&quot;: 1234, &quot;title'
'&quot;: &quot;hello&quot;}, {&quot;'
'content&quot;: &quot;&lt;p&gt;oppia&'
'amp;lt;/p&gt;&quot;, &quot;title&quot;: '
'&quot;Savjet 1&quot;}]"></oppia-noninteractive-tabs>'
)],
'Missing attributes: alt-with-value, Extra attributes: ':
[(
'<oppia-noninteractive-image caption-with-value="&quot;abc'
'&quot;" filepath-with-value="&quot;random.png&'
'quot;"></oppia-noninteractive-image>'
)],
'Video id length is not 11': [(
'<oppia-noninteractive-video autoplay-with-value="false" '
'end-with-value="0" start-with-value="0" video_id-with-value="'
'&quot;lorem&quot;"></oppia-noninteractive-video>'
)]}
self.assertEqual(set(actual_output.keys()), set(expected_output.keys()))
for key in expected_output:
self.assertEqual(set(actual_output[key]), set(expected_output[key]))
def test_validate_customization_args_in_tag(self):
test_cases = [{
'html_string': (
'<p><oppia-noninteractive-link text-with-value="&quot;What '
'is a link?&quot;" url-with-value="&quot;https://link'
'.com&quot;"></oppia-noninteractive-link></p>'
),
'tag_name': 'oppia-noninteractive-link'
}, {
'html_string': (
'<p><oppia-noninteractive-link text-with-value="&quot;'
'What is a link?&quot;" url-with-value="&quot;'
'htt://link.com&quot;"></oppia-noninteractive-link></p>'
),
'tag_name': 'oppia-noninteractive-link'
}, {
'html_string': (
'<oppia-noninteractive-image caption-with-value="&quot;'
'abc&quot;" filepath-with-value="&quot;'
'random.png&quot;"></oppia-noninteractive-image>'
),
'tag_name': 'oppia-noninteractive-image'
}, {
'html_string': (
'<oppia-noninteractive-tabs tab_contents-with-value="[{&'
'quot;content&quot;: &quot;&lt;p&gt;lorem'
'ipsum&lt;/p&gt;&quot;, &quot;title&quot;: '
'&quot;hello&quot;}, [1,2,3]]">'
'</oppia-noninteractive-tabs>'
),
'tag_name': 'oppia-noninteractive-tabs'
}]
actual_output = []
expected_output = [
[],
[(
'Invalid URL: Sanitized URL should start with \'http://\' '
'or \'https://\'; received htt://link.com'
)],
['Missing attributes: alt-with-value, Extra attributes: '],
[u'Expected dict, received [1, 2, 3]']
]
for test_case in test_cases:
html_string = test_case['html_string']
tag_name = test_case['tag_name']
soup = bs4.BeautifulSoup(
html_string.encode(encoding='utf-8'), 'html.parser')
actual_output.append(list(
html_validation_service.validate_customization_args_in_tag(
soup.find(name=tag_name))))
self.assertEqual(actual_output, expected_output)
def test_add_dimensions_to_image_tags(self):
test_cases = [{
'html_content': (
'<p><oppia-noninteractive-image filepath-with-value="&quot;'
'abc1.png&quot;"></oppia-noninteractive-image>Hello this'
' is test case to check that dimensions are added to the oppia'
' noninteractive image tags.</p>'
),
'expected_output': (
u'<p><oppia-noninteractive-image filepath-with-value='
'"&quot;abc1_height_32_width_32.png&'
'quot;"></oppia-noninteractive-image>Hello this is test case'
' to check that dimensions are added to the oppia '
'noninteractive image tags.</p>'
)
}, {
'html_content': (
'<p><oppia-noninteractive-image filepath-with-value="&quot;'
'abc2.png&quot;"></oppia-noninteractive-image>Hello this'
' is test case to check that dimensions are added to the oppia'
' noninteractive image tags.<oppia-noninteractive-image '
'filepath-with-value="&quot;abc3.png&quot;">'
'</oppia-noninteractive-image></p>'
),
'expected_output': (
u'<p><oppia-noninteractive-image filepath-with-value="'
'&quot;abc2_height_32_width_32.png&quot;">'
'</oppia-noninteractive-image>Hello this is test case '
'to check that dimensions are added to the oppia'
' noninteractive image tags.<oppia-noninteractive-image '
'filepath-with-value="&quot;abc3_height_32_width_32.png'
'&quot;"></oppia-noninteractive-image></p>'
)
}, {
'html_content': (
'<p>Hey this is a test case with no images.</p>'
),
'expected_output': (
u'<p>Hey this is a test case with no images.</p>'
)
}, {
'html_content': (
'<p><oppia-noninteractive-image filepath-with-value="&quot;'
'does_not_exist.png&quot;"></oppia-noninteractive-image>'
'Hello this is test case to check that default dimensions '
'(120, 120) are added in case the image does not exist.</p>'
),
'expected_output': (
u'<p><oppia-noninteractive-image filepath-with-value="&'
'quot;does_not_exist_height_120_width_120.png&quot;">'
'</oppia-noninteractive-image>Hello this is test case'
' to check that default dimensions (120, 120) '
'are added in case the image does not exist.</p>'
)
}, {
'html_content': (
'<p><oppia-noninteractive-image filepath-with-value="&quot;'
'does_not_exist.png&quot;"></oppia-noninteractive-image>'
'Hello this is test case to check that default dimensions '
'(120, 120) are added in case the image does not exist.</p>'
),
'expected_output': (
u'<p><oppia-noninteractive-image filepath-with-value="&'
'quot;does_not_exist_height_120_width_120.png&quot;">'
'</oppia-noninteractive-image>Hello this is test case'
' to check that default dimensions (120, 120) '
'are added in case the image does not exist.</p>'
)
}]
exp_id = 'eid'
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
encoding=None) as f:
raw_image = f.read()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.ENTITY_TYPE_EXPLORATION, exp_id))
fs.commit('image/abc1.png', raw_image, mimetype='image/png')
fs.commit('image/abc2.png', raw_image, mimetype='image/png')
fs.commit('image/abc3.png', raw_image, mimetype='image/png')
for test_case in test_cases:
self.assertEqual(
html_validation_service.add_dimensions_to_image_tags(
exp_id, test_case['html_content']),
test_case['expected_output'])
def test_add_dimensions_to_image_tags_with_invalid_filepath_with_value(
self):
observed_log_messages = []
def _mock_logging_function(msg, *args):
"""Mocks logging.error()."""
observed_log_messages.append(msg % args)
logging_swap = self.swap(logging, 'error', _mock_logging_function)
assert_raises_context_manager = self.assertRaisesRegexp(
Exception, 'No JSON object could be decoded')
html_content = (
'<p><oppia-noninteractive-image filepath-with-value="abc1.png">'
'</oppia-noninteractive-image>Hello this is test case to check that'
' dimensions are added to the oppia noninteractive image tags.</p>'
)
exp_id = 'exp_id'
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
encoding=None) as f:
raw_image = f.read()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.ENTITY_TYPE_EXPLORATION, exp_id))
fs.commit('image/abc1.png', raw_image, mimetype='image/png')
with assert_raises_context_manager, logging_swap:
html_validation_service.add_dimensions_to_image_tags(
exp_id, html_content)
self.assertEqual(len(observed_log_messages), 1)
self.assertEqual(
observed_log_messages[0],
'Exploration exp_id failed to load image: abc1.png')
def test_add_dimensions_to_image_tags_when_no_filepath_specified(self):
test_cases = [{
'html_content': (
'<oppia-noninteractive-image caption-with-value="&quot;'
'&quot;" filepath-with-value="">'
'</oppia-noninteractive-image>'
'<p>Some text.</p><p>Some more text.</p><p>Yet more text.</p>'
),
'expected_output': (
'<p>Some text.</p><p>Some more text.</p><p>Yet more text.</p>'
)
}, {
'html_content': (
'<oppia-noninteractive-image caption-with-value="&quot;'
'&quot;">'
'</oppia-noninteractive-image>'
'<p>There is no filepath attr in the above tag.</p>'
),
'expected_output': (
'<p>There is no filepath attr in the above tag.</p>'
)
}, {
'html_content': (
'<oppia-noninteractive-image caption-with-value="&quot;'
'&quot;" filepath-with-value="">'
'</oppia-noninteractive-image>'
'<p>Some text.</p><p>Some more text.</p><p>Yet more text.</p>'
'<p><oppia-noninteractive-image filepath-with-value="&quot;'
'img.png&quot;"></oppia-noninteractive-image>Hello this'
' is test case to check that dimensions are added to the oppia'
' noninteractive image tags.<oppia-noninteractive-image '
'filepath-with-value="&quot;abc3.png&quot;">'
'</oppia-noninteractive-image></p>'
),
'expected_output': (
u'<p>Some text.</p><p>Some more text.</p><p>Yet more text.</p>'
'<p><oppia-noninteractive-image filepath-with-value="'
'&quot;img_height_32_width_32.png&quot;">'
'</oppia-noninteractive-image>Hello this is test case '
'to check that dimensions are added to the oppia'
' noninteractive image tags.<oppia-noninteractive-image '
'filepath-with-value="&quot;abc3_height_32_width_32.png'
'&quot;"></oppia-noninteractive-image></p>'
)
}]
exp_id = 'eid'
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
encoding=None) as f:
raw_image = f.read()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.ENTITY_TYPE_EXPLORATION, exp_id))
fs.commit('image/img.png', raw_image, mimetype='image/png')
fs.commit('image/abc3.png', raw_image, mimetype='image/png')
for test_case in test_cases:
self.assertEqual(
html_validation_service.add_dimensions_to_image_tags(
exp_id, test_case['html_content']),
test_case['expected_output'])
def test_regenerate_image_filename_using_dimensions(self):
regenerated_name = (
html_validation_service.regenerate_image_filename_using_dimensions(
'abc.png', 45, 45))
self.assertEqual(regenerated_name, 'abc_height_45_width_45.png')
def test_svg_string_validation(self):
# A Valid SVG string.
valid_svg_string = (
'<svg version="1.0" xmlns="http://www.w3.org/2000/svg" width="'
'100pt" height="100pt" viewBox="0 0 100 100"><g><path d="M5455 '
'2632 9z"/> </g> </svg>')
self.assertEqual(
html_validation_service.get_invalid_svg_tags_and_attrs(
valid_svg_string), ([], []))
# A Valid SVG string with unicode characters.
valid_svg_string_with_unicode = (
'<svg version="1.0" xmlns="http://www.w3.org/2000/svg" width="'
'100pt" height="100pt" viewBox="0 0 100 100"><g><path d="M5455 '
'2632 9z"/></g><text transform="matrix(1 0 0 -1 0 0)" font-size'
'="884px" font-family="serif">Ì</text></svg>')
self.assertEqual(
html_validation_service.get_invalid_svg_tags_and_attrs(
valid_svg_string_with_unicode), ([], []))
# SVG containing an invalid tag.
invalid_svg_string = '<svg><testtag /></svg>'
self.assertEqual(
html_validation_service.get_invalid_svg_tags_and_attrs(
invalid_svg_string), (['testtag'], []))
# SVG containing an invalid attribute for a valid tag.
invalid_svg_string = '<svg><path d="M5455" danger="h4cK3D!" /></svg>'
self.assertEqual(
html_validation_service.get_invalid_svg_tags_and_attrs(
invalid_svg_string), ([], ['path:danger']))
# SVG containing an invalid attribute in an invalid tag.
invalid_svg_string = '<svg><hack d="M5 1z" danger="XYZ!"></svg>'
self.assertEqual(
html_validation_service.get_invalid_svg_tags_and_attrs(
invalid_svg_string), (['hack'], []))
# SVG containing a valid tag masquerading as an attribute.
invalid_svg_string = '<svg><g fill="#FFFFFF" path="YZ!" /></svg>'
self.assertEqual(
html_validation_service.get_invalid_svg_tags_and_attrs(
invalid_svg_string), ([], ['g:path']))
        # SVG containing an invalid attribute for the parent tag but valid for
# a different tag.
invalid_svg_string = '<svg><path d="M5455" keytimes="h4cK3D!"></svg>'
self.assertEqual(
html_validation_service.get_invalid_svg_tags_and_attrs(
invalid_svg_string), ([], ['path:keytimes']))
def test_add_math_content_to_math_rte_components(self):
test_cases = [{
'html_content': (
'<p>Feedback</p><oppia-noninteractive-math raw_latex-with-valu'
'e="&quot;+,-,-,+&quot;"></oppia-noninteractive-math>'
),
'expected_output': (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot;&'
';quot;}"></oppia-noninteractive-math>'
)
}, {
'html_content': (
'<oppia-noninteractive-math raw_latex-with-value="&quot;+,'
'+,+,+&quot;"></oppia-noninteractive-math>'
),
'expected_output': (
'<oppia-noninteractive-math math_content-with-value="{&'
'quot;raw_latex&quot;: &quot;+,+,+,+&quot;, &'
'quot;svg_filename&quot;: &quot;&quot;}"></oppia'
'-noninteractive-math>'
)
}, {
'html_content': (
'<oppia-noninteractive-math raw_latex-with-value="&quot;'
'(x - a_1)(x - a_2)(x - a_3)...(x - a_n)&quot;"></oppia-'
'noninteractive-math>'
),
'expected_output': (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>'
)
}, {
'html_content': '<p> This is a normal tag </p>',
'expected_output': '<p> This is a normal tag </p>'
}, {
'html_content': (
'<oppia-noninteractive-math math_content-with-value="{&qu'
'ot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a_3)'
'...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>'
),
'expected_output': (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>'
)
}]
for test_case in test_cases:
self.assertEqual(
html_validation_service.add_math_content_to_math_rte_components(
test_case['html_content']),
test_case['expected_output'])
invalid_cases = [{
'html_content': (
'<p>Feedback</p><oppia-noninteractive-math></oppia-nonintera'
'ctive-math>')
}, {
'html_content': (
'<p>Feedback</p><oppia-noninteractive-math raw_latex-with-valu'
'e="++--"></oppia-noninteractive-math>'
)
}]
with self.assertRaisesRegexp(
Exception, 'Invalid math tag with no proper attribute found'):
html_validation_service.add_math_content_to_math_rte_components(
invalid_cases[0]['html_content'])
with self.assertRaisesRegexp(
Exception, 'Invalid raw_latex string found in the math tag'):
html_validation_service.add_math_content_to_math_rte_components(
invalid_cases[1]['html_content'])
def test_validate_math_tags_in_html(self):
"""Test that the validate_math_tags_in_html method validates an
HTML string and returns all the invalid tags.
"""
html_string = (
'<p>Feedback</p><oppia-noninteractive-math raw_latex-with-valu'
'e="+,-,-,+"></oppia-noninteractive-math><p>Feedback</p><oppia-n'
'oninteractive-math></oppia-noninteractive-math><p>Feedback</p><'
'oppia-noninteractive-math invalid_tag-with-value="&quot;+,-'
',-,+&quot;"></oppia-noninteractive-math><p>Feedback</p><opp'
'ia-noninteractive-math raw_latex-with-value="&quot;+,-,-,+&'
'amp;quot;"></oppia-noninteractive-math><p>Feedback</p><oppia-no'
'ninteractive-math raw_latex-with-value="&quot;+,-,-,+&q'
'uot;"></oppia-noninteractive-math>'
)
expected_invalid_tags = [(
'<oppia-noninteractive-math raw_latex-with-value="+,-,-,+"></op'
'pia-noninteractive-math>'
), (
'<oppia-noninteractive-math></oppia-noninteractive-math>'
), (
'<oppia-noninteractive-math invalid_tag-with-value="&quot;+'
',-,-,+&quot;"></oppia-noninteractive-math>'
), (
'<oppia-noninteractive-math raw_latex-with-value="&quot;+,-'
',-,+&quot;"></oppia-noninteractive-math>'
)]
invalid_tags = (
html_validation_service.validate_math_tags_in_html(html_string))
for index, invalid_tag in enumerate(invalid_tags):
self.assertEqual(
python_utils.UNICODE(invalid_tag), expected_invalid_tags[index])
def test_validate_math_tags_in_html_with_attribute_math_content(self):
"""Test that the validate_math_tags_in_html_with_attribute_math_content
method validates an HTML string and returns all the invalid tags.
"""
html_string = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot;&'
';quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math raw_latex-with-value="&quot;+,'
'+,+,+&quot;"></oppia-noninteractive-math>'
'<oppia-noninteractive-math raw_latex-with-value="&quot;'
'(x - a_1)(x - a_2)(x - a_3)...(x - a_n)&quot;"></oppia-'
'noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math></oppia-noninteractive-math>'
'<p>this is a normal tag</p>'
'<oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{'
'raw_latex: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>'
)
expected_invalid_tags = [(
'<oppia-noninteractive-math raw_latex-with-value="&quot;'
'(x - a_1)(x - a_2)(x - a_3)...(x - a_n)&quot;"></oppia-'
'noninteractive-math>'
), (
'<oppia-noninteractive-math></oppia-noninteractive-math>'
), (
'<oppia-noninteractive-math raw_latex-with-value="&quot;+,'
'+,+,+&quot;"></oppia-noninteractive-math>'
), (
'<oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;}"></oppia-noninteractive-math>'
), (
'<oppia-noninteractive-math math_content-with-value="{'
'raw_latex: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>'
)]
invalid_tags = (
html_validation_service.
validate_math_tags_in_html_with_attribute_math_content(
html_string))
self.assertEqual(len(invalid_tags), 5)
for invalid_tag in invalid_tags:
self.assertTrue(
python_utils.UNICODE(invalid_tag) in expected_invalid_tags)
def test_extract_latex_strings_when_all_math_tags_have_empty_svg_filename(
self):
"""Test that get_latex_strings_without_svg_from_html
        extracts LaTeX strings when all the math tags have an empty
        svg_filename field.
"""
html_string = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot;&'
';quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&'
'quot;raw_latex&quot;: &quot;+,+,+,+&quot;, &'
'quot;svg_filename&quot;: &quot;&quot;}"></oppia'
'-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>')
expected_list_of_latex_strings = [
'+,-,-,+', '+,+,+,+', '(x - a_1)(x - a_2)(x - a_3)...(x - a_n)']
expected_list_of_encoded_latex_strings = [
string.encode(encoding='utf-8') for string in (
expected_list_of_latex_strings)]
list_of_latex_string = (
html_validation_service.
get_latex_strings_without_svg_from_html(
html_string))
self.assertEqual(
sorted(list_of_latex_string),
sorted(expected_list_of_encoded_latex_strings))
def test_extract_latex_strings_when_latex_strings_have_unicode_characters(
self):
"""Test that get_latex_strings_without_svg_from_html
        extracts LaTeX strings containing unicode characters.
"""
html_string = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;\u03A7\u03A6'
'&quot;, &quot;svg_filename&quot;: &quot;&'
';quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&'
'quot;raw_latex&quot;: &quot;ÀÁÂÃÄÅÆÇÈ&quot;, &'
'quot;svg_filename&quot;: &quot;&quot;}"></oppia'
'-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n)&quot;, &quot;svg_filename&quot;'
': &quot;&quot;}"></oppia-noninteractive-math>')
expected_list_of_latex_strings = [
'ÀÁÂÃÄÅÆÇÈ', '\u03A7\u03A6',
'(x - a_1)(x - a_2)(x - a_3)...(x - a_n)']
expected_list_of_encoded_latex_strings = [
string.encode(encoding='utf-8') for string in (
expected_list_of_latex_strings)]
list_of_latex_string = (
html_validation_service.
get_latex_strings_without_svg_from_html(
html_string))
self.assertEqual(
sorted(list_of_latex_string),
sorted(expected_list_of_encoded_latex_strings))
def test_extract_latex_strings_when_math_tags_have_non_empty_svg_filename(
self):
"""Test that get_latex_strings_without_svg_from_html
        skips math tags that already have a non-empty svg_filename field and
        extracts LaTeX strings only from the remaining tags.
"""
html_string = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;\\\\frac{x}{y}'
'&quot;, &quot;svg_filename&quot;: &quot;&'
';quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&'
'quot;raw_latex&quot;: &quot;+,+,+,+(x^2)&quot;, &'
'quot;svg_filename&quot;: &quot;abc.svg&quot;}"></oppia'
'-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;\\\\sqrt{x}&quot;, &am'
'p;quot;svg_filename&quot;: &quot;&quot;}"></opp'
'ia-noninteractive-math>')
# Here '+,+,+,+(x^2)' won't be extracted because the corresponding
# math tag has a non-empty svg_filename field.
expected_list_of_latex_strings = ['\\sqrt{x}', '\\frac{x}{y}']
expected_list_of_encoded_latex_strings = [
string.encode(encoding='utf-8') for string in (
expected_list_of_latex_strings)]
list_of_latex_string = (
html_validation_service.
get_latex_strings_without_svg_from_html(
html_string))
self.assertEqual(
sorted(list_of_latex_string),
sorted(expected_list_of_encoded_latex_strings))
def test_extract_latex_strings_when_no_math_tags_are_present(self):
"""Test that get_latex_strings_without_svg_from_html
        returns an empty list when there are no math tags present in the HTML.
"""
html_string_with_no_math = (
'<p><oppia-noninteractive-image filepath-with-value="abc1.png">'
'</oppia-noninteractive-image>Hello this is test case to check that'
' dimensions are added to the oppia noninteractive image tags.</p>'
)
self.assertEqual(
html_validation_service.
get_latex_strings_without_svg_from_html(
html_string_with_no_math), [])
def test_extract_svg_filenames_in_math_rte_components(self):
"""Test that the extract_svg_filenames_in_math_rte_components
method extracts all the filenames from math rich-text components in
html.
"""
html_string_with_filename_having_filename = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';img.svg&quot;}"></oppia-noninteractive-math>'
)
self.assertEqual(
html_validation_service.
extract_svg_filenames_in_math_rte_components(
html_string_with_filename_having_filename), ['img.svg'])
html_string_with_no_filename = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';&quot;}"></oppia-noninteractive-math>'
)
self.assertEqual(
html_validation_service.
extract_svg_filenames_in_math_rte_components(
html_string_with_no_filename), [])
def test_validate_svg_filenames_when_all_filenames_are_valid(self):
"""Test the validate_svg_filenames_in_math_rich_text when valid
        filenames are present for each math rich-text component in the html.
"""
html_string_with_filename_having_filename = (
'<p>Feedback1</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';img1.svg&quot;}"></oppia-noninteractive-math>'
'<p>Feedback2</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';img2.svg&quot;}"></oppia-noninteractive-math>'
)
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'), 'rb',
encoding=None) as f:
raw_image = f.read()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.ENTITY_TYPE_EXPLORATION, 'exp_id1'))
fs.commit('image/img1.svg', raw_image, mimetype='image/svg+xml')
fs.commit('image/img2.svg', raw_image, mimetype='image/svg+xml')
self.assertEqual(
html_validation_service.validate_svg_filenames_in_math_rich_text(
feconf.ENTITY_TYPE_EXPLORATION, 'exp_id1',
html_string_with_filename_having_filename), [])
def test_validate_svg_filenames_when_filenames_are_invalid(self):
"""Test the validate_svg_filenames_in_math_rich_text when
filenames are present but invalid.
"""
html_string_with_filename_having_filename = (
'<p>Feedback1</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';img1.svg&quot;}"></oppia-noninteractive-math>'
'<p>Feedback2</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';img2.svg&quot;}"></oppia-noninteractive-math>'
)
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'), 'rb',
encoding=None) as f:
raw_image = f.read()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.ENTITY_TYPE_EXPLORATION, 'exp_id1'))
fs.commit('image/img1.svg', raw_image, mimetype='image/svg+xml')
self.assertEqual(
html_validation_service.validate_svg_filenames_in_math_rich_text(
feconf.ENTITY_TYPE_EXPLORATION, 'exp_id1',
html_string_with_filename_having_filename),
[(
'<oppia-noninteractive-math math_content-with-value="{&'
'amp;quot;raw_latex&quot;: &quot;+,-,-,+&qu'
'ot;, &quot;svg_filename&quot;: &quot;img2.'
'svg&quot;}"></oppia-noninteractive-math>')])
def test_validate_svg_filenames_when_filenames_are_not_present(self):
"""Test the validate_svg_filenames_in_math_rich_text when
filenames are not present.
"""
html_string_with_filename_having_filename = (
'<p>Feedback1</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';img1.svg&quot;}"></oppia-noninteractive-math>'
'<p>Feedback2</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot'
';&quot;}"></oppia-noninteractive-math>'
)
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'), 'rb',
encoding=None) as f:
raw_image = f.read()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.ENTITY_TYPE_EXPLORATION, 'exp_id1'))
fs.commit('image/img1.svg', raw_image, mimetype='image/svg+xml')
self.assertEqual(
html_validation_service.validate_svg_filenames_in_math_rich_text(
feconf.ENTITY_TYPE_EXPLORATION, 'exp_id1',
html_string_with_filename_having_filename),
[(
'<oppia-noninteractive-math math_content-with-value="{&'
'amp;quot;raw_latex&quot;: &quot;+,-,-,+&qu'
'ot;, &quot;svg_filename&quot;: &quot;'
'&quot;}"></oppia-noninteractive-math>')])
def test_generate_math_svgs_filename(self):
"""Test that the generate_math_svgs_filename method generates the
filenames in the expected pattern.
"""
filename_pattern_regex = (
r'mathImg_[0-9]+_\S{10}_height_[0-9d]+_width_[0-9d]+_vertical_[0-9d'
']+.svg')
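        # Editorial note (illustrative): generated names look like
        # mathImg_<timestamp>_<10 chars>_height_1d345_width_3d124_vertical_
        # 0d124.svg, the same form used in the mapping-extraction test below.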
filenames = []
filenames.append(
html_validation_service.generate_math_svgs_filename(
html_domain.LatexStringSvgImageDimensions(
'4d123', '2d145', '0d124')))
filenames.append(
html_validation_service.generate_math_svgs_filename(
html_domain.LatexStringSvgImageDimensions(
'4d123', '2d145', '0')))
filenames.append(
html_validation_service.generate_math_svgs_filename(
html_domain.LatexStringSvgImageDimensions(
'43d12', '12d14', '0d124')))
for filename in filenames:
self.assertTrue(re.match(filename_pattern_regex, filename))
def test_add_svg_filenames_for_latex_strings_in_html_string(self):
"""Test that the add_svg_filenames_for_latex_strings_in_html_string
        method adds a filename to each math rich-text component that has an
        empty svg_filename.
"""
html_string = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot;&'
';quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&'
'quot;raw_latex&quot;: &quot;+,+,+,+&quot;, &'
'quot;svg_filename&quot;: &quot;&quot;}"></oppia'
'-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)&qu'
'ot;, &quot;svg_filename&quot;: &quot;&quot;}'
'"></oppia-noninteractive-math>')
latex_string_svg_image_data1 = (
html_domain.LatexStringSvgImageData(
'', html_domain.LatexStringSvgImageDimensions(
'1d345', '3d124', '0d124')))
latex_string_svg_image_data2 = (
html_domain.LatexStringSvgImageData(
'', html_domain.LatexStringSvgImageDimensions(
'2d456', '6d124', '0d231')))
latex_string_svg_image_data3 = (
html_domain.LatexStringSvgImageData(
'', html_domain.LatexStringSvgImageDimensions(
'4d123', '23d122', '2d123')))
raw_latex_to_image_data_dict = {
'+,-,-,+': latex_string_svg_image_data1,
'+,+,+,+': latex_string_svg_image_data2,
'(x - a_1)(x - a_2)': latex_string_svg_image_data3
}
converted_html_string = (
html_validation_service.
add_svg_filenames_for_latex_strings_in_html_string(
raw_latex_to_image_data_dict, html_string))
filenames = (
html_validation_service.
extract_svg_filenames_in_math_rte_components(converted_html_string))
self.assertEqual(len(filenames), 3)
filename_pattern_regex = (
r'mathImg_[0-9]+_\S{10}_height_[0-9d]+_width_[0-9d]+_vertical_[0-9d'
']+.svg')
for filename in filenames:
self.assertTrue(re.match(filename_pattern_regex, filename))
def test_extract_svg_filename_latex_mapping_in_math_rte_components(self):
"""Test that extract_svg_filename_latex_mapping_in_math_rte_components
returns all the raw_latex to svg_filename mappings in the HTML.
"""
html_string = (
'<p>Feedback</p><oppia-noninteractive-math math_content-with-v'
'alue="{&quot;raw_latex&quot;: &quot;+,-,-,+'
'&quot;, &quot;svg_filename&quot;: &quot;mathIm'
'g_20207261338jhi1j6rvob_height_1d345_width_3d124_vertical_0d124'
'.svg&quot;}"></oppia-noninteractive-math><oppia-noninteract'
'ive-math math_content-with-value="{&quot;raw_latex&quot;'
': &quot;+,+,+,+&quot;, &quot;svg_filename&quot;:'
' &quot;mathImg_20207261338r3ir43lmfd_height_2d456_width_6d124'
'_vertical_0d231.svg&quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)&qu'
'ot;, &quot;svg_filename&quot;: &quot;mathImg_20207'
'261338imzlvnf23a_height_4d123_width_23d122_vertical_2d123.svg&a'
'mp;quot;}"></oppia-noninteractive-math>'
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)&qu'
'ot;, &quot;svg_filename&quot;: &quot;mathImg_20207'
'261338imzlvnf23a_height_4d123_width_23d122_vertical_2d123.svg&a'
'mp;quot;}"></oppia-noninteractive-math>')
filename1 = (
'mathImg_20207261338jhi1j6rvob_height_1d345_width_3d124_vertical_0'
'd124.svg')
filename2 = (
'mathImg_20207261338r3ir43lmfd_height_2d456_width_6d124_vertical_0'
'd231.svg')
filename3 = (
'mathImg_20207261338imzlvnf23a_height_4d123_width_23d122_vertical_'
'2d123.svg')
expected_output = [
(filename1, '+,-,-,+'), (filename2, '+,+,+,+'),
(filename3, '(x - a_1)(x - a_2)')]
self.assertEqual(
sorted(
html_validation_service.
extract_svg_filename_latex_mapping_in_math_rte_components(
html_string)), sorted(expected_output))
def test_check_for_math_component_in_html(self):
"""Test that the check_for_math_component_in_html method checks for
math-tags in an HTML string and returns a boolean.
"""
test_cases = [{
'html_content': (
'<p>Feedback</p><oppia-noninteractive-math raw_latex-with-valu'
'e="&quot;+,-,-,+&quot;"></oppia-noninteractive-math>'
),
'expected_output': True
}, {
'html_content': (
'<oppia-noninteractive-math raw_latex-with-value="&quot;+,'
'+,+,+&quot;"></oppia-noninteractive-math>'
),
'expected_output': True
}, {
'html_content': (
'<oppia-noninteractive-math raw_latex-with-value="&quot;'
'(x - a_1)(x - a_2)(x - a_3)...(x - a_n)&quot;"></oppia-'
'noninteractive-math>'
),
'expected_output': True
}, {
'html_content': (
'<p><oppia-noninteractive-image filepath-with-value="abc1.png">'
'</oppia-noninteractive-image>Hello this is test case to check'
' that dimensions are added to the oppia noninteractive image '
'tags.</p>'
),
'expected_output': False
}]
for test_case in test_cases:
self.assertEqual(
html_validation_service.check_for_math_component_in_html(
test_case['html_content']),
test_case['expected_output'])
def test_parsable_as_xml(self):
invalid_xml = 'aDRjSzNS'
self.assertEqual(
html_validation_service.is_parsable_as_xml(invalid_xml),
False)
invalid_xml = '123'
self.assertEqual(
html_validation_service.is_parsable_as_xml(invalid_xml),
False)
invalid_xml = False
self.assertEqual(
html_validation_service.is_parsable_as_xml(invalid_xml),
False)
valid_xml = '<svg><path d="0" /></svg>'
self.assertEqual(
html_validation_service.is_parsable_as_xml(valid_xml),
True)
| 47.718083 | 80 | 0.534473 |
14de3a4596b14473a4b6c23e41b9f114685b6d2c | 67155 | py | Python | astropy/time/formats.py | mehrdad-shokri/astropy | abd73b51277694338c8eca7639da956dcd06f207 | ["BSD-3-Clause"] | null | null | null | astropy/time/formats.py | mehrdad-shokri/astropy | abd73b51277694338c8eca7639da956dcd06f207 | ["BSD-3-Clause"] | null | null | null | astropy/time/formats.py | mehrdad-shokri/astropy | abd73b51277694338c8eca7639da956dcd06f207 | ["BSD-3-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import fnmatch
import time
import re
import datetime
import warnings
from decimal import Decimal
from collections import OrderedDict, defaultdict
import numpy as np
import erfa
from astropy.utils.decorators import lazyproperty, classproperty
from astropy.utils.exceptions import AstropyDeprecationWarning
from astropy import units as u
from . import utils
from .utils import day_frac, quantity_day_frac, two_sum, two_product
__all__ = ['TimeFormat', 'TimeJD', 'TimeMJD', 'TimeFromEpoch', 'TimeUnix',
'TimeUnixTai', 'TimeCxcSec', 'TimeGPS', 'TimeDecimalYear',
'TimePlotDate', 'TimeUnique', 'TimeDatetime', 'TimeString',
'TimeISO', 'TimeISOT', 'TimeFITS', 'TimeYearDayTime',
'TimeEpochDate', 'TimeBesselianEpoch', 'TimeJulianEpoch',
'TimeDeltaFormat', 'TimeDeltaSec', 'TimeDeltaJD',
'TimeEpochDateString', 'TimeBesselianEpochString',
'TimeJulianEpochString', 'TIME_FORMATS', 'TIME_DELTA_FORMATS',
'TimezoneInfo', 'TimeDeltaDatetime', 'TimeDatetime64', 'TimeYMDHMS',
'TimeNumeric', 'TimeDeltaNumeric']
__doctest_skip__ = ['TimePlotDate']
# These both get filled in at end after TimeFormat subclasses defined.
# Use an OrderedDict to fix the order in which formats are tried.
# This ensures, e.g., that 'isot' gets tried before 'fits'.
TIME_FORMATS = OrderedDict()
TIME_DELTA_FORMATS = OrderedDict()
# Translations between deprecated FITS timescales defined by
# Rots et al. 2015, A&A 574:A36, and timescales used here.
FITS_DEPRECATED_SCALES = {'TDT': 'tt', 'ET': 'tt',
'GMT': 'utc', 'UT': 'utc', 'IAT': 'tai'}
def _regexify_subfmts(subfmts):
"""
Iterate through each of the sub-formats and try substituting simple
regular expressions for the strptime codes for year, month, day-of-month,
hour, minute, second. If no % characters remain then turn the final string
into a compiled regex. This assumes time formats do not have a % in them.
This is done both to speed up parsing of strings and to allow mixed formats
where strptime does not quite work well enough.
"""
new_subfmts = []
for subfmt_tuple in subfmts:
subfmt_in = subfmt_tuple[1]
if isinstance(subfmt_in, str):
for strptime_code, regex in (('%Y', r'(?P<year>\d\d\d\d)'),
('%m', r'(?P<mon>\d{1,2})'),
('%d', r'(?P<mday>\d{1,2})'),
('%H', r'(?P<hour>\d{1,2})'),
('%M', r'(?P<min>\d{1,2})'),
('%S', r'(?P<sec>\d{1,2})')):
subfmt_in = subfmt_in.replace(strptime_code, regex)
if '%' not in subfmt_in:
subfmt_tuple = (subfmt_tuple[0],
re.compile(subfmt_in + '$'),
subfmt_tuple[2])
new_subfmts.append(subfmt_tuple)
return tuple(new_subfmts)
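# Editorial sketch (not part of the upstream module): a sub-format tuple such
# as ('date', '%Y-%m-%d', '{year:d}-{mon:02d}-{day:02d}') comes back from
# _regexify_subfmts with its middle element compiled to a regex equivalent to
# r'(?P<year>\d\d\d\d)-(?P<mon>\d{1,2})-(?P<mday>\d{1,2})$', so a string like
# '2000-01-01' can be parsed via named groups instead of time.strptime.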
class TimeFormatMeta(type):
"""
Metaclass that adds `TimeFormat` and `TimeDeltaFormat` to the
`TIME_FORMATS` and `TIME_DELTA_FORMATS` registries, respectively.
"""
_registry = TIME_FORMATS
def __new__(mcls, name, bases, members):
cls = super().__new__(mcls, name, bases, members)
# Register time formats that have a name, but leave out astropy_time since
# it is not a user-accessible format and is only used for initialization into
# a different format.
if 'name' in members and cls.name != 'astropy_time':
# FIXME: check here that we're not introducing a collision with
# an existing method or attribute; problem is it could be either
# astropy.time.Time or astropy.time.TimeDelta, and at the point
# where this is run neither of those classes have necessarily been
# constructed yet.
if 'value' in members and not hasattr(members['value'], "fget"):
raise ValueError("If defined, 'value' must be a property")
mcls._registry[cls.name] = cls
if 'subfmts' in members:
cls.subfmts = _regexify_subfmts(members['subfmts'])
return cls
class TimeFormat(metaclass=TimeFormatMeta):
"""
Base class for time representations.
Parameters
----------
val1 : numpy ndarray, list, number, str, or bytes
Values to initialize the time or times. Bytes are decoded as ascii.
val2 : numpy ndarray, list, or number; optional
Value(s) to initialize the time or times. Only used for numerical
input, to help preserve precision.
scale : str
Time scale of input value(s)
precision : int
Precision for seconds as floating point
in_subfmt : str
Select subformat for inputting string times
out_subfmt : str
Select subformat for outputting string times
from_jd : bool
If true then val1, val2 are jd1, jd2
"""
_default_scale = 'utc' # As of astropy 0.4
subfmts = ()
def __init__(self, val1, val2, scale, precision,
in_subfmt, out_subfmt, from_jd=False):
self.scale = scale # validation of scale done later with _check_scale
self.precision = precision
self.in_subfmt = in_subfmt
self.out_subfmt = out_subfmt
self._jd1, self._jd2 = None, None
if from_jd:
self.jd1 = val1
self.jd2 = val2
else:
val1, val2 = self._check_val_type(val1, val2)
self.set_jds(val1, val2)
@classmethod
def _get_allowed_subfmt(cls, subfmt):
"""Get an allowed subfmt for this class, either the input ``subfmt``
if this is valid or '*' as a default. This method gets used in situations
where the format of an existing Time object is changing and so the
out_ or in_subfmt may need to be coerced to the default '*' if that
``subfmt`` is no longer valid.
"""
try:
cls._select_subfmts(subfmt)
except ValueError:
subfmt = '*'
return subfmt
@property
def in_subfmt(self):
return self._in_subfmt
@in_subfmt.setter
def in_subfmt(self, subfmt):
# Validate subfmt value for this class, raises ValueError if not.
self._select_subfmts(subfmt)
self._in_subfmt = subfmt
@property
def out_subfmt(self):
return self._out_subfmt
@out_subfmt.setter
def out_subfmt(self, subfmt):
# Validate subfmt value for this class, raises ValueError if not.
self._select_subfmts(subfmt)
self._out_subfmt = subfmt
@property
def jd1(self):
return self._jd1
@jd1.setter
def jd1(self, jd1):
self._jd1 = _validate_jd_for_storage(jd1)
if self._jd2 is not None:
self._jd1, self._jd2 = _broadcast_writeable(self._jd1, self._jd2)
@property
def jd2(self):
return self._jd2
@jd2.setter
def jd2(self, jd2):
self._jd2 = _validate_jd_for_storage(jd2)
if self._jd1 is not None:
self._jd1, self._jd2 = _broadcast_writeable(self._jd1, self._jd2)
def __len__(self):
return len(self.jd1)
@property
def scale(self):
"""Time scale"""
self._scale = self._check_scale(self._scale)
return self._scale
@scale.setter
def scale(self, val):
self._scale = val
def mask_if_needed(self, value):
if self.masked:
value = np.ma.array(value, mask=self.mask, copy=False)
return value
@property
def mask(self):
if 'mask' not in self.cache:
self.cache['mask'] = np.isnan(self.jd2)
if self.cache['mask'].shape:
self.cache['mask'].flags.writeable = False
return self.cache['mask']
@property
def masked(self):
if 'masked' not in self.cache:
self.cache['masked'] = bool(np.any(self.mask))
return self.cache['masked']
@property
def jd2_filled(self):
return np.nan_to_num(self.jd2) if self.masked else self.jd2
@lazyproperty
def cache(self):
"""
Return the cache associated with this instance.
"""
return defaultdict(dict)
def _check_val_type(self, val1, val2):
"""Input value validation, typically overridden by derived classes"""
# val1 cannot contain nan, but val2 can contain nan
isfinite1 = np.isfinite(val1)
if val1.size > 1: # Calling .all() on a scalar is surprisingly slow
isfinite1 = isfinite1.all() # Note: arr.all() about 3x faster than np.all(arr)
elif val1.size == 0:
isfinite1 = False
ok1 = (val1.dtype.kind == 'f' and val1.dtype.itemsize >= 8
and isfinite1 or val1.size == 0)
ok2 = val2 is None or (
val2.dtype.kind == 'f' and val2.dtype.itemsize >= 8
and not np.any(np.isinf(val2))) or val2.size == 0
if not (ok1 and ok2):
raise TypeError('Input values for {} class must be finite doubles'
.format(self.name))
if getattr(val1, 'unit', None) is not None:
# Convert any quantity-likes to days first, attempting to be
# careful with the conversion, so that, e.g., large numbers of
            # seconds get converted without losing precision because
# 1/86400 is not exactly representable as a float.
val1 = u.Quantity(val1, copy=False)
if val2 is not None:
val2 = u.Quantity(val2, copy=False)
try:
val1, val2 = quantity_day_frac(val1, val2)
except u.UnitsError:
raise u.UnitConversionError(
"only quantities with time units can be "
"used to instantiate Time instances.")
# We now have days, but the format may expect another unit.
# On purpose, multiply with 1./day_unit because typically it is
# 1./erfa.DAYSEC, and inverting it recovers the integer.
# (This conversion will get undone in format's set_jds, hence
# there may be room for optimizing this.)
factor = 1. / getattr(self, 'unit', 1.)
if factor != 1.:
val1, carry = two_product(val1, factor)
carry += val2 * factor
val1, val2 = two_sum(val1, carry)
elif getattr(val2, 'unit', None) is not None:
raise TypeError('Cannot mix float and Quantity inputs')
if val2 is None:
val2 = np.array(0, dtype=val1.dtype)
def asarray_or_scalar(val):
"""
Remove ndarray subclasses since for jd1/jd2 we want a pure ndarray
or a Python or numpy scalar.
"""
return np.asarray(val) if isinstance(val, np.ndarray) else val
return asarray_or_scalar(val1), asarray_or_scalar(val2)
def _check_scale(self, scale):
"""
Return a validated scale value.
If there is a class attribute 'scale' then that defines the default /
        required time scale for this format. In this case, if a scale value
        was provided it must match the class default; otherwise the class
        default is returned.
Otherwise just make sure that scale is in the allowed list of
scales. Provide a different error message if `None` (no value) was
supplied.
"""
if scale is None:
scale = self._default_scale
if scale not in TIME_SCALES:
raise ScaleValueError("Scale value '{}' not in "
"allowed values {}"
.format(scale, TIME_SCALES))
return scale
def set_jds(self, val1, val2):
"""
Set internal jd1 and jd2 from val1 and val2. Must be provided
by derived classes.
"""
raise NotImplementedError
def to_value(self, parent=None, out_subfmt=None):
"""
Return time representation from internal jd1 and jd2 in specified
``out_subfmt``.
This is the base method that ignores ``parent`` and uses the ``value``
property to compute the output. This is done by temporarily setting
``self.out_subfmt`` and calling ``self.value``. This is required for
        legacy Format subclasses prior to astropy 4.0. New code should instead
implement the value functionality in ``to_value()`` and then make the
``value`` property be a simple call to ``self.to_value()``.
Parameters
----------
parent : obj
Parent `~astropy.time.Time` object associated with this
`~astropy.time.TimeFormat` object
out_subfmt : str or `None`
            Output subformat (use existing self.out_subfmt if `None`)
Returns
-------
value : numpy.array, numpy.ma.array
Array or masked array of formatted time representation values
"""
# Get value via ``value`` property, overriding out_subfmt temporarily if needed.
if out_subfmt is not None:
out_subfmt_orig = self.out_subfmt
try:
self.out_subfmt = out_subfmt
value = self.value
finally:
self.out_subfmt = out_subfmt_orig
else:
value = self.value
return self.mask_if_needed(value)
@property
def value(self):
raise NotImplementedError
@classmethod
def _select_subfmts(cls, pattern):
"""
Return a list of subformats where name matches ``pattern`` using
fnmatch.
If no subformat matches pattern then a ValueError is raised. A special
case is a format with no allowed subformats, i.e. subfmts=(), and
pattern='*'. This is OK and happens when this method is used for
validation of an out_subfmt.
"""
if not isinstance(pattern, str):
raise ValueError('subfmt attribute must be a string')
subfmts = [x for x in cls.subfmts if fnmatch.fnmatchcase(x[0], pattern)]
if len(subfmts) == 0 and pattern != '*':
if len(cls.subfmts) == 0:
raise ValueError(f'subformat not allowed for format {cls.name}')
else:
subfmt_names = [x[0] for x in cls.subfmts]
raise ValueError(f'subformat {pattern!r} must match one of '
f'{subfmt_names} for format {cls.name}')
return subfmts
class TimeNumeric(TimeFormat):
subfmts = (
('float', np.float64, None, np.add),
('long', np.longdouble, utils.longdouble_to_twoval,
utils.twoval_to_longdouble),
('decimal', np.object_, utils.decimal_to_twoval,
utils.twoval_to_decimal),
('str', np.str_, utils.decimal_to_twoval, utils.twoval_to_string),
('bytes', np.bytes_, utils.bytes_to_twoval, utils.twoval_to_bytes),
)
def _check_val_type(self, val1, val2):
"""Input value validation, typically overridden by derived classes"""
# Save original state of val2 because the super()._check_val_type below
# may change val2 from None to np.array(0). The value is saved in order
# to prevent a useless and slow call to np.result_type() below in the
# most common use-case of providing only val1.
orig_val2_is_none = val2 is None
if val1.dtype.kind == 'f':
val1, val2 = super()._check_val_type(val1, val2)
elif (not orig_val2_is_none
or not (val1.dtype.kind in 'US'
or (val1.dtype.kind == 'O'
and all(isinstance(v, Decimal) for v in val1.flat)))):
raise TypeError(
'for {} class, input should be doubles, string, or Decimal, '
'and second values are only allowed for doubles.'
.format(self.name))
val_dtype = (val1.dtype if orig_val2_is_none else
np.result_type(val1.dtype, val2.dtype))
subfmts = self._select_subfmts(self.in_subfmt)
for subfmt, dtype, convert, _ in subfmts:
if np.issubdtype(val_dtype, dtype):
break
else:
raise ValueError('input type not among selected sub-formats.')
if convert is not None:
try:
val1, val2 = convert(val1, val2)
except Exception:
raise TypeError(
'for {} class, input should be (long) doubles, string, '
'or Decimal, and second values are only allowed for '
'(long) doubles.'.format(self.name))
return val1, val2
def to_value(self, jd1=None, jd2=None, parent=None, out_subfmt=None):
"""
Return time representation from internal jd1 and jd2.
Subclasses that require ``parent`` or to adjust the jds should
override this method.
"""
# TODO: do this in metaclass.
if self.__class__.value.fget is not self.__class__.to_value:
return self.value
if jd1 is None:
jd1 = self.jd1
if jd2 is None:
jd2 = self.jd2
if out_subfmt is None:
out_subfmt = self.out_subfmt
subfmt = self._select_subfmts(out_subfmt)[0]
kwargs = {}
if subfmt[0] in ('str', 'bytes'):
unit = getattr(self, 'unit', 1)
digits = int(np.ceil(np.log10(unit / np.finfo(float).eps)))
# TODO: allow a way to override the format.
kwargs['fmt'] = f'.{digits}f'
value = subfmt[3](jd1, jd2, **kwargs)
return self.mask_if_needed(value)
value = property(to_value)
class TimeJD(TimeNumeric):
"""
Julian Date time format.
This represents the number of days since the beginning of
the Julian Period.
For example, 2451544.5 in JD is midnight on January 1, 2000.
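    Example (illustrative; added for clarity and using only the public
    `~astropy.time.Time` interface)::
        >>> from astropy.time import Time
        >>> t = Time(2451544.5, format='jd', scale='utc')
        >>> t.iso
        '2000-01-01 00:00:00.000'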
"""
name = 'jd'
def set_jds(self, val1, val2):
self._check_scale(self._scale) # Validate scale.
self.jd1, self.jd2 = day_frac(val1, val2)
class TimeMJD(TimeNumeric):
"""
Modified Julian Date time format.
This represents the number of days since midnight on November 17, 1858.
For example, 51544.0 in MJD is midnight on January 1, 2000.
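    Example (illustrative, added for clarity)::
        >>> from astropy.time import Time
        >>> Time(51544.0, format='mjd', scale='utc').iso
        '2000-01-01 00:00:00.000'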
"""
name = 'mjd'
def set_jds(self, val1, val2):
self._check_scale(self._scale) # Validate scale.
jd1, jd2 = day_frac(val1, val2)
jd1 += erfa.DJM0 # erfa.DJM0=2400000.5 (from erfam.h).
self.jd1, self.jd2 = day_frac(jd1, jd2)
def to_value(self, **kwargs):
jd1 = self.jd1 - erfa.DJM0 # This cannot lose precision.
jd2 = self.jd2
return super().to_value(jd1=jd1, jd2=jd2, **kwargs)
value = property(to_value)
class TimeDecimalYear(TimeNumeric):
"""
Time as a decimal year, with integer values corresponding to midnight
of the first day of each year. For example 2000.5 corresponds to the
ISO time '2000-07-02 00:00:00'.
"""
name = 'decimalyear'
def set_jds(self, val1, val2):
self._check_scale(self._scale) # Validate scale.
sum12, err12 = two_sum(val1, val2)
iy_start = np.trunc(sum12).astype(int)
extra, y_frac = two_sum(sum12, -iy_start)
y_frac += extra + err12
val = (val1 + val2).astype(np.double)
iy_start = np.trunc(val).astype(int)
imon = np.ones_like(iy_start)
iday = np.ones_like(iy_start)
ihr = np.zeros_like(iy_start)
imin = np.zeros_like(iy_start)
isec = np.zeros_like(y_frac)
# Possible enhancement: use np.unique to only compute start, stop
# for unique values of iy_start.
scale = self.scale.upper().encode('ascii')
jd1_start, jd2_start = erfa.dtf2d(scale, iy_start, imon, iday,
ihr, imin, isec)
jd1_end, jd2_end = erfa.dtf2d(scale, iy_start + 1, imon, iday,
ihr, imin, isec)
t_start = Time(jd1_start, jd2_start, scale=self.scale, format='jd')
t_end = Time(jd1_end, jd2_end, scale=self.scale, format='jd')
t_frac = t_start + (t_end - t_start) * y_frac
self.jd1, self.jd2 = day_frac(t_frac.jd1, t_frac.jd2)
def to_value(self, **kwargs):
scale = self.scale.upper().encode('ascii')
iy_start, ims, ids, ihmsfs = erfa.d2dtf(scale, 0, # precision=0
self.jd1, self.jd2_filled)
imon = np.ones_like(iy_start)
iday = np.ones_like(iy_start)
ihr = np.zeros_like(iy_start)
imin = np.zeros_like(iy_start)
isec = np.zeros_like(self.jd1)
# Possible enhancement: use np.unique to only compute start, stop
# for unique values of iy_start.
scale = self.scale.upper().encode('ascii')
jd1_start, jd2_start = erfa.dtf2d(scale, iy_start, imon, iday,
ihr, imin, isec)
jd1_end, jd2_end = erfa.dtf2d(scale, iy_start + 1, imon, iday,
ihr, imin, isec)
# Trying to be precise, but more than float64 not useful.
dt = (self.jd1 - jd1_start) + (self.jd2 - jd2_start)
dt_end = (jd1_end - jd1_start) + (jd2_end - jd2_start)
decimalyear = iy_start + dt / dt_end
return super().to_value(jd1=decimalyear, jd2=np.float64(0.0), **kwargs)
value = property(to_value)
class TimeFromEpoch(TimeNumeric):
"""
Base class for times that represent the interval from a particular
epoch as a floating point multiple of a unit time interval (e.g. seconds
or days).
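    A minimal subclass (illustrative sketch mirroring the concrete formats
    defined below; the format name here is hypothetical) only needs to set
    the epoch class attributes::
        class TimeMissionElapsed(TimeFromEpoch):
            name = 'mission_elapsed'
            unit = 1.0 / erfa.DAYSEC  # value counts seconds
            epoch_val = '2010-01-01 00:00:00'
            epoch_val2 = None
            epoch_scale = 'utc'
            epoch_format = 'iso'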
"""
@classproperty(lazy=True)
def _epoch(cls):
# Ideally we would use `def epoch(cls)` here and not have the instance
# property below. However, this breaks the sphinx API docs generation
# in a way that was not resolved. See #10406 for details.
return Time(cls.epoch_val, cls.epoch_val2, scale=cls.epoch_scale,
format=cls.epoch_format)
@property
def epoch(self):
"""Reference epoch time from which the time interval is measured"""
return self._epoch
def set_jds(self, val1, val2):
"""
Initialize the internal jd1 and jd2 attributes given val1 and val2.
        For a TimeFromEpoch subclass like TimeUnix these will be floats giving
the effective seconds since an epoch time (e.g. 1970-01-01 00:00:00).
"""
# Form new JDs based on epoch time + time from epoch (converted to JD).
# One subtlety that might not be obvious is that 1.000 Julian days in
# UTC can be 86400 or 86401 seconds. For the TimeUnix format the
# assumption is that every day is exactly 86400 seconds, so this is, in
# principle, doing the math incorrectly, *except* that it matches the
# definition of Unix time which does not include leap seconds.
# note: use divisor=1./self.unit, since this is either 1 or 1/86400,
# and 1/86400 is not exactly representable as a float64, so multiplying
# by that will cause rounding errors. (But inverting it as a float64
# recovers the exact number)
day, frac = day_frac(val1, val2, divisor=1. / self.unit)
jd1 = self.epoch.jd1 + day
jd2 = self.epoch.jd2 + frac
# For the usual case that scale is the same as epoch_scale, we only need
# to ensure that abs(jd2) <= 0.5. Since abs(self.epoch.jd2) <= 0.5 and
# abs(frac) <= 0.5, we can do simple (fast) checks and arithmetic here
# without another call to day_frac(). Note also that `round(jd2.item())`
# is about 10x faster than `np.round(jd2)`` for a scalar.
if self.epoch.scale == self.scale:
jd1_extra = np.round(jd2) if jd2.shape else round(jd2.item())
jd1 += jd1_extra
jd2 -= jd1_extra
self.jd1, self.jd2 = jd1, jd2
return
# Create a temporary Time object corresponding to the new (jd1, jd2) in
# the epoch scale (e.g. UTC for TimeUnix) then convert that to the
# desired time scale for this object.
#
# A known limitation is that the transform from self.epoch_scale to
# self.scale cannot involve any metadata like lat or lon.
try:
tm = getattr(Time(jd1, jd2, scale=self.epoch_scale,
format='jd'), self.scale)
except Exception as err:
raise ScaleValueError("Cannot convert from '{}' epoch scale '{}'"
"to specified scale '{}', got error:\n{}"
.format(self.name, self.epoch_scale,
self.scale, err)) from err
self.jd1, self.jd2 = day_frac(tm._time.jd1, tm._time.jd2)
def to_value(self, parent=None, **kwargs):
# Make sure that scale is the same as epoch scale so we can just
# subtract the epoch and convert
if self.scale != self.epoch_scale:
if parent is None:
raise ValueError('cannot compute value without parent Time object')
try:
tm = getattr(parent, self.epoch_scale)
except Exception as err:
raise ScaleValueError("Cannot convert from '{}' epoch scale '{}'"
"to specified scale '{}', got error:\n{}"
.format(self.name, self.epoch_scale,
self.scale, err)) from err
jd1, jd2 = tm._time.jd1, tm._time.jd2
else:
jd1, jd2 = self.jd1, self.jd2
# This factor is guaranteed to be exactly representable, which
# means time_from_epoch1 is calculated exactly.
factor = 1. / self.unit
time_from_epoch1 = (jd1 - self.epoch.jd1) * factor
time_from_epoch2 = (jd2 - self.epoch.jd2) * factor
return super().to_value(jd1=time_from_epoch1, jd2=time_from_epoch2, **kwargs)
value = property(to_value)
@property
def _default_scale(self):
return self.epoch_scale
class TimeUnix(TimeFromEpoch):
"""
Unix time: seconds from 1970-01-01 00:00:00 UTC, ignoring leap seconds.
For example, 946684800.0 in Unix time is midnight on January 1, 2000.
NOTE: this quantity is not exactly unix time and differs from the strict
POSIX definition by up to 1 second on days with a leap second. POSIX
unix time actually jumps backward by 1 second at midnight on leap second
days while this class value is monotonically increasing at 86400 seconds
per UTC day.
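    Example (illustrative, added for clarity)::
        >>> from astropy.time import Time
        >>> t = Time('2000-01-01 00:00:00', scale='utc')
        >>> t.unix  # doctest: +FLOAT_CMP
        946684800.0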
"""
name = 'unix'
unit = 1.0 / erfa.DAYSEC # in days (1 day == 86400 seconds)
epoch_val = '1970-01-01 00:00:00'
epoch_val2 = None
epoch_scale = 'utc'
epoch_format = 'iso'
class TimeUnixTai(TimeUnix):
"""
Seconds from 1970-01-01 00:00:08 TAI (see notes), including leap seconds.
This will generally differ from Unix time by the cumulative integral number
of leap seconds since 1970-01-01 UTC. This convention matches the definition
for linux CLOCK_TAI (https://www.cl.cam.ac.uk/~mgk25/posix-clocks.html).
Caveats:
- Before 1972, fractional adjustments to UTC were made, so the difference
between ``unix`` and ``unix_tai`` time is no longer an integer.
- Because of the fractional adjustments, to be very precise, ``unix_tai``
is the number of seconds since ``1970-01-01 00:00:08 TAI`` or equivalently
``1969-12-31 23:59:59.999918 UTC``. The difference between TAI and UTC
at that epoch was 8.000082 sec.
- On the day of a leap second the difference between ``unix`` and ``unix_tai``
times increases linearly through the day by 1.0. See also the
documentation for the `~astropy.time.TimeUnix` class.
Examples
--------
>>> from astropy.time import Time
>>> t = Time('2020-01-01', scale='utc')
>>> t.unix_tai - t.unix
29.0
>>> t = Time('1970-01-01', scale='utc')
>>> t.unix_tai - t.unix # doctest: +FLOAT_CMP
8.200000198854696e-05
"""
name = 'unix_tai'
epoch_val = '1970-01-01 00:00:08'
epoch_scale = 'tai'
class TimeCxcSec(TimeFromEpoch):
"""
Chandra X-ray Center seconds from 1998-01-01 00:00:00 TT.
For example, 63072064.184 is midnight on January 1, 2000.
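    Example (illustrative, added for clarity)::
        >>> from astropy.time import Time
        >>> t = Time('2000-01-01 00:00:00', scale='utc')
        >>> t.cxcsec  # doctest: +FLOAT_CMP
        63072064.184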
"""
name = 'cxcsec'
unit = 1.0 / erfa.DAYSEC # in days (1 day == 86400 seconds)
epoch_val = '1998-01-01 00:00:00'
epoch_val2 = None
epoch_scale = 'tt'
epoch_format = 'iso'
class TimeGPS(TimeFromEpoch):
"""GPS time: seconds from 1980-01-06 00:00:00 UTC
For example, 630720013.0 is midnight on January 1, 2000.
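    Example (illustrative, added for clarity)::
        >>> from astropy.time import Time
        >>> t = Time('2000-01-01 00:00:00', scale='utc')
        >>> t.gps  # doctest: +FLOAT_CMP
        630720013.0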
Notes
=====
This implementation is strictly a representation of the number of seconds
(including leap seconds) since midnight UTC on 1980-01-06. GPS can also be
considered as a time scale which is ahead of TAI by a fixed offset
(to within about 100 nanoseconds).
For details, see https://www.usno.navy.mil/USNO/time/gps/usno-gps-time-transfer
"""
name = 'gps'
unit = 1.0 / erfa.DAYSEC # in days (1 day == 86400 seconds)
epoch_val = '1980-01-06 00:00:19'
# above epoch is the same as Time('1980-01-06 00:00:00', scale='utc').tai
epoch_val2 = None
epoch_scale = 'tai'
epoch_format = 'iso'
class TimePlotDate(TimeFromEpoch):
"""
Matplotlib `~matplotlib.pyplot.plot_date` input:
1 + number of days from 0001-01-01 00:00:00 UTC
This can be used directly in the matplotlib `~matplotlib.pyplot.plot_date`
function::
>>> import matplotlib.pyplot as plt
>>> jyear = np.linspace(2000, 2001, 20)
>>> t = Time(jyear, format='jyear', scale='utc')
>>> plt.plot_date(t.plot_date, jyear)
>>> plt.gcf().autofmt_xdate() # orient date labels at a slant
>>> plt.draw()
For example, 730120.0003703703 is midnight on January 1, 2000.
"""
# This corresponds to the zero reference time for matplotlib plot_date().
# Note that TAI and UTC are equivalent at the reference time.
name = 'plot_date'
unit = 1.0
epoch_val = 1721424.5 # Time('0001-01-01 00:00:00', scale='tai').jd - 1
epoch_val2 = None
epoch_scale = 'utc'
epoch_format = 'jd'
class TimeStardate(TimeFromEpoch):
"""
Stardate: date units from 2318-07-05 12:00:00 UTC.
For example, stardate 41153.7 is 00:52 on April 30, 2363.
See http://trekguide.com/Stardates.htm#TNG for calculations and reference points
"""
name = 'stardate'
unit = 0.397766856 # Stardate units per day
epoch_val = '2318-07-05 11:00:00' # Date and time of stardate 00000.00
epoch_val2 = None
epoch_scale = 'tai'
epoch_format = 'iso'
class TimeUnique(TimeFormat):
"""
Base class for time formats that can uniquely create a time object
without requiring an explicit format specifier. This class does
nothing but provide inheritance to identify a class as unique.
"""
class TimeAstropyTime(TimeUnique):
"""
Instantiate date from an Astropy Time object (or list thereof).
This is purely for instantiating from a Time object. The output
format is the same as the first time instance.
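    A minimal sketch (illustrative; the format of ``t1`` is kept while the
    scale is converted)::
      >>> from astropy.time import Time
      >>> t1 = Time('2010-01-01', format='isot', scale='utc')
      >>> t2 = Time(t1, scale='tai')
      >>> t2.format, t2.scale
      ('isot', 'tai')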
"""
name = 'astropy_time'
def __new__(cls, val1, val2, scale, precision,
in_subfmt, out_subfmt, from_jd=False):
"""
Use __new__ instead of __init__ to output a class instance that
is the same as the class of the first Time object in the list.
"""
val1_0 = val1.flat[0]
if not (isinstance(val1_0, Time) and all(type(val) is type(val1_0)
for val in val1.flat)):
raise TypeError('Input values for {} class must all be same '
'astropy Time type.'.format(cls.name))
if scale is None:
scale = val1_0.scale
if val1.shape:
vals = [getattr(val, scale)._time for val in val1]
jd1 = np.concatenate([np.atleast_1d(val.jd1) for val in vals])
jd2 = np.concatenate([np.atleast_1d(val.jd2) for val in vals])
# Collect individual location values and merge into a single location.
if any(tm.location is not None for tm in val1):
if any(tm.location is None for tm in val1):
raise ValueError('cannot concatenate times unless all locations '
'are set or no locations are set')
locations = []
for tm in val1:
location = np.broadcast_to(tm.location, tm._time.jd1.shape,
subok=True)
locations.append(np.atleast_1d(location))
location = np.concatenate(locations)
else:
location = None
else:
val = getattr(val1_0, scale)._time
jd1, jd2 = val.jd1, val.jd2
location = val1_0.location
OutTimeFormat = val1_0._time.__class__
self = OutTimeFormat(jd1, jd2, scale, precision, in_subfmt, out_subfmt,
from_jd=True)
# Make a temporary hidden attribute to transfer location back to the
# parent Time object where it needs to live.
self._location = location
return self
class TimeDatetime(TimeUnique):
"""
Represent date as Python standard library `~datetime.datetime` object
Example::
>>> from astropy.time import Time
>>> from datetime import datetime
>>> t = Time(datetime(2000, 1, 2, 12, 0, 0), scale='utc')
>>> t.iso
'2000-01-02 12:00:00.000'
>>> t.tt.datetime
datetime.datetime(2000, 1, 2, 12, 1, 4, 184000)
"""
name = 'datetime'
def _check_val_type(self, val1, val2):
if not all(isinstance(val, datetime.datetime) for val in val1.flat):
raise TypeError('Input values for {} class must be '
'datetime objects'.format(self.name))
if val2 is not None:
raise ValueError(
f'{self.name} objects do not accept a val2 but you provided {val2}')
return val1, None
def set_jds(self, val1, val2):
"""Convert datetime object contained in val1 to jd1, jd2"""
# Iterate through the datetime objects, getting year, month, etc.
iterator = np.nditer([val1, None, None, None, None, None, None],
flags=['refs_ok', 'zerosize_ok'],
op_dtypes=[None] + 5*[np.intc] + [np.double])
for val, iy, im, id, ihr, imin, dsec in iterator:
dt = val.item()
if dt.tzinfo is not None:
dt = (dt - dt.utcoffset()).replace(tzinfo=None)
iy[...] = dt.year
im[...] = dt.month
id[...] = dt.day
ihr[...] = dt.hour
imin[...] = dt.minute
dsec[...] = dt.second + dt.microsecond / 1e6
jd1, jd2 = erfa.dtf2d(self.scale.upper().encode('ascii'),
*iterator.operands[1:])
self.jd1, self.jd2 = day_frac(jd1, jd2)
def to_value(self, timezone=None, parent=None, out_subfmt=None):
"""
Convert to (potentially timezone-aware) `~datetime.datetime` object.
If ``timezone`` is not ``None``, return a timezone-aware datetime
object.
Parameters
----------
timezone : {`~datetime.tzinfo`, None}, optional
If not `None`, return timezone-aware datetime.
Returns
-------
`~datetime.datetime`
If ``timezone`` is not ``None``, output will be timezone-aware.
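        Examples
        --------
        A minimal sketch (illustrative), using the public ``Time.to_datetime``
        wrapper around this method::
          >>> from astropy.time import Time, TimezoneInfo
          >>> import astropy.units as u
          >>> t = Time('2000-01-01 12:00:00', scale='utc')
          >>> t.to_datetime(timezone=TimezoneInfo(utc_offset=1 * u.hour)).hour
          13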
"""
if out_subfmt is not None:
# Out_subfmt not allowed for this format, so raise the standard
# exception by trying to validate the value.
self._select_subfmts(out_subfmt)
if timezone is not None:
if self._scale != 'utc':
raise ScaleValueError("scale is {}, must be 'utc' when timezone "
"is supplied.".format(self._scale))
# Rather than define a value property directly, we have a function,
# since we want to be able to pass in timezone information.
scale = self.scale.upper().encode('ascii')
iys, ims, ids, ihmsfs = erfa.d2dtf(scale, 6, # 6 for microsec
self.jd1, self.jd2_filled)
ihrs = ihmsfs['h']
imins = ihmsfs['m']
isecs = ihmsfs['s']
ifracs = ihmsfs['f']
iterator = np.nditer([iys, ims, ids, ihrs, imins, isecs, ifracs, None],
flags=['refs_ok', 'zerosize_ok'],
op_dtypes=7*[None] + [object])
for iy, im, id, ihr, imin, isec, ifracsec, out in iterator:
if isec >= 60:
raise ValueError('Time {} is within a leap second but datetime '
'does not support leap seconds'
.format((iy, im, id, ihr, imin, isec, ifracsec)))
if timezone is not None:
out[...] = datetime.datetime(iy, im, id, ihr, imin, isec, ifracsec,
tzinfo=TimezoneInfo()).astimezone(timezone)
else:
out[...] = datetime.datetime(iy, im, id, ihr, imin, isec, ifracsec)
return self.mask_if_needed(iterator.operands[-1])
value = property(to_value)
class TimeYMDHMS(TimeUnique):
"""
ymdhms: A Time format to represent Time as year, month, day, hour,
minute, second (thus the name ymdhms).
Acceptable inputs must have keys or column names in the "YMDHMS" set of
    ``year``, ``month``, ``day``, ``hour``, ``minute``, ``second``:
- Dict with keys in the YMDHMS set
- NumPy structured array, record array or astropy Table, or single row
of those types, with column names in the YMDHMS set
One can supply a subset of the YMDHMS values, for instance only 'year',
'month', and 'day'. Inputs have the following defaults::
'month': 1, 'day': 1, 'hour': 0, 'minute': 0, 'second': 0
When the input is supplied as a ``dict`` then each value can be either a
scalar value or an array. The values will be broadcast to a common shape.
Example::
>>> from astropy.time import Time
>>> t = Time({'year': 2015, 'month': 2, 'day': 3,
... 'hour': 12, 'minute': 13, 'second': 14.567},
... scale='utc')
>>> t.iso
'2015-02-03 12:13:14.567'
>>> t.ymdhms.year
2015
"""
name = 'ymdhms'
def _check_val_type(self, val1, val2):
"""
This checks inputs for the YMDHMS format.
        It is a bit more complex than most format checkers because of the flexible
input that is allowed. Also, it actually coerces ``val1`` into an appropriate
dict of ndarrays that can be used easily by ``set_jds()``. This is useful
because it makes it easy to get default values in that routine.
Parameters
----------
val1 : ndarray or None
val2 : ndarray or None
Returns
-------
val1_as_dict, val2 : val1 as dict or None, val2 is always None
"""
if val2 is not None:
raise ValueError('val2 must be None for ymdhms format')
ymdhms = ['year', 'month', 'day', 'hour', 'minute', 'second']
if val1.dtype.names:
# Convert to a dict of ndarray
val1_as_dict = {name: val1[name] for name in val1.dtype.names}
elif val1.shape == (0,):
# Input was empty list [], so set to None and set_jds will handle this
return None, None
elif (val1.dtype.kind == 'O'
and val1.shape == ()
and isinstance(val1.item(), dict)):
# Code gets here for input as a dict. The dict input
# can be either scalar values or N-d arrays.
# Extract the item (which is a dict) and broadcast values to the
# same shape here.
names = val1.item().keys()
values = val1.item().values()
val1_as_dict = {name: value for name, value
in zip(names, np.broadcast_arrays(*values))}
else:
raise ValueError('input must be dict or table-like')
# Check that the key names now are good.
names = val1_as_dict.keys()
required_names = ymdhms[:len(names)]
def comma_repr(vals):
return ', '.join(repr(val) for val in vals)
bad_names = set(names) - set(ymdhms)
if bad_names:
raise ValueError(f'{comma_repr(bad_names)} not allowed as YMDHMS key name(s)')
if set(names) != set(required_names):
raise ValueError(f'for {len(names)} input key names '
f'you must supply {comma_repr(required_names)}')
return val1_as_dict, val2
def set_jds(self, val1, val2):
if val1 is None:
# Input was empty list []
jd1 = np.array([], dtype=np.float64)
jd2 = np.array([], dtype=np.float64)
else:
jd1, jd2 = erfa.dtf2d(self.scale.upper().encode('ascii'),
val1['year'],
val1.get('month', 1),
val1.get('day', 1),
val1.get('hour', 0),
val1.get('minute', 0),
val1.get('second', 0))
self.jd1, self.jd2 = day_frac(jd1, jd2)
@property
def value(self):
scale = self.scale.upper().encode('ascii')
iys, ims, ids, ihmsfs = erfa.d2dtf(scale, 9,
self.jd1, self.jd2_filled)
out = np.empty(self.jd1.shape, dtype=[('year', 'i4'),
('month', 'i4'),
('day', 'i4'),
('hour', 'i4'),
('minute', 'i4'),
('second', 'f8')])
out['year'] = iys
out['month'] = ims
out['day'] = ids
out['hour'] = ihmsfs['h']
out['minute'] = ihmsfs['m']
out['second'] = ihmsfs['s'] + ihmsfs['f'] * 10**(-9)
out = out.view(np.recarray)
return self.mask_if_needed(out)
class TimezoneInfo(datetime.tzinfo):
"""
Subclass of the `~datetime.tzinfo` object, used in the
to_datetime method to specify timezones.
It may be safer in most cases to use a timezone database package like
pytz rather than defining your own timezones - this class is mainly
a workaround for users without pytz.
"""
@u.quantity_input(utc_offset=u.day, dst=u.day)
def __init__(self, utc_offset=0 * u.day, dst=0 * u.day, tzname=None):
"""
Parameters
----------
utc_offset : `~astropy.units.Quantity`, optional
Offset from UTC in days. Defaults to zero.
dst : `~astropy.units.Quantity`, optional
Daylight Savings Time offset in days. Defaults to zero
(no daylight savings).
tzname : str or `None`, optional
Name of timezone
Examples
--------
>>> from datetime import datetime
>>> from astropy.time import TimezoneInfo # Specifies a timezone
>>> import astropy.units as u
>>> utc = TimezoneInfo() # Defaults to UTC
>>> utc_plus_one_hour = TimezoneInfo(utc_offset=1*u.hour) # UTC+1
>>> dt_aware = datetime(2000, 1, 1, 0, 0, 0, tzinfo=utc_plus_one_hour)
>>> print(dt_aware)
2000-01-01 00:00:00+01:00
>>> print(dt_aware.astimezone(utc))
1999-12-31 23:00:00+00:00
"""
if utc_offset == 0 and dst == 0 and tzname is None:
tzname = 'UTC'
self._utcoffset = datetime.timedelta(utc_offset.to_value(u.day))
self._tzname = tzname
self._dst = datetime.timedelta(dst.to_value(u.day))
def utcoffset(self, dt):
return self._utcoffset
def tzname(self, dt):
return str(self._tzname)
def dst(self, dt):
return self._dst
class TimeString(TimeUnique):
"""
Base class for string-like time representations.
This class assumes that anything following the last decimal point to the
right is a fraction of a second.
    This is a reference implementation that can be made much faster with effort.
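    For example (an illustrative sketch; the default precision of 3 keeps three
    digits of the fractional seconds in the ISO subclass below)::
      >>> from astropy.time import Time
      >>> Time('2010-01-01 00:00:00.123456', format='iso').iso
      '2010-01-01 00:00:00.123'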
"""
def _check_val_type(self, val1, val2):
if val1.dtype.kind not in ('S', 'U') and val1.size:
raise TypeError('Input values for {} class must be strings'
.format(self.name))
if val2 is not None:
raise ValueError(
f'{self.name} objects do not accept a val2 but you provided {val2}')
return val1, None
def parse_string(self, timestr, subfmts):
"""Read time from a single string, using a set of possible formats."""
# Datetime components required for conversion to JD by ERFA, along
# with the default values.
components = ('year', 'mon', 'mday', 'hour', 'min', 'sec')
defaults = (None, 1, 1, 0, 0, 0)
# Assume that anything following "." on the right side is a
# floating fraction of a second.
try:
idot = timestr.rindex('.')
except Exception:
fracsec = 0.0
else:
timestr, fracsec = timestr[:idot], timestr[idot:]
fracsec = float(fracsec)
for _, strptime_fmt_or_regex, _ in subfmts:
if isinstance(strptime_fmt_or_regex, str):
try:
tm = time.strptime(timestr, strptime_fmt_or_regex)
except ValueError:
continue
else:
vals = [getattr(tm, 'tm_' + component)
for component in components]
else:
tm = re.match(strptime_fmt_or_regex, timestr)
if tm is None:
continue
tm = tm.groupdict()
vals = [int(tm.get(component, default)) for component, default
in zip(components, defaults)]
# Add fractional seconds
vals[-1] = vals[-1] + fracsec
return vals
else:
raise ValueError('Time {} does not match {} format'
.format(timestr, self.name))
def set_jds(self, val1, val2):
"""Parse the time strings contained in val1 and set jd1, jd2"""
# Select subformats based on current self.in_subfmt
subfmts = self._select_subfmts(self.in_subfmt)
# Be liberal in what we accept: convert bytes to ascii.
# Here .item() is needed for arrays with entries of unequal length,
# to strip trailing 0 bytes.
to_string = (str if val1.dtype.kind == 'U' else
lambda x: str(x.item(), encoding='ascii'))
iterator = np.nditer([val1, None, None, None, None, None, None],
flags=['zerosize_ok'],
op_dtypes=[None] + 5 * [np.intc] + [np.double])
for val, iy, im, id, ihr, imin, dsec in iterator:
val = to_string(val)
iy[...], im[...], id[...], ihr[...], imin[...], dsec[...] = (
self.parse_string(val, subfmts))
jd1, jd2 = erfa.dtf2d(self.scale.upper().encode('ascii'),
*iterator.operands[1:])
self.jd1, self.jd2 = day_frac(jd1, jd2)
def str_kwargs(self):
"""
Generator that yields a dict of values corresponding to the
calendar date and time for the internal JD values.
"""
        scale = self.scale.upper().encode('ascii')
iys, ims, ids, ihmsfs = erfa.d2dtf(scale, self.precision,
self.jd1, self.jd2_filled)
# Get the str_fmt element of the first allowed output subformat
_, _, str_fmt = self._select_subfmts(self.out_subfmt)[0]
if '{yday:' in str_fmt:
has_yday = True
else:
has_yday = False
yday = None
ihrs = ihmsfs['h']
imins = ihmsfs['m']
isecs = ihmsfs['s']
ifracs = ihmsfs['f']
for iy, im, id, ihr, imin, isec, ifracsec in np.nditer(
[iys, ims, ids, ihrs, imins, isecs, ifracs],
flags=['zerosize_ok']):
if has_yday:
yday = datetime.datetime(iy, im, id).timetuple().tm_yday
yield {'year': int(iy), 'mon': int(im), 'day': int(id),
'hour': int(ihr), 'min': int(imin), 'sec': int(isec),
'fracsec': int(ifracsec), 'yday': yday}
def format_string(self, str_fmt, **kwargs):
"""Write time to a string using a given format.
By default, just interprets str_fmt as a format string,
but subclasses can add to this.
"""
return str_fmt.format(**kwargs)
@property
def value(self):
# Select the first available subformat based on current
# self.out_subfmt
subfmts = self._select_subfmts(self.out_subfmt)
_, _, str_fmt = subfmts[0]
# TODO: fix this ugly hack
if self.precision > 0 and str_fmt.endswith('{sec:02d}'):
str_fmt += '.{fracsec:0' + str(self.precision) + 'd}'
# Try to optimize this later. Can't pre-allocate because length of
# output could change, e.g. year rolls from 999 to 1000.
outs = []
for kwargs in self.str_kwargs():
outs.append(str(self.format_string(str_fmt, **kwargs)))
return np.array(outs).reshape(self.jd1.shape)
class TimeISO(TimeString):
"""
ISO 8601 compliant date-time format "YYYY-MM-DD HH:MM:SS.sss...".
For example, 2000-01-01 00:00:00.000 is midnight on January 1, 2000.
The allowed subformats are:
- 'date_hms': date + hours, mins, secs (and optional fractional secs)
- 'date_hm': date + hours, mins
- 'date': date
"""
name = 'iso'
subfmts = (('date_hms',
'%Y-%m-%d %H:%M:%S',
# XXX To Do - use strftime for output ??
'{year:d}-{mon:02d}-{day:02d} {hour:02d}:{min:02d}:{sec:02d}'),
('date_hm',
'%Y-%m-%d %H:%M',
'{year:d}-{mon:02d}-{day:02d} {hour:02d}:{min:02d}'),
('date',
'%Y-%m-%d',
'{year:d}-{mon:02d}-{day:02d}'))
def parse_string(self, timestr, subfmts):
# Handle trailing 'Z' for UTC time
if timestr.endswith('Z'):
if self.scale != 'utc':
raise ValueError("Time input terminating in 'Z' must have "
"scale='UTC'")
timestr = timestr[:-1]
return super().parse_string(timestr, subfmts)
class TimeISOT(TimeISO):
"""
ISO 8601 compliant date-time format "YYYY-MM-DDTHH:MM:SS.sss...".
This is the same as TimeISO except for a "T" instead of space between
the date and time.
For example, 2000-01-01T00:00:00.000 is midnight on January 1, 2000.
The allowed subformats are:
- 'date_hms': date + hours, mins, secs (and optional fractional secs)
- 'date_hm': date + hours, mins
- 'date': date
"""
name = 'isot'
subfmts = (('date_hms',
'%Y-%m-%dT%H:%M:%S',
'{year:d}-{mon:02d}-{day:02d}T{hour:02d}:{min:02d}:{sec:02d}'),
('date_hm',
'%Y-%m-%dT%H:%M',
'{year:d}-{mon:02d}-{day:02d}T{hour:02d}:{min:02d}'),
('date',
'%Y-%m-%d',
'{year:d}-{mon:02d}-{day:02d}'))
class TimeYearDayTime(TimeISO):
"""
Year, day-of-year and time as "YYYY:DOY:HH:MM:SS.sss...".
The day-of-year (DOY) goes from 001 to 365 (366 in leap years).
For example, 2000:001:00:00:00.000 is midnight on January 1, 2000.
The allowed subformats are:
- 'date_hms': date + hours, mins, secs (and optional fractional secs)
- 'date_hm': date + hours, mins
- 'date': date
"""
name = 'yday'
subfmts = (('date_hms',
'%Y:%j:%H:%M:%S',
'{year:d}:{yday:03d}:{hour:02d}:{min:02d}:{sec:02d}'),
('date_hm',
'%Y:%j:%H:%M',
'{year:d}:{yday:03d}:{hour:02d}:{min:02d}'),
('date',
'%Y:%j',
'{year:d}:{yday:03d}'))
class TimeDatetime64(TimeISOT):
name = 'datetime64'
def _check_val_type(self, val1, val2):
if not val1.dtype.kind == 'M':
if val1.size > 0:
raise TypeError('Input values for {} class must be '
'datetime64 objects'.format(self.name))
else:
val1 = np.array([], 'datetime64[D]')
if val2 is not None:
raise ValueError(
f'{self.name} objects do not accept a val2 but you provided {val2}')
return val1, None
def set_jds(self, val1, val2):
# If there are any masked values in the ``val1`` datetime64 array
# ('NaT') then stub them with a valid date so downstream parse_string
# will work. The value under the mask is arbitrary but a "modern" date
# is good.
mask = np.isnat(val1)
masked = np.any(mask)
if masked:
val1 = val1.copy()
val1[mask] = '2000'
# Make sure M(onth) and Y(ear) dates will parse and convert to bytestring
if val1.dtype.name in ['datetime64[M]', 'datetime64[Y]']:
val1 = val1.astype('datetime64[D]')
val1 = val1.astype('S')
# Standard ISO string parsing now
super().set_jds(val1, val2)
# Finally apply mask if necessary
if masked:
self.jd2[mask] = np.nan
@property
def value(self):
precision = self.precision
self.precision = 9
ret = super().value
self.precision = precision
return ret.astype('datetime64')
class TimeFITS(TimeString):
"""
FITS format: "[±Y]YYYY-MM-DD[THH:MM:SS[.sss]]".
ISOT but can give signed five-digit year (mostly for negative years);
The allowed subformats are:
- 'date_hms': date + hours, mins, secs (and optional fractional secs)
- 'date': date
- 'longdate_hms': as 'date_hms', but with signed 5-digit year
- 'longdate': as 'date', but with signed 5-digit year
See Rots et al., 2015, A&A 574:A36 (arXiv:1409.7583).
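    Example (an illustrative sketch; the default precision of 3 is assumed)::
      >>> from astropy.time import Time
      >>> Time('2000-01-01T00:00:00', format='fits').fits
      '2000-01-01T00:00:00.000'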
"""
name = 'fits'
subfmts = (
('date_hms',
(r'(?P<year>\d{4})-(?P<mon>\d\d)-(?P<mday>\d\d)T'
r'(?P<hour>\d\d):(?P<min>\d\d):(?P<sec>\d\d(\.\d*)?)'),
'{year:04d}-{mon:02d}-{day:02d}T{hour:02d}:{min:02d}:{sec:02d}'),
('date',
r'(?P<year>\d{4})-(?P<mon>\d\d)-(?P<mday>\d\d)',
'{year:04d}-{mon:02d}-{day:02d}'),
('longdate_hms',
(r'(?P<year>[+-]\d{5})-(?P<mon>\d\d)-(?P<mday>\d\d)T'
r'(?P<hour>\d\d):(?P<min>\d\d):(?P<sec>\d\d(\.\d*)?)'),
'{year:+06d}-{mon:02d}-{day:02d}T{hour:02d}:{min:02d}:{sec:02d}'),
('longdate',
r'(?P<year>[+-]\d{5})-(?P<mon>\d\d)-(?P<mday>\d\d)',
'{year:+06d}-{mon:02d}-{day:02d}'))
# Add the regex that parses the scale and possible realization.
# Support for this is deprecated. Read old style but no longer write
# in this style.
subfmts = tuple(
(subfmt[0],
subfmt[1] + r'(\((?P<scale>\w+)(\((?P<realization>\w+)\))?\))?',
subfmt[2]) for subfmt in subfmts)
def parse_string(self, timestr, subfmts):
"""Read time and deprecated scale if present"""
# Try parsing with any of the allowed sub-formats.
for _, regex, _ in subfmts:
tm = re.match(regex, timestr)
if tm:
break
else:
raise ValueError('Time {} does not match {} format'
.format(timestr, self.name))
tm = tm.groupdict()
# Scale and realization are deprecated and strings in this form
# are no longer created. We issue a warning but still use the value.
if tm['scale'] is not None:
warnings.warn("FITS time strings should no longer have embedded time scale.",
AstropyDeprecationWarning)
# If a scale was given, translate from a possible deprecated
# timescale identifier to the scale used by Time.
fits_scale = tm['scale'].upper()
scale = FITS_DEPRECATED_SCALES.get(fits_scale, fits_scale.lower())
if scale not in TIME_SCALES:
raise ValueError("Scale {!r} is not in the allowed scales {}"
.format(scale, sorted(TIME_SCALES)))
# If no scale was given in the initialiser, set the scale to
# that given in the string. Realization is ignored
# and is only supported to allow old-style strings to be
# parsed.
if self._scale is None:
self._scale = scale
if scale != self.scale:
raise ValueError("Input strings for {} class must all "
"have consistent time scales."
.format(self.name))
return [int(tm['year']), int(tm['mon']), int(tm['mday']),
int(tm.get('hour', 0)), int(tm.get('min', 0)),
float(tm.get('sec', 0.))]
@property
def value(self):
"""Convert times to strings, using signed 5 digit if necessary."""
if 'long' not in self.out_subfmt:
# If we have times before year 0 or after year 9999, we can
# output only in a "long" format, using signed 5-digit years.
jd = self.jd1 + self.jd2
if jd.size and (jd.min() < 1721425.5 or jd.max() >= 5373484.5):
self.out_subfmt = 'long' + self.out_subfmt
return super().value
class TimeEpochDate(TimeNumeric):
"""
    Base class to support floating point Besselian and Julian epoch dates.
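    Example (an illustrative sketch of the Julian-epoch variant)::
      >>> from astropy.time import Time
      >>> Time(2000.0, format='jyear', scale='tt').jd  # doctest: +FLOAT_CMP
      2451545.0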
"""
_default_scale = 'tt' # As of astropy 3.2, this is no longer 'utc'.
def set_jds(self, val1, val2):
self._check_scale(self._scale) # validate scale.
epoch_to_jd = getattr(erfa, self.epoch_to_jd)
jd1, jd2 = epoch_to_jd(val1 + val2)
self.jd1, self.jd2 = day_frac(jd1, jd2)
def to_value(self, **kwargs):
jd_to_epoch = getattr(erfa, self.jd_to_epoch)
value = jd_to_epoch(self.jd1, self.jd2)
return super().to_value(jd1=value, jd2=np.float64(0.0), **kwargs)
value = property(to_value)
class TimeBesselianEpoch(TimeEpochDate):
"""Besselian Epoch year as floating point value(s) like 1950.0"""
name = 'byear'
epoch_to_jd = 'epb2jd'
jd_to_epoch = 'epb'
def _check_val_type(self, val1, val2):
"""Input value validation, typically overridden by derived classes"""
if hasattr(val1, 'to') and hasattr(val1, 'unit'):
raise ValueError("Cannot use Quantities for 'byear' format, "
"as the interpretation would be ambiguous. "
"Use float with Besselian year instead. ")
# FIXME: is val2 really okay here?
return super()._check_val_type(val1, val2)
class TimeJulianEpoch(TimeEpochDate):
"""Julian Epoch year as floating point value(s) like 2000.0"""
name = 'jyear'
unit = erfa.DJY # 365.25, the Julian year, for conversion to quantities
epoch_to_jd = 'epj2jd'
jd_to_epoch = 'epj'
class TimeEpochDateString(TimeString):
"""
Base class to support string Besselian and Julian epoch dates
such as 'B1950.0' or 'J2000.0' respectively.
"""
_default_scale = 'tt' # As of astropy 3.2, this is no longer 'utc'.
def set_jds(self, val1, val2):
epoch_prefix = self.epoch_prefix
# Be liberal in what we accept: convert bytes to ascii.
to_string = (str if val1.dtype.kind == 'U' else
lambda x: str(x.item(), encoding='ascii'))
iterator = np.nditer([val1, None], op_dtypes=[val1.dtype, np.double],
flags=['zerosize_ok'])
for val, years in iterator:
try:
time_str = to_string(val)
epoch_type, year_str = time_str[0], time_str[1:]
year = float(year_str)
if epoch_type.upper() != epoch_prefix:
raise ValueError
except (IndexError, ValueError, UnicodeEncodeError):
raise ValueError('Time {} does not match {} format'
.format(time_str, self.name))
else:
years[...] = year
self._check_scale(self._scale) # validate scale.
epoch_to_jd = getattr(erfa, self.epoch_to_jd)
jd1, jd2 = epoch_to_jd(iterator.operands[-1])
self.jd1, self.jd2 = day_frac(jd1, jd2)
@property
def value(self):
jd_to_epoch = getattr(erfa, self.jd_to_epoch)
years = jd_to_epoch(self.jd1, self.jd2)
# Use old-style format since it is a factor of 2 faster
str_fmt = self.epoch_prefix + '%.' + str(self.precision) + 'f'
outs = [str_fmt % year for year in years.flat]
return np.array(outs).reshape(self.jd1.shape)
class TimeBesselianEpochString(TimeEpochDateString):
"""Besselian Epoch year as string value(s) like 'B1950.0'"""
name = 'byear_str'
epoch_to_jd = 'epb2jd'
jd_to_epoch = 'epb'
epoch_prefix = 'B'
class TimeJulianEpochString(TimeEpochDateString):
"""Julian Epoch year as string value(s) like 'J2000.0'"""
name = 'jyear_str'
epoch_to_jd = 'epj2jd'
jd_to_epoch = 'epj'
epoch_prefix = 'J'
class TimeDeltaFormatMeta(TimeFormatMeta):
_registry = TIME_DELTA_FORMATS
class TimeDeltaFormat(TimeFormat, metaclass=TimeDeltaFormatMeta):
"""Base class for time delta representations"""
def _check_scale(self, scale):
"""
Check that the scale is in the allowed list of scales, or is `None`
"""
if scale is not None and scale not in TIME_DELTA_SCALES:
raise ScaleValueError("Scale value '{}' not in "
"allowed values {}"
.format(scale, TIME_DELTA_SCALES))
return scale
class TimeDeltaNumeric(TimeDeltaFormat, TimeNumeric):
def set_jds(self, val1, val2):
self._check_scale(self._scale) # Validate scale.
self.jd1, self.jd2 = day_frac(val1, val2, divisor=1. / self.unit)
def to_value(self, **kwargs):
# Note that 1/unit is always exactly representable, so the
# following multiplications are exact.
factor = 1. / self.unit
jd1 = self.jd1 * factor
jd2 = self.jd2 * factor
return super().to_value(jd1=jd1, jd2=jd2, **kwargs)
value = property(to_value)
class TimeDeltaSec(TimeDeltaNumeric):
"""Time delta in SI seconds"""
name = 'sec'
unit = 1. / erfa.DAYSEC # for quantity input
class TimeDeltaJD(TimeDeltaNumeric):
"""Time delta in Julian days (86400 SI seconds)"""
name = 'jd'
unit = 1.
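# Illustrative doctest-style sketch (not part of the original module): the two
# numeric delta formats above are related by the 86400 s day.
#   >>> from astropy.time import TimeDelta
#   >>> TimeDelta(1.0, format='jd').sec
#   86400.0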
class TimeDeltaDatetime(TimeDeltaFormat, TimeUnique):
"""Time delta in datetime.timedelta"""
name = 'datetime'
def _check_val_type(self, val1, val2):
if not all(isinstance(val, datetime.timedelta) for val in val1.flat):
raise TypeError('Input values for {} class must be '
'datetime.timedelta objects'.format(self.name))
if val2 is not None:
raise ValueError(
f'{self.name} objects do not accept a val2 but you provided {val2}')
return val1, None
def set_jds(self, val1, val2):
self._check_scale(self._scale) # Validate scale.
iterator = np.nditer([val1, None, None],
flags=['refs_ok', 'zerosize_ok'],
op_dtypes=[None, np.double, np.double])
day = datetime.timedelta(days=1)
for val, jd1, jd2 in iterator:
jd1[...], other = divmod(val.item(), day)
jd2[...] = other / day
self.jd1, self.jd2 = day_frac(iterator.operands[-2],
iterator.operands[-1])
@property
def value(self):
iterator = np.nditer([self.jd1, self.jd2, None],
flags=['refs_ok', 'zerosize_ok'],
op_dtypes=[None, None, object])
for jd1, jd2, out in iterator:
jd1_, jd2_ = day_frac(jd1, jd2)
out[...] = datetime.timedelta(days=jd1_,
microseconds=jd2_ * 86400 * 1e6)
return self.mask_if_needed(iterator.operands[-1])
def _validate_jd_for_storage(jd):
if isinstance(jd, (float, int)):
return np.array(jd, dtype=np.float_)
if (isinstance(jd, np.generic)
and (jd.dtype.kind == 'f' and jd.dtype.itemsize <= 8
or jd.dtype.kind in 'iu')):
return np.array(jd, dtype=np.float_)
elif (isinstance(jd, np.ndarray)
and jd.dtype.kind == 'f'
and jd.dtype.itemsize == 8):
return jd
else:
raise TypeError(
f"JD values must be arrays (possibly zero-dimensional) "
f"of floats but we got {jd!r} of type {type(jd)}")
def _broadcast_writeable(jd1, jd2):
if jd1.shape == jd2.shape:
return jd1, jd2
# When using broadcast_arrays, *both* are flagged with
# warn-on-write, even the one that wasn't modified, and
# require "C" only clears the flag if it actually copied
# anything.
shape = np.broadcast(jd1, jd2).shape
if jd1.shape == shape:
s_jd1 = jd1
else:
s_jd1 = np.require(np.broadcast_to(jd1, shape),
requirements=["C", "W"])
if jd2.shape == shape:
s_jd2 = jd2
else:
s_jd2 = np.require(np.broadcast_to(jd2, shape),
requirements=["C", "W"])
return s_jd1, s_jd2
# Import symbols from core.py that are used in this module. This succeeds
# because __init__.py imports format.py just before core.py.
from .core import Time, TIME_SCALES, TIME_DELTA_SCALES, ScaleValueError # noqa
| 37.791221
| 91
| 0.575802
|
b95d8cb9fb8ee1c5fd170117776397cc55557f8b
| 52,386
|
py
|
Python
|
image_generation.py
|
kiralpoon/TwinGAN
|
af067039d4706312c67a9a38ed7f59a3e53831ae
|
[
"Apache-2.0"
] | 736
|
2018-04-18T05:43:46.000Z
|
2022-03-24T12:43:29.000Z
|
image_generation.py
|
JustinLion83/TwinGAN
|
4e5593445778dfb77af9f815b3f4fcafc35758dc
|
[
"Apache-2.0"
] | 43
|
2018-07-25T03:07:28.000Z
|
2022-03-11T23:26:33.000Z
|
image_generation.py
|
JustinLion83/TwinGAN
|
4e5593445778dfb77af9f815b3f4fcafc35758dc
|
[
"Apache-2.0"
] | 104
|
2018-04-18T08:06:15.000Z
|
2022-02-17T05:51:49.000Z
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""General GAN framework for training image translation/generation networks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import copy
import functools
import math
import os
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.contrib.gan.python.eval.python import sliced_wasserstein_impl as swd
import util_misc
import util_io
from datasets import dataset_factory
from deployment import model_deploy
from libs import ops
from model import model_inheritor
from nets import cyclegan
from nets import cyclegan_dis
from nets import nets_factory
from nets import pggan
#################
# Dataset Flags #
#################
# Inherited from `model_inheritor`.
#################
# Network Flags #
#################
tf.flags.DEFINE_string(
'generator_network', 'pggan',
    'The name of the generator architecture, one of the supported networks under _select_network(), such as "pggan".')
tf.flags.DEFINE_boolean(
'use_conditional_labels', False,
'If true, use conditional_labels in generator/discriminator.')
tf.flags.DEFINE_integer(
'cyclegan_num_channels', 32,
'Number of channels/filters for cyclegan.')
tf.flags.DEFINE_boolean(
'do_self_attention', False,
'If true, adds self attention layer at `self_attention_hw` layers in the encoder, generator, and the discriminator.')
tf.flags.DEFINE_integer(
'self_attention_hw', 64,
'See `do_self_attention`.')
tf.flags.DEFINE_boolean(
'is_growing', None,
    'Used only for PGGAN. If true, then the model is growing. '
    'Note: is_growing and max_number_of_steps do not interact well when the batch size changes halfway through training.')
tf.flags.DEFINE_integer(
'grow_start_number_of_steps', 0,
'The number of training steps when current cycle of growth starts.')
##############
# Loss Flags #
##############
tf.flags.DEFINE_string(
'loss_architecture', 'dragan',
    'The name of the loss architecture, one of "gan", "wgan_gp", "wgan", "dragan", "hinge".')
tf.flags.DEFINE_float(
'gan_weight', 1.0,
'The weight for the GAN losses. Does not include weights for "wgan_gp" or "dragan".')
tf.flags.DEFINE_integer(
'n_critic', 2,
    'The generator is updated once every n_critic rounds and the discriminator is updated on the other rounds. '
    'For example, if n_critic == 2, the generator and discriminator are updated alternately.')
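# Illustrative sketch of the n_critic schedule implemented in _add_optimization
# below: counter values with counter % n_critic == 0 trigger a generator
# update, and every other value triggers a discriminator update.
#   >>> [('G' if i % 3 == 0 else 'D') for i in range(6)]  # n_critic == 3
#   ['G', 'D', 'D', 'G', 'D', 'D']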
tf.flags.DEFINE_float(
'gradient_penalty_lambda', 10,
'Gradient Penalty weight for WGAN GP and DRAGAN model. Default in the papers is 10 for WGAN GP and 10 for DRAGAN.'
'Note that larger values can also lead to unrealistic outputs.')
tf.flags.DEFINE_float(
'wgan_drift_loss_weight', 0.0,
'Drift loss weight for WGAN (and GP) model.')
tf.flags.DEFINE_boolean(
'use_gdrop', False,
    'If true, adds a generalized dropout (gdrop) term in the discriminator. Used by PGGAN to help stabilize training.')
tf.flags.DEFINE_float(
'gdrop_coef', 0.2,
'gdrop parameter. gdrop_strength = gdrop_coef * tf.pow(tf.maximum(generator_loss_cur - gdrop_lim, 0.0), gdrop_exp)')
tf.flags.DEFINE_float(
'gdrop_lim', 0.5,
'gdrop parameter. gdrop_strength = gdrop_coef * tf.pow(tf.maximum(generator_loss_cur - gdrop_lim, 0.0), gdrop_exp)')
tf.flags.DEFINE_float(
'gdrop_exp', 2.0,
'gdrop parameter. gdrop_strength = gdrop_coef * tf.pow(tf.maximum(generator_loss_cur - gdrop_lim, 0.0), gdrop_exp)')
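# Worked example of the gdrop schedule above (illustrative only): with the
# default values (gdrop_coef=0.2, gdrop_lim=0.5, gdrop_exp=2.0), a smoothed
# generator loss of 0.9 yields
#   gdrop_strength = 0.2 * max(0.9 - 0.5, 0.0) ** 2.0 = 0.032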
tf.flags.DEFINE_boolean(
'use_ttur', False,
    'If true, D and G use different learning rates following "Two time-scale update rule for training GANs". The flag '
    '`learning_rate` is assumed to be the generator learning rate. The discriminator learning rate flag is defined below.'
)
tf.flags.DEFINE_float(
'discriminator_learning_rate', 0.0004,
'Only used when `use_ttur` flag is set.'
)
#################
# Logging Flags #
#################
tf.flags.DEFINE_integer(
'log_image_every_n_iter', 1000,
'Every n iteration, output samples of input and generated images.')
tf.flags.DEFINE_integer(
'log_image_n_per_hw', 8,
'Stack n images on each side of the image square. Used along with `log_image_every_n_iter`.')
##############################
# Fine-Tuning and Eval Flags #
##############################
tf.flags.DEFINE_integer(
'eval_every_n_iter_in_training', 0,
'Every n iteration, do evaluation on the generated output.')
tf.flags.DEFINE_boolean(
'calc_inception_score', False,
'If true, calculates the inception score of the generated images.')
tf.flags.DEFINE_boolean(
'calc_swd', False,
    'If true, calculates the sliced Wasserstein distance as described in Sec. 5 of the PGGAN paper.'
)
tf.flags.DEFINE_boolean(
'use_tf_swd', False,
'If true, uses tensorflow native implementation (which has a bug as of tf1.8).'
)
tf.flags.DEFINE_integer(
'swd_num_images', 1024,
'Number of randomly sampled images used to calculate swd. For eval please use a large number, such as 8196.'
)
tf.flags.DEFINE_boolean(
'swd_save_images', False,
'If true, save the generated images for debugging.'
)
tf.flags.DEFINE_string(
'incep_classifier_name', None,
'')
tf.flags.DEFINE_string(
'incep_classifier_path', None,
'')
tf.flags.DEFINE_integer(
'incep_num_classes', None,
'')
tf.flags.DEFINE_boolean(
'output_single_file', False,
'If true, the output mode will only output one file.')
tf.flags.DEFINE_string(
'output_single_file_name', 'output.csv',
'Name of the output file.')
FLAGS = tf.flags.FLAGS
#############
# Constants #
#############
GENERATOR_SCOPE = 'generator'
DISCRIMINATOR_SCOPE = 'discriminator'
GENERATOR_LOSS_COLLECTION = 'GENERATOR_LOSSES'
DISCRIMINATOR_LOSS_COLLECTION = 'DISCRIMINATOR_LOSSES'
CUSTOM_GENERATED_TARGETS = 'custom_generated_targets'
CUSTOM_INPUT_PH = 'custom_input_ph'
GDROP_STRENGTH_VAR_NAME = 'gdrop_strength'
class GanModel(model_inheritor.GeneralModel):
#######################
# Select the dataset #
#######################
def _select_dataset(self):
"""Selects and returns the dataset used for training/eval.
    :return: One or more slim.dataset.Dataset.
"""
dataset = super(GanModel, self)._select_dataset()
if FLAGS.unpaired_target_dataset_name:
target_dataset = dataset_factory.get_dataset(
FLAGS.unpaired_target_dataset_name, FLAGS.dataset_split_name, FLAGS.unpaired_target_dataset_dir)
return (dataset, target_dataset)
else:
return dataset
######################
# Select the network #
######################
def _select_network(self):
get_noise_shape = None
if FLAGS.generator_network == 'pggan':
generator_network_fn = pggan.generator
discriminator_network_fn = pggan.discriminator
get_noise_shape = pggan.get_noise_shape
elif FLAGS.generator_network == 'cyclegan':
generator_network_fn = cyclegan.cyclegan_generator_resnet
discriminator_network_fn = cyclegan_dis.cyclegan_discriminator_resnet
else:
      raise NotImplementedError('Generator network %s is not implemented.' % FLAGS.generator_network)
return {'generator_network_fn': generator_network_fn,
'discriminator_network_fn': discriminator_network_fn,
'get_noise_shape': get_noise_shape, }
####################
# Define the model #
####################
@staticmethod
def _clone_fn(networks, batch_queue, batch_names, data_batched=None, is_training=False, **kwargs):
"""Allows data parallelism by creating multiple clones of network_fn."""
# Get Data
data_batched = super(GanModel, GanModel)._get_data_batched(batch_queue, batch_names, data_batched)
targets = data_batched.get('target')
# Get network functions
generator_network_fn = networks['generator_network_fn']
discriminator_network_fn = networks['discriminator_network_fn']
get_noise_shape = networks['get_noise_shape']
global_step = kwargs['global_step']
# Source will be either None or a tensor which the generator output is conditioned on.
generator_input = data_batched.get('source', None)
# Define kwargs.
generator_kwargs = {'is_training': is_training, 'target_shape': targets.shape}
discriminator_kwargs = {'is_training': is_training}
if FLAGS.generator_network == 'cyclegan':
del generator_kwargs['target_shape']
generator_kwargs['num_outputs'] = targets.shape[-1]
generator_kwargs['num_filters'] = FLAGS.cyclegan_num_channels
elif FLAGS.generator_network == 'pggan':
sources, targets, alpha_grow = GanModel.get_growing_source_and_target(data_batched, global_step)
GanModel._add_pggan_kwargs(data_batched, sources, targets, alpha_grow, generator_kwargs, discriminator_kwargs)
with tf.variable_scope(GENERATOR_SCOPE):
generated_targets, generator_end_points = generator_network_fn(generator_input, **generator_kwargs)
if generator_input is None:
custom_input_ph = tf.placeholder(targets.dtype, shape=get_noise_shape(), name=CUSTOM_INPUT_PH)
else:
custom_input_ph = tf.placeholder(generator_input.dtype, shape=generator_input.shape, name=CUSTOM_INPUT_PH)
with tf.variable_scope(GENERATOR_SCOPE, reuse=True):
not_training_generator_kwargs = copy.copy(generator_kwargs)
not_training_generator_kwargs['is_training'] = False
custom_generated_targets, custom_generator_end_points = generator_network_fn(custom_input_ph,
**not_training_generator_kwargs)
# Do post-processing for outputting the custom output image.
custom_generated_targets = GanModel._post_process_image(custom_generated_targets)
# Change name for convenience during inference.
custom_generated_targets = tf.identity(custom_generated_targets, name=CUSTOM_GENERATED_TARGETS)
with tf.variable_scope(DISCRIMINATOR_SCOPE):
real_target_prediction, real_target_end_points = discriminator_network_fn(targets, **discriminator_kwargs)
with tf.variable_scope(DISCRIMINATOR_SCOPE, reuse=True):
generated_target_prediction, generated_target_end_points = discriminator_network_fn(generated_targets,
**discriminator_kwargs)
# Combine the end points.
end_points = util_misc.combine_dicts({
'generator': generator_end_points,
'discriminator_real': real_target_end_points,
'discriminator_generated': generated_target_end_points
})
if generator_input is not None:
end_points['sources'] = generator_input
end_points['targets'] = targets
end_points[CUSTOM_GENERATED_TARGETS] = custom_generated_targets
end_points[CUSTOM_INPUT_PH] = custom_input_ph
# Define ops for evaluation during training.
if FLAGS.use_tf_swd:
GanModel._prepare_tf_swd(end_points, targets, generated_targets)
#############################
# Specify the loss function #
#############################
GanModel.add_loss(data_batched, end_points, functools.partial(discriminator_network_fn, **discriminator_kwargs))
return end_points
####################
# Define the loss #
####################
@staticmethod
def add_loss(data_batched, end_points, discriminator_network_fn=None):
GanModel.add_gan_loss(end_points['discriminator_generated_prediction'], end_points['discriminator_real_prediction'],
end_points['generator_output'], end_points['targets'], discriminator_network_fn)
if FLAGS.generator_network == 'cyclegan':
tf.logging.log_every_n(tf.logging.INFO, 'Assuming cyclegan has a paired dataset.', 100)
tf.losses.absolute_difference(end_points['targets'], end_points['generator_output'],
scope='l1_loss', loss_collection=GENERATOR_LOSS_COLLECTION)
@staticmethod
def add_gan_loss(generated_prediction, real_prediction, generated_image, real_image, discriminator_network_fn,
name_postfix='', discriminator_var_scope=DISCRIMINATOR_SCOPE, only_real_fake_loss=False,
overall_weight=1.0):
"""This function takes the combined end_points and adds gan losses to the corresponding loss collections."""
assert (generated_prediction is not None and
real_prediction is not None and
generated_image is not None and
real_image is not None)
# Generator fool discriminator loss.
generator_loss_name = 'generator_fool_loss%s' % (name_postfix)
generated_prediction = tf.cast(generated_prediction, tf.float32)
real_prediction = tf.cast(real_prediction, tf.float32)
if FLAGS.loss_architecture in ['wgan_gp', 'wgan', 'hinge']:
# Note the losses below are not returned because it's added to the loss collection already.
generator_fool_loss = tf.negative(tf.reduce_mean(generated_prediction), name=generator_loss_name)
generator_fool_loss = tf.losses.compute_weighted_loss(generator_fool_loss,
weights=FLAGS.gan_weight * overall_weight,
scope=generator_loss_name,
loss_collection=GENERATOR_LOSS_COLLECTION)
else:
assert FLAGS.loss_architecture == 'gan' or FLAGS.loss_architecture == 'dragan'
# Equivalent to maximizing log D(G(z)).
generator_fool_loss = tf.losses.sigmoid_cross_entropy(tf.ones_like(generated_prediction), generated_prediction,
weights=FLAGS.gan_weight * overall_weight,
scope=generator_loss_name,
loss_collection=GENERATOR_LOSS_COLLECTION)
discriminator_loss_name = 'discriminator_loss%s' % (name_postfix)
if FLAGS.loss_architecture == 'wgan_gp' or FLAGS.loss_architecture == 'wgan':
# Discriminator loss from WGAN
discriminator_loss = tf.subtract(tf.reduce_mean(generated_prediction), tf.reduce_mean(real_prediction),
name=discriminator_loss_name)
discriminator_loss = tf.losses.compute_weighted_loss(discriminator_loss,
weights=FLAGS.gan_weight * overall_weight,
scope=discriminator_loss_name,
loss_collection=DISCRIMINATOR_LOSS_COLLECTION)
if only_real_fake_loss:
return
# Adds additional penalty term to keep the scores from drifting too far from zero.
if FLAGS.wgan_drift_loss_weight:
discriminator_drift_loss_name = 'discriminator_drift_loss%s' % (name_postfix)
discriminator_drift_loss = (tf.constant(FLAGS.wgan_drift_loss_weight, dtype=real_prediction.dtype)
* tf.reduce_mean(tf.square(real_prediction, name='discriminator_drift_loss')))
discriminator_drift_loss = tf.losses.compute_weighted_loss(discriminator_drift_loss,
weights=1.0 * overall_weight,
scope=discriminator_drift_loss_name,
loss_collection=DISCRIMINATOR_LOSS_COLLECTION)
########################
# WGAN GP loss #
########################
if FLAGS.loss_architecture == 'wgan_gp':
scope_name = 'discriminator_gradient_penalty%s' % (name_postfix)
gradient_penalty = GanModel._add_wgan_gp_loss(real_image,
generated_image,
discriminator_network_fn,
loss_scope=scope_name,
discriminator_var_scope=discriminator_var_scope,
overall_weight=overall_weight)
elif FLAGS.loss_architecture == 'hinge':
# Discriminator loss from WGAN
discriminator_loss = tf.add(tf.reduce_mean(tf.nn.relu(1+generated_prediction)),
tf.reduce_mean(tf.nn.relu(1-real_prediction)),
name=discriminator_loss_name)
discriminator_loss = tf.losses.compute_weighted_loss(discriminator_loss,
weights=FLAGS.gan_weight * overall_weight,
scope=discriminator_loss_name,
loss_collection=DISCRIMINATOR_LOSS_COLLECTION)
elif FLAGS.loss_architecture == 'gan' or FLAGS.loss_architecture == 'dragan':
# Equivalent to minimizing -(log D(x) + log (1 - D(G(z))))
discriminator_fake_loss = tf.losses.sigmoid_cross_entropy(tf.zeros_like(generated_prediction),
generated_prediction,
weights=FLAGS.gan_weight * overall_weight,
scope='discriminator_fake_loss%s' % (name_postfix),
loss_collection=DISCRIMINATOR_LOSS_COLLECTION)
discriminator_real_loss = tf.losses.sigmoid_cross_entropy(tf.ones_like(real_prediction),
real_prediction,
weights=FLAGS.gan_weight * overall_weight,
scope='discriminator_real_loss%s' % (name_postfix),
loss_collection=DISCRIMINATOR_LOSS_COLLECTION)
if only_real_fake_loss:
return
if FLAGS.loss_architecture == 'dragan':
scope_name = 'discriminator_gradient_penalty%s' % (name_postfix)
gradient_penalty = GanModel._add_dragan_loss(real_image,
discriminator_network_fn,
loss_scope=scope_name,
var_scope=discriminator_var_scope,
overall_weight=overall_weight)
else:
      raise NotImplementedError('unsupported loss architecture: %s' % FLAGS.loss_architecture)
@staticmethod
def _add_wgan_gp_loss(real_image, generated_image, discriminator_network_fn, loss_scope='gradient_penalty',
discriminator_var_scope=DISCRIMINATOR_SCOPE, overall_weight=1.0):
    tf.logging.info('wgan_gp does not interact well when the discriminator uses batch norm (according to the WGAN-GP paper). '
                    'Double check the discriminator architecture if necessary.')
with tf.variable_scope('wgan_gp'):
alpha_shape = tf.TensorShape(
[real_image.shape[0]] + [1 for _ in range(1, len(real_image.shape))])
alpha = tf.random_uniform(shape=alpha_shape, minval=0., maxval=1., name='alpha')
real_image_casted = tf.cast(real_image, generated_image.dtype)
interpolates = real_image_casted + alpha * (generated_image - real_image_casted)
with tf.variable_scope(discriminator_var_scope, reuse=True):
interpolate_prediction, interpolate_end_points = discriminator_network_fn(interpolates)
interpolate_prediction = tf.cast(interpolate_prediction, tf.float32)
interpolate_gradients = tf.gradients(interpolate_prediction, [interpolates])[0]
interpolate_gradients = tf.cast(interpolate_gradients, tf.float32)
interpolate_slopes = tf.sqrt(tf.reduce_sum(tf.square(interpolate_gradients), axis=[1, 2, 3]))
gradient_penalty = tf.reduce_mean(tf.square((interpolate_slopes - 1.0)))
# WGAN-GP loss
gradient_penalty = tf.losses.compute_weighted_loss(gradient_penalty,
weights=FLAGS.gradient_penalty_lambda * overall_weight,
scope=loss_scope,
loss_collection=DISCRIMINATOR_LOSS_COLLECTION)
return gradient_penalty
@staticmethod
def get_perturbed_batch(minibatch):
"""Adds a random noise to each item in the minibatch. Used by dragan loss."""
# Notice that std is calculated within a mini-batch, implying that lower batch size may yield worse performance.
    # tf.nn.moments() returns (mean, variance), so take the square root to get the standard deviation.
    std = tf.sqrt(tf.nn.moments(minibatch, axes=[i for i in range(len(minibatch.shape))])[1])
# Note that different from the "On Convergence and Stability of GANs"paper, which uses N(0,cI) where c ~ 10, here
# we follow "How to Train Your DRAGAN", except that we change random uniform from [0,1] to [-1,1]
# https://github.com/pfnet-research/chainer-gan-lib/blob/master/dragan/updater.py
return minibatch + 0.5 * std * tf.random_uniform(minibatch.shape, minval=-1.0, maxval=1.0, dtype=minibatch.dtype)
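  # Equivalent NumPy sketch of the perturbation above (illustrative only; `x`
  # is assumed to be a float ndarray and `np` is numpy):
  #   x_perturbed = x + 0.5 * x.std() * np.random.uniform(-1.0, 1.0, x.shape)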
@staticmethod
def _add_dragan_loss(real_image, discriminator_network_fn, loss_scope='gradient_penalty',
var_scope=DISCRIMINATOR_SCOPE,
overall_weight=1.0):
tf.logging.info('Dragan does not interact well with batch norm in both generator and discriminator, according to '
'"How to Train Your DRAGAN". Please double check your network setup.')
with tf.variable_scope('dragan'):
alpha_shape = tf.TensorShape([real_image.shape[0]] + [1 for _ in range(1, len(real_image.shape))])
alpha = tf.random_uniform(shape=alpha_shape, minval=0., maxval=1., name='alpha', dtype=real_image.dtype)
difference = GanModel.get_perturbed_batch(real_image) - real_image
interpolates = real_image + alpha * difference
with tf.variable_scope(var_scope, reuse=True):
interpolate_prediction, interpolate_end_points = discriminator_network_fn(interpolates)
interpolate_prediction = tf.cast(interpolate_prediction, tf.float32)
interpolate_gradients = tf.gradients(interpolate_prediction, [interpolates])[0]
interpolate_gradients = tf.cast(interpolate_gradients, tf.float32)
interpolate_slopes = tf.sqrt(tf.reduce_sum(tf.square(interpolate_gradients),
reduction_indices=[i for i in
range(1, len(interpolate_gradients.shape))]))
gradient_penalty = tf.reduce_mean((interpolate_slopes - 1.0) ** 2)
gradient_penalty = tf.losses.compute_weighted_loss(gradient_penalty,
weights=FLAGS.gradient_penalty_lambda * overall_weight,
scope=loss_scope,
loss_collection=DISCRIMINATOR_LOSS_COLLECTION)
return gradient_penalty
################
# Optimization #
################
def _get_generator_variable_scopes(self):
return [GENERATOR_SCOPE]
def _get_discriminator_variable_scopes(self):
return [DISCRIMINATOR_SCOPE]
def _get_generator_variables_to_train(self):
generator_variables_to_train = []
for scope in self._get_generator_variable_scopes():
generator_variables_to_train += self._get_variables_to_train(trainable_scopes=scope)
return generator_variables_to_train
def _get_discriminator_variables_to_train(self):
generator_variables_to_train = []
for scope in self._get_discriminator_variable_scopes():
generator_variables_to_train += self._get_variables_to_train(trainable_scopes=scope)
return generator_variables_to_train
def _check_trainable_vars(self, generator_variables_to_train, discriminator_variables_to_train):
assert len(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)) == (len(generator_variables_to_train) +
len(discriminator_variables_to_train))
@staticmethod
def _configure_learning_rate(num_samples_per_epoch, global_step, start_learning_rate=None):
"""Configures the learning rate.
Args:
num_samples_per_epoch: The number of samples in each epoch of training.
global_step: The global_step tensor.
start_learning_rate: An optional float specifying the starting learning rate.
If unspecified, it uses `FLAGS.learning_rate`.
Returns:
A `Tensor` representing the learning rate.
Raises:
AssertionError: if the flag `learning_rate_decay_type` is not set to fixed.
ValueError: if the flag `learning_rate_decay_type` is not supported.
"""
assert FLAGS.learning_rate_decay_type == 'fixed', 'Only fixed learning rate has been tested in this implementation.'
return super(GanModel, GanModel)._configure_learning_rate(num_samples_per_epoch, global_step,
start_learning_rate=start_learning_rate)
@staticmethod
def _get_items_in_scope(items, scopes):
"""Given a list of update operations and a list of scopes, separate the ops into in-scope and not-in-scope."""
in_scopes = []
not_in_scopes = []
for tensor in items:
is_item_in_scope = False
for scope in scopes:
if tensor.name.startswith(scope):
is_item_in_scope = True
in_scopes.append(tensor)
break
if is_item_in_scope:
continue
else:
not_in_scopes.append(tensor)
return in_scopes, not_in_scopes
@staticmethod
def maybe_apply_gradients(optimizer, gradients, step, update_ops=None):
"""If `gradients` is none, increase `step` by 1. Otherwise apply gradient as normal."""
dependencies = []
if update_ops is not None:
dependencies = update_ops
with tf.control_dependencies(dependencies):
if gradients:
return optimizer.apply_gradients(gradients, global_step=step)
else:
return tf.cast(step.assign(step + 1), tf.bool) # because `apply_gradients()` returns a boolean.
def _get_discriminator_optimizer(self, generator_optimizer, global_step):
d_optimizer = generator_optimizer
if FLAGS.use_ttur:
tf.logging.info('Using TTUR. Generator learning rate: %f, Discriminator learning rate: %f'
% (FLAGS.learning_rate, FLAGS.discriminator_learning_rate))
d_lr = self._configure_learning_rate(0, global_step, start_learning_rate=FLAGS.discriminator_learning_rate)
d_optimizer = self._configure_optimizer(d_lr)
return d_optimizer
def _maybe_add_gdrop_update_op(self, global_step, generator_loss, other_update_ops, summaries):
"""gdrop is used in PGGAN to stabilize training."""
if FLAGS.use_gdrop:
# If there exists gdrop_strength variable, update that.
try:
gdrop_strength = slim.get_unique_variable(GDROP_STRENGTH_VAR_NAME)
except ValueError:
raise ValueError('`gdrop_strength` variable cannot be found!')
else:
# Adding the cond may help for training from checkpoints without gdrop?
gdrop_coef = tf.cond(tf.greater(global_step, 100), lambda: FLAGS.gdrop_coef, # 0.2
lambda: 0.0)
gdrop_lim = FLAGS.gdrop_lim # 0.5
gdrop_exp = FLAGS.gdrop_exp # 2.0
generator_loss_cur = tf.clip_by_value(tf.reduce_mean(generator_loss), 0.0, 1.0, )
ema = tf.train.ExponentialMovingAverage(decay=0.9)
generator_loss_cur_update_op = ema.apply([generator_loss_cur])
with tf.control_dependencies([generator_loss_cur_update_op]):
gdrop_strength_val = gdrop_coef * tf.pow(tf.maximum(generator_loss_cur - gdrop_lim, 0.0), gdrop_exp)
gdrop_strength_assign_op = gdrop_strength.assign(gdrop_strength_val)
other_update_ops.append(gdrop_strength_assign_op)
summaries.add(tf.summary.scalar('gdrop_strength', gdrop_strength))
def _add_optimization(self, clones, optimizer, summaries, update_ops, global_step):
# Variables to train.
generator_variables_to_train = self._get_generator_variables_to_train()
discriminator_variables_to_train = self._get_discriminator_variables_to_train()
self._check_trainable_vars(generator_variables_to_train, discriminator_variables_to_train)
# Check optimizer.
if FLAGS.loss_architecture == 'wgan' and FLAGS.optimizer != 'rmsprop':
tf.logging.warning('It is recommended in WGAN paper that the optimizer should be rmsprop.')
# TTUR
d_optimizer = self._get_discriminator_optimizer(optimizer, global_step)
generator_loss, generator_clones_gradients = model_deploy.optimize_clones(
clones,
optimizer,
gradient_scale=self._get_gradient_scale(),
loss_collection=GENERATOR_LOSS_COLLECTION,
var_list=generator_variables_to_train)
discriminator_loss, discriminator_clones_gradients = model_deploy.optimize_clones(
clones,
d_optimizer,
gradient_scale=self._get_gradient_scale(),
loss_collection=DISCRIMINATOR_LOSS_COLLECTION,
var_list=discriminator_variables_to_train)
# Add losses to summary.
summaries.add(tf.summary.scalar('generator_loss', generator_loss))
summaries.add(tf.summary.scalar('discriminator_loss', discriminator_loss))
# Add gradient summaries.
if generator_clones_gradients:
summaries |= set(model_deploy.add_gradients_summaries(generator_clones_gradients))
if discriminator_clones_gradients:
# Add summaries to the gradients.
summaries |= set(model_deploy.add_gradients_summaries(discriminator_clones_gradients))
# Create gradient updates.
n_critic_counter = tf.get_variable('n_critic_counter', shape=[], dtype=tf.int32, initializer=tf.zeros_initializer(),
trainable=False)
# Here the `generator_update_ops` includes the encoder as well.
generator_update_ops, non_gen_ops = GanModel._get_items_in_scope(
update_ops, self._get_generator_variable_scopes())
discriminator_update_ops, other_update_ops = GanModel._get_items_in_scope(
non_gen_ops, self._get_discriminator_variable_scopes())
    # A note on the `tf.cond()` function: it will evaluate both the true and false branches regardless of the result,
    # unless the op used in that branch is created within the lambda function.
    # Thus all the update ops for both the generator and the discriminator will be run regardless of which of the two
    # is being optimized. Most operations require both gen and dis to be called, but things like
    # evaluating the discriminator score on real data do not... There seems to be no easy way to fix this.
# Example code: Both generator run and discriminator run will be printed even though only variables on one side
# gets updated, because update variable op is created inside lambda func where as print is created outside.
# generator_update_ops.append(tf.Print('gen run', ['gen run', n_critic_counter, global_step], first_n=10))
# discriminator_update_ops.append(tf.Print('dis run', ['dis run', n_critic_counter, global_step], first_n=10))
grad_updates = tf.cond(tf.equal(tf.mod(n_critic_counter, FLAGS.n_critic), 0),
lambda: GanModel.maybe_apply_gradients(optimizer, generator_clones_gradients,
step=n_critic_counter,
update_ops=generator_update_ops),
lambda: GanModel.maybe_apply_gradients(optimizer, discriminator_clones_gradients,
step=n_critic_counter,
update_ops=discriminator_update_ops), )
with tf.control_dependencies([grad_updates, ]):
increase_global_step = tf.cond(tf.equal(tf.mod(n_critic_counter, FLAGS.n_critic), 0),
lambda: tf.assign(global_step, global_step + 1), lambda: tf.identity(global_step),
name='increase_global_step')
other_update_ops.append(increase_global_step)
self._maybe_add_gdrop_update_op(global_step, generator_loss, other_update_ops, summaries)
update_op = tf.group(*other_update_ops)
with tf.control_dependencies([update_op]):
if FLAGS.loss_architecture.startswith('wgan') or FLAGS.loss_architecture == 'hinge':
train_tensor = tf.identity(discriminator_loss, name='train_op')
else:
# The closer the discriminator loss is to 0 (or 1 for non-WGAN losses), the better the output usually is.
train_tensor = tf.negative(discriminator_loss, name='train_op')
return train_tensor
#################
# Add summaries #
#################
def _add_image_summaries(self, end_points, _):
# Add summaries for images, if there are any.
for end_point_name in ['sources', 'targets', 'generator_output']:
if (end_point_name in end_points and len(end_points[end_point_name].shape) == 4):
self._add_one_image_summary(end_point_name, self._post_process_image(end_points[end_point_name]))
@staticmethod
def _add_loss_summaries(first_clone_scope, summaries, end_points):
for collection in [tf.GraphKeys.LOSSES, GENERATOR_LOSS_COLLECTION, DISCRIMINATOR_LOSS_COLLECTION]:
for loss in tf.get_collection(collection, first_clone_scope):
summaries.add(tf.summary.scalar('losses/%s' % loss.op.name, loss))
@staticmethod
def _add_end_point_summaries(end_points, summaries):
"""Wrapper around the inherited _add_end_point_summaries(). Excludes some end points."""
# Exclude two end points for generated images with custom noise inputs.
excluded_end_points = {CUSTOM_INPUT_PH, CUSTOM_GENERATED_TARGETS, 'swd_real_ph', 'swd_fake_ph', 'swd'}
not_excluded_end_points = copy.copy(end_points)
for end_point in end_points:
if end_point in excluded_end_points:
del not_excluded_end_points[end_point]
super(GanModel, GanModel)._add_end_point_summaries(not_excluded_end_points, summaries)
###################################################
# Extra function to run after each training step. #
###################################################
@staticmethod
def do_extra_train_step_aux(session, run_list, out_list, feed_dict_per_hw=None):
"""Save the output of each tensor in `run_list` as an image to their corresponding path in `out_list`."""
image_list = [[] for _ in range(len(out_list))]
for i in range(FLAGS.log_image_n_per_hw):
run_results = session.run(run_list, feed_dict=None if feed_dict_per_hw is None else feed_dict_per_hw[i])
for run_items_i, run_result in enumerate(run_results):
# For tasks like anime_faces, the source is a conditional one-hot tensor. Visualize it as an image as well.
if len(run_result.shape) == 2:
run_result = np.expand_dims(np.expand_dims(run_result, axis=2), axis=3)
run_result = np.repeat(run_result, 64, axis=2)
image_list[run_items_i].append(run_result)
for i, save_path in enumerate(out_list):
images = image_list[i]
if len(images):
concatenated_images = np.concatenate(images, axis=2)
concatenated_images = np.reshape(concatenated_images, (
concatenated_images.shape[0] * concatenated_images.shape[1], concatenated_images.shape[2],
concatenated_images.shape[3]))
util_io.save_float_image(save_path, concatenated_images)
return image_list
@staticmethod
def do_extra_train_step(session, end_points, global_step):
"""This function is hooked up to the main training step function. It is executed after each training step.
Child classes should override this function to fit their input/output.
"""
current_step = global_step.eval(session)
if current_step % FLAGS.log_image_every_n_iter == 0:
image_dir = os.path.join(FLAGS.train_dir, 'generated_samples')
if not tf.gfile.Exists(image_dir):
tf.gfile.MakeDirs(image_dir)
run_list = []
out_list = []
if 'generator_conditional_layer' in end_points:
run_list.append(end_points['generator_conditional_layer'])
out_list.append(os.path.join(image_dir, '%d_conditional_layer.jpg' % (current_step)))
if 'sources' in end_points:
run_list = run_list + [end_points['sources'], end_points['generator_output'], end_points['targets']]
out_list = out_list + [os.path.join(image_dir, '%d_source.jpg' % (current_step)),
os.path.join(image_dir, '%d.jpg' % (current_step)),
os.path.join(image_dir, '%d_target.jpg' % (current_step)), ]
feed_dict_per_hw = None
else:
# For image generation task where the generator source is a random tensor.
target_img_name = 'targets'
run_list = run_list + [end_points[CUSTOM_GENERATED_TARGETS], end_points[target_img_name]]
out_list = out_list + [os.path.join(image_dir, '%d.jpg' % (current_step)),
os.path.join(image_dir, '%d_target.jpg' % (current_step))]
np_dtype = getattr(np, FLAGS.dataset_dtype)
ph = end_points[CUSTOM_INPUT_PH]
ph_shape = ph.get_shape().as_list()
np.random.seed(314)
noise = np.random.standard_normal([FLAGS.batch_size, ] + ph_shape[-1:]).astype(dtype=np_dtype)
noise = np.expand_dims(np.expand_dims(noise, 1), 1)
noise2 = np.random.standard_normal([FLAGS.batch_size, ] + ph_shape[-1:]).astype(dtype=np_dtype)
noise2 = np.expand_dims(np.expand_dims(noise2, 1), 1)
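# The loop below linearly interpolates between the two fixed latent vectors (noise2 at i=0,
# noise at the last step), so the saved image grid sweeps smoothly across the latent space
# with a deterministic seed.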
feed_dict_per_hw = []
for i in range(FLAGS.log_image_n_per_hw):
current_noise_vectors = (noise * i + noise2 * (FLAGS.log_image_n_per_hw - i - 1)) / float(FLAGS.log_image_n_per_hw - 1)
assert list(current_noise_vectors.shape[1:]) == ph_shape[1:]
feed_dict = {ph: current_noise_vectors}
feed_dict_per_hw.append(feed_dict)
GanModel.do_extra_train_step_aux(session, run_list=run_list, out_list=out_list, feed_dict_per_hw=feed_dict_per_hw)
if FLAGS.eval_every_n_iter_in_training and current_step % FLAGS.eval_every_n_iter_in_training == 0:
# TODO: Just use SWD for now. In the future, trigger different evals via flags.
GanModel._calc_swd(session, end_points, current_step=current_step)
########
# Eval #
########
@staticmethod
def _define_eval_metrics(end_points, data_batched):
metric_map = {}
generator_losses = tf.get_collection(GENERATOR_LOSS_COLLECTION, )
discriminator_losses = tf.get_collection(DISCRIMINATOR_LOSS_COLLECTION, )
for loss in generator_losses + discriminator_losses:
loss_name = loss.name[:-len('/value:0')] if loss.name.endswith('/value:0') else loss.name # rstrip() strips a character set, not a suffix
metric_map[loss_name] = slim.metrics.streaming_mean(loss)
return metric_map
def get_items_to_encode(self, end_points, data_batched):
"""Outputs a list with format (name, is_image, tensor)"""
items_to_encode = []
if 'source' in data_batched:
items_to_encode.append(('sources', True, self._post_process_image(data_batched.get('source'))))
generated_targets = end_points['generator_output']
generated_target_prediction = end_points['discriminator_generated_prediction']
real_target_prediction = end_points['discriminator_real_prediction']
targets = data_batched.get('target')
items_to_encode.append(('targets', True, self._post_process_image(targets)))
items_to_encode.append(('generated_targets', True, self._post_process_image(generated_targets)))
items_to_encode.append(('generated_target_prediction', False, generated_target_prediction))
items_to_encode.append(('real_target_prediction', False, real_target_prediction))
best_generated_target_i = tf.argmax(tf.squeeze(generated_target_prediction, axis=1))
worst_real_target_i = tf.argmin(tf.squeeze(real_target_prediction, axis=1))
items_to_encode.append(
('best_generated_target', True, self._post_process_image(generated_targets[best_generated_target_i])))
items_to_encode.append(('worst_real_target', True, self._post_process_image(targets[worst_real_target_i])))
return items_to_encode
@staticmethod
def to_human_friendly(eval_items):
"""For non-image items, use space to join the list values."""
ret = []
for name, is_image, vals in eval_items:
if is_image:
ret.append((name, is_image, vals))
else:
human_readable_vals = []
for val in vals:
human_readable_val = []
for i, item in enumerate(val):
human_readable_val.append(str(item))
human_readable_vals.append(' '.join(human_readable_val))
ret.append((name, is_image, human_readable_vals))
return ret
@staticmethod
def prepare_inception_score_classifier(classifier_name, num_classes, images, return_saver=True):
network_fn = nets_factory.get_network_fn(
classifier_name,
num_classes=num_classes,
weight_decay=0.0,
is_training=False,
)
# Note: you may need to change the prediction_fn here.
try:
logits, end_points = network_fn(images, prediction_fn=tf.sigmoid, create_aux_logits=False)
except TypeError:
tf.logging.warning('Cannot specify prediction_fn=tf.sigmoid, create_aux_logits=False.')
logits, end_points = network_fn(images, )
variables_to_restore = slim.get_model_variables(scope=nets_factory.scopes_map[classifier_name])
predictions = end_points['Predictions']
if return_saver:
saver = tf.train.Saver(variables_to_restore)
return predictions, end_points, saver
else:
return predictions, end_points
@staticmethod
def calc_inception_score(predictions, saver, classifier_path, session,
splits=10):
# Currently this function is not designed for use during training.
saver.restore(session, util_misc.get_latest_checkpoint_path(classifier_path))
# The inception score is by convention calculated over 10 splits of 5,000 samples each,
# with each split generated in mini-batches.
num_batches = int(math.ceil(5000.0 * splits / int(predictions.shape[0])))
preds = []
for i in range(num_batches):
pred = session.run(predictions, )
preds.append(pred)
preds = np.concatenate(preds, 0)
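# Inception score: IS = exp(E_x[KL(p(y|x) || p(y))]). Each split estimates the marginal p(y)
# as the mean prediction within that split; the per-split scores are then averaged.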
scores = []
for i in range(splits):
part = preds[(i * preds.shape[0] // splits):((i + 1) * preds.shape[0] // splits), :]
kl = part * (np.log(part) - np.log(np.expand_dims(np.mean(part, 0), 0)))
kl = np.mean(np.sum(kl, 1))
scores.append(np.exp(kl))
return np.mean(scores), np.std(scores)
@staticmethod
def _get_swd_real_fake(end_points):
return end_points['targets'], end_points['generator_output']
@staticmethod
def _calc_swd(session, end_points, current_step=0, get_swd_real_fake=None):
if FLAGS.train_image_size < 16:
tf.logging.log_every_n(tf.logging.INFO, 'Not doing swd on small images.', 100)
return
if not FLAGS.is_training:
tf.logging.info('Beware of preprocessing! If the score you get from training and eval are too different, '
'you may be preprocessing images during training but not during eval.')
num_batches = int(FLAGS.swd_num_images / FLAGS.batch_size)
util_io.touch_folder(FLAGS.eval_dir)
if FLAGS.swd_save_images:
save_image_dir = os.path.join(FLAGS.eval_dir, 'swd_debug', str(int(time.time())))
util_io.touch_folder(save_image_dir)
save_result_path = os.path.join(FLAGS.eval_dir, 'swd_%s_step_%d_%d_images.txt' %(
'train' if FLAGS.is_training else 'eval', current_step, FLAGS.swd_num_images))
if os.path.exists(save_result_path) and FLAGS.is_training:
print('Not repeating SWD calculation during training.')
return
if get_swd_real_fake is None:
get_swd_real_fake = GanModel._get_swd_real_fake
source, t_prime = get_swd_real_fake(end_points)
if FLAGS.use_tf_swd:
reals = []
fakes = []
for i in range(num_batches):
real_minibatch, fake_minibatch = session.run([source, t_prime])
reals.append(real_minibatch)
fakes.append(fake_minibatch)
reals = np.concatenate(reals, axis=0)
fakes = np.concatenate(fakes, axis=0)
if FLAGS.swd_save_images:
for image_i in range(reals.shape[0]):
util_io.imsave(os.path.join(save_image_dir, str(image_i) + '_real.jpg'),
reals[image_i] * 255.0)
util_io.imsave(os.path.join(save_image_dir, str(image_i) + '_fake.jpg'),
fakes[image_i] * 255.0)
score = session.run(end_points['swd'],
feed_dict={end_points['swd_real_ph']: reals, end_points['swd_fake_ph']: fakes})
score = np.array(score) * 1e3 # In the PGGAN paper numbers are reported on this scale.
print(score)
resolutions = []
res = int(end_points['swd_real_ph'].shape[1])
while res >= 16:
resolutions.append(res)
res //= 2
with open(save_result_path, 'w') as f:
f.write('swd sliced wasserstein score evaluated on %d images.\n' % FLAGS.swd_num_images)
f.write('res\treal\tfake\n')
for i, hw in enumerate(resolutions):
f.write('%d\t%f\t%f\n' % (hw, score[i][0], score[i][1]))
avg = np.average(score, axis=0)
f.write('Average\t%f\t%f\n' % (avg[0], avg[1]))
assert len(score) == len(resolutions)
else:
raise NotImplementedError('Dependent library cannot be open sourced due to licensing issues. Sorry. :(')
@staticmethod
def _prepare_tf_swd(end_points, real, fake):
raise AssertionError('There is a bug in the tensorflow 1.8 implementation. It is wrongly normalizing by patch.')
swd_real_ph = tf.placeholder(real.dtype,
tf.TensorShape([None, real.shape[1], real.shape[2], real.shape[3]]),
name='swd_real_ph')
swd_fake_ph = tf.placeholder(fake.dtype,
tf.TensorShape([None, fake.shape[1], fake.shape[2], fake.shape[3]]),
name='swd_fake_ph')
distance = swd.sliced_wasserstein_distance(swd_real_ph, swd_fake_ph, patches_per_image=128, random_sampling_count=4, random_projection_dim=128)
end_points['swd_real_ph'] = swd_real_ph
end_points['swd_fake_ph'] = swd_fake_ph
end_points['swd'] = distance
def _define_extra_eval_actions(self, end_points, data_batched):
if FLAGS.calc_inception_score:
return self.prepare_inception_score_classifier(FLAGS.incep_classifier_name, FLAGS.incep_num_classes,
end_points['generator_output'])
elif FLAGS.calc_swd:
source, t_prime = self._get_swd_real_fake(end_points)
if FLAGS.use_tf_swd:
self._prepare_tf_swd(end_points, source, t_prime)
return (end_points, data_batched)
else:
raise NotImplementedError('please specify the extra eval action type. e.g. set `calc_swd` flag to True.')
def _do_extra_eval_actions(self, session, extra_eval):
if FLAGS.calc_inception_score:
predictions, end_points, saver = extra_eval
self.calc_inception_score(predictions, saver, FLAGS.incep_classifier_path, session, )
elif FLAGS.calc_swd:
(end_points, data_batched) = extra_eval
self._calc_swd(session, end_points)
##########
# Export #
##########
@staticmethod
def _build_signature_def_map(end_points, data_batched):
# Build the signature_def_map.
sources = tf.saved_model.utils.build_tensor_info(
end_points[CUSTOM_INPUT_PH])
outputs = tf.saved_model.utils.build_tensor_info(
end_points[CUSTOM_GENERATED_TARGETS])
domain_transfer_signature = (
tf.saved_model.signature_def_utils.build_signature_def(
inputs={
tf.saved_model.signature_constants.PREDICT_INPUTS:
sources
},
outputs={
tf.saved_model.signature_constants.PREDICT_OUTPUTS:
outputs,
},
method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME))
ret = {
tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
domain_transfer_signature,
}
return ret
@staticmethod
def _build_assets_collection(end_points, data_batched):
return None
#############################
# PGGAN specific functions. #
#############################
@staticmethod
def get_growing_image(image, alpha_grow, name_postfix='image'):
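# Progressive-growing fade-in: blend the image with a 2x down- then up-sampled copy of itself,
# weighted by alpha_grow (0 = fully low-resolution, 1 = fully high-resolution).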
low_res_image = tf.nn.avg_pool(image, (1, 2, 2, 1), (1, 2, 2, 1), 'VALID')
low_res_image = tf.image.resize_nearest_neighbor(
low_res_image, (low_res_image.shape[1] * 2, low_res_image.shape[2] * 2), name='low_res_%s' % name_postfix)
return alpha_grow * image + (1 - alpha_grow) * low_res_image
@staticmethod
def get_growing_source_and_target(data_batched, global_step):
# TODO: should I grow the source as well?
print('TODO: should I grow the source as well? Run an experiment on this!')
sources = data_batched.get('source')
targets = data_batched.get('target')
if FLAGS.is_growing:
with tf.variable_scope('alpha_grow'):
alpha_grow = tf.cast(global_step - FLAGS.grow_start_number_of_steps, targets.dtype) / (
FLAGS.max_number_of_steps - FLAGS.grow_start_number_of_steps)
if sources is not None:
sources = GanModel.get_growing_image(sources, alpha_grow, name_postfix='sources')
if targets is not None:
targets = GanModel.get_growing_image(targets, alpha_grow, name_postfix='targets')
else:
alpha_grow = 0.0
return sources, targets, alpha_grow
@staticmethod
def _add_pggan_kwargs(data_batched, sources, targets, alpha_grow, generator_kwargs, discriminator_kwargs):
additional_kwargs = {'is_growing': FLAGS.is_growing, 'alpha_grow': alpha_grow, 'do_self_attention': FLAGS.do_self_attention, 'self_attention_hw': FLAGS.self_attention_hw}
generator_kwargs.update(**additional_kwargs)
discriminator_kwargs.update(**additional_kwargs)
generator_kwargs['do_pixel_norm'] = FLAGS.do_pixel_norm
generator_kwargs['dtype'] = targets.dtype
if FLAGS.use_gdrop:
discriminator_kwargs[GDROP_STRENGTH_VAR_NAME] = slim.model_variable(GDROP_STRENGTH_VAR_NAME, shape=[],
dtype=targets.dtype,
initializer=tf.zeros_initializer,
trainable=False)
else:
discriminator_kwargs['do_dgrop'] = False
# Conditional related params.
if FLAGS.use_conditional_labels:
conditional_labels = data_batched.get('conditional_labels', None)
if conditional_labels is not None:
generator_kwargs['arg_scope_fn'] = functools.partial(pggan.conditional_progressive_gan_generator_arg_scope,
conditional_layer=conditional_labels)
source_embed = GanModel._embed_one_hot(conditional_labels, FLAGS.conditional_embed_dim, )
discriminator_kwargs['conditional_embed'] = source_embed
def main(self):
if not FLAGS.train_image_size:
raise ValueError('Please set the `train_image_size` flag.')
super(GanModel, self).main()
def main(_):
model = GanModel()
model.main()
if __name__ == '__main__':
tf.app.run()
| 49.234962
| 174
| 0.675199
|
239d0cd15c8f9b2fa7cd83304521a33d1e67ad74
| 1,630
|
py
|
Python
|
chap_01/exe_009_compound_interest.py
|
aleattene/python-workbook
|
bf26ba716c957316d1463fb25488384e319d5b91
|
[
"MIT"
] | null | null | null |
chap_01/exe_009_compound_interest.py
|
aleattene/python-workbook
|
bf26ba716c957316d1463fb25488384e319d5b91
|
[
"MIT"
] | null | null | null |
chap_01/exe_009_compound_interest.py
|
aleattene/python-workbook
|
bf26ba716c957316d1463fb25488384e319d5b91
|
[
"MIT"
] | null | null | null |
"""
The program receives from the USER the MONEY DEPOSITED
in a savings account and displays the amount in the same account
after 1, 2 and 3 years, based on an annual compound interest rate of 4%.
"""
# Start Definition of FUNCTIONS
def calcolaInteresse(amount):
TAX_INTEREST = 1.04 # growth factor for 4% annual compound interest
return amount * TAX_INTEREST
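# Worked example of the 4% compounding implemented above (rounded to cents): a deposit of
# 1000.00 grows to 1040.00 after one year, 1081.60 after two, and 1124.86 after three.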
def valutaFloat(amount):
countPoints = 0
for char in amount:
if ord(char) == 46:
countPoints += 1
if countPoints == 1 and amount != ".":
try:
float(amount)
return True
except ValueError: # e.g. "1.x" has one point but is not a valid number
return False
return False
# End Definition of FUNCTIONS
# Acquisition and Control of the DATA entered by the USER
amount = input(
"Enter the amount of MONEY deposited in the savings account (€): ")
numberFloat = valutaFloat(amount)
while not(amount.isdigit()) and not(numberFloat):
print("Incorrect entry. Try again by entering a new amount")
amount = input(
"Enter the amount of MONEY deposited in the savings account (€): ")
numberFloat = valutaFloat(amount)
# Computing of ANNUAL INTERESTS
amount = float(amount) # conversion string -> float
amountYears01 = calcolaInteresse(amount)
amountYears02 = calcolaInteresse(amountYears01)
amountYears03 = calcolaInteresse(amountYears02)
# Displaying the RESULTS
print("Amount of money deposited at the end of the first year € %.2f" %
amountYears01)
print("Amount of money deposited at the end of the second year € %.2f" %
amountYears02)
print("Amount of money deposited at the end of the third year € %.2f" %
amountYears03)
| 30.754717
| 75
| 0.699387
|
82a7f2a1c73d11cc32e47a27a98e45aafa6fafb6
| 437
|
py
|
Python
|
python_challenge/5_resources/5_helpful.py
|
facmartoni/python_exercises
|
7f05c7491a0eee05e32f04c7f07ddc1ba688b7a2
|
[
"Apache-2.0"
] | null | null | null |
python_challenge/5_resources/5_helpful.py
|
facmartoni/python_exercises
|
7f05c7491a0eee05e32f04c7f07ddc1ba688b7a2
|
[
"Apache-2.0"
] | null | null | null |
python_challenge/5_resources/5_helpful.py
|
facmartoni/python_exercises
|
7f05c7491a0eee05e32f04c7f07ddc1ba688b7a2
|
[
"Apache-2.0"
] | 1
|
2021-10-11T00:25:14.000Z
|
2021-10-11T00:25:14.000Z
|
"""
convert dos linefeeds (crlf) to unix (lf)
usage: dos2unix.py
"""
original = "5_text.pickle"
destination = "5_text_unix.pickle"
content = ''
outsize = 0
with open(original, 'rb') as infile:
content = infile.read()
with open(destination, 'wb') as output:
for line in content.splitlines():
outsize += len(line) + 1
output.write(line + b'\n') # splitlines() strips CR/LF, so every line is rewritten with a bare LF
print("Done. Saved %s bytes." % (len(content)-outsize))
| 24.277778
| 55
| 0.649886
|
d885cf898c2653fb310cba3c370e35cffb7d1a70
| 144
|
py
|
Python
|
typedtsv/__init__.py
|
jimmybot/typedtsv
|
0fbc5488d4e6dbe361afb7c35c8a06e174363d8b
|
[
"Apache-2.0"
] | 2
|
2018-07-10T05:19:42.000Z
|
2018-07-11T14:39:40.000Z
|
typedtsv/__init__.py
|
jimmybot/typedtsv
|
0fbc5488d4e6dbe361afb7c35c8a06e174363d8b
|
[
"Apache-2.0"
] | 4
|
2018-07-13T03:21:05.000Z
|
2018-11-29T20:18:40.000Z
|
typedtsv/__init__.py
|
jimmybot/typedtsv
|
0fbc5488d4e6dbe361afb7c35c8a06e174363d8b
|
[
"Apache-2.0"
] | null | null | null |
__version__ = '0.9.1'
from typedtsv.typedtsv import dumps, loads, dump_list, load_list
__all__ = ['dumps', 'loads', 'dump_list', 'load_list']
| 24
| 64
| 0.715278
|
87908e73310ee247296225480f7ad2180d6618d9
| 2,259
|
py
|
Python
|
webruntime/_browser.py
|
caph1993/webruntime
|
98775b9abfef4ec0874c940660832ef11f8ff92f
|
[
"BSD-2-Clause"
] | 36
|
2018-02-26T23:02:59.000Z
|
2022-03-22T16:18:45.000Z
|
webruntime/_browser.py
|
caph1993/webruntime
|
98775b9abfef4ec0874c940660832ef11f8ff92f
|
[
"BSD-2-Clause"
] | 22
|
2018-04-14T15:14:05.000Z
|
2021-08-23T18:39:09.000Z
|
webruntime/_browser.py
|
caph1993/webruntime
|
98775b9abfef4ec0874c940660832ef11f8ff92f
|
[
"BSD-2-Clause"
] | 10
|
2018-08-09T09:55:32.000Z
|
2021-04-19T19:32:12.000Z
|
""" Web runtime based on a common browser
Opens browser via webbrowser module.
"""
import webbrowser
from . import logger
from ._common import BaseRuntime
BROWSER_MAP = {'chrome': ['google-chrome', 'chrome',
'chromium-browser', 'chromium'],
'chromium': ['chromium-browser', 'chromium'],
'default': [],
}
class BrowserRuntime(BaseRuntime):
""" Runtime based on the Python webbrowser module. This runtime is used
to open a url in the system default browser, and to attempt to handle a
given browser name that is unknown to the webruntime module (maybe
the webbrowser module knows it).
"""
def __init__(self, type=None, **kwargs):
self._type = type or ''
super().__init__(**kwargs)
def _get_install_instuctions(self):
return '' # we know nothing of the browsers
def _get_name(self):
return 'browser'
def _get_exe(self):
b, errors = self._get_openers(self._type)
if not self._type:
return 'stub_exe_default_browser'
elif b:
return 'stub_exe_%s_browser' % self._type
def _get_version(self):
return None
def _launch_tab(self, url):
b, errors = self._get_openers(self._type)
if b:
b.open(url)
else:
if errors:
logger.warn('Given browser %r not valid/available;\n'
'Falling back to the default browser.' % self._type)
# Run default
webbrowser.open(url)
def _launch_app(self, url):
raise RuntimeError('Browser runtime cannot run as an app.')
def _get_openers(self, type):
# Get alternative types
types = BROWSER_MAP.get(type, [type])
types = [t for t in types if t]
# Use default browser
if not types:
return webbrowser, []
# Try to open all possibilities
errors = []
b = None
for t in types:
try:
b = webbrowser.get(t)
break
except webbrowser.Error as err:
errors.append(str(err))
return b, errors
| 28.2375
| 75
| 0.557326
|
2a7472bdd894597f3bf61139a5679de0fb7833d0
| 1,193
|
py
|
Python
|
sessions/session_08.py
|
pavanmaradia/202102-Python-Django-Training
|
6ce836009acf037a921b7439a6edda7fc8370dcb
|
[
"MIT"
] | 1
|
2021-02-09T16:14:36.000Z
|
2021-02-09T16:14:36.000Z
|
sessions/session_08.py
|
pavanmaradia/202102-Python-Django-Training
|
6ce836009acf037a921b7439a6edda7fc8370dcb
|
[
"MIT"
] | null | null | null |
sessions/session_08.py
|
pavanmaradia/202102-Python-Django-Training
|
6ce836009acf037a921b7439a6edda7fc8370dcb
|
[
"MIT"
] | null | null | null |
"""
User Defined Function:
simple function
parameter function
return function
parameter and return function
Named Function
"""
'''
def <function_name>(<parameters>):
<statements>
<return>
'''
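# A minimal concrete instance of the template above (kept commented out, like the rest of this lesson):
# def add(a, b=1):
#     return a + b
#
# print(add(2))     # 3
# print(add(2, 3))  # 5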
# def greetings():
# print("Hello World")
#
# greetings()
# def greetings(name, greeting_message):
# print(F"{greeting_message} {name}")
#
# _name = input("Enter Name: ")
# _message = input("Enter message: ")
# greetings(_message, _name)
# greetings(greeting_message=_message, name=_name)
# def greetings(name, greeting_message='Hi'):
# print(F"{greeting_message} {name}")
# _name = input("Enter Name: ")
# _message = input("Enter message: ")
# greetings(name=_name)
# greetings(name=_name, greeting_message=_message)
# def greetings():
# message = "Hi world"
# return message
#
# resp = greetings()
# print(resp)
# def addition(no_1, no_2):
# return F"Sum of {no_1} and {no_2} is {no_1 + no_2}"
#
#
# num1 = int(input("Enter number 1: "))
# num2 = int(input("Enter number 2: "))
#
#
# resp = addition(num1, num2)
# print(resp)
#
# for i in range(5,1,-1):
# print(" "*i, '1')
| 19.883333
| 58
| 0.605197
|
d092a58814b1b65d57d3c17af7bc51a515d82c23
| 638
|
py
|
Python
|
bench/model.py
|
yoshiya0503/SQLAlchemy-on-tornado
|
5ca790dba72f15291d409fe1f96b287dffe25fcd
|
[
"MIT"
] | 2
|
2015-10-15T04:02:12.000Z
|
2016-04-07T20:31:20.000Z
|
bench/model.py
|
yoshiya0503/SQLAlchemy-on-tornado
|
5ca790dba72f15291d409fe1f96b287dffe25fcd
|
[
"MIT"
] | null | null | null |
bench/model.py
|
yoshiya0503/SQLAlchemy-on-tornado
|
5ca790dba72f15291d409fe1f96b287dffe25fcd
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
model.py
model on SQLAlchemy
"""
__author__ = 'Yoshiya Ito <myon53@gmail.com>'
__version__ = '0.0.1'
__date__ = '22 Sep 2015'
from uuid import uuid4
from sqlalchemy import Column, Boolean, String, DateTime
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(String(64), primary_key=True)
name = Column(String(64))
def __init__(self, id, name):
self.id = id or str(uuid4())
self.name = name or 'hoge'
def __repr__(self):
return 'id:{0}, name:{1}'.format(self.id, self.name)
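# Usage sketch (the in-memory SQLite URL below is only an example):
#   from sqlalchemy import create_engine
#   from sqlalchemy.orm import sessionmaker
#   engine = create_engine('sqlite:///:memory:')
#   Base.metadata.create_all(engine)
#   session = sessionmaker(bind=engine)()
#   session.add(User('42', 'alice'))
#   session.commit()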
| 22
| 60
| 0.65674
|
ac5c1e83caecd37a2f6c99841236f9ae78b87ca7
| 515
|
py
|
Python
|
core/admin/migrations/versions/2335c80a6bc3_.py
|
mboogert/Mailu
|
ebf6d84aabebe854824546df3ea64566107ed004
|
[
"MIT"
] | 3,620
|
2016-10-30T14:13:46.000Z
|
2022-03-31T18:40:15.000Z
|
core/admin/migrations/versions/2335c80a6bc3_.py
|
mboogert/Mailu
|
ebf6d84aabebe854824546df3ea64566107ed004
|
[
"MIT"
] | 2,113
|
2016-10-27T10:36:52.000Z
|
2022-03-31T16:38:26.000Z
|
core/admin/migrations/versions/2335c80a6bc3_.py
|
mboogert/Mailu
|
ebf6d84aabebe854824546df3ea64566107ed004
|
[
"MIT"
] | 815
|
2016-10-29T12:02:00.000Z
|
2022-03-31T08:44:28.000Z
|
""" Add a maximum quota per domain
Revision ID: 2335c80a6bc3
Revises: 12e9a4f6ed73
Create Date: 2016-12-04 12:57:37.576622
"""
# revision identifiers, used by Alembic.
revision = '2335c80a6bc3'
down_revision = '12e9a4f6ed73'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('domain', sa.Column('max_quota_bytes', sa.Integer(), nullable=False, server_default='0'))
def downgrade():
with op.batch_alter_table('domain') as batch:
batch.drop_column('max_quota_bytes')
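# Note: batch_alter_table is used because SQLite cannot drop a column with a plain ALTER TABLE;
# Alembic's batch mode recreates the table instead.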
| 21.458333
| 107
| 0.735922
|
0e81213e38c2d80faaf6cc19ae8003b4404d499e
| 1,811
|
py
|
Python
|
python/q091/q91.py
|
MatthewTsan/Leetcode
|
b0705013eea8517ba7742730cd14ea8601b83c70
|
[
"Apache-2.0"
] | null | null | null |
python/q091/q91.py
|
MatthewTsan/Leetcode
|
b0705013eea8517ba7742730cd14ea8601b83c70
|
[
"Apache-2.0"
] | null | null | null |
python/q091/q91.py
|
MatthewTsan/Leetcode
|
b0705013eea8517ba7742730cd14ea8601b83c70
|
[
"Apache-2.0"
] | null | null | null |
class Solution:
def numDecodings(self, s: str) -> int:
# cache[i] holds the number of ways to decode the prefix s[:i]; the empty prefix has one decoding.
cache = [1, 0 if s[0] == "0" else 1]
if len(s) == 1:
return cache[-1]
for i in range(2, len(s) + 1):
ways = 0
if s[i-1] != "0":
ways += cache[i-1]
if s[i-2] != "0" and 10 <= int(s[i-2: i]) <= 26:
ways += cache[i-2]
cache.append(ways)
return cache[-1]
class Solution_2:
def numDecodings(self, s: str) -> int:
if s[0] == '0':
return 0
def canCombine(s, i):
if i < 1:
return False
if s[i - 1] == "0":
return False
num = int(s[i - 1] + s[i])
if num > 26:
return False
return True
if len(s) == 1:
return 1
result = [1]
if s[1] == "0":
if canCombine(s, 1):
result.append(1)
else:
return 0
elif canCombine(s, 1):
result.append(2)
else:
result.append(1)
i = 2
while (i < len(s)):
if s[i] == "0":
if canCombine(s, i):
result.append(result[i - 2])
else:
return 0
elif canCombine(s, i):
result.append(result[i - 1] + result[i - 2])
else:
result.append(result[i - 1])
i += 1
return result[-1]
if __name__ == '__main__':
sol = Solution()
list = "11110"
res = sol.numDecodings(list)
print(res)
| 24.472973
| 60
| 0.386527
|
3e7677bbcebab673dc3bd091adfbfa35061010b7
| 9,179
|
py
|
Python
|
MenuFrames.py
|
Astatine-213-Tian/Past-Paper-Crawler
|
bbf686e9e4ddf61cc9918e61cbc108ee53631c81
|
[
"MIT"
] | 2
|
2020-03-18T04:43:57.000Z
|
2020-03-22T11:28:01.000Z
|
MenuFrames.py
|
Astatine-213-Tian/Past-Paper-Crawler
|
bbf686e9e4ddf61cc9918e61cbc108ee53631c81
|
[
"MIT"
] | 1
|
2020-03-22T13:56:47.000Z
|
2020-03-22T14:02:29.000Z
|
MenuFrames.py
|
Astatine-213-Tian/Past-Paper-Crawler
|
bbf686e9e4ddf61cc9918e61cbc108ee53631c81
|
[
"MIT"
] | 2
|
2020-03-22T11:50:44.000Z
|
2020-04-15T03:44:05.000Z
|
import wx
import os
import platform
import subprocess
import Cache
class AboutFrame(wx.Frame):
def __init__(self, call):
wx.Frame.__init__(self, None, -1, size=(300, 180), style=wx.SYSTEM_MENU | wx.CAPTION | wx.CLOSE_BOX)
title = wx.StaticText(self, label=" Past Paper Crawler ")
version = wx.StaticText(self, label=" Version 1.4.0 ")
team = wx.StaticText(self, label=" Made by Teresa, John, Ethan, and Peter ")
maintenance = wx.StaticText(self, label=" Currently maintained by Teresa ")
copy_right = wx.StaticText(self, label=" Copyright © 2020 田青禾 ")
thanks = wx.StaticText(self, label=" Inspired by Past Paper Crawler created by Raymond ")
if platform.system() == "Darwin":
title_font = wx.Font(wx.FontInfo(13).Bold())
content_font = wx.Font(wx.FontInfo(10))
else:
title_font = wx.Font(wx.FontInfo(13).Bold().FaceName("Arial"))
content_font = wx.Font(wx.FontInfo(10).FaceName("Arial"))
title.SetFont(title_font)
version.SetFont(content_font)
team.SetFont(content_font)
maintenance.SetFont(content_font)
copy_right.SetFont(content_font)
thanks.SetFont(content_font)
bottom_border = 10
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(title, flag=wx.ALIGN_CENTER | wx.BOTTOM | wx.TOP, border=bottom_border)
sizer.Add(version, flag=wx.ALIGN_CENTER | wx.BOTTOM, border=bottom_border)
sizer.Add(team, flag=wx.ALIGN_CENTER | wx.BOTTOM, border=bottom_border)
sizer.Add(maintenance, flag=wx.ALIGN_CENTER | wx.BOTTOM, border=bottom_border)
sizer.Add(copy_right, flag=wx.ALIGN_CENTER | wx.BOTTOM, border=bottom_border)
sizer.Add(thanks, flag=wx.ALIGN_CENTER | wx.BOTTOM, border=bottom_border)
self.SetSizer(sizer)
self.call = call
self.Bind(wx.EVT_CLOSE, self.on_close, self)
def on_close(self, event):
self.Destroy()
self.call()
class PreferencesFrame(wx.Frame):
def __init__(self, call):
wx.Frame.__init__(self, None, style=wx.SYSTEM_MENU | wx.CAPTION | wx.CLOSE_BOX, size=(400, 485))
self.init_UI()
self.call = call
self.Bind(wx.EVT_CLOSE, self.on_close, self)
self.refresh = False
def init_UI(self):
preference = wx.Notebook(self)
preference.AddPage(GeneralPanel(preference), "General")
preference.AddPage(CachePanel(preference), "Cache")
self.Show()
def on_close(self, event):
self.Destroy()
self.call()
class GeneralPanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
self.config_path = Cache.preference_directory()
self.current_setting = Cache.load(self.config_path)
level_txt = wx.StaticText(self, label="Default level:")
self.level_choice = wx.Choice(self, choices=["--- Select level ---", "IGCSE", "AS & A-Level", "O-Level"])
self.level_choice.SetSelection(self.current_setting["Default level"])
self.Bind(wx.EVT_CHOICE, self.on_choose_level, self.level_choice)
level_sizer = wx.BoxSizer(wx.HORIZONTAL)
level_sizer.Add(level_txt, flag=wx.RIGHT, border=5)
level_sizer.Add(self.level_choice)
split_after_level = wx.StaticLine(self, style=wx.LI_HORIZONTAL)
download_txt = wx.StaticText(self, label="Download path:")
ask_radio_button = wx.RadioButton(self, label="Ask every time")
path_radio_button = wx.RadioButton(self, label="Use default path")
if self.current_setting["Default path mode"]:
path_radio_button.SetValue(True)
else:
ask_radio_button.SetValue(True)
self.Bind(wx.EVT_RADIOBUTTON, self.on_radio_button)
change_button = wx.Button(self, label="change", size=(65, -1))
self.Bind(wx.EVT_BUTTON, self.on_change_path, change_button)
set_path_sizer = wx.BoxSizer(wx.HORIZONTAL)
set_path_sizer.Add(path_radio_button, flag=wx.ALIGN_CENTER_VERTICAL)
set_path_sizer.Add(change_button, flag=wx.LEFT | wx.ALIGN_CENTER_VERTICAL, border=5)
self.default_directory = wx.StaticText(self, label=self.current_setting["Default path"])
split_after_download = wx.StaticLine(self, style=wx.LI_HORIZONTAL)
border = 10
general_sizer = wx.BoxSizer(wx.VERTICAL)
general_sizer.Add(level_sizer, flag=wx.LEFT | wx.TOP, border=border)
general_sizer.AddSpacer(6)
general_sizer.Add(split_after_level, flag=wx.EXPAND | wx.RIGHT | wx.LEFT, border=border)
general_sizer.AddSpacer(2)
general_sizer.Add(download_txt, flag=wx.LEFT, border=border)
general_sizer.AddSpacer(5)
general_sizer.Add(ask_radio_button, flag=wx.LEFT, border=border)
general_sizer.AddSpacer(3)
general_sizer.Add(set_path_sizer, flag=wx.LEFT, border=border)
general_sizer.AddSpacer(2)
general_sizer.Add(self.default_directory, flag=wx.LEFT, border=25)
general_sizer.AddSpacer(6)
general_sizer.Add(split_after_download, flag=wx.EXPAND | wx.RIGHT | wx.LEFT, border=border)
self.SetSizer(general_sizer)
def on_choose_level(self, event):
self.current_setting["Default level"] = self.level_choice.GetSelection()
Cache.store(self.current_setting, self.config_path)
def on_radio_button(self, event):
choice = event.GetEventObject()
if choice.GetLabel() == "Use default path":
self.current_setting["Default path mode"] = True
if not self.current_setting["Default path"]:
self.on_change_path(None)
else:
self.current_setting["Default path mode"] = False
Cache.store(self.current_setting, self.config_path)
def on_change_path(self, event):
dlg = wx.DirDialog(self, "Choose the default folder for past paper")
if dlg.ShowModal() == wx.ID_OK:
folder_directory = dlg.GetPath()
self.current_setting["Default path"] = folder_directory
self.default_directory.SetLabel(folder_directory)
Cache.store(self.current_setting, self.config_path)
dlg.Destroy()
else:
dlg.Destroy()
return
class CachePanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
explain_txt = wx.StaticText(self, label="Past Paper Crawler caches viewed web pages to memory \nand disk to boost efficiency.")
hint_txt = wx.StaticText(self, label="Current cache on the disk: ")
open_button = wx.Button(self, label="open folder")
self.Bind(wx.EVT_BUTTON, self.on_open, open_button)
self.cache_folder = Cache.customized_directory()
cache_list = sorted([file for file in os.listdir(self.cache_folder) if not file.startswith(".")])
self.cache_checklist = wx.CheckListBox(self, choices=cache_list, size=(0, 295))
open_cache_sizer = wx.BoxSizer(wx.HORIZONTAL)
open_cache_sizer.Add(hint_txt, flag=wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, border=5)
open_cache_sizer.Add(open_button, flag=wx.ALIGN_CENTER_VERTICAL)
select_all_button = wx.Button(self, label="Select all")
self.Bind(wx.EVT_BUTTON, self.on_select_all, select_all_button)
remove_button = wx.Button(self, label="Remove")
self.Bind(wx.EVT_BUTTON, self.on_remove, remove_button)
button_sizer = wx.BoxSizer(wx.HORIZONTAL)
button_sizer.Add(select_all_button)
button_sizer.Add(remove_button, flag=wx.LEFT, border=8)
cache_sizer = wx.BoxSizer(wx.VERTICAL)
cache_sizer.Add(explain_txt, flag=wx.ALL, border=10)
cache_sizer.Add(open_cache_sizer, flag=wx.BOTTOM | wx.LEFT | wx.RIGHT, border=10)
cache_sizer.Add(self.cache_checklist, flag=wx.EXPAND | wx.BOTTOM | wx.LEFT | wx.RIGHT,
border=10)
cache_sizer.Add(button_sizer, flag=wx.ALIGN_RIGHT | wx.BOTTOM | wx.LEFT | wx.RIGHT, border=10)
self.SetSizer(cache_sizer)
def on_open(self, event):
if platform.system() == "Darwin":
subprocess.call(["open", self.cache_folder])
else:
subprocess.Popen("explorer %s" % self.cache_folder)
def on_select_all(self, event):
if len(self.cache_checklist.GetCheckedItems()) != self.cache_checklist.GetCount():
self.cache_checklist.SetCheckedItems(range(self.cache_checklist.GetCount()))
else:
for i in range(self.cache_checklist.GetCount()):
self.cache_checklist.Check(i, check=False)
def on_remove(self, event):
for file in self.cache_checklist.GetCheckedStrings():
path = os.path.join(self.cache_folder, file)
os.remove(path)
# Update cache list
cache_list = sorted([file for file in os.listdir(self.cache_folder) if not file.startswith(".")])
self.cache_checklist.Set(cache_list)
if __name__ == '__main__':
app = wx.App()
frame = PreferencesFrame(None)
# frame = AboutFrame(None)
frame.Show()
app.MainLoop()
| 42.49537
| 135
| 0.664234
|
f2caf6f48d050da9c3ba6be1bebca952f8a9710b
| 9,475
|
py
|
Python
|
endsem/007_jack_car_update.py
|
KishoreKaushal/ailab
|
f89cb1ff7d49c05859cb4d89b45c214ded6e1acf
|
[
"MIT"
] | 1
|
2018-11-27T19:55:41.000Z
|
2018-11-27T19:55:41.000Z
|
endsem/007_jack_car_update.py
|
KishoreKaushal/ailab
|
f89cb1ff7d49c05859cb4d89b45c214ded6e1acf
|
[
"MIT"
] | null | null | null |
endsem/007_jack_car_update.py
|
KishoreKaushal/ailab
|
f89cb1ff7d49c05859cb4d89b45c214ded6e1acf
|
[
"MIT"
] | null | null | null |
import numpy as np
import copy
import itertools
from scipy.stats import poisson
import time
class Environment:
def __init__(self):
# Dynamics of the MDP for Jack's Car Rental problem (three-location variant).
self.number_of_locations = 3
self.rental_credit = 10
self.expected_rental_requests = [3, 2, 2]
self.expected_rental_returns = [3, 1, 1]
self.capacity = [19, 9, 9]
self.max_car_moved = 5
self.gamma = 0.9
self.cost_of_moving = [2, 0, 2]
# available actions : actions can be accessed through the index
self.actions = self.generate_actions()
# available states : available states can be accessed through the index
self.states = [i for i in itertools.product(range(self.capacity[0]+1),
range(self.capacity[1]+1),
range(self.capacity[2]+1))]
# initializing the values of the states
self.V = np.zeros(tuple(np.array(self.capacity) + 1), dtype=np.float)
# initializing the policy array
self.policy = np.zeros(tuple(np.array(self.capacity) + 1), dtype=np.int)
# poisson precompute
self.poisson_pmf = dict()
self.poisson_sf = dict()
for n , lam in itertools.product(range(-1 , max(self.capacity) + 1),
range(max(self.expected_rental_requests + self.expected_rental_returns)+1)):
self.poisson_pmf[(n,lam)] = poisson.pmf(n,lam)
self.poisson_sf[(n,lam)] = poisson.sf(n,lam)
# printing the dynamics
self.print_dynamics()
def generate_actions(self):
red = {}
for i in range(-self.max_car_moved, self.max_car_moved + 1):
for j in range(-self.max_car_moved, self.max_car_moved + 1):
for k in range(-self.max_car_moved, self.max_car_moved + 1):
cost = self.cost_of_moving[0] * abs(i) + self.cost_of_moving[1] * abs(j) + \
self.cost_of_moving[2] * abs(k)
c = (i - k, j - i, k - j)
if c not in red or red[c][0] > cost:
red[c] = cost, (i, j, k)
actions = []
for el in red.items():
actions.append(el[1][1])
print (len(actions))
return actions
def print_dynamics(self):
print("Total Number of Locations: " , self.number_of_locations)
print("Rental Credit: ", self.rental_credit)
print("Expected Rental Requests: ", self.expected_rental_requests)
print("Expected Rental Returns: ", self.expected_rental_returns)
print("Capacity: " , self.capacity)
print("Discount Factor: ", self.gamma)
print("Cost of Moving: ", self.cost_of_moving)
print("Total number of actions: " , len(self.actions))
print("Total number of states: ", len(self.states) )
def expected_return(self, state, action):
# Initialize the return with the (negative) cost of moving cars.
returns = 0.0
returns -= np.sum(np.multiply(self.cost_of_moving , np.abs(action)))
# number of cars to start the day
# cars available at a location:
# current number of car - number of cars exit + number of cars return
cars_available = [min(state[0] - action[0] + action[2], self.capacity[0]),
min(state[1] - action[1] + action[0], self.capacity[1]),
min(state[2] - action[2] + action[1], self.capacity[2])]
val = 0
# iterate over all rental rates combinations: (rental_loc0 , rental_loc1 , rental_loc2)
for rental_rates in itertools.product(range(cars_available[0]+1),
range(cars_available[1]+1),
range(cars_available[2]+1)):
# finding the rental probabilities: the probability of this rental-rate combination occurring
# rental probability =
# p(rental rate of location 0) * p(rental rate of location 1) * p(rental rate of location 2)
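# When the rental rate equals the cars available, the demand is censored: any request of that
# size or larger is served identically, so the Poisson tail sf(n-1) = P(X >= n) is used in place
# of the point mass pmf(n).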
temp = [int(rental_rates[i]==cars_available[i]) for i in range(len(rental_rates))]
prob = [self.poisson_pmf[(rental_rates[i] , self.expected_rental_requests[i])] * (1-temp[i])
+ self.poisson_sf[(rental_rates[i]-1 , self.expected_rental_requests[i])] * (temp[i])
for i in range(len(rental_rates))]
rental_prob = np.prod(prob)
# total rentals: number of car that can be rented given a request
total_rentals = [min(cars_available[i] , rental_rates[i]) for i in range(len(rental_rates))]
# total rewards
current_rental_rewards = self.rental_credit * np.sum(total_rentals)
current_capacity = (self.capacity[0] - state[0] + rental_rates[0],
self.capacity[1] - state[1] + rental_rates[1],
self.capacity[2] - state[2] + rental_rates[2])
current_capacity = (min(current_capacity[0], self.capacity[0]),
min(current_capacity[1], self.capacity[1]),
min(current_capacity[2], self.capacity[2]))
# iterate over all return rate combinations: (return_loc0 , return_loc1 , return_loc2)
for return_rates in itertools.product(range(current_capacity[0] + 1),
range(current_capacity[1] + 1),
range(current_capacity[2] + 1)):
# finding the return-rate probabilities: the probability of this return-rate combination occurring
# return probability =
# p(return rate of location 0) * p(return rate of location 1) * p(return rate of location 2)
temp = [int(return_rates[i]==current_capacity[i]) for i in range(len(return_rates))]
prob = [self.poisson_pmf[(return_rates[i] , self.expected_rental_returns[i])] * (1-temp[i])
+ self.poisson_sf[(return_rates[i]-1 , self.expected_rental_returns[i])] * (temp[i])
for i in range(len(return_rates))]
return_prob = np.prod(prob)
current_return_prob = rental_prob * return_prob
val += current_return_prob
# number of cars at the end of the day
cars_available_eod = [min(cars_available[i] - rental_rates[i] + return_rates[i] , self.capacity[i])
for i in range(len(return_rates))]
# increment the return
returns += current_return_prob * (current_rental_rewards + self.gamma * self.V[tuple(cars_available_eod)])
#if (abs (val - 1) > 1e-6):
# print (val)
return returns
def value_iteration(self, threshold=0.1):
# No need to copy V: value iteration updates it in place and converges to the same fixed point.
V = self.V
# copy for safety
actions = copy.deepcopy(self.actions)
states = copy.deepcopy(self.states)
iteration = 0
while True:
delta = 0
t0 = time.time()
# for all the states
for state_idx, state in enumerate(states):
# print("State_idx: " , state_idx)
# state : (car in loc0 , car in loc1, car in loc2)
v = V[state]
# assign V[state] = max over all possible actions of the expected return for taking that action
# possible actions: only those that leave a non-negative number of cars at every location
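# Bellman optimality backup computed by the loop below:
#   V(s) <- max_a E[ reward + gamma * V(s') | s, a ]
# where the expectation runs over the Poisson rental and return rates at all three locations.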
temp = -np.inf
for action_idx , action in enumerate(actions):
#print("Action_idx: " , action_idx)
next_state = np.array([state[0] - action[0] + action[2],
state[1] - action[1] + action[0],
state[2] - action[2] + action[1]])
# if the next state is valid then the action is possible for this state
if np.all(next_state >= 0):
expected_return_from_this_state = self.expected_return(state, action)
#print("time taken: {} seconds".format(t1-t0))
if expected_return_from_this_state > temp:
temp = expected_return_from_this_state
V[state] = temp
delta = max(delta , abs(v-V[state]))
t1 = time.time()
iteration += 1
print("Iteration: {}\tDelta: {}\tTE: {} s".format(iteration , delta , t1-t0))
if delta <= threshold:
break
def main():
jack_car_rental = Environment()
jack_car_rental.value_iteration()
#value_fname = "value.txt"
#policy_fname = "policy.txt"
#print("Value array saving to file: {}".format(value_fname))
#print("Policy array saving to file: {}".format(policy_fname))
if __name__ == "__main__":
main()
| 48.341837
| 122
| 0.539525
|
1458757aa2ea705f613bdc3ae9e3cc35a82af4de
| 21,732
|
py
|
Python
|
pdfminer/converter.py
|
naren8642/pdfminer.six
|
e85a4bd86d912e7fa5691af8cca6bcf0a63e259a
|
[
"MIT"
] | null | null | null |
pdfminer/converter.py
|
naren8642/pdfminer.six
|
e85a4bd86d912e7fa5691af8cca6bcf0a63e259a
|
[
"MIT"
] | null | null | null |
pdfminer/converter.py
|
naren8642/pdfminer.six
|
e85a4bd86d912e7fa5691af8cca6bcf0a63e259a
|
[
"MIT"
] | null | null | null |
import logging
import re
import sys
from .pdfdevice import PDFTextDevice
from .pdffont import PDFUnicodeNotDefined
from .layout import LTContainer
from .layout import LTPage
from .layout import LTText
from .layout import LTLine
from .layout import LTRect
from .layout import LTCurve
from .layout import LTFigure
from .layout import LTImage
from .layout import LTChar
from .layout import LTTextLine
from .layout import LTTextBox
from .layout import LTTextBoxVertical
from .layout import LTTextGroup
from .utils import apply_matrix_pt
from .utils import mult_matrix
from .utils import enc
from .utils import bbox2str
from . import utils
log = logging.getLogger(__name__)
class PDFLayoutAnalyzer(PDFTextDevice):
RECTS = re.compile('^(mlllh)+$')
def __init__(self, rsrcmgr, pageno=1, laparams=None):
PDFTextDevice.__init__(self, rsrcmgr)
self.pageno = pageno
self.laparams = laparams
self._stack = []
return
def begin_page(self, page, ctm):
(x0, y0, x1, y1) = page.mediabox
(x0, y0) = apply_matrix_pt(ctm, (x0, y0))
(x1, y1) = apply_matrix_pt(ctm, (x1, y1))
mediabox = (0, 0, abs(x0-x1), abs(y0-y1))
self.cur_item = LTPage(self.pageno, mediabox)
return
def end_page(self, page):
assert not self._stack, str(len(self._stack))
assert isinstance(self.cur_item, LTPage), str(type(self.cur_item))
if self.laparams is not None:
self.cur_item.analyze(self.laparams)
self.pageno += 1
self.receive_layout(self.cur_item)
return
def begin_figure(self, name, bbox, matrix):
self._stack.append(self.cur_item)
self.cur_item = LTFigure(name, bbox, mult_matrix(matrix, self.ctm))
return
def end_figure(self, _):
fig = self.cur_item
assert isinstance(self.cur_item, LTFigure), str(type(self.cur_item))
self.cur_item = self._stack.pop()
self.cur_item.add(fig)
return
def render_image(self, name, stream):
assert isinstance(self.cur_item, LTFigure), str(type(self.cur_item))
item = LTImage(name, stream,
(self.cur_item.x0, self.cur_item.y0,
self.cur_item.x1, self.cur_item.y1))
self.cur_item.add(item)
return
def paint_path(self, gstate, stroke, fill, evenodd, path):
"""Paint paths described in section 4.4 of the PDF reference manual"""
shape = ''.join(x[0] for x in path)
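# Each path element is a tuple whose first item is the PDF path operator
# (m = moveto, l = lineto, c = curveto, h = closepath), so the joined string is a compact
# signature of the subpath: 'ml' is a single segment, 'mlllh' a closed quadrilateral.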
if shape == 'ml':
# horizontal/vertical line
(_, x0, y0) = path[0]
(_, x1, y1) = path[1]
(x0, y0) = apply_matrix_pt(self.ctm, (x0, y0))
(x1, y1) = apply_matrix_pt(self.ctm, (x1, y1))
if x0 == x1 or y0 == y1:
line = LTLine(gstate.linewidth, (x0, y0), (x1, y1), stroke,
fill, evenodd, gstate.scolor, gstate.ncolor)
self.cur_item.add(line)
elif shape == 'mlllh':
# rectangle
(_, x0, y0) = path[0]
(_, x1, y1) = path[1]
(_, x2, y2) = path[2]
(_, x3, y3) = path[3]
(x0, y0) = apply_matrix_pt(self.ctm, (x0, y0))
(x1, y1) = apply_matrix_pt(self.ctm, (x1, y1))
(x2, y2) = apply_matrix_pt(self.ctm, (x2, y2))
(x3, y3) = apply_matrix_pt(self.ctm, (x3, y3))
if (x0 == x1 and y1 == y2 and x2 == x3 and y3 == y0) or \
(y0 == y1 and x1 == x2 and y2 == y3 and x3 == x0):
rect = LTRect(gstate.linewidth, (x0, y0, x2, y2), stroke,
fill, evenodd, gstate.scolor, gstate.ncolor)
self.cur_item.add(rect)
elif self.RECTS.match(shape):
for paths in zip(*(iter(path),) * 5):
self.paint_path(gstate, stroke, fill, evenodd, list(paths))
else:
pts = []
for p in path:
for i in range(1, len(p), 2):
pts.append(apply_matrix_pt(self.ctm, (p[i], p[i+1])))
curve = LTCurve(gstate.linewidth, pts, stroke, fill, evenodd,
gstate.scolor, gstate.ncolor)
self.cur_item.add(curve)
def render_char(self, matrix, font, fontsize, scaling, rise, cid, ncs,
graphicstate):
try:
text = font.to_unichr(cid)
assert isinstance(text, str), str(type(text))
except PDFUnicodeNotDefined:
text = self.handle_undefined_char(font, cid)
textwidth = font.char_width(cid)
textdisp = font.char_disp(cid)
item = LTChar(matrix, font, fontsize, scaling, rise, text, textwidth,
textdisp, ncs, graphicstate)
self.cur_item.add(item)
return item.adv
def handle_undefined_char(self, font, cid):
return '(cid:%d)' % cid
def receive_layout(self, ltpage):
return
class PDFPageAggregator(PDFLayoutAnalyzer):
def __init__(self, rsrcmgr, pageno=1, laparams=None):
PDFLayoutAnalyzer.__init__(self, rsrcmgr, pageno=pageno,
laparams=laparams)
self.result = None
return
def receive_layout(self, ltpage):
self.result = ltpage
return
def get_result(self):
return self.result
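# Typical usage sketch (companion classes live in other pdfminer modules; treat the exact
# import paths as assumptions):
#   from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
#   from pdfminer.pdfpage import PDFPage
#   from pdfminer.layout import LAParams
#   rsrcmgr = PDFResourceManager()
#   device = PDFPageAggregator(rsrcmgr, laparams=LAParams())
#   interpreter = PDFPageInterpreter(rsrcmgr, device)
#   with open('input.pdf', 'rb') as fp:
#       for page in PDFPage.get_pages(fp):
#           interpreter.process_page(page)
#           layout = device.get_result()  # an LTPage with the analyzed layout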
class PDFConverter(PDFLayoutAnalyzer):
def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1,
laparams=None):
PDFLayoutAnalyzer.__init__(self, rsrcmgr, pageno=pageno,
laparams=laparams)
self.outfp = outfp
self.codec = codec
if hasattr(self.outfp, 'mode'):
if 'b' in self.outfp.mode:
self.outfp_binary = True
else:
self.outfp_binary = False
else:
import io
if isinstance(self.outfp, io.BytesIO):
self.outfp_binary = True
elif isinstance(self.outfp, io.StringIO):
self.outfp_binary = False
else:
try:
self.outfp.write("é")
self.outfp_binary = False
except TypeError:
self.outfp_binary = True
return
class TextConverter(PDFConverter):
def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1, laparams=None,
showpageno=False, imagewriter=None):
PDFConverter.__init__(self, rsrcmgr, outfp, codec=codec, pageno=pageno,
laparams=laparams)
self.showpageno = showpageno
self.imagewriter = imagewriter
return
def write_text(self, text):
text = utils.compatible_encode_method(text, self.codec, 'ignore')
if self.outfp_binary:
text = text.encode()
self.outfp.write(text)
return
def receive_layout(self, ltpage):
def render(item):
if isinstance(item, LTContainer):
for child in item:
render(child)
elif isinstance(item, LTText):
self.write_text(item.get_text())
if isinstance(item, LTTextBox):
self.write_text('\n')
elif isinstance(item, LTImage):
if self.imagewriter is not None:
self.imagewriter.export_image(item)
if self.showpageno:
self.write_text('Page %s\n' % ltpage.pageid)
render(ltpage)
self.write_text('\f')
return
# Some dummy functions to save memory/CPU when all that is wanted
# is text. This stops all the image and drawing output from being
# recorded and taking up RAM.
def render_image(self, name, stream):
if self.imagewriter is None:
return
PDFConverter.render_image(self, name, stream)
return
def paint_path(self, gstate, stroke, fill, evenodd, path):
return
class HTMLConverter(PDFConverter):
RECT_COLORS = {
'figure': 'yellow',
'textline': 'magenta',
'textbox': 'cyan',
'textgroup': 'red',
'curve': 'black',
'page': 'gray',
}
TEXT_COLORS = {
'textbox': 'blue',
'char': 'black',
}
def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1, laparams=None,
scale=1, fontscale=1.0, layoutmode='normal', showpageno=True,
pagemargin=50, imagewriter=None, debug=0, rect_colors=None,
text_colors=None):
PDFConverter.__init__(self, rsrcmgr, outfp, codec=codec, pageno=pageno,
laparams=laparams)
if text_colors is None:
text_colors = {'char': 'black'}
if rect_colors is None:
rect_colors = {'curve': 'black', 'page': 'gray'}
self.scale = scale
self.fontscale = fontscale
self.layoutmode = layoutmode
self.showpageno = showpageno
self.pagemargin = pagemargin
self.imagewriter = imagewriter
self.rect_colors = rect_colors
self.text_colors = text_colors
if debug:
self.rect_colors.update(self.RECT_COLORS)
self.text_colors.update(self.TEXT_COLORS)
self._yoffset = self.pagemargin
self._font = None
self._fontstack = []
self.write_header()
return
def write(self, text):
if self.codec:
text = text.encode(self.codec)
if sys.version_info < (3, 0):
text = str(text)
self.outfp.write(text)
return
def write_header(self):
self.write('<html><head>\n')
if self.codec:
s = '<meta http-equiv="Content-Type" content="text/html; ' \
'charset=%s">\n' % self.codec
else:
s = '<meta http-equiv="Content-Type" content="text/html">\n'
self.write(s)
self.write('</head><body>\n')
return
def write_footer(self):
page_links = ['<a href="#{}">{}</a>'.format(i, i)
for i in range(1, self.pageno)]
s = '<div style="position:absolute; top:0px;">Page: %s</div>\n' % \
', '.join(page_links)
self.write(s)
self.write('</body></html>\n')
return
def write_text(self, text):
self.write(enc(text))
return
def place_rect(self, color, borderwidth, x, y, w, h):
color = self.rect_colors.get(color)
if color is not None:
s = '<span style="position:absolute; border: %s %dpx solid; ' \
'left:%dpx; top:%dpx; width:%dpx; height:%dpx;"></span>\n' % \
(color, borderwidth, x * self.scale,
(self._yoffset - y) * self.scale, w * self.scale,
h * self.scale)
self.write(s)
return
def place_border(self, color, borderwidth, item):
self.place_rect(color, borderwidth, item.x0, item.y1, item.width,
item.height)
return
def place_image(self, item, borderwidth, x, y, w, h):
if self.imagewriter is not None:
name = self.imagewriter.export_image(item)
s = '<img src="%s" border="%d" style="position:absolute; ' \
'left:%dpx; top:%dpx;" width="%d" height="%d" />\n' % \
(enc(name), borderwidth, x * self.scale,
(self._yoffset - y) * self.scale, w * self.scale,
h * self.scale)
self.write(s)
return
def place_text(self, color, text, x, y, size):
color = self.text_colors.get(color)
if color is not None:
s = '<span style="position:absolute; color:%s; left:%dpx; ' \
'top:%dpx; font-size:%dpx;">' % \
(color, x * self.scale, (self._yoffset - y) * self.scale,
size * self.scale * self.fontscale)
self.write(s)
self.write_text(text)
self.write('</span>\n')
return
def begin_div(self, color, borderwidth, x, y, w, h, writing_mode=False):
self._fontstack.append(self._font)
self._font = None
s = '<div style="position:absolute; border: %s %dpx solid; ' \
'writing-mode:%s; left:%dpx; top:%dpx; width:%dpx; ' \
'height:%dpx;">' % \
(color, borderwidth, writing_mode, x * self.scale,
(self._yoffset - y) * self.scale, w * self.scale, h * self.scale)
self.write(s)
return
def end_div(self, color):
if self._font is not None:
self.write('</span>')
self._font = self._fontstack.pop()
self.write('</div>')
return
def put_text(self, text, fontname, fontsize):
font = (fontname, fontsize)
if font != self._font:
if self._font is not None:
self.write('</span>')
# Remove subset tag from fontname, see PDF Reference 5.5.3
fontname_without_subset_tag = fontname.split('+')[-1]
self.write('<span style="font-family: %s; font-size:%dpx">' %
(fontname_without_subset_tag,
fontsize * self.scale * self.fontscale))
self._font = font
self.write_text(text)
return
def put_newline(self):
self.write('<br>')
return
def receive_layout(self, ltpage):
def show_group(item):
if isinstance(item, LTTextGroup):
self.place_border('textgroup', 1, item)
for child in item:
show_group(child)
return
def render(item):
if isinstance(item, LTPage):
self._yoffset += item.y1
self.place_border('page', 1, item)
if self.showpageno:
self.write('<div style="position:absolute; top:%dpx;">' %
((self._yoffset-item.y1)*self.scale))
self.write('<a name="{}">Page {}</a></div>\n'
.format(item.pageid, item.pageid))
for child in item:
render(child)
if item.groups is not None:
for group in item.groups:
show_group(group)
elif isinstance(item, LTCurve):
self.place_border('curve', 1, item)
elif isinstance(item, LTFigure):
self.begin_div('figure', 1, item.x0, item.y1, item.width,
item.height)
for child in item:
render(child)
self.end_div('figure')
elif isinstance(item, LTImage):
self.place_image(item, 1, item.x0, item.y1, item.width,
item.height)
else:
if self.layoutmode == 'exact':
if isinstance(item, LTTextLine):
self.place_border('textline', 1, item)
for child in item:
render(child)
elif isinstance(item, LTTextBox):
self.place_border('textbox', 1, item)
self.place_text('textbox', str(item.index+1), item.x0,
item.y1, 20)
for child in item:
render(child)
elif isinstance(item, LTChar):
self.place_border('char', 1, item)
self.place_text('char', item.get_text(), item.x0,
item.y1, item.size)
else:
if isinstance(item, LTTextLine):
for child in item:
render(child)
if self.layoutmode != 'loose':
self.put_newline()
elif isinstance(item, LTTextBox):
self.begin_div('textbox', 1, item.x0, item.y1,
item.width, item.height,
item.get_writing_mode())
for child in item:
render(child)
self.end_div('textbox')
elif isinstance(item, LTChar):
self.put_text(item.get_text(), item.fontname,
item.size)
elif isinstance(item, LTText):
self.write_text(item.get_text())
return
render(ltpage)
self._yoffset += self.pagemargin
return
def close(self):
self.write_footer()
return
class XMLConverter(PDFConverter):
CONTROL = re.compile('[\x00-\x08\x0b-\x0c\x0e-\x1f]')
def __init__(self, rsrcmgr, outfp, codec='utf-8', pageno=1, laparams=None,
imagewriter=None, stripcontrol=False):
PDFConverter.__init__(self, rsrcmgr, outfp, codec=codec, pageno=pageno,
laparams=laparams)
self.imagewriter = imagewriter
self.stripcontrol = stripcontrol
self.write_header()
return
def write(self, text):
if self.codec:
text = text.encode(self.codec)
self.outfp.write(text)
return
def write_header(self):
if self.codec:
self.write('<?xml version="1.0" encoding="%s" ?>\n' % self.codec)
else:
self.write('<?xml version="1.0" ?>\n')
self.write('<pages>\n')
return
def write_footer(self):
self.write('</pages>\n')
return
def write_text(self, text):
if self.stripcontrol:
text = self.CONTROL.sub('', text)
self.write(enc(text))
return
def receive_layout(self, ltpage):
def show_group(item):
if isinstance(item, LTTextBox):
self.write('<textbox id="%d" bbox="%s" />\n' %
(item.index, bbox2str(item.bbox)))
elif isinstance(item, LTTextGroup):
self.write('<textgroup bbox="%s">\n' % bbox2str(item.bbox))
for child in item:
show_group(child)
self.write('</textgroup>\n')
return
def render(item):
if isinstance(item, LTPage):
s = '<page id="%s" bbox="%s" rotate="%d">\n' % \
(item.pageid, bbox2str(item.bbox), item.rotate)
self.write(s)
for child in item:
render(child)
if item.groups is not None:
self.write('<layout>\n')
for group in item.groups:
show_group(group)
self.write('</layout>\n')
self.write('</page>\n')
elif isinstance(item, LTLine):
s = '<line linewidth="%d" bbox="%s" />\n' % \
(item.linewidth, bbox2str(item.bbox))
self.write(s)
elif isinstance(item, LTRect):
s = '<rect linewidth="%d" bbox="%s" />\n' % \
(item.linewidth, bbox2str(item.bbox))
self.write(s)
elif isinstance(item, LTCurve):
s = '<curve linewidth="%d" bbox="%s" pts="%s"/>\n' % \
(item.linewidth, bbox2str(item.bbox), item.get_pts())
self.write(s)
elif isinstance(item, LTFigure):
s = '<figure name="%s" bbox="%s">\n' % \
(item.name, bbox2str(item.bbox))
self.write(s)
for child in item:
render(child)
self.write('</figure>\n')
elif isinstance(item, LTTextLine):
self.write('<textline bbox="%s">\n' % bbox2str(item.bbox))
for child in item:
render(child)
self.write('</textline>\n')
elif isinstance(item, LTTextBox):
wmode = ''
if isinstance(item, LTTextBoxVertical):
wmode = ' wmode="vertical"'
s = '<textbox id="%d" bbox="%s"%s>\n' %\
(item.index, bbox2str(item.bbox), wmode)
self.write(s)
for child in item:
render(child)
self.write('</textbox>\n')
elif isinstance(item, LTChar):
s = '<text font="%s" bbox="%s" colourspace="%s" ' \
'ncolour="%s" size="%.3f">' % \
(enc(item.fontname), bbox2str(item.bbox),
item.ncs.name, item.graphicstate.ncolor, item.size)
self.write(s)
self.write_text(item.get_text())
self.write('</text>\n')
elif isinstance(item, LTText):
self.write('<text>%s</text>\n' % item.get_text())
elif isinstance(item, LTImage):
if self.imagewriter is not None:
name = self.imagewriter.export_image(item)
self.write('<image src="%s" width="%d" height="%d" />\n' %
(enc(name), item.width, item.height))
else:
self.write('<image width="%d" height="%d" />\n' %
(item.width, item.height))
else:
assert False, str(('Unhandled', item))
return
render(ltpage)
return
def close(self):
self.write_footer()
return
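# Usage sketch (not part of the original module): how an XMLConverter instance is
# typically driven by the rest of pdfminer. The file names and layout parameters
# below are hypothetical; only the call sequence is assumed from pdfminer's API.
#
#   from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
#   from pdfminer.pdfpage import PDFPage
#   from pdfminer.layout import LAParams
#
#   rsrcmgr = PDFResourceManager()
#   with open('input.pdf', 'rb') as fp, open('output.xml', 'wb') as outfp:
#       device = XMLConverter(rsrcmgr, outfp, codec='utf-8', laparams=LAParams())
#       interpreter = PDFPageInterpreter(rsrcmgr, device)
#       for page in PDFPage.get_pages(fp):
#           interpreter.process_page(page)
#       device.close()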
| 37.404475
| 79
| 0.514403
|
d6f9f2845524d11ee0f5041a8bf044504f6d0336
| 11,081
|
py
|
Python
|
galaxy/accounts/tests/test_custom_user_model.py
|
bmclaughlin/galaxy
|
3f57e3684c27cb88d45881eaec16dc3095ac4e6d
|
[
"Apache-2.0"
] | 904
|
2016-10-11T13:35:19.000Z
|
2022-03-25T09:29:09.000Z
|
galaxy/accounts/tests/test_custom_user_model.py
|
bmclaughlin/galaxy
|
3f57e3684c27cb88d45881eaec16dc3095ac4e6d
|
[
"Apache-2.0"
] | 1,866
|
2016-10-15T21:28:09.000Z
|
2022-03-29T18:09:20.000Z
|
galaxy/accounts/tests/test_custom_user_model.py
|
bmclaughlin/galaxy
|
3f57e3684c27cb88d45881eaec16dc3095ac4e6d
|
[
"Apache-2.0"
] | 368
|
2016-10-11T13:44:08.000Z
|
2022-03-30T02:23:12.000Z
|
# (c) 2012-2018, Ansible
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
from unittest import mock
from django.core.exceptions import ValidationError
from django.contrib.auth.models import UserManager
from django.db.utils import DataError, IntegrityError
from django.utils import timezone
from django.test import TestCase
import pytest
from galaxy.accounts.models import CustomUser
from galaxy.common.testing import NOW
class TestCustomUserModel(TestCase):
VALID_EMAIL = "user@example.com"
VALID_PASSWORD = "****"
VALID_USERNAME = "USERNAME"
USERNAME_MAX_LENGTH = 30
FULL_NAME_MAX_LENGTH = 254
SHORT_NAME_MAX_LENGTH = 30
EMAIL_MAX_LENGTH = 254
def setUp(self):
CustomUser.objects.all().delete()
def test_manager_class(self):
assert isinstance(CustomUser.objects, UserManager)
def test_default(self):
assert CustomUser._meta.get_field('date_joined').default == \
timezone.now
@pytest.mark.model_fields_validation
@mock.patch.object(
CustomUser._meta.get_field('date_joined'),
"get_default",
side_effect=[NOW]
)
def test_create_minimal(self, fake_now):
# no mandatory fields
user = CustomUser.objects.create()
assert isinstance(user, CustomUser)
# check defaults
assert user.username == ""
assert user.full_name == ""
assert user.short_name == ""
assert not user.is_staff
assert user.email == ""
assert user.is_active
assert user.date_joined == NOW
assert user.avatar_url == ""
fake_now.assert_called_once()
@pytest.mark.database_integrity
def test_username_length_is_limited_in_db(self):
# does not raise
CustomUser.objects.create(
username='*' * self.USERNAME_MAX_LENGTH
)
with pytest.raises(DataError) as excinfo:
CustomUser.objects.create(
username='*' * (self.USERNAME_MAX_LENGTH + 1)
)
assert str(excinfo.value) == (
'value too long for type character varying({max_allowed})\n'
).format(
max_allowed=self.USERNAME_MAX_LENGTH
)
@pytest.mark.database_integrity
def test_username_must_be_unique_in_db(self):
with pytest.raises(IntegrityError) as excinfo:
CustomUser.objects.create(username=self.VALID_USERNAME)
CustomUser.objects.create(username=self.VALID_USERNAME)
assert str(excinfo.value) == (
'duplicate key value violates unique constraint '
'"accounts_customuser_username_key"\n'
'DETAIL: Key (username)=({duplicated_name}) already exists.\n'
).format(
duplicated_name=self.VALID_USERNAME
)
@pytest.mark.model_fields_validation
def test_username_must_match_regex(self):
# does not raise
CustomUser(
username='Abc',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
# does not raise
CustomUser(
username='A',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
# does not raise
CustomUser(
username='007',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
# does not raise
CustomUser(
username='@',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
# does not raise
CustomUser(
username='+++',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
# does not raise
CustomUser(
username='---',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
with pytest.raises(ValidationError) as excinfo:
CustomUser(
username='',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL).full_clean()
assert excinfo.value.message_dict == {
'username': ['This field cannot be blank.']
}
with pytest.raises(ValidationError) as excinfo:
CustomUser(
username='~',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
assert excinfo.value.message_dict == {
'username': ['Enter a valid username.']
}
with pytest.raises(ValidationError) as excinfo:
CustomUser(
username='$',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
assert excinfo.value.message_dict == {
'username': ['Enter a valid username.']
}
with pytest.raises(ValidationError) as excinfo:
CustomUser(
username='"',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
assert excinfo.value.message_dict == {
'username': ['Enter a valid username.']
}
with pytest.raises(ValidationError) as excinfo:
CustomUser(
username='юникод',
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
).full_clean()
assert excinfo.value.message_dict == {
'username': ['Enter a valid username.']
}
@pytest.mark.database_integrity
def test_full_name_length_is_limited_in_db(self):
# does not raise
CustomUser.objects.create(
full_name='*' * self.FULL_NAME_MAX_LENGTH)
with pytest.raises(DataError) as excinfo:
CustomUser.objects.create(
full_name='*' * (self.FULL_NAME_MAX_LENGTH + 1)
)
assert str(excinfo.value) == (
'value too long for type character varying({max_allowed})\n'
).format(
max_allowed=self.FULL_NAME_MAX_LENGTH
)
@pytest.mark.model_fields_validation
def test_full_name_length_is_limited(self):
# does not raise
CustomUser(
username=self.VALID_USERNAME,
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL,
            full_name='*' * self.FULL_NAME_MAX_LENGTH
).full_clean()
with pytest.raises(ValidationError) as excinfo:
CustomUser(
username=self.VALID_USERNAME,
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL,
                full_name='*' * (self.FULL_NAME_MAX_LENGTH + 1)
).full_clean()
assert excinfo.value.message_dict == {
'full_name': [
'Ensure this value has at most {valid} '
'characters (it has {given}).'.format(
valid=self.FULL_NAME_MAX_LENGTH,
given=self.FULL_NAME_MAX_LENGTH + 1)
]
}
@pytest.mark.database_integrity
def test_short_name_length_is_limited_in_db(self):
# does not raise
CustomUser.objects.create(
short_name='*' * self.SHORT_NAME_MAX_LENGTH)
with pytest.raises(DataError) as excinfo:
CustomUser.objects.create(
short_name='*' * (self.SHORT_NAME_MAX_LENGTH + 1)
)
assert str(excinfo.value) == (
'value too long for type character varying({max_allowed})\n'
).format(
max_allowed=self.SHORT_NAME_MAX_LENGTH
)
@pytest.mark.database_integrity
def test_email_length_is_limited_in_db(self):
# does not raise
CustomUser.objects.create(
email='*' * self.EMAIL_MAX_LENGTH)
with pytest.raises(DataError) as excinfo:
CustomUser.objects.create(
email='*' * (self.EMAIL_MAX_LENGTH + 1))
assert str(excinfo.value) == (
'value too long for type character varying({max_allowed})\n'
).format(
max_allowed=self.EMAIL_MAX_LENGTH
)
@pytest.mark.database_integrity
def test_email_must_be_unique_in_db(self):
with pytest.raises(IntegrityError) as excinfo:
CustomUser.objects.create(
username=self.VALID_USERNAME,
email=self.VALID_EMAIL
)
CustomUser.objects.create(
username=self.VALID_USERNAME + "_",
email=self.VALID_EMAIL
)
assert str(excinfo.value) == (
'duplicate key value violates unique constraint '
'"accounts_customuser_email_key"\n'
'DETAIL: Key (email)=({duplicated_name}) already exists.\n'
).format(
duplicated_name=self.VALID_EMAIL
)
# testing custom methods
@pytest.mark.model_methods
def test_convert_to_string(self):
# __str__ will return username
user = CustomUser(
username=self.VALID_USERNAME,
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
)
assert str(user) == self.VALID_USERNAME
@pytest.mark.model_methods
def test_repr(self):
        # __str__ returns username, but it does not affect __repr__
user = CustomUser(
username=self.VALID_USERNAME,
password=self.VALID_PASSWORD,
email=self.VALID_EMAIL
)
assert repr(user) == (
'<CustomUser: {username}>'
).format(username=self.VALID_USERNAME)
@pytest.mark.model_methods
def test_get_absolute_url(self):
# this method creates url with urlencoded username
user = CustomUser(
username=self.VALID_USERNAME)
assert user.get_absolute_url() == (
'/users/{username}/'
).format(
username=self.VALID_USERNAME
)
user = CustomUser(
username="Aaa123@"
)
assert user.get_absolute_url() == (
'/users/Aaa123%40/'
)
| 31.126404
| 75
| 0.595253
|
54ef741e0239bb88028a2f9a77a9eda69d7d937a
| 4,992
|
py
|
Python
|
zerver/webhooks/bitbucket/tests.py
|
ajayns/zulip
|
c5ff020246cd75d0650f93ef621b76bdc4bbb8aa
|
[
"Apache-2.0"
] | null | null | null |
zerver/webhooks/bitbucket/tests.py
|
ajayns/zulip
|
c5ff020246cd75d0650f93ef621b76bdc4bbb8aa
|
[
"Apache-2.0"
] | null | null | null |
zerver/webhooks/bitbucket/tests.py
|
ajayns/zulip
|
c5ff020246cd75d0650f93ef621b76bdc4bbb8aa
|
[
"Apache-2.0"
] | 1
|
2019-10-14T23:36:14.000Z
|
2019-10-14T23:36:14.000Z
|
# -*- coding: utf-8 -*-
from mock import patch, MagicMock
from typing import Dict, Union, Text, Optional
from zerver.lib.test_classes import WebhookTestCase
class BitbucketHookTests(WebhookTestCase):
STREAM_NAME = 'bitbucket'
URL_TEMPLATE = "/api/v1/external/bitbucket?stream={stream}"
FIXTURE_DIR_NAME = 'bitbucket'
EXPECTED_SUBJECT = u"Repository name"
EXPECTED_SUBJECT_BRANCH_EVENTS = u"Repository name / master"
def test_bitbucket_on_push_event(self) -> None:
fixture_name = 'push'
self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
commit_info = u'* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))'
expected_message = u"kolaszek pushed 1 commit to branch master.\n\n{}".format(commit_info)
self.send_and_test_stream_message(fixture_name, self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, **self.api_auth(self.TEST_USER_EMAIL))
def test_bitbucket_on_push_event_filtered_by_branches(self) -> None:
fixture_name = 'push'
self.url = self.build_webhook_url(payload=self.get_body(fixture_name),
branches='master,development')
commit_info = u'* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))'
expected_message = u"kolaszek pushed 1 commit to branch master.\n\n{}".format(commit_info)
self.send_and_test_stream_message(fixture_name, self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, **self.api_auth(self.TEST_USER_EMAIL))
def test_bitbucket_on_push_commits_above_limit_event(self) -> None:
fixture_name = 'push_commits_above_limit'
self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
commit_info = u'* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))\n'
expected_message = u"kolaszek pushed 50 commits to branch master.\n\n{}[and 30 more commit(s)]".format(commit_info * 20)
self.send_and_test_stream_message(fixture_name, self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, **self.api_auth(self.TEST_USER_EMAIL))
def test_bitbucket_on_push_commits_above_limit_event_filtered_by_branches(self) -> None:
fixture_name = 'push_commits_above_limit'
self.url = self.build_webhook_url(payload=self.get_body(fixture_name),
branches='master,development')
commit_info = u'* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))\n'
expected_message = u"kolaszek pushed 50 commits to branch master.\n\n{}[and 30 more commit(s)]".format(commit_info * 20)
self.send_and_test_stream_message(fixture_name, self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, **self.api_auth(self.TEST_USER_EMAIL))
def test_bitbucket_on_force_push_event(self) -> None:
fixture_name = 'force_push'
self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
expected_message = u"kolaszek [force pushed](https://bitbucket.org/kolaszek/repository-name)"
self.send_and_test_stream_message(fixture_name, self.EXPECTED_SUBJECT, expected_message, **self.api_auth(self.TEST_USER_EMAIL))
@patch('zerver.webhooks.bitbucket.view.check_send_stream_message')
def test_bitbucket_on_push_event_filtered_by_branches_ignore(self, check_send_stream_message_mock: MagicMock) -> None:
fixture_name = 'push'
payload = self.get_body(fixture_name)
self.url = self.build_webhook_url(payload=payload,
branches='changes,development')
result = self.client_post(self.url, payload,
content_type="application/json,",
**self.api_auth(self.TEST_USER_EMAIL))
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.bitbucket.view.check_send_stream_message')
    def test_bitbucket_push_commits_above_limit_filtered_by_branches_ignore(
            self, check_send_stream_message_mock: MagicMock) -> None:
fixture_name = 'push_commits_above_limit'
payload = self.get_body(fixture_name)
self.url = self.build_webhook_url(payload=payload,
branches='changes,development')
result = self.client_post(self.url, payload,
content_type="application/json,",
**self.api_auth(self.TEST_USER_EMAIL))
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
def get_body(self, fixture_name: Text) -> Union[Text, Dict[str, Text]]:
return self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)
| 64
| 149
| 0.713942
|
00064f320dc05223df5e6ed8128e34dc0ab97e5d
| 1,461
|
py
|
Python
|
src/mcsdk/integration/os/process.py
|
Stick97/mcsdk-automation-framework-core
|
5c7cc798fd4e0d54dfb3e0b900a828db4a72034e
|
[
"BSD-3-Clause"
] | 9
|
2019-11-03T10:15:06.000Z
|
2022-02-26T06:16:10.000Z
|
src/mcsdk/integration/os/process.py
|
Stick97/mcsdk-automation-framework-core
|
5c7cc798fd4e0d54dfb3e0b900a828db4a72034e
|
[
"BSD-3-Clause"
] | 2
|
2020-07-08T18:23:02.000Z
|
2022-01-17T17:31:18.000Z
|
src/mcsdk/integration/os/process.py
|
Stick97/mcsdk-automation-framework-core
|
5c7cc798fd4e0d54dfb3e0b900a828db4a72034e
|
[
"BSD-3-Clause"
] | 5
|
2020-07-06T16:28:15.000Z
|
2022-02-22T00:51:48.000Z
|
import subprocess
class Command:
""" Command runner class """
def __init__(self, command, debug=True):
""" Class constructor """
self.__command = command
self.__output = b''
self.__debug = debug
def run(self):
""" Runs the command and returns the status code """
if self.__debug:
print("Running command: " + self.get_command())
result = subprocess.run(self.__command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
self.__output = result.stdout
return result.returncode
def returned_errors(self):
""" Checks if the output contains errors """
output = self.get_output()
if output.find("error:") != -1 \
or output.find("fatal:") != -1 \
or output.find('FAILURES!') != -1 \
or output.find('Exception in thread') != -1 \
or output.find('ERRORS!') != -1 \
or output.find('Build FAILED') != -1:
return True
return False
def get_command(self):
""" Returns the string representation of the command """
if isinstance(self.__command, list):
return " ".join(self.__command)
return self.__command
def get_output(self):
"""
Returns the output of the command that has been run.
:returns: str
"""
return self.__output.decode("utf-8")
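if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module). The echoed string is
    # hypothetical; any command available on the host shell would work the same way.
    cmd = Command("echo hello", debug=True)
    exit_code = cmd.run()
    # The runner captures stdout and stderr together, so the combined output can be
    # inspected after the command has finished.
    print("exit code:", exit_code)
    print("output:", cmd.get_output().strip())
    print("errors detected:", cmd.returned_errors())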
| 29.816327
| 109
| 0.559206
|
8e74c473b53b29dbb787befc8ef3b67225dcc69a
| 50,308
|
py
|
Python
|
pyhandle/tests/testcases/handleclient_write_patched_unit_test.py
|
merretbuurman/PYHANDLE
|
3c621eda80e26fdec945d4d42f6b62d0dcc70726
|
[
"Apache-2.0"
] | null | null | null |
pyhandle/tests/testcases/handleclient_write_patched_unit_test.py
|
merretbuurman/PYHANDLE
|
3c621eda80e26fdec945d4d42f6b62d0dcc70726
|
[
"Apache-2.0"
] | null | null | null |
pyhandle/tests/testcases/handleclient_write_patched_unit_test.py
|
merretbuurman/PYHANDLE
|
3c621eda80e26fdec945d4d42f6b62d0dcc70726
|
[
"Apache-2.0"
] | 1
|
2020-11-04T04:23:50.000Z
|
2020-11-04T04:23:50.000Z
|
"""Testing methods that normally need Handle server read access,
by patching the get request to replace read access."""
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import json
import mock
from pyhandle.client.resthandleclient import RESTHandleClient
from pyhandle.clientcredentials import PIDClientCredentials
from pyhandle.handleexceptions import *
from pyhandle.tests.mockresponses import MockResponse, MockSearchResponse
from pyhandle.tests.utilities import failure_message, replace_timestamps, sort_lists
from pyhandle.utilhandle import check_handle_syntax
class RESTHandleClientWriteaccessPatchedTestCase(unittest.TestCase):
'''Testing methods with write access (patched server access).
The tests work by intercepting all HTTP put requests and comparing their payload to
the payload of successful real put requests from previous integration tests.
The payloads from previous tests were collected by a logger in the integration
tests (look for REQUESTLOGGER in the write-integration test code). Of course,
the names of the handles have to be adapted in there.
    Comparison is done by Python dictionary comparison, which ignores
the order of the record entries, whitespace, string separators and
whether keys are unicode strings or normal strings.
The timestamps should not be compared, so they should be removed. For this,
there is a method "replace_timestamps".
'''
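    # Illustrative sketch of the comparison idea used throughout this class (the
    # values are hypothetical): timestamps are stripped from both payloads and list
    # order is normalised before the dictionaries are compared.
    #
    #   passed   = {"values": [{"index": 1, "type": "URL", "data": "http://foo",
    #                           "timestamp": "2015-09-29T15:51:08Z"}]}
    #   expected = {"values": [{"index": 1, "type": "URL", "data": "http://foo"}]}
    #   replace_timestamps(passed)
    #   replace_timestamps(expected)
    #   assert sort_lists(passed) == sort_lists(expected)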
@mock.patch('pyhandle.handlesystemconnector.HandleSystemConnector.check_if_username_exists')
def setUp(self, username_check_patch):
# Define replacement for the patched check for username existence:
        username_check_patch.return_value = True
# Create a client instance for write access:
self.inst = RESTHandleClient.instantiate_with_username_and_password('http://handle.server', '999:user/name', 'apassword')
    def tearDown(self):
        pass
def get_payload_headers_from_mockresponse(self, putpatch):
# For help, please see: http://www.voidspace.org.uk/python/mock/examples.html#checking-multiple-calls-with-mock
kwargs_passed_to_put = putpatch.call_args_list[len(putpatch.call_args_list) - 1][1]
passed_payload = json.loads(kwargs_passed_to_put['data'])
replace_timestamps(passed_payload)
passed_headers = kwargs_passed_to_put['headers']
return passed_payload, passed_headers
# register_handle
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_register_handle(self, getpatch, putpatch):
"""Test registering a new handle with various types of values."""
# Define the replacement for the patched GET method:
# The handle does not exist yet, so a response with 404
mock_response_get = MockResponse(notfound=True)
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.put method:
mock_response_put = MockResponse(wascreated=True)
putpatch.return_value = mock_response_put
# Run the code to be tested:
testhandle = 'my/testhandle'
testlocation = 'http://foo.bar'
testchecksum = '123456'
additional_URLs = ['http://bar.bar', 'http://foo.foo']
handle_returned = self.inst.register_handle(testhandle,
location=testlocation,
checksum=testchecksum,
additional_URLs=additional_URLs,
FOO='foo',
BAR='bar')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload+headers passed to "requests.put"
passed_payload, _ = self.get_payload_headers_from_mockresponse(putpatch)
# Compare with expected payload:
expected_payload = {"values": [{"index": 100, "type": "HS_ADMIN", "data": {"value": {"index": "200", "handle": "0.NA/my", "permissions": "011111110011"}, "format": "admin"}}, {"index": 1, "type": "URL", "data": "http://foo.bar"}, {"index": 2, "type": "CHECKSUM", "data": "123456"}, {"index": 3, "type": "FOO", "data": "foo"}, {"index": 4, "type": "BAR", "data": "bar"}, {"index": 5, "type": "10320/LOC", "data": "<locations><location href=\"http://bar.bar\" id=\"0\" /><location href=\"http://foo.foo\" id=\"1\" /></locations>"}]}
replace_timestamps(expected_payload)
self.assertEqual(sort_lists(passed_payload), sort_lists(expected_payload),
failure_message(expected=expected_payload, passed=passed_payload, methodname='register_handle'))
@mock.patch('pyhandle.handlesystemconnector.HandleSystemConnector.check_if_username_exists')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_register_handle_different_owner(self, getpatch, putpatch, username_check_patch):
"""Test registering a new handle with various types of values."""
# Define the replacement for the patched GET method:
# The handle does not exist yet, so a response with 404
mock_response_get = MockResponse(notfound=True)
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.put method:
mock_response_put = MockResponse(wascreated=True)
putpatch.return_value = mock_response_put
# Define replacement for the patched check for username existence:
        username_check_patch.return_value = True
# Make another connector, to add the handle owner:
cred = PIDClientCredentials(client='client',
handle_server_url='http://handle.server',
username='999:user/name',
password='apassword',
prefix='myprefix',
handleowner='300:handle/owner')
newInst = RESTHandleClient.instantiate_with_credentials(cred)
# Run the code to be tested:
testhandle = 'my/testhandle'
testlocation = 'http://foo.bar'
testchecksum = '123456'
additional_URLs = ['http://bar.bar', 'http://foo.foo']
handle_returned = newInst.register_handle(testhandle,
location=testlocation,
checksum=testchecksum,
additional_URLs=additional_URLs,
FOO='foo',
BAR='bar')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload+headers passed to "requests.put"
passed_payload, _ = self.get_payload_headers_from_mockresponse(putpatch)
# Compare with expected payload:
expected_payload = {"values": [{"index": 100, "type": "HS_ADMIN", "data": {"value": {"index": 300, "handle": "handle/owner", "permissions": "011111110011"}, "format": "admin"}}, {"index": 1, "type": "URL", "data": "http://foo.bar"}, {"index": 2, "type": "CHECKSUM", "data": "123456"}, {"index": 3, "type": "FOO", "data": "foo"}, {"index": 4, "type": "BAR", "data": "bar"}, {"index": 5, "type": "10320/LOC", "data": "<locations><location href=\"http://bar.bar\" id=\"0\" /><location href=\"http://foo.foo\" id=\"1\" /></locations>"}]}
replace_timestamps(expected_payload)
self.assertEqual(sort_lists(passed_payload), sort_lists(expected_payload),
failure_message(expected=expected_payload, passed=passed_payload, methodname='register_handle'))
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_register_handle_already_exists(self, getpatch, putpatch):
"""Test if overwrite=False prevents handle overwriting."""
# Define the replacement for the patched GET method:
mock_response_get = MockResponse(success=True)
getpatch.return_value = mock_response_get
# Run code to be tested + check exception:
with self.assertRaises(HandleAlreadyExistsException):
self.inst.register_handle('my/testhandle',
'http://foo.foo',
test1='I am just an illusion.',
overwrite=False)
# Check if nothing was changed (PUT should not have been called):
self.assertEqual(putpatch.call_count, 0,
'The method "requests.put" was called! (' + str(putpatch.call_count) + ' times). It should NOT have been called.')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_register_handle_already_exists_overwrite(self, getpatch, putpatch):
"""Test registering an existing handle with various types of values, with overwrite=True."""
# Define the replacement for the patched GET method:
mock_response_get = MockResponse(success=True)
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.put method:
mock_response_put = MockResponse(wascreated=True)
putpatch.return_value = mock_response_put
# Run the method to be tested:
testhandle = 'my/testhandle'
testlocation = 'http://foo.bar'
testchecksum = '123456'
overwrite = True
additional_URLs = ['http://bar.bar', 'http://foo.foo']
handle_returned = self.inst.register_handle(testhandle,
location=testlocation,
checksum=testchecksum,
additional_URLs=additional_URLs,
overwrite=overwrite,
FOO='foo',
BAR='bar')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload+headers passed to "requests.put"
passed_payload, passed_headers = self.get_payload_headers_from_mockresponse(putpatch)
# Compare with expected payload:
expected_payload = {"values": [{"index": 100, "type": "HS_ADMIN", "data": {"value": {"index": "200", "handle": "0.NA/my", "permissions": "011111110011"}, "format": "admin"}}, {"index": 1, "type": "URL", "data": "http://foo.bar"}, {"index": 2, "type": "CHECKSUM", "data": "123456"}, {"index": 3, "type": "FOO", "data": "foo"}, {"index": 4, "type": "BAR", "data": "bar"}, {"index": 5, "type": "10320/LOC", "data": "<locations><location href=\"http://bar.bar\" id=\"0\" /><location href=\"http://foo.foo\" id=\"1\" /></locations>"}]}
replace_timestamps(expected_payload)
self.assertEqual(sort_lists(passed_payload), sort_lists(expected_payload),
failure_message(expected=expected_payload, passed=passed_payload, methodname='register_handle'))
# Check if requests.put received an authorization header:
self.assertIn('Authorization', passed_headers,
'Authorization header not passed: ' + str(passed_headers))
# generate_and_register_handle
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_generate_and_register_handle(self, getpatch, putpatch):
"""Test generating and registering a new handle."""
# Define the replacement for the patched GET method:
mock_response_get = MockResponse(notfound=True)
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.put method:
mock_response_put = MockResponse(wascreated=True)
putpatch.return_value = mock_response_put
# Run the method to be tested:
testlocation = 'http://foo.bar'
testchecksum = '123456'
handle_returned = self.inst.generate_and_register_handle(
prefix='my',
location=testlocation,
checksum=testchecksum)
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload+headers passed to "requests.put"
passed_payload, _ = self.get_payload_headers_from_mockresponse(putpatch)
# Compare with expected payload:
expected_payload = {"values": [{"index": 100, "type": "HS_ADMIN", "data": {"value": {"index": "200", "handle": "0.NA/my", "permissions": "011111110011"}, "format": "admin"}}, {"index": 1, "type": "URL", "data": "http://foo.bar"}, {"index": 2, "type": "CHECKSUM", "data": "123456"}]}
replace_timestamps(expected_payload)
self.assertEqual(sort_lists(passed_payload), sort_lists(expected_payload),
failure_message(expected=expected_payload, passed=passed_payload, methodname='generate_and_register_handle'))
# modify_handle_value
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_modify_handle_value_one(self, getpatch, putpatch):
"""Test modifying one existing handle value."""
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":333, "type": "TEST3", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.put method:
cont = {"responseCode":1, "handle":"my/testhandle"}
mock_response_put = MockResponse(status_code=201, content=json.dumps(cont))
putpatch.return_value = mock_response_put
# Run the method to be tested:
testhandle = 'my/testhandle'
self.inst.modify_handle_value(testhandle, TEST4='newvalue')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload passed to "requests.put"
passed_payload, _ = self.get_payload_headers_from_mockresponse(putpatch)
# Compare with expected payload:
expected_payload = {"values": [{"index": 4, "ttl": 86400, "type": "TEST4", "data": "newvalue"}]}
replace_timestamps(expected_payload)
self.assertEqual(passed_payload, expected_payload,
failure_message(expected=expected_payload,
passed=passed_payload,
methodname='modify_handle_value'))
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_modify_handle_value_several(self, getpatch, putpatch):
"""Test modifying several existing handle values."""
# Define the replacement for the patched GET method:
cont = {
"responseCode":1,
"handle":"my/testhandle",
"values":[
{
"index":111,
"type": "TEST1",
"data":{
"format":"string",
"value":"val1"
},
"ttl":86400,
"timestamp":"2015-09-29T15:51:08Z"
}, {
"index":2222,
"type": "TEST2",
"data":{
"format":"string",
"value":"val2"
},
"ttl":86400,
"timestamp":"2015-09-29T15:51:08Z"
}, {
"index":333,
"type": "TEST3",
"data":{
"format":"string",
"value":"val3"
},
"ttl":86400,
"timestamp":"2015-09-29T15:51:08Z"
}, {
"index":4,
"type": "TEST4",
"data":{
"format":"string",
"value":"val4"
},
"ttl":86400,
"timestamp":"2015-09-29T15:51:08Z"
}]
}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
        # Define the replacement for the patched requests.put method:
mock_response_put = MockResponse()
putpatch.return_value = mock_response_put
# Test variables
testhandle = 'my/testhandle'
# Run the method to be tested:
self.inst.modify_handle_value(testhandle,
TEST4='new4',
TEST2='new2',
TEST3='new3')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload passed to "requests.put"
passed_payload, _ = self.get_payload_headers_from_mockresponse(putpatch)
# Compare with expected payload:
expected_payload = {
"values":[
{
"index":333,
"type": "TEST3",
"data":"new3",
"ttl":86400,
}, {
"index":2222,
"type": "TEST2",
"data":"new2",
"ttl":86400,
}, {
"index":4,
"type": "TEST4",
"data":"new4",
"ttl":86400,
}]
}
replace_timestamps(expected_payload)
self.assertEqual(sort_lists(passed_payload), sort_lists(expected_payload),
failure_message(expected=expected_payload,
passed=passed_payload,
methodname='modify_handle_value'))
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_modify_handle_value_corrupted(self, getpatch, putpatch):
"""Test exception when trying to modify corrupted handle record."""
# Define the replacement for the patched GET method (getting a corrupted record):
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":333, "type": "TEST2", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.put method:
cont = {"responseCode":1, "handle":"my/testhandle"}
mock_response_put = MockResponse(status_code=201, content=json.dumps(cont))
putpatch.return_value = mock_response_put
# Call the method to be tested: Modifying corrupted raises exception:
with self.assertRaises(BrokenHandleRecordException):
self.inst.modify_handle_value('my/testhandle',
TEST4='new4',
TEST2='new2',
TEST3='new3')
# Check if PUT was called (PUT should not have been called):
self.assertEqual(putpatch.call_count, 0,
'The method "requests.put" was called! (' + str(putpatch.call_count) + ' times). It should NOT have been called.')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_modify_handle_value_without_authentication(self, getpatch, putpatch):
"""Test if exception when not authenticated."""
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":333, "type": "TEST3", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.delete method:
mock_response_put = MockResponse()
putpatch.return_value = mock_response_put
# Test variables
inst_readonly = RESTHandleClient('http://foo.com', HTTP_verify=True)
testhandle = 'my/testhandle'
# Run code to be tested and check exception:
with self.assertRaises(HandleAuthenticationError):
inst_readonly.modify_handle_value(testhandle, FOO='bar')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_modify_handle_value_several_inexistent(self, getpatch, putpatch):
"""Test modifying several existing handle values, one of them inexistent."""
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":333, "type": "TEST3", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
        # Define the replacement for the patched requests.put method:
mock_response_put = MockResponse()
putpatch.return_value = mock_response_put
# Test variables
testhandle = 'my/testhandle'
# Run the method to be tested:
self.inst.modify_handle_value(testhandle,
TEST4='new4',
TEST2='new2',
TEST100='new100')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload passed to "requests.put"
passed_payload, _ = self.get_payload_headers_from_mockresponse(putpatch)
passed_payload.get('values', {})
# Compare with expected payload:
expected_payload = {"values": [{"index": 2, "type": "TEST100", "data": "new100"}, {"index": 2222, "ttl": 86400, "type": "TEST2", "data": "new2"}, {"index": 4, "ttl": 86400, "type": "TEST4", "data": "new4"}]}
expected_payload.get('values', {})
replace_timestamps(expected_payload)
self.assertEqual(sort_lists(passed_payload), sort_lists(expected_payload),
failure_message(expected=expected_payload,
passed=passed_payload,
methodname='modify_handle_value'))
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_modify_handle_value_several_inexistent_2(self, getpatch, putpatch):
"""Test modifying several existing handle values, SEVERAL of them inexistent."""
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":333, "type": "TEST3", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
        # Define the replacement for the patched requests.put method:
mock_response_put = MockResponse()
putpatch.return_value = mock_response_put
# Test variables
testhandle = 'my/testhandle'
# Run the method to be tested:
self.inst.modify_handle_value(testhandle,
TEST4='new4',
TEST2='new2',
TEST100='new100',
TEST101='new101')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 1,
'The method "requests.put" was not called once, but ' + str(putpatch.call_count) + ' times.')
# Get the payload passed to "requests.put"
passed_payload, _ = self.get_payload_headers_from_mockresponse(putpatch)
# Compare with expected payload:
expected_payload = {'values': [{'index': 2, 'type': 'TEST100', 'data': 'new100'}, {'index': 2222, 'ttl': 86400, 'type': 'TEST2', 'data': 'new2'}, {'index': 4, 'ttl': 86400, 'type': 'TEST4', 'data': 'new4'}, {'index': 3, 'type': 'TEST101', 'data': 'new101'}]}
expected_payload.get('values', {})
replace_timestamps(expected_payload)
self.assertEqual(sort_lists(passed_payload), sort_lists(expected_payload),
failure_message(expected=expected_payload,
passed=passed_payload,
methodname='modify_handle_value'))
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_modify_handle_value_HS_ADMIN(self, getpatch, putpatch):
"""Test exception when trying to modify HS_ADMIN."""
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":333, "type": "TEST3", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-29T15:51:08Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
        # Define the replacement for the patched requests.put method:
mock_response_put = MockResponse()
putpatch.return_value = mock_response_put
# Test variables
testhandle = 'my/testhandle'
# Run the method to be tested and check exception:
with self.assertRaises(IllegalOperationException):
self.inst.modify_handle_value(testhandle, HS_ADMIN='please let me in!')
# delete_handle_value:
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_delete_handle_value_one_entry(self, getpatch, deletepatch):
"""Test deleting one entry from a record."""
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":333, "type": "TEST2", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.delete method:
mock_response_del = MockResponse()
deletepatch.return_value = mock_response_del
# Call the method to be tested:
self.inst.delete_handle_value('my/testhandle', 'TEST1')
# Get the args passed to "requests.delete"
# For help, please see: http://www.voidspace.org.uk/python/mock/examples.html#checking-multiple-calls-with-mock
positional_args_passed_to_delete = deletepatch.call_args_list[len(deletepatch.call_args_list) - 1][0]
passed_url = positional_args_passed_to_delete[0]
# Compare with expected URL:
self.assertIn('?index=111', passed_url,
'The index 111 is not specified in the URL ' + passed_url + '. This is serious!')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_delete_handle_value_several_entries(self, getpatch, deletepatch):
"""Test deleting several entries from a record."""
# Test variables
testhandle = 'my/testhandle'
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":testhandle, "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":333, "type": "TEST2", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.delete method:
mock_response_del = MockResponse()
deletepatch.return_value = mock_response_del
# Call the method to be tested:
self.inst.delete_handle_value(testhandle, ['TEST1', 'TEST2'])
# Get the args passed to "requests.delete"
# For help, please see: http://www.voidspace.org.uk/python/mock/examples.html#checking-multiple-calls-with-mock
positional_args_passed_to_delete = deletepatch.call_args_list[len(deletepatch.call_args_list) - 1][0]
passed_url = positional_args_passed_to_delete[0]
# Compare with expected URL:
self.assertIn('index=111', passed_url,
'The index 111 is not specified in the URL ' + passed_url + '. This may be serious!')
        self.assertIn('index=2222', passed_url,
'The index 2222 is not specified in the URL ' + passed_url + '. This may be serious!')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_delete_handle_value_inexistent_entry(self, getpatch, deletepatch):
"""Test deleting one inexistent entry from a record."""
# Test variables
testhandle = 'my/testhandle'
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":testhandle, "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":333, "type": "TEST2", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.delete method:
mock_response_del = MockResponse()
deletepatch.return_value = mock_response_del
# Call the method to be tested:
self.inst.delete_handle_value(testhandle, 'test100')
        # Check if DELETE was called (DELETE should not have been called):
        self.assertEqual(deletepatch.call_count, 0,
            'The method "requests.delete" was called! (' + str(deletepatch.call_count) + ' times). It should NOT have been called.')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_delete_handle_value_several_entries_one_nonexistent(self, getpatch, deletepatch):
"""Test deleting several entries from a record, one of them does not exist."""
# Test variables
testhandle = 'my/testhandle'
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":testhandle, "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":333, "type": "TEST2", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.delete method:
mock_response_del = MockResponse()
deletepatch.return_value = mock_response_del
# Call the method to be tested:
self.inst.delete_handle_value(testhandle, ['TEST1', 'TEST100'])
# Get the args passed to "requests.delete"
# For help, please see: http://www.voidspace.org.uk/python/mock/examples.html#checking-multiple-calls-with-mock
positional_args_passed_to_delete = deletepatch.call_args_list[len(deletepatch.call_args_list) - 1][0]
passed_url = positional_args_passed_to_delete[0]
# Compare with expected URL:
self.assertIn('index=111', passed_url,
'The index 111 is not specified in the URL ' + passed_url + '. This may be serious!')
self.assertNotIn('&index=', passed_url,
'A second index was specified in the URL ' + passed_url + '. This may be serious!')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_delete_handle_value_several_occurrences(self, getpatch, deletepatch):
"""Test trying to delete from a corrupted handle record."""
# Define the replacement for the patched GET method (getting a corrupted record):
cont = {"responseCode":1, "handle":"my/testhandle", "values":[{"index":111, "type": "TEST1", "data":{"format":"string", "value":"val1"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":2222, "type": "TEST2", "data":{"format":"string", "value":"val2"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":333, "type": "TEST2", "data":{"format":"string", "value":"val3"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}, {"index":4, "type": "TEST4", "data":{"format":"string", "value":"val4"}, "ttl":86400, "timestamp":"2015-09-30T15:08:49Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.delete method:
mock_response_del = MockResponse()
deletepatch.return_value = mock_response_del
# Call the method to be tested:
self.inst.delete_handle_value('my/testhandle', 'TEST2')
# Get the args passed to "requests.delete"
# For help, please see: http://www.voidspace.org.uk/python/mock/examples.html#checking-multiple-calls-with-mock
positional_args_passed_to_delete = deletepatch.call_args_list[len(deletepatch.call_args_list) - 1][0]
passed_url = positional_args_passed_to_delete[0]
# Compare with expected URL:
self.assertIn('index=2222', passed_url,
'The index 2222 is not specified in the URL ' + passed_url + '. This may be serious!')
self.assertIn('index=333', passed_url,
'The index 333 is not specified in the URL ' + passed_url + '. This may be serious!')
        # Check if DELETE was called once:
        self.assertEqual(deletepatch.call_count, 1,
            'The method "requests.delete" was not called once, but ' + str(deletepatch.call_count) + ' times.')
# delete_handle:
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
def test_delete_handle(self, deletepatch):
# Define the replacement for the patched requests.delete method:
mock_response_del = MockResponse(success=True)
deletepatch.return_value = mock_response_del
# Call method to be tested:
self.inst.delete_handle('my/testhandle')
# Get the args passed to "requests.delete"
# For help, please see: http://www.voidspace.org.uk/python/mock/examples.html#checking-multiple-calls-with-mock
positional_args_passed_to_delete = deletepatch.call_args_list[len(deletepatch.call_args_list) - 1][0]
passed_url = positional_args_passed_to_delete[0]
# Compare with expected URL:
self.assertNotIn('index=', passed_url,
'Indices were passed to the delete method.')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.delete')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_delete_handle_inexistent(self, getpatch, deletepatch):
# Define the replacement for the patched GET method:
mock_response_get = MockResponse(notfound=True)
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.delete method:
mock_response_del = MockResponse(notfound=True)
deletepatch.return_value = mock_response_del
# Call method to be tested, assert exception
with self.assertRaises(HandleNotFoundException):
resp = self.inst.delete_handle('my/testhandle')
def test_delete_handle_too_many_args(self):
# Call method to be tested:
with self.assertRaises(TypeError):
self.inst.delete_handle('my/testhandle', 'TEST1')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.put')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_GenericHandleError(self, getpatch, putpatch):
"""Test causing a Generic Handle Exception.
This should never happen, but this exception was designed for the
really unexpected things, so to make sure it works, I invent a
        very broken, illegal action here.
"""
# Define the replacement for the patched GET method:
cont = {"responseCode":1, "handle":"not/me", "values":[{"index":1, "type":"URL", "data":{"format":"string", "value":"www.url.foo"}, "ttl":86400, "timestamp":"2015-09-30T15:54:30Z"}, {"index":2, "type":"10320/LOC", "data":{"format":"string", "value":"<locations><location href = 'http://first.foo' /><location href = 'http://second.foo' /></locations> "}, "ttl":86400, "timestamp":"2015-09-30T15:54:30Z"}]}
mock_response_get = MockResponse(status_code=200, content=json.dumps(cont))
getpatch.return_value = mock_response_get
# Define the replacement for the patched requests.put method:
mock_response_put = MockResponse()
putpatch.return_value = mock_response_put
# Run the method to be tested:
with self.assertRaises(GenericHandleError):
self.inst.retrieve_handle_record_json('my/testhandle')
# Check if the PUT request was sent exactly once:
self.assertEqual(putpatch.call_count, 0,
'The method "requests.put" was called ' + str(putpatch.call_count) + ' times. It should not have been called at all.')
# search_handle
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
@mock.patch('pyhandle.handlesystemconnector.HandleSystemConnector.check_if_username_exists')
def test_search_handle_wrong_url(self, usernamepatch, getpatch):
"""Test exception when wrong search servlet URL is given."""
# Define the replacement for the patched check_if_username_exists method:
mock_response_user = MockResponse(success=True)
usernamepatch.return_value = mock_response_user
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(wrong_url=True)
getpatch.return_value = mock_response_get
# Setup client for searching with existent but wrong url (google.com):
inst = RESTHandleClient.instantiate_with_username_and_password(
"url_https",
"100:user/name",
"password",
reverselookup_baseuri='http://www.google.com',
HTTP_verify=True)
# Run code to be tested + check exception:
with self.assertRaises(ReverseLookupException):
            inst.search_handle(URL='*')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
@mock.patch('pyhandle.handlesystemconnector.HandleSystemConnector.check_if_username_exists')
def test_search_handle_handleurl(self, usernamepatch, getpatch):
"""Test exception when wrong search servlet URL (Handle Server REST API URL) is given."""
# Define the replacement for the patched check_if_username_exists method:
mock_response_user = MockResponse(success=True)
usernamepatch.return_value = mock_response_user
# Define the replacement for the patched GET method:
mock_response_search = MockSearchResponse(handle_url=True)
getpatch.return_value = mock_response_search
# Setup client for searching with Handle Server url:
inst = RESTHandleClient.instantiate_with_username_and_password(
"url_https",
"100:user/name",
"password",
reverselookup_url_extension='/api/handles/',
HTTP_verify=True)
# Run code to be tested + check exception:
with self.assertRaises(ReverseLookupException):
            inst.search_handle(URL='*')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_search_handle(self, getpatch):
"""Test searching for handles with any url (server should return list of handles)."""
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(success=True)
getpatch.return_value = mock_response_get
# Run code to be tested:
val = self.inst.search_handle(URL='*')
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
self.assertTrue(len(val) > 0,
'')
self.assertTrue(check_handle_syntax(val[0]),
'')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_search_handle_emptylist(self, getpatch):
"""Test empty search result."""
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(empty=True)
getpatch.return_value = mock_response_get
# Run code to be tested:
val = self.inst.search_handle(URL='noturldoesnotexist')
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
self.assertEqual(len(val), 0,
'')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_search_handle_for_url(self, getpatch):
"""Test searching for url with wildcards."""
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(success=True)
getpatch.return_value = mock_response_get
# Run code to be tested:
val = self.inst.search_handle(URL='*dkrz*')
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
# Run code to be tested:
val = self.inst.search_handle('*dkrz*')
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
if False:
# At the moment, two keywords can not be searched!
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_search_handle_for_url_and_checksum(self, getpatch):
"""Test searching for url and checksum with wildcards."""
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(success=True)
getpatch.return_value = mock_response_get
# Run code to be tested:
val = self.inst.search_handle('*dkrz*', CHECKSUM='*123*')
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
# Run code to be tested:
val = self.inst.search_handle(URL='*dkrz*', CHECKSUM='*123*')
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_search_handle_prefixfilter(self, getpatch):
"""Test filtering for prefixes."""
prefix = "11111"
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(prefix=prefix)
getpatch.return_value = mock_response_get
# Run code to be tested:
val = self.inst.search_handle(URL='*dkrz*', prefix=prefix)
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
for item in val:
self.assertEqual(item.split('/')[0], prefix)
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_search_handle_prefixfilter_realprefix(self, getpatch):
"""Test filtering for prefixes."""
prefix = "10876.test"
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(prefix=prefix)
getpatch.return_value = mock_response_get
# Run code to be tested:
val = self.inst.search_handle(URL='*dkrz*', prefix=prefix)
# Check desired outcome:
self.assertEqual(type(val), type([]),
'')
for item in val:
self.assertEqual(item.split('/')[0], prefix)
@mock.patch('pyhandle.handlesystemconnector.requests.Session.get')
def test_search_handle_fulltext(self, getpatch):
"""Test filtering for prefixes."""
prefix = "10876.test"
# Define the replacement for the patched GET method:
mock_response_get = MockSearchResponse(prefix=prefix)
getpatch.return_value = mock_response_get
# Run code to be tested + check exception:
with self.assertRaises(ReverseLookupException):
self.inst.search_handle(URL='*dkrz*', searchterms=['foo', 'bar'])
| 53.292373
| 569
| 0.636201
|
55bfa028ce0cb662f230ca9deb151bb5583dd403
| 757
|
py
|
Python
|
_erwin/main.py
|
der2b2/erwin
|
9a6ced587d213779618ac1c77a2e5c039f5c8731
|
[
"MIT"
] | 1
|
2020-10-31T21:15:35.000Z
|
2020-10-31T21:15:35.000Z
|
_erwin/main.py
|
der2b2/erwin
|
9a6ced587d213779618ac1c77a2e5c039f5c8731
|
[
"MIT"
] | 13
|
2020-09-23T16:05:45.000Z
|
2022-03-12T00:51:01.000Z
|
_erwin/main.py
|
der2b2/erwin
|
9a6ced587d213779618ac1c77a2e5c039f5c8731
|
[
"MIT"
] | null | null | null |
from _erwin import build
from _erwin import serve
from _erwin import clean
from _erwin import initialize
def run(argv):
if argv[0] == "clean" or argv[0] == "c":
print("Cleaning output folder")
clean.run_clean()
elif argv[0] == "build" or argv[0] == "b":
print("Build")
build.main()
elif argv[0] == "serve" or argv[0] == "s":
print("Serve")
serve.run_server()
elif argv[0] == "init" or argv[0] == "i":
print("Initialize")
print("")
read = input("Initialize will override templates, sure you want to proceed? [Y|n] ")
if read == "Y":
initialize.run_init()
else:
print("Aborted")
else:
print("usage: python erwin.py build|serve|clean|init b|s|c|i")
| 28.037037
| 90
| 0.581242
|
9e9a034ac85065fff3d74f61addfc94861789ec3
| 4,720
|
py
|
Python
|
dev/circuitpython/examples/mlx90640_pygamer.py
|
scripsi/picodeebee
|
0ec77e92f09fa8711705623482e57a5e0b702696
|
[
"MIT"
] | 7
|
2021-03-15T10:06:20.000Z
|
2022-03-23T02:53:15.000Z
|
Lights/adafruit-circuitpython-bundle-6.x-mpy-20210310/examples/mlx90640_pygamer.py
|
IanSMoyes/SpiderPi
|
cc3469980ae87b92d0dc43c05dbd579f0fa8c4b1
|
[
"Apache-2.0"
] | 5
|
2021-04-27T18:21:11.000Z
|
2021-05-02T14:17:14.000Z
|
Lights/adafruit-circuitpython-bundle-6.x-mpy-20210310/examples/mlx90640_pygamer.py
|
IanSMoyes/SpiderPi
|
cc3469980ae87b92d0dc43c05dbd579f0fa8c4b1
|
[
"Apache-2.0"
] | null | null | null |
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
import time
import board
import busio
import displayio
import terminalio
from adafruit_display_text.label import Label
from simpleio import map_range
import adafruit_mlx90640
number_of_colors = 64  # Number of colors in the gradient
last_color = number_of_colors - 1 # Last color in palette
palette = displayio.Palette(number_of_colors) # Palette with all our colors
## Heatmap code inspired from: http://www.andrewnoske.com/wiki/Code_-_heatmaps_and_color_gradients
color_A = [
[0, 0, 0],
[0, 0, 255],
[0, 255, 255],
[0, 255, 0],
[255, 255, 0],
[255, 0, 0],
[255, 255, 255],
]
color_B = [[0, 0, 255], [0, 255, 255], [0, 255, 0], [255, 255, 0], [255, 0, 0]]
color_C = [[0, 0, 0], [255, 255, 255]]
color_D = [[0, 0, 255], [255, 0, 0]]
color = color_B
NUM_COLORS = len(color)
def MakeHeatMapColor():
for c in range(number_of_colors):
value = c * (NUM_COLORS - 1) / last_color
idx1 = int(value) # Our desired color will be after this index.
if idx1 == value: # This is the corner case
red = color[idx1][0]
green = color[idx1][1]
blue = color[idx1][2]
else:
idx2 = idx1 + 1 # ... and before this index (inclusive).
fractBetween = value - idx1 # Distance between the two indexes (0-1).
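            # Worked example: with color_B (5 anchors) and 64 palette slots,
            # c = 16 gives value = 16 * 4 / 63, about 1.016, so idx1 = 1,
            # idx2 = 2 and fractBetween is roughly 0.016; palette[16] ends up
            # almost exactly at the second anchor color (cyan).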
red = int(
round((color[idx2][0] - color[idx1][0]) * fractBetween + color[idx1][0])
)
green = int(
round((color[idx2][1] - color[idx1][1]) * fractBetween + color[idx1][1])
)
blue = int(
round((color[idx2][2] - color[idx1][2]) * fractBetween + color[idx1][2])
)
palette[c] = (0x010000 * red) + (0x000100 * green) + (0x000001 * blue)
MakeHeatMapColor()
# Bitmap for colour coded thermal value
image_bitmap = displayio.Bitmap(32, 24, number_of_colors)
# Create a TileGrid using the Bitmap and Palette
image_tile = displayio.TileGrid(image_bitmap, pixel_shader=palette)
# Create a Group that scales 32*24 to 128*96
image_group = displayio.Group(scale=4)
image_group.append(image_tile)
scale_bitmap = displayio.Bitmap(number_of_colors, 1, number_of_colors)
# Create a Group; the scale must be 128 divided by number_of_colors
scale_group = displayio.Group(scale=2)
scale_tile = displayio.TileGrid(scale_bitmap, pixel_shader=palette, x=0, y=60)
scale_group.append(scale_tile)
for i in range(number_of_colors):
    scale_bitmap[i, 0] = i  # Fill the scale with the palette gradient
# Create the super Group
group = displayio.Group()
min_label = Label(terminalio.FONT, max_glyphs=10, color=palette[0], x=0, y=110)
max_label = Label(
terminalio.FONT, max_glyphs=10, color=palette[last_color], x=80, y=110
)
# Add all the sub-group to the SuperGroup
group.append(image_group)
group.append(scale_group)
group.append(min_label)
group.append(max_label)
# Add the SuperGroup to the Display
board.DISPLAY.show(group)
min_t = 20 # Initial minimum temperature range, before auto scale
max_t = 37 # Initial maximum temperature range, before auto scale
i2c = busio.I2C(board.SCL, board.SDA, frequency=800000)
mlx = adafruit_mlx90640.MLX90640(i2c)
print("MLX addr detected on I2C")
print([hex(i) for i in mlx.serial_number])
# mlx.refresh_rate = adafruit_mlx90640.RefreshRate.REFRESH_2_HZ
mlx.refresh_rate = adafruit_mlx90640.RefreshRate.REFRESH_4_HZ
frame = [0] * 768
while True:
stamp = time.monotonic()
try:
mlx.getFrame(frame)
except ValueError:
# these happen, no biggie - retry
continue
# print("Time for data aquisition: %0.2f s" % (time.monotonic()-stamp))
mini = frame[0] # Define a min temperature of current image
maxi = frame[0] # Define a max temperature of current image
for h in range(24):
for w in range(32):
t = frame[h * 32 + w]
if t > maxi:
maxi = t
if t < mini:
mini = t
image_bitmap[w, (23 - h)] = int(map_range(t, min_t, max_t, 0, last_color))
min_label.text = "%0.2f" % (min_t)
max_string = "%0.2f" % (max_t)
max_label.x = 120 - (5 * len(max_string)) # Tricky calculation to left align
max_label.text = max_string
min_t = mini # Automatically change the color scale
max_t = maxi
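    # Note: min_t and max_t are updated after the frame has been drawn, so
    # each image is mapped with the temperature range of the previous frame
    # (a one-frame lag in the auto-scaling).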
# print((mini, maxi)) # Use this line to display min and max graph in Mu
# print("Total time for aquisition and display %0.2f s" % (time.monotonic()-stamp))
| 33.714286
| 99
| 0.63411
|
21abbd2ea91423c4a0cc7ff298abb0fda9491ce7
| 13,360
|
py
|
Python
|
nova/tests/api/openstack/compute/test_auth.py
|
bopopescu/trusted-nova
|
b440afb89f6f170c0831f5d6318a08ec41bc8c0a
|
[
"Apache-2.0"
] | 1
|
2015-07-15T08:51:16.000Z
|
2015-07-15T08:51:16.000Z
|
nova/tests/api/openstack/compute/test_auth.py
|
bopopescu/trusted-nova
|
b440afb89f6f170c0831f5d6318a08ec41bc8c0a
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/api/openstack/compute/test_auth.py
|
bopopescu/trusted-nova
|
b440afb89f6f170c0831f5d6318a08ec41bc8c0a
|
[
"Apache-2.0"
] | 2
|
2019-06-12T00:52:15.000Z
|
2020-07-24T10:35:29.000Z
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import webob
import webob.dec
import nova.api.openstack.compute
import nova.auth.manager
from nova.api.openstack import auth
from nova import context
from nova import db
from nova import test
from nova.tests.api.openstack import fakes
class Test(test.TestCase):
def setUp(self):
super(Test, self).setUp()
self.stubs.Set(auth.AuthMiddleware,
'__init__', fakes.fake_auth_init)
self.stubs.Set(context, 'RequestContext', fakes.FakeRequestContext)
fakes.FakeAuthManager.clear_fakes()
fakes.FakeAuthDatabase.data = {}
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_networking(self.stubs)
def test_authorize_user(self):
f = fakes.FakeAuthManager()
user = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
f.add_user(user)
req = webob.Request.blank('/v2/')
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
req.headers['X-Auth-Project-Id'] = 'user1_project'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '204 No Content')
self.assertEqual(len(result.headers['X-Auth-Token']), 40)
def test_authorize_token(self):
f = fakes.FakeAuthManager()
user = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
f.add_user(user)
f.create_project('user1_project', user)
req = webob.Request.blank('/v2/', {'HTTP_HOST': 'foo'})
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '204 No Content')
self.assertEqual(len(result.headers['X-Auth-Token']), 40)
self.assertEqual(result.headers['X-Server-Management-Url'],
"http://foo/v2/user1_project")
token = result.headers['X-Auth-Token']
self.stubs.Set(nova.api.openstack.compute, 'APIRouter',
fakes.FakeRouter)
req = webob.Request.blank('/v2/user1_project')
req.headers['X-Auth-Token'] = token
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '200 OK')
self.assertEqual(result.headers['X-Test-Success'], 'True')
def test_token_expiry(self):
self.destroy_called = False
def destroy_token_mock(meh, context, token):
self.destroy_called = True
def bad_token(meh, context, token_hash):
return fakes.FakeToken(
token_hash=token_hash,
created_at=datetime.datetime(1990, 1, 1))
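        # A token created in 1990 is long expired, so the middleware is
        # expected to reject the request (401) and destroy the stale token.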
self.stubs.Set(fakes.FakeAuthDatabase, 'auth_token_destroy',
destroy_token_mock)
self.stubs.Set(fakes.FakeAuthDatabase, 'auth_token_get',
bad_token)
req = webob.Request.blank('/v2/')
req.headers['X-Auth-Token'] = 'token_hash'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
self.assertEqual(self.destroy_called, True)
def test_authorize_project(self):
f = fakes.FakeAuthManager()
user = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
f.add_user(user)
f.create_project('user1_project', user)
f.create_project('user2_project', user)
req = webob.Request.blank('/v2/', {'HTTP_HOST': 'foo'})
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '204 No Content')
token = result.headers['X-Auth-Token']
self.stubs.Set(nova.api.openstack.compute, 'APIRouter',
fakes.FakeRouter)
req = webob.Request.blank('/v2/user2_project')
req.headers['X-Auth-Token'] = token
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '200 OK')
self.assertEqual(result.headers['X-Test-Success'], 'True')
def test_bad_user_bad_key(self):
req = webob.Request.blank('/v2/')
req.headers['X-Auth-User'] = 'unknown_user'
req.headers['X-Auth-Key'] = 'unknown_user_key'
req.headers['X-Auth-Project-Id'] = 'user_project'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
def test_bad_user_good_key(self):
f = fakes.FakeAuthManager()
user = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
f.add_user(user)
req = webob.Request.blank('/v2/')
req.headers['X-Auth-User'] = 'unknown_user'
req.headers['X-Auth-Key'] = 'user1_key'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
def test_no_user(self):
req = webob.Request.blank('/v2/')
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
def test_bad_token(self):
req = webob.Request.blank('/v2/')
req.headers['X-Auth-Token'] = 'unknown_token'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
def test_bad_project(self):
f = fakes.FakeAuthManager()
user1 = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
user2 = nova.auth.manager.User('id2', 'user2', 'user2_key', None, None)
f.add_user(user1)
f.add_user(user2)
f.create_project('user1_project', user1)
f.create_project('user2_project', user2)
req = webob.Request.blank('/v2/', {'HTTP_HOST': 'foo'})
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '204 No Content')
token = result.headers['X-Auth-Token']
self.stubs.Set(nova.api.openstack.compute, 'APIRouter',
fakes.FakeRouter)
req = webob.Request.blank('/v2/user2_project')
req.headers['X-Auth-Token'] = token
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
def test_not_authorized_project(self):
f = fakes.FakeAuthManager()
user1 = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
f.add_user(user1)
f.create_project('user1_project', user1)
user2 = nova.auth.manager.User('id2', 'user2', 'user2_key', None, None)
f.add_user(user2)
f.create_project('user2_project', user2)
req = webob.Request.blank('/v2/', {'HTTP_HOST': 'foo'})
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '204 No Content')
token = result.headers['X-Auth-Token']
self.stubs.Set(nova.api.openstack.compute, 'APIRouter',
fakes.FakeRouter)
req = webob.Request.blank('/v2/user2_project')
req.headers['X-Auth-Token'] = token
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
def test_auth_token_no_empty_headers(self):
f = fakes.FakeAuthManager()
user = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
f.add_user(user)
req = webob.Request.blank('/v2/')
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
req.headers['X-Auth-Project-Id'] = 'user1_project'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '204 No Content')
self.assertEqual(len(result.headers['X-Auth-Token']), 40)
self.assertFalse('X-CDN-Management-Url' in result.headers)
self.assertFalse('X-Storage-Url' in result.headers)
class TestFunctional(test.TestCase):
def test_token_expiry(self):
ctx = context.get_admin_context()
tok = db.auth_token_create(ctx, dict(
token_hash='test_token_hash',
cdn_management_url='',
server_management_url='',
storage_url='',
user_id='user1',
))
db.auth_token_update(ctx, tok.token_hash, dict(
created_at=datetime.datetime(2000, 1, 1, 12, 0, 0),
))
req = webob.Request.blank('/v2/')
req.headers['X-Auth-Token'] = 'test_token_hash'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
def test_token_doesnotexist(self):
req = webob.Request.blank('/v2/')
req.headers['X-Auth-Token'] = 'nonexistant_token_hash'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '401 Unauthorized')
class TestLimiter(test.TestCase):
def setUp(self):
super(TestLimiter, self).setUp()
self.stubs.Set(auth.AuthMiddleware,
'__init__', fakes.fake_auth_init)
self.stubs.Set(context, 'RequestContext', fakes.FakeRequestContext)
fakes.FakeAuthManager.clear_fakes()
fakes.FakeAuthDatabase.data = {}
fakes.stub_out_networking(self.stubs)
def test_authorize_token(self):
f = fakes.FakeAuthManager()
user = nova.auth.manager.User('id1', 'user1', 'user1_key', None, None)
f.add_user(user)
f.create_project('test', user)
req = webob.Request.blank('/v2/')
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(len(result.headers['X-Auth-Token']), 40)
token = result.headers['X-Auth-Token']
self.stubs.Set(nova.api.openstack.compute, 'APIRouter',
fakes.FakeRouter)
req = webob.Request.blank('/v2/test')
req.method = 'POST'
req.headers['X-Auth-Token'] = token
result = req.get_response(fakes.wsgi_app(fake_auth=False))
self.assertEqual(result.status, '200 OK')
self.assertEqual(result.headers['X-Test-Success'], 'True')
class TestNoAuthMiddleware(test.TestCase):
def setUp(self):
super(TestNoAuthMiddleware, self).setUp()
self.stubs.Set(context, 'RequestContext', fakes.FakeRequestContext)
fakes.FakeAuthManager.clear_fakes()
fakes.FakeAuthDatabase.data = {}
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_networking(self.stubs)
def test_authorize_user(self):
req = webob.Request.blank('/v2')
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
req.headers['X-Auth-Project-Id'] = 'user1_project'
result = req.get_response(fakes.wsgi_app(fake_auth=False,
use_no_auth=True))
self.assertEqual(result.status, '204 No Content')
self.assertEqual(result.headers['X-Server-Management-Url'],
"http://localhost/v2/user1_project")
def test_authorize_user_trailing_slash(self):
        # make sure it works with a trailing slash on the request
req = webob.Request.blank('/v2/')
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
req.headers['X-Auth-Project-Id'] = 'user1_project'
result = req.get_response(fakes.wsgi_app(fake_auth=False,
use_no_auth=True))
self.assertEqual(result.status, '204 No Content')
self.assertEqual(result.headers['X-Server-Management-Url'],
"http://localhost/v2/user1_project")
def test_auth_token_no_empty_headers(self):
req = webob.Request.blank('/v2')
req.headers['X-Auth-User'] = 'user1'
req.headers['X-Auth-Key'] = 'user1_key'
req.headers['X-Auth-Project-Id'] = 'user1_project'
result = req.get_response(fakes.wsgi_app(fake_auth=False,
use_no_auth=True))
self.assertEqual(result.status, '204 No Content')
self.assertFalse('X-CDN-Management-Url' in result.headers)
self.assertFalse('X-Storage-Url' in result.headers)
| 41.490683
| 79
| 0.633832
|
d0c516fb3258d3d28b09fb87a652c4f1d5c56d51
| 4,485
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/virtual_network_tap_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2021-09-07T18:36:04.000Z
|
2021-09-07T18:36:04.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/virtual_network_tap_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/virtual_network_tap_py3.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_py3 import Resource
class VirtualNetworkTap(Resource):
"""Virtual Network Tap resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict[str, str]
:ivar network_interface_tap_configurations: Specifies the list of resource
IDs for the network interface IP configuration that needs to be tapped.
:vartype network_interface_tap_configurations:
list[~azure.mgmt.network.v2018_11_01.models.NetworkInterfaceTapConfiguration]
:ivar resource_guid: The resourceGuid property of the virtual network tap.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the virtual network
tap. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param destination_network_interface_ip_configuration: The reference to
the private IP Address of the collector nic that will receive the tap
:type destination_network_interface_ip_configuration:
~azure.mgmt.network.v2018_11_01.models.NetworkInterfaceIPConfiguration
:param destination_load_balancer_front_end_ip_configuration: The reference
to the private IP address on the internal Load Balancer that will receive
the tap
:type destination_load_balancer_front_end_ip_configuration:
~azure.mgmt.network.v2018_11_01.models.FrontendIPConfiguration
:param destination_port: The VXLAN destination port that will receive the
tapped traffic.
:type destination_port: int
:param etag: Gets a unique read-only string that changes whenever the
resource is updated.
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'network_interface_tap_configurations': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'network_interface_tap_configurations': {'key': 'properties.networkInterfaceTapConfigurations', 'type': '[NetworkInterfaceTapConfiguration]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'destination_network_interface_ip_configuration': {'key': 'properties.destinationNetworkInterfaceIPConfiguration', 'type': 'NetworkInterfaceIPConfiguration'},
'destination_load_balancer_front_end_ip_configuration': {'key': 'properties.destinationLoadBalancerFrontEndIPConfiguration', 'type': 'FrontendIPConfiguration'},
'destination_port': {'key': 'properties.destinationPort', 'type': 'int'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, *, id: str=None, location: str=None, tags=None, destination_network_interface_ip_configuration=None, destination_load_balancer_front_end_ip_configuration=None, destination_port: int=None, etag: str=None, **kwargs) -> None:
super(VirtualNetworkTap, self).__init__(id=id, location=location, tags=tags, **kwargs)
self.network_interface_tap_configurations = None
self.resource_guid = None
self.provisioning_state = None
self.destination_network_interface_ip_configuration = destination_network_interface_ip_configuration
self.destination_load_balancer_front_end_ip_configuration = destination_load_balancer_front_end_ip_configuration
self.destination_port = destination_port
self.etag = etag
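# Minimal usage sketch (illustration only; the parameter values below are
# assumptions, not part of the generated model):
#
#     tap = VirtualNetworkTap(location='westus', destination_port=4789)
#
# Read-only attributes (name, type, network_interface_tap_configurations,
# resource_guid, provisioning_state) are populated by the service and stay
# None on the client side.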
| 50.393258
| 245
| 0.696544
|
1943390e20274bef0a97ea662adf8eae33284e2e
| 2,075
|
py
|
Python
|
task/tagger/data/boson.py
|
ssunqf/nlp-exp
|
f4ebc77d2f7b85b95a12d2cb47196a950c8a1a1f
|
[
"Apache-2.0"
] | 4
|
2019-11-07T12:13:53.000Z
|
2019-12-16T17:43:57.000Z
|
task/tagger/data/boson.py
|
ssunqf/nlp-exp
|
f4ebc77d2f7b85b95a12d2cb47196a950c8a1a1f
|
[
"Apache-2.0"
] | null | null | null |
task/tagger/data/boson.py
|
ssunqf/nlp-exp
|
f4ebc77d2f7b85b95a12d2cb47196a950c8a1a1f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
import sys
from task.util import utils
if __name__ == '__main__':
with open(sys.argv[1]) as reader, open(sys.argv[2], 'w') as writer:
for line in reader:
for sentence in line.split('\\n\\n'):
chars = []
tags = []
for left in sentence.split('{{'):
for field in left.split('}}'):
type = None
text = field
if field.startswith('person_name:'):
type = 'PERSON'
text = field[len('person_name:'):]
elif field.startswith('company_name:'):
type = 'ORGANIZATION'
text = field[len('company_name:'):]
elif field.startswith('org_name:'):
type = 'ORGANIZATION'
text = field[len('org_name:'):]
elif field.startswith('location:'):
type = 'LOCATION'
text = field[len('location:'):]
elif field.startswith('product_name:'):
type = None
text = field[len('product_name:'):]
elif field.startswith('time:'):
type = 'TIME'
text = field[len('time:'):]
t_chars = [word for t, word in utils.replace_entity(text.strip())]
if type:
t_tags = ['S_'+type] if len(t_chars) == 1 else \
['B_'+type] + ['M_'+type] * (len(t_chars) - 2) + ['E_'+type]
else:
t_tags = ['O'] * len(t_chars)
chars.extend(t_chars)
tags.extend(t_tags)
writer.write(' '.join([char + '#' + tag for char, tag in zip(chars, tags)]) + '\n')
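# Output format: one sentence per line; tokens are written as "char#tag"
# separated by spaces, where tags follow the B_/M_/E_/S_<TYPE> scheme
# (PERSON, ORGANIZATION, LOCATION, TIME) and 'O' marks everything else.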
| 42.346939
| 99
| 0.388916
|
e95275363e8e41c2b7ef5bfcb094a97cc8ddae0f
| 2,240
|
py
|
Python
|
examples/eit_dynamic_svd.py
|
fzouari/pyEIT
|
0e4e5200b0adb1d49413ca5a321ff9f4f102f420
|
[
"BSD-3-Clause"
] | null | null | null |
examples/eit_dynamic_svd.py
|
fzouari/pyEIT
|
0e4e5200b0adb1d49413ca5a321ff9f4f102f420
|
[
"BSD-3-Clause"
] | null | null | null |
examples/eit_dynamic_svd.py
|
fzouari/pyEIT
|
0e4e5200b0adb1d49413ca5a321ff9f4f102f420
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
""" demo on dynamic eit using JAC method """
# Copyright (c) Benyuan Liu. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from __future__ import division, absolute_import, print_function
import numpy as np
import matplotlib.pyplot as plt
import pyeit.mesh as mesh
from pyeit.eit.fem import Forward
from pyeit.eit.utils import eit_scan_lines
import pyeit.eit.svd as svd
from pyeit.eit.interp2d import sim2pts
""" 0. construct mesh """
mesh_obj, el_pos = mesh.create(16, h0=0.1)
# mesh_obj, el_pos = mesh.layer_circle()
# extract node, element, alpha
pts = mesh_obj["node"]
tri = mesh_obj["element"]
x, y = pts[:, 0], pts[:, 1]
""" 1. problem setup """
mesh_obj["alpha"] = np.ones(tri.shape[0]) * 10
anomaly = [{"x": 0.5, "y": 0.5, "d": 0.1, "perm": 100.0}]
mesh_new = mesh.set_perm(mesh_obj, anomaly=anomaly)
""" 2. FEM simulation """
el_dist, step = 8, 1
ex_mat = eit_scan_lines(16, el_dist)
# calculate simulated data
fwd = Forward(mesh_obj, el_pos)
f0 = fwd.solve_eit(ex_mat, step=step, perm=mesh_obj["perm"])
f1 = fwd.solve_eit(ex_mat, step=step, perm=mesh_new["perm"])
""" 3. JAC solver """
# Note: if the jac and the real problem are generated using the same mesh,
# then data normalization in solve is not needed.
# However, when you generate the jac from a known mesh but, in the real
# problem, the shape and the electrode positions are not exactly the same
# as in the mesh used to generate the jac, then the data must be normalized.
eit = svd.SVD(mesh_obj, el_pos, ex_mat=ex_mat, step=step, perm=1.0, parser="std")
eit.setup(n=35, method="svd")
ds = eit.solve(f1.v, f0.v, normalize=True)
ds_n = sim2pts(pts, tri, np.real(ds))
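# ds holds the reconstructed change, one value per mesh element (triangle);
# sim2pts interpolates these element values onto the mesh nodes (ds_n) so
# that they can be plotted with tripcolor below.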
# plot ground truth
fig, ax = plt.subplots(figsize=(6, 4))
delta_perm = mesh_new["perm"] - mesh_obj["perm"]
im = ax.tripcolor(x, y, tri, np.real(delta_perm), shading="flat")
fig.colorbar(im)
ax.set_aspect("equal")
# plot EIT reconstruction
fig, ax = plt.subplots(figsize=(6, 4))
im = ax.tripcolor(x, y, tri, ds_n, shading="flat")
for i, e in enumerate(el_pos):
ax.annotate(str(i + 1), xy=(x[e], y[e]), color="r")
fig.colorbar(im)
ax.set_aspect("equal")
# fig.set_size_inches(6, 4)
# plt.savefig('../figs/demo_jac.png', dpi=96)
plt.show()
| 32.941176
| 81
| 0.703571
|
508e858d816969c00758bf9c2d966626ad22e8ca
| 45,151
|
py
|
Python
|
jodconverter-web/src/main/office/program/python-core-2.7.6/lib/pickle.py
|
huleigithup/filepreview
|
815fac0e21547301604bb5fd623a91d885cb4437
|
[
"Apache-2.0"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
BitmessageKit/Vendor/static-python/Lib/pickle.py
|
VoluntaryLabs/BitmessageKit
|
dd634977a629ab4dec184e12bb6324cc01149ba3
|
[
"MIT"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
BitmessageKit/Vendor/static-python/Lib/pickle.py
|
VoluntaryLabs/BitmessageKit
|
dd634977a629ab4dec184e12bb6324cc01149ba3
|
[
"MIT"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
"""Create portable serialized representations of Python objects.
See module cPickle for a (much) faster implementation.
See module copy_reg for a mechanism for registering custom picklers.
See module pickletools source for extensive comments.
Classes:
Pickler
Unpickler
Functions:
dump(object, file)
dumps(object) -> string
load(file) -> object
loads(string) -> object
Misc variables:
__version__
format_version
compatible_formats
"""
__version__ = "$Revision: 72223 $" # Code version
from types import *
from copy_reg import dispatch_table
from copy_reg import _extension_registry, _inverted_registry, _extension_cache
import marshal
import sys
import struct
import re
__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
"Unpickler", "dump", "dumps", "load", "loads"]
# These are purely informational; no code uses these.
format_version = "2.0" # File format version we write
compatible_formats = ["1.0", # Original protocol 0
"1.1", # Protocol 0 with INST added
"1.2", # Original protocol 1
"1.3", # Protocol 1 with BINFLOAT added
"2.0", # Protocol 2
] # Old format versions we can read
# Keep in synch with cPickle. This is the highest protocol number we
# know how to read.
HIGHEST_PROTOCOL = 2
# Why use struct.pack() for pickling but marshal.loads() for
# unpickling? struct.pack() is 40% faster than marshal.dumps(), but
# marshal.loads() is twice as fast as struct.unpack()!
mloads = marshal.loads
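# mloads is used below to decode little-endian integers from the pickle
# stream, e.g. mloads('i' + self.read(4)) in load_binint().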
class PickleError(Exception):
"""A common base class for the other pickling exceptions."""
pass
class PicklingError(PickleError):
"""This exception is raised when an unpicklable object is passed to the
dump() method.
"""
pass
class UnpicklingError(PickleError):
"""This exception is raised when there is a problem unpickling an object,
such as a security violation.
Note that other exceptions may also be raised during unpickling, including
(but not necessarily limited to) AttributeError, EOFError, ImportError,
and IndexError.
"""
pass
# An instance of _Stop is raised by Unpickler.load_stop() in response to
# the STOP opcode, passing the object that is the result of unpickling.
class _Stop(Exception):
def __init__(self, value):
self.value = value
# Jython has PyStringMap; it's a dict subclass with string keys
try:
from org.python.core import PyStringMap
except ImportError:
PyStringMap = None
# UnicodeType may or may not be exported (normally imported from types)
try:
UnicodeType
except NameError:
UnicodeType = None
# Pickle opcodes. See pickletools.py for extensive docs. The listing
# here is in kind-of alphabetical order of 1-character pickle code.
# pickletools groups them by purpose.
MARK = '(' # push special markobject on stack
STOP = '.' # every pickle ends with STOP
POP = '0' # discard topmost stack item
POP_MARK = '1' # discard stack top through topmost markobject
DUP = '2' # duplicate top stack item
FLOAT = 'F' # push float object; decimal string argument
INT = 'I' # push integer or bool; decimal string argument
BININT = 'J' # push four-byte signed int
BININT1 = 'K' # push 1-byte unsigned int
LONG = 'L' # push long; decimal string argument
BININT2 = 'M' # push 2-byte unsigned int
NONE = 'N' # push None
PERSID = 'P' # push persistent object; id is taken from string arg
BINPERSID = 'Q' # " " " ; " " " " stack
REDUCE = 'R' # apply callable to argtuple, both on stack
STRING = 'S' # push string; NL-terminated string argument
BINSTRING = 'T' # push string; counted binary string argument
SHORT_BINSTRING = 'U' # " " ; " " " " < 256 bytes
UNICODE = 'V' # push Unicode string; raw-unicode-escaped'd argument
BINUNICODE = 'X' # " " " ; counted UTF-8 string argument
APPEND = 'a' # append stack top to list below it
BUILD = 'b' # call __setstate__ or __dict__.update()
GLOBAL = 'c' # push self.find_class(modname, name); 2 string args
DICT = 'd' # build a dict from stack items
EMPTY_DICT = '}' # push empty dict
APPENDS = 'e' # extend list on stack by topmost stack slice
GET = 'g' # push item from memo on stack; index is string arg
BINGET = 'h' # " " " " " " ; " " 1-byte arg
INST = 'i' # build & push class instance
LONG_BINGET = 'j' # push item from memo on stack; index is 4-byte arg
LIST = 'l' # build list from topmost stack items
EMPTY_LIST = ']' # push empty list
OBJ = 'o' # build & push class instance
PUT = 'p' # store stack top in memo; index is string arg
BINPUT = 'q' # " " " " " ; " " 1-byte arg
LONG_BINPUT = 'r' # " " " " " ; " " 4-byte arg
SETITEM = 's' # add key+value pair to dict
TUPLE = 't' # build tuple from topmost stack items
EMPTY_TUPLE = ')' # push empty tuple
SETITEMS = 'u' # modify dict by adding topmost key+value pairs
BINFLOAT = 'G' # push float; arg is 8-byte float encoding
TRUE = 'I01\n' # not an opcode; see INT docs in pickletools.py
FALSE = 'I00\n' # not an opcode; see INT docs in pickletools.py
# Protocol 2
PROTO = '\x80' # identify pickle protocol
NEWOBJ = '\x81' # build object by applying cls.__new__ to argtuple
EXT1 = '\x82' # push object from extension registry; 1-byte index
EXT2 = '\x83' # ditto, but 2-byte index
EXT4 = '\x84' # ditto, but 4-byte index
TUPLE1 = '\x85' # build 1-tuple from stack top
TUPLE2 = '\x86' # build 2-tuple from two topmost stack items
TUPLE3 = '\x87' # build 3-tuple from three topmost stack items
NEWTRUE = '\x88' # push True
NEWFALSE = '\x89' # push False
LONG1 = '\x8a' # push long from < 256 bytes
LONG4 = '\x8b' # push really big long
_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
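# Maps a tuple length (0, 1, 2, 3) to the opcode that builds a tuple of that
# size directly from the stack; used by save_tuple() for protocol 2.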
__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
del x
# Pickling machinery
class Pickler:
def __init__(self, file, protocol=None):
"""This takes a file-like object for writing a pickle data stream.
The optional protocol argument tells the pickler to use the
given protocol; supported protocols are 0, 1, 2. The default
protocol is 0, to be backwards compatible. (Protocol 0 is the
only protocol that can be written to a file opened in text
mode and read back successfully. When using a protocol higher
than 0, make sure the file is opened in binary mode, both when
pickling and unpickling.)
Protocol 1 is more efficient than protocol 0; protocol 2 is
more efficient than protocol 1.
Specifying a negative protocol version selects the highest
protocol version supported. The higher the protocol used, the
more recent the version of Python needed to read the pickle
produced.
The file parameter must have a write() method that accepts a single
string argument. It can thus be an open file object, a StringIO
object, or any other custom object that meets this interface.
"""
if protocol is None:
protocol = 0
if protocol < 0:
protocol = HIGHEST_PROTOCOL
elif not 0 <= protocol <= HIGHEST_PROTOCOL:
raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
self.write = file.write
self.memo = {}
self.proto = int(protocol)
self.bin = protocol >= 1
self.fast = 0
def clear_memo(self):
"""Clears the pickler's "memo".
The memo is the data structure that remembers which objects the
pickler has already seen, so that shared or recursive objects are
pickled by reference and not by value. This method is useful when
re-using picklers.
"""
self.memo.clear()
def dump(self, obj):
"""Write a pickled representation of obj to the open file."""
if self.proto >= 2:
self.write(PROTO + chr(self.proto))
self.save(obj)
self.write(STOP)
def memoize(self, obj):
"""Store an object in the memo."""
# The Pickler memo is a dictionary mapping object ids to 2-tuples
# that contain the Unpickler memo key and the object being memoized.
# The memo key is written to the pickle and will become
# the key in the Unpickler's memo. The object is stored in the
# Pickler memo so that transient objects are kept alive during
# pickling.
# The use of the Unpickler memo length as the memo key is just a
# convention. The only requirement is that the memo values be unique.
# But there appears no advantage to any other scheme, and this
# scheme allows the Unpickler memo to be implemented as a plain (but
# growable) array, indexed by memo key.
if self.fast:
return
assert id(obj) not in self.memo
memo_len = len(self.memo)
self.write(self.put(memo_len))
self.memo[id(obj)] = memo_len, obj
# Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
def put(self, i, pack=struct.pack):
if self.bin:
if i < 256:
return BINPUT + chr(i)
else:
return LONG_BINPUT + pack("<i", i)
return PUT + repr(i) + '\n'
# Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
def get(self, i, pack=struct.pack):
if self.bin:
if i < 256:
return BINGET + chr(i)
else:
return LONG_BINGET + pack("<i", i)
return GET + repr(i) + '\n'
def save(self, obj):
# Check for persistent id (defined by a subclass)
pid = self.persistent_id(obj)
if pid:
self.save_pers(pid)
return
# Check the memo
x = self.memo.get(id(obj))
if x:
self.write(self.get(x[0]))
return
# Check the type dispatch table
t = type(obj)
f = self.dispatch.get(t)
if f:
f(self, obj) # Call unbound method with explicit self
return
# Check copy_reg.dispatch_table
reduce = dispatch_table.get(t)
if reduce:
rv = reduce(obj)
else:
# Check for a class with a custom metaclass; treat as regular class
try:
issc = issubclass(t, TypeType)
except TypeError: # t is not a class (old Boost; see SF #502085)
issc = 0
if issc:
self.save_global(obj)
return
# Check for a __reduce_ex__ method, fall back to __reduce__
reduce = getattr(obj, "__reduce_ex__", None)
if reduce:
rv = reduce(self.proto)
else:
reduce = getattr(obj, "__reduce__", None)
if reduce:
rv = reduce()
else:
raise PicklingError("Can't pickle %r object: %r" %
(t.__name__, obj))
# Check for string returned by reduce(), meaning "save as global"
if type(rv) is StringType:
self.save_global(obj, rv)
return
# Assert that reduce() returned a tuple
if type(rv) is not TupleType:
raise PicklingError("%s must return string or tuple" % reduce)
# Assert that it returned an appropriately sized tuple
l = len(rv)
if not (2 <= l <= 5):
raise PicklingError("Tuple returned by %s must have "
"two to five elements" % reduce)
# Save the reduce() output and finally memoize the object
self.save_reduce(obj=obj, *rv)
def persistent_id(self, obj):
# This exists so a subclass can override it
return None
def save_pers(self, pid):
# Save a persistent id reference
if self.bin:
self.save(pid)
self.write(BINPERSID)
else:
self.write(PERSID + str(pid) + '\n')
def save_reduce(self, func, args, state=None,
listitems=None, dictitems=None, obj=None):
# This API is called by some subclasses
# Assert that args is a tuple or None
if not isinstance(args, TupleType):
raise PicklingError("args from reduce() should be a tuple")
# Assert that func is callable
if not hasattr(func, '__call__'):
raise PicklingError("func from reduce should be callable")
save = self.save
write = self.write
# Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
# A __reduce__ implementation can direct protocol 2 to
# use the more efficient NEWOBJ opcode, while still
# allowing protocol 0 and 1 to work normally. For this to
# work, the function returned by __reduce__ should be
# called __newobj__, and its first argument should be a
# new-style class. The implementation for __newobj__
# should be as follows, although pickle has no way to
# verify this:
#
# def __newobj__(cls, *args):
# return cls.__new__(cls, *args)
#
# Protocols 0 and 1 will pickle a reference to __newobj__,
# while protocol 2 (and above) will pickle a reference to
# cls, the remaining args tuple, and the NEWOBJ code,
# which calls cls.__new__(cls, *args) at unpickling time
# (see load_newobj below). If __reduce__ returns a
# three-tuple, the state from the third tuple item will be
# pickled regardless of the protocol, calling __setstate__
# at unpickling time (see load_build below).
#
# Note that no standard __newobj__ implementation exists;
# you have to provide your own. This is to enforce
# compatibility with Python 2.2 (pickles written using
# protocol 0 or 1 in Python 2.3 should be unpicklable by
# Python 2.2).
cls = args[0]
if not hasattr(cls, "__new__"):
raise PicklingError(
"args[0] from __newobj__ args has no __new__")
if obj is not None and cls is not obj.__class__:
raise PicklingError(
"args[0] from __newobj__ args has the wrong class")
args = args[1:]
save(cls)
save(args)
write(NEWOBJ)
else:
save(func)
save(args)
write(REDUCE)
if obj is not None:
self.memoize(obj)
# More new special cases (that work with older protocols as
# well): when __reduce__ returns a tuple with 4 or 5 items,
# the 4th and 5th item should be iterators that provide list
# items and dict items (as (key, value) tuples), or None.
if listitems is not None:
self._batch_appends(listitems)
if dictitems is not None:
self._batch_setitems(dictitems)
if state is not None:
save(state)
write(BUILD)
# Methods below this point are dispatched through the dispatch table
dispatch = {}
def save_none(self, obj):
self.write(NONE)
dispatch[NoneType] = save_none
def save_bool(self, obj):
if self.proto >= 2:
self.write(obj and NEWTRUE or NEWFALSE)
else:
self.write(obj and TRUE or FALSE)
dispatch[bool] = save_bool
def save_int(self, obj, pack=struct.pack):
if self.bin:
# If the int is small enough to fit in a signed 4-byte 2's-comp
# format, we can store it more efficiently than the general
# case.
# First one- and two-byte unsigned ints:
if obj >= 0:
if obj <= 0xff:
self.write(BININT1 + chr(obj))
return
if obj <= 0xffff:
self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
return
# Next check for 4-byte signed ints:
high_bits = obj >> 31 # note that Python shift sign-extends
if high_bits == 0 or high_bits == -1:
# All high bits are copies of bit 2**31, so the value
# fits in a 4-byte signed int.
self.write(BININT + pack("<i", obj))
return
# Text pickle, or int too big to fit in signed 4-byte format.
self.write(INT + repr(obj) + '\n')
dispatch[IntType] = save_int
def save_long(self, obj, pack=struct.pack):
if self.proto >= 2:
bytes = encode_long(obj)
n = len(bytes)
if n < 256:
self.write(LONG1 + chr(n) + bytes)
else:
self.write(LONG4 + pack("<i", n) + bytes)
return
self.write(LONG + repr(obj) + '\n')
dispatch[LongType] = save_long
def save_float(self, obj, pack=struct.pack):
if self.bin:
self.write(BINFLOAT + pack('>d', obj))
else:
self.write(FLOAT + repr(obj) + '\n')
dispatch[FloatType] = save_float
def save_string(self, obj, pack=struct.pack):
if self.bin:
n = len(obj)
if n < 256:
self.write(SHORT_BINSTRING + chr(n) + obj)
else:
self.write(BINSTRING + pack("<i", n) + obj)
else:
self.write(STRING + repr(obj) + '\n')
self.memoize(obj)
dispatch[StringType] = save_string
def save_unicode(self, obj, pack=struct.pack):
if self.bin:
encoding = obj.encode('utf-8')
n = len(encoding)
self.write(BINUNICODE + pack("<i", n) + encoding)
else:
obj = obj.replace("\\", "\\u005c")
obj = obj.replace("\n", "\\u000a")
self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
self.memoize(obj)
dispatch[UnicodeType] = save_unicode
if StringType is UnicodeType:
# This is true for Jython
def save_string(self, obj, pack=struct.pack):
unicode = obj.isunicode()
if self.bin:
if unicode:
obj = obj.encode("utf-8")
l = len(obj)
if l < 256 and not unicode:
self.write(SHORT_BINSTRING + chr(l) + obj)
else:
s = pack("<i", l)
if unicode:
self.write(BINUNICODE + s + obj)
else:
self.write(BINSTRING + s + obj)
else:
if unicode:
obj = obj.replace("\\", "\\u005c")
obj = obj.replace("\n", "\\u000a")
obj = obj.encode('raw-unicode-escape')
self.write(UNICODE + obj + '\n')
else:
self.write(STRING + repr(obj) + '\n')
self.memoize(obj)
dispatch[StringType] = save_string
def save_tuple(self, obj):
write = self.write
proto = self.proto
n = len(obj)
if n == 0:
if proto:
write(EMPTY_TUPLE)
else:
write(MARK + TUPLE)
return
save = self.save
memo = self.memo
if n <= 3 and proto >= 2:
for element in obj:
save(element)
# Subtle. Same as in the big comment below.
if id(obj) in memo:
get = self.get(memo[id(obj)][0])
write(POP * n + get)
else:
write(_tuplesize2code[n])
self.memoize(obj)
return
# proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
# has more than 3 elements.
write(MARK)
for element in obj:
save(element)
if id(obj) in memo:
# Subtle. d was not in memo when we entered save_tuple(), so
# the process of saving the tuple's elements must have saved
# the tuple itself: the tuple is recursive. The proper action
# now is to throw away everything we put on the stack, and
# simply GET the tuple (it's already constructed). This check
# could have been done in the "for element" loop instead, but
# recursive tuples are a rare thing.
get = self.get(memo[id(obj)][0])
if proto:
write(POP_MARK + get)
else: # proto 0 -- POP_MARK not available
write(POP * (n+1) + get)
return
# No recursion.
self.write(TUPLE)
self.memoize(obj)
dispatch[TupleType] = save_tuple
# save_empty_tuple() isn't used by anything in Python 2.3. However, I
# found a Pickler subclass in Zope3 that calls it, so it's not harmless
# to remove it.
def save_empty_tuple(self, obj):
self.write(EMPTY_TUPLE)
def save_list(self, obj):
write = self.write
if self.bin:
write(EMPTY_LIST)
else: # proto 0 -- can't use EMPTY_LIST
write(MARK + LIST)
self.memoize(obj)
self._batch_appends(iter(obj))
dispatch[ListType] = save_list
# Keep in synch with cPickle's BATCHSIZE. Nothing will break if it gets
# out of synch, though.
_BATCHSIZE = 1000
def _batch_appends(self, items):
# Helper to batch up APPENDS sequences
save = self.save
write = self.write
if not self.bin:
for x in items:
save(x)
write(APPEND)
return
r = xrange(self._BATCHSIZE)
while items is not None:
tmp = []
for i in r:
try:
x = items.next()
tmp.append(x)
except StopIteration:
items = None
break
n = len(tmp)
if n > 1:
write(MARK)
for x in tmp:
save(x)
write(APPENDS)
elif n:
save(tmp[0])
write(APPEND)
# else tmp is empty, and we're done
def save_dict(self, obj):
write = self.write
if self.bin:
write(EMPTY_DICT)
else: # proto 0 -- can't use EMPTY_DICT
write(MARK + DICT)
self.memoize(obj)
self._batch_setitems(obj.iteritems())
dispatch[DictionaryType] = save_dict
if not PyStringMap is None:
dispatch[PyStringMap] = save_dict
def _batch_setitems(self, items):
# Helper to batch up SETITEMS sequences; proto >= 1 only
save = self.save
write = self.write
if not self.bin:
for k, v in items:
save(k)
save(v)
write(SETITEM)
return
r = xrange(self._BATCHSIZE)
while items is not None:
tmp = []
for i in r:
try:
tmp.append(items.next())
except StopIteration:
items = None
break
n = len(tmp)
if n > 1:
write(MARK)
for k, v in tmp:
save(k)
save(v)
write(SETITEMS)
elif n:
k, v = tmp[0]
save(k)
save(v)
write(SETITEM)
# else tmp is empty, and we're done
def save_inst(self, obj):
cls = obj.__class__
memo = self.memo
write = self.write
save = self.save
if hasattr(obj, '__getinitargs__'):
args = obj.__getinitargs__()
len(args) # XXX Assert it's a sequence
_keep_alive(args, memo)
else:
args = ()
write(MARK)
if self.bin:
save(cls)
for arg in args:
save(arg)
write(OBJ)
else:
for arg in args:
save(arg)
write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')
self.memoize(obj)
try:
getstate = obj.__getstate__
except AttributeError:
stuff = obj.__dict__
else:
stuff = getstate()
_keep_alive(stuff, memo)
save(stuff)
write(BUILD)
dispatch[InstanceType] = save_inst
def save_global(self, obj, name=None, pack=struct.pack):
write = self.write
memo = self.memo
if name is None:
name = obj.__name__
module = getattr(obj, "__module__", None)
if module is None:
module = whichmodule(obj, name)
try:
__import__(module)
mod = sys.modules[module]
klass = getattr(mod, name)
except (ImportError, KeyError, AttributeError):
raise PicklingError(
"Can't pickle %r: it's not found as %s.%s" %
(obj, module, name))
else:
if klass is not obj:
raise PicklingError(
"Can't pickle %r: it's not the same object as %s.%s" %
(obj, module, name))
if self.proto >= 2:
code = _extension_registry.get((module, name))
if code:
assert code > 0
if code <= 0xff:
write(EXT1 + chr(code))
elif code <= 0xffff:
write("%c%c%c" % (EXT2, code&0xff, code>>8))
else:
write(EXT4 + pack("<i", code))
return
write(GLOBAL + module + '\n' + name + '\n')
self.memoize(obj)
dispatch[ClassType] = save_global
dispatch[FunctionType] = save_global
dispatch[BuiltinFunctionType] = save_global
dispatch[TypeType] = save_global
# Pickling helpers
def _keep_alive(x, memo):
"""Keeps a reference to the object x in the memo.
Because we remember objects by their id, we have
to assure that possibly temporary objects are kept
alive by referencing them.
We store a reference at the id of the memo, which should
normally not be used unless someone tries to deepcopy
the memo itself...
"""
try:
memo[id(memo)].append(x)
except KeyError:
# aha, this is the first one :-)
memo[id(memo)]=[x]
# A cache for whichmodule(), mapping a function object to the name of
# the module in which the function was found.
classmap = {} # called classmap for backwards compatibility
def whichmodule(func, funcname):
"""Figure out the module in which a function occurs.
Search sys.modules for the module.
Cache in classmap.
Return a module name.
If the function cannot be found, return "__main__".
"""
# Python functions should always get an __module__ from their globals.
mod = getattr(func, "__module__", None)
if mod is not None:
return mod
if func in classmap:
return classmap[func]
for name, module in sys.modules.items():
if module is None:
continue # skip dummy package entries
if name != '__main__' and getattr(module, funcname, None) is func:
break
else:
name = '__main__'
classmap[func] = name
return name
# Unpickling machinery
class Unpickler:
def __init__(self, file):
"""This takes a file-like object for reading a pickle data stream.
The protocol version of the pickle is detected automatically, so no
proto argument is needed.
The file-like object must have two methods, a read() method that
takes an integer argument, and a readline() method that requires no
arguments. Both methods should return a string. Thus file-like
object can be a file object opened for reading, a StringIO object,
or any other custom object that meets this interface.
"""
self.readline = file.readline
self.read = file.read
self.memo = {}
def load(self):
"""Read a pickled object representation from the open file.
Return the reconstituted object hierarchy specified in the file.
"""
self.mark = object() # any new unique object
self.stack = []
self.append = self.stack.append
read = self.read
dispatch = self.dispatch
try:
while 1:
key = read(1)
dispatch[key](self)
except _Stop, stopinst:
return stopinst.value
# Return largest index k such that self.stack[k] is self.mark.
# If the stack doesn't contain a mark, eventually raises IndexError.
# This could be sped by maintaining another stack, of indices at which
# the mark appears. For that matter, the latter stack would suffice,
# and we wouldn't need to push mark objects on self.stack at all.
# Doing so is probably a good thing, though, since if the pickle is
# corrupt (or hostile) we may get a clue from finding self.mark embedded
# in unpickled objects.
def marker(self):
stack = self.stack
mark = self.mark
k = len(stack)-1
while stack[k] is not mark: k = k-1
return k
dispatch = {}
def load_eof(self):
raise EOFError
dispatch[''] = load_eof
def load_proto(self):
proto = ord(self.read(1))
if not 0 <= proto <= 2:
raise ValueError, "unsupported pickle protocol: %d" % proto
dispatch[PROTO] = load_proto
def load_persid(self):
pid = self.readline()[:-1]
self.append(self.persistent_load(pid))
dispatch[PERSID] = load_persid
def load_binpersid(self):
pid = self.stack.pop()
self.append(self.persistent_load(pid))
dispatch[BINPERSID] = load_binpersid
def load_none(self):
self.append(None)
dispatch[NONE] = load_none
def load_false(self):
self.append(False)
dispatch[NEWFALSE] = load_false
def load_true(self):
self.append(True)
dispatch[NEWTRUE] = load_true
def load_int(self):
data = self.readline()
if data == FALSE[1:]:
val = False
elif data == TRUE[1:]:
val = True
else:
try:
val = int(data)
except ValueError:
val = long(data)
self.append(val)
dispatch[INT] = load_int
def load_binint(self):
self.append(mloads('i' + self.read(4)))
dispatch[BININT] = load_binint
def load_binint1(self):
self.append(ord(self.read(1)))
dispatch[BININT1] = load_binint1
def load_binint2(self):
self.append(mloads('i' + self.read(2) + '\000\000'))
dispatch[BININT2] = load_binint2
def load_long(self):
self.append(long(self.readline()[:-1], 0))
dispatch[LONG] = load_long
def load_long1(self):
n = ord(self.read(1))
bytes = self.read(n)
self.append(decode_long(bytes))
dispatch[LONG1] = load_long1
def load_long4(self):
n = mloads('i' + self.read(4))
bytes = self.read(n)
self.append(decode_long(bytes))
dispatch[LONG4] = load_long4
def load_float(self):
self.append(float(self.readline()[:-1]))
dispatch[FLOAT] = load_float
def load_binfloat(self, unpack=struct.unpack):
self.append(unpack('>d', self.read(8))[0])
dispatch[BINFLOAT] = load_binfloat
def load_string(self):
rep = self.readline()[:-1]
for q in "\"'": # double or single quote
if rep.startswith(q):
if len(rep) < 2 or not rep.endswith(q):
raise ValueError, "insecure string pickle"
rep = rep[len(q):-len(q)]
break
else:
raise ValueError, "insecure string pickle"
self.append(rep.decode("string-escape"))
dispatch[STRING] = load_string
def load_binstring(self):
len = mloads('i' + self.read(4))
self.append(self.read(len))
dispatch[BINSTRING] = load_binstring
def load_unicode(self):
self.append(unicode(self.readline()[:-1],'raw-unicode-escape'))
dispatch[UNICODE] = load_unicode
def load_binunicode(self):
len = mloads('i' + self.read(4))
self.append(unicode(self.read(len),'utf-8'))
dispatch[BINUNICODE] = load_binunicode
def load_short_binstring(self):
len = ord(self.read(1))
self.append(self.read(len))
dispatch[SHORT_BINSTRING] = load_short_binstring
def load_tuple(self):
k = self.marker()
self.stack[k:] = [tuple(self.stack[k+1:])]
dispatch[TUPLE] = load_tuple
def load_empty_tuple(self):
self.stack.append(())
dispatch[EMPTY_TUPLE] = load_empty_tuple
def load_tuple1(self):
self.stack[-1] = (self.stack[-1],)
dispatch[TUPLE1] = load_tuple1
def load_tuple2(self):
self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
dispatch[TUPLE2] = load_tuple2
def load_tuple3(self):
self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
dispatch[TUPLE3] = load_tuple3
def load_empty_list(self):
self.stack.append([])
dispatch[EMPTY_LIST] = load_empty_list
def load_empty_dictionary(self):
self.stack.append({})
dispatch[EMPTY_DICT] = load_empty_dictionary
def load_list(self):
k = self.marker()
self.stack[k:] = [self.stack[k+1:]]
dispatch[LIST] = load_list
def load_dict(self):
k = self.marker()
d = {}
items = self.stack[k+1:]
for i in range(0, len(items), 2):
key = items[i]
value = items[i+1]
d[key] = value
self.stack[k:] = [d]
dispatch[DICT] = load_dict
    # INST and OBJ differ only in how they get a class object.  Not only is
    # it sensible to do the rest in a common routine; when the two routines
    # were separate they diverged and grew different bugs.
# klass is the class to instantiate, and k points to the topmost mark
# object, following which are the arguments for klass.__init__.
def _instantiate(self, klass, k):
args = tuple(self.stack[k+1:])
del self.stack[k:]
instantiated = 0
if (not args and
type(klass) is ClassType and
not hasattr(klass, "__getinitargs__")):
try:
value = _EmptyClass()
value.__class__ = klass
instantiated = 1
except RuntimeError:
# In restricted execution, assignment to inst.__class__ is
# prohibited
pass
if not instantiated:
try:
value = klass(*args)
except TypeError, err:
raise TypeError, "in constructor for %s: %s" % (
klass.__name__, str(err)), sys.exc_info()[2]
self.append(value)
def load_inst(self):
module = self.readline()[:-1]
name = self.readline()[:-1]
klass = self.find_class(module, name)
self._instantiate(klass, self.marker())
dispatch[INST] = load_inst
def load_obj(self):
# Stack is ... markobject classobject arg1 arg2 ...
k = self.marker()
klass = self.stack.pop(k+1)
self._instantiate(klass, k)
dispatch[OBJ] = load_obj
def load_newobj(self):
args = self.stack.pop()
cls = self.stack[-1]
obj = cls.__new__(cls, *args)
self.stack[-1] = obj
dispatch[NEWOBJ] = load_newobj
def load_global(self):
module = self.readline()[:-1]
name = self.readline()[:-1]
klass = self.find_class(module, name)
self.append(klass)
dispatch[GLOBAL] = load_global
def load_ext1(self):
code = ord(self.read(1))
self.get_extension(code)
dispatch[EXT1] = load_ext1
def load_ext2(self):
code = mloads('i' + self.read(2) + '\000\000')
self.get_extension(code)
dispatch[EXT2] = load_ext2
def load_ext4(self):
code = mloads('i' + self.read(4))
self.get_extension(code)
dispatch[EXT4] = load_ext4
def get_extension(self, code):
nil = []
obj = _extension_cache.get(code, nil)
if obj is not nil:
self.append(obj)
return
key = _inverted_registry.get(code)
if not key:
raise ValueError("unregistered extension code %d" % code)
obj = self.find_class(*key)
_extension_cache[code] = obj
self.append(obj)
def find_class(self, module, name):
# Subclasses may override this
__import__(module)
mod = sys.modules[module]
klass = getattr(mod, name)
return klass
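    # Because find_class() resolves arbitrary module/name pairs, unpickling
    # untrusted data can reach arbitrary callables.  A common hardening
    # pattern (a sketch, not part of this module) is to subclass Unpickler
    # and whitelist what may be resolved:
    #
    #   class SafeUnpickler(Unpickler):
    #       ALLOWED = {('__builtin__', 'set')}        # illustrative whitelist
    #       def find_class(self, module, name):
    #           if (module, name) not in self.ALLOWED:
    #               raise ValueError("forbidden global %s.%s" % (module, name))
    #           return Unpickler.find_class(self, module, name)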
def load_reduce(self):
stack = self.stack
args = stack.pop()
func = stack[-1]
value = func(*args)
stack[-1] = value
dispatch[REDUCE] = load_reduce
def load_pop(self):
del self.stack[-1]
dispatch[POP] = load_pop
def load_pop_mark(self):
k = self.marker()
del self.stack[k:]
dispatch[POP_MARK] = load_pop_mark
def load_dup(self):
self.append(self.stack[-1])
dispatch[DUP] = load_dup
def load_get(self):
self.append(self.memo[self.readline()[:-1]])
dispatch[GET] = load_get
def load_binget(self):
i = ord(self.read(1))
self.append(self.memo[repr(i)])
dispatch[BINGET] = load_binget
def load_long_binget(self):
i = mloads('i' + self.read(4))
self.append(self.memo[repr(i)])
dispatch[LONG_BINGET] = load_long_binget
def load_put(self):
self.memo[self.readline()[:-1]] = self.stack[-1]
dispatch[PUT] = load_put
def load_binput(self):
i = ord(self.read(1))
self.memo[repr(i)] = self.stack[-1]
dispatch[BINPUT] = load_binput
def load_long_binput(self):
i = mloads('i' + self.read(4))
self.memo[repr(i)] = self.stack[-1]
dispatch[LONG_BINPUT] = load_long_binput
def load_append(self):
stack = self.stack
value = stack.pop()
list = stack[-1]
list.append(value)
dispatch[APPEND] = load_append
def load_appends(self):
stack = self.stack
mark = self.marker()
list = stack[mark - 1]
list.extend(stack[mark + 1:])
del stack[mark:]
dispatch[APPENDS] = load_appends
def load_setitem(self):
stack = self.stack
value = stack.pop()
key = stack.pop()
dict = stack[-1]
dict[key] = value
dispatch[SETITEM] = load_setitem
def load_setitems(self):
stack = self.stack
mark = self.marker()
dict = stack[mark - 1]
for i in range(mark + 1, len(stack), 2):
dict[stack[i]] = stack[i + 1]
del stack[mark:]
dispatch[SETITEMS] = load_setitems
def load_build(self):
stack = self.stack
state = stack.pop()
inst = stack[-1]
setstate = getattr(inst, "__setstate__", None)
if setstate:
setstate(state)
return
slotstate = None
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if state:
try:
d = inst.__dict__
try:
for k, v in state.iteritems():
d[intern(k)] = v
# keys in state don't have to be strings
# don't blow up, but don't go out of our way
except TypeError:
d.update(state)
except RuntimeError:
# XXX In restricted execution, the instance's __dict__
# is not accessible. Use the old way of unpickling
# the instance variables. This is a semantic
# difference when unpickling in restricted
# vs. unrestricted modes.
# Note, however, that cPickle has never tried to do the
# .update() business, and always uses
# PyObject_SetItem(inst.__dict__, key, value) in a
# loop over state.items().
for k, v in state.items():
setattr(inst, k, v)
if slotstate:
for k, v in slotstate.items():
setattr(inst, k, v)
dispatch[BUILD] = load_build
def load_mark(self):
self.append(self.mark)
dispatch[MARK] = load_mark
def load_stop(self):
value = self.stack.pop()
raise _Stop(value)
dispatch[STOP] = load_stop
# Helper class for load_inst/load_obj
class _EmptyClass:
pass
# Encode/decode longs in linear time.
import binascii as _binascii
def encode_long(x):
r"""Encode a long to a two's complement little-endian binary string.
Note that 0L is a special case, returning an empty string, to save a
byte in the LONG1 pickling context.
>>> encode_long(0L)
''
>>> encode_long(255L)
'\xff\x00'
>>> encode_long(32767L)
'\xff\x7f'
>>> encode_long(-256L)
'\x00\xff'
>>> encode_long(-32768L)
'\x00\x80'
>>> encode_long(-128L)
'\x80'
>>> encode_long(127L)
'\x7f'
>>>
"""
if x == 0:
return ''
if x > 0:
ashex = hex(x)
assert ashex.startswith("0x")
njunkchars = 2 + ashex.endswith('L')
nibbles = len(ashex) - njunkchars
if nibbles & 1:
# need an even # of nibbles for unhexlify
ashex = "0x0" + ashex[2:]
elif int(ashex[2], 16) >= 8:
# "looks negative", so need a byte of sign bits
ashex = "0x00" + ashex[2:]
else:
# Build the 256's-complement: (1L << nbytes) + x. The trick is
# to find the number of bytes in linear time (although that should
# really be a constant-time task).
ashex = hex(-x)
assert ashex.startswith("0x")
njunkchars = 2 + ashex.endswith('L')
nibbles = len(ashex) - njunkchars
if nibbles & 1:
# Extend to a full byte.
nibbles += 1
nbits = nibbles * 4
x += 1L << nbits
assert x > 0
ashex = hex(x)
njunkchars = 2 + ashex.endswith('L')
newnibbles = len(ashex) - njunkchars
if newnibbles < nibbles:
ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:]
if int(ashex[2], 16) < 8:
# "looks positive", so need a byte of sign bits
ashex = "0xff" + ashex[2:]
if ashex.endswith('L'):
ashex = ashex[2:-1]
else:
ashex = ashex[2:]
assert len(ashex) & 1 == 0, (x, ashex)
binary = _binascii.unhexlify(ashex)
return binary[::-1]
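# Worked trace of the negative branch above (matches the -256L doctest):
# for encode_long(-256), hex(256) is '0x100' (3 nibbles, rounded up to 4),
# so nbits = 16 and x becomes -256 + (1 << 16) = 0xff00.  The leading nibble
# 'f' already "looks negative", so no extra sign byte is prepended; unhexlify
# yields '\xff\x00', which reversed gives the little-endian '\x00\xff'.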
def decode_long(data):
r"""Decode a long from a two's complement little-endian binary string.
>>> decode_long('')
0L
>>> decode_long("\xff\x00")
255L
>>> decode_long("\xff\x7f")
32767L
>>> decode_long("\x00\xff")
-256L
>>> decode_long("\x00\x80")
-32768L
>>> decode_long("\x80")
-128L
>>> decode_long("\x7f")
127L
"""
nbytes = len(data)
if nbytes == 0:
return 0L
ashex = _binascii.hexlify(data[::-1])
n = long(ashex, 16) # quadratic time before Python 2.3; linear now
if data[-1] >= '\x80':
n -= 1L << (nbytes * 8)
return n
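# Worked trace of decode_long (matches the "\x00\xff" doctest): the input is
# reversed to '\xff\x00' and hexlified to 'ff00', giving n = 65280; because
# the last input byte '\xff' is >= '\x80', the value is sign-corrected by
# subtracting 1 << 16, yielding -256L.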
# Shorthands
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
def dump(obj, file, protocol=None):
Pickler(file, protocol).dump(obj)
def dumps(obj, protocol=None):
file = StringIO()
Pickler(file, protocol).dump(obj)
return file.getvalue()
def load(file):
return Unpickler(file).load()
def loads(str):
file = StringIO(str)
return Unpickler(file).load()
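# Round-trip sketch using the shorthands above (illustrative only):
#
#   >>> s = dumps({'spam': [1, 2, 3]}, protocol=2)
#   >>> loads(s)
#   {'spam': [1, 2, 3]}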
# Doctest
def _test():
import doctest
return doctest.testmod()
if __name__ == "__main__":
_test()
| 32.436063 | 80 | 0.558504 |
37158288c63a24d24c8a3a92c3e040d63a29f33c | 229,585 | py | Python
| pysnmp-with-texts/DAP-3520-v115.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | ["Apache-2.0"] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z
| pysnmp-with-texts/DAP-3520-v115.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | ["Apache-2.0"] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z
| pysnmp-with-texts/DAP-3520-v115.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | ["Apache-2.0"] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module DAP-3520-v115 (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DAP-3520-v115
# Produced by pysmi-0.3.4 at Wed May 1 12:36:38 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
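# A typical way to load a pysmi-generated module like this one through
# pysnmp's MIB builder (a sketch; the directory path is illustrative, and the
# builder normally injects the `mibBuilder` name used below when it executes
# this file):
#
#   from pysnmp.smi import builder
#   mibBuilder = builder.MibBuilder()
#   mibBuilder.addMibSources(builder.DirMibSource('/path/to/compiled/mibs'))
#   mibBuilder.loadModules('DAP-3520-v115')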
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
MibIdentifier, ObjectIdentity, IpAddress, Bits, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, private, iso, Counter64, Unsigned32, Counter32, Integer32, Gauge32, TimeTicks, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "ObjectIdentity", "IpAddress", "Bits", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "private", "iso", "Counter64", "Unsigned32", "Counter32", "Integer32", "Gauge32", "TimeTicks", "NotificationType")
DisplayString, MacAddress, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "MacAddress", "TextualConvention")
class DisplayString(OctetString):
pass
enterprises = MibIdentifier((1, 3, 6, 1, 4, 1))
dlink = MibIdentifier((1, 3, 6, 1, 4, 1, 171))
dlink_products = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10)).setLabel("dlink-products")
dlink_dapfamily = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37)).setLabel("dlink-dapfamily")
dap3520 = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37))
interface = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2))
lan = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1))
lanIfSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1))
lanIfSettingTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1), )
if mibBuilder.loadTexts: lanIfSettingTable.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfSettingTable.setDescription('')
lanIfSettingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: lanIfSettingEntry.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfSettingEntry.setDescription('')
lanIfGetIpAddressFrom = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("static", 1), ("dynamic", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanIfGetIpAddressFrom.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfGetIpAddressFrom.setDescription('')
lanIfIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanIfIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfIpAddress.setDescription('')
lanIfSubnetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanIfSubnetMask.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfSubnetMask.setDescription('')
lanIfDefaultGateway = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanIfDefaultGateway.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfDefaultGateway.setDescription('')
lanIfMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lanIfMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfMacAddress.setDescription('')
lanIfPrimaryDNS = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanIfPrimaryDNS.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfPrimaryDNS.setDescription('')
lanIfSecondaryDNS = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 1, 1, 1, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lanIfSecondaryDNS.setStatus('mandatory')
if mibBuilder.loadTexts: lanIfSecondaryDNS.setDescription('')
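# The lanIfSetting table defined above lives at OID
# 1.3.6.1.4.1.171.10.37.37.2.1.1.1, so (assuming, for illustration, that the
# AP answers SNMP v2c with a 'public' community and <ap-ip> is its address)
# its columns could be walked with a command such as:
#
#   snmpwalk -v2c -c public <ap-ip> 1.3.6.1.4.1.171.10.37.37.2.1.1.1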
wirelesslan = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3))
wirelessLanIfNumber = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wirelessLanIfNumber.setStatus('mandatory')
if mibBuilder.loadTexts: wirelessLanIfNumber.setDescription('')
wirelessLanIfTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 2), )
if mibBuilder.loadTexts: wirelessLanIfTable.setStatus('mandatory')
if mibBuilder.loadTexts: wirelessLanIfTable.setDescription('')
wirelessLanIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: wirelessLanIfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wirelessLanIfEntry.setDescription('')
wirelessLanIfDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 2, 1, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wirelessLanIfDesc.setStatus('mandatory')
if mibBuilder.loadTexts: wirelessLanIfDesc.setDescription('')
wirelessLanIfObjectID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 2, 1, 2), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wirelessLanIfObjectID.setStatus('mandatory')
if mibBuilder.loadTexts: wirelessLanIfObjectID.setDescription('')
ieee802dot11 = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3))
dot11Parameters = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1))
dot11ParametersTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1), )
if mibBuilder.loadTexts: dot11ParametersTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ParametersTable.setDescription('')
dot11ParametersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11ParametersEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ParametersEntry.setDescription('')
dot11Ssid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Ssid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Ssid.setDescription('')
dot11SsidBroadcast = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11SsidBroadcast.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SsidBroadcast.setDescription('')
dot11Channel = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Channel.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Channel.setDescription('')
dot11ChannelList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ChannelList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ChannelList.setDescription('')
dot11Frequency = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11Frequency.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Frequency.setDescription('')
dot11DataRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 11), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11DataRate.setStatus('mandatory')
if mibBuilder.loadTexts: dot11DataRate.setDescription('')
dot11WifiMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 6, 7, 8, 9))).clone(namedValues=NamedValues(("band2dot4-n", 2), ("band2dot4-bg", 3), ("band2dot4-bgn", 6), ("band5-a", 7), ("band5-n", 8), ("band5-an", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WifiMode.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WifiMode.setDescription('')
dot11BeaconInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(25, 200))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11BeaconInterval.setStatus('mandatory')
if mibBuilder.loadTexts: dot11BeaconInterval.setDescription('')
dot11Dtim = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Dtim.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Dtim.setDescription('')
dot11TransmitPower = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("full", 1), ("half", 2), ("quarter", 3), ("eighth", 4), ("min", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11TransmitPower.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TransmitPower.setDescription('')
dot11RadioWave = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RadioWave.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RadioWave.setDescription('')
dot11AutoChannelScan = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11AutoChannelScan.setStatus('mandatory')
if mibBuilder.loadTexts: dot11AutoChannelScan.setDescription('')
dot11Wmm = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Wmm.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Wmm.setDescription('')
dot11ApMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("ap", 1), ("wdsWithAp", 2), ("wdsWithoutAp", 3), ("wirelessClient", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ApMode.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ApMode.setDescription('')
dot11ChannelWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("cwm20MHz", 1), ("cwmAuto", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ChannelWidth.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ChannelWidth.setDescription('1: 20 MHz; 2: Auto 20/40 MHz')
dot11DataRateList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 30), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11DataRateList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11DataRateList.setDescription('')
dot11AckTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 31), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11AckTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: dot11AckTimeout.setDescription('')
dot11ShortGI = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ShortGI.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ShortGI.setDescription('')
dot11Igmpsnooping = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 33), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Igmpsnooping.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Igmpsnooping.setDescription('')
dot11Band = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("band2dot4gHz", 0), ("band5gHz", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Band.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Band.setDescription('')
dot11Band5GHzDataRateList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 35), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11Band5GHzDataRateList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Band5GHzDataRateList.setDescription('')
dot11Band5GHzWdsChannelList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 36), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11Band5GHzWdsChannelList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Band5GHzWdsChannelList.setDescription('')
dot11Band5GHzChannelList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 37), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11Band5GHzChannelList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Band5GHzChannelList.setDescription('')
dot11ApModeStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 38), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ApModeStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ApModeStatus.setDescription('')
dot11Countrycode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 39), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11Countrycode.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Countrycode.setDescription('')
dot11Application = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 40), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("disable", 0), ("indoor", 1), ("outdoor", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Application.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Application.setDescription('')
dot11Band5GHzOutdoorChannelList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 41), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11Band5GHzOutdoorChannelList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Band5GHzOutdoorChannelList.setDescription('')
dot11MulticastRateABandList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 42), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11MulticastRateABandList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MulticastRateABandList.setDescription('')
dot11MulticastRateGBandList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 43), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11MulticastRateGBandList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MulticastRateGBandList.setDescription('')
dot11MulticastRateABand = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 44), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MulticastRateABand.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MulticastRateABand.setDescription('')
dot11MulticastRateGBand = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 45), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MulticastRateGBand.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MulticastRateGBand.setDescription('')
dot11HT2040Coexistence = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 1, 1, 49), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11HT2040Coexistence.setStatus('mandatory')
if mibBuilder.loadTexts: dot11HT2040Coexistence.setDescription('')
dot11RemoteApMacAddress = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2))
dot11RemoteApMacAddressTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 1), )
if mibBuilder.loadTexts: dot11RemoteApMacAddressTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressTable.setDescription('')
dot11RemoteApMacAddressEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11RemoteApMacAddressIndex"))
if mibBuilder.loadTexts: dot11RemoteApMacAddressEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressEntry.setDescription('')
dot11RemoteApMacAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)))
if mibBuilder.loadTexts: dot11RemoteApMacAddressIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressIndex.setDescription('')
dot11RemoteApMacAddressList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 1, 1, 2), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RemoteApMacAddressList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressList.setDescription('')
dot11RemoteApMacAddressAccessTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 2), )
if mibBuilder.loadTexts: dot11RemoteApMacAddressAccessTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressAccessTable.setDescription('')
dot11RemoteApMacAddressAccessEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11RemoteApMacAddressAccessEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressAccessEntry.setDescription('')
dot11RemoteApMacAddressAdd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 2, 1, 1), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RemoteApMacAddressAdd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressAdd.setDescription('')
dot11RemoteApMacAddressDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 2, 1, 2), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RemoteApMacAddressDelete.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressDelete.setDescription('')
dot11RemoteApMacAddressDeleteAll = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("deleteall", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RemoteApMacAddressDeleteAll.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RemoteApMacAddressDeleteAll.setDescription('')
dot11SiteSurvey = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3))
dot11SiteSurveyRefreshTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 1), )
if mibBuilder.loadTexts: dot11SiteSurveyRefreshTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyRefreshTable.setDescription('')
dot11SiteSurveyRefreshEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11SiteSurveyRefreshEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyRefreshEntry.setDescription('')
dot11SiteSurveyRefresh = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11SiteSurveyRefresh.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyRefresh.setDescription('')
dot11SiteSurveyTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2), )
if mibBuilder.loadTexts: dot11SiteSurveyTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyTable.setDescription('')
dot11SiteSurveyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11SiteSurveyIndex"))
if mibBuilder.loadTexts: dot11SiteSurveyEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyEntry.setDescription('')
dot11SiteSurveyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11SiteSurveyIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyIndex.setDescription('')
dot11SiteSurveyBssType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11SiteSurveyBssType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyBssType.setDescription('')
dot11SiteSurveyChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11SiteSurveyChannel.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyChannel.setDescription('')
dot11SiteSurveyRssi = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11SiteSurveyRssi.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyRssi.setDescription('')
dot11SiteSurveyBssid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11SiteSurveyBssid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyBssid.setDescription('')
dot11SiteSurveyEncryption = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1, 6), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11SiteSurveyEncryption.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveyEncryption.setDescription('')
dot11SiteSurveySsid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 3, 2, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11SiteSurveySsid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SiteSurveySsid.setDescription('')
dot11WdsSiteSurvey = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4))
dot11WdsSiteSurveyRefreshTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 1), )
if mibBuilder.loadTexts: dot11WdsSiteSurveyRefreshTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyRefreshTable.setDescription('')
dot11WdsSiteSurveyRefreshEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11WdsSiteSurveyRefreshEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyRefreshEntry.setDescription('')
dot11WdsSiteSurveyRefresh = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WdsSiteSurveyRefresh.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyRefresh.setDescription('')
dot11WdsSiteSurveyTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2), )
if mibBuilder.loadTexts: dot11WdsSiteSurveyTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyTable.setDescription('')
dot11WdsSiteSurveyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11WdsSiteSurveyIndex"))
if mibBuilder.loadTexts: dot11WdsSiteSurveyEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyEntry.setDescription('')
dot11WdsSiteSurveyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11WdsSiteSurveyIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyIndex.setDescription('')
dot11WdsSiteSurveyChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsSiteSurveyChannel.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyChannel.setDescription('')
dot11WdsSiteSurveyMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsSiteSurveyMode.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyMode.setDescription('')
dot11WdsSiteSurveyBssid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsSiteSurveyBssid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyBssid.setDescription('')
dot11WdsSiteSurveyEncryption = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2, 1, 5), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsSiteSurveyEncryption.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveyEncryption.setDescription('')
dot11WdsSiteSurveySsid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 1, 4, 2, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsSiteSurveySsid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSiteSurveySsid.setDescription('')
dot11Securities = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2))
dot11SecuritiesTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1), )
if mibBuilder.loadTexts: dot11SecuritiesTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SecuritiesTable.setDescription('')
dot11SecuritiesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11SecuritiesEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SecuritiesEntry.setDescription('')
dot11Authentication = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("opensystem", 1), ("sharedkey", 2), ("opensystem-sharedkey", 3), ("wpa-psk", 4), ("wpa-eap", 5), ("wpa2-psk", 6), ("wpa2-eap", 7), ("wpa2-auto-psk", 8), ("wpa2-auto-eap", 9), ("dot1x", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Authentication.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Authentication.setDescription('')
dot11Encryption = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Encryption.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Encryption.setDescription('')
dot11KeyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("first", 1), ("second", 2), ("third", 3), ("fourth", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11KeyIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11KeyIndex.setDescription('')
dot11PassPhrase = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PassPhrase.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PassPhrase.setDescription('')
dot11CipherType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("auto", 1), ("aes", 2), ("tkip", 3), ("wep", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11CipherType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11CipherType.setDescription('')
dot11GroupKeyUpdateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(300, 9999999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupKeyUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupKeyUpdateInterval.setDescription('')
dot11PrimaryRadiusServer = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PrimaryRadiusServer.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PrimaryRadiusServer.setDescription('')
dot11PrimaryRadiusPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PrimaryRadiusPort.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PrimaryRadiusPort.setDescription('')
dot11PrimaryRadiusSecret = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PrimaryRadiusSecret.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PrimaryRadiusSecret.setDescription('')
dot11NetworkAccessProtection = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11NetworkAccessProtection.setStatus('mandatory')
if mibBuilder.loadTexts: dot11NetworkAccessProtection.setDescription('')
dot11RadiusKeyUpdateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(300, 9999999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RadiusKeyUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RadiusKeyUpdateInterval.setDescription('')
dot11WpaEapType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ttls", 1), ("peap", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WpaEapType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WpaEapType.setDescription('')
dot11WpaEapAuthenticationType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4))).clone(namedValues=NamedValues(("mschapv2", 2), ("pap", 3), ("chap", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WpaEapAuthenticationType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WpaEapAuthenticationType.setDescription('')
dot11WpaEapUsername = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 27), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WpaEapUsername.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WpaEapUsername.setDescription('')
dot11WpaEapPasswd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 28), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WpaEapPasswd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WpaEapPasswd.setDescription('')
dot11AutoRekeyControl = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11AutoRekeyControl.setStatus('mandatory')
if mibBuilder.loadTexts: dot11AutoRekeyControl.setDescription('disable (Manual), enable (Periodical Key Change)')
dot11AutoRekeyStartWeek = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("sun", 0), ("mon", 1), ("tue", 2), ("wed", 3), ("thu", 4), ("fri", 5), ("sat", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11AutoRekeyStartWeek.setStatus('mandatory')
if mibBuilder.loadTexts: dot11AutoRekeyStartWeek.setDescription('')
dot11AutoRekeyStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 31), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11AutoRekeyStartTime.setStatus('mandatory')
if mibBuilder.loadTexts: dot11AutoRekeyStartTime.setDescription(" Please follow this format: 'xx:xx' For example: 23:45 ")
dot11AutoRekeyTimeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 32), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 168))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11AutoRekeyTimeInterval.setStatus('mandatory')
if mibBuilder.loadTexts: dot11AutoRekeyTimeInterval.setDescription('')
dot11AutoRekeyPassPhrase = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 1, 1, 33), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11AutoRekeyPassPhrase.setStatus('mandatory')
if mibBuilder.loadTexts: dot11AutoRekeyPassPhrase.setDescription('')
dot11WepKeyTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 2), )
if mibBuilder.loadTexts: dot11WepKeyTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WepKeyTable.setDescription('')
dot11WepKeyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11wepKeyIndex"))
if mibBuilder.loadTexts: dot11WepKeyEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WepKeyEntry.setDescription('')
dot11wepKeyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)))
if mibBuilder.loadTexts: dot11wepKeyIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11wepKeyIndex.setDescription('')
dot11WepKeyEntryMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ascii", 1), ("hex", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WepKeyEntryMethod.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WepKeyEntryMethod.setDescription('')
dot11WepKey = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 2, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WepKey.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WepKey.setDescription('')
dot11Filter = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3))
dot11PartionTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 1), )
if mibBuilder.loadTexts: dot11PartionTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PartionTable.setDescription('')
dot11PartionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11PartionEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PartionEntry.setDescription('')
dot11EthernetToWlanAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11EthernetToWlanAccess.setStatus('mandatory')
if mibBuilder.loadTexts: dot11EthernetToWlanAccess.setDescription('')
dot11InternalStationConnectionPrimarySSID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1), ("guestmode", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11InternalStationConnectionPrimarySSID.setStatus('mandatory')
if mibBuilder.loadTexts: dot11InternalStationConnectionPrimarySSID.setDescription('')
dot11InternalStationConnectionMultiSSID1 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1), ("guestmode", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11InternalStationConnectionMultiSSID1.setStatus('mandatory')
if mibBuilder.loadTexts: dot11InternalStationConnectionMultiSSID1.setDescription('')
dot11InternalStationConnectionMultiSSID2 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1), ("guestmode", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11InternalStationConnectionMultiSSID2.setStatus('mandatory')
if mibBuilder.loadTexts: dot11InternalStationConnectionMultiSSID2.setDescription('')
dot11InternalStationConnectionMultiSSID3 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1), ("guestmode", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11InternalStationConnectionMultiSSID3.setStatus('mandatory')
if mibBuilder.loadTexts: dot11InternalStationConnectionMultiSSID3.setDescription('')
dot11MacAccessControlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 2), )
if mibBuilder.loadTexts: dot11MacAccessControlTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlTable.setDescription('')
dot11MacAccessControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11MacAccessControlEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlEntry.setDescription('')
dot11MacAccessControl = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("accept", 1), ("reject", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacAccessControl.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControl.setDescription('')
dot11MacAccessControlMacAddressAdd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 2, 1, 2), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacAccessControlMacAddressAdd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlMacAddressAdd.setDescription('')
dot11MacAccessControlMacAddressDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 2, 1, 3), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacAccessControlMacAddressDelete.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlMacAddressDelete.setDescription('')
dot11MacAccessControlListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 3), )
if mibBuilder.loadTexts: dot11MacAccessControlListTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlListTable.setDescription('')
dot11MacAccessControlListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11MacAccessControlListIndex"))
if mibBuilder.loadTexts: dot11MacAccessControlListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlListEntry.setDescription('')
dot11MacAccessControlListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)))
if mibBuilder.loadTexts: dot11MacAccessControlListIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlListIndex.setDescription('')
dot11MacAccessControlListMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 2, 3, 3, 1, 2), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacAccessControlListMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacAccessControlListMacAddress.setDescription('')
dot11ClientInformation = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4))
dot11GetClientInformationTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 1), )
if mibBuilder.loadTexts: dot11GetClientInformationTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GetClientInformationTable.setDescription('')
dot11GetClientInformationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11GetClientInformationEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GetClientInformationEntry.setDescription('')
dot11ClientInformationRefresh = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("get", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ClientInformationRefresh.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientInformationRefresh.setDescription('')
dot11ClientInformationTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2), )
if mibBuilder.loadTexts: dot11ClientInformationTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientInformationTable.setDescription('')
dot11ClientInformationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11ClientIndex"))
if mibBuilder.loadTexts: dot11ClientInformationEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientInformationEntry.setDescription('')
dot11ClientIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11ClientIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientIndex.setDescription('')
dot11ClientMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientMacAddress.setDescription('')
dot11ClientBand = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("dot11b", 1), ("dot11g", 2), ("dot11n", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientBand.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientBand.setDescription('')
dot11ClientAuthentication = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("opensystem", 1), ("sharedkey", 2), ("wpa-psk", 4), ("wpa-eap", 5), ("wpa2-psk", 6), ("wpa2-eap", 7), ("wpa2-auto-psk", 8), ("wpa2-auto-eap", 9), ("dot1x", 10), ("wep", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientAuthentication.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientAuthentication.setDescription('')
dot11ClientRssi = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientRssi.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientRssi.setDescription('')
dot11ClientPsm = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientPsm.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientPsm.setDescription('')
dot11SSIDIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 7), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11SSIDIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11SSIDIndex.setDescription('')
dot11ClientIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 10), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientIpAddress.setDescription('')
dot11ClientTxBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientTxBytesCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientTxBytesCount.setDescription('')
dot11ClientRxBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 4, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ClientRxBytesCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ClientRxBytesCount.setDescription('')
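# The client information columns above are indexed by ifIndex and
# dot11ClientIndex and are normally read by an SNMP manager rather than from
# this module. A minimal read-side sketch, kept as a comment so that importing
# this MIB module stays side-effect free; it assumes pysnmp's high-level API
# and a reachable agent -- the address 192.0.2.1 and community 'public' are
# hypothetical placeholders:
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, nextCmd)
#
#   for errorIndication, errorStatus, errorIndex, varBinds in nextCmd(
#           SnmpEngine(),
#           CommunityData('public'),
#           UdpTransportTarget(('192.0.2.1', 161)),
#           ContextData(),
#           ObjectType(ObjectIdentity('DAP-3520-v115', 'dot11ClientMacAddress')),
#           ObjectType(ObjectIdentity('DAP-3520-v115', 'dot11ClientRssi')),
#           lexicographicMode=False):   # stop once these columns are exhausted
#       if errorIndication or errorStatus:
#           break
#       for varBind in varBinds:
#           print(varBind.prettyPrint())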
dot11WdsMonitor = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5))
dot11GetWdsTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 1), )
if mibBuilder.loadTexts: dot11GetWdsTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GetWdsTable.setDescription('')
dot11GetWdsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11GetWdsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GetWdsEntry.setDescription('')
dot11WdsRefresh = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("get", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WdsRefresh.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsRefresh.setDescription('')
dot11WdsTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2), )
if mibBuilder.loadTexts: dot11WdsTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsTable.setDescription('')
dot11WdsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11WdsIndex"))
if mibBuilder.loadTexts: dot11WdsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsEntry.setDescription('')
dot11WdsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11WdsIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsIndex.setDescription('')
dot11WdsMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsMacAddress.setDescription('')
dot11WdsBand = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("dot11b", 1), ("dot11g", 2), ("dot11n", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsBand.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsBand.setDescription('')
dot11WdsAuthentication = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 6, 7, 8, 9, 11))).clone(namedValues=NamedValues(("opensystem", 1), ("sharedkey", 2), ("wpa-enterprise", 3), ("wpa-personal", 4), ("wpa2-enterprise", 6), ("wpa2-personal", 7), ("wpa2-auto-enterprise", 8), ("wpa2-auto-personal", 9), ("wep", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsAuthentication.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsAuthentication.setDescription('')
dot11WdsRssi = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsRssi.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsRssi.setDescription('')
dot11WdsSsidIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 7), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsSsidIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsSsidIndex.setDescription('')
dot11WdsConnected = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsConnected.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsConnected.setDescription('')
dot11WdsStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 9), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsStatus.setDescription('')
dot11WdsPsm = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 5, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11WdsPsm.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WdsPsm.setDescription('')
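# dot11MacClone subtree: MAC address clone settings (status, auto/manual
# source, cloned address, refresh) and the survey table of candidate MACs.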
dot11MacClone = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6))
dot11MacCloneTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 1), )
if mibBuilder.loadTexts: dot11MacCloneTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneTable.setDescription('')
dot11MacCloneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11MacCloneEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneEntry.setDescription('')
dot11MacCloneStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacCloneStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneStatus.setDescription('')
dot11MacCloneSource = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("disabled", 0), ("auto", 1), ("manual", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacCloneSource.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneSource.setDescription('')
dot11MacCloneMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 1, 1, 3), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacCloneMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneMacAddress.setDescription('')
dot11MacCloneAddressRefresh = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MacCloneAddressRefresh.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneAddressRefresh.setDescription('')
dot11MacCloneSurveryTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 2), )
if mibBuilder.loadTexts: dot11MacCloneSurveryTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneSurveryTable.setDescription('')
dot11MacCloneSurveryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 2, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11MacCloneSurveryIndex"))
if mibBuilder.loadTexts: dot11MacCloneSurveryEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneSurveryEntry.setDescription('')
dot11MacCloneSurveryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11MacCloneSurveryIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneSurveryIndex.setDescription('')
dot11MacCloneSurveryMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 6, 2, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11MacCloneSurveryMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MacCloneSurveryMacAddress.setDescription('')
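# dot11ZoneDefence subtree: zone defence control plus add/delete scalars and
# the read-only list of defended IP addresses.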
dot11ZoneDefence = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7))
dot11ZoneDefenceTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 1), )
if mibBuilder.loadTexts: dot11ZoneDefenceTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceTable.setDescription('')
dot11ZoneDefenceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11ZoneDefenceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceEntry.setDescription('')
dot11ZoneDefenceControl = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ZoneDefenceControl.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceControl.setDescription('')
dot11ZoneDefenceIpAddressAdd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressAdd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressAdd.setDescription('')
dot11ZoneDefenceIpAddressDelete = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressDelete.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressDelete.setDescription('')
dot11ZoneDefenceIpAddressListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 2), )
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressListTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressListTable.setDescription('')
dot11ZoneDefenceIpAddressListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11ZoneDefenceIpAddressListIndex"))
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressListEntry.setDescription('')
dot11ZoneDefenceIpAddressListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)))
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressListIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressListIndex.setDescription('')
dot11ZoneDefenceIpAddressList = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 2, 1, 3, 3, 7, 2, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressList.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ZoneDefenceIpAddressList.setDescription('')
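# 'advance' branch. dhcpServer subtree: DHCP server control, dynamic and
# static address-pool parameters, and the current lease lists.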
advance = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3))
dhcpServer = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1))
dhcpServerControl = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dhcpServerControl.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerControl.setDescription('')
dhcpServerDynamicParameter = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2))
dhcpServerDynamicControl = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dhcpServerDynamicControl.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerDynamicControl.setDescription('')
dhcpServerDomainNameStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("both", 0), ("dynamic", 1), ("static", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dhcpServerDomainNameStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerDomainNameStatus.setDescription('')
dhcpServerDynamicTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2), )
if mibBuilder.loadTexts: dhcpServerDynamicTable.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerDynamicTable.setDescription('')
dhcpServerDynamicEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1), ).setIndexNames((0, "DAP-3520-v115", "dynamicIndex"))
if mibBuilder.loadTexts: dhcpServerDynamicEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerDynamicEntry.setDescription('')
dynamicIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dynamicIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicIndex.setDescription('')
dynamicIpPoolStart = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicIpPoolStart.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicIpPoolStart.setDescription('')
dynamicIpPoolEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicIpPoolEnd.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicIpPoolEnd.setDescription('')
dynamicMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicMask.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicMask.setDescription('')
dynamicGateway = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicGateway.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicGateway.setDescription('')
dynamicWins = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicWins.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicWins.setDescription('')
dynamicDns = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicDns.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicDns.setDescription('')
dynamicDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicDomainName.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicDomainName.setDescription('')
dynamicLeaseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 2, 2, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(60, 31536000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dynamicLeaseTime.setStatus('mandatory')
if mibBuilder.loadTexts: dynamicLeaseTime.setDescription('')
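# Static DHCP reservations: per-entry status, host name, IP, MAC, mask,
# gateway, DNS, WINS and domain name.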
dhcpServerStaticParameter = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3))
dhcpServerStaticControl = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dhcpServerStaticControl.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerStaticControl.setDescription('')
dhcpServerStaticDelete = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 25))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dhcpServerStaticDelete.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerStaticDelete.setDescription('')
dhcpServerStaticTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2), )
if mibBuilder.loadTexts: dhcpServerStaticTable.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerStaticTable.setDescription('')
dhcpServerStaticEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1), ).setIndexNames((0, "DAP-3520-v115", "staticIndex"))
if mibBuilder.loadTexts: dhcpServerStaticEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dhcpServerStaticEntry.setDescription('')
staticIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: staticIndex.setStatus('mandatory')
if mibBuilder.loadTexts: staticIndex.setDescription('')
staticEntryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticEntryStatus.setStatus('mandatory')
if mibBuilder.loadTexts: staticEntryStatus.setDescription('')
staticHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticHostName.setStatus('mandatory')
if mibBuilder.loadTexts: staticHostName.setDescription('')
staticIP = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticIP.setStatus('mandatory')
if mibBuilder.loadTexts: staticIP.setDescription('')
staticMac = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 5), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticMac.setStatus('mandatory')
if mibBuilder.loadTexts: staticMac.setDescription('')
staticMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticMask.setStatus('mandatory')
if mibBuilder.loadTexts: staticMask.setDescription('')
staticGateway = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticGateway.setStatus('mandatory')
if mibBuilder.loadTexts: staticGateway.setDescription('')
staticDns = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticDns.setStatus('mandatory')
if mibBuilder.loadTexts: staticDns.setDescription('')
staticWins = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticWins.setStatus('mandatory')
if mibBuilder.loadTexts: staticWins.setDescription('')
staticDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 3, 2, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: staticDomainName.setStatus('mandatory')
if mibBuilder.loadTexts: staticDomainName.setDescription('')
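# Current DHCP lease lists: refresh scalars and read-only tables of
# dynamically and statically assigned clients.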
dhcpServerCurrentList = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4))
refreshCurrentDynamicList = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: refreshCurrentDynamicList.setStatus('mandatory')
if mibBuilder.loadTexts: refreshCurrentDynamicList.setDescription('')
currentDynamicTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 2), )
if mibBuilder.loadTexts: currentDynamicTable.setStatus('mandatory')
if mibBuilder.loadTexts: currentDynamicTable.setDescription('')
currentDynamicEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 2, 1), ).setIndexNames((0, "DAP-3520-v115", "currentDynamicIndex"))
if mibBuilder.loadTexts: currentDynamicEntry.setStatus('mandatory')
if mibBuilder.loadTexts: currentDynamicEntry.setDescription('')
currentDynamicIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: currentDynamicIndex.setStatus('mandatory')
if mibBuilder.loadTexts: currentDynamicIndex.setDescription('')
currentDynamicHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 2, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: currentDynamicHostName.setStatus('mandatory')
if mibBuilder.loadTexts: currentDynamicHostName.setDescription('')
currentDynamicMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 2, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentDynamicMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: currentDynamicMacAddress.setDescription('')
currentDynamicAssignedIP = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentDynamicAssignedIP.setStatus('mandatory')
if mibBuilder.loadTexts: currentDynamicAssignedIP.setDescription('')
currentDynamicLease = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 2, 1, 5), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentDynamicLease.setStatus('mandatory')
if mibBuilder.loadTexts: currentDynamicLease.setDescription('')
refreshCurrentStaticList = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: refreshCurrentStaticList.setStatus('mandatory')
if mibBuilder.loadTexts: refreshCurrentStaticList.setDescription('')
currentStaticTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 4), )
if mibBuilder.loadTexts: currentStaticTable.setStatus('mandatory')
if mibBuilder.loadTexts: currentStaticTable.setDescription('')
currentStaticEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 4, 1), ).setIndexNames((0, "DAP-3520-v115", "currentStaticIndex"))
if mibBuilder.loadTexts: currentStaticEntry.setStatus('mandatory')
if mibBuilder.loadTexts: currentStaticEntry.setDescription('')
currentStaticIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 4, 1, 1), Integer32())
if mibBuilder.loadTexts: currentStaticIndex.setStatus('mandatory')
if mibBuilder.loadTexts: currentStaticIndex.setDescription('')
currentStaticHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 4, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentStaticHostName.setStatus('mandatory')
if mibBuilder.loadTexts: currentStaticHostName.setDescription('')
currentStaticMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 4, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentStaticMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: currentStaticMacAddress.setDescription('')
currentStaticAssignedIP = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 1, 4, 4, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentStaticAssignedIP.setStatus('mandatory')
if mibBuilder.loadTexts: currentStaticAssignedIP.setDescription('')
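# ieee802dot11Grouping subtree: per-interface load balancing, user limit and
# link integrate (link integrity) switches.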
ieee802dot11Grouping = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 2))
dot11GroupingTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 2, 1), )
if mibBuilder.loadTexts: dot11GroupingTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupingTable.setDescription('')
dot11GroupingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11GroupingEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupingEntry.setDescription('')
dot11LoadBalance = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11LoadBalance.setStatus('mandatory')
if mibBuilder.loadTexts: dot11LoadBalance.setDescription('')
dot11UserLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11UserLimit.setStatus('mandatory')
if mibBuilder.loadTexts: dot11UserLimit.setDescription('')
dot11LinkIntegrate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11LinkIntegrate.setStatus('mandatory')
if mibBuilder.loadTexts: dot11LinkIntegrate.setDescription('')
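# ieee802dot11MultiSsid subtree: multi-SSID enable/priority state plus
# per-SSID security, WMM, auto-rekey and RADIUS settings.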
ieee802dot11MultiSsid = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3))
dot11MssidStateTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 1), )
if mibBuilder.loadTexts: dot11MssidStateTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidStateTable.setDescription('')
dot11MssidStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11MssidStateEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidStateEntry.setDescription('')
dot11MssidState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidState.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidState.setDescription('')
dot11MssidPriorityState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidPriorityState.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidPriorityState.setDescription('')
dot11MssidTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3), )
if mibBuilder.loadTexts: dot11MssidTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidTable.setDescription('')
dot11MssidEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DAP-3520-v115", "dot11MssidIndex"))
if mibBuilder.loadTexts: dot11MssidEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidEntry.setDescription('')
dot11MssidIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11MssidIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidIndex.setDescription('')
dot11MssIndividualState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssIndividualState.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssIndividualState.setDescription('')
dot11MssidSsid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidSsid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidSsid.setDescription('')
dot11MssidSuppress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidSuppress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidSuppress.setDescription('')
dot11MssidAuthentication = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("opensystem", 1), ("sharedkey", 2), ("opensystem-sharedkey", 3), ("wpa-psk", 4), ("wpa-eap", 5), ("wpa2-psk", 6), ("wpa2-eap", 7), ("wpa2-auto-psk", 8), ("wpa2-auto-eap", 9), ("dot1x", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidAuthentication.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidAuthentication.setDescription('')
dot11MssidEncryption = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidEncryption.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidEncryption.setDescription('')
dot11MssidWepKeyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("first", 1), ("second", 2), ("third", 3), ("fourth", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidWepKeyIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidWepKeyIndex.setDescription('')
dot11MssidWepKey = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 8), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidWepKey.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidWepKey.setDescription('')
dot11MssidCipherType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("auto", 1), ("aes", 2), ("tkip", 3), ("wep", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidCipherType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidCipherType.setDescription('')
dot11MssidPassPhrase = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidPassPhrase.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidPassPhrase.setDescription('')
dot11MssidKeyType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ascii", 1), ("hex", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidKeyType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidKeyType.setDescription('')
dot11MssidWmm = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidWmm.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidWmm.setDescription('')
dot11MssidGroupKeyUpdateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(300, 9999999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidGroupKeyUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidGroupKeyUpdateInterval.setDescription('')
dot11MssidPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidPriority.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidPriority.setDescription('')
dot11MssidAutoRekeyControl = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidAutoRekeyControl.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidAutoRekeyControl.setDescription('disable (Manual), enable (Periodical Key Change)')
dot11MssidAutoRekeyStartWeek = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("sun", 0), ("mon", 1), ("tue", 2), ("wed", 3), ("thu", 4), ("fri", 5), ("sat", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidAutoRekeyStartWeek.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidAutoRekeyStartWeek.setDescription('')
dot11MssidAutoRekeyStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 19), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidAutoRekeyStartTime.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidAutoRekeyStartTime.setDescription("Please follow this format: 'xx:xx'. For example: 23:45")
dot11MssidAutoRekeyTimeInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 168))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidAutoRekeyTimeInterval.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidAutoRekeyTimeInterval.setDescription('')
dot11MssidAutoRekeyPassPhrase = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 3, 1, 21), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11MssidAutoRekeyPassPhrase.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidAutoRekeyPassPhrase.setDescription('')
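# dot11MssidSsid and the other per-SSID columns above are indexed by ifIndex
# and dot11MssidIndex. A minimal write-side sketch, comment only and not
# executed on import, assuming pysnmp's high-level API; the agent address,
# community 'private', index values (1, 1) and SSID string are hypothetical
# placeholders:
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity,
#                             OctetString, setCmd)
#
#   errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
#       SnmpEngine(),
#       CommunityData('private'),
#       UdpTransportTarget(('192.0.2.1', 161)),
#       ContextData(),
#       ObjectType(ObjectIdentity('DAP-3520-v115', 'dot11MssidSsid', 1, 1),
#                  OctetString('guest-network'))))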
dot11MssidRADIUSTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 4), )
if mibBuilder.loadTexts: dot11MssidRADIUSTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidRADIUSTable.setDescription('')
dot11MssidRADIUSEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 4, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11MssidRADIUSIndex"), (0, "DAP-3520-v115", "dot11MssidIndex"))
if mibBuilder.loadTexts: dot11MssidRADIUSEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidRADIUSEntry.setDescription('')
dot11MssidRADIUSIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 4, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11MssidRADIUSIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidRADIUSIndex.setDescription('')
dot11MssidRADIUSServer = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 4, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidRADIUSServer.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidRADIUSServer.setDescription('')
dot11MssidRADIUSPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidRADIUSPort.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidRADIUSPort.setDescription('')
dot11MssidRADIUSSecret = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 4, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidRADIUSSecret.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidRADIUSSecret.setDescription('')
dot11MssidRadiusKeyUpdateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 3, 4, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11MssidRadiusKeyUpdateInterval.setStatus('mandatory')
if mibBuilder.loadTexts: dot11MssidRadiusKeyUpdateInterval.setDescription('')
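# ieee802dot11RogueApDetection subtree: AP survey controls and results, plus
# the recorded AP list with its new/valid/neighborhood/rogue classification.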
ieee802dot11RogueApDetection = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4))
dot11RogueApSurvey = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4))
dot11RogueApSurveyRefresh = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApSurveyRefresh.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyRefresh.setDescription('')
dot11RogueApAddtoValid = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApAddtoValid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApAddtoValid.setDescription('')
dot11RogueApAddtoNeighbor = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApAddtoNeighbor.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApAddtoNeighbor.setDescription('')
dot11RogueApAddtoRouge = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApAddtoRouge.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApAddtoRouge.setDescription('')
dot11RogueApAddtoNew = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApAddtoNew.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApAddtoNew.setDescription('')
dot11RogueApAllNewNodesAsValid = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApAllNewNodesAsValid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApAllNewNodesAsValid.setDescription('')
dot11RogueApAllNewNodesAsRogue = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApAllNewNodesAsRogue.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApAllNewNodesAsRogue.setDescription('')
dot11RogueApSurveyTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4), )
if mibBuilder.loadTexts: dot11RogueApSurveyTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyTable.setDescription('')
dot11RogueApSurveyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11RogueApSurveyIndex"))
if mibBuilder.loadTexts: dot11RogueApSurveyEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyEntry.setDescription('')
dot11RogueApSurveyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11RogueApSurveyIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyIndex.setDescription('')
dot11RogueApSurveyChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApSurveyChannel.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyChannel.setDescription('')
dot11RogueApSurveyBssid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApSurveyBssid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyBssid.setDescription('')
dot11RogueApSurveyMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("dot11g", 1), ("dot11n", 2), ("dot11a", 3), ("dot11b", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApSurveyMode.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyMode.setDescription('')
dot11RogueApSurveySsid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApSurveySsid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveySsid.setDescription('')
dot11RogueApSurveyLastseen = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApSurveyLastseen.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyLastseen.setDescription('')
dot11RogueApSurveyType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("new", 1), ("valid", 2), ("neighborhood", 3), ("rogue", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApSurveyType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyType.setDescription('')
dot11RogueApSurveyStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 4, 4, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 1))).clone(namedValues=NamedValues(("up", 2), ("down", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApSurveyStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApSurveyStatus.setDescription('')
dot11RogueApListRecord = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5))
dot11RogueApDeleteFromRecord = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11RogueApDeleteFromRecord.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApDeleteFromRecord.setDescription('')
dot11RogueApListRecordTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2), )
if mibBuilder.loadTexts: dot11RogueApListRecordTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordTable.setDescription('')
dot11RogueApListRecordEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11RogueApListRecordIndex"))
if mibBuilder.loadTexts: dot11RogueApListRecordEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordEntry.setDescription('')
dot11RogueApListRecordIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11RogueApListRecordIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordIndex.setDescription('')
dot11RogueApListRecordChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApListRecordChannel.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordChannel.setDescription('')
dot11RogueApListRecordBssid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApListRecordBssid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordBssid.setDescription('')
dot11RogueApListRecordMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4))).clone(namedValues=NamedValues(("dot11a", 0), ("dot11b", 2), ("dot11g", 3), ("dot11n", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApListRecordMode.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordMode.setDescription('')
dot11RogueApListRecordSsid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApListRecordSsid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordSsid.setDescription('')
dot11RogueApListRecordLastseen = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApListRecordLastseen.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordLastseen.setDescription('')
dot11RogueApListRecordType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("new", 1), ("valid", 2), ("neighborhood", 3), ("rogue", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApListRecordType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordType.setDescription('')
dot11RogueApListRecordStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 4, 5, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 1))).clone(namedValues=NamedValues(("up", 2), ("down", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11RogueApListRecordStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11RogueApListRecordStatus.setDescription('')
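# ieee802dot11VLAN subtree: VLAN status and mode, per-VLAN membership
# (mgmt/LAN/primary/SSIDs/WDS), PVID settings and the port list.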
ieee802dot11VLAN = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6))
dot11VLANParameter = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1))
dot11VlanStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11VlanStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11VlanStatus.setDescription('')
dot11VlanMode = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("static", 0), ("dynamic", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11VlanMode.setStatus('mandatory')
if mibBuilder.loadTexts: dot11VlanMode.setDescription('')
dot11GroupVlanListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3), )
if mibBuilder.loadTexts: dot11GroupVlanListTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListTable.setDescription('')
dot11GroupVlanListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11GroupVlanListIndex"))
if mibBuilder.loadTexts: dot11GroupVlanListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListEntry.setDescription('')
dot11GroupVlanListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11GroupVlanListIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListIndex.setDescription('')
dot11GroupVlanListVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListVid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListVid.setDescription('')
dot11GroupVlanListName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListName.setDescription('')
dot11GroupVlanListMgmt = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListMgmt.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListMgmt.setDescription('')
dot11GroupVlanListLan = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListLan.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListLan.setDescription('')
dot11GroupVlanListPrimary = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListPrimary.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListPrimary.setDescription('')
dot11GroupVlanListMssid1 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListMssid1.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListMssid1.setDescription('')
dot11GroupVlanListMssid2 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListMssid2.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListMssid2.setDescription('')
dot11GroupVlanListMssid3 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListMssid3.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListMssid3.setDescription('')
dot11GroupVlanListWds1 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListWds1.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListWds1.setDescription('')
dot11GroupVlanListWds2 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListWds2.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListWds2.setDescription('')
dot11GroupVlanListWds3 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListWds3.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListWds3.setDescription('')
dot11GroupVlanListWds4 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("tag", 1), ("untag", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11GroupVlanListWds4.setStatus('mandatory')
if mibBuilder.loadTexts: dot11GroupVlanListWds4.setDescription('')
dot11VlanListSurveydelete = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 1, 3, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("delete", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11VlanListSurveydelete.setStatus('mandatory')
if mibBuilder.loadTexts: dot11VlanListSurveydelete.setDescription('')
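# PVID settings: auto-assign switch and per-port PVID values (1..4094) for
# mgmt, LAN, primary SSID, multi-SSIDs and WDS links.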
dot11PvidSettingRecord = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2))
dot11PvidAutoAssignStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidAutoAssignStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidAutoAssignStatus.setDescription('')
dot11PvidSettingTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2), )
if mibBuilder.loadTexts: dot11PvidSettingTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingTable.setDescription('')
dot11PvidSettingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11PvidSettingIndex"))
if mibBuilder.loadTexts: dot11PvidSettingEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingEntry.setDescription('')
dot11PvidSettingIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11PvidSettingIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingIndex.setDescription('')
dot11PvidSettingMgmt = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingMgmt.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingMgmt.setDescription('')
dot11PvidSettingLan = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingLan.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingLan.setDescription('')
dot11PvidSettingPrimary = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingPrimary.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingPrimary.setDescription('')
dot11PvidSettingMssid1 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingMssid1.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingMssid1.setDescription('')
dot11PvidSettingMssid2 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingMssid2.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingMssid2.setDescription('')
dot11PvidSettingMssid3 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingMssid3.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingMssid3.setDescription('')
dot11PvidSettingWds1 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingWds1.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingWds1.setDescription('')
dot11PvidSettingWds2 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingWds2.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingWds2.setDescription('')
dot11PvidSettingWds3 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingWds3.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingWds3.setDescription('')
dot11PvidSettingWds4 = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 2, 2, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PvidSettingWds4.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PvidSettingWds4.setDescription('')
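# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# The PVID columns above are read-write, so an SNMP manager can assign the
# 802.1Q PVID of an interface by writing the matching column instance.  This is
# a minimal sketch using pysnmp's synchronous hlapi (assumed available); the
# agent address, community string, row index and VLAN ID are illustrative
# assumptions only.
def example_set_mgmt_pvid(agent='192.0.2.1', community='private', row=1, vid=100):
    """Write dot11PvidSettingMgmt.<row> with an illustrative VLAN ID."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    error_indication, error_status, _, var_binds = next(
        setCmd(SnmpEngine(), CommunityData(community),
               UdpTransportTarget((agent, 161)), ContextData(),
               # column OID of dot11PvidSettingMgmt (defined above) + row index
               ObjectType(ObjectIdentity('1.3.6.1.4.1.171.10.37.37.3.6.2.2.1.2.%d' % row),
                          Integer32(vid))))
    if error_indication or error_status:
        raise RuntimeError(str(error_indication or error_status))
    return var_binds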
dot11PortListRecord = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3))
dot11PortListRefresh = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11PortListRefresh.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PortListRefresh.setDescription('')
dot11PortListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3, 2), )
if mibBuilder.loadTexts: dot11PortListTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PortListTable.setDescription('')
dot11PortListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3, 2, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11PortListIndex"))
if mibBuilder.loadTexts: dot11PortListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PortListEntry.setDescription('')
dot11PortListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11PortListIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PortListIndex.setDescription('')
dot11PortListTagVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3, 2, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11PortListTagVid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PortListTagVid.setDescription('')
dot11PortListUntagVid = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3, 2, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11PortListUntagVid.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PortListUntagVid.setDescription('')
dot11PortLisPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 6, 3, 2, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11PortLisPortName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11PortLisPortName.setDescription('')
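# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# dot11PortListTable above is read-only; a manager would typically trigger
# dot11PortListRefresh and then walk the table.  The sketch below walks the
# tagged/untagged VID columns with pysnmp's nextCmd; the agent address and
# community string are illustrative assumptions.
def example_walk_port_list(agent='192.0.2.1', community='public'):
    """Return (tagVid, untagVid) strings for every row of dot11PortListTable."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    rows = []
    for error_indication, error_status, _, var_binds in nextCmd(
            SnmpEngine(), CommunityData(community),
            UdpTransportTarget((agent, 161)), ContextData(),
            # dot11PortListTagVid and dot11PortListUntagVid columns defined above
            ObjectType(ObjectIdentity('1.3.6.1.4.1.171.10.37.37.3.6.3.2.1.2')),
            ObjectType(ObjectIdentity('1.3.6.1.4.1.171.10.37.37.3.6.3.2.1.3')),
            lexicographicMode=False):
        if error_indication or error_status:
            break
        rows.append(tuple(value.prettyPrint() for _, value in var_binds))
    return rows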
ieee802dot11Qos = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7))
dot11QosStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosStatus.setDescription('')
dot11QosPriorityClassifiers = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2))
dot11QosHttp = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosHttp.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosHttp.setDescription('')
dot11QosAutomatic = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosAutomatic.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosAutomatic.setDescription('')
dot11QosRuleStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRuleStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRuleStatus.setDescription('')
dot11QosRulesDelete = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesDelete.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesDelete.setDescription('')
dot11QosRulesTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5), )
if mibBuilder.loadTexts: dot11QosRulesTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesTable.setDescription('')
dot11QosRulesEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11QosRulesIndex"))
if mibBuilder.loadTexts: dot11QosRulesEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesEntry.setDescription('')
dot11QosRulesIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11QosRulesIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesIndex.setDescription('')
dot11QosRulesState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesState.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesState.setDescription('')
dot11QosRulesName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesName.setDescription('')
dot11QosRulesPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("vo", 0), ("vi", 1), ("be", 2), ("bk", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesPriority.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesPriority.setDescription('')
dot11QosRulesProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesProtocol.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesProtocol.setDescription('')
dot11QosRulesProtocolType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("any", 0), ("tcp", 1), ("udp", 2), ("both", 3), ("icmp", 4), ("other", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesProtocolType.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesProtocolType.setDescription('')
dot11QosRulesHostOneIpStart = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostOneIpStart.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostOneIpStart.setDescription('')
dot11QosRulesHostOneIpEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostOneIpEnd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostOneIpEnd.setDescription('')
dot11QosRulesHostOneIpRange = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostOneIpRange.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostOneIpRange.setDescription('')
dot11QosRulesHostOnePortStart = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostOnePortStart.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostOnePortStart.setDescription('')
dot11QosRulesHostOnePortEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostOnePortEnd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostOnePortEnd.setDescription('')
dot11QosRulesHostOnePortRange = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostOnePortRange.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostOnePortRange.setDescription('')
dot11QosRulesHostTwoIpStart = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 13), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostTwoIpStart.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostTwoIpStart.setDescription('')
dot11QosRulesHostTwoIpEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 14), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostTwoIpEnd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostTwoIpEnd.setDescription('')
dot11QosRulesHostTwoIpRange = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostTwoIpRange.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostTwoIpRange.setDescription('')
dot11QosRulesHostTwoPortStart = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostTwoPortStart.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostTwoPortStart.setDescription('')
dot11QosRulesHostTwoPortEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostTwoPortEnd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostTwoPortEnd.setDescription('')
dot11QosRulesHostTwoPortRange = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 7, 2, 5, 1, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11QosRulesHostTwoPortRange.setStatus('mandatory')
if mibBuilder.loadTexts: dot11QosRulesHostTwoPortRange.setDescription('')
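# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# Each QoS rule occupies one row of dot11QosRulesTable above.  The sketch below
# reads the name and priority columns of a single rule with a GET; the agent
# address, community string and rule index are illustrative assumptions.
def example_get_qos_rule(agent='192.0.2.1', community='public', index=1):
    """Return (dot11QosRulesName, dot11QosRulesPriority) for one rule row."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    error_indication, error_status, _, var_binds = next(
        getCmd(SnmpEngine(), CommunityData(community),
               UdpTransportTarget((agent, 161)), ContextData(),
               ObjectType(ObjectIdentity('1.3.6.1.4.1.171.10.37.37.3.7.2.5.1.3.%d' % index)),
               ObjectType(ObjectIdentity('1.3.6.1.4.1.171.10.37.37.3.7.2.5.1.4.%d' % index))))
    if error_indication or error_status:
        raise RuntimeError(str(error_indication or error_status))
    return tuple(value.prettyPrint() for _, value in var_binds)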
capwap = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8))
capwapWlanSwitchSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1))
capwapWtpStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: capwapWtpStatus.setStatus('mandatory')
if mibBuilder.loadTexts: capwapWtpStatus.setDescription('')
capwapWtpName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 2), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: capwapWtpName.setStatus('mandatory')
if mibBuilder.loadTexts: capwapWtpName.setDescription('')
capwapWtpLocationData = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: capwapWtpLocationData.setStatus('mandatory')
if mibBuilder.loadTexts: capwapWtpLocationData.setDescription('')
capwapWtpConnectingSwitchIP = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: capwapWtpConnectingSwitchIP.setStatus('mandatory')
if mibBuilder.loadTexts: capwapWtpConnectingSwitchIP.setDescription('')
capwapWtpConnectingSwitchName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 5), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: capwapWtpConnectingSwitchName.setStatus('mandatory')
if mibBuilder.loadTexts: capwapWtpConnectingSwitchName.setDescription('')
capwapWtpSwitchIpAddressDelete = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: capwapWtpSwitchIpAddressDelete.setStatus('mandatory')
if mibBuilder.loadTexts: capwapWtpSwitchIpAddressDelete.setDescription('')
capwapWtpSwitchIpAddressAdd = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: capwapWtpSwitchIpAddressAdd.setStatus('mandatory')
if mibBuilder.loadTexts: capwapWtpSwitchIpAddressAdd.setDescription('')
wtpSwitchAddressListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 8), )
if mibBuilder.loadTexts: wtpSwitchAddressListTable.setStatus('mandatory')
if mibBuilder.loadTexts: wtpSwitchAddressListTable.setDescription('')
wtpSwitchAddressListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 8, 1), ).setIndexNames((0, "DAP-3520-v115", "wtpSwitchAddressIndex"))
if mibBuilder.loadTexts: wtpSwitchAddressListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wtpSwitchAddressListEntry.setDescription('')
wtpSwitchAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 8, 1, 1), Integer32())
if mibBuilder.loadTexts: wtpSwitchAddressIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wtpSwitchAddressIndex.setDescription('')
wtpSwitchIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 8, 1, 8, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wtpSwitchIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: wtpSwitchIpAddress.setDescription('')
ieee802dot11Schedule = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10))
ieee802dot11ScheduleSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1))
ieee802dot11ScheduleStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 1))
dot11ScheduleStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleStatus.setDescription('')
ieee802dot11ScheduleRuleSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2))
dot11ScheduleRuleName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleRuleName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleRuleName.setDescription('')
dot11ScheduleDaysSelect = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleDaysSelect.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleDaysSelect.setDescription('Bitmask of selected days: Sun=1, Mon=2, Tue=4, Wed=8, Thu=16, Fri=32, Sat=64; 0 selects the whole week')
dot11ScheduleAllDaySelect = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleAllDaySelect.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleAllDaySelect.setDescription('')
dot11ScheduleRuleStartTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleRuleStartTime.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleRuleStartTime.setDescription("Start time in 'hh:mm' format, for example: 02:00")
dot11ScheduleRuleEndTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleRuleEndTime.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleRuleEndTime.setDescription("End time in 'hh:mm' format, for example: 23:45")
dot11ScheduleAction = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("del", 0), ("add", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleAction.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleAction.setDescription('')
dot11ScheduleSSIDIndex = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleSSIDIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleSSIDIndex.setDescription('')
dot11ScheduleNodeStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleNodeStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleNodeStatus.setDescription('')
dot11ScheduleOverNight = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 1, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleOverNight.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleOverNight.setDescription('')
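# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# A schedule rule is built by filling in the scalar objects above and then
# writing dot11ScheduleAction = add(1).  The sketch bundles the writes into one
# SET PDU for brevity; whether the agent requires separate SETs, as well as the
# agent address, community string, rule name and times, are assumptions.  The
# days value 62 is the Mon..Fri bitmask described at dot11ScheduleDaysSelect.
def example_add_schedule_rule(agent='192.0.2.1', community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, OctetString, setCmd)
    base = '1.3.6.1.4.1.171.10.37.37.3.10.1.2.'
    error_indication, error_status, _, _ = next(
        setCmd(SnmpEngine(), CommunityData(community),
               UdpTransportTarget((agent, 161)), ContextData(),
               ObjectType(ObjectIdentity(base + '1.0'), OctetString('office-hours')),  # rule name
               ObjectType(ObjectIdentity(base + '2.0'), Integer32(62)),                # Mon..Fri
               ObjectType(ObjectIdentity(base + '4.0'), OctetString('08:00')),         # start time
               ObjectType(ObjectIdentity(base + '5.0'), OctetString('18:00')),         # end time
               ObjectType(ObjectIdentity(base + '7.0'), Integer32(1))))                # action = add
    if error_indication or error_status:
        raise RuntimeError(str(error_indication or error_status))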
ieee802dot11ScheduleList = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2))
dot11ScheduleListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1), )
if mibBuilder.loadTexts: dot11ScheduleListTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListTable.setDescription('')
dot11ScheduleListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11ScheduleListIndex"))
if mibBuilder.loadTexts: dot11ScheduleListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListEntry.setDescription('')
dot11ScheduleListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11ScheduleListIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListIndex.setDescription('')
dot11ScheduleListName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListName.setDescription('')
dot11ScheduleListDays = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListDays.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListDays.setDescription('')
dot11ScheduleListTimeFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListTimeFrame.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListTimeFrame.setDescription("Time frame in 'hh:mm-hh:mm' format (for example: 02:00-24:00); the literal string 'allday' is also accepted")
dot11ScheduleListWirelessStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListWirelessStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListWirelessStatus.setDescription('')
dot11ScheduleListSSIDIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListSSIDIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListSSIDIndex.setDescription('')
dot11ScheduleListSSID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListSSID.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListSSID.setDescription('')
dot11ScheduleListNodeStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListNodeStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListNodeStatus.setDescription('')
dot11ScheduleListOverNight = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 10, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ScheduleListOverNight.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ScheduleListOverNight.setDescription('')
ieee802dot11APArray = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11))
ieee802dot11APArraySetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1))
ieee802dot11APArrayStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 1))
dot11APArrayStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArrayStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayStatus.setDescription('')
dot11APArrayModeSelect = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("master", 1), ("masterbackup", 2), ("slave", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArrayModeSelect.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayModeSelect.setDescription('')
dot11ApArrayName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ApArrayName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ApArrayName.setDescription('')
dot11ApArrayPassword = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ApArrayPassword.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ApArrayPassword.setDescription('')
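# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# The four scalars above enable AP Array membership and pick the role.  The
# sketch configures the device as array master; the mode value 1 comes from the
# named values of dot11APArrayModeSelect, while the agent address, community
# string, array name and password are illustrative assumptions, as is issuing
# all four writes in one SET PDU.
def example_enable_ap_array_master(agent='192.0.2.1', community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, OctetString, setCmd)
    base = '1.3.6.1.4.1.171.10.37.37.3.11.1.1.'
    error_indication, error_status, _, _ = next(
        setCmd(SnmpEngine(), CommunityData(community),
               UdpTransportTarget((agent, 161)), ContextData(),
               ObjectType(ObjectIdentity(base + '1.0'), Integer32(1)),            # status = enable
               ObjectType(ObjectIdentity(base + '2.0'), Integer32(1)),            # mode = master
               ObjectType(ObjectIdentity(base + '3.0'), OctetString('array-1')),  # array name
               ObjectType(ObjectIdentity(base + '4.0'), OctetString('secret'))))  # array password
    if error_indication or error_status:
        raise RuntimeError(str(error_indication or error_status))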
ieee802dot11APArrayScans = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2))
ieee802dot11APArrayScanSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1))
dot11ApArrayScan = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("refresh", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ApArrayScan.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ApArrayScan.setDescription('')
ieee802dot11APArrayScanList = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3))
dot11APArrayScanListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1), )
if mibBuilder.loadTexts: dot11APArrayScanListTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListTable.setDescription('')
dot11APArrayScanListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11APArrayScanListIndex"))
if mibBuilder.loadTexts: dot11APArrayScanListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListEntry.setDescription('')
dot11APArrayScanListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11APArrayScanListIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListIndex.setDescription('')
dot11APArrayScanListName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayScanListName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListName.setDescription('')
dot11APArrayScanListMasterIP = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayScanListMasterIP.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListMasterIP.setDescription('')
dot11APArrayScanListMac = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayScanListMac.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListMac.setDescription('')
dot11APArrayScanListMasterNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayScanListMasterNumber.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListMasterNumber.setDescription('')
dot11APArrayScanListBackupNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayScanListBackupNumber.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListBackupNumber.setDescription('')
dot11APArrayScanListSlaverNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayScanListSlaverNumber.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListSlaverNumber.setDescription('')
dot11APArrayScanListTotal = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 1, 2, 1, 3, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayScanListTotal.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayScanListTotal.setDescription('')
ieee802dot11APArrayMeberList = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2))
dot11APArrayMeberListTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2, 1), )
if mibBuilder.loadTexts: dot11APArrayMeberListTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayMeberListTable.setDescription('')
dot11APArrayMeberListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2, 1, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11APArrayMeberListIndex"))
if mibBuilder.loadTexts: dot11APArrayMeberListEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayMeberListEntry.setDescription('')
dot11APArrayMeberListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11APArrayMeberListIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayMeberListIndex.setDescription('')
dot11APArrayMeberListRole = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("master", 1), ("masterbackup", 2), ("slave", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayMeberListRole.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayMeberListRole.setDescription('')
dot11APArrayMeberListIP = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2, 1, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayMeberListIP.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayMeberListIP.setDescription('')
dot11APArrayMeberListMac = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2, 1, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayMeberListMac.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayMeberListMac.setDescription('')
dot11APArrayMeberListLoacation = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 2, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11APArrayMeberListLoacation.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArrayMeberListLoacation.setDescription('')
ieee802dot11APArraySyncParametersStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3))
dot11APArraySyncParametersStatusTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1), )
if mibBuilder.loadTexts: dot11APArraySyncParametersStatusTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncParametersStatusTable.setDescription('')
dot11APArraySyncParametersStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11APArraySyncParametersStatusIndex"))
if mibBuilder.loadTexts: dot11APArraySyncParametersStatusEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncParametersStatusEntry.setDescription('')
dot11APArraySyncParametersStatusIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11APArraySyncParametersStatusIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncParametersStatusIndex.setDescription('')
dot11APArraySyncSSIDStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncSSIDStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncSSIDStatus.setDescription('')
dot11APArraySyncSsidHiddenStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncSsidHiddenStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncSsidHiddenStatus.setDescription('')
dot11APArraySyncAutoChannelStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncAutoChannelStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncAutoChannelStatus.setDescription('')
dot11APArraySyncChannelWidthStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncChannelWidthStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncChannelWidthStatus.setDescription('')
dot11APArraySyncSecurityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncSecurityStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncSecurityStatus.setDescription('')
dot11APArraySyncFixedRateStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncFixedRateStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncFixedRateStatus.setDescription('')
dot11APArraySyncBeaconIntervalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncBeaconIntervalStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncBeaconIntervalStatus.setDescription('')
dot11APArraySyncDtimStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncDtimStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncDtimStatus.setDescription('')
dot11APArraySyncTxPowerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncTxPowerStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncTxPowerStatus.setDescription('')
dot11APArraySyncWMMStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncWMMStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncWMMStatus.setDescription('')
dot11APArraySyncAckTimeoutStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncAckTimeoutStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncAckTimeoutStatus.setDescription('')
dot11APArraySyncShortGIStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncShortGIStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncShortGIStatus.setDescription('')
dot11APArraySyncIgmpSnoopStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncIgmpSnoopStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncIgmpSnoopStatus.setDescription('')
dot11APArraySyncConnectionLimitStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncConnectionLimitStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncConnectionLimitStatus.setDescription('')
dot11APArraySyncLinkIntegrityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncLinkIntegrityStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncLinkIntegrityStatus.setDescription('')
dot11APArraySyncMultiSsidStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidStatus.setDescription('')
dot11APArraySyncMultiSsidHiddenStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidHiddenStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidHiddenStatus.setDescription('')
dot11APArraySyncMultiSsidSecurityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidSecurityStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidSecurityStatus.setDescription('')
dot11APArraySyncMultiSsidWMMStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidWMMStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncMultiSsidWMMStatus.setDescription('')
dot11APArraySyncQOSStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncQOSStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncQOSStatus.setDescription('')
dot11APArraySyncVlanStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncVlanStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncVlanStatus.setDescription('')
dot11APArraySyncScheduleStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncScheduleStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncScheduleStatus.setDescription('')
dot11APArraySyncTimeStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncTimeStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncTimeStatus.setDescription('')
dot11APArraySyncLogStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncLogStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncLogStatus.setDescription('')
dot11APArraySyncAdminLimitStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncAdminLimitStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncAdminLimitStatus.setDescription('')
dot11APArraySyncSystemStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncSystemStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncSystemStatus.setDescription('')
dot11APArraySyncConsoleProtocolStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncConsoleProtocolStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncConsoleProtocolStatus.setDescription('')
dot11APArraySyncSnmpStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncSnmpStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncSnmpStatus.setDescription('')
dot11APArraySyncPingCtlStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncPingCtlStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncPingCtlStatus.setDescription('')
dot11APArraySyncDhcpStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncDhcpStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncDhcpStatus.setDescription('')
dot11APArraySyncLoginStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncLoginStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncLoginStatus.setDescription('')
dot11APArraySyncAclStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 33), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncAclStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncAclStatus.setDescription('')
dot11APArraySyncBandStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 11, 3, 1, 1, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11APArraySyncBandStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11APArraySyncBandStatus.setDescription('')
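# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# Every column of dot11APArraySyncParametersStatusTable above is an
# enable/disable flag selecting which settings the array synchronizes.  The
# sketch toggles SSID synchronization on for one row; the row index, agent
# address and community string are illustrative assumptions.
def example_enable_ssid_sync(agent='192.0.2.1', community='private', row=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    error_indication, error_status, _, _ = next(
        setCmd(SnmpEngine(), CommunityData(community),
               UdpTransportTarget((agent, 161)), ContextData(),
               # dot11APArraySyncSSIDStatus column (defined above) + row index
               ObjectType(ObjectIdentity('1.3.6.1.4.1.171.10.37.37.3.11.3.1.1.2.%d' % row),
                          Integer32(1))))  # enable
    if error_indication or error_status:
        raise RuntimeError(str(error_indication or error_status))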
ieee802dot11WebRedirection = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14))
ieee802dot11WebRedirectionSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 1))
dot11WebRedirectionStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionStatus.setDescription('')
ieee802dot11WebRedirectionAccountSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2))
dot11WebRedirectionAccountName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionAccountName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionAccountName.setDescription('')
dot11WebRedirectionAccountPasswd = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionAccountPasswd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionAccountPasswd.setDescription('')
dot11WebRedirectionAccountStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionAccountStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionAccountStatus.setDescription('')
dot11WebRedirectionAccountAction = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("del", 0), ("add", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionAccountAction.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionAccountAction.setDescription('')
dot11WebRedirectionAccountTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 5), )
if mibBuilder.loadTexts: dot11WebRedirectionAccountTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionAccountTable.setDescription('')
dot11WebRedirectionAccountEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 5, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11WebRedirectionIndex"))
if mibBuilder.loadTexts: dot11WebRedirectionAccountEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionAccountEntry.setDescription('')
dot11WebRedirectionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 5, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11WebRedirectionIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionIndex.setDescription('')
dot11WebRedirectionListAccountName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 5, 1, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionListAccountName.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionListAccountName.setDescription('')
dot11WebRedirectionListAccountPasswd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 5, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionListAccountPasswd.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionListAccountPasswd.setDescription('')
dot11WebRedirectionListAccountStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 14, 2, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11WebRedirectionListAccountStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11WebRedirectionListAccountStatus.setDescription('')
ieee802dot11ARPSpoofingPrevention = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15))
ieee802dot11ARPSpoofingPreventionSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 1))
dot11ARPSpoofingPreventionStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionStatus.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionStatus.setDescription('')
ieee802dot11ARPSpoofingPreventionAddressSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2))
dot11ARPSpoofingPreventionIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionIpAddress.setDescription('')
dot11ARPSpoofingPreventionMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 2), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionMacAddress.setDescription('')
dot11ARPSpoofingPreventionAction = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("del", 0), ("add", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionAction.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionAction.setDescription('')
dot11ARPSpoofingPreventionTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 4), )
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionTable.setDescription('')
dot11ARPSpoofingPreventionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 4, 1), ).setIndexNames((0, "DAP-3520-v115", "dot11ARPSpoofingPreventionIndex"))
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionEntry.setDescription('')
dot11ARPSpoofingPreventionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 4, 1, 1), Integer32())
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionIndex.setDescription('')
dot11ARPSpoofingPreventionListIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 4, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionListIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionListIpAddress.setDescription('')
dot11ARPSpoofingPreventionListMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 3, 15, 2, 4, 1, 3), MacAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionListMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ARPSpoofingPreventionListMacAddress.setDescription('')
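# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# A static IP/MAC binding is installed by writing the two address scalars above
# and then dot11ARPSpoofingPreventionAction = add(1).  The sketch bundles the
# writes into one SET PDU for brevity; the addresses, agent address and
# community string are illustrative assumptions, and the MAC is passed as a
# raw 6-byte OctetString.
def example_add_arp_binding(agent='192.0.2.1', community='private',
                            ip='192.0.2.10', mac_hex='000102030405'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, IpAddress, OctetString, setCmd)
    base = '1.3.6.1.4.1.171.10.37.37.3.15.2.'
    error_indication, error_status, _, _ = next(
        setCmd(SnmpEngine(), CommunityData(community),
               UdpTransportTarget((agent, 161)), ContextData(),
               ObjectType(ObjectIdentity(base + '1.0'), IpAddress(ip)),
               ObjectType(ObjectIdentity(base + '2.0'), OctetString(hexValue=mac_hex)),
               ObjectType(ObjectIdentity(base + '3.0'), Integer32(1))))  # action = add
    if error_indication or error_status:
        raise RuntimeError(str(error_indication or error_status))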
administration = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4))
users = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 1))
usersTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 1, 1), )
if mibBuilder.loadTexts: usersTable.setStatus('mandatory')
if mibBuilder.loadTexts: usersTable.setDescription('')
usersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 1, 1, 1), ).setIndexNames((0, "DAP-3520-v115", "usersIndex"))
if mibBuilder.loadTexts: usersEntry.setStatus('mandatory')
if mibBuilder.loadTexts: usersEntry.setDescription('')
usersIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 1, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: usersIndex.setStatus('mandatory')
if mibBuilder.loadTexts: usersIndex.setDescription('')
usersName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 1, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usersName.setStatus('mandatory')
if mibBuilder.loadTexts: usersName.setDescription('')
usersPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 1, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 12))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usersPassword.setStatus('mandatory')
if mibBuilder.loadTexts: usersPassword.setDescription('')
device = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 2))
deviceRestart = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("reboot", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceRestart.setStatus('mandatory')
if mibBuilder.loadTexts: deviceRestart.setDescription('')
deviceFactoryDefault = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceFactoryDefault.setStatus('mandatory')
if mibBuilder.loadTexts: deviceFactoryDefault.setDescription('')
deviceSettingApply = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("apply", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceSettingApply.setStatus('mandatory')
if mibBuilder.loadTexts: deviceSettingApply.setDescription('')
deviceSettingDiscard = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("apply", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceSettingDiscard.setStatus('mandatory')
if mibBuilder.loadTexts: deviceSettingDiscard.setDescription('')
languagePackClear = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("clear", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: languagePackClear.setStatus('mandatory')
if mibBuilder.loadTexts: languagePackClear.setDescription('')
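# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# The device scalars above expose apply/reboot style controls through their
# named values.  The sketch writes deviceSettingApply = apply(1) and then
# deviceRestart = reboot(1) as two separate SETs; the agent address and
# community string are illustrative assumptions.
def example_apply_and_reboot(agent='192.0.2.1', community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    engine = SnmpEngine()
    for oid, value in (('1.3.6.1.4.1.171.10.37.37.4.2.3.0', 1),   # deviceSettingApply = apply
                       ('1.3.6.1.4.1.171.10.37.37.4.2.1.0', 1)):  # deviceRestart = reboot
        error_indication, error_status, _, _ = next(
            setCmd(engine, CommunityData(community),
                   UdpTransportTarget((agent, 161)), ContextData(),
                   ObjectType(ObjectIdentity(oid), Integer32(value))))
        if error_indication or error_status:
            raise RuntimeError(str(error_indication or error_status))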
update = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3))
updateFirmwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 1), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: updateFirmwareVersion.setStatus('mandatory')
if mibBuilder.loadTexts: updateFirmwareVersion.setDescription('')
tftp = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 2))
tftpServerIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 2, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tftpServerIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: tftpServerIPAddress.setDescription('')
tftpRemoteFileName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 2, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tftpRemoteFileName.setStatus('mandatory')
if mibBuilder.loadTexts: tftpRemoteFileName.setDescription("When putting a config file, use the 'xx.config' format (for example: aaa.config). When putting a certificate file, use the 'xx.pem' format (for example: aaa.pem)")
tftpCommand = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("connect", 1), ("get", 2), ("put", 3), ("nothing", 4), ("putacl", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tftpCommand.setStatus('mandatory')
if mibBuilder.loadTexts: tftpCommand.setDescription('')
tftpUpgradeSettingCommand = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("firmwareUpdate", 1), ("configSetting", 2), ("configSave", 3), ("reboot", 4), ("factoryReset", 5), ("nothing", 6), ("certificateFileUpdate", 7), ("keyFileUpdate", 8), ("getacl", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tftpUpgradeSettingCommand.setStatus('mandatory')
if mibBuilder.loadTexts: tftpUpgradeSettingCommand.setDescription('')
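# --- Illustrative usage sketch (not part of the generated MIB definitions) ---
# The TFTP scalars above describe a transfer: server address, remote file name,
# tftpCommand (connect/get/put) and tftpUpgradeSettingCommand choosing what the
# fetched file is used for.  The sketch drives a firmware upgrade; progress can
# afterwards be polled through updateStatus, defined further below.  The TFTP
# server address, file name, agent address and community string are
# illustrative assumptions, as is issuing everything in a single SET PDU.
def example_tftp_firmware_upgrade(agent='192.0.2.1', community='private',
                                  tftp_server='192.0.2.50', filename='dap3520.bin'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, IpAddress, OctetString, setCmd)
    base = '1.3.6.1.4.1.171.10.37.37.4.3.2.'
    error_indication, error_status, _, _ = next(
        setCmd(SnmpEngine(), CommunityData(community),
               UdpTransportTarget((agent, 161)), ContextData(),
               ObjectType(ObjectIdentity(base + '1.0'), IpAddress(tftp_server)),
               ObjectType(ObjectIdentity(base + '2.0'), OctetString(filename)),
               ObjectType(ObjectIdentity(base + '4.0'), Integer32(2)),    # tftpCommand = get
               ObjectType(ObjectIdentity(base + '5.0'), Integer32(1))))   # firmwareUpdate
    if error_indication or error_status:
        raise RuntimeError(str(error_indication or error_status))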
ftp = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 3))
ftpServerIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 3, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpServerIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ftpServerIPAddress.setDescription('')
ftpUserName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 3, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpUserName.setStatus('mandatory')
if mibBuilder.loadTexts: ftpUserName.setDescription('')
ftpPassword = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 3, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpPassword.setStatus('mandatory')
if mibBuilder.loadTexts: ftpPassword.setDescription('')
ftpRemoteFileName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 3, 5), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpRemoteFileName.setStatus('mandatory')
if mibBuilder.loadTexts: ftpRemoteFileName.setDescription("When putting a config file, please follow this format: 'xx.config' (for example: aaa.config). When putting a certificate file, please follow this format: 'xx.pem' (for example: aaa.pem).")
ftpCommand = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("connect", 1), ("get", 2), ("put", 3), ("nothing", 4), ("putacl", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpCommand.setStatus('mandatory')
if mibBuilder.loadTexts: ftpCommand.setDescription('')
ftpUpgradeSettingCommand = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 3, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("firmwareUpdate", 1), ("configSetting", 2), ("configSave", 3), ("reboot", 4), ("factoryReset", 5), ("nothing", 6), ("certificateFileUpdate", 7), ("keyFileUpdate", 8), ("getacl", 9)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ftpUpgradeSettingCommand.setStatus('mandatory')
if mibBuilder.loadTexts: ftpUpgradeSettingCommand.setDescription('')
updateStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 7, 8, 9))).clone(namedValues=NamedValues(("correct", 0), ("fwSuccess", 1), ("wrongImageFile", 2), ("wrongConfigFile", 3), ("wrongAclFile", 4), ("configSuccess", 5), ("inProcess", 7), ("failed", 8), ("none", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: updateStatus.setStatus('mandatory')
if mibBuilder.loadTexts: updateStatus.setDescription('')
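# Console subtree: telnet/SSH access control and session timeout.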
console = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 4))
telnet = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: telnet.setStatus('mandatory')
if mibBuilder.loadTexts: telnet.setDescription('')
ssh = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ssh.setStatus('mandatory')
if mibBuilder.loadTexts: ssh.setDescription('')
timeout = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("never", 0), ("s60", 1), ("s180", 2), ("s300", 3), ("s600", 4), ("s900", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: timeout.setStatus('mandatory')
if mibBuilder.loadTexts: timeout.setDescription('')
web = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 5))
webStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: webStatus.setStatus('mandatory')
if mibBuilder.loadTexts: webStatus.setDescription('')
snmp = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 6))
ssl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 7))
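# SNTP subtree: time server, time zone and daylight-saving configuration.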
sntp = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8))
sntpServerIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 1), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpServerIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: sntpServerIpAddress.setDescription('')
sntpTimeZoneIndex = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 75))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpTimeZoneIndex.setStatus('mandatory')
if mibBuilder.loadTexts: sntpTimeZoneIndex.setDescription('')
sntpDayLightSaving = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDayLightSaving.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDayLightSaving.setDescription('')
sntpTimeofDay = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sntpTimeofDay.setStatus('mandatory')
if mibBuilder.loadTexts: sntpTimeofDay.setDescription('')
sntpStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpStatus.setStatus('mandatory')
if mibBuilder.loadTexts: sntpStatus.setDescription('')
sntpInterval = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("one-day", 1), ("three-days", 2), ("seven-days", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpInterval.setStatus('mandatory')
if mibBuilder.loadTexts: sntpInterval.setDescription('')
setTimeManually = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 7), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: setTimeManually.setStatus('mandatory')
if mibBuilder.loadTexts: setTimeManually.setDescription("Please follow this format: 'xx-xx-xxxx xx:xx:xx'. For example: 02-04-1980 14:12:18")
sntpDstStartMonth = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 12))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstStartMonth.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstStartMonth.setDescription('Jan(1),Feb(2),Mar(3),Apr(4),May(5),Jun(6), Jul(7),Aug(8),Sep(9),Oct(10),Nov(11),Dec(12)')
sntpDstStartWeek = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstStartWeek.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstStartWeek.setDescription('1st(1),2nd(2),3rd(3),4th(4),5th(5)')
sntpDstStartDayOfWeek = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("sun", 0), ("mon", 1), ("tue", 2), ("wed", 3), ("thu", 4), ("fri", 5), ("sat", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstStartDayOfWeek.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstStartDayOfWeek.setDescription('')
sntpDstStartCurrentTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 23))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstStartCurrentTime.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstStartCurrentTime.setDescription('0:(12:00 AM) 23:(11:00 PM)')
sntpDstEndMonth = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 12))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstEndMonth.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstEndMonth.setDescription('Jan(1),Feb(2),Mar(3),Apr(4),May(5),Jun(6), Jul(7),Aug(8),Sep(9),Oct(10),Nov(11),Dec(12)')
sntpDstEndWeek = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstEndWeek.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstEndWeek.setDescription('1st(1),2nd(2),3rd(3),4th(4),5th(5)')
sntpDstEndDayOfWeek = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("sun", 0), ("mon", 1), ("tue", 2), ("wed", 3), ("thu", 4), ("fri", 5), ("sat", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstEndDayOfWeek.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstEndDayOfWeek.setDescription('')
sntpDstEndCurrentTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 23))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sntpDstEndCurrentTime.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDstEndCurrentTime.setDescription('0:(12:00 AM) 23:(11:00 PM)')
sntpDayLightSavingOffset = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 8, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sntpDayLightSavingOffset.setStatus('mandatory')
if mibBuilder.loadTexts: sntpDayLightSavingOffset.setDescription('0:(-2:00) 1:(-1:30) 2:(-1:00) 3:(-0:30) 4:(+0:30) 5:(+1:00) 6:(+1:30) 7:(+2:00)')
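# SMTP subtree: outgoing mail server settings used for notifications.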
smtp = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 9))
smtpStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 9, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: smtpStatus.setStatus('mandatory')
if mibBuilder.loadTexts: smtpStatus.setDescription('')
smtpServerIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 9, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: smtpServerIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: smtpServerIpAddress.setDescription('')
smtpAccountingName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 9, 5), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: smtpAccountingName.setStatus('mandatory')
if mibBuilder.loadTexts: smtpAccountingName.setDescription('')
smtpPassword = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 9, 6), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: smtpPassword.setStatus('mandatory')
if mibBuilder.loadTexts: smtpPassword.setDescription('')
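# Limited-administrator subtree: management-station IP/VLAN restrictions,
# followed by ping control.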
limitedAdministrator = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10))
managerAddress = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1))
managerIpAddressStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("disable", 0), ("vlanID", 1), ("ipaddress", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: managerIpAddressStatus.setStatus('mandatory')
if mibBuilder.loadTexts: managerIpAddressStatus.setDescription('')
managerIpAddressDelete = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: managerIpAddressDelete.setStatus('mandatory')
if mibBuilder.loadTexts: managerIpAddressDelete.setDescription('')
managerIpAddressTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1, 4), )
if mibBuilder.loadTexts: managerIpAddressTable.setStatus('mandatory')
if mibBuilder.loadTexts: managerIpAddressTable.setDescription('')
managerIpAddressEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1, 4, 1), ).setIndexNames((0, "DAP-3520-v115", "managerIpAddressIndex"))
if mibBuilder.loadTexts: managerIpAddressEntry.setStatus('mandatory')
if mibBuilder.loadTexts: managerIpAddressEntry.setDescription('')
managerIpAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1, 4, 1, 1), Integer32())
if mibBuilder.loadTexts: managerIpAddressIndex.setStatus('mandatory')
if mibBuilder.loadTexts: managerIpAddressIndex.setDescription('')
managerIpAddressPoolStart = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1, 4, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: managerIpAddressPoolStart.setStatus('mandatory')
if mibBuilder.loadTexts: managerIpAddressPoolStart.setDescription('')
managerIpAddressPoolEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 1, 4, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: managerIpAddressPoolEnd.setStatus('mandatory')
if mibBuilder.loadTexts: managerIpAddressPoolEnd.setDescription('')
manergeVLANTag = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 10, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: manergeVLANTag.setStatus('mandatory')
if mibBuilder.loadTexts: manergeVLANTag.setDescription('')
pingControl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 12))
pingControlStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 4, 12, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pingControlStatus.setStatus('mandatory')
if mibBuilder.loadTexts: pingControlStatus.setDescription('')
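# Report subtree: read-only device information, interface status, traffic
# statistics and system log objects.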
report = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5))
deviceInformation = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1))
deviceInformationFirmwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 1), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: deviceInformationFirmwareVersion.setStatus('mandatory')
if mibBuilder.loadTexts: deviceInformationFirmwareVersion.setDescription('')
interfaceInformation = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2))
interfaceInformationTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1), )
if mibBuilder.loadTexts: interfaceInformationTable.setStatus('mandatory')
if mibBuilder.loadTexts: interfaceInformationTable.setDescription('')
interfaceInformationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: interfaceInformationEntry.setStatus('mandatory')
if mibBuilder.loadTexts: interfaceInformationEntry.setDescription('')
ifGetIpAddressFrom = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("static", 1), ("dynamic", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifGetIpAddressFrom.setStatus('mandatory')
if mibBuilder.loadTexts: ifGetIpAddressFrom.setDescription('')
ifIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ifIpAddress.setDescription('')
ifSubnetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifSubnetMask.setStatus('mandatory')
if mibBuilder.loadTexts: ifSubnetMask.setDescription('')
ifDefaultGateway = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifDefaultGateway.setStatus('mandatory')
if mibBuilder.loadTexts: ifDefaultGateway.setDescription('')
ifMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 5), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ifMacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ifMacAddress.setDescription('')
wirelessLed2dot4G = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("blinking", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wirelessLed2dot4G.setStatus('mandatory')
if mibBuilder.loadTexts: wirelessLed2dot4G.setDescription('')
wirelessLed5G = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("blinking", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wirelessLed5G.setStatus('mandatory')
if mibBuilder.loadTexts: wirelessLed5G.setDescription('')
dataBaseChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dataBaseChannel.setStatus('mandatory')
if mibBuilder.loadTexts: dataBaseChannel.setDescription('')
mssid1MacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 11), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mssid1MacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: mssid1MacAddress.setDescription('')
mssid2MacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 12), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mssid2MacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: mssid2MacAddress.setDescription('')
mssid3MacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 13), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mssid3MacAddress.setStatus('mandatory')
if mibBuilder.loadTexts: mssid3MacAddress.setDescription('')
lanLED = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 1, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("blinking", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lanLED.setStatus('mandatory')
if mibBuilder.loadTexts: lanLED.setDescription('')
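# Traffic statistics: per-interface wired (dot3) and wireless (dot11) counters.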
trafficStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2))
trafficStatisticsWired = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1))
dot3TrafficStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1))
dot3TrafficStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1), )
if mibBuilder.loadTexts: dot3TrafficStatisticsTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot3TrafficStatisticsTable.setDescription('')
dot3TrafficStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot3TrafficStatisticsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot3TrafficStatisticsEntry.setDescription('')
dot3TransmittedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot3TransmittedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot3TransmittedPacketCount.setDescription('')
dot3TransmittedBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot3TransmittedBytesCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot3TransmittedBytesCount.setDescription('')
dot3TransmittedDroppedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot3TransmittedDroppedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot3TransmittedDroppedPacketCount.setDescription('')
dot3ReceivedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot3ReceivedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot3ReceivedPacketCount.setDescription('')
dot3ReceivedBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot3ReceivedBytesCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot3ReceivedBytesCount.setDescription('')
dot3ReceivedDroppedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot3ReceivedDroppedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot3ReceivedDroppedPacketCount.setDescription('')
dot3Clear = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 1, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("clear", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot3Clear.setStatus('mandatory')
if mibBuilder.loadTexts: dot3Clear.setDescription('')
trafficStatisticsWireless = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2))
dot11TrafficStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1))
dot11TrafficStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1), )
if mibBuilder.loadTexts: dot11TrafficStatisticsTable.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TrafficStatisticsTable.setDescription('')
dot11TrafficStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dot11TrafficStatisticsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TrafficStatisticsEntry.setDescription('')
dot11TransmitSuccessRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11TransmitSuccessRate.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TransmitSuccessRate.setDescription('')
dot11TransmitRetryRate = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11TransmitRetryRate.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TransmitRetryRate.setDescription('')
dot11TransmittedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 19), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11TransmittedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TransmittedPacketCount.setDescription('')
dot11TransmittedBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 20), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11TransmittedBytesCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TransmittedBytesCount.setDescription('')
dot11TransmittedDroppedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 21), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11TransmittedDroppedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TransmittedDroppedPacketCount.setDescription('')
dot11TransmittedRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11TransmittedRetryCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11TransmittedRetryCount.setDescription('')
dot11ReceivedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ReceivedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ReceivedPacketCount.setDescription('')
dot11ReceivedBytesCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ReceivedBytesCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ReceivedBytesCount.setDescription('')
dot11ReceivedDroppedPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 25), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ReceivedDroppedPacketCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ReceivedDroppedPacketCount.setDescription('')
dot11ReceivedCRCCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 26), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ReceivedCRCCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ReceivedCRCCount.setDescription('')
dot11ReceivedDecryptionErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 27), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ReceivedDecryptionErrorCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ReceivedDecryptionErrorCount.setDescription('')
dot11ReceivedMICErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 28), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ReceivedMICErrorCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ReceivedMICErrorCount.setDescription('')
dot11ReceivedPHYErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 29), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dot11ReceivedPHYErrorCount.setStatus('mandatory')
if mibBuilder.loadTexts: dot11ReceivedPHYErrorCount.setDescription('')
dot11Clear = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 2, 2, 1, 1, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("clear", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot11Clear.setStatus('mandatory')
if mibBuilder.loadTexts: dot11Clear.setDescription('')
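# System log subtree: log levels, remote syslog server and TFTP log retrieval.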
systemLog = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4))
systemLogSystemLevel = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogSystemLevel.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogSystemLevel.setDescription('')
systemLogWirelessLevel = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogWirelessLevel.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogWirelessLevel.setDescription('')
systemLogNoticeLevel = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogNoticeLevel.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogNoticeLevel.setDescription('')
systemLogTFTPServerIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogTFTPServerIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogTFTPServerIPAddress.setDescription('')
systemLogFileName = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 5), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogFileName.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogFileName.setDescription('')
systemLogGetLog = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("get", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogGetLog.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogGetLog.setDescription('')
systemLogRemoteLogState = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogRemoteLogState.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogRemoteLogState.setDescription('')
systemLogServerIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogServerIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogServerIPAddress.setDescription('')
systemLogClearLocalLog = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("nothing", 0), ("clear", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: systemLogClearLocalLog.setStatus('mandatory')
if mibBuilder.loadTexts: systemLogClearLocalLog.setDescription('')
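# E-mail notification table: per-entry mail relay settings and send schedule.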
emailNotification = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11))
emailNotificationTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1), )
if mibBuilder.loadTexts: emailNotificationTable.setStatus('mandatory')
if mibBuilder.loadTexts: emailNotificationTable.setDescription('')
emailNotificationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1), ).setIndexNames((0, "DAP-3520-v115", "emailNtfIndex"))
if mibBuilder.loadTexts: emailNotificationEntry.setStatus('mandatory')
if mibBuilder.loadTexts: emailNotificationEntry.setDescription('')
emailNtfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: emailNtfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfIndex.setDescription('')
emailNtfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfStatus.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfStatus.setDescription('')
emailNtfFromIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfFromIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfFromIPAddress.setDescription('')
emailNtfToIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfToIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfToIPAddress.setDescription('')
emailNtfServerIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 5), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfServerIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfServerIPAddress.setDescription('')
emailNtfAuthentication = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfAuthentication.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfAuthentication.setDescription('')
emailNtfPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 7), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfPassword.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfPassword.setDescription('')
emailNtfOnSchedule = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 9), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfOnSchedule.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfOnSchedule.setDescription('Allowed values: 0, 0.5, 1, 1.5, ... up to 24 (in increments of 0.5)')
emailNtfPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfPort.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfPort.setDescription('')
emailNtfSSL = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfSSL.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfSSL.setDescription('')
emailNtfMailServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("internal", 1), ("gmail", 2), ("hotmail", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfMailServerIndex.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfMailServerIndex.setDescription('')
emailNtfUsername = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 4, 11, 1, 1, 13), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailNtfUsername.setStatus('mandatory')
if mibBuilder.loadTexts: emailNtfUsername.setDescription('')
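# Traps subtree: trap host table and notification (trap) type definitions.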
traps = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7))
trapSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1))
trapStatus = MibScalar((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapStatus.setStatus('mandatory')
if mibBuilder.loadTexts: trapStatus.setDescription('')
trapHostTable = MibTable((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1, 3), )
if mibBuilder.loadTexts: trapHostTable.setStatus('mandatory')
if mibBuilder.loadTexts: trapHostTable.setDescription('')
trapHostEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1, 3, 1), ).setIndexNames((0, "DAP-3520-v115", "trapHostIndex"))
if mibBuilder.loadTexts: trapHostEntry.setStatus('mandatory')
if mibBuilder.loadTexts: trapHostEntry.setDescription('')
trapHostIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: trapHostIndex.setStatus('mandatory')
if mibBuilder.loadTexts: trapHostIndex.setDescription('')
trapHostIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1, 3, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHostIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: trapHostIPAddress.setDescription('')
trapVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("v1", 1), ("v2c", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapVersion.setStatus('mandatory')
if mibBuilder.loadTexts: trapVersion.setDescription('')
trapSecurityName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 1, 3, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapSecurityName.setStatus('mandatory')
if mibBuilder.loadTexts: trapSecurityName.setDescription('v1/v2c: community name, v3: user name')
trapInstances = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2))
sshLoginFail = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 1)).setObjects(("DAP-3520-v115", "usersName"))
if mibBuilder.loadTexts: sshLoginFail.setStatus('current')
if mibBuilder.loadTexts: sshLoginFail.setDescription('')
webNotify = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 2)).setObjects(("DAP-3520-v115", "usersName"))
if mibBuilder.loadTexts: webNotify.setStatus('current')
if mibBuilder.loadTexts: webNotify.setDescription('')
telnetLoginFail = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 3)).setObjects(("DAP-3520-v115", "usersName"))
if mibBuilder.loadTexts: telnetLoginFail.setStatus('current')
if mibBuilder.loadTexts: telnetLoginFail.setDescription('')
cpuLoadingFull = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 4))
if mibBuilder.loadTexts: cpuLoadingFull.setStatus('current')
if mibBuilder.loadTexts: cpuLoadingFull.setDescription('')
memoryPoor = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 5))
if mibBuilder.loadTexts: memoryPoor.setStatus('current')
if mibBuilder.loadTexts: memoryPoor.setDescription('')
wlanIfLinkUp = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 7)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: wlanIfLinkUp.setStatus('current')
if mibBuilder.loadTexts: wlanIfLinkUp.setDescription('')
deauthenticateAttack = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 8)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: deauthenticateAttack.setStatus('current')
if mibBuilder.loadTexts: deauthenticateAttack.setDescription('')
disassociateAttack = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 9)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: disassociateAttack.setStatus('current')
if mibBuilder.loadTexts: disassociateAttack.setDescription('')
bcFlood = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 10)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: bcFlood.setStatus('current')
if mibBuilder.loadTexts: bcFlood.setDescription('')
webLogoutSuccessful = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 11)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: webLogoutSuccessful.setStatus('current')
if mibBuilder.loadTexts: webLogoutSuccessful.setDescription('')
wlanIfLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 13)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: wlanIfLinkDown.setStatus('current')
if mibBuilder.loadTexts: wlanIfLinkDown.setDescription('')
stationAssocNotify = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 14))
if mibBuilder.loadTexts: stationAssocNotify.setStatus('current')
if mibBuilder.loadTexts: stationAssocNotify.setDescription('')
stationDisassocNotify = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 15))
if mibBuilder.loadTexts: stationDisassocNotify.setStatus('current')
if mibBuilder.loadTexts: stationDisassocNotify.setDescription('')
deAuthentication = NotificationType((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 5, 7, 2, 20))
if mibBuilder.loadTexts: deAuthentication.setStatus('current')
if mibBuilder.loadTexts: deAuthentication.setDescription('')
miscellaneous = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 10, 37, 37, 6))
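# Export all symbols defined by this module so other MIB modules and the
# pysnmp MIB builder can resolve them by name.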
mibBuilder.exportSymbols("DAP-3520-v115", dot11APArrayScanListName=dot11APArrayScanListName, lanIfMacAddress=lanIfMacAddress, interfaceInformationTable=interfaceInformationTable, dot11GroupVlanListMgmt=dot11GroupVlanListMgmt, dot11Band=dot11Band, webNotify=webNotify, ifMacAddress=ifMacAddress, dot11WdsSiteSurvey=dot11WdsSiteSurvey, dynamicDomainName=dynamicDomainName, currentStaticHostName=currentStaticHostName, wlanIfLinkDown=wlanIfLinkDown, dynamicDns=dynamicDns, timeout=timeout, lanIfSubnetMask=lanIfSubnetMask, dot11MacCloneAddressRefresh=dot11MacCloneAddressRefresh, telnet=telnet, dot11APArraySyncMultiSsidStatus=dot11APArraySyncMultiSsidStatus, wirelessLed5G=wirelessLed5G, dot11WdsTable=dot11WdsTable, ieee802dot11ScheduleRuleSetting=ieee802dot11ScheduleRuleSetting, dataBaseChannel=dataBaseChannel, ieee802dot11ARPSpoofingPrevention=ieee802dot11ARPSpoofingPrevention, dot11QosRulesEntry=dot11QosRulesEntry, wirelessLanIfNumber=wirelessLanIfNumber, dot11PvidSettingWds2=dot11PvidSettingWds2, smtpAccountingName=smtpAccountingName, pingControlStatus=pingControlStatus, dot11MacCloneSurveryMacAddress=dot11MacCloneSurveryMacAddress, usersPassword=usersPassword, ftpUpgradeSettingCommand=ftpUpgradeSettingCommand, dot11QosAutomatic=dot11QosAutomatic, dot11WepKeyTable=dot11WepKeyTable, staticIndex=staticIndex, dot11APArraySyncSsidHiddenStatus=dot11APArraySyncSsidHiddenStatus, dot11ScheduleAllDaySelect=dot11ScheduleAllDaySelect, tftpCommand=tftpCommand, mssid1MacAddress=mssid1MacAddress, dot11RogueApSurveyEntry=dot11RogueApSurveyEntry, dot11QosRulesProtocol=dot11QosRulesProtocol, managerAddress=managerAddress, dot11QosRulesState=dot11QosRulesState, dot11SiteSurveyRefreshTable=dot11SiteSurveyRefreshTable, ieee802dot11APArrayScanSetting=ieee802dot11APArrayScanSetting, usersName=usersName, dhcpServerStaticTable=dhcpServerStaticTable, dot11APArraySyncScheduleStatus=dot11APArraySyncScheduleStatus, updateFirmwareVersion=updateFirmwareVersion, dot11WebRedirectionAccountStatus=dot11WebRedirectionAccountStatus, dot11SiteSurveyRefreshEntry=dot11SiteSurveyRefreshEntry, dot11RogueApSurveyMode=dot11RogueApSurveyMode, dot11RemoteApMacAddressAdd=dot11RemoteApMacAddressAdd, dot11MssidWepKeyIndex=dot11MssidWepKeyIndex, dhcpServerCurrentList=dhcpServerCurrentList, dot11ScheduleStatus=dot11ScheduleStatus, dot11PvidSettingWds3=dot11PvidSettingWds3, dot11MacAccessControlListTable=dot11MacAccessControlListTable, dot11Clear=dot11Clear, dot11APArraySyncShortGIStatus=dot11APArraySyncShortGIStatus, dot11ARPSpoofingPreventionIndex=dot11ARPSpoofingPreventionIndex, dot11WdsAuthentication=dot11WdsAuthentication, dot3TrafficStatisticsEntry=dot3TrafficStatisticsEntry, dot11MacAccessControlMacAddressAdd=dot11MacAccessControlMacAddressAdd, bcFlood=bcFlood, dot11PortListRecord=dot11PortListRecord, dot11AutoChannelScan=dot11AutoChannelScan, dlink_dapfamily=dlink_dapfamily, dot3TrafficStatistics=dot3TrafficStatistics, dot11MacCloneEntry=dot11MacCloneEntry, dot11GroupingTable=dot11GroupingTable, dot11VLANParameter=dot11VLANParameter, dot11QosRulesHostOnePortEnd=dot11QosRulesHostOnePortEnd, dot11SecuritiesTable=dot11SecuritiesTable, dot11SiteSurveyTable=dot11SiteSurveyTable, ieee802dot11APArrayMeberList=ieee802dot11APArrayMeberList, dot11APArraySyncChannelWidthStatus=dot11APArraySyncChannelWidthStatus, stationDisassocNotify=stationDisassocNotify, dot11ClientIpAddress=dot11ClientIpAddress, dot11MssidWmm=dot11MssidWmm, dhcpServerDynamicTable=dhcpServerDynamicTable, staticGateway=staticGateway, dot11WpaEapPasswd=dot11WpaEapPasswd, 
emailNtfToIPAddress=emailNtfToIPAddress, dot11ClientTxBytesCount=dot11ClientTxBytesCount, dot11ScheduleListWirelessStatus=dot11ScheduleListWirelessStatus, dot11GroupKeyUpdateInterval=dot11GroupKeyUpdateInterval, dot11MacCloneSource=dot11MacCloneSource, dot11ZoneDefenceIpAddressListEntry=dot11ZoneDefenceIpAddressListEntry, dot11WifiMode=dot11WifiMode, currentStaticIndex=currentStaticIndex, dot11ScheduleNodeStatus=dot11ScheduleNodeStatus, dot11WebRedirectionAccountEntry=dot11WebRedirectionAccountEntry, dot11APArrayScanListTable=dot11APArrayScanListTable, capwapWlanSwitchSetting=capwapWlanSwitchSetting, staticDns=staticDns, dot11SsidBroadcast=dot11SsidBroadcast, ftpPassword=ftpPassword, dot11MssidAutoRekeyPassPhrase=dot11MssidAutoRekeyPassPhrase, dot11RogueApListRecordStatus=dot11RogueApListRecordStatus, dot11ScheduleListTable=dot11ScheduleListTable, lanLED=lanLED, emailNtfAuthentication=emailNtfAuthentication, deviceRestart=deviceRestart, refreshCurrentDynamicList=refreshCurrentDynamicList, dot11RogueApSurveyLastseen=dot11RogueApSurveyLastseen, dot11AutoRekeyStartTime=dot11AutoRekeyStartTime, tftp=tftp, dot11WdsSiteSurveyEncryption=dot11WdsSiteSurveyEncryption, lanIfGetIpAddressFrom=lanIfGetIpAddressFrom, ftpCommand=ftpCommand, ieee802dot11ScheduleList=ieee802dot11ScheduleList, dhcpServer=dhcpServer, dot11APArraySyncSnmpStatus=dot11APArraySyncSnmpStatus, dot11RemoteApMacAddressList=dot11RemoteApMacAddressList, dot11ScheduleListIndex=dot11ScheduleListIndex, dot11ScheduleRuleEndTime=dot11ScheduleRuleEndTime, dot11QosRulesHostOneIpStart=dot11QosRulesHostOneIpStart, dot11SiteSurveyRssi=dot11SiteSurveyRssi, dot3ReceivedBytesCount=dot3ReceivedBytesCount, dot11RogueApAllNewNodesAsValid=dot11RogueApAllNewNodesAsValid, dot11GetWdsEntry=dot11GetWdsEntry, dot11ARPSpoofingPreventionIpAddress=dot11ARPSpoofingPreventionIpAddress, capwapWtpSwitchIpAddressDelete=capwapWtpSwitchIpAddressDelete, dot11APArraySyncWMMStatus=dot11APArraySyncWMMStatus, ieee802dot11=ieee802dot11, dot11WpaEapUsername=dot11WpaEapUsername, dot11ClientMacAddress=dot11ClientMacAddress, smtpStatus=smtpStatus, wirelessLed2dot4G=wirelessLed2dot4G, dot11MssidAutoRekeyStartWeek=dot11MssidAutoRekeyStartWeek, dot11RogueApListRecordEntry=dot11RogueApListRecordEntry, currentDynamicEntry=currentDynamicEntry, dot11DataRateList=dot11DataRateList, dot11QosRulesName=dot11QosRulesName, staticEntryStatus=staticEntryStatus, dot11MacCloneSurveryIndex=dot11MacCloneSurveryIndex, dot11APArrayScanListMasterIP=dot11APArrayScanListMasterIP, dot11MacAccessControlTable=dot11MacAccessControlTable, dot11PortListEntry=dot11PortListEntry, dot11EthernetToWlanAccess=dot11EthernetToWlanAccess, dot11GroupVlanListLan=dot11GroupVlanListLan, dot11WdsSiteSurveyIndex=dot11WdsSiteSurveyIndex, dot11GroupVlanListPrimary=dot11GroupVlanListPrimary, dot11PrimaryRadiusPort=dot11PrimaryRadiusPort, dot11WpaEapType=dot11WpaEapType, dot11ClientInformationTable=dot11ClientInformationTable, capwap=capwap, report=report, managerIpAddressPoolEnd=managerIpAddressPoolEnd, dot11RogueApSurveyStatus=dot11RogueApSurveyStatus, dot11VlanMode=dot11VlanMode, cpuLoadingFull=cpuLoadingFull, ieee802dot11Qos=ieee802dot11Qos, trafficStatisticsWireless=trafficStatisticsWireless, tftpServerIPAddress=tftpServerIPAddress, dot11WdsSiteSurveyChannel=dot11WdsSiteSurveyChannel, dot11PrimaryRadiusSecret=dot11PrimaryRadiusSecret, enterprises=enterprises, dot11RogueApListRecordTable=dot11RogueApListRecordTable, limitedAdministrator=limitedAdministrator, dot11APArrayMeberListIndex=dot11APArrayMeberListIndex, 
lanIfSecondaryDNS=lanIfSecondaryDNS, managerIpAddressIndex=managerIpAddressIndex, dot11Encryption=dot11Encryption, dot11MssidEntry=dot11MssidEntry, dot11RemoteApMacAddressAccessTable=dot11RemoteApMacAddressAccessTable, dot11SiteSurvey=dot11SiteSurvey, dot11MacAccessControl=dot11MacAccessControl, dot11ARPSpoofingPreventionTable=dot11ARPSpoofingPreventionTable, dot11APArraySyncSSIDStatus=dot11APArraySyncSSIDStatus, dot11MssidWepKey=dot11MssidWepKey, ieee802dot11ARPSpoofingPreventionSetting=ieee802dot11ARPSpoofingPreventionSetting, dot11MacCloneMacAddress=dot11MacCloneMacAddress, dynamicMask=dynamicMask, trapInstances=trapInstances, deviceSettingDiscard=deviceSettingDiscard, dot11TrafficStatisticsEntry=dot11TrafficStatisticsEntry, dot11RogueApAddtoNew=dot11RogueApAddtoNew, trapHostEntry=trapHostEntry, dot11WebRedirectionIndex=dot11WebRedirectionIndex, dot11MssidEncryption=dot11MssidEncryption, dot11GroupVlanListMssid2=dot11GroupVlanListMssid2, currentDynamicTable=currentDynamicTable, dot11QosRuleStatus=dot11QosRuleStatus, lanIfPrimaryDNS=lanIfPrimaryDNS, trapHostIndex=trapHostIndex, dot11ApArrayScan=dot11ApArrayScan, snmp=snmp, sntpInterval=sntpInterval, dot11WepKey=dot11WepKey, dot11SiteSurveyEncryption=dot11SiteSurveyEncryption, wirelessLanIfTable=wirelessLanIfTable, wtpSwitchAddressListEntry=wtpSwitchAddressListEntry, dot11Application=dot11Application, dot11Band5GHzOutdoorChannelList=dot11Band5GHzOutdoorChannelList, ieee802dot11APArray=ieee802dot11APArray, tftpUpgradeSettingCommand=tftpUpgradeSettingCommand, updateStatus=updateStatus, dot11APArraySyncConnectionLimitStatus=dot11APArraySyncConnectionLimitStatus, usersIndex=usersIndex, ieee802dot11RogueApDetection=ieee802dot11RogueApDetection, dot11MssidRADIUSSecret=dot11MssidRADIUSSecret, dot11QosRulesHostOneIpRange=dot11QosRulesHostOneIpRange, dot11WebRedirectionListAccountName=dot11WebRedirectionListAccountName, dot11ApMode=dot11ApMode, dot3TransmittedBytesCount=dot3TransmittedBytesCount, dot11MssidStateTable=dot11MssidStateTable, dot11PrimaryRadiusServer=dot11PrimaryRadiusServer, dot11MulticastRateABandList=dot11MulticastRateABandList, dot11ARPSpoofingPreventionMacAddress=dot11ARPSpoofingPreventionMacAddress, dot11SiteSurveyEntry=dot11SiteSurveyEntry, deviceFactoryDefault=deviceFactoryDefault, deAuthentication=deAuthentication, dot11PvidSettingEntry=dot11PvidSettingEntry, dot11CipherType=dot11CipherType, dot11QosPriorityClassifiers=dot11QosPriorityClassifiers, dot11QosStatus=dot11QosStatus, dot11PvidSettingMssid2=dot11PvidSettingMssid2, dot11WdsSiteSurveySsid=dot11WdsSiteSurveySsid, administration=administration, systemLogFileName=systemLogFileName, dot11GroupVlanListTable=dot11GroupVlanListTable, currentDynamicLease=currentDynamicLease, dot11ZoneDefenceIpAddressAdd=dot11ZoneDefenceIpAddressAdd, dot11MacCloneTable=dot11MacCloneTable, dot11APArrayScanListIndex=dot11APArrayScanListIndex, dot11Igmpsnooping=dot11Igmpsnooping, dot11RogueApSurveyIndex=dot11RogueApSurveyIndex, capwapWtpStatus=capwapWtpStatus, dot11RemoteApMacAddressAccessEntry=dot11RemoteApMacAddressAccessEntry, refreshCurrentStaticList=refreshCurrentStaticList, dynamicIndex=dynamicIndex, dot11MssidGroupKeyUpdateInterval=dot11MssidGroupKeyUpdateInterval, dot11GroupVlanListMssid1=dot11GroupVlanListMssid1, sntpTimeZoneIndex=sntpTimeZoneIndex, dot11WebRedirectionStatus=dot11WebRedirectionStatus, wirelessLanIfEntry=wirelessLanIfEntry, systemLogSystemLevel=systemLogSystemLevel, wtpSwitchAddressIndex=wtpSwitchAddressIndex, dot11ReceivedCRCCount=dot11ReceivedCRCCount, 
dot11ScheduleListOverNight=dot11ScheduleListOverNight, systemLogTFTPServerIPAddress=systemLogTFTPServerIPAddress, dot11GroupVlanListIndex=dot11GroupVlanListIndex, dot11APArraySyncDtimStatus=dot11APArraySyncDtimStatus, dot11PvidSettingRecord=dot11PvidSettingRecord, dot11APArraySyncParametersStatusEntry=dot11APArraySyncParametersStatusEntry, dot11GroupVlanListWds4=dot11GroupVlanListWds4, ieee802dot11Grouping=ieee802dot11Grouping, deviceSettingApply=deviceSettingApply, webStatus=webStatus, ieee802dot11MultiSsid=ieee802dot11MultiSsid, dot11RogueApSurveyTable=dot11RogueApSurveyTable, manergeVLANTag=manergeVLANTag, smtpServerIpAddress=smtpServerIpAddress, interfaceInformationEntry=interfaceInformationEntry)
mibBuilder.exportSymbols("DAP-3520-v115", ifIpAddress=ifIpAddress, dot11ARPSpoofingPreventionAction=dot11ARPSpoofingPreventionAction, dot11ApArrayPassword=dot11ApArrayPassword, ieee802dot11APArraySetting=ieee802dot11APArraySetting, dot11SecuritiesEntry=dot11SecuritiesEntry, traps=traps, dot3TrafficStatisticsTable=dot3TrafficStatisticsTable, capwapWtpConnectingSwitchIP=capwapWtpConnectingSwitchIP, dap3520=dap3520, ieee802dot11ScheduleSetting=ieee802dot11ScheduleSetting, ftp=ftp, dot11WdsSiteSurveyMode=dot11WdsSiteSurveyMode, dot11WdsRefresh=dot11WdsRefresh, dot11WpaEapAuthenticationType=dot11WpaEapAuthenticationType, staticIP=staticIP, dot11RemoteApMacAddressDelete=dot11RemoteApMacAddressDelete, dot11RogueApListRecord=dot11RogueApListRecord, dot11QosRulesHostOnePortStart=dot11QosRulesHostOnePortStart, dot11MssidRADIUSServer=dot11MssidRADIUSServer, dot11PortListTable=dot11PortListTable, dot11RadioWave=dot11RadioWave, dot11APArraySyncQOSStatus=dot11APArraySyncQOSStatus, dot11Securities=dot11Securities, dot11MacAccessControlListMacAddress=dot11MacAccessControlListMacAddress, dlink_products=dlink_products, dot11InternalStationConnectionMultiSSID3=dot11InternalStationConnectionMultiSSID3, emailNtfStatus=emailNtfStatus, sntp=sntp, dot11MssidRADIUSPort=dot11MssidRADIUSPort, dot11ScheduleSSIDIndex=dot11ScheduleSSIDIndex, staticMac=staticMac, dot11MssidIndex=dot11MssidIndex, dot11MulticastRateGBand=dot11MulticastRateGBand, dot11WdsSiteSurveyEntry=dot11WdsSiteSurveyEntry, mssid3MacAddress=mssid3MacAddress, dot11PvidSettingWds4=dot11PvidSettingWds4, dot11ARPSpoofingPreventionStatus=dot11ARPSpoofingPreventionStatus, interfaceInformation=interfaceInformation, dot11HT2040Coexistence=dot11HT2040Coexistence, dot11KeyIndex=dot11KeyIndex, webLogoutSuccessful=webLogoutSuccessful, dot11MssidRADIUSEntry=dot11MssidRADIUSEntry, sntpStatus=sntpStatus, ftpRemoteFileName=ftpRemoteFileName, dot11RogueApAddtoRouge=dot11RogueApAddtoRouge, dot11LoadBalance=dot11LoadBalance, dot11APArrayScanListTotal=dot11APArrayScanListTotal, dot11DataRate=dot11DataRate, dot11ScheduleListDays=dot11ScheduleListDays, sntpDstEndWeek=sntpDstEndWeek, usersTable=usersTable, systemLogGetLog=systemLogGetLog, dot11MacClone=dot11MacClone, dot3ReceivedDroppedPacketCount=dot3ReceivedDroppedPacketCount, dot11ClientInformation=dot11ClientInformation, ieee802dot11ScheduleStatus=ieee802dot11ScheduleStatus, dot11RogueApListRecordChannel=dot11RogueApListRecordChannel, dot11SSIDIndex=dot11SSIDIndex, dot11GetClientInformationEntry=dot11GetClientInformationEntry, dot11APArraySyncParametersStatusIndex=dot11APArraySyncParametersStatusIndex, usersEntry=usersEntry, dot11Parameters=dot11Parameters, dot11ZoneDefenceIpAddressDelete=dot11ZoneDefenceIpAddressDelete, dot11UserLimit=dot11UserLimit, wtpSwitchIpAddress=wtpSwitchIpAddress, ieee802dot11APArrayScanList=ieee802dot11APArrayScanList, dot11PvidSettingTable=dot11PvidSettingTable, ieee802dot11APArrayScans=ieee802dot11APArrayScans, staticWins=staticWins, dot11RogueApSurvey=dot11RogueApSurvey, dot11PvidSettingMssid3=dot11PvidSettingMssid3, wirelessLanIfObjectID=wirelessLanIfObjectID, dot11InternalStationConnectionMultiSSID2=dot11InternalStationConnectionMultiSSID2, dot11ChannelList=dot11ChannelList, DisplayString=DisplayString, dot11MacCloneStatus=dot11MacCloneStatus, dhcpServerDynamicParameter=dhcpServerDynamicParameter, currentDynamicMacAddress=currentDynamicMacAddress, dot11ScheduleListEntry=dot11ScheduleListEntry, dot11ScheduleRuleName=dot11ScheduleRuleName, dot11ClientRssi=dot11ClientRssi, 
ieee802dot11APArraySyncParametersStatus=ieee802dot11APArraySyncParametersStatus, emailNtfSSL=emailNtfSSL, dot11MssidTable=dot11MssidTable, dynamicGateway=dynamicGateway, dot11SiteSurveyChannel=dot11SiteSurveyChannel, dot11APArraySyncTxPowerStatus=dot11APArraySyncTxPowerStatus, dot11ARPSpoofingPreventionListMacAddress=dot11ARPSpoofingPreventionListMacAddress, dhcpServerStaticControl=dhcpServerStaticControl, dhcpServerStaticDelete=dhcpServerStaticDelete, dot11ParametersTable=dot11ParametersTable, dot11MulticastRateGBandList=dot11MulticastRateGBandList, dot11MssidAutoRekeyTimeInterval=dot11MssidAutoRekeyTimeInterval, emailNotificationTable=emailNotificationTable, dot11ScheduleOverNight=dot11ScheduleOverNight, wlanIfLinkUp=wlanIfLinkUp, dot11WepKeyEntryMethod=dot11WepKeyEntryMethod, dot11APArrayScanListBackupNumber=dot11APArrayScanListBackupNumber, dot11Band5GHzDataRateList=dot11Band5GHzDataRateList, dot11ReceivedPacketCount=dot11ReceivedPacketCount, dot11RogueApListRecordIndex=dot11RogueApListRecordIndex, dot11RogueApListRecordType=dot11RogueApListRecordType, dot11RemoteApMacAddressDeleteAll=dot11RemoteApMacAddressDeleteAll, ftpServerIPAddress=ftpServerIPAddress, emailNotification=emailNotification, ifSubnetMask=ifSubnetMask, dot11MacCloneSurveryTable=dot11MacCloneSurveryTable, miscellaneous=miscellaneous, dot11wepKeyIndex=dot11wepKeyIndex, currentStaticAssignedIP=currentStaticAssignedIP, dot11MssidPriority=dot11MssidPriority, dot11QosRulesHostTwoIpStart=dot11QosRulesHostTwoIpStart, dot11PortListIndex=dot11PortListIndex, ieee802dot11APArrayStatus=ieee802dot11APArrayStatus, dot11ClientIndex=dot11ClientIndex, dot11APArrayMeberListMac=dot11APArrayMeberListMac, dot11WepKeyEntry=dot11WepKeyEntry, dot11MssidPriorityState=dot11MssidPriorityState, currentStaticEntry=currentStaticEntry, dot11ApArrayName=dot11ApArrayName, dot11MacCloneSurveryEntry=dot11MacCloneSurveryEntry, dot11APArraySyncDhcpStatus=dot11APArraySyncDhcpStatus, languagePackClear=languagePackClear, managerIpAddressTable=managerIpAddressTable, ieee802dot11Schedule=ieee802dot11Schedule, dot11MssidSuppress=dot11MssidSuppress, trapHostIPAddress=trapHostIPAddress, dot11WebRedirectionAccountName=dot11WebRedirectionAccountName, sntpDstStartMonth=sntpDstStartMonth, dot11InternalStationConnectionMultiSSID1=dot11InternalStationConnectionMultiSSID1, dot11RogueApListRecordMode=dot11RogueApListRecordMode, dhcpServerDynamicControl=dhcpServerDynamicControl, dot11WdsSiteSurveyRefresh=dot11WdsSiteSurveyRefresh, memoryPoor=memoryPoor, emailNotificationEntry=emailNotificationEntry, sntpTimeofDay=sntpTimeofDay, dynamicLeaseTime=dynamicLeaseTime, dot11TransmitRetryRate=dot11TransmitRetryRate, dot11GroupVlanListMssid3=dot11GroupVlanListMssid3, dot11WdsMacAddress=dot11WdsMacAddress, dot11WdsPsm=dot11WdsPsm, disassociateAttack=disassociateAttack, dot11APArraySyncVlanStatus=dot11APArraySyncVlanStatus, telnetLoginFail=telnetLoginFail, dot11QosRulesProtocolType=dot11QosRulesProtocolType, dot11ReceivedPHYErrorCount=dot11ReceivedPHYErrorCount, capwapWtpLocationData=capwapWtpLocationData, dot11APArrayModeSelect=dot11APArrayModeSelect, systemLogNoticeLevel=systemLogNoticeLevel, dot11Band5GHzChannelList=dot11Band5GHzChannelList, ieee802dot11ARPSpoofingPreventionAddressSetting=ieee802dot11ARPSpoofingPreventionAddressSetting, currentStaticTable=currentStaticTable, dot11APArrayMeberListTable=dot11APArrayMeberListTable, dot11APArraySyncParametersStatusTable=dot11APArraySyncParametersStatusTable, dot11AutoRekeyControl=dot11AutoRekeyControl, 
dot11ScheduleListName=dot11ScheduleListName, interface=interface, dot11RemoteApMacAddressIndex=dot11RemoteApMacAddressIndex, dot11ZoneDefenceControl=dot11ZoneDefenceControl, dot11InternalStationConnectionPrimarySSID=dot11InternalStationConnectionPrimarySSID, dot11GroupVlanListWds2=dot11GroupVlanListWds2, dot11APArrayScanListMasterNumber=dot11APArrayScanListMasterNumber, lanIfIpAddress=lanIfIpAddress, dot11RadiusKeyUpdateInterval=dot11RadiusKeyUpdateInterval, dot11AutoRekeyTimeInterval=dot11AutoRekeyTimeInterval, dot11PortListRefresh=dot11PortListRefresh, dot11WdsStatus=dot11WdsStatus, dot11PvidSettingMssid1=dot11PvidSettingMssid1, lanIfDefaultGateway=lanIfDefaultGateway, dot11SiteSurveyIndex=dot11SiteSurveyIndex, dot3TransmittedDroppedPacketCount=dot3TransmittedDroppedPacketCount, emailNtfFromIPAddress=emailNtfFromIPAddress, dot11PvidSettingPrimary=dot11PvidSettingPrimary, dot11APArrayStatus=dot11APArrayStatus, ftpUserName=ftpUserName, lanIfSetting=lanIfSetting, dot11ZoneDefence=dot11ZoneDefence, setTimeManually=setTimeManually, sntpDstEndMonth=sntpDstEndMonth, dot11GetClientInformationTable=dot11GetClientInformationTable, mssid2MacAddress=mssid2MacAddress, dot11GetWdsTable=dot11GetWdsTable, dot11PvidSettingWds1=dot11PvidSettingWds1, dot11MssidPassPhrase=dot11MssidPassPhrase, dot11QosRulesHostTwoIpRange=dot11QosRulesHostTwoIpRange, dot11MssidKeyType=dot11MssidKeyType, dhcpServerStaticParameter=dhcpServerStaticParameter, ieee802dot11WebRedirectionAccountSetting=ieee802dot11WebRedirectionAccountSetting, dot11MulticastRateABand=dot11MulticastRateABand, dot11PassPhrase=dot11PassPhrase, dot11RogueApAllNewNodesAsRogue=dot11RogueApAllNewNodesAsRogue, dot11Countrycode=dot11Countrycode, dot11ClientRxBytesCount=dot11ClientRxBytesCount, smtpPassword=smtpPassword, trapSecurityName=trapSecurityName, managerIpAddressDelete=managerIpAddressDelete, dot11WdsSiteSurveyRefreshEntry=dot11WdsSiteSurveyRefreshEntry, dot11ZoneDefenceTable=dot11ZoneDefenceTable, dot11RogueApSurveyType=dot11RogueApSurveyType, dot11APArraySyncAutoChannelStatus=dot11APArraySyncAutoChannelStatus, dot11GroupingEntry=dot11GroupingEntry, dot11PartionEntry=dot11PartionEntry, wtpSwitchAddressListTable=wtpSwitchAddressListTable, dot11ScheduleAction=dot11ScheduleAction, dot11Authentication=dot11Authentication, dot11NetworkAccessProtection=dot11NetworkAccessProtection, emailNtfPort=emailNtfPort, dot11PvidSettingIndex=dot11PvidSettingIndex, dot11APArraySyncAclStatus=dot11APArraySyncAclStatus, trapSetting=trapSetting, sntpDayLightSavingOffset=sntpDayLightSavingOffset, ieee802dot11WebRedirection=ieee802dot11WebRedirection, sntpDayLightSaving=sntpDayLightSaving, update=update, dynamicIpPoolEnd=dynamicIpPoolEnd, capwapWtpSwitchIpAddressAdd=capwapWtpSwitchIpAddressAdd, dot11QosRulesHostOneIpEnd=dot11QosRulesHostOneIpEnd, dynamicWins=dynamicWins, dot11GroupVlanListWds1=dot11GroupVlanListWds1, dot11MacAccessControlMacAddressDelete=dot11MacAccessControlMacAddressDelete, dot11APArraySyncLogStatus=dot11APArraySyncLogStatus, dot11APArraySyncMultiSsidSecurityStatus=dot11APArraySyncMultiSsidSecurityStatus, managerIpAddressStatus=managerIpAddressStatus, managerIpAddressPoolStart=managerIpAddressPoolStart, trafficStatisticsWired=trafficStatisticsWired, dot11ClientInformationEntry=dot11ClientInformationEntry, dot11APArrayMeberListIP=dot11APArrayMeberListIP, dot11RemoteApMacAddressTable=dot11RemoteApMacAddressTable, sntpServerIpAddress=sntpServerIpAddress, dot11MssidAutoRekeyStartTime=dot11MssidAutoRekeyStartTime, 
dot11WebRedirectionAccountTable=dot11WebRedirectionAccountTable, dot11MacAccessControlEntry=dot11MacAccessControlEntry, dot11RogueApSurveyChannel=dot11RogueApSurveyChannel, sntpDstStartDayOfWeek=sntpDstStartDayOfWeek, emailNtfMailServerIndex=emailNtfMailServerIndex, dot11APArrayMeberListLoacation=dot11APArrayMeberListLoacation, dot11Band5GHzWdsChannelList=dot11Band5GHzWdsChannelList, dot11ScheduleListTimeFrame=dot11ScheduleListTimeFrame, stationAssocNotify=stationAssocNotify, dot11RemoteApMacAddressEntry=dot11RemoteApMacAddressEntry, dot11APArraySyncMultiSsidWMMStatus=dot11APArraySyncMultiSsidWMMStatus, dot11RogueApListRecordBssid=dot11RogueApListRecordBssid, dot3Clear=dot3Clear, device=device, dot11SiteSurveySsid=dot11SiteSurveySsid, staticMask=staticMask, dot11ClientBand=dot11ClientBand, currentStaticMacAddress=currentStaticMacAddress, dot11WebRedirectionAccountAction=dot11WebRedirectionAccountAction, dot11RogueApAddtoNeighbor=dot11RogueApAddtoNeighbor, ifGetIpAddressFrom=ifGetIpAddressFrom, dot11TransmitSuccessRate=dot11TransmitSuccessRate, dot11QosRulesHostTwoIpEnd=dot11QosRulesHostTwoIpEnd, dot11WdsIndex=dot11WdsIndex)
mibBuilder.exportSymbols("DAP-3520-v115", dot11QosRulesPriority=dot11QosRulesPriority, emailNtfUsername=emailNtfUsername, dot11ScheduleListNodeStatus=dot11ScheduleListNodeStatus, emailNtfPassword=emailNtfPassword, dot3ReceivedPacketCount=dot3ReceivedPacketCount, dot11ClientInformationRefresh=dot11ClientInformationRefresh, smtp=smtp, sshLoginFail=sshLoginFail, dot11Frequency=dot11Frequency, dot11ClientAuthentication=dot11ClientAuthentication, dot11PortListTagVid=dot11PortListTagVid, systemLogServerIPAddress=systemLogServerIPAddress, sntpDstStartWeek=sntpDstStartWeek, dot11QosHttp=dot11QosHttp, dot11TransmittedRetryCount=dot11TransmittedRetryCount, ssh=ssh, dot11QosRulesHostOnePortRange=dot11QosRulesHostOnePortRange, dot11WdsSiteSurveyBssid=dot11WdsSiteSurveyBssid, dot11RemoteApMacAddress=dot11RemoteApMacAddress, dot11WdsSiteSurveyRefreshTable=dot11WdsSiteSurveyRefreshTable, dot11TrafficStatistics=dot11TrafficStatistics, wirelesslan=wirelesslan, emailNtfOnSchedule=emailNtfOnSchedule, ieee802dot11WebRedirectionSetting=ieee802dot11WebRedirectionSetting, dhcpServerDomainNameStatus=dhcpServerDomainNameStatus, dot11Filter=dot11Filter, dhcpServerControl=dhcpServerControl, dot11RogueApSurveyRefresh=dot11RogueApSurveyRefresh, dot11RogueApAddtoValid=dot11RogueApAddtoValid, dot11WdsBand=dot11WdsBand, dot3TransmittedPacketCount=dot3TransmittedPacketCount, dot11AutoRekeyStartWeek=dot11AutoRekeyStartWeek, dot11ZoneDefenceIpAddressListIndex=dot11ZoneDefenceIpAddressListIndex, staticDomainName=staticDomainName, dot11MssidStateEntry=dot11MssidStateEntry, dot11APArraySyncAdminLimitStatus=dot11APArraySyncAdminLimitStatus, dot11GroupVlanListVid=dot11GroupVlanListVid, dot11APArraySyncLoginStatus=dot11APArraySyncLoginStatus, dot11TransmittedPacketCount=dot11TransmittedPacketCount, dot11ReceivedBytesCount=dot11ReceivedBytesCount, dot11PartionTable=dot11PartionTable, dot11APArrayScanListMac=dot11APArrayScanListMac, dot11RogueApListRecordLastseen=dot11RogueApListRecordLastseen, dot11MssidAuthentication=dot11MssidAuthentication, dot11APArraySyncPingCtlStatus=dot11APArraySyncPingCtlStatus, dot11APArrayScanListSlaverNumber=dot11APArrayScanListSlaverNumber, dot11QosRulesHostTwoPortStart=dot11QosRulesHostTwoPortStart, dot11WdsMonitor=dot11WdsMonitor, trafficStatistics=trafficStatistics, dot11SiteSurveyBssid=dot11SiteSurveyBssid, systemLogWirelessLevel=systemLogWirelessLevel, dot11ARPSpoofingPreventionListIpAddress=dot11ARPSpoofingPreventionListIpAddress, dot11ZoneDefenceEntry=dot11ZoneDefenceEntry, dot11MssidRadiusKeyUpdateInterval=dot11MssidRadiusKeyUpdateInterval, dot11ZoneDefenceIpAddressList=dot11ZoneDefenceIpAddressList, trapVersion=trapVersion, currentDynamicAssignedIP=currentDynamicAssignedIP, dot11SiteSurveyRefresh=dot11SiteSurveyRefresh, dot11WebRedirectionListAccountPasswd=dot11WebRedirectionListAccountPasswd, dot11QosRulesIndex=dot11QosRulesIndex, dot11MssidCipherType=dot11MssidCipherType, systemLogRemoteLogState=systemLogRemoteLogState, dot11QosRulesTable=dot11QosRulesTable, dot11ScheduleDaysSelect=dot11ScheduleDaysSelect, dot11VlanListSurveydelete=dot11VlanListSurveydelete, dot11QosRulesHostTwoPortRange=dot11QosRulesHostTwoPortRange, dot11AutoRekeyPassPhrase=dot11AutoRekeyPassPhrase, dot11WdsConnected=dot11WdsConnected, lanIfSettingEntry=lanIfSettingEntry, dot11APArraySyncConsoleProtocolStatus=dot11APArraySyncConsoleProtocolStatus, pingControl=pingControl, dot11MacAccessControlListIndex=dot11MacAccessControlListIndex, dot11QosRulesDelete=dot11QosRulesDelete, dot11Dtim=dot11Dtim, 
dot11WebRedirectionListAccountStatus=dot11WebRedirectionListAccountStatus, tftpRemoteFileName=tftpRemoteFileName, dot11Wmm=dot11Wmm, sntpDstEndDayOfWeek=sntpDstEndDayOfWeek, emailNtfIndex=emailNtfIndex, dot11SiteSurveyBssType=dot11SiteSurveyBssType, dot11TransmitPower=dot11TransmitPower, deviceInformationFirmwareVersion=deviceInformationFirmwareVersion, dhcpServerStaticEntry=dhcpServerStaticEntry, emailNtfServerIPAddress=emailNtfServerIPAddress, capwapWtpConnectingSwitchName=capwapWtpConnectingSwitchName, dot11RogueApSurveyBssid=dot11RogueApSurveyBssid, managerIpAddressEntry=managerIpAddressEntry, deviceInformation=deviceInformation, dot11BeaconInterval=dot11BeaconInterval, dot11TrafficStatisticsTable=dot11TrafficStatisticsTable, dot11APArrayMeberListRole=dot11APArrayMeberListRole, dot11PortLisPortName=dot11PortLisPortName, console=console, dot11RogueApListRecordSsid=dot11RogueApListRecordSsid, lan=lan, dot11ARPSpoofingPreventionEntry=dot11ARPSpoofingPreventionEntry, dot11Channel=dot11Channel, dhcpServerDynamicEntry=dhcpServerDynamicEntry, dot11ScheduleListSSID=dot11ScheduleListSSID, currentDynamicIndex=currentDynamicIndex, dot11GroupVlanListWds3=dot11GroupVlanListWds3, deauthenticateAttack=deauthenticateAttack, dot11LinkIntegrate=dot11LinkIntegrate, lanIfSettingTable=lanIfSettingTable, ifDefaultGateway=ifDefaultGateway, dot11WdsRssi=dot11WdsRssi, advance=advance, dot11ScheduleListSSIDIndex=dot11ScheduleListSSIDIndex, systemLogClearLocalLog=systemLogClearLocalLog, dot11VlanStatus=dot11VlanStatus, dynamicIpPoolStart=dynamicIpPoolStart, systemLog=systemLog, dot11MssidAutoRekeyControl=dot11MssidAutoRekeyControl, dot11PvidSettingMgmt=dot11PvidSettingMgmt, dot11WdsSiteSurveyTable=dot11WdsSiteSurveyTable, trapHostTable=trapHostTable, dot11MssidRADIUSTable=dot11MssidRADIUSTable, dot11ScheduleRuleStartTime=dot11ScheduleRuleStartTime, dot11APArraySyncBeaconIntervalStatus=dot11APArraySyncBeaconIntervalStatus, dot11AckTimeout=dot11AckTimeout, dot11PvidSettingLan=dot11PvidSettingLan, dot11MssIndividualState=dot11MssIndividualState, dot11ChannelWidth=dot11ChannelWidth, dot11WdsEntry=dot11WdsEntry, dot11APArraySyncAckTimeoutStatus=dot11APArraySyncAckTimeoutStatus, dot11PvidAutoAssignStatus=dot11PvidAutoAssignStatus, sntpDstEndCurrentTime=sntpDstEndCurrentTime, trapStatus=trapStatus, sntpDstStartCurrentTime=sntpDstStartCurrentTime, dot11ShortGI=dot11ShortGI, dot11WdsSsidIndex=dot11WdsSsidIndex, dot11ApModeStatus=dot11ApModeStatus, dot11TransmittedDroppedPacketCount=dot11TransmittedDroppedPacketCount, dot11MssidState=dot11MssidState, dot11MssidRADIUSIndex=dot11MssidRADIUSIndex, staticHostName=staticHostName, dot11RogueApDeleteFromRecord=dot11RogueApDeleteFromRecord, dot11APArrayMeberListEntry=dot11APArrayMeberListEntry, dlink=dlink, dot11APArraySyncLinkIntegrityStatus=dot11APArraySyncLinkIntegrityStatus, dot11Ssid=dot11Ssid, dot11ReceivedDroppedPacketCount=dot11ReceivedDroppedPacketCount, dot11QosRulesHostTwoPortEnd=dot11QosRulesHostTwoPortEnd, capwapWtpName=capwapWtpName, dot11GroupVlanListEntry=dot11GroupVlanListEntry, dot11APArraySyncSystemStatus=dot11APArraySyncSystemStatus, dot11WebRedirectionAccountPasswd=dot11WebRedirectionAccountPasswd, currentDynamicHostName=currentDynamicHostName, wirelessLanIfDesc=wirelessLanIfDesc, ieee802dot11VLAN=ieee802dot11VLAN, dot11APArraySyncFixedRateStatus=dot11APArraySyncFixedRateStatus, dot11ZoneDefenceIpAddressListTable=dot11ZoneDefenceIpAddressListTable, dot11APArraySyncBandStatus=dot11APArraySyncBandStatus, users=users, ssl=ssl, 
dot11PortListUntagVid=dot11PortListUntagVid, dot11APArraySyncSecurityStatus=dot11APArraySyncSecurityStatus, dot11ReceivedMICErrorCount=dot11ReceivedMICErrorCount, dot11ParametersEntry=dot11ParametersEntry, dot11APArraySyncMultiSsidHiddenStatus=dot11APArraySyncMultiSsidHiddenStatus, dot11MacAccessControlListEntry=dot11MacAccessControlListEntry, dot11TransmittedBytesCount=dot11TransmittedBytesCount, dot11MssidSsid=dot11MssidSsid, web=web, dot11APArraySyncIgmpSnoopStatus=dot11APArraySyncIgmpSnoopStatus, dot11APArraySyncTimeStatus=dot11APArraySyncTimeStatus, dot11GroupVlanListName=dot11GroupVlanListName, dot11APArrayScanListEntry=dot11APArrayScanListEntry, dot11RogueApSurveySsid=dot11RogueApSurveySsid, dot11ReceivedDecryptionErrorCount=dot11ReceivedDecryptionErrorCount, dot11ClientPsm=dot11ClientPsm)
| 121.925119
| 11,707
| 0.769863
|
bb0a6e9aafa1f48e9e82ffefa02eb039a6327509
| 1,715
|
py
|
Python
|
project/db/models/product_model.py
|
sunday-ucheawaji/API-
|
07fb4b596cfe8e85b8575a8e70a8c886d3ab627a
|
[
"MIT"
] | null | null | null |
project/db/models/product_model.py
|
sunday-ucheawaji/API-
|
07fb4b596cfe8e85b8575a8e70a8c886d3ab627a
|
[
"MIT"
] | null | null | null |
project/db/models/product_model.py
|
sunday-ucheawaji/API-
|
07fb4b596cfe8e85b8575a8e70a8c886d3ab627a
|
[
"MIT"
] | null | null | null |
import uuid
from django.db import models
from .brands_model import Brand
from .category_model import Category
class Product(models.Model):
LARGE = 'L'
MEDIUM = 'M'
SMALL = 'S'
SIZE_CHOICES = [
(LARGE, 'Large'),
(MEDIUM, 'Medium'),
(SMALL, 'Small'),
]
WHITE = 'WH'
BLUE = 'BL'
RED = 'RE'
BLACK = 'BK'
BROWN = 'BR'
COLOUR_CHOICES = [
(WHITE, 'White'),
(BLUE, 'Blue'),
(RED, 'Red'),
(BLACK, 'Black'),
(BROWN, 'Brown'),
]
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=50, unique=True)
image = models.URLField()
price = models.DecimalField(max_digits=20, decimal_places=2)
category = models.ForeignKey(
Category, on_delete=models.CASCADE, related_name='category')
label = models.CharField(max_length=100)
brand = models.ForeignKey(
Brand, on_delete=models.CASCADE, related_name='brand')
quantity_in_stock = models.IntegerField()
discounted_price = models.DecimalField(max_digits=10, decimal_places=2)
shipping_fee = models.DecimalField(max_digits=10, decimal_places=2)
size = models.CharField(
max_length=50, choices=SIZE_CHOICES, default=MEDIUM)
description = models.TextField(max_length=50)
colour = models.CharField(
max_length=50, choices=COLOUR_CHOICES, default=BLACK)
created_at = models.DateTimeField(auto_now_add=True, null=True)
updated_at = models.DateTimeField(auto_now=True, null=True)
def __str__(self):
return self.name
class Meta:
verbose_name_plural = 'Products'
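# Illustrative usage sketch (not part of the original file): creating a product
# from a Django shell or view; `some_category` and `some_brand` are hypothetical
# existing Category and Brand instances.
# product = Product.objects.create(
#     name="Desk Lamp",
#     image="https://example.com/lamp.png",
#     price="49.99",
#     category=some_category,
#     label="Lighting",
#     brand=some_brand,
#     quantity_in_stock=10,
#     discounted_price="39.99",
#     shipping_fee="5.00",
#     description="A small desk lamp.",
# )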
| 29.568966
| 87
| 0.638484
|
0926623ab1836aeb5df55850fc83dba5f449b2ef
| 397
|
py
|
Python
|
equals/constraints/numbers/between.py
|
toddsifleet/equals
|
fb2b2a027e5389fdeb2f59e9acbdcacb8a8cdfb4
|
[
"MIT"
] | 38
|
2015-03-18T21:45:33.000Z
|
2020-12-22T11:13:05.000Z
|
equals/constraints/numbers/between.py
|
toddsifleet/equals
|
fb2b2a027e5389fdeb2f59e9acbdcacb8a8cdfb4
|
[
"MIT"
] | 8
|
2015-02-12T04:06:37.000Z
|
2022-02-10T08:30:08.000Z
|
equals/constraints/numbers/between.py
|
toddsifleet/equals
|
fb2b2a027e5389fdeb2f59e9acbdcacb8a8cdfb4
|
[
"MIT"
] | 4
|
2015-02-25T16:54:00.000Z
|
2016-09-07T20:10:09.000Z
|
from ..base import Base
class Between(Base):
@property
def description(self):
return 'between {min} and {max}'.format(
min=self.min_val,
max=self.max_val,
)
def _handle_args(self, min_val, max_val):
self.min_val = min_val
self.max_val = max_val
def _check(self, value):
return self.min_val < value < self.max_val
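# Illustrative sketch (not from the original repo): assuming the Base constructor
# forwards its positional arguments to _handle_args, the check is an exclusive
# range test:
# Between(1, 5)._check(3)   # -> True
# Between(1, 5)._check(5)   # -> False, bounds are exclusive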
| 22.055556
| 50
| 0.594458
|
f3c6071c18ddad5d0b92dc4da9a9a2d01823c70d
| 216
|
py
|
Python
|
Standard Library/traceback/04_print_exception.py
|
shubhamnag14/Python-Documents
|
d3fee0ad90232b413f6ac1b562588fb255b79e42
|
[
"Apache-2.0"
] | 2
|
2020-11-27T13:21:05.000Z
|
2021-04-19T21:14:21.000Z
|
Standard Library/traceback/04_print_exception.py
|
shubhamnag14/Python-Documents
|
d3fee0ad90232b413f6ac1b562588fb255b79e42
|
[
"Apache-2.0"
] | null | null | null |
Standard Library/traceback/04_print_exception.py
|
shubhamnag14/Python-Documents
|
d3fee0ad90232b413f6ac1b562588fb255b79e42
|
[
"Apache-2.0"
] | 1
|
2021-06-27T20:31:42.000Z
|
2021-06-27T20:31:42.000Z
|
import traceback
import sys
try:
1/0
except ZeroDivisionError:
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(
exc_type, exc_value, exc_traceback, file=sys.stdout)
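# Expected output (approximately): the same text an uncaught exception would
# print, but written to sys.stdout instead of sys.stderr, e.g.:
#
# Traceback (most recent call last):
#   File "04_print_exception.py", line 4, in <module>
#     1/0
# ZeroDivisionError: division by zero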
| 21.6
| 60
| 0.740741
|
6067fe06215769c596bedb89fb21bbe9417db616
| 315
|
py
|
Python
|
portfolio/forms.py
|
AnirbanDatta87/my-website
|
755a99abd8c162f8882d574a6e385139aead883e
|
[
"MIT"
] | null | null | null |
portfolio/forms.py
|
AnirbanDatta87/my-website
|
755a99abd8c162f8882d574a6e385139aead883e
|
[
"MIT"
] | 12
|
2020-02-07T09:13:49.000Z
|
2022-01-13T01:51:04.000Z
|
portfolio/forms.py
|
AnirbanDatta87/my-website
|
755a99abd8c162f8882d574a6e385139aead883e
|
[
"MIT"
] | null | null | null |
from django import forms
class ContactForm(forms.Form):
name = forms.CharField(max_length=100, label="Name")
email = forms.EmailField(max_length=100, label="Email")
message = forms.CharField(label='', widget=forms.Textarea(
attrs={'placeholder': 'Enter your message here'}))
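# Illustrative usage sketch (hypothetical view code, not part of the original file):
# form = ContactForm(request.POST)
# if form.is_valid():
#     name = form.cleaned_data['name']       # validated field values
#     send_contact_email(form.cleaned_data)  # hypothetical helper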
| 39.375
| 74
| 0.666667
|
e78d1c5a4b196e6c72a8caa029e3ad2fa17763b3
| 49
|
py
|
Python
|
abbreviations.py
|
deshmukhps95/text-analysis-and-classification-gui-tool
|
c24fcc2083b65c8ce433c16e539366def9c1d0fa
|
[
"MIT"
] | 10
|
2019-10-20T12:46:54.000Z
|
2022-03-16T07:16:54.000Z
|
abbreviations.py
|
deshmukhps95/text-analysis-and-classification-gui-tool
|
c24fcc2083b65c8ce433c16e539366def9c1d0fa
|
[
"MIT"
] | null | null | null |
abbreviations.py
|
deshmukhps95/text-analysis-and-classification-gui-tool
|
c24fcc2083b65c8ce433c16e539366def9c1d0fa
|
[
"MIT"
] | 3
|
2019-10-20T17:33:17.000Z
|
2021-01-15T02:38:22.000Z
|
synonyms = {"": ""}
abbreviations_map = {"": ""}
| 16.333333
| 28
| 0.510204
|
d38f1c7a45bd8215229408340d91d71b7bd10c11
| 1,117
|
py
|
Python
|
generator/native.py
|
simpleton/eclipse2buck
|
b726cfdcb1ef98967435058aecb04d42110fe8ed
|
[
"MIT"
] | 1
|
2015-09-08T11:21:13.000Z
|
2015-09-08T11:21:13.000Z
|
generator/native.py
|
simpleton/eclipse2buck
|
b726cfdcb1ef98967435058aecb04d42110fe8ed
|
[
"MIT"
] | null | null | null |
generator/native.py
|
simpleton/eclipse2buck
|
b726cfdcb1ef98967435058aecb04d42110fe8ed
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
from eclipse2buck.generator.base_target import BaseTarget
from eclipse2buck import decorator
from eclipse2buck.util import util
from eclipse2buck import config
import os
class NativeLib(BaseTarget):
"""
gen native lib target
"""
is_exist_native = False
def __init__(self, root, name):
BaseTarget.__init__(self, root, name, config.native_suffix)
self.is_exist_native = self._is_exist_native()
if self.is_exist_native:
name = self.target_name(self.proj_name)
self.deps.append(":%s" % name)
def dump(self):
if self.is_exist_native:
name = self.target_name(self.proj_name)
self.gen_native_lib(name)
@decorator.target("prebuilt_native_library")
def gen_native_lib(self, name):
print "name = '%s'," % name
print "native_libs = 'libs',"
def _is_exist_native(self):
native_lib_path = os.path.join(self.lib_path, "libs")
return os.path.isdir(native_lib_path) and len(util.find_all_files_with_suffix(native_lib_path, "*.so")) > 0
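# Illustrative output sketch (not part of the original file): for a project whose
# libs/ directory contains .so files, dump() prints a BUCK prebuilt_native_library
# target roughly like the following (the exact wrapping is assumed to come from the
# decorator.target decorator, and the name suffix from config.native_suffix):
#
# prebuilt_native_library(
#   name = '<project>-native',
#   native_libs = 'libs',
# )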
| 31.027778
| 115
| 0.661594
|
e9f6606c4164d8c6fc943f1c02d398641e3ff4b8
| 17,947
|
py
|
Python
|
Lazy_cleaner/Lazy_cleaner.py
|
ahmed13131/Lazy-cleaner
|
8316421d2cb77d9e861bf2d9f5c5e51748cada02
|
[
"MIT"
] | 1
|
2021-09-27T19:06:36.000Z
|
2021-09-27T19:06:36.000Z
|
Lazy_cleaner/Lazy_cleaner.py
|
ahmed13131/Lazy-cleaner
|
8316421d2cb77d9e861bf2d9f5c5e51748cada02
|
[
"MIT"
] | null | null | null |
Lazy_cleaner/Lazy_cleaner.py
|
ahmed13131/Lazy-cleaner
|
8316421d2cb77d9e861bf2d9f5c5e51748cada02
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import RandomForestRegressor
import statsmodels.formula.api as sm
from scipy import stats
import chardet
import fuzzywuzzy
from fuzzywuzzy import process
import seaborn as sns
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
def fill_lin_rand(messy_df, metric, colnames):
"""
messy_df -> dataframe containing NaNs
metric -> whether to fill NaNs with "Linear Regression" or "Random Forests"
colnames -> column names whose NaNs should be replaced with numeric values
this function fills the NaN values of those columns by fitting
a regression model on the rows without NaNs and using it to
predict and fill the missing values
returns:
the clean dataframe and list of the missing values
"""
# Create X_df of predictor columns
X_df = messy_df.drop(colnames, axis = 1)
# Create Y_df of predicted columns
Y_df = messy_df[colnames]
# Create empty dataframes and list
Y_pred_df = pd.DataFrame(columns=colnames)
Y_missing_df = pd.DataFrame(columns=colnames)
missing_list = []
# Loop through all columns containing missing values
for col in messy_df[colnames]:
# Number of missing values in the column
missing_count = messy_df[col].isnull().sum()
# Separate train dataset which does not contain missing values
messy_df_train = messy_df[~messy_df[col].isnull()]
# Create X and Y within train dataset
msg_cols_train_df = messy_df_train[col]
messy_df_train = messy_df_train.drop(colnames, axis = 1)
# Create test dataset, containing missing values in Y
messy_df_test = messy_df[messy_df[col].isnull()]
# Separate X and Y in test dataset
msg_cols_test_df = messy_df_test[col]
messy_df_test = messy_df_test.drop(colnames,axis = 1)
# Copy X_train and Y_train
Y_train = msg_cols_train_df.copy()
X_train = messy_df_train.copy()
# Linear Regression model
if metric == "Linear Regression":
model = LinearRegression()
model.fit(X_train,Y_train)
print("R-squared value is: " + str(model.score(X_train, Y_train)))
# Random Forests regression model
elif metric == "Random Forests":
model = RandomForestRegressor(n_estimators = 10 , oob_score = True)
model.fit(X_train,Y_train)
# importances = model.feature_importances_
# indices = np.argsort(importances)
# features = X_train.columns
# print("Missing values in"+ col)
# #plt.title('Feature Importances')
# plt.barh(range(len(indices)), importances[indices], color='b', align='center')
# plt.yticks(range(len(indices)), features) ## removed [indices]
# plt.xlabel('Relative Importance')
# plt.show()
X_test = messy_df_test.copy()
# Predict Y_test values by passing X_test as input to the model
Y_test = model.predict(X_test)
Y_test_integer = pd.to_numeric(pd.Series(Y_test),downcast='integer')
# Append predicted Y values to known Y values
Y_complete = Y_train.append(Y_test_integer)
Y_complete = Y_complete.reset_index(drop = True)
# Update list of missing values
missing_list.append(Y_test.tolist())
Y_pred_df[col] = Y_complete
Y_pred_df = Y_pred_df.reset_index(drop = True)
# Create cleaned up dataframe
clean_df = X_df.join(Y_pred_df)
return clean_df,missing_list
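# Illustrative usage sketch (not part of the original module): fill the numeric
# columns "age" and "salary" of a hypothetical DataFrame whose remaining columns
# are complete numeric predictors.
# clean_df, imputed_values = fill_lin_rand(messy_df, "Random Forests", ["age", "salary"])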
class stat():
def __init__(self):
pass
def p_val_of_features(self,data,target_column):
"""
data -> the dataframe
target_column -> the column you want to predict
this function calculates the P value of each feature to show how it affects the regression model
for a single target
returns:
summary report
"""
# stre is a formula string of the dependent and independent columns
stre = '{} ~'.format(target_column)
for i in data.columns:
stre = stre + "{} +".format(i)
stre = stre[0:-1] #to remove the last + sign
reg_ols = sm.ols(formula=stre, data=data).fit()
return reg_ols.summary()
#------------------------------------------------------------------------------------------------------------------------------
def na_per_data(self,data):
"""
data -> dataframe
returns:
the percentage of Nan values in the whole dataset
"""
per = (((data.isnull().sum()).sum())/ np.product(data.shape))*100
return per
class fillnan():
def __init__(self):
pass
def simpleimputer(self,df):
"""
df -> dataframe
this function fills the NaN values with simple statistics (mean, mode)
depending on the data type of the column
*also consider filling by median manually before using this method*
returns:
the dataframe after editing
"""
for i in df.columns:
if df[i].isnull().any() == True :
if df[i].dtypes == "object":
if len(df[i].unique()) <= 10:
df[i].fillna(df[i].mode()[0],inplace=True)
if len(df[i].unique()) > 10 :
df[i].dropna(inplace=True)
if df[i].dtypes == "int64" or df[i].dtypes == "int32" or df[i].dtypes == "float64":
if len(df[i].unique()) <= 10 :
df[i].fillna(df[i].mode()[0],inplace=True)
if len(df[i].unique()) > 10 :
df[i].fillna(df[i].mean(),inplace=True)
else:
df[i].dropna(inplace=True)
return df
#------------------------------------------------------------------------------------------------------------------------------
def hyperimputer(self,df,metric = "Linear Regression"): # there is also "Random Forests"
"""
df -> dataframe
metric -> "Linear Regression" or "Random Forests"; the model used to fill your numeric NaN values
this function combines simple imputation with model-based imputation:
object columns are imputed with the mode, while numeric columns are imputed
with linear regression or random forest predictions
*also consider filling by median manually before using this method*
returns:
the dataframe after editing
"""
for i in df.columns:
if df[i].isnull().any() == True :
if df[i].dtypes == "object":
if len(df[i].unique()) <= 10:
df[i].fillna(df[i].mode()[0],inplace=True)
if len(df[i].unique()) > 10 :
df[i].dropna(inplace=True)
if df[i].dtypes == "int64" or df[i].dtypes == "int32" or df[i].dtypes == "float64":
if len(df[i].unique()) > 10:
df,_ = fill_lin_rand(df,metric,[i])
else:
df[i].dropna(inplace=True)
return df
#------------------------------------------------------------------------------------------------------------------------------
def fill_by_nex(self,df,columns=[]):
"""
df -> dataframe
columns -> a list of columns you want to fill
this one fills NaN values with the next valid value (backward fill)
returns:
the dataframe after editing
"""
for i in columns:
df[i] = df[i].fillna(method='bfill', axis=0).fillna(0)
return df
#------------------------------------------------------------------------------------------------------------------------------
def fill_by_perv(self,df,columns=[]):
"""
df -> dataframe
columns -> a list of columns you want to fill
this one fills NaN values with the previous valid value (forward fill)
returns:
the dataframe after editing
"""
for i in columns:
df[i] = df[i].fillna(method='ffill', axis=0).fillna(0)
return df
class label():
def __init__(self):
pass
def to_category(self,df):
"""
convert object-dtype columns into category-dtype columns
df-> dataframe
returns:
the dataframe after editing
"""
cols = df.select_dtypes(include='object').columns
for col in cols:
ratio = len(df[col].value_counts()) / len(df)
if ratio < 0.05:
df[col] = df[col].astype('category')
return df
#------------------------------------------------------------------------------------------------------------------------------
def freq_labeling(self,df=None,column=None):
"""
replace each object value with its relative frequency in the given column
df -> dataframe
column -> column you want to apply this method on
returns:
the dataframe after editing
"""
df = df.copy()
freq = (df[column].value_counts() /len(df))
d={}
for i in freq.index:
d[i] = freq[i]
df[column] = df[column].map(d)
return df
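# Illustrative usage sketch (not part of the original module): convert low-cardinality
# object columns to category dtype, then frequency-encode a hypothetical "city" column.
# df = label().to_category(df)
# df = label().freq_labeling(df=df, column="city")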
class Clean_Data():
def __init__(self):
pass
def reduce_mem_usage(self,props):
"""
this function reduces the memory usage of a dataset
props-> dataset you want to reduce
returns:
the dataframe after editing and Nan values list
"""
start_mem_usg = props.memory_usage().sum() / 1024**2
print("Memory usage of properties dataframe is :",start_mem_usg," MB")
NAlist = [] # Keeps track of columns that have missing values filled in.
for col in props.columns:
if props[col].dtype not in [object, bool]: # Exclude strings
# Print current column type
print("******************************")
print("Column: ",col)
print("dtype before: ",props[col].dtype)
# make variables for Int, max and min
IsInt = False
mx = props[col].max()
mn = props[col].min()
'''
# Integer does not support NA, therefore, NA needs to be filled
if not np.isfinite(props[col]).all():
NAlist.append(col)
props[col].fillna(mn-1,inplace=True)
'''
# test if column can be converted to an integer
asint = props[col].fillna(0).astype(np.int64)
result = (props[col] - asint)
result = result.sum()
if result > -0.01 and result < 0.01:
IsInt = True
# Make Integer/unsigned Integer datatypes
if IsInt:
if mn >= 0:
if mx < 255:
props[col] = props[col].astype(np.uint8)
elif mx < 65535:
props[col] = props[col].astype(np.uint16)
elif mx < 4294967295:
props[col] = props[col].astype(np.uint32)
else:
props[col] = props[col].astype(np.uint64)
else:
if mn > np.iinfo(np.int8).min and mx < np.iinfo(np.int8).max:
props[col] = props[col].astype(np.int8)
elif mn > np.iinfo(np.int16).min and mx < np.iinfo(np.int16).max:
props[col] = props[col].astype(np.int16)
elif mn > np.iinfo(np.int32).min and mx < np.iinfo(np.int32).max:
props[col] = props[col].astype(np.int32)
elif mn > np.iinfo(np.int64).min and mx < np.iinfo(np.int64).max:
props[col] = props[col].astype(np.int64)
# Make float datatypes 32 bit
else:
props[col] = props[col].astype(np.float32)
# Print new column type
print("dtype after: ",props[col].dtype)
print("******************************")
# Print final result
print("___MEMORY USAGE AFTER COMPLETION:___")
mem_usg = props.memory_usage().sum() / 1024**2
print("Memory usage is: ",mem_usg," MB")
print("This is ",100*mem_usg/start_mem_usg,"% of the initial size")
return props, NAlist
#-----------------------------------------------------------------------------------------------------------------------------
def replace_matches(self,df, column, string_to_match, min_ratio = 47):
"""
if there are similar words in different formats due to data entry errors,
you can apply this function to calculate a similarity score and then unify them
df -> dataframe
column -> column to edit
string_to_match -> string you want to match and replace with
min_ratio -> minimum similarity ratio required to replace a value
returns:
the dataframe after editing
"""
# get a list of unique strings
strings = df[column].unique()
# get the top 10 closest matches to our input string
matches = fuzzywuzzy.process.extract(string_to_match, strings,
limit=10, scorer=fuzzywuzzy.fuzz.token_sort_ratio)
# only keep matches with a ratio >= min_ratio
close_matches = [matches[0] for matches in matches if matches[1] >= min_ratio]
# get the rows of all the close matches in our dataframe
rows_with_matches = df[column].isin(close_matches)
# replace all rows with close matches with the input matches
df.loc[rows_with_matches, column] = string_to_match
# let us know the function's done
print("All done!")
return df
#-----------------------------------------------------------------------------------------------------------------------------
def drop_missing(self,df,thresh=55):
"""
drop a column when its non-missing values fall below thresh percent of the rows
df -> dataframe
thresh -> minimum percentage of non-missing values a column needs in order to be kept
returns:
the dataframe after editing
"""
thresh = len(df) * (thresh/100)
df.dropna(axis=1, thresh=thresh, inplace=True)
return df
#------------------------------------------------------------------------------------------------------------------------------
def log_features(self,df=None):
"""
log the data to remove large gaps between the data
after or before removing outliers
df -> dataframe you want to apply log function to
returns:
dataset after applying log
"""
if 0 in df.values:
df= np.log1p(df)
if 0 not in df.values:
df= np.log(df)
df[df == -np.inf] = 0
return df
#------------------------------------------------------------------------------------------------------------------------------
def dealing_with_outliers(self,df , type_o = "z-score"):
"""
this function deals and removes outliers with z-score and Inter-Quartile Range
method
hint : XGBoost deals with outliers quite well (a strong machine learning model)
df -> dataframe
type_o -> type of the method you want to choose
returns:
the dataframe after editing
"""
if type_o == "z-score":
# z-score range.
z = np.abs(stats.zscore(df))
df = df[(z < 3).all(axis=1)]
if type_o == "IQR" :
#Inter-Quartile Range
Q1 = df.quantile(0.25)
Q3 = df.quantile(0.75)
IQR = Q3 - Q1
df = df[~((df < (Q1 - 1.5 * IQR)) |(df > (Q3 + 1.5 * IQR))).any(axis=1)]
return df
#-----------------------------------------------------------------------------------------------------------------------------------
def normalize(self,df,columns=[]):
"""
normalize columns by your choice in a dataframe
df -> dataframe
columns -> list of columns to normalize its values
returns:
the dataframe after editing
"""
for i in columns:
df[i], _ = stats.boxcox(df[i])
return df
#----------------------------------------------------------------------------------------------------------------------------------
def en_de(self, data, decode=False, typeo="utf-8"):
"""
encode and decode any string format
data -> the bytes to decode or the string to encode
decode -> False to encode the string, True to decode the bytes
typeo -> the codec to encode to or decode from (e.g. "utf-8" or "ascii")
returns:
the result after encoding or decoding
"""
if decode:
return data.decode(typeo)
else:
return data.encode(typeo, errors="replace")
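# Illustrative end-to-end sketch (not part of the original module); `df` is a
# hypothetical pandas DataFrame loaded elsewhere.
# cleaner = Clean_Data()
# df, na_cols = cleaner.reduce_mem_usage(df)
# df = cleaner.drop_missing(df, thresh=55)
# df = fillnan().simpleimputer(df)
# df = cleaner.dealing_with_outliers(df, type_o="IQR")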
| 33.483209
| 132
| 0.49518
|
f6866565bc88f9b17a173f3cbb541af55018de35
| 27,437
|
py
|
Python
|
azure-iot-device/azure/iot/device/iothub/pipeline/mqtt_pipeline.py
|
cartertinney/azure-iot-sdk-python
|
a5572b93047b4a54c5b990d9e25905398418c4fd
|
[
"MIT"
] | null | null | null |
azure-iot-device/azure/iot/device/iothub/pipeline/mqtt_pipeline.py
|
cartertinney/azure-iot-sdk-python
|
a5572b93047b4a54c5b990d9e25905398418c4fd
|
[
"MIT"
] | null | null | null |
azure-iot-device/azure/iot/device/iothub/pipeline/mqtt_pipeline.py
|
cartertinney/azure-iot-sdk-python
|
a5572b93047b4a54c5b990d9e25905398418c4fd
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import logging
import sys
from azure.iot.device.common.evented_callback import EventedCallback
from azure.iot.device.common.pipeline import (
pipeline_events_base,
pipeline_stages_base,
pipeline_ops_base,
pipeline_stages_mqtt,
pipeline_exceptions,
)
from . import (
constant,
pipeline_stages_iothub,
pipeline_events_iothub,
pipeline_ops_iothub,
pipeline_stages_iothub_mqtt,
)
logger = logging.getLogger(__name__)
class MQTTPipeline(object):
def __init__(self, pipeline_configuration):
"""
Constructor for instantiating a pipeline adapter object
:param auth_provider: The authentication provider
:param pipeline_configuration: The configuration generated based on user inputs
"""
self.feature_enabled = {
constant.C2D_MSG: False,
constant.INPUT_MSG: False,
constant.METHODS: False,
constant.TWIN: False,
constant.TWIN_PATCHES: False,
}
# Handlers - Will be set by Client after instantiation of this object
self.on_connected = None
self.on_disconnected = None
self.on_new_sastoken_required = None
self.on_background_exception = None
self.on_c2d_message_received = None
self.on_input_message_received = None
self.on_method_request_received = None
self.on_twin_patch_received = None
# Currently a single timeout stage and a single retry stage for MQTT retry only.
# Later, a higher level timeout and a higher level retry stage.
self._pipeline = (
#
# The root is always the root. By definition, it's the first stage in the pipeline.
#
pipeline_stages_base.PipelineRootStage(pipeline_configuration)
#
# SasTokenStage comes near the root by default because it should be as close
# to the top of the pipeline as possible, and does not need to be after anything.
#
.append_stage(pipeline_stages_base.SasTokenStage())
#
# EnsureDesiredPropertiesStage needs to be above TwinRequestResponseStage because it
# sends GetTwinOperation ops and that stage handles those ops.
#
.append_stage(pipeline_stages_iothub.EnsureDesiredPropertiesStage())
#
# TwinRequestResponseStage comes near the root by default because it doesn't need to be
# after anything
#
.append_stage(pipeline_stages_iothub.TwinRequestResponseStage())
#
# CoordinateRequestAndResponseStage needs to be after TwinRequestResponseStage because
# TwinRequestResponseStage creates the request ops that CoordinateRequestAndResponseStage
# is coordinating. It needs to be before IoTHubMQTTTranslationStage because that stage
# operates on ops that CoordinateRequestAndResponseStage produces
#
.append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
#
# IoTHubMQTTTranslationStage comes here because this is the point where we can translate
# all operations directly into MQTT. After this stage, only pipeline_stages_base stages
# are allowed because IoTHubMQTTTranslationStage removes all the IoTHub-ness from the ops
#
.append_stage(pipeline_stages_iothub_mqtt.IoTHubMQTTTranslationStage())
#
# AutoConnectStage comes here because only MQTT ops have the need_connection flag set
# and this is the first place in the pipeline where we can guarantee that all network
# ops are MQTT ops.
#
.append_stage(pipeline_stages_base.AutoConnectStage())
#
# ReconnectStage needs to be after AutoConnectStage because ReconnectStage sets/clears
# the virtually_connected flag and we want an automatic connection op to set this flag so
# we can reconnect autoconnect operations. This is important, for example, if a
# send_message causes the transport to automatically connect, but that connection fails.
# When that happens, the ReconnectStage will hold onto the ConnectOperation until it
# succeeds, and only then will return success to the AutoConnectStage which will
# allow the publish to continue.
#
.append_stage(pipeline_stages_base.ReconnectStage())
#
# ConnectionLockStage needs to be after ReconnectStage because we want any ops that
# ReconnectStage creates to go through the ConnectionLockStage gate
#
.append_stage(pipeline_stages_base.ConnectionLockStage())
#
# RetryStage needs to be near the end because it's retrying low-level MQTT operations.
#
.append_stage(pipeline_stages_base.RetryStage())
#
# OpTimeoutStage needs to be after RetryStage because OpTimeoutStage returns the timeout
# errors that RetryStage is watching for.
#
.append_stage(pipeline_stages_base.OpTimeoutStage())
#
# MQTTTransportStage needs to be at the very end of the pipeline because this is where
# operations turn into network traffic
#
.append_stage(pipeline_stages_mqtt.MQTTTransportStage())
)
# Define behavior for domain-specific events
def _on_pipeline_event(event):
if isinstance(event, pipeline_events_iothub.C2DMessageEvent):
if self.on_c2d_message_received:
self.on_c2d_message_received(event.message)
else:
logger.error("C2D message event received with no handler. dropping.")
elif isinstance(event, pipeline_events_iothub.InputMessageEvent):
if self.on_input_message_received:
self.on_input_message_received(event.message)
else:
logger.error("input message event received with no handler. dropping.")
elif isinstance(event, pipeline_events_iothub.MethodRequestEvent):
if self.on_method_request_received:
self.on_method_request_received(event.method_request)
else:
logger.error("Method request event received with no handler. Dropping.")
elif isinstance(event, pipeline_events_iothub.TwinDesiredPropertiesPatchEvent):
if self.on_twin_patch_received:
self.on_twin_patch_received(event.patch)
else:
logger.error("Twin patch event received with no handler. Dropping.")
else:
logger.error("Dropping unknown pipeline event {}".format(event.name))
def _on_connected():
if self.on_connected:
self.on_connected()
else:
logger.debug("IoTHub Pipeline was connected, but no handler was set")
def _on_disconnected():
if self.on_disconnected:
self.on_disconnected()
else:
logger.debug("IoTHub Pipeline was disconnected, but no handler was set")
def _on_new_sastoken_required():
if self.on_new_sastoken_required:
self.on_new_sastoken_required()
else:
logger.debug("IoTHub Pipeline requires new SASToken, but no handler was set")
def _on_background_exception(e):
if self.on_background_exception:
self.on_background_exception(e)
else:
logger.debug(
"IoTHub Pipeline experienced background exception, but no handler was set"
)
# Set internal event handlers
self._pipeline.on_pipeline_event_handler = _on_pipeline_event
self._pipeline.on_connected_handler = _on_connected
self._pipeline.on_disconnected_handler = _on_disconnected
self._pipeline.on_new_sastoken_required_handler = _on_new_sastoken_required
self._pipeline.on_background_exception_handler = _on_background_exception
# Initialize the pipeline
callback = EventedCallback()
op = pipeline_ops_base.InitializePipelineOperation(callback=callback)
self._pipeline.run_op(op)
callback.wait_for_completion()
# Set the running flag
self._running = True
def _verify_running(self):
if not self._running:
raise pipeline_exceptions.PipelineNotRunning(
"Cannot execute method - Pipeline is not running"
)
def shutdown(self, callback):
"""Shut down the pipeline and clean up any resources.
Once shut down, making any further calls on the pipeline will result in a
PipelineNotRunning exception being raised.
There is currently no way to resume pipeline functionality once shutdown has occurred.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has already been shut down
The shutdown process itself is not expected to fail under any normal condition, but if it
does, exceptions are not "raised", but rather, returned via the "error" parameter when
invoking "callback".
"""
self._verify_running()
logger.debug("Commencing shutdown of pipeline")
def on_complete(op, error):
if not error:
# Only set the pipeline to not be running if the op was successful
self._running = False
callback(error=error)
# NOTE: While we do run this operation, its functionality is incomplete. Some stages still
# need a response to this operation implemented. Additionally, there are other pipeline
# constructs other than Stages (e.g. Operations) which may have timers attached. These are
# lesser issues, but should be addressed at some point.
# TODO: Truly complete the shutdown implementation
self._pipeline.run_op(pipeline_ops_base.ShutdownPipelineOperation(callback=on_complete))
def connect(self, callback):
"""
Connect to the service.
:param callback: callback which is called when the connection attempt is complete.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.OperationTimeout`
"""
self._verify_running()
logger.debug("Starting ConnectOperation on the pipeline")
def on_complete(op, error):
callback(error=error)
self._pipeline.run_op(pipeline_ops_base.ConnectOperation(callback=on_complete))
def disconnect(self, callback):
"""
Disconnect from the service.
Note that even if this fails for some reason, the client will be in a disconnected state.
:param callback: callback which is called when the disconnection is complete.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
"""
self._verify_running()
logger.debug("Starting DisconnectOperation on the pipeline")
def on_complete(op, error):
callback(error=error)
self._pipeline.run_op(pipeline_ops_base.DisconnectOperation(callback=on_complete))
def reauthorize_connection(self, callback):
"""
Reauthorize connection to the service by disconnecting and then reconnecting using
fresh credentials.
This can be called regardless of connection state. If successful, the client will be
connected. If unsuccessful, the client will be disconnected.
:param callback: callback which is called when the reauthorization attempt is complete.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.OperationTimeout`
"""
self._verify_running()
logger.debug("Starting ReauthorizeConnectionOperation on the pipeline")
def on_complete(op, error):
callback(error=error)
self._pipeline.run_op(
pipeline_ops_base.ReauthorizeConnectionOperation(callback=on_complete)
)
def send_message(self, message, callback):
"""
Send a telemetry message to the service.
:param message: message to send.
:param callback: callback which is called when the message publish attempt is complete.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.NoConnectionError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
The following exceptions can be returned via the "error" parameter only if auto-connect
is enabled in the pipeline configuration:
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.OperationTimeout`
"""
self._verify_running()
logger.debug("Starting SendD2CMessageOperation on the pipeline")
def on_complete(op, error):
callback(error=error)
self._pipeline.run_op(
pipeline_ops_iothub.SendD2CMessageOperation(message=message, callback=on_complete)
)
def send_output_message(self, message, callback):
"""
Send an output message to the service.
:param message: message to send.
:param callback: callback which is called when the message publish attempt is complete.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.NoConnectionError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
The following exceptions can be returned via the "error" parameter only if auto-connect
is enabled in the pipeline configuration:
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.OperationTimeout`
"""
self._verify_running()
logger.debug("Starting SendOutputMessageOperation on the pipeline")
def on_complete(op, error):
callback(error=error)
self._pipeline.run_op(
pipeline_ops_iothub.SendOutputMessageOperation(message=message, callback=on_complete)
)
def send_method_response(self, method_response, callback):
"""
Send a method response to the service.
:param method_response: the method response to send
:param callback: callback which is called when response publish attempt is complete.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.NoConnectionError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
The following exceptions can be returned via the "error" parameter only if auto-connect
is enabled in the pipeline configuration:
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.OperationTimeout`
"""
self._verify_running()
logger.debug("Starting SendMethodResponseOperation on the pipeline")
def on_complete(op, error):
callback(error=error)
self._pipeline.run_op(
pipeline_ops_iothub.SendMethodResponseOperation(
method_response=method_response, callback=on_complete
)
)
def get_twin(self, callback):
"""
Send a request for a full twin to the service.
:param callback: callback which is called when request attempt is complete.
This callback should have two parameters. On success, this callback is called with the
requested twin and error=None. On failure, this callback is called with None for the
requested twin and error set to the cause of the failure.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.NoConnectionError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
The following exceptions can be returned via the "error" parameter only if auto-connect
is enabled in the pipeline configuration:
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.OperationTimeout`
"""
self._verify_running()
logger.debug("Starting GetTwinOperation on the pipeline")
def on_complete(op, error):
if error:
callback(error=error, twin=None)
else:
callback(twin=op.twin)
self._pipeline.run_op(pipeline_ops_iothub.GetTwinOperation(callback=on_complete))
def patch_twin_reported_properties(self, patch, callback):
"""
Send a patch for a twin's reported properties to the service.
:param patch: the reported properties patch to send
:param callback: callback which is called when the request attempt is complete.
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.NoConnectionError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
The following exceptions can be returned via the "error" parameter only if auto-connect
is enabled in the pipeline configuration:
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.OperationTimeout`
"""
self._verify_running()
logger.debug("Starting PatchTwinReportedPropertiesOperation on the pipeline")
def on_complete(op, error):
callback(error=error)
self._pipeline.run_op(
pipeline_ops_iothub.PatchTwinReportedPropertiesOperation(
patch=patch, callback=on_complete
)
)
# NOTE: Currently, this operation will retry itself indefinitely in the case of timeout
def enable_feature(self, feature_name, callback):
"""
Enable the given feature by subscribing to the appropriate topics.
:param feature_name: one of the feature name constants from constant.py
:param callback: callback which is called when the feature is enabled
:raises: ValueError if feature_name is invalid
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.NoConnectionError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
The following exceptions can be returned via the "error" parameter only if auto-connect
is enabled in the pipeline configuration:
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
"""
self._verify_running()
logger.debug("enable_feature {} called".format(feature_name))
if feature_name not in self.feature_enabled:
raise ValueError("Invalid feature_name")
# TODO: What about if the feature is already enabled?
def on_complete(op, error):
if error:
logger.error("Subscribe for {} failed. Not enabling feature".format(feature_name))
else:
self.feature_enabled[feature_name] = True
callback(error=error)
self._pipeline.run_op(
pipeline_ops_base.EnableFeatureOperation(
feature_name=feature_name, callback=on_complete
)
)
# NOTE: Currently, this operation will retry itself indefinitely in the case of timeout
def disable_feature(self, feature_name, callback):
"""
Disable the given feature by unsubscribing from the appropriate topics.
:param callback: callback which is called when the feature is disabled
:param feature_name: one of the feature name constants from constant.py
:raises: ValueError if feature_name is invalid
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.PipelineNotRunning` if the
pipeline has previously been shut down
The following exceptions are not "raised", but rather returned via the "error" parameter
when invoking "callback":
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.NoConnectionError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
The following exceptions can be returned via the "error" parameter only if auto-connect
is enabled in the pipeline configuration:
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
:raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
"""
self._verify_running()
logger.debug("disable_feature {} called".format(feature_name))
if feature_name not in self.feature_enabled:
raise ValueError("Invalid feature_name")
# TODO: What about if the feature is already disabled?
def on_complete(op, error):
if error:
logger.warning(
"Error occurred while disabling feature. Unclear if subscription for {} is still alive or not".format(
feature_name
)
)
# No matter what, mark the feature as disabled, even if there was an error.
# This is safer than only marking it disabled upon operation success, because an op
# could fail after successfully doing the network operations to change the subscription
# state, and then we would be stuck in a bad state.
self.feature_enabled[feature_name] = False
callback(error=error)
self._pipeline.run_op(
pipeline_ops_base.DisableFeatureOperation(
feature_name=feature_name, callback=on_complete
)
)
@property
def pipeline_configuration(self):
"""
Pipeline Configuration for the pipeline. Note that while a new config object cannot be
provided (read-only), the values stored in the config object CAN be changed.
"""
return self._pipeline.pipeline_configuration
@property
def connected(self):
"""
Read-only property to indicate if the transport is connected or not.
"""
return self._pipeline.connected
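# Illustrative usage sketch (not part of the original file): the client layer
# builds the pipeline from a configuration object and drives it through
# callbacks; `pipeline_configuration` and `message` are assumed to be created
# by the caller.
# pipeline = MQTTPipeline(pipeline_configuration)
# pipeline.connect(callback=lambda error=None: print("connected, error:", error))
# pipeline.send_message(message, callback=lambda error=None: print("sent, error:", error))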
| 45.425497
| 122
| 0.673652
|
09ab4fcb966cbbbab3bf2fdd796716e7f6de990c
| 14,995
|
py
|
Python
|
plataforma/views.py
|
bodedev/prospera
|
4ce39b0ee4ae32b3584157f23a5f94f340892980
|
[
"MIT"
] | 3
|
2017-05-11T17:48:41.000Z
|
2017-10-04T01:53:35.000Z
|
plataforma/views.py
|
bodedev/prospera
|
4ce39b0ee4ae32b3584157f23a5f94f340892980
|
[
"MIT"
] | null | null | null |
plataforma/views.py
|
bodedev/prospera
|
4ce39b0ee4ae32b3584157f23a5f94f340892980
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from braces.views import AjaxResponseMixin, JSONResponseMixin
from datetime import datetime
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout, update_session_auth_hash
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AdminPasswordChangeForm, PasswordChangeForm
from django.contrib.auth.views import LoginView
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponseRedirect, HttpResponseServerError
from django.utils.decorators import method_decorator
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import CreateView, DeleteView, DetailView, FormView, ListView, TemplateView, UpdateView
from plataforma.constants import ETHER_DIVISOR
from plataforma.forms import ProsperaLoginForm, NodoForm, NodosForm, ObjetoForm, SignUpForm
from plataforma.models import Nodo, Nodos, Objeto, Saldo
import requests
class LandingPageView(TemplateView):
template_name = "pages/landing_page.html"
class LandingLastOperationsByTokenView(TemplateView):
template_name = "pages/partial_landing_history.html"
def get_context_data(self, **kwargs):
context = super(LandingLastOperationsByTokenView, self).get_context_data(**kwargs)
context["transacoes"] = []
try:
r = requests.get("https://api.ethplorer.io/getTokenHistory/%s/?apiKey=freekey" % (settings.ETHERSCAN_CONTRACT_ADDRESS, ))
if r.status_code == 200:
data = r.json()
for operation in data["operations"]:
context["transacoes"].append({
"date": datetime.fromtimestamp(float(operation["timestamp"])),
"value": int(operation["value"]) / float(ETHER_DIVISOR),
"from": operation["from"],
"to": operation["to"],
})
except Exception, e:
pass
return context
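# Minimal sketch of the Ethplorer getTokenHistory payload shape assumed by the parsing
# above; the sample operation below is illustrative, not a real transaction.
def _example_parse_token_history():
    sample = {
        "operations": [
            {"timestamp": 1507075200, "value": "1250000000000000000",
             "from": "0xabc", "to": "0xdef"},
        ]
    }
    return [{
        "date": datetime.fromtimestamp(float(op["timestamp"])),
        "value": int(op["value"]) / float(ETHER_DIVISOR),
        "from": op["from"],
        "to": op["to"],
    } for op in sample["operations"]]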
class LandingBalanceByTokenView(ListView):
model = Saldo
ordering = ("-total", )
template_name = "pages/partial_landing_balance.html"
@method_decorator(csrf_exempt, name='dispatch')
class LoginWithAjaxView(AjaxResponseMixin, JSONResponseMixin, LoginView):
form_class = ProsperaLoginForm
def post_ajax(self, request, *args, **kwargs):
form = self.get_form()
if form.is_valid():
login(self.request, form.get_user())
return self.render_json_response({})
else:
return self.render_json_response(form.errors.as_json(), status=400)
@method_decorator(csrf_exempt, name='dispatch')
class NoCreateView(AjaxResponseMixin, JSONResponseMixin, FormView):
form_class = SignUpForm
template_name = "pages/no_create_form.html"
def create_the_user(self, form):
user = form.save()
user.refresh_from_db() # load the nodo instance created by the signal
user.nodo.quem_sou = form.cleaned_data.get('quem_sou')
user.save()
raw_password = form.cleaned_data.get('password1')
user = authenticate(username=user.username, password=raw_password)
login(self.request, user)
messages.success(self.request, u'Bem-vindo a Prospera!')
return user
def form_valid(self, form):
user = self.create_the_user(form)
return HttpResponseRedirect(reverse_lazy("no_detail_public", args=[user.id]))
def post_ajax(self, request, *args, **kwargs):
form = self.get_form()
if form.is_valid():
user = self.create_the_user(form)
return self.render_json_response({"next": reverse_lazy("no_detail_public", args=[user.id])})
else:
return self.render_json_response(form.errors.as_json(), status=400)
class NoDetailView(DetailView):
fields = ["quem_sou"]
template_name = "pages/no_detail.html"
def get_object(self, queryset=None):
if "pk" in self.kwargs:
return get_object_or_404(Nodo, user__id=self.kwargs["pk"])
return self.request.user.nodo
class NoDetailTransactionView(TemplateView):
template_name = "pages/partial_no_transactions.html"
def get_context_data(self, **kwargs):
context = super(NoDetailTransactionView, self).get_context_data(**kwargs)
context["transactions"] = []
nodo = self.get_object()
try:
url = "https://api.ethplorer.io/getAddressHistory/%s?apiKey=freekey" % (nodo.carteira.lower())
r = requests.get(url)
data = r.json()
for t in data["operations"]:
context["transactions"].append(
{
"date": datetime.fromtimestamp(float(t["timestamp"])),
"value": int(t["value"]) / float(ETHER_DIVISOR),
"to": t["to"],
"from": t["from"],
"in_or_out": "in" if t["to"] == nodo.carteira.lower() else "out"
}
)
except:
pass
return context
def get_object(self, queryset=None):
try:
nodo = Nodo.objects.get(user__id=self.kwargs["pk"])
if nodo.carteira:
return nodo
except:
pass
return HttpResponseServerError
class NoDetailSummaryView(TemplateView):
template_name = "pages/partial_no_status.html"
def get_context_data(self, **kwargs):
context = super(NoDetailSummaryView, self).get_context_data(**kwargs)
nodo = self.get_object()
context["nodo"] = nodo
try:
r = requests.get("https://api.etherscan.io/api?module=account&action=tokenbalance&contractaddress=%s&address=%s&tag=latest&apikey=%s" % (settings.ETHERSCAN_CONTRACT_ADDRESS, nodo.carteira, settings.ETHERSCAN_APIKEY))
if r.status_code == 200:
data = r.json()
if data["status"] == "1":
context["quanto_ganhou_com_a_prospera"] = float(data["result"]) / float(1000000000)
except Exception, e:
pass
quanto_contribuiu_com_a_prospera = 0
quanto_recebeu_como_nos = 0
context["quanto_contribuiu_com_a_prospera"] = quanto_contribuiu_com_a_prospera
context["quanto_recebeu_como_nos"] = quanto_recebeu_como_nos
return context
def get_object(self, queryset=None):
try:
nodo = Nodo.objects.get(user__id=self.kwargs["pk"])
if nodo.carteira:
return nodo
except:
pass
return HttpResponseServerError
class TotalProsperEmitidosSummaryView(TemplateView):
template_name = "pages/partial_landing_total.html"
def get_context_data(self, **kwargs):
context = super(TotalProsperEmitidosSummaryView, self).get_context_data(**kwargs)
try:
r = requests.get("https://api.etherscan.io/api?module=stats&action=tokensupply&contractaddress=%s&apikey=%s" % (settings.ETHERSCAN_CONTRACT_ADDRESS, settings.ETHERSCAN_APIKEY))
if r.status_code == 200:
data = r.json()
if data["status"] == "1":
context["total_emitido"] = int(data["result"]) / float(ETHER_DIVISOR)
except:
            # Initial condition (fallback value)
context["total_emitido"] = 781.25
return context
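# Minimal sketch of the Etherscan tokensupply response shape that the branch above
# relies on; the numeric value here is illustrative only.
def _example_parse_token_supply():
    data = {"status": "1", "message": "OK", "result": "781250000000000000000"}
    if data["status"] == "1":
        return int(data["result"]) / float(ETHER_DIVISOR)
    return None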
@method_decorator(login_required, name='dispatch')
class NoDeleteView(DeleteView):
model = Nodo
slug_url_kwarg = "no"
template_name = "pages/no_delete_form.html"
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
self.object.user.is_active = False
self.object.user.save()
self.object.delete()
logout(request)
messages.success(request, u"Seu perfil foi excluído!")
return HttpResponseRedirect(reverse_lazy("home"))
def get_context_data(self, **kwargs):
context = super(NoDeleteView, self).get_context_data(**kwargs)
context["action"] = u'Excluir'
context["comunidades_list"] = Nodos.objects.filter(criado_por=self.request.user)
context["objetos_list"] = Objeto.objects.filter(criado_por=self.request.user)
return context
def get_object(self, queryset=None):
self.object = super(NoDeleteView, self).get_object(queryset)
if self.object.user != self.request.user:
raise PermissionDenied
return self.object
@method_decorator(login_required, name='dispatch')
class NoEditView(UpdateView):
form_class = NodoForm
model = Nodo
slug_url_kwarg = "no"
template_name = "pages/no_edit_form.html"
def get_context_data(self, **kwargs):
context = super(NoEditView, self).get_context_data(**kwargs)
context["action"] = u'Salvar'
return context
def get_object(self, queryset=None):
return self.request.user.nodo
def get_success_url(self):
return reverse_lazy("no_detail_public", args=[self.request.user.id])
class NoListView(ListView):
model = Nodo
template_name = "pages/no_list.html"
def get_queryset(self):
queryset = super(NoListView, self).get_queryset()
return queryset.exclude(user__id__in=[4])
@method_decorator(login_required, name='dispatch')
class UserChangePassword(FormView):
template_name = "pages/no_change_password.html"
def get_form_class(self):
if self.request.user.has_usable_password():
return PasswordChangeForm
return AdminPasswordChangeForm
def get_form_kwargs(self):
kwargs = super(UserChangePassword, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def form_valid(self, form):
form.save()
update_session_auth_hash(self.request, form.user)
messages.success(self.request, u'Sua senha foi alterada!')
return HttpResponseRedirect(reverse_lazy('no_detail_public', args=[self.request.user.id]))
@method_decorator(login_required, name='dispatch')
class NosCreateView(CreateView):
form_class = NodosForm
model = Nodos
template_name = "pages/nos_create_form.html"
def form_valid(self, form):
nos = form.save(commit=False)
nos.criado_por = self.request.user
nos.save()
messages.success(self.request, u'Comunidade criada com sucesso!')
return HttpResponseRedirect(reverse_lazy("nos_detail", args=[nos.slug]))
class NosListView(ListView):
model = Nodos
template_name = "pages/nos_list.html"
@method_decorator(login_required, name='dispatch')
class NosDeleteView(DeleteView):
model = Nodos
slug_url_kwarg = "nos"
template_name = "pages/nos_delete_form.html"
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
self.object.delete()
messages.success(request, u"A Comunidade foi excluída!")
return HttpResponseRedirect(reverse_lazy("nos_list"))
def get_context_data(self, **kwargs):
context = super(NosDeleteView, self).get_context_data(**kwargs)
context["action"] = u'Excluir'
context["objetos_list"] = Objeto.objects.filter(nodos=self.object)
return context
def get_object(self, queryset=None):
self.object = super(NosDeleteView, self).get_object(queryset)
if self.object.criado_por != self.request.user:
raise PermissionDenied
return self.object
class NosDetailView(DetailView):
context_object_name = "nos"
model = Nodos
slug_url_kwarg = "nos"
template_name = "pages/nos_detail.html"
@method_decorator(login_required, name='dispatch')
class NosEditView(UpdateView):
form_class = NodosForm
model = Nodos
slug_url_kwarg = "nos"
success_url = reverse_lazy("nos_list")
template_name = "pages/nos_create_form.html"
def get_context_data(self, **kwargs):
context = super(NosEditView, self).get_context_data(**kwargs)
context["action"] = u'Salvar'
return context
def get_object(self, queryset=None):
self.object = super(NosEditView, self).get_object(queryset)
if self.object.criado_por != self.request.user:
raise PermissionDenied
return self.object
@method_decorator(login_required, name='dispatch')
class ObjectCreateView(CreateView):
form_class = ObjetoForm
model = Objeto
template_name = "pages/object_create_form.html"
def form_valid(self, form):
objeto = form.save(commit=False)
objeto.nodos = Nodos.objects.get(slug=self.kwargs["nos"])
objeto.criado_por = self.request.user
objeto.save()
messages.success(self.request, u'Objeto criado com sucesso!')
return HttpResponseRedirect(reverse_lazy("object_detail", args=[objeto.nodos.slug, objeto.slug]))
@method_decorator(login_required, name='dispatch')
class ObjectDeleteView(DeleteView):
model = Objeto
slug_url_kwarg = "objeto"
template_name = "pages/object_delete_form.html"
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
comunidade_slug = self.object.nodos.slug
self.object.delete()
messages.success(request, u"O objeto foi excluído!")
return HttpResponseRedirect(reverse_lazy("nos_detail", args=[comunidade_slug]))
def get_context_data(self, **kwargs):
context = super(ObjectDeleteView, self).get_context_data(**kwargs)
context["action"] = u'Excluir'
return context
def get_object(self, queryset=None):
self.object = super(ObjectDeleteView, self).get_object(queryset)
if self.object.criado_por != self.request.user:
raise PermissionDenied
return self.object
class ObjectDetailView(DetailView):
context_object_name = "objeto"
model = Objeto
slug_url_kwarg = "objeto"
template_name = "pages/object_detail.html"
@method_decorator(login_required, name='dispatch')
class ObjectEditView(UpdateView):
form_class = ObjetoForm
model = Objeto
slug_url_kwarg = "objeto"
template_name = "pages/object_create_form.html"
def get_context_data(self, **kwargs):
context = super(ObjectEditView, self).get_context_data(**kwargs)
context["action"] = u'Salvar'
return context
def get_object(self, queryset=None):
self.object = super(ObjectEditView, self).get_object(queryset)
if self.object.criado_por != self.request.user:
raise PermissionDenied
return self.object
def get_success_url(self):
return reverse_lazy("object_detail", kwargs={"nos": self.object.nodos.slug, "objeto": self.object.slug})
class ObjectListView(ListView):
model = Objeto
template_name = "pages/object_list.html"
| 34.23516
| 228
| 0.667889
|
c518279217a707b7c4807f13603a058b97b582bc
| 12,375
|
py
|
Python
|
mdma/fit.py
|
dargilboa/MDMA
|
8ed4afc731936f695d6e320d804e5157e8eb8a71
|
[
"MIT"
] | 6
|
2021-06-10T22:31:28.000Z
|
2022-02-04T14:32:41.000Z
|
mdma/fit.py
|
dargilboa/MDMA
|
8ed4afc731936f695d6e320d804e5157e8eb8a71
|
[
"MIT"
] | 2
|
2021-06-30T22:08:11.000Z
|
2021-10-10T04:49:27.000Z
|
mdma/fit.py
|
dargilboa/MDMA
|
8ed4afc731936f695d6e320d804e5157e8eb8a71
|
[
"MIT"
] | 1
|
2021-07-29T03:14:36.000Z
|
2021-07-29T03:14:36.000Z
|
# Copyright © 2021 Dar Gilboa, Ari Pakman and Thibault Vatter
# This file is part of the mdma library and licensed under the terms of the MIT license.
# For a copy, see the LICENSE file in the root directory.
import torch as t
import torch.optim as optim
from mdma import models
from mdma import utils
import argparse
import datetime
import json
import os
import time
from typing import List
if t.cuda.is_available():
t.set_default_tensor_type('torch.cuda.FloatTensor')
device = "cuda"
else:
print('No GPU found')
t.set_default_tensor_type('torch.FloatTensor')
device = "cpu"
def get_default_h() -> argparse.Namespace:
""" Get default argument parser.
Returns:
A namespace of parsed arguments.
"""
h_parser = argparse.ArgumentParser()
# data
h_parser.add_argument('--d', type=int, default=2)
h_parser.add_argument('--M', type=int, default=None)
h_parser.add_argument('--dataset', type=str, default=None)
h_parser.add_argument('--missing_data_pct', type=float, default=0.0)
# for the causal discovery experiment
h_parser.add_argument('--causal_mechanism', type=str, default=None)
# architecture
h_parser.add_argument('--m', type=int, default=1000)
h_parser.add_argument('--r', type=int, default=3)
h_parser.add_argument('--l', type=int, default=2)
h_parser.add_argument('--use_HT', type=utils.str2bool, default=True)
h_parser.add_argument('--use_MERA', type=utils.str2bool, default=False)
h_parser.add_argument('--HT_poolsize', type=int, default=2)
h_parser.add_argument('--adaptive_coupling',
type=utils.str2bool,
default=False)
h_parser.add_argument('--mix_vars', type=utils.str2bool, default=False)
h_parser.add_argument('--n_mix_terms', type=int, default=1)
# initialization
h_parser.add_argument('--w_std', type=float, default=1.0)
h_parser.add_argument('--w_bias', type=float, default=1.0)
h_parser.add_argument('--b_std', type=float, default=0)
h_parser.add_argument('--b_bias', type=float, default=0)
h_parser.add_argument('--a_std', type=float, default=1)
# fitting
h_parser.add_argument('--n_epochs', '-ne', type=int, default=1000)
h_parser.add_argument('--batch_size', '-b', type=int, default=500)
h_parser.add_argument('--lambda_l2', type=float, default=0)
h_parser.add_argument('--opt',
type=str,
default='adam',
choices=['adam', 'sgd'])
h_parser.add_argument('--lr', type=float, default=.01)
h_parser.add_argument('--patience', '-p', type=int, default=30)
h_parser.add_argument('--es_patience', '-esp', type=int, default=50)
h_parser.add_argument('--stable_nll_iters', type=int, default=5)
h_parser.add_argument('--gaussian_noise', type=float, default=0)
h_parser.add_argument('--subsample_inds', type=utils.str2bool, default=False)
h_parser.add_argument('--n_inds_to_subsample', type=int, default=20)
# logging
h_parser.add_argument('--data_dir', type=str, default='data')
h_parser.add_argument('--use_tb', type=utils.str2bool, default=False)
h_parser.add_argument('--tb_dir', type=str, default='data/tb')
h_parser.add_argument('--exp_name', type=str, default='')
h_parser.add_argument('--add_dt_str', type=utils.str2bool, default=True)
h_parser.add_argument('--model_to_load', '-mtl', type=str, default='')
h_parser.add_argument('--set_detect_anomaly',
'-sde',
type=utils.str2bool,
default=False)
h_parser.add_argument('--save_checkpoints',
'-sc',
type=utils.str2bool,
default=False)
h_parser.add_argument('--save_path', type=str, default='data/checkpoint')
h_parser.add_argument('--eval_validation', type=utils.str2bool, default=True)
h_parser.add_argument('--eval_test',
'-et',
type=utils.str2bool,
default=True)
h_parser.add_argument('--verbose', '-v', type=utils.str2bool, default=True)
h_parser.add_argument('--print_every', '-pe', type=int, default=20)
h_parser.add_argument('--max_iters', type=int, default=None)
h = h_parser.parse_known_args()[0]
return h
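# Minimal sketch of overriding the parsed defaults programmatically; the dimension,
# sample count and batch size below are illustrative values, not recommendations.
def example_override_defaults():
  h = get_default_h()
  h.d = 8  # data dimension (--d)
  h.M = 10000  # number of training samples (--M)
  h.batch_size = 256
  return h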
def print_category(key):
categories = {
'd': 'Data',
'causal_mechanism': 'Causal discovery only',
'm': 'Architecture',
'w_std': 'Initialization',
'n_epochs': 'Fitting',
'data_dir': 'Logging'
}
category = categories.get(key, None)
  if category is not None:
print(f" {category}:")
def print_arguments(h):
print('Arguments:')
for key, value in h.__dict__.items():
print_category(key)
print(f' {key}: {value}')
def fit_mdma(
h: argparse.Namespace,
data: List[t.utils.data.DataLoader],
) -> models.MDMA:
""" Fit MDMA model to data using stochastic gradient descent on the negative log likelihood.
Args:
h: Argument parser containing training and model hyperparameters.
data: List of training, validation and test dataloaders.
Returns:
Fitted MDMA model.
"""
n_iters = h.n_epochs * h.M // h.batch_size
save_path = h.save_path
if h.use_tb:
tb_path = get_tb_path(h)
if not os.path.isdir(tb_path):
os.mkdir(tb_path)
with open(tb_path + '/h.json', 'w') as f:
json.dump(h.__dict__, f, indent=4, sort_keys=True)
save_path = tb_path
from tensorboardX import SummaryWriter
writer = SummaryWriter(tb_path)
print('Saving tensorboard logs to ' + tb_path)
model, optimizer, scheduler, start_epoch, use_stable_nll = initialize(h)
model, optimizer, scheduler, start_epoch, use_stable_nll = load_checkpoint(
model, optimizer, scheduler, start_epoch, use_stable_nll, save_path)
total_params = sum(p.numel() for p in model.parameters())
print(
f"Running {n_iters} iterations. Model contains {total_params} parameters."
)
h.total_params = total_params
print_arguments(h)
# Set up data loaders
train_loader, val_loader, test_loader = data
t.autograd.set_detect_anomaly(h.set_detect_anomaly)
# Fit MDMA
if h.use_HT and h.adaptive_coupling:
set_adaptive_coupling(h, model, train_loader)
clip_max_norm = 0
step = start_epoch * len(train_loader)
  inds = ...  # Ellipsis selects all variables; overridden below when subsample_inds is set
missing_data_mask = None
use_stable_nll = True
tic = time.time()
es = utils.EarlyStopping(patience=h.es_patience)
for epoch in range(start_epoch, h.n_epochs):
for batch_idx, batch in enumerate(train_loader):
batch_data = batch[0][:, 0, :].to(device)
if h.missing_data_pct > 0:
missing_data_mask = batch[0][:, 1, :].to(device)
if h.subsample_inds:
inds = t.randperm(h.d)[:h.n_inds_to_subsample]
if step == h.stable_nll_iters:
use_stable_nll = False
for param in model.parameters():
param.grad = None
obj = model.nll(batch_data[:, inds],
inds=inds,
stabilize=use_stable_nll,
missing_data_mask=missing_data_mask)
nll_value = obj.item()
obj.backward()
if clip_max_norm > 0:
t.nn.utils.clip_grad_value_(model.parameters(), clip_max_norm)
optimizer.step()
if not h.eval_validation:
scheduler.step(obj)
if h.verbose and step % h.print_every == 0:
print_str = f'Iteration {step}, train nll: {nll_value:.4f}'
toc = time.time()
print_str += f', elapsed: {toc - tic:.4f}, {h.print_every / (toc - tic):.4f} iterations per sec.'
tic = time.time()
print(print_str)
if h.use_tb:
writer.add_scalar('lr', optimizer.param_groups[0]['lr'], step)
writer.add_scalar('loss/train', nll_value, step)
step += 1
if step == h.max_iters:
print(f'Terminating after {h.max_iters} iterations.')
return model
if h.save_checkpoints:
cp_file = save_path + '/checkpoint.pt'
print('Saving model to ' + cp_file)
t.save(
{
'model': model,
'optimizer': optimizer.state_dict(),
'scheduler': scheduler.state_dict(),
'epoch': epoch + 1,
'use_stable_nll': use_stable_nll
}, cp_file)
if h.eval_test:
test_nll = eval_test(model, test_loader)
print(f'Epoch {epoch}, test nll: {test_nll:.4f}')
if h.use_tb:
writer.add_scalar('loss/test', test_nll, epoch)
if h.eval_validation:
val_nll = eval_validation(model, val_loader, h)
scheduler.step(val_nll)
print(f'Epoch {epoch}, validation nll: {val_nll:.4f}')
if h.use_tb:
writer.add_scalar('loss/validation', val_nll, epoch)
if es.step(val_nll):
print('Early stopping criterion met, terminating.')
return model
return model
def eval_validation(model, loader, h):
with t.no_grad():
val_nll = 0
nll = model.nll
for batch_idx, batch in enumerate(loader):
batch_data = batch[0][:, 0, :].to(device)
if h.missing_data_pct > 0:
missing_data_mask = batch[0][:, 1, :].to(device)
val_nll += nll(batch_data, missing_data_mask=missing_data_mask).item()
else:
val_nll += nll(batch_data).item()
return val_nll / (batch_idx + 1)
def eval_test(model, loader):
with t.no_grad():
test_nll = 0
nll = model.nll
for batch_idx, batch in enumerate(loader):
batch_data = batch[0].to(device)
test_nll += nll(batch_data).item()
return test_nll / (batch_idx + 1)
def initialize(h):
model = models.MDMA(
h.d,
m=h.m,
l=h.l,
r=h.r,
w_std=h.w_std,
w_bias=h.w_bias,
b_std=h.b_std,
b_bias=h.b_bias,
a_std=h.a_std,
use_HT=h.use_HT,
use_MERA=h.use_MERA,
adaptive_coupling=h.adaptive_coupling,
HT_poolsize=h.HT_poolsize,
mix_vars=h.mix_vars,
n_mix_terms=h.n_mix_terms,
)
if h.opt == 'adam':
opt_type = optim.Adam
elif h.opt == 'sgd':
opt_type = optim.SGD
else:
raise NameError
optimizer = opt_type(model.parameters(),
lr=h.lr,
weight_decay=h.lambda_l2,
amsgrad=True)
scheduler = t.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
verbose=True,
patience=h.patience,
min_lr=1e-4,
factor=0.5)
start_epoch = 0
use_stable_nll = True
return model, optimizer, scheduler, start_epoch, use_stable_nll
def load_checkpoint(model, optimizer, scheduler, epoch, use_stable_nll,
save_path):
cp_file = save_path + '/checkpoint.pt'
if os.path.isfile(cp_file):
print('Loading model from ' + cp_file)
checkpoint = t.load(cp_file)
model = checkpoint['model']
optimizer.load_state_dict(checkpoint['optimizer'])
scheduler.load_state_dict(checkpoint['scheduler'])
epoch = checkpoint['epoch']
use_stable_nll = checkpoint[
'use_stable_nll'] # assumes # stable iters < # iters in 1 epoch
else:
print('No model to load.')
return model, optimizer, scheduler, epoch, use_stable_nll
def get_tb_path(h):
fields = [
'dataset', 'm', 'r', 'l', 'mix_vars', 'batch_size', 'n_epochs', 'lr',
'patience', 'missing_data_pct'
]
folder_name = [f'{utils.shorten(f)}:{h.__dict__[f]}' for f in fields]
if h.add_dt_str:
dt_str = str(datetime.datetime.now())[:-7].replace(' ',
'-').replace(':', '-')
folder_name += [dt_str]
if h.exp_name != '':
folder_name = [h.exp_name] + folder_name
folder_name = '_'.join(folder_name)
  folder_name = folder_name.replace('.', 'p')  # str.replace returns a new string, so reassign
tb_path = h.tb_dir + '/' + folder_name
return tb_path
def set_adaptive_coupling(h, model, train_loader):
# Couple variables in HT decomposition based on correlations
n_batches = 10 * h.d**2 // h.batch_size + 1
train_iter = iter(train_loader)
if h.missing_data_pct > 0:
# Multiply by mask
batches = [t.prod(next(train_iter)[0], dim=1) for _ in range(n_batches)]
else:
batches = [next(train_iter)[0][:, 0, :] for _ in range(n_batches)]
model.create_adaptive_couplings(batches)
print('Using adaptive variable coupling')
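# Minimal end-to-end sketch, assuming train/validation/test DataLoaders whose batches
# follow the layout used in fit_mdma above (batch[0][:, 0, :] holds the data and, when
# missing_data_pct > 0, batch[0][:, 1, :] holds the missing-data mask).
def example_fit(train_loader, val_loader, test_loader, d, n_train):
  h = get_default_h()
  h.d = d  # number of variables
  h.M = n_train  # number of training samples, used to compute the iteration count
  return fit_mdma(h, [train_loader, val_loader, test_loader])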
| 33.355795
| 105
| 0.63604
|
58f5bea79a68ef74af717970850407c2c308e01f
| 1,919
|
py
|
Python
|
exercises/knowledge_databases.py
|
alice19-meet/y2s18-databases
|
6cf15a7a1a5e802686d5d351caebf9e92312c068
|
[
"MIT"
] | null | null | null |
exercises/knowledge_databases.py
|
alice19-meet/y2s18-databases
|
6cf15a7a1a5e802686d5d351caebf9e92312c068
|
[
"MIT"
] | null | null | null |
exercises/knowledge_databases.py
|
alice19-meet/y2s18-databases
|
6cf15a7a1a5e802686d5d351caebf9e92312c068
|
[
"MIT"
] | null | null | null |
from knowledge_model import Base, Knowledge
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
engine = create_engine('sqlite:///knowledge.db')
Base.metadata.create_all(engine)
DBSession = sessionmaker(bind=engine)
session = DBSession()
def add_article(name, title, rating):
article_object = Knowledge(
name=name,
title=title,
rating=rating)
session.add(article_object)
session.commit()
# add_article("Noam", "music", 9)
# add_article("Rakan", "chemistry", 8)
# add_article("Leen", "architecture", 7)
def query_all_articles():
article=session.query(
Knowledge).all()
return article
#print (query_all_articles())
def query_article_by_topic(their_name):
article=session.query(
Knowledge).filter_by(
name=their_name).first()
return article
#print(query_article_by_topic("Noam"))
def query_article_by_rating(threshold):
rating=threshold
knowledge= query_all_articles()
rated_list=[]
for article in knowledge:
if article.rating<threshold:
rated_list.append(article)
return rated_list
#print(query_article_by_rating(8))
def query_article_by_primary_key(primary_key):
article=session.query(
Knowledge).filter_by(
site_id=primary_key).first()
return article
#print(query_article_by_primary_key(3))
# def delete_article_by_topic(topic):
# session.query(Knowledge).filter_by(
# title=topic).delete()
# session.commit()
# def delete_all_articles():
# session.query(Knowledge).delete()
# session.commit()
def edit_article_rating(updated_rating, article_title):
article = session.query(
Knowledge).filter_by(
title=article_title).all()
for i in range(len(article)):
article[i].rating=updated_rating
session.commit()
edit_article_rating(10, "music")
print(query_all_articles())
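# Minimal usage sketch combining the helpers above; the article values are
# illustrative only, and calling this writes a row to knowledge.db.
def example_usage():
    add_article("Ada", "databases", 9)
    highly_rated = [a.title for a in query_all_articles() if a.rating >= 8]
    return highly_rated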
| 23.9875
| 55
| 0.705055
|
94ae6f56c5525cf64b9850ee94d28f11fc38bce5
| 445
|
py
|
Python
|
rlutils/infra/runner/tf_runner.py
|
vermouth1992/rlutils
|
a326373b9e39dbf147c6c4261b82a688d4dc3e78
|
[
"Apache-2.0"
] | null | null | null |
rlutils/infra/runner/tf_runner.py
|
vermouth1992/rlutils
|
a326373b9e39dbf147c6c4261b82a688d4dc3e78
|
[
"Apache-2.0"
] | null | null | null |
rlutils/infra/runner/tf_runner.py
|
vermouth1992/rlutils
|
a326373b9e39dbf147c6c4261b82a688d4dc3e78
|
[
"Apache-2.0"
] | null | null | null |
import os
import tensorflow as tf
from .base import BaseRunner, OffPolicyRunner, OnPolicyRunner
class TFRunner(BaseRunner):
def setup_global_seed(self):
super(TFRunner, self).setup_global_seed()
tf.random.set_seed(seed=self.seeder.generate_seed())
os.environ['TF_DETERMINISTIC_OPS'] = '1'
class TFOffPolicyRunner(OffPolicyRunner, TFRunner):
pass
class TFOnPolicyRunner(OnPolicyRunner, TFRunner):
pass
| 21.190476
| 61
| 0.74382
|
2356515467d70b907be40cb821fd2e2baf799df6
| 22,583
|
py
|
Python
|
trestle/transforms/implementations/tanium.py
|
PritamDutt/compliance-trestle
|
7edadde2bd2949e73a085bd78ef57995250fc9cb
|
[
"Apache-2.0"
] | null | null | null |
trestle/transforms/implementations/tanium.py
|
PritamDutt/compliance-trestle
|
7edadde2bd2949e73a085bd78ef57995250fc9cb
|
[
"Apache-2.0"
] | null | null | null |
trestle/transforms/implementations/tanium.py
|
PritamDutt/compliance-trestle
|
7edadde2bd2949e73a085bd78ef57995250fc9cb
|
[
"Apache-2.0"
] | null | null | null |
# -*- mode:python; coding:utf-8 -*-
# Copyright (c) 2021 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Facilitate Tanium report to NIST OSCAL transformation."""
import datetime
import json
import logging
import multiprocessing
import os
import traceback
import uuid
from typing import Any, Dict, List, Optional, ValuesView
from trestle.oscal.assessment_results import ControlSelection
from trestle.oscal.assessment_results import LocalDefinitions1
from trestle.oscal.assessment_results import Observation
from trestle.oscal.assessment_results import Result
from trestle.oscal.assessment_results import ReviewedControls
from trestle.oscal.assessment_results import Status1
from trestle.oscal.assessment_results import SystemComponent
from trestle.oscal.common import ImplementedComponent, InventoryItem, Property, SubjectReference
from trestle.transforms.results import Results
from trestle.transforms.transformer_factory import ResultsTransformer
from trestle.transforms.transformer_helper import TransformerHelper
logger = logging.getLogger(__name__)
class TaniumTransformer(ResultsTransformer):
"""Interface for Tanium transformer."""
def __init__(self) -> None:
"""Initialize."""
self._modes = {}
@property
def analysis(self) -> List[str]:
"""Return analysis info."""
return self._analysis
@property
def blocksize(self):
"""Return blocksize."""
return self._modes.get('blocksize', 10000)
@property
def cpus_max(self):
"""Return cpus_max."""
return self._modes.get('cpus_max', 1)
@property
def cpus_min(self):
"""Return cpus_min."""
return self._modes.get('cpus_min', 1)
@property
def checking(self):
"""Return checking."""
return self._modes.get('checking', False)
def set_modes(self, modes: Dict[str, Any]) -> None:
"""Keep modes info."""
if modes is not None:
self._modes = modes
def transform(self, blob: str) -> Results:
"""Transform the blob into a Results."""
ts0 = datetime.datetime.now()
results = Results()
ru_factory = RuleUseFactory(self.get_timestamp())
ru_list = ru_factory.make_list(blob)
tanium_oscal_factory = TaniumOscalFactory(
self.get_timestamp(), ru_list, self.blocksize, self.cpus_max, self.cpus_min, self.checking
)
results.__root__ = tanium_oscal_factory.results
ts1 = datetime.datetime.now()
self._analysis = tanium_oscal_factory.analysis
self._analysis.append(f'transform time: {ts1-ts0}')
return results
class RuleUse():
"""Represents one row of Tanium data."""
def __init__(self, tanium_row: Dict[str, Any], comply: Dict[str, str], default_timestamp: str) -> None:
"""Initialize given specified args."""
logger.debug(f'tanium-row: {tanium_row}')
try:
# level 1 keys
self.computer_name = tanium_row['Computer Name']
self.tanium_client_ip_address = tanium_row['Tanium Client IP Address']
self.ip_address = str(tanium_row['IP Address'])
self.count = str(tanium_row['Count'])
# comply keys
self.check_id = comply['Check ID']
self.rule_id = comply['Rule ID']
self.state = comply['State']
# defaults
no_results = '[no results]'
self.check_id_level = no_results
self.check_id_version = no_results
self.check_id_benchmark = no_results
self.component = no_results
self.component_type = no_results
# parse
if ';' in self.check_id:
items = self.check_id.split(';')
if len(items) > 2:
self.check_id_level = items[2]
if len(items) > 1:
self.check_id_version = items[1]
if len(items) > 0:
self.check_id_benchmark = items[0]
self.component = items[0]
if self.component.startswith('CIS '):
self.component = self.component[len('CIS '):]
if self.component.endswith(' Benchmark'):
self.component = self.component[:-len(' Benchmark')]
self.component_type = 'Operating System'
# timestamp
self.timestamp = comply.get('Timestamp', default_timestamp)
# collected
self.collected = default_timestamp
except Exception as e:
logger.debug(f'tanium-row: {tanium_row}')
logger.debug(e)
logger.debug(traceback.format_exc())
raise e
class RuleUseFactory():
"""Build RuleUse list."""
def __init__(self, timestamp: str) -> None:
"""Initialize given specified args."""
self._timestamp = timestamp
def _make_sublist(self, tanium_row: Dict[str, Any]) -> List[RuleUse]:
"""Build RuleUse sublist from input data item."""
retval = []
keys = tanium_row.keys()
for key in keys:
if key.startswith('Comply'):
break
comply_list = tanium_row[key]
for comply in comply_list:
rule_use = RuleUse(tanium_row, comply, self._timestamp)
retval.append(rule_use)
return retval
def make_list(self, blob: str) -> List[RuleUse]:
"""Build RuleUse list from input data."""
retval = []
lines = blob.splitlines()
for line in lines:
line = line.strip()
if line:
jdata = json.loads(line)
if type(jdata) is list:
for item in jdata:
logger.debug(f'item: {item}')
retval += self._make_sublist(item)
else:
logger.debug(f'jdata: {jdata}')
retval += self._make_sublist(jdata)
logger.debug(f'ru_list: {len(retval)}')
return retval
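# Minimal sketch of one line of Tanium input in the shape make_list expects: a JSON
# list of objects whose "Comply..." key holds per-rule results. The key and values
# below are illustrative; only the 'Comply' prefix matters to _make_sublist.
def _example_tanium_line() -> str:
    item = {
        'Computer Name': 'host-1',
        'Tanium Client IP Address': '10.0.0.1',
        'IP Address': '10.0.0.1',
        'Count': 1,
        'Comply - Example Findings': [{
            'Check ID': 'CIS Example Benchmark;1.0.0;Level 1',
            'Rule ID': 'example_rule_1_1_1',
            'State': 'pass'
        }]
    }
    return json.dumps([item])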
def _uuid() -> str:
"""Create uuid."""
return str(uuid.uuid4())
def _uuid_component() -> str:
"""Create uuid for component."""
return _uuid()
def _uuid_inventory() -> str:
"""Create uuid for inventory."""
return _uuid()
def _uuid_observation() -> str:
"""Create uuid for observation."""
return _uuid()
def _uuid_result() -> str:
"""Create uuid for result."""
return _uuid()
class TaniumOscalFactory():
"""Build Tanium OSCAL entities."""
def __init__(
self,
timestamp: str,
rule_use_list: List[RuleUse],
blocksize: int = 11000,
cpus_max: int = 1,
cpus_min: int = 1,
checking: bool = False
) -> None:
"""Initialize given specified args."""
self._rule_use_list = rule_use_list
self._timestamp = timestamp
self._component_map = {}
self._inventory_map = {}
self._observation_list = []
self._ns = 'https://ibm.github.io/compliance-trestle/schemas/oscal/ar/tanium'
self._cpus = None
self._checking = checking
self._result = None
# blocksize: default, min
self._blocksize = blocksize
if self._blocksize < 1:
self._blocksize = 1
# cpus max: default, max, min
self._cpus_max = cpus_max
if self._cpus_max > os.cpu_count():
self._cpus_max = os.cpu_count()
self._cpus_min = cpus_min
if self._cpus_min > self._cpus_max:
self._cpus_min = self._cpus_max
if self._cpus_min < 1:
self._cpus_min = 1
def _is_duplicate_component(self, rule_use: RuleUse) -> bool:
"""Check for duplicate component."""
retval = False
component_type = rule_use.component_type
component_title = rule_use.component
for component in self._component_map.values():
if component.type != component_type:
continue
if component.title != component_title:
continue
retval = True
break
return retval
def _derive_components(self) -> Dict[str, ValuesView[InventoryItem]]:
"""Derive components from RuleUse list."""
self._component_map = {}
for rule_use in self._rule_use_list:
if self._is_duplicate_component(rule_use):
continue
component_type = rule_use.component_type
component_title = rule_use.component
# See Note in _get_component_ref.
component_description = rule_use.component
component_ref = _uuid_component()
status = Status1(state='operational')
component = SystemComponent(
uuid=component_ref,
type=component_type,
title=component_title,
description=component_description,
status=status
)
self._component_map[component_ref] = component
def _get_component_ref(self, rule_use: RuleUse) -> Optional[str]:
"""Get component reference for specified rule use."""
uuid = None
for component_ref, component in self._component_map.items():
if component.type != rule_use.component_type:
continue
if component.title != rule_use.component:
continue
# Note: currently title and description are the same,
# therefore checking description is not necessary.
uuid = component_ref
break
return uuid
def _derive_inventory(self) -> Dict[str, InventoryItem]:
"""Derive inventory from RuleUse list."""
self._inventory_map = {}
if self._checking:
self._derive_inventory_checked()
else:
self._derive_inventory_unchecked()
def _derive_inventory_checked(self) -> Dict[str, InventoryItem]:
"""Derive inventory from RuleUse list, properties checked."""
self._inventory_map = {}
for rule_use in self._rule_use_list:
if rule_use.tanium_client_ip_address in self._inventory_map:
continue
inventory = InventoryItem(uuid=_uuid_inventory(), description='inventory')
inventory.props = [
Property.construct(name='Computer_Name', value=rule_use.computer_name, ns=self._ns),
Property.construct(
name='Tanium_Client_IP_Address',
value=rule_use.tanium_client_ip_address,
ns=self._ns,
class_='scc_inventory_item_id'
),
Property.construct(name='IP_Address', value=rule_use.ip_address, ns=self._ns),
Property.construct(name='Count', value=rule_use.count, ns=self._ns)
]
component_uuid = self._get_component_ref(rule_use)
if component_uuid is not None:
inventory.implemented_components = [ImplementedComponent(component_uuid=component_uuid)]
self._inventory_map[rule_use.tanium_client_ip_address] = inventory
def _derive_inventory_unchecked(self) -> Dict[str, InventoryItem]:
"""Derive inventory from RuleUse list, properties unchecked."""
self._inventory_map = {}
for rule_use in self._rule_use_list:
if rule_use.tanium_client_ip_address in self._inventory_map:
continue
inventory = InventoryItem(uuid=_uuid_inventory(), description='inventory')
inventory.props = [
Property.construct(name='Computer_Name', value=rule_use.computer_name, ns=self._ns),
Property.construct(
name='Tanium_Client_IP_Address',
value=rule_use.tanium_client_ip_address,
ns=self._ns,
class_='scc_inventory_item_id'
),
Property.construct(name='IP_Address', value=rule_use.ip_address, ns=self._ns),
Property.construct(name='Count', value=rule_use.count, ns=self._ns)
]
component_uuid = self._get_component_ref(rule_use)
if component_uuid is not None:
inventory.implemented_components = [ImplementedComponent(component_uuid=component_uuid)]
self._inventory_map[rule_use.tanium_client_ip_address] = inventory
def _get_inventory_ref(self, rule_use: RuleUse) -> str:
"""Get inventory reference for specified rule use."""
return self._inventory_map[rule_use.tanium_client_ip_address].uuid
def _get_observtion_properties(self, rule_use):
"""Get observation properties."""
if self._checking:
return self._get_observtion_properties_checked(rule_use)
else:
return self._get_observtion_properties_unchecked(rule_use)
def _get_observtion_properties_checked(self, rule_use):
"""Get observation properties, with checking."""
props = [
Property(name='Check_ID', value=rule_use.check_id, ns=self._ns),
Property(
name='Check_ID_Benchmark',
value=rule_use.check_id_benchmark,
ns=self._ns,
class_='scc_predefined_profile'
),
Property(
name='Check_ID_Version',
value=rule_use.check_id_version,
ns=self._ns,
class_='scc_predefined_profile_version'
),
Property(name='Check_ID_Level', value=rule_use.check_id_level, ns=self._ns),
Property(name='Rule_ID', value=rule_use.rule_id, ns=self._ns, class_='scc_goal_description'),
Property(name='Rule_ID', value=rule_use.rule_id, ns=self._ns, class_='scc_check_name_id'),
Property(name='State', value=rule_use.state, ns=self._ns, class_='scc_result'),
Property(name='Timestamp', value=rule_use.timestamp, ns=self._ns, class_='scc_timestamp'),
]
return props
def _get_observtion_properties_unchecked(self, rule_use):
"""Get observation properties, without checking."""
props = [
Property.construct(name='Check_ID', value=rule_use.check_id, ns=self._ns),
Property.construct(
name='Check_ID_Benchmark',
value=rule_use.check_id_benchmark,
ns=self._ns,
class_='scc_predefined_profile'
),
Property.construct(
name='Check_ID_Version',
value=rule_use.check_id_version,
ns=self._ns,
class_='scc_predefined_profile_version'
),
Property.construct(name='Check_ID_Level', value=rule_use.check_id_level, ns=self._ns),
Property.construct(name='Rule_ID', value=rule_use.rule_id, ns=self._ns, class_='scc_goal_description'),
Property.construct(name='Rule_ID', value=rule_use.rule_id, ns=self._ns, class_='scc_check_name_id'),
Property.construct(name='State', value=rule_use.state, ns=self._ns, class_='scc_result'),
Property.construct(name='Timestamp', value=rule_use.timestamp, ns=self._ns, class_='scc_timestamp'),
]
return props
# parallel process to process one chuck of entire data set
def _batch_observations(self, index: int):
"""Derive batch of observations from RuleUse list."""
observation_partial_list = []
# determine which chunk to process
batch_size = (len(self._rule_use_list) // self._batch_workers) + 1
start = index * batch_size
end = (index + 1) * batch_size
end = min(end, len(self._rule_use_list))
logger.debug(f'start: {start} end: {end-1}')
# process just the one chunk
for i in range(start, end):
rule_use = self._rule_use_list[i]
observation = Observation(
uuid=_uuid_observation(),
description=rule_use.rule_id,
methods=['TEST-AUTOMATED'],
collected=rule_use.collected
)
subject_reference = SubjectReference(subject_uuid=self._get_inventory_ref(rule_use), type='inventory-item')
observation.subjects = [subject_reference]
observation.props = self._get_observtion_properties(rule_use)
observation_partial_list.append(observation)
return observation_partial_list
@property
def _batch_workers(self) -> int:
"""Calculate number of parallel processes to employ."""
if self._cpus is None:
cpus_estimate = len(self._rule_use_list) // self._blocksize
self._cpus = max(min(cpus_estimate, self._cpus_max), self._cpus_min)
logger.debug(f'CPUs estimate: {cpus_estimate} available: {os.cpu_count()} selection: {self._cpus}')
return self._cpus
def _derive_observations(self) -> List[Observation]:
"""Derive observations from RuleUse list."""
self._observation_list = []
if self._batch_workers == 1:
# no need for multiprocessing
self._observation_list = self._batch_observations(0)
else:
# use multiprocessing to perform observations creation in parallel
pool = multiprocessing.Pool(processes=self._batch_workers)
rval_list = pool.map(self._batch_observations, range(self._batch_workers))
# gather observations from the sundry batch workers
for observations_partial_list in rval_list:
self._observation_list += observations_partial_list
@property
def components(self) -> List[SystemComponent]:
"""OSCAL components."""
return list(self._component_map.values())
@property
def inventory(self) -> ValuesView[InventoryItem]:
"""OSCAL inventory."""
return self._inventory_map.values()
@property
def observations(self) -> List[Observation]:
"""OSCAL observations."""
return self._observation_list
@property
def control_selections(self) -> List[ControlSelection]:
"""OSCAL control selections."""
rval = []
rval.append(ControlSelection())
return rval
@property
def reviewed_controls(self) -> ReviewedControls:
"""OSCAL reviewed controls."""
rval = ReviewedControls(control_selections=self.control_selections)
return rval
@property
def analysis(self) -> List[str]:
"""OSCAL statistics."""
analysis = []
analysis.append(f'components: {len(self.components)}')
analysis.append(f'inventory: {len(self.inventory)}')
analysis.append(f'observations: {len(self.observations)}')
return analysis
def _get_local_definitions(self, system_component: SystemComponent) -> LocalDefinitions1:
"""Get local definitions."""
rval = LocalDefinitions1()
for component in self.components:
if component.uuid == system_component.uuid:
rval.components = [component]
rval.inventory_items = []
for inventory_item in self.inventory:
for implemented_component in inventory_item.implemented_components:
if implemented_component.component_uuid == system_component.uuid:
rval.inventory_items.append(inventory_item)
break
return rval
def _get_observation_subject_uuids(self, observation: Observation) -> List[str]:
"""Get subject uuids for given observation."""
return [subject.subject_uuid for subject in observation.subjects]
def _get_local_definitions_uuids(self, local_definitions: LocalDefinitions1) -> List[str]:
"""Get inventory uuids for given local definitions."""
if local_definitions.inventory_items:
rval = [inventory_item.uuid for inventory_item in local_definitions.inventory_items]
else:
rval = []
return rval
def _is_matching_uuid(self, observation_subject_uuids: List[str], local_definitions_uuids: List[str]) -> bool:
"""Check if any given observation uuid is present in given local definition uuids."""
return len(list(set(observation_subject_uuids) & set(local_definitions_uuids))) > 0
def _get_observations(self, local_definitions: LocalDefinitions1) -> List[Observation]:
"""Get observations for given local definitions."""
rval = []
local_definitions_uuids = self._get_local_definitions_uuids(local_definitions)
for observation in self.observations:
observation_subject_uuids = self._get_observation_subject_uuids(observation)
if self._is_matching_uuid(observation_subject_uuids, local_definitions_uuids):
rval.append(observation)
return rval
@property
def results(self) -> List[Result]:
"""OSCAL result."""
if self._result is None:
self._derive_components()
self._derive_inventory()
self._derive_observations()
results = []
for component in self.components:
local_definitions = self._get_local_definitions(component)
observations = self._get_observations(local_definitions)
result = Result(
uuid=_uuid_result(),
title='Tanium',
description='Tanium',
start=self._timestamp,
end=self._timestamp,
reviewed_controls=self.reviewed_controls,
local_definitions=local_definitions,
observations=observations
)
result_properties_list = TransformerHelper().remove_common_observation_properties(observations)
if result_properties_list:
result.prop = result_properties_list
results.append(result)
return results
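# Minimal usage sketch, assuming `blob` holds the newline-delimited JSON export of a
# Tanium report (reading the report file itself is not shown here).
def _example_transform(blob: str) -> Results:
    transformer = TaniumTransformer()
    transformer.set_modes({'blocksize': 10000, 'cpus_max': 1, 'checking': False})
    results = transformer.transform(blob)
    logger.debug('; '.join(transformer.analysis))
    return results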
| 40.616906
| 119
| 0.622946
|
f3a81ca5ff6e2c53d5300b80b97d2adf2aedbe89
| 1,114
|
py
|
Python
|
remindvoca/urls.py
|
zinuzian-portfolio/RemindVoca
|
aaa7aa004b9f213af486a9d8cad941b49d698e7d
|
[
"MIT"
] | 4
|
2019-07-31T10:03:17.000Z
|
2019-10-06T06:03:33.000Z
|
remindvoca/urls.py
|
zinuzian-portfolio/RemindVoca
|
aaa7aa004b9f213af486a9d8cad941b49d698e7d
|
[
"MIT"
] | 3
|
2019-07-23T17:10:13.000Z
|
2019-07-30T19:27:40.000Z
|
remindvoca/urls.py
|
zinuzian-portfolio/RemindVoca
|
aaa7aa004b9f213af486a9d8cad941b49d698e7d
|
[
"MIT"
] | 11
|
2019-07-14T06:17:31.000Z
|
2019-10-06T07:24:41.000Z
|
"""remindvoca URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('accounts/', include('accounts.urls', namespace='account')),
path('accounts/', include('django.contrib.auth.urls')),
path('', include('accounts.urls')),
path('voca/', include('voca.urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| 39.785714
| 77
| 0.712747
|
41a738f0a21de3f1f0a9a723e34c660a316bae90
| 819
|
py
|
Python
|
var/spack/repos/builtin/packages/r-multcompview/package.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/r-multcompview/package.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 8
|
2021-11-09T20:28:40.000Z
|
2022-03-15T03:26:33.000Z
|
var/spack/repos/builtin/packages/r-multcompview/package.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2
|
2019-02-08T20:37:20.000Z
|
2019-03-31T15:19:26.000Z
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMultcompview(RPackage):
"""Visualizations of Paired Comparisons.
Convert a logical vector or a vector of p-values or a correlation,
difference, or distance matrix into a display identifying the pairs for
which the differences were not significantly different. Designed for use in
conjunction with the output of functions like TukeyHSD, dist{stats},
simint, simtest, csimint, csimtest{multcomp}, friedmanmc,
kruskalmc{pgirmess}."""
cran = "multcompView"
version('0.1-8', sha256='123d539172ad6fc63d83d1fc7f356a5ed7b691e7803827480118bebc374fd8e5')
| 37.227273
| 95
| 0.765568
|
41f6bec459542c6a182247318714bd15b324089f
| 408
|
py
|
Python
|
setup.py
|
saurabh6790/shopify_broker
|
62969b70a56be0f74aa20ef0d31fbea5fa932bc5
|
[
"MIT"
] | null | null | null |
setup.py
|
saurabh6790/shopify_broker
|
62969b70a56be0f74aa20ef0d31fbea5fa932bc5
|
[
"MIT"
] | null | null | null |
setup.py
|
saurabh6790/shopify_broker
|
62969b70a56be0f74aa20ef0d31fbea5fa932bc5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
version = '1.0.0'
setup(
name='shopify_broker',
version=version,
description='A broker to install Shopify public app.',
author='Frappe Technologies Pvt. Ltd.',
author_email='info@frappe.io',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=("frappe",),
)
| 22.666667
| 58
| 0.686275
|
24e37ef9cb29b4bd7104f4feca2f8f7740a43908
| 8,166
|
py
|
Python
|
nearpy/tests/hash_storage_tests.py
|
jbalint/aurum-datadiscovery
|
443b777c13be47e6da60b4af528e19a02608e158
|
[
"MIT"
] | 60
|
2017-01-13T19:53:34.000Z
|
2022-01-06T07:00:43.000Z
|
nearpy/tests/hash_storage_tests.py
|
jbalint/aurum-datadiscovery
|
443b777c13be47e6da60b4af528e19a02608e158
|
[
"MIT"
] | 83
|
2016-10-08T14:07:55.000Z
|
2022-03-08T22:03:39.000Z
|
nearpy/tests/hash_storage_tests.py
|
jbalint/aurum-datadiscovery
|
443b777c13be47e6da60b4af528e19a02608e158
|
[
"MIT"
] | 44
|
2016-10-08T00:15:46.000Z
|
2021-12-31T02:42:45.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Ole Krause-Sparmann
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import numpy
import unittest
from mockredis import MockRedis as Redis
from nearpy.storage import MemoryStorage, RedisStorage
from nearpy.hashes import RandomBinaryProjections, \
RandomDiscretizedProjections, \
PCABinaryProjections, PCADiscretizedProjections
class TestHashStorage(unittest.TestCase):
def setUp(self):
self.memory = MemoryStorage()
self.redis_object = Redis()
self.redis_storage = RedisStorage(self.redis_object)
def test_hash_memory_storage_none_config(self):
conf = self.memory.load_hash_configuration('nonexistentHash')
self.assertIsNone(conf)
def test_hash_memory_storage_rbp(self):
hash1 = RandomBinaryProjections('testRBPHash', 10)
hash1.reset(100)
self.memory.store_hash_configuration(hash1)
hash2 = RandomBinaryProjections(None, None)
hash2.apply_config(self.memory.load_hash_configuration('testRBPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.normals.shape[0]):
for j in range(hash1.normals.shape[1]):
self.assertEqual(hash1.normals[i, j], hash2.normals[i, j])
def test_hash_memory_storage_rdp(self):
hash1 = RandomDiscretizedProjections('testRDPHash', 10, 0.1)
hash1.reset(100)
self.memory.store_hash_configuration(hash1)
hash2 = RandomDiscretizedProjections(None, None, None)
hash2.apply_config(self.memory.load_hash_configuration('testRDPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.bin_width, hash2.bin_width)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.normals.shape[0]):
for j in range(hash1.normals.shape[1]):
self.assertEqual(hash1.normals[i, j], hash2.normals[i, j])
def test_hash_memory_storage_pcabp(self):
train_vectors = numpy.random.randn(10, 100)
hash1 = PCABinaryProjections('testPCABPHash', 4, train_vectors)
self.memory.store_hash_configuration(hash1)
hash2 = PCABinaryProjections(None, None, None)
hash2.apply_config(
self.memory.load_hash_configuration('testPCABPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.components.shape[0]):
for j in range(hash1.components.shape[1]):
self.assertEqual(hash1.components[
i, j], hash2.components[i, j])
def test_hash_memory_storage_pcadp(self):
train_vectors = numpy.random.randn(10, 100)
hash1 = PCADiscretizedProjections(
'testPCADPHash', 4, train_vectors, 0.1)
self.memory.store_hash_configuration(hash1)
hash2 = PCADiscretizedProjections(None, None, None, None)
hash2.apply_config(
self.memory.load_hash_configuration('testPCADPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.bin_width, hash2.bin_width)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.components.shape[0]):
for j in range(hash1.components.shape[1]):
self.assertEqual(hash1.components[
i, j], hash2.components[i, j])
def test_hash_redis_storage_none_config(self):
conf = self.redis_storage.load_hash_configuration('nonexistentHash')
self.assertIsNone(conf)
def test_hash_redis_storage_rbp(self):
hash1 = RandomBinaryProjections('testRBPHash', 10)
hash1.reset(100)
self.redis_storage.store_hash_configuration(hash1)
hash2 = RandomBinaryProjections(None, None)
hash2.apply_config(
self.redis_storage.load_hash_configuration('testRBPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.normals.shape[0]):
for j in range(hash1.normals.shape[1]):
self.assertEqual(hash1.normals[i, j], hash2.normals[i, j])
def test_hash_redis_storage_rdp(self):
hash1 = RandomDiscretizedProjections('testRDPHash', 10, 0.1)
hash1.reset(100)
self.redis_storage.store_hash_configuration(hash1)
hash2 = RandomDiscretizedProjections(None, None, None)
hash2.apply_config(
self.redis_storage.load_hash_configuration('testRDPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.bin_width, hash2.bin_width)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.normals.shape[0]):
for j in range(hash1.normals.shape[1]):
self.assertEqual(hash1.normals[i, j], hash2.normals[i, j])
def test_hash_redis_storage_pcabp(self):
train_vectors = numpy.random.randn(10, 100)
hash1 = PCABinaryProjections('testPCABPHash', 4, train_vectors)
self.redis_storage.store_hash_configuration(hash1)
hash2 = PCABinaryProjections(None, None, None)
hash2.apply_config(
self.redis_storage.load_hash_configuration('testPCABPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.components.shape[0]):
for j in range(hash1.components.shape[1]):
self.assertEqual(hash1.components[
i, j], hash2.components[i, j])
def test_hash_redis_storage_pcadp(self):
train_vectors = numpy.random.randn(10, 100)
hash1 = PCADiscretizedProjections(
'testPCADPHash', 4, train_vectors, 0.1)
self.redis_storage.store_hash_configuration(hash1)
hash2 = PCADiscretizedProjections(None, None, None, None)
hash2.apply_config(
self.redis_storage.load_hash_configuration('testPCADPHash'))
self.assertEqual(hash1.dim, hash2.dim)
self.assertEqual(hash1.hash_name, hash2.hash_name)
self.assertEqual(hash1.bin_width, hash2.bin_width)
self.assertEqual(hash1.projection_count, hash2.projection_count)
for i in range(hash1.components.shape[0]):
for j in range(hash1.components.shape[1]):
self.assertEqual(hash1.components[
i, j], hash2.components[i, j])
if __name__ == '__main__':
unittest.main()
| 39.449275
| 79
| 0.690179
|
c5987aac5e581f929c2c33fdd6d570b11de4551d
| 7,569
|
py
|
Python
|
esim/utils.py
|
TripuraPriyanka/ekmimn
|
389c2249369e5b0f09498d79034634ac1db4ff68
|
[
"Apache-2.0"
] | null | null | null |
esim/utils.py
|
TripuraPriyanka/ekmimn
|
389c2249369e5b0f09498d79034634ac1db4ff68
|
[
"Apache-2.0"
] | null | null | null |
esim/utils.py
|
TripuraPriyanka/ekmimn
|
389c2249369e5b0f09498d79034634ac1db4ff68
|
[
"Apache-2.0"
] | null | null | null |
"""
Utility functions for the ESIM model.
"""
# Aurelien Coet, 2018.
import torch
import torch.nn as nn
# Code widely inspired from:
# https://github.com/allenai/allennlp/blob/master/allennlp/nn/util.py.
def sort_by_seq_lens(batch, sequences_lengths, descending=True):
"""
Sort a batch of padded variable length sequences by their length.
Args:
batch: A batch of padded variable length sequences. The batch should
have the dimensions (batch_size x max_sequence_length x *).
sequences_lengths: A tensor containing the lengths of the sequences in the
input batch. The tensor should be of size (batch_size).
descending: A boolean value indicating whether to sort the sequences
by their lengths in descending order. Defaults to True.
Returns:
sorted_batch: A tensor containing the input batch reordered by
sequences lengths.
sorted_seq_lens: A tensor containing the sorted lengths of the
sequences in the input batch.
sorting_idx: A tensor containing the indices used to permute the input
batch in order to get 'sorted_batch'.
restoration_idx: A tensor containing the indices that can be used to
restore the order of the sequences in 'sorted_batch' so that it
matches the input batch.
"""
sorted_seq_lens, sorting_index =\
sequences_lengths.sort(0, descending=descending)
sorted_batch = batch.index_select(0, sorting_index)
idx_range =\
sequences_lengths.new_tensor(torch.arange(0, len(sequences_lengths)))
_, reverse_mapping = sorting_index.sort(0, descending=False)
restoration_index = idx_range.index_select(0, reverse_mapping)
return sorted_batch, sorted_seq_lens, sorting_index, restoration_index
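# Illustrative usage sketch (hypothetical tensors): this helper is typically
# called before packing a padded batch for an RNN, and the returned restoration
# index is used to put the encoder outputs back in the original order:
#
#     sorted_batch, sorted_lens, _, restore_idx = sort_by_seq_lens(batch, lengths)
#     packed = nn.utils.rnn.pack_padded_sequence(sorted_batch, sorted_lens,
#                                                batch_first=True)
#     ...
#     outputs = outputs.index_select(0, restore_idx)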
def get_mask(sequences_batch, sequences_lengths):
"""
Get the mask for a batch of padded variable length sequences.
Args:
sequences_batch: A batch of padded variable length sequences
containing word indices. Must be a 2-dimensional tensor of size
(batch, sequence).
sequences_lengths: A tensor containing the lengths of the sequences in
'sequences_batch'. Must be of size (batch).
Returns:
A mask of size (batch, max_sequence_length), where max_sequence_length
is the length of the longest sequence in the batch.
"""
batch_size = sequences_batch.size()[0]
max_length = torch.max(sequences_lengths)
mask = torch.ones(batch_size, max_length, dtype=torch.float)
mask[sequences_batch[:, :max_length] == 0] = 0.0
return mask
import numpy
def prepare_keypair_batch(premises, premises_lens, hypothesis, hypothesis_lens, kb_dict):
    maxlen_x = torch.max(premises_lens)
    maxlen_y = torch.max(hypothesis_lens)
batch_size = premises.size()[0]
pair_dim = 5
#kb_x = torch.zeros(maxlen_x, batch_size, maxlen_y, pair_dim, dtype=torch.float)
#kb_y = torch.zeros(maxlen_y, batch_size, maxlen_x, pair_dim, dtype=torch.float)
kb_att = torch.zeros(maxlen_x, batch_size, maxlen_y, dtype=torch.float)
#kb_x = numpy.zeros((maxlen_x, batch_size, maxlen_y, pair_dim)).astype('float32')
#kb_y = numpy.zeros((maxlen_y, batch_size, maxlen_x, pair_dim)).astype('float32')
#kb_att = numpy.zeros((maxlen_x, batch_size, maxlen_y)).astype('float32')
for idx, [s_x, s_y] in enumerate(zip(premises, hypothesis)):
for sid, s in enumerate(s_x):
for tid, t in enumerate(s_y):
if s in kb_dict:
if t in kb_dict[s]:
#kb_x[sid, idx, tid, :] = numpy.array(kb_dict[s][t]).astype('float32')
kb_att[sid, idx, tid] = 1.
"""
for sid, s in enumerate(s_y):
for tid, t in enumerate(s_x):
if s in kb_dict:
if t in kb_dict[s]:
kb_y[sid, idx, tid, :] = numpy.array(kb_dict[s][t]).astype('float32')
"""
return kb_att
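# Illustrative sketch of the expected inputs (the token ids and features below
# are assumptions, not values from the original training data): kb_dict maps a
# premise token id to the hypothesis token ids it is related to, each with a
# pair_dim-sized feature vector, e.g.
#
#     kb_dict = {42: {17: [1, 0, 0, 0, 0], 99: [0, 1, 0, 0, 0]}}
#
# kb_att[s, b, t] is then set to 1 whenever premise token s and hypothesis
# token t of sample b appear as such a pair.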
# Code widely inspired from:
# https://github.com/allenai/allennlp/blob/master/allennlp/nn/util.py.
def masked_softmax(tensor, mask, kb_mask):
"""
Apply a masked softmax on the last dimension of a tensor.
The input tensor and mask should be of size (batch, *, sequence_length).
Args:
tensor: The tensor on which the softmax function must be applied along
the last dimension.
mask: A mask of the same size as the tensor with 0s in the positions of
the values that must be masked and 1s everywhere else.
Returns:
A tensor of the same size as the inputs containing the result of the
softmax.
"""
tensor_shape = tensor.size()
reshaped_tensor = tensor.view(-1, tensor_shape[-1])
# Reshape the mask so it matches the size of the input tensor.
while mask.dim() < tensor.dim():
mask = mask.unsqueeze(1)
mask = mask.expand_as(tensor).contiguous().float()
reshaped_mask = mask.view(-1, mask.size()[-1])
result = nn.functional.softmax(reshaped_tensor * reshaped_mask, dim=-1)
result = result * reshaped_mask
# 1e-13 is added to avoid divisions by zero.
result = result / (result.sum(dim=-1, keepdim=True) + 1e-13)
return torch.add(result.view(*tensor_shape), kb_mask)
# Code widely inspired from:
# https://github.com/allenai/allennlp/blob/master/allennlp/nn/util.py.
def weighted_sum(tensor, weights, mask):
"""
Apply a weighted sum on the vectors along the last dimension of 'tensor',
and mask the vectors in the result with 'mask'.
Args:
tensor: A tensor of vectors on which a weighted sum must be applied.
weights: The weights to use in the weighted sum.
mask: A mask to apply on the result of the weighted sum.
Returns:
A new tensor containing the result of the weighted sum after the mask
has been applied on it.
"""
weighted_sum = weights.bmm(tensor)
while mask.dim() < weighted_sum.dim():
mask = mask.unsqueeze(1)
mask = mask.transpose(-1, -2)
mask = mask.expand_as(weighted_sum).contiguous().float()
return weighted_sum * mask
# Code inspired from:
# https://github.com/allenai/allennlp/blob/master/allennlp/nn/util.py.
def replace_masked(tensor, mask, value):
"""
Replace the all the values of vectors in 'tensor' that are masked in
'masked' by 'value'.
Args:
tensor: The tensor in which the masked vectors must have their values
replaced.
mask: A mask indicating the vectors which must have their values
replaced.
value: The value to place in the masked vectors of 'tensor'.
Returns:
A new tensor of the same size as 'tensor' where the values of the
vectors masked in 'mask' were replaced by 'value'.
"""
mask = mask.unsqueeze(1).transpose(2, 1)
reverse_mask = 1.0 - mask
values_to_add = value * reverse_mask
return tensor * mask + values_to_add
def correct_predictions(output_probabilities, targets):
"""
Compute the number of predictions that match some target classes in the
output of a model.
Args:
output_probabilities: A tensor of probabilities for different output
classes.
targets: The indices of the actual target classes.
Returns:
The number of correct predictions in 'output_probabilities'.
"""
_, out_classes = output_probabilities.max(dim=1)
correct = (out_classes == targets).sum()
return correct.item()
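if __name__ == "__main__":
    # Minimal smoke test of the helpers above. This block is an illustrative
    # addition with arbitrary shapes and values; it is not part of the ESIM
    # training pipeline.
    batch = torch.randn(3, 5, 4)
    lengths = torch.tensor([5, 2, 4])
    sorted_batch, sorted_lens, _, restore_idx = sort_by_seq_lens(batch, lengths)
    assert torch.equal(sorted_batch.index_select(0, restore_idx), batch)

    token_ids = torch.tensor([[4, 2, 7, 0, 0], [3, 0, 0, 0, 0]])
    mask = get_mask(token_ids, torch.tensor([3, 1]))
    print(mask)  # 1.0 for real tokens, 0.0 for padding

    probs = torch.tensor([[0.1, 0.9], [0.8, 0.2]])
    print(correct_predictions(probs, torch.tensor([1, 1])))  # -> 1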
| 37.845
| 94
| 0.670498
|
03d58752b93d5ca1e39344df138ef2afa9941f46
| 2,794
|
py
|
Python
|
accelerate_asr/optim/lr_scheduler/transformer_lr_scheduler.py
|
sooftware/accelerate-asr
|
9ed557186a3534fa66327eeb4b6e927882fd9e60
|
[
"MIT"
] | 8
|
2021-05-10T11:46:00.000Z
|
2021-05-21T16:27:28.000Z
|
accelerate_asr/optim/lr_scheduler/transformer_lr_scheduler.py
|
sooftware/accelerate-asr
|
9ed557186a3534fa66327eeb4b6e927882fd9e60
|
[
"MIT"
] | null | null | null |
accelerate_asr/optim/lr_scheduler/transformer_lr_scheduler.py
|
sooftware/accelerate-asr
|
9ed557186a3534fa66327eeb4b6e927882fd9e60
|
[
"MIT"
] | null | null | null |
# MIT License
#
# Copyright (c) 2021 Soohwan Kim
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import math
from accelerate_asr.optim.lr_scheduler.lr_scheduler import LearningRateScheduler
class TransformerLRScheduler(LearningRateScheduler):
""" Implement the learning rate scheduler in https://arxiv.org/abs/1706.03762 """
def __init__(self, optimizer, peak_lr, final_lr, final_lr_scale, warmup_steps, decay_steps):
        assert isinstance(warmup_steps, int), "warmup_steps should be integer type"
        assert isinstance(decay_steps, int), "decay_steps should be integer type"
super(TransformerLRScheduler, self).__init__(optimizer, 0.0)
self.final_lr = final_lr
self.peak_lr = peak_lr
self.warmup_steps = warmup_steps
self.decay_steps = decay_steps
self.warmup_rate = self.peak_lr / self.warmup_steps
self.decay_factor = -math.log(final_lr_scale) / self.decay_steps
self.lr = self.init_lr
self.update_step = 0
def _decide_stage(self):
if self.update_step < self.warmup_steps:
return 0, self.update_step
if self.warmup_steps <= self.update_step < self.warmup_steps + self.decay_steps:
return 1, self.update_step - self.warmup_steps
return 2, None
def step(self):
self.update_step += 1
stage, steps_in_stage = self._decide_stage()
if stage == 0:
self.lr = self.update_step * self.warmup_rate
elif stage == 1:
self.lr = self.peak_lr * math.exp(-self.decay_factor * steps_in_stage)
elif stage == 2:
self.lr = self.final_lr
else:
raise ValueError("Undefined stage")
self.set_lr(self.optimizer, self.lr)
return self.lr
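# Illustrative usage sketch (the optimizer and step counts below are
# assumptions; any optimizer accepted by LearningRateScheduler will do):
#
#     scheduler = TransformerLRScheduler(
#         optimizer, peak_lr=1e-3, final_lr=1e-5, final_lr_scale=0.01,
#         warmup_steps=4000, decay_steps=100000,
#     )
#     for _ in range(num_training_steps):
#         ...
#         lr = scheduler.step()  # linear warm-up to peak_lr, then exponential decay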
| 39.352113
| 96
| 0.709377
|
6281fc803e09f0fe7f47cc0500b93d8c211620b6
| 805
|
py
|
Python
|
metaci/cumulusci/admin.py
|
abhishekalgo/metaci
|
cd62473b3fb85fb0f39623f9fb2850993ff708a5
|
[
"BSD-3-Clause"
] | null | null | null |
metaci/cumulusci/admin.py
|
abhishekalgo/metaci
|
cd62473b3fb85fb0f39623f9fb2850993ff708a5
|
[
"BSD-3-Clause"
] | null | null | null |
metaci/cumulusci/admin.py
|
abhishekalgo/metaci
|
cd62473b3fb85fb0f39623f9fb2850993ff708a5
|
[
"BSD-3-Clause"
] | 1
|
2018-12-07T09:51:07.000Z
|
2018-12-07T09:51:07.000Z
|
from django.contrib import admin
from metaci.cumulusci.models import Org
from metaci.cumulusci.models import ScratchOrgInstance
from metaci.cumulusci.models import Service
class OrgAdmin(admin.ModelAdmin):
list_display = ("name", "repo", "scratch")
list_filter = ("name", "scratch", "repo")
admin.site.register(Org, OrgAdmin)
class ServiceAdmin(admin.ModelAdmin):
list_display = ("name",)
admin.site.register(Service, ServiceAdmin)
class ScratchOrgInstanceAdmin(admin.ModelAdmin):
list_display = (
"org",
"build",
"sf_org_id",
"username",
"deleted",
"time_created",
"time_deleted",
)
list_filter = ("deleted", "org")
raw_id_fields = ("build",)
admin.site.register(ScratchOrgInstance, ScratchOrgInstanceAdmin)
| 21.756757
| 64
| 0.68323
|
0e0b487ed97e03fc7a113ef6f0e9cd62715f5961
| 17,987
|
py
|
Python
|
src/psd_tools/composite/__init__.py
|
cronic208/syke412
|
589a03a0d102dbf02ff2624cf1177d2bf19681c6
|
[
"MIT"
] | 1
|
2020-08-19T07:30:59.000Z
|
2020-08-19T07:30:59.000Z
|
src/psd_tools/composite/__init__.py
|
cronic208/psd-tools
|
b815d7f2af49963392d3fd5be545253167e0dd01
|
[
"MIT"
] | null | null | null |
src/psd_tools/composite/__init__.py
|
cronic208/psd-tools
|
b815d7f2af49963392d3fd5be545253167e0dd01
|
[
"MIT"
] | 1
|
2022-03-21T21:05:44.000Z
|
2022-03-21T21:05:44.000Z
|
import numpy as np
from psd_tools.constants import Tag, BlendMode, ColorMode
from psd_tools.api.layers import AdjustmentLayer, Layer
from psd_tools.api.numpy_io import EXPECTED_CHANNELS
import logging
from .blend import BLEND_FUNC, normal
from .vector import (
create_fill, create_fill_desc, draw_vector_mask, draw_stroke,
draw_solid_color_fill, draw_pattern_fill, draw_gradient_fill
)
from .effects import draw_stroke_effect
logger = logging.getLogger(__name__)
def composite_pil(
layer, color, alpha, viewport, layer_filter, force, as_layer=False
):
from PIL import Image
from psd_tools.api.pil_io import get_pil_mode
from psd_tools.api.numpy_io import has_transparency
UNSUPPORTED_MODES = {
ColorMode.DUOTONE,
ColorMode.LAB,
}
color_mode = getattr(layer, '_psd', layer).color_mode
if color_mode in UNSUPPORTED_MODES:
logger.warning('Unsupported blending color space: %s' % (color_mode))
color, _, alpha = composite(
layer,
color=color,
alpha=alpha,
viewport=viewport,
layer_filter=layer_filter,
force=force,
as_layer=as_layer
)
mode = get_pil_mode(color_mode)
if mode == 'P':
mode = 'RGB'
# Skip only when there is a preview image and it has no alpha.
skip_alpha = (
color_mode not in (ColorMode.GRAYSCALE, ColorMode.RGB) or (
layer.kind == 'psdimage' and layer.has_preview() and
not has_transparency(layer)
)
)
if not skip_alpha:
color = np.concatenate((color, alpha), 2)
mode += 'A'
if mode in ('1', 'L'):
color = color[:, :, 0]
if color.shape[0] == 0 or color.shape[1] == 0:
return None
return Image.fromarray((255 * color).astype(np.uint8), mode)
def composite(
group,
color=1.0,
alpha=0.0,
viewport=None,
layer_filter=None,
force=False,
as_layer=False,
):
"""
Composite the given group of layers.
"""
viewport = viewport or getattr(group, 'viewbox', None) or group.bbox
if viewport == (0, 0, 0, 0):
viewport = getattr(group, '_psd').viewbox
if getattr(group, 'kind', None) == 'psdimage' and len(group) == 0:
color, shape = group.numpy('color'), group.numpy('shape')
if viewport != group.viewbox:
color = paste(viewport, group.bbox, color, 1.)
shape = paste(viewport, group.bbox, shape)
return color, shape, shape
if not isinstance(color, np.ndarray) and not hasattr(color, '__iter__'):
color_mode = getattr(group, '_psd', group).color_mode
color = (color, ) * EXPECTED_CHANNELS.get(color_mode)
isolated = False
if hasattr(group, 'blend_mode'):
isolated = group.blend_mode != BlendMode.PASS_THROUGH
layer_filter = layer_filter or Layer.is_visible
compositor = Compositor(
viewport, color, alpha, isolated, layer_filter, force
)
for layer in (
group if hasattr(group, '__iter__') and not as_layer else [group]
):
compositor.apply(layer)
return compositor.finish()
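# Illustrative usage sketch (the file name is hypothetical): compositing a
# whole PSD opened with psd_tools.PSDImage returns float32 numpy arrays in
# [0, 1] for color, shape and alpha, while composite_pil wraps the result in a
# PIL image:
#
#     psd = PSDImage.open('input.psd')
#     color, shape, alpha = composite(psd)
#     image = composite_pil(psd, 1.0, 0.0, psd.viewbox, None, force=False)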
def paste(viewport, bbox, values, background=None):
"""Change to the specified viewport."""
shape = (
viewport[3] - viewport[1], viewport[2] - viewport[0], values.shape[2]
)
view = np.full(shape, background, dtype=np.float32
) if background else np.zeros(shape, dtype=np.float32)
inter = _intersect(viewport, bbox)
if inter == (0, 0, 0, 0):
return view
v = (
inter[0] - viewport[0], inter[1] - viewport[1], inter[2] - viewport[0],
inter[3] - viewport[1]
)
b = (
inter[0] - bbox[0], inter[1] - bbox[1], inter[2] - bbox[0],
inter[3] - bbox[1]
)
view[v[1]:v[3], v[0]:v[2], :] = values[b[1]:b[3], b[0]:b[2], :]
return view
class Compositor(object):
"""Composite context.
Example::
compositor = Compositor(group.bbox)
for layer in group:
compositor.apply(layer)
color, shape, alpha = compositor.finish()
"""
def __init__(
self,
viewport,
color=1.0,
alpha=0.0,
isolated=False,
layer_filter=None,
force=False,
):
self._viewport = viewport
self._layer_filter = layer_filter
self._force = force
self._clip_mask = 1.
if isolated:
self._alpha_0 = np.zeros((self.height, self.width, 1),
dtype=np.float32)
elif isinstance(alpha, np.ndarray):
self._alpha_0 = alpha
else:
self._alpha_0 = np.full((self.height, self.width, 1),
alpha,
dtype=np.float32)
if isinstance(color, np.ndarray):
self._color_0 = color
else:
channels = len(color) if hasattr(color, '__iter__') else 1
self._color_0 = np.full((self.height, self.width, channels),
color,
dtype=np.float32)
self._shape_g = np.zeros((self.height, self.width, 1),
dtype=np.float32)
self._alpha_g = np.zeros((self.height, self.width, 1),
dtype=np.float32)
self._color = self._color_0
self._alpha = self._alpha_0
def apply(self, layer):
logger.debug('Compositing %s' % layer)
if not self._layer_filter(layer):
logger.debug('Ignore %s' % layer)
return
if isinstance(layer, AdjustmentLayer):
logger.debug('Ignore adjustment %s' % layer)
return
if _intersect(self._viewport, layer.bbox) == (0, 0, 0, 0):
logger.debug('Out of viewport %s' % (layer))
return
knockout = bool(layer.tagged_blocks.get_data(Tag.KNOCKOUT_SETTING, 0))
if layer.is_group():
color, shape, alpha = self._get_group(layer, knockout)
else:
color, shape, alpha = self._get_object(layer)
shape_mask, opacity_mask = self._get_mask(layer)
shape_const, opacity_const = self._get_const(layer)
shape *= shape_mask
alpha *= shape_mask * opacity_mask * opacity_const
# TODO: Tag.BLEND_INTERIOR_ELEMENTS controls how inner effects apply.
# TODO: Apply before effects
self._apply_source(
color, shape * shape_const, alpha * shape_const, layer.blend_mode,
knockout
)
# TODO: Apply after effects
self._apply_color_overlay(layer, color, shape, alpha)
self._apply_pattern_overlay(layer, color, shape, alpha)
self._apply_gradient_overlay(layer, color, shape, alpha)
if ((self._force and layer.has_vector_mask()) or (
not layer.has_pixels()) and has_fill(layer)):
self._apply_stroke_effect(layer, color, shape_mask, alpha)
else:
self._apply_stroke_effect(layer, color, shape, alpha)
def _apply_source(self, color, shape, alpha, blend_mode, knockout=False):
if self._color_0.shape[2] == 1 and 1 < color.shape[2]:
self._color_0 = np.repeat(self._color_0, color.shape[2], axis=2)
if self._color.shape[2] == 1 and 1 < color.shape[2]:
self._color = np.repeat(self._color, color.shape[2], axis=2)
self._shape_g = _union(self._shape_g, shape)
if knockout:
self._alpha_g = (1. - shape) * self._alpha_g + \
(shape - alpha) * self._alpha_0 + alpha
else:
self._alpha_g = _union(self._alpha_g, alpha)
alpha_previous = self._alpha
self._alpha = _union(self._alpha_0, self._alpha_g)
alpha_b = self._alpha_0 if knockout else alpha_previous
color_b = self._color_0 if knockout else self._color
blend_fn = BLEND_FUNC.get(blend_mode, normal)
color_t = (shape - alpha) * alpha_b * color_b + alpha * \
((1. - alpha_b) * color + alpha_b * blend_fn(color_b, color))
self._color = _clip(
_divide((1. - shape) * alpha_previous * self._color + color_t,
self._alpha)
)
def finish(self):
return self.color, self.shape, self.alpha
@property
def viewport(self):
return self._viewport
@property
def width(self):
return self._viewport[2] - self._viewport[0]
@property
def height(self):
return self._viewport[3] - self._viewport[1]
@property
def color(self):
return _clip(
self._color + (self._color - self._color_0) *
(_divide(self._alpha_0, self._alpha_g) - self._alpha_0)
)
@property
def shape(self):
return self._shape_g
@property
def alpha(self):
return self._alpha_g
def _get_group(self, layer, knockout):
viewport = _intersect(self._viewport, layer.bbox)
if knockout:
color_b = self._color_0
alpha_b = self._alpha_0
else:
color_b = self._color
alpha_b = self._alpha
color, shape, alpha = composite(
layer,
paste(viewport, self._viewport, color_b, 1.),
paste(viewport, self._viewport, alpha_b),
viewport,
layer_filter=self._layer_filter,
force=self._force
)
color = paste(self._viewport, viewport, color, 1.)
shape = paste(self._viewport, viewport, shape)
alpha = paste(self._viewport, viewport, alpha)
# Composite clip layers.
if layer.has_clip_layers():
color = self._apply_clip_layers(layer, color, alpha)
assert color is not None
assert shape is not None
assert alpha is not None
return color, shape, alpha
def _get_object(self, layer):
"""Get object attributes."""
color, shape = layer.numpy('color'), layer.numpy('shape')
if (self._force or not layer.has_pixels()) and has_fill(layer):
color, shape = create_fill(layer, layer.bbox)
if shape is None:
shape = np.ones((layer.height, layer.width, 1),
dtype=np.float32)
if color is None and shape is None:
# Empty pixel layer.
color = np.ones((self.height, self.width, 1), dtype=np.float32)
shape = np.zeros((self.height, self.width, 1), dtype=np.float32)
if color is None:
color = np.ones((self.height, self.width, 1), dtype=np.float32)
else:
color = paste(self._viewport, layer.bbox, color, 1.)
if shape is None:
shape = np.ones((self.height, self.width, 1), dtype=np.float32)
else:
shape = paste(self._viewport, layer.bbox, shape)
alpha = shape * 1. # Constant factor is always 1.
# Composite clip layers.
if layer.has_clip_layers():
color = self._apply_clip_layers(layer, color, alpha)
# Apply stroke if any.
if layer.has_stroke() and layer.stroke.enabled:
color_s, shape_s, alpha_s = self._get_stroke(layer)
compositor = Compositor(self._viewport, color, alpha)
compositor._apply_source(
color_s, shape_s, alpha_s, layer.stroke.blend_mode
)
color, _, _ = compositor.finish()
assert color is not None
assert shape is not None
assert alpha is not None
return color, shape, alpha
def _apply_clip_layers(self, layer, color, alpha):
# TODO: Consider Tag.BLEND_CLIPPING_ELEMENTS.
compositor = Compositor(
self._viewport,
color,
alpha,
layer_filter=self._layer_filter,
force=self._force
)
for clip_layer in layer.clip_layers:
compositor.apply(clip_layer)
return compositor._color
def _get_mask(self, layer):
"""Get mask attributes."""
shape = 1.
opacity = 1.
if layer.has_mask():
# TODO: When force, ignore real mask.
mask = layer.numpy('mask')
if mask is not None:
shape = paste(
self._viewport, layer.mask.bbox, mask,
layer.mask.background_color / 255.
)
if layer.mask.parameters:
density = layer.mask.parameters.user_mask_density
if density is None:
density = layer.mask.parameters.vector_mask_density
if density is None:
density = 255
opacity = float(density) / 255.
if layer.has_vector_mask() and (
self._force or not layer.has_pixels() or (
not has_fill(layer) and layer.has_mask() and
not layer.mask._has_real()
)
):
shape_v = draw_vector_mask(layer)
shape_v = paste(self._viewport, layer._psd.viewbox, shape_v)
shape *= shape_v
assert shape is not None
assert opacity is not None
return shape, opacity
def _get_const(self, layer):
"""Get constant attributes."""
shape = layer.tagged_blocks.get_data(
Tag.BLEND_FILL_OPACITY, 255
) / 255.
opacity = layer.opacity / 255.
assert shape is not None
assert opacity is not None
return shape, opacity
def _get_stroke(self, layer):
"""Get stroke source."""
desc = layer.stroke._data
width = int(desc.get('strokeStyleLineWidth', 1.))
viewport = tuple(
x + d for x, d in zip(layer.bbox, (-width, -width, width, width))
)
color, _ = create_fill_desc(
layer, desc.get('strokeStyleContent'), viewport
)
color = paste(self._viewport, viewport, color, 1.)
shape = draw_stroke(layer)
if shape.shape[0] != self.height or shape.shape[1] != self.width:
bbox = (0, 0, shape.shape[1], shape.shape[0])
shape = paste(self._viewport, bbox, shape)
opacity = desc.get('strokeStyleOpacity', 100.) / 100.
alpha = shape * opacity
return color, shape, alpha
def _apply_color_overlay(self, layer, color, shape, alpha):
for effect in layer.effects.find('coloroverlay'):
color, shape_e = draw_solid_color_fill(layer.bbox, effect.value)
color = paste(self._viewport, layer.bbox, color, 1.)
if shape_e is None:
shape_e = np.ones((self.height, self.width, 1),
dtype=np.float32)
else:
shape_e = paste(self._viewport, layer.bbox, shape_e)
opacity = effect.opacity / 100.
self._apply_source(
color, shape * shape_e, alpha * shape_e * opacity,
effect.blend_mode
)
def _apply_pattern_overlay(self, layer, color, shape, alpha):
for effect in layer.effects.find('patternoverlay'):
color, shape_e = draw_pattern_fill(
layer.bbox, layer._psd, effect.value
)
color = paste(self._viewport, layer.bbox, color, 1.)
if shape_e is None:
shape_e = np.ones((self.height, self.width, 1),
dtype=np.float32)
else:
shape_e = paste(self._viewport, layer.bbox, shape_e)
opacity = effect.opacity / 100.
self._apply_source(
color, shape * shape_e, alpha * shape_e * opacity,
effect.blend_mode
)
def _apply_gradient_overlay(self, layer, color, shape, alpha):
for effect in layer.effects.find('gradientoverlay'):
color, shape_e = draw_gradient_fill(layer.bbox, effect.value)
color = paste(self._viewport, layer.bbox, color, 1.)
if shape_e is None:
shape_e = np.ones((self.height, self.width, 1),
dtype=np.float32)
else:
shape_e = paste(self._viewport, layer.bbox, shape_e)
opacity = effect.opacity / 100.
self._apply_source(
color, shape * shape_e, alpha * shape_e * opacity,
effect.blend_mode
)
def _apply_stroke_effect(self, layer, color, shape, alpha):
for effect in layer.effects.find('stroke'):
# Effect must happen at the layer viewport.
shape = paste(layer.bbox, self._viewport, shape)
color, shape_e = draw_stroke_effect(
layer.bbox, shape, effect.value, layer._psd
)
color = paste(self._viewport, layer.bbox, color)
shape_e = paste(self._viewport, layer.bbox, shape_e)
opacity = effect.opacity / 100.
self._apply_source(
color, shape_e, shape_e * opacity, effect.blend_mode
)
def _intersect(a, b):
inter = (
max(a[0], b[0]), max(a[1], b[1]), min(a[2], b[2]), min(a[3], b[3])
)
if inter[0] >= inter[2] or inter[1] >= inter[3]:
return (0, 0, 0, 0)
return inter
def has_fill(layer):
FILL_TAGS = (
Tag.SOLID_COLOR_SHEET_SETTING,
Tag.PATTERN_FILL_SETTING,
Tag.GRADIENT_FILL_SETTING,
Tag.VECTOR_STROKE_CONTENT_DATA,
)
return any(tag in layer.tagged_blocks for tag in FILL_TAGS)
def _union(backdrop, source):
"""Generalized union of shape."""
return backdrop + source - (backdrop * source)
def _clip(x):
"""Clip between [0, 1]."""
return np.clip(x, 0., 1.)
def _divide(a, b):
"""Safe division for color ops."""
with np.errstate(divide='ignore', invalid='ignore'):
c = np.true_divide(a, b)
c[~np.isfinite(c)] = 1.
return c
| 34.457854
| 79
| 0.576083
|
92f3772b3897ce7b484fb8b580dc91738367af33
| 3,155
|
py
|
Python
|
pp/components/pcm/test_resistance.py
|
flaport/gdsfactory
|
1f2e844c1fe27b9c6340e2d51500fd3358fa16e5
|
[
"MIT"
] | 8
|
2020-08-25T11:25:18.000Z
|
2022-03-27T11:32:11.000Z
|
pp/components/pcm/test_resistance.py
|
flaport/gdsfactory
|
1f2e844c1fe27b9c6340e2d51500fd3358fa16e5
|
[
"MIT"
] | null | null | null |
pp/components/pcm/test_resistance.py
|
flaport/gdsfactory
|
1f2e844c1fe27b9c6340e2d51500fd3358fa16e5
|
[
"MIT"
] | 1
|
2022-03-04T07:03:29.000Z
|
2022-03-04T07:03:29.000Z
|
from typing import Tuple
import numpy as np
from phidl.geometry import offset
import pp
from pp import components as pc
from pp.component import Component
@pp.cell
def test_resistance(
pad_size: Tuple[float] = (50.0, 50.0),
num_squares: int = 1000,
width: float = 1.0,
res_layer: Tuple[int, int] = pp.LAYER.M3,
pad_layer: Tuple[int, int] = pp.LAYER.M3,
gnd_layer: Tuple[int, int] = pp.LAYER.M3,
) -> Component:
""" meander to test resistance
from phidl.geometry
Args:
pad_size: Size of the two matched impedance pads (microns)
num_squares: Number of squares comprising the resonator wire
width: The width of the squares (microns)
res_layer:
pad_layer:
gnd_layer:
.. plot::
:include-source:
import pp
c = pp.c.test_resistance()
pp.plotgds(c)
"""
x = pad_size[0]
z = pad_size[1]
# Checking validity of input
if x <= 0 or z <= 0:
raise ValueError("Pad must have positive, real dimensions")
elif width > z:
raise ValueError("Width of cell cannot be greater than height of pad")
elif num_squares <= 0:
raise ValueError("Number of squares must be a positive real number")
elif width <= 0:
raise ValueError("Width of cell must be a positive real number")
# Performing preliminary calculations
num_rows = int(np.floor(z / (2 * width)))
if num_rows % 2 == 0:
num_rows -= 1
num_columns = num_rows - 1
squares_in_row = (num_squares - num_columns - 2) / num_rows
# Compensating for weird edge cases
if squares_in_row < 1:
num_rows = round(num_rows / 2) - 2
squares_in_row = 1
if width * 2 > z:
num_rows = 1
squares_in_row = num_squares - 2
length_row = squares_in_row * width
# Creating row/column corner combination structure
T = pp.Component()
Row = pc.rectangle(size=(length_row, width), layer=res_layer)
Col = pc.rectangle(size=(width, width), layer=res_layer)
T.add_ref(Row)
col = T.add_ref(Col)
col.move([length_row - width, -width])
# Creating entire waveguide net
N = pp.Component("net")
n = 1
for i in range(num_rows):
if i != num_rows - 1:
d = N.add_ref(T)
else:
d = N.add_ref(Row)
if n % 2 == 0:
d.reflect(p1=(d.x, d.ymax), p2=(d.x, d.ymin))
d.movey(-(n - 1) * T.ysize)
n += 1
d = N.add_ref(Col).movex(-width)
d = N.add_ref(Col).move([length_row, -(n - 2) * T.ysize])
# Creating pads
P = pp.Component()
Pad1 = pc.rectangle(size=(x, z), layer=pad_layer)
Pad2 = pc.rectangle(size=(x + 5, z), layer=pad_layer)
Gnd1 = offset(Pad1, distance=-5, layer=gnd_layer)
Gnd2 = offset(Pad2, distance=-5, layer=gnd_layer)
pad1 = P.add_ref(Pad1).movex(-x - width)
pad2 = P.add_ref(Pad1).movex(length_row + width)
P.add_ref(Gnd1).center = pad1.center
gnd2 = P.add_ref(Gnd2)
P.add_ref(N).y = pad1.y
gnd2.center = pad2.center
gnd2.movex(2.5)
return P
if __name__ == "__main__":
c = test_resistance()
pp.show(c)
| 27.675439
| 78
| 0.61046
|
a2c73fddb99384e3b9fea3da329333ec854594e6
| 172
|
py
|
Python
|
angular_flask/settings.py
|
mding5692/cs3319Asst3
|
897bd0ae601073ca822babc5ee0da4e82ddc744d
|
[
"MIT"
] | null | null | null |
angular_flask/settings.py
|
mding5692/cs3319Asst3
|
897bd0ae601073ca822babc5ee0da4e82ddc744d
|
[
"MIT"
] | null | null | null |
angular_flask/settings.py
|
mding5692/cs3319Asst3
|
897bd0ae601073ca822babc5ee0da4e82ddc744d
|
[
"MIT"
] | null | null | null |
# Not sure why but this is needed
DEBUG = True
SECRET_KEY = 'temporary_secret_key' # make sure to change this
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/angular_flask.db'
| 28.666667
| 63
| 0.761628
|
088f51c7984d3d032811d0f7f6a8b04ab801b26a
| 1,560
|
py
|
Python
|
src/sentry/api/endpoints/organization_integration_details.py
|
apragacz/sf-sentry
|
2fdd6c1195c29a1d401d1cd538c22ea68556699a
|
[
"BSD-3-Clause"
] | 1
|
2018-03-05T15:40:12.000Z
|
2018-03-05T15:40:12.000Z
|
src/sentry/api/endpoints/organization_integration_details.py
|
pkaminski/sentry
|
27e948283e27d93ca5192ca7b580830e092c25c7
|
[
"BSD-3-Clause"
] | 1
|
2018-08-22T16:49:48.000Z
|
2018-08-22T16:49:48.000Z
|
src/sentry/api/endpoints/organization_integration_details.py
|
pkaminski/sentry
|
27e948283e27d93ca5192ca7b580830e092c25c7
|
[
"BSD-3-Clause"
] | 1
|
2018-07-02T09:46:44.000Z
|
2018-07-02T09:46:44.000Z
|
from __future__ import absolute_import
from sentry import features
from sentry.api.bases.organization import (
OrganizationEndpoint, OrganizationIntegrationsPermission
)
from sentry.api.serializers import serialize
from sentry.models import Integration, OrganizationIntegration
class OrganizationIntegrationDetailsEndpoint(OrganizationEndpoint):
permission_classes = (OrganizationIntegrationsPermission, )
def has_feature(self, request, organization):
return features.has(
'organizations:integrations-v3',
organization=organization,
actor=request.user,
)
def get(self, request, organization, integration_id):
if not self.has_feature(request, organization):
return self.respond({'detail': ['You do not have that feature enabled']}, status=400)
integration = Integration.objects.get(
organizations=organization,
id=integration_id,
)
return self.respond(serialize(integration, request.user))
def delete(self, request, organization, integration_id):
if not self.has_feature(request, organization):
return self.respond({'detail': ['You do not have that feature enabled']}, status=400)
integration = Integration.objects.get(
organizations=organization,
id=integration_id,
)
OrganizationIntegration.objects.filter(
integration=integration,
organization=organization,
).delete()
return self.respond(status=204)
| 34.666667
| 97
| 0.691667
|
371250d667c0851a03cd336de2068b11bf8f3ad8
| 47,452
|
py
|
Python
|
src/energenie/Devices.py
|
tonbut/pyenergenie
|
f4e4554c963457341d64a3eeef973636c6f5ed3c
|
[
"MIT"
] | 2
|
2017-02-05T23:30:50.000Z
|
2020-01-28T11:56:31.000Z
|
src/energenie/Devices.py
|
tonbut/pyenergenie
|
f4e4554c963457341d64a3eeef973636c6f5ed3c
|
[
"MIT"
] | null | null | null |
src/energenie/Devices.py
|
tonbut/pyenergenie
|
f4e4554c963457341d64a3eeef973636c6f5ed3c
|
[
"MIT"
] | 3
|
2017-02-18T17:48:15.000Z
|
2020-03-09T19:33:13.000Z
|
# Devices.py 30/09/2015 D.J.Whale
#
# Information about specific Energenie devices
# This table is mostly reverse-engineered from various websites and web catalogues.
##from lifecycle import *
import time
import copy
import random
import sys
import traceback
try:
# Python 2
import OnAir
import OpenThings
except ImportError:
# Python 3
from . import OnAir
from . import OpenThings
# This level of indirection allows easy mocking for testing
ook_interface = OnAir.TwoBitAirInterface()
fsk_interface = OnAir.OpenThingsAirInterface()
MFRID_ENERGENIE = 0x04
MFRID = MFRID_ENERGENIE
##PRODUCTID_MIHO001 = # Home Hub
##PRODUCTID_MIHO002 = # OOK Control only
##PRODUCTID_MIHO003 = 0x0? # Hand Controller
PRODUCTID_MIHO004 = 0x01 # Monitor only
PRODUCTID_MIHO005 = 0x02 # Adaptor Plus
PRODUCTID_MIHO006 = 0x05 # House Monitor
##PRODUCTID_MIHO007 = 0x0? # Double Wall Socket White
##PRODUCTID_MIHO008 = 0x0? # OOK: Single light switch white
##PRODUCTID_MIHO009 not used
##PRODUCTID_MIHO010 not used
##PRODUCTID_MIHO011 not used
##PRODUCTID_MIHO012 not used
PRODUCTID_MIHO013 = 0x03 # eTRV
##PRODUCTID_MIHO014 # OOK In-line Relay
##PRODUCTID_MIHO015 not used
##PRODUCTID_MIHO016 not used
##PRODUCTID_MIHO017
##PRODUCTID_MIHO018
##PRODUCTID_MIHO019
##PRODUCTID_MIHO020
##PRODUCTID_MIHO021 = 0x0? # Double Wall Socket Nickel
##PRODUCTID_MIHO022 = 0x0? # Double Wall Socket Chrome
##PRODUCTID_MIHO023 = 0x0? # Double Wall Socket Brushed Steel
##PRODUCTID_MIHO024 = 0x0? # OOK:Style Light Nickel
##PRODUCTID_MIHO025 = 0x0? # OOK:Style Light Chrome
##PRODUCTID_MIHO026 = 0x0? # OOK:Style Light Steel
##PRODUCTID_MIHO027 starter pack bundle
##PRODUCTID_MIHO028 eco starter pack
##PRODUCTID_MIHO029 heating bundle
##PRODUCTID_MIHO030 not used
##PRODUCTID_MIHO031 not used
PRODUCTID_MIHO032 = 0x0C # FSK motion sensor
PRODUCTID_MIHO033 = 0x0D # FSK open sensor
##PRODUCTID_MIHO034 not used
##PRODUCTID_MIHO035 not used
##PRODUCTID_MIHO036 not used
##PRODUCTID_MIHO037 Adaptor Plus Bundle
##PRODUCTID_MIHO038 2-gang socket Bundle
##PRODUCTID_MIHO039 2-gang socket Bundle black nickel
##PRODUCTID_MIHO040 2-gang socket Bundle chrome
##PRODUCTID_MIHO041 2-gang socket Bundle stainless steel
# Default keys for OpenThings encryption and decryption
CRYPT_PID = 242
CRYPT_PIP = 0x0100
# OpenThings does not support a broadcast id,
# but Energenie added one for their MiHome Adaptors.
# This makes simple discovery possible.
BROADCAST_ID = 0xFFFFFF # Energenie broadcast
#----- DEFINED MESSAGE TEMPLATES ----------------------------------------------
SWITCH = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO005,
"encryptPIP": CRYPT_PIP,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": True,
"paramid": OpenThings.PARAM_SWITCH_STATE,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
}
]
}
MIHO013_IDENTIFY = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO013,
"encryptPIP": CRYPT_PID,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": True,
"paramid": OpenThings.PARAM_IDENTIFY,
"typeid": OpenThings.Value.UINT,
"length": 0,
}
]
}
MIHO013_EXERCISE = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO013,
"encryptPIP": CRYPT_PIP,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": True,
"paramid": OpenThings.PARAM_EXERCISE,
"typeid": OpenThings.Value.UINT,
"length": 0
}
]
}
MIHO013_BATTERY_LEVEL = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO013,
"encryptPIP": CRYPT_PID,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": True,
"paramid": OpenThings.PARAM_BATTERY_LEVEL, #OpenThings.PARAM_IDENTIFY,
"typeid": OpenThings.Value.UINT,
"length": 0,
}
]
}
MIHO013_DIAGNOSTICS = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO013,
"encryptPIP": CRYPT_PID,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": True,
"paramid": OpenThings.PARAM_DIAGNOSTICS,
"typeid": OpenThings.Value.UINT,
"length": 0,
}
]
}
MIHO013_SET_TEMPERATURE = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO013,
"encryptPIP": CRYPT_PID,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": True,
"paramid": OpenThings.PARAM_TEMPERATURE,
"typeid": OpenThings.Value.SINT_BP8,
"length": 2,
"value": 0 # FILL IN
}
]
}
MIHO013_SET_VALVE_POSITION = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO013,
"encryptPIP": CRYPT_PID,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": True,
"paramid": OpenThings.PARAM_VALVE_POSITION,
"typeid": 0x01,
"length": 1,
"value": 0 # FILL IN
}
]
}
JOIN_REQ = {
"header": {
"mfrid": 0, # FILL IN
"productid": 0, # FILL IN
"encryptPIP": CRYPT_PIP,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": False,
"paramid": OpenThings.PARAM_JOIN,
"typeid": OpenThings.Value.UINT,
"length": 0
}
]
}
JOIN_ACK = {
"header": {
"mfrid": MFRID_ENERGENIE, # FILL IN
"productid": 0, # FILL IN
"encryptPIP": CRYPT_PIP,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": False,
"paramid": OpenThings.PARAM_JOIN,
"typeid": OpenThings.Value.UINT,
"length": 0
}
]
}
REGISTERED_SENSOR = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": 0, # FILL IN
"encryptPIP": CRYPT_PIP,
"sensorid": 0 # FILL IN
}
}
MIHO005_REPORT = {
"header": {
"mfrid": MFRID_ENERGENIE,
"productid": PRODUCTID_MIHO005,
"encryptPIP": CRYPT_PIP,
"sensorid": 0 # FILL IN
},
"recs": [
{
"wr": False,
"paramid": OpenThings.PARAM_SWITCH_STATE,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
},
{
"wr": False,
"paramid": OpenThings.PARAM_VOLTAGE,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
},
{
"wr": False,
"paramid": OpenThings.PARAM_CURRENT,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
},
{
"wr": False,
"paramid": OpenThings.PARAM_FREQUENCY,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
},
{
"wr": False,
"paramid": OpenThings.PARAM_REAL_POWER,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
},
{
"wr": False,
"paramid": OpenThings.PARAM_REACTIVE_POWER,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
},
{
"wr": False,
"paramid": OpenThings.PARAM_APPARENT_POWER,
"typeid": OpenThings.Value.UINT,
"length": 1,
"value": 0 # FILL IN
},
]
}
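# The templates above are pydicts with "FILL IN" fields. They are instantiated
# and completed via OpenThings.Message, as the device classes below do; a short
# sketch (the sensor id is hypothetical):
#
#     payload = OpenThings.Message(SWITCH)
#     payload.set(header_productid=PRODUCTID_MIHO005,
#                 header_sensorid=0x68b,
#                 recs_SWITCH_STATE_value=True)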
#----- CONTRACT WITH AIR-INTERFACE --------------------------------------------
# this might be a real air_interface (a radio), or an adaptor interface
# (a message scheduler with a queue).
#
# synchronous send
# synchronous receive
#TODO: asynchronous send (deferred) - implies a callback on 'done, fail, timeout'
#TODO: asynchronous receive (deferred) - implies a callback on 'done, fail, timeout'
# air_interface has:
# configure(parameters)
# send(payload)
# send(payload, parameters)
# receive() -> (radio_measurements, address, payload)
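# A minimal sketch of the contract above, handy for offline testing. This class
# is an illustrative assumption and is not used by the device classes below;
# the real adaptors are ook_interface and fsk_interface defined at the top.
class MockAirInterface():
    def __init__(self):
        self.sent = []      # (payload, radio_config) pairs captured from send()
        self.inbound = []   # payloads that tests want receive() to return
    def configure(self, **parameters):
        self.parameters = parameters
    def send(self, payload, encoded=False, radio_config=None):
        self.sent.append((payload, radio_config))
    def receive(self):
        payload = self.inbound.pop(0) if len(self.inbound) > 0 else None
        return (None, None, payload)  # (radio_measurements, address, payload)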
#----- NEW DEVICE CLASSES -----------------------------------------------------
class Device():
"""A generic connected device abstraction"""
def __init__(self, device_id=None, air_interface=None):
self.air_interface = air_interface
self.device_id = self.parse_device_id(device_id)
class RadioConfig(): pass
self.radio_config = RadioConfig()
class Capabilities(): pass
self.capabilities = Capabilities()
self.updated_cb = None
self.rxseq = 0
self.lastHandledMessage = 0
def get_config(self):
raise RuntimeError("There is no configuration for a base Device")
@staticmethod
def parse_device_id(device_id):
"""device_id could be a number, a hex string or a decimal string"""
##print("**** parsing: %s" % str(device_id))
if device_id == None:
raise ValueError("device_id is None, not allowed")
if type(device_id) == int:
return device_id # does not need to be parsed
if type(device_id) == tuple or type(device_id) == list:
# each part of the tuple could be encoded
res = []
for p in device_id:
res.append(Device.parse_device_id(p))
#TODO: could usefully convert to tuple here to be helpful
return res
if type(device_id) == str:
# could be hex or decimal or strtuple or strlist
if device_id == "":
raise ValueError("device_id is blank, not allowed")
elif device_id.startswith("0x"):
return int(device_id, 16)
elif device_id[0] == '(' and device_id[-1] == ')':
##print("**** parse tuple")
inner = device_id[1:-1]
parts = inner.split(',')
##print(parts)
res = []
for p in parts:
res.append(Device.parse_device_id(p))
##print(res)
return res
elif device_id[0] == '[' and device_id[-1] == ']':
##print("**** parse list")
inner = device_id[1:-1]
parts = inner.split(',')
##print(parts)
res = []
for p in parts:
res.append(Device.parse_device_id(p))
#TODO: could usefully change to tuple here
##print(res)
return res
else:
return int(device_id, 10)
else:
raise ValueError("device_id unsupported type or format, got: %s %s" % (type(device_id), str(device_id)))
def has_switch(self):
return hasattr(self.capabilities, "switch")
def can_send(self):
return hasattr(self.capabilities, "send")
def can_receive(self):
return hasattr(self.capabilities, "receive")
def get_radio_config(self):
return self.radio_config
def get_last_receive_time(self): # ->timestamp
"""The timestamp of the last time any message was received by this device"""
return self.last_receive_time
def get_next_receive_time(self): # -> timestamp
"""An estimate of the next time we expect a message from this device"""
pass
def get_readings_summary(self):
"""Try to get a terse summary of all present readings"""
try:
r = self.readings
except AttributeError:
return "(no readings)"
def shortname(name):
parts = name.split('_')
sn = ""
for p in parts:
sn += p[0].upper()
return sn
line = ""
for rname in dir(self.readings):
if not rname.startswith("__"):
value = getattr(self.readings, rname)
line += "%s:%s " % (shortname(rname), str(value))
return line
# for each reading
# call get_x to get the reading
# think of a very short name, perhaps first letter of reading name?
# add it to a terse string
# return the string
def get_receive_count(self):
return self.rxseq
def incoming_message(self, payload):
"""Entry point for a message to be processed"""
#This is the base-class entry point, don't override this, but override handle_message
self.rxseq += 1
self.handle_message(payload)
if self.updated_cb != None:
self.updated_cb(self, payload)
self.lastHandledMessage=time.time()
def handle_message(self, payload):
"""Default handling for a new message"""
print("incoming(unhandled): %s" % payload)
def send_message(self, payload):
print("send_message %s" % payload)
# A raw device has no knowledge of how to send, the sub class provides that.
def when_updated(self, callback):
"""Provide a callback handler to be called when a new message arrives"""
self.updated_cb = callback
# signature: update(self, message)
def __repr__(self):
return "Device()"
class EnergenieDevice(Device):
"""An abstraction for any kind of Energenie connected device"""
def __init__(self, device_id=None, air_interface=None):
Device.__init__(self, device_id, air_interface)
def get_device_id(self): # -> id:int
return self.device_id
def __repr__(self):
return "EnergenieDevice(%s)" % str(self.device_id)
class LegacyDevice(EnergenieDevice):
    """An abstraction for Energenie green button legacy OOK devices"""
    DEFAULT_HOUSE_ADDRESS = 0x6C6C6
    def __init__(self, device_id=None, air_interface=None):
        if air_interface == None:
            air_interface = ook_interface
        if device_id == None:
            device_id = (LegacyDevice.DEFAULT_HOUSE_ADDRESS, 1)
        elif type(device_id) == int:
            device_id = (LegacyDevice.DEFAULT_HOUSE_ADDRESS, device_id)
        elif type(device_id) == tuple and device_id[0] == None:
            device_id = (LegacyDevice.DEFAULT_HOUSE_ADDRESS, device_id[1])
        EnergenieDevice.__init__(self, device_id, air_interface)
    #TODO: These are now just implied by the ook_interface adaptor
##self.radio_config.frequency = 433.92
##self.radio_config.modulation = "OOK"
##self.radio_config.codec = "4bit"
def __repr__(self):
return "LegacyDevice(%s)" % str(self.device_id)
def get_config(self):
"""Get the persistable config, enough to reconstruct this class from a factory"""
return {
"type": self.__class__.__name__,
"device_id": self.device_id
}
def send_message(self, payload):
if self.air_interface != None:
self.air_interface.send(payload, radio_config=self.radio_config)
else:
d = self.device_id
print("send_message(mock[%s]):%s" % (str(d), payload))
class MiHomeDevice(EnergenieDevice):
"""An abstraction for Energenie new style MiHome FSK devices"""
def __init__(self, device_id=None, air_interface=None):
if air_interface == None:
air_interface = fsk_interface
EnergenieDevice.__init__(self, device_id, air_interface)
#TODO: These are now implied by the air_interface adaptor
##self.radio_config.frequency = 433.92
##self.radio_config.modulation = "FSK"
##self.radio_config.codec = "OpenThings"
self.manufacturer_id = MFRID_ENERGENIE
self.product_id = None
#Different devices might have different PIP's
#if we are cycling codes on each message?
##self.config.encryptPID = CRYPT_PID
##self.config.encryptPIP = CRYPT_PIP
def get_config(self):
"""Get the persistable config, enough to reconstruct this class from a factory"""
return {
"type": self.__class__.__name__,
##"manufacturer_id": self.manufacturer_id, # not needed, known by class
##"product_id": self.product_id, # not needed, known by class
"device_id": self.device_id
}
def __repr__(self):
return "MiHomeDevice(%s,%s,%s)" % (str(self.manufacturer_id), str(self.product_id), str(self.device_id))
def get_manufacturer_id(self): # -> id:int
return self.manufacturer_id
def get_product_id(self): # -> id:int
return self.product_id
@staticmethod
def get_join_req(mfrid, productid, deviceid):
"""Used for testing, synthesises a JOIN_REQ message from this device"""
msg = OpenThings.Message(JOIN_REQ)
msg["header_mfrid"] = mfrid
msg["header_productid"] = productid
msg["header_sensorid"] = deviceid
return msg
def join_ack(self):
"""Send a join-ack to the real device"""
print "send join ack"
#msg = OpenThings.Message(header_mfrid=MFRID_ENERGENIE, header_productid=self.product_id, header_sensorid=self.device_id)
#msg[OpenThings.PARAM_JOIN] = {"wr":False, "typeid":OpenThings.Value.UINT, "length":0}
#self.send_message(msg)
payload = OpenThings.Message(JOIN_ACK)
payload.set(header_productid=self.product_id,
header_sensorid=self.device_id)
self.send_message(payload)
##def handle_message(self, payload):
#override for any specific handling
def send_message(self, payload, encoded=False):
#TODO: interface with air_interface
#is payload a pydict with header at this point, and we have to call OpenThings.encode?
#should the encode be done here, or in the air_interface adaptor?
#TODO: at what point is the payload turned into a pydict?
#TODO: We know it's going over OpenThings,
#do we call OpenThings.encode(payload) here?
#also OpenThings.encrypt() - done by encode() as default
if self.air_interface != None:
#TODO: might want to send the config, either as a send parameter,
#or by calling air_interface.configure() first?
self.air_interface.send(payload, encoded=encoded, radio_config=self.radio_config)
else:
m = self.manufacturer_id
p = self.product_id
d = self.device_id
print("send_message(mock[%s %s %s]):%s" % (str(m), str(p), str(d), payload))
#------------------------------------------------------------------------------
class OOKSwitch(LegacyDevice):
"""Any OOK controlled switch"""
def __init__(self, device_id, air_interface=None):
LegacyDevice.__init__(self, device_id, air_interface)
self.radio_config.inner_times = 8
self.capabilities.switch = True
self.capabilities.receive = True
def __repr__(self):
return "OOKSwitch(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
def turn_on(self):
#TODO: should this be here, or in LegacyDevice??
#addressing should probably be in LegacyDevice
#child devices might interpret the command differently
payload = {
"house_address": self.device_id[0],
"device_index": self.device_id[1],
"on": True
}
self.send_message(payload)
def turn_off(self):
#TODO: should this be here, or in LegacyDevice???
#addressing should probably be in LegacyDevice
#child devices might interpret the command differently
payload = {
"house_address": self.device_id[0],
"device_index": self.device_id[1],
"on": False
}
self.send_message(payload)
def set_switch(self, state):
if state:
self.turn_on()
else:
self.turn_off()
class ENER002(OOKSwitch):
"""A green button switch"""
def __repr__(self):
return "ENER002(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
class MIHO002(OOKSwitch):
"""A purple button MiHome switch"""
def __repr__(self):
return "MIHO002(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
class MIHO014(OOKSwitch):
"""Energenie 3kW switchable relay"""
def __repr__(self):
return "MIHO014(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
#------------------------------------------------------------------------------
class MiHomeLight(LegacyDevice):
"""Base for all MiHomeLight variants. Receive only OOK device"""
def __init__(self, device_id, air_interface=None):
LegacyDevice.__init__(self, device_id, air_interface)
self.radio_config.inner_times = 75
self.capabilities.switch = True
self.capabilities.receive = True
def __repr__(self):
return "MiHomeLight(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
def turn_on(self):
#TODO: should this be here, or in LegacyDevice??
#addressing should probably be in LegacyDevice
#child devices might interpret the command differently
payload = {
"house_address": self.device_id[0],
"device_index": self.device_id[1],
"on": True
}
self.send_message(payload)
def turn_off(self):
#TODO: should this be here, or in LegacyDevice???
#addressing should probably be in LegacyDevice
#child devices might interpret the command differently
payload = {
"house_address": self.device_id[0],
"device_index": self.device_id[1],
"on": False
}
self.send_message(payload)
def set_switch(self, state):
if state:
self.turn_on()
else:
self.turn_off()
class MIHO008(MiHomeLight):
"""White finish"""
def __repr__(self):
return "MIHO008(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
class MIHO024(MiHomeLight):
"""Black Nickel Finish"""
def __repr__(self):
return "MIHO024(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
class MIHO025(MiHomeLight):
"""Chrome Finish"""
def __repr__(self):
return "MIHO025(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
class MIHO026(MiHomeLight):
"""Brushed Steel Finish"""
def __repr__(self):
return "MIHO026(%s,%s)" % (str(hex(self.device_id[0])), str(hex(self.device_id[1])))
#------------------------------------------------------------------------------
class MIHO004(MiHomeDevice):
"""Energenie Monitor-only Adaptor"""
def __init__(self, device_id, air_interface=None):
MiHomeDevice.__init__(self, device_id, air_interface)
self.product_id = PRODUCTID_MIHO004
class Readings():
voltage = None
frequency = None
current = None
apparent_power = None
reactive_power = None
real_power = None
self.readings = Readings()
self.radio_config.inner_times = 4
self.capabilities.send = True
self.capabilities.switch = True
def __repr__(self):
return "MIHO004(%s)" % str(hex(self.device_id))
@staticmethod
def get_join_req(deviceid):
"""Get a synthetic join request from this device, for testing"""
return MiHomeDevice.get_join_req(MFRID_ENERGENIE, PRODUCTID_MIHO004, deviceid)
def handle_message(self, payload):
##print("MIHO005 new data %s %s" % (self.device_id, payload))
for rec in payload["recs"]:
paramid = rec["paramid"]
#TODO: consider making this table driven and allowing our base class to fill our readings in for us
# then just define the mapping table in __init__ (i.e. paramid->Readings field name)
value = rec["value"]
if paramid == OpenThings.PARAM_VOLTAGE:
self.readings.voltage = value
elif paramid == OpenThings.PARAM_CURRENT:
self.readings.current = value
elif paramid == OpenThings.PARAM_REAL_POWER:
self.readings.real_power = value
elif paramid == OpenThings.PARAM_APPARENT_POWER:
self.readings.apparent_power = value
elif paramid == OpenThings.PARAM_REACTIVE_POWER:
self.readings.reactive_power = value
elif paramid == OpenThings.PARAM_FREQUENCY:
self.readings.frequency = value
else:
try:
param_name = OpenThings.param_info[paramid]['n'] # name
except:
param_name = "UNKNOWN_%s" % str(hex(paramid))
print("unwanted paramid: %s" % param_name)
def get_readings(self): # -> readings:pydict
"""A way to get all readings as a single consistent set"""
return self.readings
def get_voltage(self): # -> voltage:float
"""Last stored state of voltage reading, None if unknown"""
if self.readings.voltage == None:
raise RuntimeError("No voltage reading received yet")
return self.readings.voltage
def get_frequency(self): # -> frequency:float
"""Last stored state of frequency reading, None if unknown"""
if self.readings.frequency == None:
raise RuntimeError("No frequency reading received yet")
return self.readings.frequency
def get_apparent_power(self): # ->power:float
"""Last stored state of apparent power reading, None if unknown"""
if self.readings.apparent_power == None:
raise RuntimeError("No apparent power reading received yet")
return self.readings.apparent_power
def get_reactive_power(self): # -> power:float
"""Last stored state of reactive power reading, None if unknown"""
if self.readings.reactive_power == None:
raise RuntimeError("No reactive power reading received yet")
return self.readings.reactive_power
def get_real_power(self): #-> power:float
"""Last stored state of real power reading, None if unknown"""
if self.readings.real_power == None:
raise RuntimeError("No real power reading received yet")
return self.readings.real_power
#------------------------------------------------------------------------------
class MIHO005(MiHomeDevice):
"""An Energenie MiHome Adaptor Plus"""
def __init__(self, device_id, air_interface=None):
MiHomeDevice.__init__(self, device_id, air_interface)
self.product_id = PRODUCTID_MIHO005
class Readings():
switch = None
voltage = None
frequency = None
current = None
apparent_power = None
reactive_power = None
real_power = None
self.readings = Readings()
self.radio_config.inner_times = 4
self.capabilities.send = True
self.capabilities.receive = True
self.capabilities.switch = True
def __repr__(self):
return "MIHO005(%s)" % str(hex(self.device_id))
@staticmethod
def get_join_req(deviceid):
"""Get a synthetic join request from this device, for testing"""
return MiHomeDevice.get_join_req(MFRID_ENERGENIE, PRODUCTID_MIHO004, deviceid)
def handle_message(self, payload):
#print("MIHO005 new data %s %s" % (self.device_id, payload))
for rec in payload["recs"]:
paramid = rec["paramid"]
#TODO: consider making this table driven and allowing our base class to fill our readings in for us
# then just define the mapping table in __init__ (i.e. paramid->Readings field name)
value = rec["value"]
if paramid == OpenThings.PARAM_SWITCH_STATE:
self.readings.switch = ((value == True) or (value != 0))
elif paramid == OpenThings.PARAM_VOLTAGE:
self.readings.voltage = value
elif paramid == OpenThings.PARAM_CURRENT:
self.readings.current = value
elif paramid == OpenThings.PARAM_REAL_POWER:
self.readings.real_power = value
elif paramid == OpenThings.PARAM_APPARENT_POWER:
self.readings.apparent_power = value
elif paramid == OpenThings.PARAM_REACTIVE_POWER:
self.readings.reactive_power = value
elif paramid == OpenThings.PARAM_FREQUENCY:
self.readings.frequency = value
else:
try:
param_name = OpenThings.param_info[paramid]['n'] # name
except:
param_name = "UNKNOWN_%s" % str(hex(paramid))
print("unwanted paramid: %s" % param_name)
def get_readings(self): # -> readings:pydict
"""A way to get all readings as a single consistent set"""
return self.readings
def turn_on(self):
#TODO: header construction should be in MiHomeDevice as it is shared?
payload = OpenThings.Message(SWITCH)
payload.set(header_productid=self.product_id,
header_sensorid=self.device_id,
recs_SWITCH_STATE_value=True)
self.send_message(payload)
def turn_off(self):
#TODO: header construction should be in MiHomeDevice as it is shared?
payload = OpenThings.Message(SWITCH)
payload.set(header_productid=self.product_id,
header_sensorid=self.device_id,
recs_SWITCH_STATE_value=False)
self.send_message(payload)
def set_switch(self, state):
if state:
self.turn_on()
else:
self.turn_off()
#TODO: difference between 'is on and 'is requested on'
#TODO: difference between 'is off' and 'is requested off'
#TODO: switch state might be 'unknown' if not heard.
#TODO: switch state might be 'turning_on' or 'turning_off' if send request and not heard response yet
def is_on(self): # -> boolean
"""True, False, or None if unknown"""
s = self.get_switch()
if s == None: return None
return s
def is_off(self): # -> boolean
"""True, False, or None if unknown"""
s = self.get_switch()
if s == None: return None
return not s
def get_switch(self): # -> boolean
"""Last stored state of the switch, might be None if unknown"""
return self.readings.switch
    def get_voltage(self): # -> voltage:float
        """Last stored voltage reading; raises RuntimeError if none received yet"""
        if self.readings.voltage is None:
            raise RuntimeError("No voltage reading received yet")
        return self.readings.voltage
    def get_frequency(self): # -> frequency:float
        """Last stored frequency reading; raises RuntimeError if none received yet"""
        if self.readings.frequency is None:
            raise RuntimeError("No frequency reading received yet")
        return self.readings.frequency
    def get_apparent_power(self): # -> power:float
        """Last stored apparent power reading; raises RuntimeError if none received yet"""
        if self.readings.apparent_power is None:
            raise RuntimeError("No apparent power reading received yet")
        return self.readings.apparent_power
    def get_reactive_power(self): # -> power:float
        """Last stored reactive power reading; raises RuntimeError if none received yet"""
        if self.readings.reactive_power is None:
            raise RuntimeError("No reactive power reading received yet")
        return self.readings.reactive_power
    def get_real_power(self): # -> power:float
        """Last stored real power reading; raises RuntimeError if none received yet"""
        if self.readings.real_power is None:
            raise RuntimeError("No real power reading received yet")
        return self.readings.real_power
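# A minimal usage sketch for MIHO005 (not part of the library; the device id and the
# radio/registry wiring are illustrative assumptions, e.g. set up via DeviceFactory):
#   plug = MIHO005(0x68b)          # illustrative device id
#   plug.turn_on()                 # sends a SWITCH message with SWITCH_STATE=True
#   plug.turn_off()                # sends a SWITCH message with SWITCH_STATE=False
#   readings = plug.get_readings() # cached Readings, updated by handle_message()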
#------------------------------------------------------------------------------
class MIHO006(MiHomeDevice):
"""An Energenie MiHome Home Monitor"""
def __init__(self, device_id, air_interface=None):
MiHomeDevice.__init__(self, device_id, air_interface)
self.product_id = PRODUCTID_MIHO006
class Readings():
battery_voltage = None
current = None
apparent_power = None
self.readings = Readings()
self.capabilities.send = True
def __repr__(self):
return "MIHO006(%s)" % str(hex(self.device_id))
def handle_message(self, payload):
for rec in payload["recs"]:
paramid = rec["paramid"]
#TODO: consider making this table driven and allowing our base class to fill our readings in for us
#TODO: consider using @OpenThings.parameter as a decorator to the receive function
#it will then register a handler for that message for itself as a handler
#we still need Readings() defined too as a cache. The decorator could add
#an entry into the cache too for us perhaps?
if "value" in rec:
value = rec["value"]
if paramid == OpenThings.PARAM_VOLTAGE:
self.readings.battery_voltage = value
elif paramid == OpenThings.PARAM_CURRENT:
self.readings.current = value
elif paramid == OpenThings.PARAM_APPARENT_POWER:
self.readings.apparent_power = value
                else:
                    try:
                        param_name = OpenThings.param_info[paramid]['n'] # name
                    except KeyError:
                        param_name = "UNKNOWN_%s" % str(hex(paramid))
                    print("unwanted paramid: %s" % param_name)
def get_battery_voltage(self): # -> voltage:float
return self.readings.battery_voltage
def get_current(self): # -> current:float
return self.readings.current
def get_apparent_power(self): # -> power:float
return self.readings.apparent_power
#------------------------------------------------------------------------------
class MIHO013(MiHomeDevice):
"""An Energenie MiHome eTRV Radiator Valve"""
def __init__(self, device_id, air_interface=None):
MiHomeDevice.__init__(self, device_id, air_interface)
self.product_id = PRODUCTID_MIHO013
class Readings():
battery_voltage = None
ambient_temperature = None
pipe_temperature = None
setpoint_temperature = None
valve_position = None
diagnostic_flags = None
self.readings = Readings()
self.radio_config.inner_times = 4
self.capabilities.send = True
self.capabilities.receive = True
self.send_queue = []
self.lastVoltageReading = None
self.lastDiagnosticsReading = None
self.voltageReadingPeriod = 3600
self.diagnosticsReadingPeriod = 3600
def handle_message(self, payload):
# check if it's time to refresh readings
now=time.time()
if self.voltageReadingPeriod != None and ( self.lastVoltageReading == None or now-self.lastVoltageReading>self.voltageReadingPeriod):
self.queue_message(OpenThings.Message(MIHO013_BATTERY_LEVEL))
self.lastVoltageReading = now
if self.diagnosticsReadingPeriod != None and ( self.lastDiagnosticsReading == None or now-self.lastDiagnosticsReading>self.diagnosticsReadingPeriod):
self.queue_message(OpenThings.Message(MIHO013_DIAGNOSTICS))
self.lastDiagnosticsReading = now
        # send a message whilst the receive window is open
        if len(self.send_queue) > 0:
            message = self.send_queue.pop(0)
            self.send_message(message)
#print ("MIHO013 send %s (%s)" % (self.device_id, len(self.send_queue)))
#extract data from message
for rec in payload["recs"]:
paramid = rec["paramid"]
if "value" in rec:
value = rec["value"]
#print("MIHO013 new data %s %s %s" % (self.device_id, OpenThings.paramid_to_paramname(paramid), value))
if paramid == OpenThings.PARAM_TEMPERATURE:
self.readings.ambient_temperature = value
if paramid == OpenThings.PARAM_VOLTAGE:
self.readings.battery_voltage = value
if paramid == OpenThings.PARAM_DIAGNOSTICS:
self.readings.diagnostic_flags = value
def queue_message(self, message):
message.set(
header_productid=self.product_id,
header_sensorid=self.device_id,
header_encryptPIP=int(random.randrange(0xFFFF))
)
self.send_queue.append(copy.copy(message))
def get_battery_voltage(self): # ->voltage:float
return self.readings.battery_voltage
def get_ambient_temperature(self): # -> temperature:float
return self.readings.ambient_temperature
def get_diagnostics(self):
return self.readings.diagnostic_flags
def get_setpoint_temperature(self): #-> temperature:float
return self.readings.setpoint_temperature
    def set_setpoint_temperature(self, temperature):
        # clamp to the valid range before storing and sending
        if temperature < 0:
            temperature = 0
        if temperature > 30:
            temperature = 30
        self.readings.setpoint_temperature = temperature
        payload = OpenThings.Message(MIHO013_SET_TEMPERATURE).copyof()
        payload.set(recs_TEMPERATURE_value=int(temperature*256))
        self.queue_message(payload)
def set_valve_position(self, position):
payload = OpenThings.Message(MIHO013_SET_VALVE_POSITION).copyof()
payload.set(recs_VALVE_POSITION_value=position)
self.queue_message(payload)
def set_identify(self):
self.queue_message(OpenThings.Message(MIHO013_IDENTIFY).copyof())
def turn_on(self):
self.set_valve_position(0)
def turn_off(self):
self.set_valve_position(1)
def enable_thermostat(self):
self.set_valve_position(2)
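# A minimal usage sketch for MIHO013 (not part of the library; the device id is an
# illustrative assumption, and queued messages are only transmitted when the eTRV
# opens its receive window):
#   valve = MIHO013(0x373)
#   valve.set_setpoint_temperature(21)  # clamped to 0..30, queued for the next window
#   valve.enable_thermostat()           # valve position 2 = thermostatic control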
#------------------------------------------------------------------------------
class MIHO032(MiHomeDevice):
"""An Energenie Motion Sensor"""
def __init__(self, device_id, air_interface=None):
MiHomeDevice.__init__(self, device_id, air_interface)
self.product_id = PRODUCTID_MIHO032
class Readings():
switch_state = None
battery_alarm = None
self.readings = Readings()
self.capabilities.send = True
self.callback=None
def __repr__(self):
return "MIHO032(%s)" % str(hex(self.device_id))
def setCallback(self,callback):
self.callback=callback
def handle_message(self, payload):
##print("MIHO032 new data %s %s" % (self.device_id, payload))
##sys.stdout.flush()
for rec in payload["recs"]:
paramid = rec["paramid"]
#TODO: consider making this table driven and allowing our base class to fill our readings in for us
#TODO: consider using @OpenThings.parameter as a decorator to the receive function
#it will then register a handler for that message for itself as a handler
#we still need Readings() defined too as a cache. The decorator could add
#an entry into the cache too for us perhaps?
if "value" in rec:
value = rec["value"]
                if paramid == OpenThings.PARAM_MOTION_DETECTOR:
                    state = ((value == True) or (value != 0))
                    if self.readings.switch_state != state:
                        self.readings.switch_state = state
                        #print("MIHO032 new data %s %s" % (self.device_id, payload))
                        if self.callback is not None:
                            self.callback(self, state)
elif paramid == OpenThings.PARAM_ALARM:
if value == 0x42: # battery alarming
self.readings.battery_alarm = True
elif value == 0x62: # battery not alarming
self.readings.battery_alarm = False
                else:
                    try:
                        param_name = OpenThings.param_info[paramid]['n'] # name
                    except KeyError:
                        param_name = "UNKNOWN_%s" % str(hex(paramid))
                    print("unwanted paramid: %s" % param_name)
def get_switch_state(self): # -> switch:bool
return self.readings.switch_state
def get_battery_alarm(self): # -> alarm:bool
return self.readings.battery_alarm
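# A minimal callback sketch for MIHO032 (not part of the library; the handler name and
# device id are illustrative). The callback fires only when the motion state changes:
#   def on_motion(device, state):
#       print("motion %s on %s" % (state, device))
#   sensor = MIHO032(0x4f2)
#   sensor.setCallback(on_motion)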
#------------------------------------------------------------------------------
class MIHO033(MiHomeDevice):
"""An Energenie Open Sensor"""
def __init__(self, device_id, air_interface=None):
MiHomeDevice.__init__(self, device_id, air_interface)
self.product_id = PRODUCTID_MIHO033
class Readings():
switch_state = None
self.readings = Readings()
self.capabilities.send = True
def __repr__(self):
return "MIHO033(%s)" % str(hex(self.device_id))
def handle_message(self, payload):
##print("MIHO033 new data %s %s" % (self.device_id, payload))
for rec in payload["recs"]:
paramid = rec["paramid"]
#TODO: consider making this table driven and allowing our base class to fill our readings in for us
#TODO: consider using @OpenThings.parameter as a decorator to the receive function
#it will then register a handler for that message for itself as a handler
#we still need Readings() defined too as a cache. The decorator could add
#an entry into the cache too for us perhaps?
if "value" in rec:
value = rec["value"]
if paramid == OpenThings.PARAM_DOOR_SENSOR:
self.readings.switch_state = ((value == True) or (value != 0))
                else:
                    try:
                        param_name = OpenThings.param_info[paramid]['n'] # name
                    except KeyError:
                        param_name = "UNKNOWN_%s" % str(hex(paramid))
                    print("unwanted paramid: %s" % param_name)
def get_switch_state(self): # -> switch:bool
return self.readings.switch_state
#----- DEVICE FACTORY ---------------------------------------------------------
# This is a singleton, but might not be in the future.
# i.e. we might have device factories for lots of different devices.
# and a DeviceFactory could auto configure it's set of devices
# with a specific air_interface for us.
# i.e. this might be the EnergenieDeviceFactory, there might be others
# for other product ranges like wirefree doorbells
class DeviceFactory():
"""A place to come to, to get instances of device classes"""
# If you know the name of the device, use this table
device_from_name = {
# official name friendly name
"ENER002": ENER002, "GreenButton": ENER002, # OOK(rx)
"MIHO002": MIHO002, "Controller": MIHO002, # OOK(rx)
"MIHO004": MIHO004, "Monitor": MIHO004, # FSK(rx)
"MIHO005": MIHO005, "AdaptorPlus": MIHO005, # FSK(tx,rx)
"MIHO006": MIHO006, "HomeMonitor": MIHO006, # FSK(tx)
"MIHO008": MIHO008, "MiHomeLightWhite": MIHO008, # OOK(rx)
"MIHO013": MIHO013, "eTRV": MIHO013, # FSK(tx,rx)
"MIHO014": MIHO014, "3kWRelay": MIHO014, # OOK(rx)
"MIHO024": MIHO024, "MiHomeLightBlack": MIHO024, # OOK(rx)
"MIHO025": MIHO025, "MiHomeLightChrome": MIHO025, # OOK(rx)
"MIHO026": MIHO026, "MiHomeLightSteel": MIHO026, # OOK(rx)
"MIHO032": MIHO032, "MotionSensor": MIHO032, # FSK(tx)
"MIHO033": MIHO033, "OpenSensor": MIHO033, # FSK(tx)
}
#TODO: These are MiHome devices only, but might add in mfrid prefix too
# If you know the mfrid, productid of the device, use this table
device_from_id = {
        #ENER002 is an OOK
#MIHO002 control only switch is an OOK
PRODUCTID_MIHO004: MIHO004,
PRODUCTID_MIHO005: MIHO005,
PRODUCTID_MIHO006: MIHO006,
#MIHO008 is an OOK
PRODUCTID_MIHO013: MIHO013,
#MIHO014 is an OOK
#MIHO024 is an OOK
#MIHO025 is an OOK
#MIHO026 is an OOK
PRODUCTID_MIHO032: MIHO032,
PRODUCTID_MIHO033: MIHO033
}
default_air_interface = None
@staticmethod
def set_default_air_interface(air_interface):
DeviceFactory.default_air_interface = air_interface
@staticmethod
def keys():
return DeviceFactory.device_from_name.keys()
@staticmethod
def get_device_from_name(name, device_id=None, air_interface=None, **kwargs):
"""Get a device by name, construct a new instance"""
# e.g. This is useful when creating device class instances from a human readable config
        if name not in DeviceFactory.device_from_name:
            raise ValueError("Unsupported device:%s" % name)
        c = DeviceFactory.device_from_name[name]
        if air_interface is None:
            air_interface = DeviceFactory.default_air_interface
return c(device_id, air_interface, **kwargs)
@staticmethod
def get_device_from_id(id, device_id=None, air_interface=None):
"""Get a device by it's id, construct a new instance"""
# e.g. This is useful when recreating device class instances from a persisted registry
if not id in DeviceFactory.device_from_id:
raise ValueError("Unsupported device id:%s" % id)
c = DeviceFactory.device_from_id[id]
if air_interface == None:
air_interface = DeviceFactory.default_air_interface
i = c(device_id, air_interface)
print(i)
return i
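# A minimal factory usage sketch (not part of the library; the radio object and the
# device ids are illustrative assumptions):
#   DeviceFactory.set_default_air_interface(radio)
#   plug = DeviceFactory.get_device_from_name("AdaptorPlus", device_id=0x68b)
#   trv  = DeviceFactory.get_device_from_id(PRODUCTID_MIHO013, device_id=0x373)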
# END
| 36.614198
| 157
| 0.582884
|
9dd6df58bbd51c9ae92663a3b2d4cd4b456e3f7e
| 1,620
|
py
|
Python
|
problems/9/DecompressorTest.py
|
tmct/adventOfCode2016
|
bd5699ca179b873f9da01514903b1dd493a46b7b
|
[
"MIT"
] | null | null | null |
problems/9/DecompressorTest.py
|
tmct/adventOfCode2016
|
bd5699ca179b873f9da01514903b1dd493a46b7b
|
[
"MIT"
] | null | null | null |
problems/9/DecompressorTest.py
|
tmct/adventOfCode2016
|
bd5699ca179b873f9da01514903b1dd493a46b7b
|
[
"MIT"
] | null | null | null |
import unittest
from Decompressor import Decompressor
class DecompressorTest(unittest.TestCase):
def test_example_1(self):
output = Decompressor().get_decompressed_length('ADVENT')
self.assertEqual(6, output)
def test_example_2(self):
output = Decompressor().get_decompressed_length('A(1x5)BC')
self.assertEqual(7, output)
def test_example_3(self):
output = Decompressor().get_decompressed_length('(3x3)XYZ')
self.assertEqual(9, output)
def test_example_4(self):
output = Decompressor().get_decompressed_length('A(2x2)BCD(2x2)EFG')
self.assertEqual(11, output)
def test_example_5(self):
output = Decompressor().get_decompressed_length('(6x1)(1x3)A')
self.assertEqual(6, output)
def test_example_6(self):
output = Decompressor().get_decompressed_length('X(8x2)(3x3)ABCY')
self.assertEqual(18, output)
def test_example_1b(self):
output = Decompressor().get_decompressed_length_v2('(3x3)XYZ')
self.assertEqual(9, output)
def test_example_2b(self):
output = Decompressor().get_decompressed_length_v2('X(8x2)(3x3)ABCY')
self.assertEqual(20, output)
def test_example_2c(self):
output = Decompressor().get_decompressed_length_v2('(27x12)(20x12)(13x14)(7x10)(1x12)A')
self.assertEqual(241920, output)
def test_example_2d(self):
output = Decompressor().get_decompressed_length_v2('(25x3)(3x3)ABC(2x3)XY(5x2)PQRSTX(18x9)(3x2)TWO(5x7)SEVEN')
self.assertEqual(445, output)
if __name__ == '__main__':
unittest.main()
| 33.75
| 118
| 0.687037
|
77532ee60b41866938c402c21cc24fbc77065d5d
| 2,357
|
py
|
Python
|
Django/django_project_lsc_05/book_view/views_genericapiview.py
|
MarioKarting/Django_basic
|
437556628f72ccf8fdfd63c46191dcee39602b00
|
[
"MIT"
] | null | null | null |
Django/django_project_lsc_05/book_view/views_genericapiview.py
|
MarioKarting/Django_basic
|
437556628f72ccf8fdfd63c46191dcee39602b00
|
[
"MIT"
] | null | null | null |
Django/django_project_lsc_05/book_view/views_genericapiview.py
|
MarioKarting/Django_basic
|
437556628f72ccf8fdfd63c46191dcee39602b00
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from rest_framework.views import APIView
from rest_framework.generics import GenericAPIView
# Create your views here.
from C_databases.models import BookInfo
from book_serializer.serializers import BookInfoSerializer
from rest_framework.response import Response
class BooksView(GenericAPIView):
"""
获取所有图书
保存图书
"""
# 指定视图使用的查询集
queryset = BookInfo.objects.all()
# 指定视图使用的序列化器
serializer_class = BookInfoSerializer
def get(self, request):
# 1、查询所有图书
# self.get_queryset() 获取查询集的所有数据
books = self.get_queryset()
# 构建json数据 # 在类视图中调用序列化器类,初始化是传入需要序列化返回的数据对象
# ser = BookSerializer(books, many=True)
# self.get_serializer 获取指定的序列化器进行初始化操作
ser = self.get_serializer(books, many=True)
# 2、返回查询集数据
return Response(ser.data)
    def post(self, request):
        # 1. Get the data sent by the client
        # data = request.body.decode()
        # data_dict = json.loads(data)
        data = request.data
        # 2. Validate the data
        # The serializer is initialised with the data to validate
        # ser = BookSerializer(data=data)
        ser = self.get_serializer(data=data)
        # is_valid() is the validation method provided by the serializer
        ser.is_valid(raise_exception=True)  # raise_exception: raise an exception if validation fails
        # 3. Save
        ser.save()
        # 4. Return
        # In a class-based view the serializer is initialised with the objects
        # that need to be serialized, building the JSON response data
        return Response(ser.data)
class BookView(GenericAPIView):
"""
获取单一图书
更新
删除
"""
#获取单一图书 路由GET /books/<pk>/
# 指定视图使用的查询集
queryset = BookInfo.objects.all()
# 指定视图使用的序列化器
serializer_class = BookInfoSerializer
def get(self,request,pk):
#从查询集中查询获取PK所对应的ID数据的对象
book = self.get_object()
ser = self.get_serializer(book)
return Response(ser.data)
    # Update book information, route: PUT /books/<pk>
    def put(self, request, pk):
        data = request.data
        book = self.get_object()
        ser = self.get_serializer(book, data=data)
        ser.is_valid(raise_exception=True)
        ser.save()
        return Response(ser.data)
    def delete(self, request, pk):
        # 1. Look up the book object by its id
        book = self.get_object()
        # 2. Logical (soft) delete
        book.is_delete = True
        book.save()
        # Physical delete:
        # book.delete()
        # 3. Return the result
        return Response({})
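# A hedged wiring sketch (assumption: not part of this project; module and route names
# are illustrative) showing how these generic views would typically be routed:
# from django.urls import path
# from book_view import views_genericapiview as views
#
# urlpatterns = [
#     path('books/', views.BooksView.as_view()),
#     path('books/<int:pk>/', views.BookView.as_view()),
# ]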
| 22.028037
| 74
| 0.612219
|
5d75c49603352fd0ead8523864f97795ced97fbd
| 647
|
py
|
Python
|
src/simmate/calculators/vasp/workflows/static_energy/quality_04.py
|
jacksund/simmate
|
0b29704540574e11b711f7b44e2cb7740141ebb4
|
[
"BSD-3-Clause"
] | 9
|
2021-12-21T02:58:21.000Z
|
2022-01-25T14:00:06.000Z
|
src/simmate/calculators/vasp/workflows/static_energy/quality_04.py
|
jacksund/simmate
|
0b29704540574e11b711f7b44e2cb7740141ebb4
|
[
"BSD-3-Clause"
] | 51
|
2022-01-01T15:59:58.000Z
|
2022-03-26T21:25:42.000Z
|
src/simmate/calculators/vasp/workflows/static_energy/quality_04.py
|
jacksund/simmate
|
0b29704540574e11b711f7b44e2cb7740141ebb4
|
[
"BSD-3-Clause"
] | 7
|
2022-01-01T03:44:32.000Z
|
2022-03-29T19:59:27.000Z
|
# -*- coding: utf-8 -*-
from simmate.workflow_engine import s3task_to_workflow
from simmate.calculators.vasp.tasks.static_energy import (
Quality04Energy as Quality04EnergyTask,
)
from simmate.calculators.vasp.database.energy import (
Quality04StaticEnergy as Quality04StaticEnergyResults,
)
workflow = s3task_to_workflow(
name="static-energy/quality04",
module=__name__,
project_name="Simmate-Energy",
s3task=Quality04EnergyTask,
calculation_table=Quality04StaticEnergyResults,
register_kwargs=["structure", "source"],
description_doc_short="low-quality settings meant for highly unreasonable structures",
)
| 32.35
| 90
| 0.785162
|
7f6375884eecc9f6f37f912cca40e9d57c0412d8
| 7,861
|
py
|
Python
|
aliyun/log/shipper_response.py
|
saorisakura/aliyun-log-python-sdk
|
584bf6321e8274fdf9604433ff583c22c1c4fd7a
|
[
"MIT"
] | 1
|
2020-08-31T14:42:58.000Z
|
2020-08-31T14:42:58.000Z
|
aliyun/log/shipper_response.py
|
saorisakura/aliyun-log-python-sdk
|
584bf6321e8274fdf9604433ff583c22c1c4fd7a
|
[
"MIT"
] | null | null | null |
aliyun/log/shipper_response.py
|
saorisakura/aliyun-log-python-sdk
|
584bf6321e8274fdf9604433ff583c22c1c4fd7a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
# Copyright (C) Alibaba Cloud Computing
# All rights reserved.
__all__ = ['CreateShipperResponse', 'UpdateShipperResponse', 'DeleteShipperResponse',
'GetShipperConfigResponse', 'ListShipperResponse', 'GetShipperTasksResponse',
'RetryShipperTasksResponse']
from .logresponse import LogResponse
from .shipper_config import OdpsShipperConfig
from .shipper_config import OssShipperConfig
from .shipper_config import ShipperTask
class CreateShipperResponse(LogResponse):
def __init__(self, header, resp=''):
LogResponse.__init__(self, header, resp)
def log_print(self):
print('CreateShipperResponse:')
print('headers:', self.get_all_headers())
class UpdateShipperResponse(LogResponse):
def __init__(self, header, resp=''):
LogResponse.__init__(self, header, resp)
def log_print(self):
print('UpdateShipperResponse:')
print('headers:', self.get_all_headers())
class DeleteShipperResponse(LogResponse):
def __init__(self, header, resp=''):
LogResponse.__init__(self, header, resp)
def log_print(self):
print('DeleteShipperResponse:')
print('headers:', self.get_all_headers())
class GetShipperConfigResponse(LogResponse):
def __init__(self, resp, header):
LogResponse.__init__(self, header, resp)
self.create_time = resp['createTime']
self.last_modify_time = resp['lastModifyTime']
self.type = resp['targetType']
target_config = resp['targetConfiguration']
if self.type == 'odps':
self.config = OdpsShipperConfig(target_config["odpsEndpoint"],
target_config["odpsProject"],
target_config["odpsTable"],
target_config["fields"],
target_config["partitionColumn"],
target_config["partitionTimeFormat"],
target_config["bufferInterval"])
elif self.type == 'oss':
self.config = OssShipperConfig(target_config['ossBucket'],
target_config['ossPrefix'],
target_config['roleArn'],
target_config['bufferInterval'],
target_config['bufferSize'],
target_config['compressType'])
def get_config(self):
"""
:return:
"""
return self.config
def get_create_time(self):
"""
:return:
"""
return self.create_time
def get_last_modify_time(self):
"""
:return:
"""
return self.last_modify_time
def log_print(self):
"""
:return:
"""
print('GetShipperConfigResponse:')
print('type:' + self.type)
print('config:' + str(self.config.to_json()))
class ListShipperResponse(LogResponse):
def __init__(self, resp, header):
LogResponse.__init__(self, header, resp)
self.count = resp['count']
self.total = resp['total']
self.shipper_names = resp['shipper']
def get_shipper_count(self):
"""
:return:
"""
return self.count
def get_shipper_total(self):
"""
:return:
"""
return self.total
def get_shipper_names(self):
"""
:return:
"""
return self.shipper_names
def log_print(self):
"""
:return:
"""
print('ListShipperResponse:')
print('shipper count : ' + str(self.count))
print('shipper total : ' + str(self.total))
print('shipper names : ' + str(self.shipper_names))
class GetShipperTasksResponse(LogResponse):
def __init__(self, resp, header):
LogResponse.__init__(self, header, resp)
self.count = resp['count']
self.total = resp['total']
self.running_count = resp['statistics']['running']
self.success_count = resp['statistics']['success']
self.fail_count = resp['statistics']['fail']
self.tasks = []
for task_res in resp['tasks']:
task = ShipperTask(task_res['id'], task_res['taskStatus'], task_res['taskMessage'],
task_res['taskCreateTime'],
task_res['taskLastDataReceiveTime'], task_res['taskFinishTime'])
self.tasks.append(task)
def get_task_count(self):
"""
:return:
"""
return self.count
def get_count(self):
return self.count
def get_task_total(self):
"""
:return:
"""
return self.total
def get_total(self):
return self.total
def get_running_task_count(self):
"""
:return:
"""
return self.running_count
def get_success_task_count(self):
"""
:return:
"""
return self.success_count
def get_fail_task_count(self):
"""
:return:
"""
return self.fail_count
def _get_task_ids(self, status):
task_ids = []
for task in self.tasks:
if task.task_status == status:
task_ids.append(task.task_id)
return task_ids
def get_fail_task_ids(self):
"""
:return:
"""
return self._get_task_ids("fail")
def get_running_task_ids(self):
"""
:return:
"""
return self._get_task_ids("running")
def get_success_task_ids(self):
"""
:return:
"""
return self._get_task_ids("success")
def get_tasks(self):
"""
:return:
"""
return self.tasks
def log_print(self):
"""
:return:
"""
print('GetShipperTasksResponse:')
print('ship count : ' + str(self.count))
print('ship total : ' + str(self.total))
print('ship running_count : ' + str(self.running_count))
print('ship success_count : ' + str(self.success_count))
print('ship fail_count : ' + str(self.fail_count))
        print('ship tasks : ')
for task in self.tasks:
print(str(task.to_json()))
def merge(self, response):
if not isinstance(response, GetShipperTasksResponse):
raise ValueError("passed response is not a GetShipperTasksResponse: " + str(type(response)))
self.count += response.get_count()
self.total = response.get_total() # use the latest total count
        self.running_count += response.get_running_task_count()
self.success_count += response.get_success_task_count()
self.fail_count += response.get_fail_task_count()
        self.tasks.extend(response.get_tasks())
# update body
self.body['count'] = self.count
self.body['total'] = self.total
self.body['statistics']['running'] = self.running_count
self.body['statistics']['success'] = self.success_count
self.body['statistics']['fail'] = self.fail_count
return self
class RetryShipperTasksResponse(LogResponse):
def __init__(self, header, resp=''):
LogResponse.__init__(self, header, resp)
def log_print(self,):
print('RetryShipperTasksResponse')
| 28.379061
| 104
| 0.563923
|
f1730426f31a1bf09b4188c1f88321da74065385
| 19,670
|
py
|
Python
|
car_tracking/doRPC.py
|
sameeptandon/sail-car-log
|
0ee3d598bb09d389bcbd2ebf73cd4b2411e796be
|
[
"BSD-2-Clause"
] | 1
|
2021-02-24T03:11:13.000Z
|
2021-02-24T03:11:13.000Z
|
car_tracking/doRPC.py
|
sameeptandon/sail-car-log
|
0ee3d598bb09d389bcbd2ebf73cd4b2411e796be
|
[
"BSD-2-Clause"
] | null | null | null |
car_tracking/doRPC.py
|
sameeptandon/sail-car-log
|
0ee3d598bb09d389bcbd2ebf73cd4b2411e796be
|
[
"BSD-2-Clause"
] | 3
|
2015-03-18T14:36:04.000Z
|
2018-07-04T02:57:24.000Z
|
#!/usr/bin/python
import os, sys
from AnnotationLib import *
from optparse import OptionParser
import copy
import math
# BASED ON WIKIPEDIA VERSION
# n - number of nodes
# C - capacity matrix
# F - flow matrix
# s - source
# t - sink
# sumC - sum over rows of C (to speed up computation)
def edmonds_karp(n, C, s, t, sumC):
# Residual capacity from u to v is C[u][v] - F[u][v]
F = [[0] * n for i in xrange(n)]
while True:
P = [-1] * n # Parent table
P[s] = s
M = [0] * n # Capacity of path to node
M[s] = float('infinity')
Q = [s] # BFS queue
while Q:
u = Q.pop(0)
for v in xrange(n):
# There is available capacity,
# and v is not seen before in search
if C[u][v] - F[u][v] > 0 and P[v] == -1:
P[v] = u
M[v] = min(M[u], C[u][v] - F[u][v])
if v != t:
if(sumC[u] > 0):
Q.append(v)
else:
# Backtrack search, and write flow
while P[v] != v:
u = P[v]
F[u][v] += M[t]
F[v][u] -= M[t]
v = u
Q = None
break
if P[t] == -1: # We did not find a path to t
return (F)
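# A tiny worked example (not from the original code; kept as a comment so module
# behaviour is unchanged). With source 0, sink 3 and unit capacities 0->1, 0->2,
# 1->3 and 2->3, the maximum flow is 2:
#   n = 4; s = 0; t = 3
#   C = [[0,1,1,0],[0,0,0,1],[0,0,0,1],[0,0,0,0]]
#   sumC = [sum(row) for row in C]
#   F = edmonds_karp(n, C, s, t, sumC)
#   max_flow = sum(F[s][v] for v in xrange(n))   # -> 2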
class AnnoGraph:
def __init__(self, anno, det, ignore, style, minCover, minOverlap, maxDistance, ignoreOverlap):
# setting rects
#print anno.imageName
self.anno = anno
self.det = det
self.det.sortByScore("descending")
# generate initial graph
self.n = len(det.rects)
self.m = len(anno.rects)
# Number of nodes = number of detections + number of GT + source + sink
self.a = self.n + self.m + 2
# Flow matrix
self.F = [[0] * self.a for i in xrange(self.a)]
# Capacity matrix
self.C = [[0] * self.a for i in xrange(self.a)]
# Connect source to all detections
for i in range(1, self.n + 1):
self.C[0][i] = 1
self.C[i][0] = 1
# Connect sink to all GT
for i in range(self.n + 1, self.a - 1):
self.C[i][self.a - 1] = 1
self.C[self.a - 1][i] = 1
# Overall flow
self.full_flow = 0
self.ignore_flow = 0
# match rects / Adjacency matrix
self.M = [[] for i in xrange(self.n)]
self.match(style, minCover, minOverlap, maxDistance)
self.nextN = 0
# Deactivate All Non Matching detections
# Save row sums for capacity matrix
self.sumC = []
self.sumC.append(self.n)
for q in [len(self.M[j]) for j in xrange(len(self.M))]:
self.sumC.append(q)
for q in [1] * self.m:
self.sumC.append(q)
# Initially no links are active
self.sumC_active = []
self.sumC_active.append(self.n)
for q in [len(self.M[j]) for j in xrange(len(self.M))]:
self.sumC_active.append(0)
for q in [1] * self.m:
self.sumC_active.append(q)
#
self.ignore = [ 0 ] * self.m
for ig in ignore.rects:
for i, r in enumerate(anno.rects):
if(ig.overlap_pascal(r) > ignoreOverlap):
self.ignore[i] = 1
def match(self, style, minCover, minOverlap, maxDistance):
for i in xrange(self.n):
detRect = self.det.rects[i]
for j in xrange(self.m):
annoRect = self.anno.rects[j]
# Bastian Leibe's matching style
if(style == 0):
if detRect.isMatchingStd(annoRect, minCover, minOverlap, maxDistance):
self.M[i].append(self.n + 1 + j)
# Pascal Matching style
if(style == 1):
if (detRect.isMatchingPascal(annoRect, minOverlap)):
self.M[i].append(self.n + 1 + j)
def decreaseScore(self, score):
capacity_change = False
for i in xrange(self.nextN, self.n):
if (self.det.rects[i].score >= score):
capacity_change = self.insertIntoC(i + 1) or capacity_change
self.nextN += 1
else:
break
if capacity_change:
self.F = edmonds_karp(self.a, self.C, 0, self.a - 1, self.sumC_active)
self.full_flow = sum([self.F[0][i] for i in xrange(self.a)])
self.ignore_flow = sum([self.F[i][self.a - 1] * self.ignore[i - 1 - self.n] for i in range(1 + self.n, 1 + self.n + self.m )])
return capacity_change
def addBB(self, rect):
self.nextN += 1
capacity_change = self.insertIntoC(rect.boxIndex + 1)
if capacity_change:
self.F = edmonds_karp(self.a, self.C, 0, self.a - 1, self.sumC_active)
self.full_flow = sum([self.F[0][i] for i in xrange(self.a)])
self.ignore_flow = sum([self.F[i][self.a - 1] * self.ignore[i - 1 - self.n] for i in range(1 + self.n, 1 + self.n + self.m )])
return capacity_change
def insertIntoC(self, i):
#print "Inserting node", i, self.det.rects[i-1].score, "of image", self.anno.imageName
for match in self.M[i - 1]:
#print " match: ", match
self.C[i][match] = 1
self.C[match][i] = 1
self.sumC_active[i] = self.sumC[i]
return self.sumC[i] > 0
def maxflow(self):
return self.full_flow - self.ignore_flow
def consideredDets(self):
return self.nextN - self.ignore_flow
def ignoredFlow(self):
return self.ignore_flow
def getTruePositives(self):
ret = copy.copy(self.anno)
ret.rects = []
#iterate over GT
for i in xrange(self.n + 1, self.a - 1):
#Flow to sink > 0
if(self.F[i][self.a - 1] > 0 and self.ignore[i - self.n - 1] == 0):
#Find associated det
for j in xrange(1, self.n + 1):
if(self.F[j][i] > 0):
ret.rects.append(self.det[j - 1])
break
return ret
def getIgnoredTruePositives(self):
ret = copy.copy(self.anno)
ret.rects = []
#iterate over GT
for i in xrange(self.n + 1, self.a - 1):
#Flow to sink > 0
if(self.F[i][self.a - 1] > 0 and self.ignore[i - self.n - 1] == 1):
#Find associated det
for j in xrange(1, self.n + 1):
if(self.F[j][i] > 0):
ret.rects.append(self.det[j - 1])
break
return ret
def getMissingRecall(self):
ret = copy.copy(self.anno)
ret.rects = []
for i in xrange(self.n + 1, self.a - 1):
if(self.F[i][self.a - 1] == 0 and self.ignore[i - self.n - 1] == 0):
ret.rects.append(self.anno.rects[i - self.n - 1])
return ret
def getFalsePositives(self):
ret = copy.copy(self.det)
ret.rects = []
for i in xrange(1, self.n + 1):
if(self.F[0][i] == 0):
ret.rects.append(self.det[i - 1])
return ret
def asort(idlGT, idlDet, minWidth, minHeight, style, minCover, minOverlap, maxDistance, maxWidth=float('inf'), maxHeight=float('inf')):
#Asort too small object in ground truth
for x,anno in enumerate(idlGT):
imageFound = False
filterIndex = -1
for i,filterAnno in enumerate(idlDet):
if (suffixMatch(anno.imageName, filterAnno.imageName) and anno.frameNr == filterAnno.frameNr):
filterIndex = i
imageFound = True
break
if(not imageFound):
continue
validGTRects = []
for j in anno.rects:
if (j.width() >= minWidth) and (j.height() >= minHeight) and (j.width() <= maxWidth) and (j.height() <= maxHeight):
validGTRects.append(j)
else:
# Sort out detections that would have matched
matchingIndexes = []
for m,frect in enumerate(idlDet[filterIndex].rects):
if(style == 0):
if (j.isMatchingStd(frect, minCover,minOverlap, maxDistance)):
overlap = j.overlap_pascal(frect)
matchingIndexes.append((m,overlap))
if(style == 1):
if(j.isMatchingPascal(frect, minOverlap)):
overlap = j.overlap_pascal(frect)
matchingIndexes.append((m, overlap))
for m in xrange(len(matchingIndexes) - 1, -1, -1):
matching_rect = idlDet[filterIndex].rects[matchingIndexes[m][0]]
matching_overlap = matchingIndexes[m][1]
better_overlap_found = False
for l in anno.rects:
if l.overlap_pascal(matching_rect) > matching_overlap:
better_overlap_found = True
if better_overlap_found:
continue
del idlDet[filterIndex].rects[matchingIndexes[m][0]]
idlGT[x].rects = validGTRects
#Sort out too small false positives
for x,anno in enumerate(idlDet):
imageFound = False
filterIndex = -1
for i,filterAnno in enumerate(idlGT):
if (suffixMatch(anno.imageName, filterAnno.imageName) and anno.frameNr == filterAnno.frameNr):
filterIndex = i
imageFound = True
break
if(not imageFound):
continue
validDetRects = []
for j in anno.rects:
if (j.width() >= minWidth) and (j.height() >= minHeight) and (j.width() <= maxWidth) and (j.height() <= maxHeight):
validDetRects.append(j)
else:
for frect in idlGT[filterIndex].rects:
if(style == 0):
if j.isMatchingStd(frect, minCover,minOverlap, maxDistance):
validDetRects.append(j)
if(style == 1):
if(j.isMatchingPascal(frect, minOverlap)):
validDetRects.append(j)
idlDet[x].rects = validDetRects
def main():
parser = OptionParser(usage="usage: %prog [options] <groundTruthIdl> <detectionIdl>")
parser.add_option("-o", "--outFile",
action="store", type="string", dest="outFile")
parser.add_option("-a", "--analysisFiles",
action="store", type="string", dest="analysisFile")
parser.add_option("-s", "--minScore",
action="store", type="float", dest="minScore")
parser.add_option("-w", "--minWidth",
action="store", type="int", dest="minWidth", default=0)
parser.add_option("-u", "--minHeight",
action="store", type="int", dest="minHeight",default=0)
parser.add_option("--maxWidth", action="store", type="float", dest="maxWidth", default=float('inf'))
parser.add_option("--maxHeight", action="store", type="float", dest="maxHeight", default=float('inf'))
parser.add_option("-r", "--fixAspectRatio",
action="store", type="float", dest="aspectRatio")
parser.add_option("-p", "--Pascal-Style", action="store_true", dest="pascalStyle")
parser.add_option("-l", "--Leibe-Seemann-Matching-Style", action="store_true", dest="leibeStyle")
parser.add_option("--minCover", action="store", type="float", dest="minCover", default=0.5)
parser.add_option("--maxDistance", action="store", type="float", dest="maxDistance", default=0.5)
parser.add_option("--minOverlap", action="store", type="float", dest="minOverlap", default=0.5)
parser.add_option("--clipToImageWidth", action="store", type="float", dest="clipWidth", default= None)
parser.add_option("--clipToImageHeight", action="store", type="float", dest="clipHeight", default= None)
parser.add_option("-d", "--dropFirst", action="store_true", dest="dropFirst")
#parser.add_option("-c", "--class", action="store", type="int", dest="classID", default=-1)
parser.add_option("-c", "--class", action="store", type="int", dest="classID", default = None)
parser.add_option("-i", "--ignore", action="store", type="string", dest="ignoreFile")
parser.add_option("--ignoreOverlap", action="store", type="float", dest="ignoreOverlap", default = 0.9)
(options, args) = parser.parse_args()
if (len(args) < 2):
print "Please specify annotation and detection as arguments!"
parser.print_help()
sys.exit(1)
annoFile = args[0]
# First figure out the minimum height and width we are dealing with
minWidth = options.minWidth
minHeight = options.minHeight
maxWidth = options.maxWidth
maxHeight = options.maxHeight
print "Minimum width: %d height: %d" % (minWidth, minHeight)
# Load files
annoIDL = parse(annoFile)
detIDL = []
for dets in args[1:]:
detIDL += parse(dets)
if options.ignoreFile != None:
ignoreIDL = parse(options.ignoreFile)
else:
ignoreIDL = copy.deepcopy(annoIDL)
for anno in ignoreIDL:
anno.rects = []
if(options.classID is not None):
for anno in annoIDL:
anno.rects = [rect for rect in anno.rects if (rect.classID == options.classID or rect.classID == -1)]
for anno in detIDL:
anno.rects = [rect for rect in anno.rects if (rect.classID == options.classID or rect.classID == -1)]
for anno in ignoreIDL:
anno.rects = [rect for rect in anno.rects if (rect.classID == options.classID or rect.classID == -1)]
# prevent division by zero when fixing aspect ratio
for anno in annoIDL:
anno.rects = [rect for rect in anno.rects if rect.width() > 0 and rect.height() > 0]
for anno in detIDL:
anno.rects = [rect for rect in anno.rects if rect.width() > 0 and rect.height() > 0]
for anno in ignoreIDL:
anno.rects = [rect for rect in anno.rects if rect.width() > 0 and rect.height() > 0]
# Fix aspect ratio
if (not options.aspectRatio == None):
forceAspectRatio(annoIDL, options.aspectRatio)
forceAspectRatio(detIDL, options.aspectRatio)
forceAspectRatio(ignoreIDL, options.aspectRatio)
# Deselect detections with too low score
if (not options.minScore == None):
for i,anno in enumerate(detIDL):
validRects = []
for rect in anno.rects:
if (rect.score >= options.minScore):
validRects.append(rect)
anno.rects = validRects
# Clip detections to the image dimensions
if(options.clipWidth != None or options.clipHeight != None):
min_x = -float('inf')
min_y = -float('inf')
max_x = float('inf')
max_y = float('inf')
if(options.clipWidth != None):
min_x = 0
max_x = options.clipWidth
if(options.clipHeight != None):
min_y = 0
max_y = options.clipHeight
print "Clipping width: (%.02f-%.02f); clipping height: (%.02f-%.02f)" % (min_x, max_x, min_y, max_y)
for anno in annoIDL:
for rect in anno:
rect.clipToImage(min_x, max_x, min_y, max_y)
for anno in detIDL:
for rect in anno:
rect.clipToImage(min_x, max_x, min_y, max_y)
# Setup matching style; standard is Pascal
# style
matchingStyle = 1
# Pascal style
if (options.pascalStyle == True):
matchingStyle = 1
if (options.leibeStyle == True):
matchingStyle = 0
if (options.pascalStyle and options.leibeStyle):
print "Conflicting matching styles!"
sys.exit(1)
if (options.dropFirst == True):
print "Drop first frame of each sequence..."
newIDL = []
for i, anno in enumerate(detIDL):
if (i > 1 and detIDL[i].frameNr == detIDL[i-1].frameNr + 1 and detIDL[i].frameNr == detIDL[i-2].frameNr + 2 and detIDL[i].frameNr == detIDL[i-3].frameNr + 3 and detIDL[i].frameNr == detIDL[i-4].frameNr + 4):
newIDL.append(anno)
detIDL = newIDL
# Asort detections which are too small/too big
print "Asorting too large/ too small detections"
asort(annoIDL, detIDL, minWidth, minHeight, matchingStyle, options.minCover, options.minOverlap, options.maxDistance, maxWidth, maxHeight)
#Debugging asort
#saveIDL("testGT.idl", annoIDL)
#saveIDL("testDET.idl", detIDL)
noAnnotations = 0
for anno in annoIDL:
for j,detAnno in enumerate(detIDL):
if (suffixMatch(anno.imageName, detIDL[j].imageName) and anno.frameNr == detIDL[j].frameNr):
noAnnotations = noAnnotations + len(anno.rects)
break
print "#Annotations:", noAnnotations
###--- set up graphs ---###
print "Setting up graphs ..."
graphs = []
allRects = []
missingFrames = 0
for i in xrange(len(annoIDL)):
imageFound = False
filterIndex = -1
for j, detAnno in enumerate(detIDL):
if (suffixMatch(annoIDL[i].imageName, detIDL[j].imageName) and annoIDL[i].frameNr == detIDL[j].frameNr):
filterIndex = j
imageFound = True
break
if(not imageFound):
print "No annotation/detection pair found for: " + annoIDL[i].imageName + " frame: " + str(annoIDL[i].frameNr)
missingFrames += 1
continue;
graphs.append(AnnoGraph(annoIDL[i], detIDL[filterIndex], ignoreIDL[i], matchingStyle, options.minCover, options.minOverlap, options.maxDistance, options.ignoreOverlap))
for j,rect in enumerate(detIDL[filterIndex]):
newRect = detAnnoRect()
            newRect.imageName = annoIDL[i].imageName
            newRect.frameNr = annoIDL[i].frameNr
newRect.rect = rect
newRect.imageIndex = i - missingFrames
newRect.boxIndex = j
allRects.append(newRect)
print "missingFrames: ", missingFrames
print "Number of detections on annotated frames: " , len(allRects)
###--- get scores from all rects ---###
print "Sorting scores ..."
allRects.sort(cmpDetAnnoRectsByScore)
allRects.reverse()
###--- gradually decrease score ---###
print "Gradually decrease score ..."
lastScore = float('infinity')
precs = [1.0]
recalls = [0.0]
#fppi = [ 10**(math.floor(math.log(1.0 / float(len(annoIDL)))/math.log(10) * 10.0) / 10.0) ]
fppi = [ 1.0 / float(len(annoIDL)) ]
scores = [lastScore]
numDet = len(allRects)
sf = lastsf = 0
cd = lastcd = 0
iflow = lastiflow = 0
changed = False
firstFP = True
for i,nextrect in enumerate(allRects):
score = nextrect.rect.score;
# updating true and false positive counts
sf = sf - graphs[nextrect.imageIndex].maxflow()
cd = cd - graphs[nextrect.imageIndex].consideredDets()
iflow = iflow - graphs[nextrect.imageIndex].ignoredFlow()
#changed = changed or graphs[nextrect.imageIndex].decreaseScore(score)
changed = graphs[nextrect.imageIndex].addBB(nextrect) or changed
sf = sf + graphs[nextrect.imageIndex].maxflow()
cd = cd + graphs[nextrect.imageIndex].consideredDets()
iflow = iflow + graphs[nextrect.imageIndex].ignoredFlow()
if(firstFP and cd - sf != 0):
firstFP = False
changed = True
if (i == numDet - 1 or score != allRects[i + 1].rect.score or firstFP or i == len(allRects)):
if(changed or i == numDet - 1 or i == len(allRects)):
if(lastcd > 0):
scores.append(lastScore)
recalls.append(float(lastsf) / float(noAnnotations - lastiflow))
precs.append(float(lastsf) / float(lastcd))
fppi.append(float(lastcd - lastsf) / float(len(annoIDL)))
if (cd > 0):
scores.append(score)
recalls.append(float(sf) / float(noAnnotations - iflow))
precs.append(float(sf) / float(cd))
fppi.append(float(cd - sf) / float(len(annoIDL)))
changed = False
lastScore = score
lastsf = sf
lastcd = cd
lastiflow = iflow
###--- output to file ---###
outfilename = options.outFile
if outfilename is None:
outputDir = os.path.dirname(os.path.abspath(args[1]))
outputFile = os.path.basename(os.path.abspath(args[1]))
[base, ext] = idlBase(outputFile)
outfilename = outputDir + "/rpc-" + base +".txt"
print "saving " + outfilename;
file = open(outfilename, 'w')
for i in xrange(len(precs)):
file.write(str(precs[i])+" "+str(recalls[i])+" "+str(scores[i])+ " " + str(fppi[i])+ "\n")
file.close()
# Extracting failure cases
if(options.analysisFile != None):
anaPrefix = options.analysisFile
falsePositives = []
truePositives = []
missingRecall = []
ignoredTruePositives = []
for i in xrange(len(graphs)):
falsePositives.append(graphs[i].getFalsePositives())
truePositives.append(graphs[i].getTruePositives())
truePositives[-1].imageName = falsePositives[-1].imageName
truePositives[-1].imagePath = falsePositives[-1].imagePath
missingRecall.append(graphs[i].getMissingRecall())
missingRecall[-1].imageName = falsePositives[-1].imageName
missingRecall[-1].imagePath = falsePositives[-1].imagePath
if options.ignoreFile != None:
ignoredTruePositives.append(graphs[i].getIgnoredTruePositives())
saveIDL(anaPrefix + "-falsePositives.idl.gz", falsePositives);
sortedFP = annoAnalyze(falsePositives);
saveIDL(anaPrefix + "-falsePositives-sortedByScore.idl.gz", sortedFP);
saveIDL(anaPrefix + "-truePositives.idl.gz", truePositives);
sortedFP = annoAnalyze(truePositives);
saveIDL(anaPrefix + "-truePositives-sortedByScore.idl.gz", sortedFP);
if options.ignoreFile != None:
saveIDL(anaPrefix + "-ignoredTruePositives.idl.gz", ignoredTruePositives)
saveIDL(anaPrefix + "-missingRecall.idl.gz", missingRecall);
if __name__ == "__main__":
main()
| 31.472
| 212
| 0.645806
|
fd6e724d3801b5e0131625448e3fa33969cff6eb
| 281
|
py
|
Python
|
v1/accounts/tests/conftest.py
|
DucPhamTV/Bank
|
4905ec7d63ef4daafe2119bf6b32928d4db2d4f2
|
[
"MIT"
] | 94
|
2020-07-12T23:08:47.000Z
|
2022-03-05T14:00:01.000Z
|
v1/accounts/tests/conftest.py
|
DucPhamTV/Bank
|
4905ec7d63ef4daafe2119bf6b32928d4db2d4f2
|
[
"MIT"
] | 84
|
2020-07-13T23:30:50.000Z
|
2022-03-15T15:47:46.000Z
|
v1/accounts/tests/conftest.py
|
DucPhamTV/Bank
|
4905ec7d63ef4daafe2119bf6b32928d4db2d4f2
|
[
"MIT"
] | 63
|
2020-07-13T02:46:51.000Z
|
2021-11-26T09:29:29.000Z
|
import pytest
from thenewboston.third_party.factory.utils import build_json
from ..factories.account import AccountFactory
@pytest.fixture
def account_fake_data():
yield build_json(AccountFactory)
@pytest.fixture
def accounts():
yield AccountFactory.create_batch(100)
| 18.733333
| 61
| 0.807829
|
53ec2972f5365fea04a225b0ab6a898d4dc79b34
| 1,465
|
py
|
Python
|
web/webViews/beansapplication.py
|
primroses/docklet
|
6c42a472a8b427496c03fad18b873cb4be219db3
|
[
"BSD-3-Clause"
] | null | null | null |
web/webViews/beansapplication.py
|
primroses/docklet
|
6c42a472a8b427496c03fad18b873cb4be219db3
|
[
"BSD-3-Clause"
] | null | null | null |
web/webViews/beansapplication.py
|
primroses/docklet
|
6c42a472a8b427496c03fad18b873cb4be219db3
|
[
"BSD-3-Clause"
] | null | null | null |
from flask import session,render_template,request,redirect
from webViews.view import normalView
from webViews.dockletrequest import dockletRequest
class beansapplicationView(normalView):
template_path = "beansapplication.html"
@classmethod
def get(self):
result = dockletRequest.post('/beans/applymsgs/').get('applymsgs')
return self.render(self.template_path, applications = result)
@classmethod
def post(self):
return self.get()
class beansapplyView(normalView):
template_path = "error.html"
@classmethod
def post(self):
data = {"number":request.form["number"],"reason":request.form["reason"]}
result = dockletRequest.post('/beans/apply/',data)
success = result.get("success")
if success == "true":
return redirect("/beans/application/")
else:
return self.render(self.template_path, message = result.get("message"))
@classmethod
def get(self):
return self.post()
class beansadminView(normalView):
msgid = ""
cmd = ""
template_path = "error.html"
@classmethod
def get(self):
data = {"msgid":self.msgid}
result = dockletRequest.post('/beans/admin/'+self.cmd+"/",data)
success = result.get("success")
if success == "true":
return redirect("/user/list/")
else:
return self.render(self.template_path, message = result.get("message"))
| 29.3
| 83
| 0.640273
|
cb1608f721a6f88413ba90f4beaf5d3a078ba09d
| 6,177
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/web/latest/web_app_public_certificate.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/web/latest/web_app_public_certificate.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/web/latest/web_app_public_certificate.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['WebAppPublicCertificate']
class WebAppPublicCertificate(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
blob: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
public_certificate_location: Optional[pulumi.Input[str]] = None,
public_certificate_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Public certificate object
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] blob: Public Certificate byte array
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] name: Name of the app.
:param pulumi.Input[str] public_certificate_location: Public Certificate Location
:param pulumi.Input[str] public_certificate_name: Public certificate name.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['blob'] = blob
__props__['kind'] = kind
if name is None:
raise TypeError("Missing required property 'name'")
__props__['name'] = name
__props__['public_certificate_location'] = public_certificate_location
if public_certificate_name is None:
raise TypeError("Missing required property 'public_certificate_name'")
__props__['public_certificate_name'] = public_certificate_name
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['thumbprint'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:web/v20160801:WebAppPublicCertificate"), pulumi.Alias(type_="azure-nextgen:web/v20180201:WebAppPublicCertificate"), pulumi.Alias(type_="azure-nextgen:web/v20181101:WebAppPublicCertificate"), pulumi.Alias(type_="azure-nextgen:web/v20190801:WebAppPublicCertificate"), pulumi.Alias(type_="azure-nextgen:web/v20200601:WebAppPublicCertificate")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(WebAppPublicCertificate, __self__).__init__(
'azure-nextgen:web/latest:WebAppPublicCertificate',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'WebAppPublicCertificate':
"""
Get an existing WebAppPublicCertificate resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return WebAppPublicCertificate(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def blob(self) -> pulumi.Output[Optional[str]]:
"""
Public Certificate byte array
"""
return pulumi.get(self, "blob")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="publicCertificateLocation")
def public_certificate_location(self) -> pulumi.Output[Optional[str]]:
"""
Public Certificate Location
"""
return pulumi.get(self, "public_certificate_location")
@property
@pulumi.getter
def thumbprint(self) -> pulumi.Output[str]:
"""
Certificate Thumbprint
"""
return pulumi.get(self, "thumbprint")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
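# A hedged usage sketch (not part of the generated SDK; the resource, app and group
# names, and the certificate location value, are illustrative assumptions):
#   cert = WebAppPublicCertificate("appCert",
#       name="my-web-app",
#       resource_group_name="my-resource-group",
#       public_certificate_name="appCert",
#       public_certificate_location="CurrentUserMy",
#       blob="<base64-encoded certificate bytes>")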
| 41.18
| 428
| 0.646916
|
5dee7f64ca855afe0fe9a0cdfcd3db6ef8fd3799
| 2,260
|
py
|
Python
|
models.py
|
Ksusha1319/stud_projects
|
c088034c044fc6cef207fb8f5228f93f15bba0e3
|
[
"MIT"
] | null | null | null |
models.py
|
Ksusha1319/stud_projects
|
c088034c044fc6cef207fb8f5228f93f15bba0e3
|
[
"MIT"
] | null | null | null |
models.py
|
Ksusha1319/stud_projects
|
c088034c044fc6cef207fb8f5228f93f15bba0e3
|
[
"MIT"
] | null | null | null |
from app import db
from datetime import datetime
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from app import login
class Nickname(db.Model):
__tablename__ = "nicknames"
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100))
def __repr__(self):
return "{}".format(self.name)
class Report(db.Model):
""""""
__tablename__ = "reports"
id = db.Column(db.Integer, primary_key=True)
comment = db.Column(db.String(1000),nullable=True,)
created_date = db.Column(db.DateTime, default=datetime.utcnow)
incident_date = db.Column(db.String(100))
incident_type = db.Column(db.String(100),nullable=False,)
game_type = db.Column(db.String(100),nullable=False,)
priority = db.Column(db.String(100),nullable=False,)
status = db.Column(db.String(100), default="NEW")
resolution = db.Column(db.String(100), nullable=True,)
comment_soc = db.Column(db.String(1000), nullable=True,)
service_login = db.Column(db.String(100),nullable=False,)
username = db.Column(db.String(100),nullable=False,)
ticket = db.Column(db.String(100),nullable=True,)
nickname_id = db.Column(db.Integer, db.ForeignKey("nicknames.id"))
nickname = db.relationship("Nickname", backref=db.backref(
"reports", order_by=id), lazy=True)
class User(UserMixin, db.Model):
__tablename__ = 'Users'
id = db.Column(db.Integer(), primary_key=True)
created_date = db.Column(db.DateTime(), default=datetime.utcnow)
username = db.Column(db.String(50), nullable=False, unique=True)
email = db.Column(db.String(100), nullable=False, unique=True)
pass_hash = db.Column(db.String(100), nullable=False)
role = db.Column(db.String(100), default="user" )
about = db.Column(db.String(100))
def __repr__(self):
return '<User {}>'.format(self.username)
def set_password(self, password):
self.pass_hash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.pass_hash, password)
@login.user_loader
def load_user(id):
return User.query.get(int(id))
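# A minimal usage sketch (assumption: run inside the Flask application context; the
# values are illustrative):
#   u = User(username="alice", email="alice@example.com")
#   u.set_password("s3cret")
#   db.session.add(u)
#   db.session.commit()
#   u.check_password("s3cret")  # -> True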
| 38.305085
| 74
| 0.680088
|
e17e51fd90bde50cb1041cddd4e92b7781049c83
| 4,392
|
py
|
Python
|
PaddleCV/gan/util/config.py
|
nepeplwu/models
|
88ef556a9994de31e9f6029778349a4d1b74489a
|
[
"Apache-2.0"
] | 1
|
2019-08-28T15:22:14.000Z
|
2019-08-28T15:22:14.000Z
|
PaddleCV/gan/util/config.py
|
cy5211/models-1
|
5c142ae728abc786c380ece05da52f84e82e795d
|
[
"Apache-2.0"
] | null | null | null |
PaddleCV/gan/util/config.py
|
cy5211/models-1
|
5c142ae728abc786c380ece05da52f84e82e795d
|
[
"Apache-2.0"
] | 2
|
2019-05-06T12:10:15.000Z
|
2019-09-01T04:28:10.000Z
|
#copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import six
import argparse
import functools
import distutils.util
import trainer
def print_arguments(args):
''' Print argparse's argument
Usage:
.. code-block:: python
parser = argparse.ArgumentParser()
parser.add_argument("name", default="Jonh", type=str, help="User name.")
args = parser.parse_args()
print_arguments(args)
:param args: Input argparse.Namespace for printing.
:type args: argparse.Namespace
'''
print("----------- Configuration Arguments -----------")
for arg, value in sorted(six.iteritems(vars(args))):
print("%s: %s" % (arg, value))
print("------------------------------------------------")
def add_arguments(argname, type, default, help, argparser, **kwargs):
"""Add argparse's argument.
Usage:
.. code-block:: python
parser = argparse.ArgumentParser()
add_argument("name", str, "Jonh", "User name.", parser)
args = parser.parse_args()
"""
type = distutils.util.strtobool if type == bool else type
argparser.add_argument(
"--" + argname,
default=default,
type=type,
help=help + ' Default: %(default)s.',
**kwargs)
def base_parse_args(parser):
add_arg = functools.partial(add_arguments, argparser=parser)
# yapf: disable
add_arg('model_net', str, "CGAN", "The model used.")
add_arg('dataset', str, "mnist", "The dataset used.")
add_arg('data_dir', str, "./data", "The dataset root directory")
add_arg('train_list', str, None, "The train list file name")
add_arg('test_list', str, None, "The test list file name")
add_arg('batch_size', int, 1, "Minibatch size.")
add_arg('epoch', int, 200, "The number of epoch to be trained.")
add_arg('g_base_dims', int, 64, "Base channels in generator")
add_arg('d_base_dims', int, 64, "Base channels in discriminator")
add_arg('load_size', int, 286, "the image size when load the image")
add_arg('crop_type', str, 'Centor',
"the crop type, choose = ['Centor', 'Random']")
add_arg('crop_size', int, 256, "crop size when preprocess image")
add_arg('save_checkpoints', bool, True, "Whether to save checkpoints.")
add_arg('run_test', bool, True, "Whether to run test.")
add_arg('use_gpu', bool, True, "Whether to use GPU to train.")
add_arg('profile', bool, False, "Whether to profile.")
    add_arg('dropout', bool, False, "Whether to use dropout.")
add_arg('drop_last', bool, False,
"Whether to drop the last images that cannot form a batch")
add_arg('shuffle', bool, True, "Whether to shuffle data")
add_arg('output', str, "./output",
"The directory the model and the test result to be saved to.")
    add_arg('init_model', str, None, "The init model file or directory.")
    add_arg('gan_mode', str, "vanilla", "The type of GAN objective to use.")
    add_arg('norm_type', str, "batch_norm", "Which normalization to use")
    add_arg('learning_rate', float, 0.0002, "the initial learning rate")
    add_arg('lambda_L1', float, 100.0, "the initial lambda parameter for the L1 loss")
    add_arg('num_generator_time', int, 1,
            "the number of times the generator runs in each training step")
    add_arg('print_freq', int, 10, "the frequency of printing the loss")
# yapf: enable
return parser
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser = base_parse_args(parser)
cfg, _ = parser.parse_known_args()
model_name = cfg.model_net
model_cfg = trainer.get_special_cfg(model_name)
parser = model_cfg(parser)
args = parser.parse_args()
return args
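# A minimal usage sketch (an assumption for illustration, not taken from the
# original file): a hypothetical entry point that parses the configuration and
# echoes it before training. The function name example_main is a placeholder.
def example_main():
    # Parse the base arguments plus the model-specific ones selected by
    # --model_net, then print the resolved configuration.
    args = parse_args()
    print_arguments(args)
    # A real entry point would build and train the args.model_net model here.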
| 37.862069
| 85
| 0.668716
|
84a4c3746af42518f1d1f7f3a2fd6d94a8988c7e
| 8,509
|
py
|
Python
|
sdk/python/pulumi_azure_native/netapp/v20210401preview/get_backup_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/netapp/v20210401preview/get_backup_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/netapp/v20210401preview/get_backup_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetBackupPolicyResult',
'AwaitableGetBackupPolicyResult',
'get_backup_policy',
]
@pulumi.output_type
class GetBackupPolicyResult:
"""
Backup policy information
"""
def __init__(__self__, daily_backups_to_keep=None, enabled=None, id=None, location=None, monthly_backups_to_keep=None, name=None, provisioning_state=None, tags=None, type=None, volume_backups=None, volumes_assigned=None, weekly_backups_to_keep=None, yearly_backups_to_keep=None):
if daily_backups_to_keep and not isinstance(daily_backups_to_keep, int):
raise TypeError("Expected argument 'daily_backups_to_keep' to be a int")
pulumi.set(__self__, "daily_backups_to_keep", daily_backups_to_keep)
if enabled and not isinstance(enabled, bool):
raise TypeError("Expected argument 'enabled' to be a bool")
pulumi.set(__self__, "enabled", enabled)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if monthly_backups_to_keep and not isinstance(monthly_backups_to_keep, int):
raise TypeError("Expected argument 'monthly_backups_to_keep' to be a int")
pulumi.set(__self__, "monthly_backups_to_keep", monthly_backups_to_keep)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if volume_backups and not isinstance(volume_backups, list):
raise TypeError("Expected argument 'volume_backups' to be a list")
pulumi.set(__self__, "volume_backups", volume_backups)
if volumes_assigned and not isinstance(volumes_assigned, int):
raise TypeError("Expected argument 'volumes_assigned' to be a int")
pulumi.set(__self__, "volumes_assigned", volumes_assigned)
if weekly_backups_to_keep and not isinstance(weekly_backups_to_keep, int):
raise TypeError("Expected argument 'weekly_backups_to_keep' to be a int")
pulumi.set(__self__, "weekly_backups_to_keep", weekly_backups_to_keep)
if yearly_backups_to_keep and not isinstance(yearly_backups_to_keep, int):
raise TypeError("Expected argument 'yearly_backups_to_keep' to be a int")
pulumi.set(__self__, "yearly_backups_to_keep", yearly_backups_to_keep)
@property
@pulumi.getter(name="dailyBackupsToKeep")
def daily_backups_to_keep(self) -> Optional[int]:
"""
Daily backups count to keep
"""
return pulumi.get(self, "daily_backups_to_keep")
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
        Whether the backup policy is enabled or not
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="monthlyBackupsToKeep")
def monthly_backups_to_keep(self) -> Optional[int]:
"""
Monthly backups count to keep
"""
return pulumi.get(self, "monthly_backups_to_keep")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of backup policy
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Azure lifecycle management
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="volumeBackups")
def volume_backups(self) -> Optional[Sequence['outputs.VolumeBackupsResponse']]:
"""
A list of volumes assigned to this policy
"""
return pulumi.get(self, "volume_backups")
@property
@pulumi.getter(name="volumesAssigned")
def volumes_assigned(self) -> Optional[int]:
"""
Volumes using current backup policy
"""
return pulumi.get(self, "volumes_assigned")
@property
@pulumi.getter(name="weeklyBackupsToKeep")
def weekly_backups_to_keep(self) -> Optional[int]:
"""
Weekly backups count to keep
"""
return pulumi.get(self, "weekly_backups_to_keep")
@property
@pulumi.getter(name="yearlyBackupsToKeep")
def yearly_backups_to_keep(self) -> Optional[int]:
"""
Yearly backups count to keep
"""
return pulumi.get(self, "yearly_backups_to_keep")
class AwaitableGetBackupPolicyResult(GetBackupPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetBackupPolicyResult(
daily_backups_to_keep=self.daily_backups_to_keep,
enabled=self.enabled,
id=self.id,
location=self.location,
monthly_backups_to_keep=self.monthly_backups_to_keep,
name=self.name,
provisioning_state=self.provisioning_state,
tags=self.tags,
type=self.type,
volume_backups=self.volume_backups,
volumes_assigned=self.volumes_assigned,
weekly_backups_to_keep=self.weekly_backups_to_keep,
yearly_backups_to_keep=self.yearly_backups_to_keep)
def get_backup_policy(account_name: Optional[str] = None,
backup_policy_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetBackupPolicyResult:
"""
Backup policy information
:param str account_name: The name of the NetApp account
    :param str backup_policy_name: Backup policy name which uniquely identifies the backup policy.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['backupPolicyName'] = backup_policy_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:netapp/v20210401preview:getBackupPolicy', __args__, opts=opts, typ=GetBackupPolicyResult).value
return AwaitableGetBackupPolicyResult(
daily_backups_to_keep=__ret__.daily_backups_to_keep,
enabled=__ret__.enabled,
id=__ret__.id,
location=__ret__.location,
monthly_backups_to_keep=__ret__.monthly_backups_to_keep,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
tags=__ret__.tags,
type=__ret__.type,
volume_backups=__ret__.volume_backups,
volumes_assigned=__ret__.volumes_assigned,
weekly_backups_to_keep=__ret__.weekly_backups_to_keep,
yearly_backups_to_keep=__ret__.yearly_backups_to_keep)
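# A minimal usage sketch (an assumption for illustration): inside a Pulumi
# program the function above is called directly and its fields are read as
# properties. The resource group, account and policy names are hypothetical
# placeholders, as is the helper name.
def _example_get_backup_policy():
    policy = get_backup_policy(
        resource_group_name="myResourceGroup",
        account_name="myNetAppAccount",
        backup_policy_name="dailyBackupPolicy")
    # Export one of the retention settings from the result object.
    pulumi.export("dailyBackupsToKeep", policy.daily_backups_to_keep)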
| 37.650442
| 283
| 0.664708
|
f0293809a7f170ffbe62aca91126d51bca343416
| 17,461
|
py
|
Python
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_object_replication_policies_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_object_replication_policies_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2019_06_01/aio/operations/_object_replication_policies_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ObjectReplicationPoliciesOperations:
"""ObjectReplicationPoliciesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2019_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ObjectReplicationPolicies"]:
"""List the object replication policies associated with the storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ObjectReplicationPolicies or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2019_06_01.models.ObjectReplicationPolicies]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicies"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ObjectReplicationPolicies', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies'} # type: ignore
async def get(
self,
resource_group_name: str,
account_name: str,
object_replication_policy_id: str,
**kwargs: Any
) -> "_models.ObjectReplicationPolicy":
"""Get the object replication policy of the storage account by policy ID.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param object_replication_policy_id: The ID of object replication policy or 'default' if the
policy ID is unknown.
:type object_replication_policy_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ObjectReplicationPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_06_01.models.ObjectReplicationPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore
async def create_or_update(
self,
resource_group_name: str,
account_name: str,
object_replication_policy_id: str,
properties: "_models.ObjectReplicationPolicy",
**kwargs: Any
) -> "_models.ObjectReplicationPolicy":
"""Create or update the object replication policy of the storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param object_replication_policy_id: The ID of object replication policy or 'default' if the
policy ID is unknown.
:type object_replication_policy_id: str
:param properties: The object replication policy set to a storage account. A unique policy ID
will be created if absent.
:type properties: ~azure.mgmt.storage.v2019_06_01.models.ObjectReplicationPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ObjectReplicationPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2019_06_01.models.ObjectReplicationPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ObjectReplicationPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(properties, 'ObjectReplicationPolicy')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ObjectReplicationPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore
async def delete(
self,
resource_group_name: str,
account_name: str,
object_replication_policy_id: str,
**kwargs: Any
) -> None:
"""Deletes the object replication policy associated with the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param object_replication_policy_id: The ID of object replication policy or 'default' if the
policy ID is unknown.
:type object_replication_policy_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'objectReplicationPolicyId': self._serialize.url("object_replication_policy_id", object_replication_policy_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/objectReplicationPolicies/{objectReplicationPolicyId}'} # type: ignore
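# A minimal usage sketch (an assumption for illustration): this operations
# group is normally reached through the async StorageManagementClient from
# azure-mgmt-storage, authenticated here with azure-identity. The subscription
# ID, resource group and account names below are placeholders.
async def _example_list_object_replication_policies():
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.storage.aio import StorageManagementClient
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            # list() returns an AsyncItemPaged that is consumed with async for.
            async for policy in client.object_replication_policies.list(
                    resource_group_name="myResourceGroup",
                    account_name="mystorageaccount"):
                print(policy.policy_id)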
| 53.234756
| 237
| 0.683237
|
5e7aa1df2fe5f2a4c1bf921ba220a1f65abae3f5
| 6,651
|
py
|
Python
|
bindings/python/ensmallen_graph/datasets/string/betavulgaris.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/string/betavulgaris.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/string/betavulgaris.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
"""
This file offers the methods to automatically retrieve the graph Beta vulgaris.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 19:54:01.701863
The undirected graph Beta vulgaris has 19481 nodes and 2745993 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.01447 and has 3 connected components, where the component with most
nodes has 19477 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 119, the mean node degree is 281.91, and
the node degree mode is 1. The top 5 most central nodes are 161934.XP_010688278.1
(degree 5300), 161934.XP_010690845.1 (degree 3913), 161934.XP_010675161.1
(degree 3607), 161934.XP_010684055.1 (degree 3508) and 161934.XP_010672867.1
(degree 3418).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import BetaVulgaris
# Then load the graph
graph = BetaVulgaris()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def BetaVulgaris(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Beta vulgaris graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
        Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
    Instance of Beta vulgaris graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 19:54:01.701863
The undirected graph Beta vulgaris has 19481 nodes and 2745993 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.01447 and has 3 connected components, where the component with most
nodes has 19477 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 119, the mean node degree is 281.91, and
the node degree mode is 1. The top 5 most central nodes are 161934.XP_010688278.1
(degree 5300), 161934.XP_010690845.1 (degree 3913), 161934.XP_010675161.1
(degree 3607), 161934.XP_010684055.1 (degree 3508) and 161934.XP_010672867.1
(degree 3418).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import BetaVulgaris
# Then load the graph
graph = BetaVulgaris()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="BetaVulgaris",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| 34.82199
| 223
| 0.700496
|
dfbdd6e683293bb5a872898573ca1c1da483d794
| 238
|
py
|
Python
|
examples/just_flask.py
|
tarcisiojr/flask-openapi3
|
9bad4533098f9246b3f17057757ce381f8b284e1
|
[
"MIT"
] | 17
|
2021-05-18T10:15:32.000Z
|
2022-02-23T04:58:07.000Z
|
examples/just_flask.py
|
tarcisiojr/flask-openapi3
|
9bad4533098f9246b3f17057757ce381f8b284e1
|
[
"MIT"
] | 16
|
2021-06-17T03:49:08.000Z
|
2022-03-30T01:31:07.000Z
|
examples/just_flask.py
|
tarcisiojr/flask-openapi3
|
9bad4533098f9246b3f17057757ce381f8b284e1
|
[
"MIT"
] | 5
|
2022-01-05T14:58:00.000Z
|
2022-03-06T10:10:47.000Z
|
# -*- coding: utf-8 -*-
# @Author : llc
# @Time : 2021/5/11 13:37
from flask_openapi3 import OpenAPI
app = OpenAPI(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
if __name__ == '__main__':
app.run()
| 14
| 34
| 0.609244
|
2e54858091dce78d5245b8cf3a1b37cf6cc8c038
| 75,746
|
py
|
Python
|
build/releases/release-0.504/src/lisp-etr.py
|
farinacci/lispers.net
|
e1ed6e0f0a242b13ad629afb0fc1c7072b19b30c
|
[
"Apache-2.0"
] | 26
|
2019-02-01T19:12:21.000Z
|
2022-03-25T04:40:38.000Z
|
build/releases/release-0.504/src/lisp-etr.py
|
farinacci/lispers.net
|
e1ed6e0f0a242b13ad629afb0fc1c7072b19b30c
|
[
"Apache-2.0"
] | 3
|
2019-10-29T17:49:19.000Z
|
2022-03-20T21:21:31.000Z
|
build/releases/release-0.504/src/lisp-etr.py
|
farinacci/lispers.net
|
e1ed6e0f0a242b13ad629afb0fc1c7072b19b30c
|
[
"Apache-2.0"
] | 4
|
2019-02-02T16:50:48.000Z
|
2020-10-29T03:10:58.000Z
|
#-----------------------------------------------------------------------------
#
# Copyright 2013-2019 lispers.net - Dino Farinacci <farinacci@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -----------------------------------------------------------------------------
#
# lisp-etr.py
#
# This file performs LISP Egress Tunnel Router (ETR) functionality.
#
# -----------------------------------------------------------------------------
import lisp
import lispconfig
import socket
import select
import threading
import time
import pcappy
import struct
import commands
import os
try:
import pytun
except:
pytun = None
#endtry
igmp_types = { 17 : "IGMP-query", 18 : "IGMPv1-report", 19 : "DVMRP",
20 : "PIMv1", 22 : "IGMPv2-report", 23 : "IGMPv2-leave",
30 : "mtrace-response", 31 : "mtrace-request", 34 : "IGMPv3-report" }
#------------------------------------------------------------------------------
#
# Global data structures relative to the lisp-etr process.
#
lisp_register_timer = None
lisp_trigger_register_timer = None
lisp_etr_info_timer = None
lisp_ephem_socket = None
lisp_ephem_port = lisp.lisp_get_ephemeral_port()
lisp_ipc_listen_socket = None
lisp_send_sockets = [None, None, None]
lisp_raw_socket = None
lisp_l2_socket = None
lisp_mac_header = None
LISP_MAP_REGISTER_INTERVAL = 60 # In units of seconds
#------------------------------------------------------------------------------
#
# lisp_etr_database_mapping_command
#
# This function supports adding additional RLOCs to a database-mapping entry
# that already exists.
#
def lisp_etr_database_mapping_command(kv_pair):
global lisp_trigger_register_timer
global lisp_send_sockets
lispconfig.lisp_database_mapping_command(kv_pair, lisp_ephem_port)
#
    # Trigger Map-Register when all database-mappings are configured.
#
    # Do not trigger Map-Register if NAT-traversal is configured. We may not
# have the global RLOC yet from Info-Replies. When the Info-Reply comes
# in we do trigger Map-Registers to all map-servers.
#
if (lisp.lisp_nat_traversal): return
if (lisp_trigger_register_timer != None and
lisp_trigger_register_timer.is_alive()): return
if (len(lisp.lisp_map_servers_list) > 0):
lisp_trigger_register_timer = threading.Timer(5,
lisp_process_register_timer, [lisp_send_sockets])
lisp_trigger_register_timer.start()
#endif
#enddef
#
# lisp_etr_show_command
#
# Show ETR configured map-servers and database-mappings.
#
def lisp_etr_show_command(clause):
#
# Show local found RLOCs.
#
output = lispconfig.lisp_show_myrlocs("")
#
# Show decapsulation stats.
#
output = lispconfig.lisp_show_decap_stats(output, "ETR")
#
# Show configured map-servers.
#
dns_suffix = lisp.lisp_decent_dns_suffix
if (dns_suffix == None):
dns_suffix = ":"
else:
dns_suffix = " (dns-suffix '{}'):".format(dns_suffix)
#endif
hover = "{} configured map-servers".format(len(lisp.lisp_map_servers_list))
title = "LISP-ETR Configured Map-Servers{}".format(dns_suffix)
title = lisp.lisp_span(title, hover)
hover = ("P = proxy-reply requested, M = merge-registrations " + \
"requested, N = Map-Notify requested")
reg_title = lisp.lisp_span("Registration<br>flags", hover)
output += lispconfig.lisp_table_header(title, "Address", "Auth-Type",
"xTR-ID", "Site-ID", reg_title, "Map-Registers<br>Sent",
"Map-Notifies<br>Received")
for ms in lisp.lisp_map_servers_list.values():
ms.resolve_dns_name()
ms_name = "" if ms.ms_name == "all" else ms.ms_name + "<br>"
addr_str = ms_name + ms.map_server.print_address_no_iid()
if (ms.dns_name): addr_str += "<br>" + ms.dns_name
xtr_id = "0x" + lisp.lisp_hex_string(ms.xtr_id)
flags = "{}-{}-{}-{}".format("P" if ms.proxy_reply else "p",
"M" if ms.merge_registrations else "m",
"N" if ms.want_map_notify else "n",
"R" if ms.refresh_registrations else "r")
registers_sent = ms.map_registers_sent + \
ms.map_registers_multicast_sent
output += lispconfig.lisp_table_row(addr_str,
"sha1" if (ms.alg_id == lisp.LISP_SHA_1_96_ALG_ID) else "sha2",
xtr_id, ms.site_id, flags, registers_sent,
ms.map_notifies_received)
#endfor
output += lispconfig.lisp_table_footer()
#
# Show database-mappings configured.
#
output = lispconfig.lisp_show_db_list("ETR", output)
#
# Show ELP configuration, if it exists.
#
if (len(lisp.lisp_elp_list) != 0):
output = lispconfig.lisp_show_elp_list(output)
#endif
#
# Show RLE configuration, if it exists.
#
if (len(lisp.lisp_rle_list) != 0):
output = lispconfig.lisp_show_rle_list(output)
#endif
#
# Show JSON configuration, if it exists.
#
if (len(lisp.lisp_json_list) != 0):
output = lispconfig.lisp_show_json_list(output)
#endif
#
# Show group-mappings, if they exist.
#
if (len(lisp.lisp_group_mapping_list) != 0):
title = "Configured Group Mappings:"
output += lispconfig.lisp_table_header(title, "Name", "Group Prefix",
"Sources", "Use MS")
for gm in lisp.lisp_group_mapping_list.values():
sources = ""
for s in gm.sources: sources += s + ", "
if (sources == ""):
sources = "*"
else:
sources = sources[0:-2]
#endif
output += lispconfig.lisp_table_row(gm.group_name,
gm.group_prefix.print_prefix(), sources, gm.use_ms_name)
#endfor
output += lispconfig.lisp_table_footer()
#endif
return(output)
#enddef
#
# lisp_etr_show_keys_command
#
# Call lispconfig.lisp_show_crypto_list().
#
def lisp_etr_show_keys_command(parameter):
return(lispconfig.lisp_show_crypto_list("ETR"))
#enddef
#
# lisp_map_server_command
#
# Store configured map-servers.
#
def lisp_map_server_command(kv_pairs):
global lisp_trigger_register_timer
global lisp_etr_info_timer
addresses = []
dns_names = []
key_id = 0
alg_id = 0
password = ""
proxy_reply = False
merge = False
refresh = False
want = False
site_id = 0
ms_name = None
ekey_id = 0
ekey = None
for kw in kv_pairs.keys():
value = kv_pairs[kw]
if (kw == "ms-name"):
ms_name = value[0]
#endif
if (kw == "address"):
for i in range(len(value)):
addresses.append(value[i])
#endfor
#endif
if (kw == "dns-name"):
for i in range(len(value)):
dns_names.append(value[i])
#endfor
#endif
if (kw == "authentication-type"):
alg_id = lisp.LISP_SHA_1_96_ALG_ID if (value == "sha1") else \
lisp.LISP_SHA_256_128_ALG_ID if (value == "sha2") else ""
#endif
if (kw == "authentication-key"):
if (alg_id == 0): alg_id = lisp.LISP_SHA_256_128_ALG_ID
auth_key = lisp.lisp_parse_auth_key(value)
key_id = auth_key.keys()[0]
password = auth_key[key_id]
#endif
if (kw == "proxy-reply"):
proxy_reply = True if value == "yes" else False
#endif
if (kw == "merge-registrations"):
merge = True if value == "yes" else False
#endif
if (kw == "refresh-registrations"):
refresh = True if value == "yes" else False
#endif
if (kw == "want-map-notify"):
want = True if value == "yes" else False
#endif
if (kw == "site-id"):
site_id = int(value)
#endif
if (kw == "encryption-key"):
ekey = lisp.lisp_parse_auth_key(value)
ekey_id = ekey.keys()[0]
ekey = ekey[ekey_id]
        #endif
#endfor
#
# Store internal data structure.
#
ms = None
for addr_str in addresses:
if (addr_str == ""): continue
ms = lisp.lisp_ms(addr_str, None, ms_name, alg_id, key_id, password,
proxy_reply, merge, refresh, want, site_id, ekey_id, ekey)
#endfor
for name in dns_names:
if (name == ""): continue
ms = lisp.lisp_ms(None, name, ms_name, alg_id, key_id, password,
proxy_reply, merge, refresh, want, site_id, ekey_id, ekey)
#endfor
#
    # Trigger an Info-Request if we are doing NAT-traversal and this is the
    # first Map-Server.
#
first_ms = (len(lisp.lisp_map_servers_list) == 1)
if (first_ms):
ms = lisp.lisp_map_servers_list.values()[0]
lisp_etr_info_timer = threading.Timer(2, lisp_etr_process_info_timer,
[ms.map_server])
lisp_etr_info_timer.start()
else:
#
# Trigger Map-Register to newly configured Map-Server.
#
        # Do not trigger Map-Register if NAT-traversal is configured. We may not
# have the global RLOC yet from Info-Replies. When the Info-Reply comes
# in we do trigger Map-Registers to all map-servers.
#
if (lisp.lisp_nat_traversal): return
if (ms and len(lisp.lisp_db_list) > 0):
lisp_build_map_register(lisp_send_sockets, None, None, ms, False)
#endif
#endif
#
# Handle case where "lisp database-mapping" comes before "lisp map-server"
# in configuration file. We have to start periodic timer.
#
if (len(lisp.lisp_db_list) > 0):
if (lisp_trigger_register_timer != None and
lisp_trigger_register_timer.is_alive()): return
lisp_trigger_register_timer = threading.Timer(5,
lisp_process_register_timer, [lisp_send_sockets])
lisp_trigger_register_timer.start()
#endif
return
#enddef
#
# lisp_group_mapping_command
#
# Process the "lisp group-mapping" command clause.
#
def lisp_group_mapping_command(kv_pairs):
sources = []
group_prefix = None
rle_address = None
ms_name = "all"
for kw in kv_pairs.keys():
value = kv_pairs[kw]
if (kw == "group-name"):
group_name = value
#endif
if (kw == "group-prefix"):
if (group_prefix == None):
group_prefix = lisp.lisp_address(lisp.LISP_AFI_NONE, "", 0, 0)
#endif
group_prefix.store_prefix(value)
#endif
if (kw == "instance-id"):
if (group_prefix == None):
group_prefix = lisp.lisp_address(lisp.LISP_AFI_NONE, "", 0, 0)
#endif
group_prefix.instance_id = int(value)
#endif
if (kw == "ms-name"):
ms_name = value[0]
#endif
if (kw == "address"):
for source in value:
if (source != ""): sources.append(source)
#endfor
#endif
if (kw == "rle-address"):
if (rle_address == None):
rle_address = lisp.lisp_address(lisp.LISP_AFI_NONE, "", 0, 0)
#endif
rle_address.store_address(value)
#endif
#endfor
gm = lisp.lisp_group_mapping(group_name, ms_name, group_prefix, sources,
rle_address)
gm.add_group()
return
#enddef
#
# lisp_build_map_register_records
#
# Build EID and RLOC records to be inserted in a Map-Register message.
#
def lisp_build_map_register_records(quiet, db, eid, group, ttl):
#
# Don't include RTR-list if there is no NAT in the path but nat-traversal
# is configured and NAT in path is tested. When there is a NAT, include
# all RTRs if lisp_register_all_rtrs is configured. Otherwise, if the
# array element is None, then the RTR is down and should be excluded in
# the list to register.
#
rtr_list = {}
for rloc_entry in db.rloc_set:
if (rloc_entry.translated_rloc.is_null()): continue
for rtr_str in lisp.lisp_rtr_list:
rtr = lisp.lisp_rtr_list[rtr_str]
if (lisp.lisp_register_all_rtrs == False and rtr == None):
lisp.lprint(" Exclude unreachable RTR {}".format( \
lisp.red(rtr_str, False)))
continue
#endif
if (rtr == None): continue
rtr_list[rtr_str] = rtr
        #endfor
break
#endfor
count = 0
eid_records = ""
for iid in [eid.instance_id] + eid.iid_list:
eid_record = lisp.lisp_eid_record()
eid_record.rloc_count = len(db.rloc_set) + len(rtr_list)
eid_record.authoritative = True
eid_record.record_ttl = ttl
eid_record.eid.copy_address(eid)
eid_record.eid.instance_id = iid
eid_record.eid.iid_list = []
eid_record.group.copy_address(group)
eid_records += eid_record.encode()
if (not quiet):
prefix_str = lisp.lisp_print_eid_tuple(eid, group)
decent_index = ""
if (lisp.lisp_decent_pull_xtr_configured()):
decent_index = lisp.lisp_get_decent_index(eid)
decent_index = lisp.bold(str(decent_index), False)
decent_index = ", decent-index {}".format(decent_index)
#endif
lisp.lprint(" EID-prefix {} for ms-name '{}'{}".format( \
lisp.green(prefix_str, False), db.use_ms_name, decent_index))
eid_record.print_record(" ", False)
#endif
for rloc_entry in db.rloc_set:
rloc_record = lisp.lisp_rloc_record()
rloc_record.store_rloc_entry(rloc_entry)
rloc_record.local_bit = rloc_entry.rloc.is_local()
rloc_record.reach_bit = True
eid_records += rloc_record.encode()
if (not quiet): rloc_record.print_record(" ")
#endfor
#
        # If we are doing NAT-traversal, include a set of RTR RLOCs with
# priority 1. And set the global RLOCs to priority 254.
#
for rtr in rtr_list.values():
rloc_record = lisp.lisp_rloc_record()
rloc_record.rloc.copy_address(rtr)
rloc_record.priority = 254
rloc_record.rloc_name = "RTR"
rloc_record.weight = 0
rloc_record.mpriority = 255
rloc_record.mweight = 0
rloc_record.local_bit = False
rloc_record.reach_bit = True
eid_records += rloc_record.encode()
if (not quiet): rloc_record.print_record(" RTR ")
#endfor
#
# Return to caller number of EID records written to returned buffer.
#
count += 1
#endfor
return(eid_records, count)
#enddef
#
# lisp_build_map_register
#
# From each configured "database-mapping" command, register mappings to
# configured map-servers.
#
def lisp_build_map_register(lisp_sockets, ttl, eid_only, ms_only, refresh):
#
# No database-mapping entries.
#
if (eid_only != None):
db_list_len = 1
else:
db_list_len = lisp.lisp_db_list_length()
if (db_list_len == 0): return
#endif
lisp.lprint("Build Map-Register for {} database-mapping entries". \
format(db_list_len))
#
# Set boolean if "decentralized-pull-xtr-[modulus,dns-suffix]" configured.
#
decent = lisp.lisp_decent_pull_xtr_configured()
#
# Go quiet with debug output when there are a lot of EID-records.
#
quiet = (db_list_len > 12)
ms_list = {}
if (decent):
#
# If "decentralized-pull-xtr-[modulus,dns-suffix]" is configured,
        # decide which map-server this EID belongs to (and is registered with).
#
for db in lisp.lisp_db_list:
eid = db.eid if db.group.is_null() else db.group
dns_name = lisp.lisp_get_decent_dns_name(eid)
ms_list[dns_name] = []
#endfor
else:
#
        # Set up each map-server name so we can decide which EID-prefixes go
# to which map-servers. [0] is eid_records and [1] is count.
#
for ms in lisp.lisp_map_servers_list.values():
if (ms_only != None and ms != ms_only): continue
ms_list[ms.ms_name] = []
#endfor
#endif
#
# Create data structure instances to build Map-Regiser message.
#
map_register = lisp.lisp_map_register()
map_register.nonce = 0xaabbccdddfdfdf00
map_register.xtr_id_present = True
if (ttl == None): ttl = lisp.LISP_REGISTER_TTL
#
    # Traverse the database-mapping associative array.
#
for db in lisp.lisp_db_list:
if (decent):
ms_dns_name = lisp.lisp_get_decent_dns_name(db.eid)
else:
ms_dns_name = db.use_ms_name
#endif
#
# Is db entry associated with a map-server name that is not
# configured?
#
if (ms_list.has_key(ms_dns_name) == False): continue
msl = ms_list[ms_dns_name]
if (msl == []):
msl = ["", 0]
ms_list[ms_dns_name].append(msl)
else:
msl = ms_list[ms_dns_name][-1]
#endif
#
# If dynamic-EIDs are discovered, add each of them to EID-records,
# unless, we are doing a trigger in which case a single dynamic-EID
# is built into an EID-record.
#
# Otherwise, add static EID-prefixes into EID-records, unless a single
# one is triggered.
#
eid_records = ""
if (db.dynamic_eid_configured()):
for dyn_eid in db.dynamic_eids.values():
eid = dyn_eid.dynamic_eid
if (eid_only == None or eid_only.is_exact_match(eid)):
records, count = lisp_build_map_register_records(quiet, db,
eid, db.group, ttl)
eid_records += records
msl[1] += count
#endif
#endfor
else:
if (eid_only == None):
eid_records, count = lisp_build_map_register_records(quiet, db,
db.eid, db.group, ttl)
msl[1] += count
#endif
#endif
#
# Add EID-records to correct map-server name set.
#
msl[0] += eid_records
if (msl[1] == 20):
msl = ["", 0]
ms_list[ms_dns_name].append(msl)
#endif
#endfor
#
# Send Map-Register to each configured map-server.
#
for ms in lisp.lisp_map_servers_list.values():
if (ms_only != None and ms != ms_only): continue
ms_dns_name = ms.dns_name if decent else ms.ms_name
if (ms_list.has_key(ms_dns_name) == False): continue
for msl in ms_list[ms_dns_name]:
#
# Build map-server specific fields.
#
map_register.record_count = msl[1]
if (map_register.record_count == 0): continue
map_register.nonce += 1
map_register.alg_id = ms.alg_id
map_register.key_id = ms.key_id
map_register.proxy_reply_requested = ms.proxy_reply
map_register.merge_register_requested = ms.merge_registrations
map_register.map_notify_requested = ms.want_map_notify
map_register.xtr_id = ms.xtr_id
map_register.site_id = ms.site_id
map_register.encrypt_bit = (ms.ekey != None)
if (ms.refresh_registrations):
map_register.map_register_refresh = refresh
#endif
if (ms.ekey != None): map_register.encryption_key_id = ms.ekey_id
packet = map_register.encode()
map_register.print_map_register()
#
# Append EID-records and encode xtr-ID and site-ID at end of
# Map-Register.
#
trailer = map_register.encode_xtr_id("")
eid_records = msl[0]
packet = packet + eid_records + trailer
ms.map_registers_sent += 1
lisp.lisp_send_map_register(lisp_sockets, packet, map_register, ms)
time.sleep(.001)
#endfor
#
# Do DNS lookup for Map-Server if "dns-name" configured.
#
ms.resolve_dns_name()
#
# Exit loop if we are triggering a Map-Register to a single
# Map-Server.
#
if (ms_only != None and ms == ms_only): break
#endfor
return
#enddef
#
# lisp_etr_process_info_timer
#
# Time to send a periodic Info-Request message. This must be done less often
# than sending periodic Map-Registers as well as less than the NAT timeout
# value, which is usually one minute.
#
def lisp_etr_process_info_timer(ms):
global lisp_etr_info_timer
global lisp_ephem_socket
lisp.lisp_set_exception()
#
# Build Info-Request messages if we have any private RLOCs in database-
# mappings.
#
sockets = [lisp_ephem_socket, lisp_ephem_socket, lisp_ipc_listen_socket]
lisp.lisp_build_info_requests(sockets, ms, lisp.LISP_CTRL_PORT)
#
# Build Info-Request for RTRs so we can open up NAT state so RTRs
# can encapsulate to us when ETR is behind NAT.
#
allow_private = (os.getenv("LISP_RTR_BEHIND_NAT") == None)
for rtr in lisp.lisp_rtr_list.values():
if (rtr == None): continue
if (rtr.is_private_address() and allow_private == False):
r = lisp.red(rtr.print_address_no_iid(), False)
lisp.lprint("Skip over RTR private address {}".format(r))
continue
#endif
lisp.lisp_build_info_requests(sockets, rtr, lisp.LISP_DATA_PORT)
#endfor
#
# Restart periodic timer. For some reason only this timer has to be
    # canceled. Found while testing NAT-traversal on a rasp-pi in Jul 2015.
#
lisp_etr_info_timer.cancel()
lisp_etr_info_timer = threading.Timer(lisp.LISP_INFO_INTERVAL,
lisp_etr_process_info_timer, [None])
lisp_etr_info_timer.start()
return
#enddef
#
# lisp_process_register_timer
#
# Time to send a periodic Map-Register.
#
def lisp_process_register_timer(lisp_sockets):
global lisp_register_timer
global lisp_ephem_socket
lisp.lisp_set_exception()
#
# Build and send Map-Register.
#
lisp_build_map_register(lisp_sockets, None, None, None, True)
#
# If we are are doing L2-overlays, then register as a join of the
# broadcast MAC address.
#
if (lisp.lisp_l2_overlay):
entry = [ None, "ffff-ffff-ffff", True ]
lisp_send_multicast_map_register(lisp_sockets, [entry])
#endif
#
# Restart periodic timer.
#
if (lisp_register_timer): lisp_register_timer.cancel()
lisp_register_timer = threading.Timer(LISP_MAP_REGISTER_INTERVAL,
lisp_process_register_timer, [lisp_send_sockets])
lisp_register_timer.start()
return
#enddef
#
# lisp_is_group_more_specific
#
# Take group address in string format and see if it is more specific than
# the group-prefix in class lisp_group_mapping(). If more specific, return
# mask-length, otherwise return -1.
#
def lisp_is_group_more_specific(group_str, group_mapping):
iid = group_mapping.group_prefix.instance_id
mask_len = group_mapping.group_prefix.mask_len
group = lisp.lisp_address(lisp.LISP_AFI_IPV4, group_str, 32, iid)
if (group.is_more_specific(group_mapping.group_prefix)): return(mask_len)
return(-1)
#enddef
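#
# lisp_example_pick_group_mapping
#
# A minimal illustrative sketch (an assumption, mirroring the selection loop
# in lisp_send_multicast_map_register() below): walk the configured group-
# mappings and keep the longest, i.e. most-specific, match for the supplied
# group address. The function name is a placeholder, not part of the ETR API.
#
def lisp_example_pick_group_mapping(group_str):
    best = None
    for gm in lisp.lisp_group_mapping_list.values():
        mask_len = lisp_is_group_more_specific(group_str, gm)
        if (mask_len == -1): continue
        if (best == None or mask_len > best.group_prefix.mask_len):
            best = gm
        #endif
    #endfor
    return(best)
#enddef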
#
# lisp_send_multicast_map_register
#
# Build a Map-Register message with a Multicast Info Type LCAF as an EID-record
# for each entry in the 'entries' array. And build an RLOC-record as an RLE
# describing this ETR as the RLOC to be used for replication.
#
# The entries is an array of (source, group, joinleave) tuples.
#
def lisp_send_multicast_map_register(lisp_sockets, entries):
length = len(entries)
if (length == 0): return
afi = None
if (entries[0][1].find(":") != -1): afi = lisp.LISP_AFI_IPV6
if (entries[0][1].find(".") != -1): afi = lisp.LISP_AFI_IPV4
if (entries[0][1].find("-") != -1): afi = lisp.LISP_AFI_MAC
if (afi == None):
lisp.lprint("lisp_send_multicast_map_register() invalid group address")
return
#endif
#
# Find all (*,G) entries in entries array and replace with (S,G) entries
# from lisp_group_mapping_list.
#
g_entries = []
for source, group, joinleave in entries:
if (source != None): continue
g_entries.append([group, joinleave])
#endfor
decent = lisp.lisp_decent_pull_xtr_configured()
ms_list = {}
entries = []
for group, joinleave in g_entries:
ms_gm = None
for gm in lisp.lisp_group_mapping_list.values():
mask_len = lisp_is_group_more_specific(group, gm)
if (mask_len == -1): continue
if (ms_gm == None or mask_len > ms_gm.group_prefix.mask_len):
ms_gm = gm
#endif
#endfor
if (ms_gm == None):
lisp.lprint("No group-mapping for {}, could be underlay group". \
format(group))
continue
#endif
lisp.lprint("Use group-mapping '{}' {} for group {}".format( \
ms_gm.group_name, ms_gm.group_prefix.print_prefix(), group))
iid = ms_gm.group_prefix.instance_id
ms_name = ms_gm.use_ms_name
rle = ms_gm.rle_address
#
# To obtain decent-index for a group address, just use group address
        # and no source as part of the hash, because an ITR does not know if
        # (*,G) or (S,G) is registered with the mapping system.
#
key = ms_name
if (decent):
key = lisp.lisp_get_decent_dns_name_from_str(iid, group)
ms_list[key] = ["", 0]
#endif
if (len(ms_gm.sources) == 0):
entries.append(["0.0.0.0", group, iid, key, rle, joinleave])
continue
#endif
for s in ms_gm.sources:
ms_list[key] = ["", 0]
entries.append([s, group, iid, key, rle, joinleave])
#endfor
#endfor
length = len(entries)
if (length == 0): return
lisp.lprint("Build Map-Register for {} multicast entries".format(length))
#
# Build RLE node for RLOC-record encoding. If behind a NAT, we need to
# insert a global address as the RLE node address. We will do that in
# the entries for loop.
#
rle_node = lisp.lisp_rle_node()
rle_node.level = 128
translated_rloc = lisp.lisp_get_any_translated_rloc()
rle = lisp.lisp_rle("")
rle.rle_nodes.append(rle_node)
#
    # Set up each map-server name so we can decide which EID-prefixes go
# to which map-servers. [0] is eid_records and [1] is count. The ms_list
# is already setup for when pull-based decent is used.
#
if (decent == False):
for ms in lisp.lisp_map_servers_list.values():
ms_list[ms.ms_name] = ["", 0]
#endfor
#endif
rloc_name = None
if (lisp.lisp_nat_traversal): rloc_name = lisp.lisp_hostname
#
# Count number of RTRs reachable so we know allocation count.
#
rtr_count = 0
for rtr in lisp.lisp_rtr_list.values():
if (rtr == None): continue
rtr_count += 1
#endfor
#
# Run through multicast entry array.
#
eid_records = ""
for source, group, iid, ms_dns_name, rle_addr, joinleave in entries:
#
# Is db entry associated with a map-server name that is not configured?
#
if (ms_list.has_key(ms_dns_name) == False): continue
eid_record = lisp.lisp_eid_record()
eid_record.rloc_count = 1 + rtr_count
eid_record.authoritative = True
eid_record.record_ttl = lisp.LISP_REGISTER_TTL if joinleave else 0
eid_record.eid = lisp.lisp_address(afi, source, 0, iid)
if (eid_record.eid.address == 0): eid_record.eid.mask_len = 0
eid_record.group = lisp.lisp_address(afi, group, 0, iid)
if (eid_record.group.is_mac_broadcast() and \
eid_record.eid.address == 0): eid_record.eid.mask_len = 0
decent_index = ""
ms_name = ""
if (lisp.lisp_decent_pull_xtr_configured()):
decent_index = lisp.lisp_get_decent_index(eid_record.group)
decent_index = lisp.bold(str(decent_index), False)
decent_index = "with decent-index {}".format(decent_index)
else:
decent_index = "for ms-name '{}'".format(ms_dns_name)
#endif
eid_str = lisp.green(eid_record.print_eid_tuple(), False)
lisp.lprint(" EID-prefix {} {}{}".format(eid_str, ms_name,
decent_index))
eid_records += eid_record.encode()
eid_record.print_record(" ", False)
ms_list[ms_dns_name][1] += 1
#
# Build our RLOC entry.
#
rloc_record = lisp.lisp_rloc_record()
rloc_record.rloc_name = rloc_name
#
# Decide on RLE address. Have NAT-traversal take precedence, otherwise
# use configured RLE in group-mapping. If one wasn't configured use
# lisp_myrlocs IPv4 address.
#
if (translated_rloc != None):
rle_node.address = translated_rloc
elif (rle_addr != None):
rle_node.address = rle_addr
else:
rle_node.address = rle_addr = lisp.lisp_myrlocs[0]
#endif
rloc_record.rle = rle
rloc_record.local_bit = True
rloc_record.reach_bit = True
rloc_record.priority = 255
rloc_record.weight = 0
rloc_record.mpriority = 1
rloc_record.mweight = 100
eid_records += rloc_record.encode()
rloc_record.print_record(" ")
#
# If we are doing NAT-traversal, include a set of RTR RLOCs with
# priority 1. And set the global RLOCs to priority 254.
#
for rtr in lisp.lisp_rtr_list.values():
if (rtr == None): continue
rloc_record = lisp.lisp_rloc_record()
rloc_record.rloc.copy_address(rtr)
rloc_record.priority = 254
rloc_record.rloc_name = "RTR"
rloc_record.weight = 0
rloc_record.mpriority = 255
rloc_record.mweight = 0
rloc_record.local_bit = False
rloc_record.reach_bit = True
eid_records += rloc_record.encode()
rloc_record.print_record(" RTR ")
#endfor
#
# Add EID-records to correct map-server name set.
#
ms_list[ms_dns_name][0] += eid_records
#endfor
#
# Build map-server independent fields.
#
map_register = lisp.lisp_map_register()
map_register.nonce = 0xaabbccdddfdfdf00
map_register.xtr_id_present = True
map_register.proxy_reply_requested = True
map_register.map_notify_requested = False
map_register.merge_register_requested = True
#
# Send Map-Register to each configured map-server.
#
for ms in lisp.lisp_map_servers_list.values():
key = ms.dns_name if decent else ms.ms_name
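# EID-records were bucketed above by the LISP-Decent DNS name when pull-based
# decent is configured, otherwise by ms-name, so look them up with that key.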
#
# Get EID-records from correct map-server name set.
#
if (ms_list.has_key(key) == False): continue
#
# Build map-server specific fields.
#
map_register.record_count = ms_list[key][1]
if (map_register.record_count == 0): continue
map_register.nonce += 1
map_register.alg_id = ms.alg_id
map_register.key_id = ms.key_id
map_register.xtr_id = ms.xtr_id
map_register.site_id = ms.site_id
map_register.encrypt_bit = (ms.ekey != None)
packet = map_register.encode()
map_register.print_map_register()
#
# Append EID-records and encode xtr-ID and site-ID at end of
# Map-Register.
#
trailer = map_register.encode_xtr_id("")
packet = packet + eid_records + trailer
ms.map_registers_multicast_sent += 1
lisp.lisp_send_map_register(lisp_sockets, packet, map_register, ms)
#
# Do DNS lookup for Map-Server if "dns-name" configured.
#
ms.resolve_dns_name()
#
# Go build more EID-records.
#
time.sleep(.001)
#endfor
return
#enddef
#
# IGMP record types.
#
lisp_igmp_record_types = { 1 : "include-mode", 2 : "exclude-mode",
3 : "change-to-include", 4 : "change-to-exclude", 5 : "allow-new-source",
6 : "block-old-sources" }
#
# lisp_process_igmp_packet
#
# Process IGMP packets.
#
# IGMPv1/v2 Reports (types 0x12 and 0x16) are joins and type 0x17 is a leave.
# IGMPv3 Reports (type 0x22) carry group records that may be joins or leaves.
#
# An IGMPv1 and IGMPv2 report format is:
#
# 0 1 2 3
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |Version| Type | Unused | Checksum |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Group Address |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# An IGMPv3 report format is:
#
# 0 1 2 3
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Type = 0x22 | Reserved | Checksum |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Reserved | Number of Group Records (M) |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | |
# . .
# . Group Record [1] .
# . .
# | |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | |
# . .
# . Group Record [2] .
# . .
# | |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | . |
# . . .
# | . |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | |
# . .
# . Group Record [M] .
# . .
# | |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# An IGMPv3 group record format is:
#
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Record Type | Aux Data Len | Number of Sources (N) |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Multicast Address |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Source Address [1] |
# +- -+
# | Source Address [2] |
# +- -+
# . . .
# . . .
# . . .
# +- -+
# | Source Address [N] |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | |
# . .
# . Auxiliary Data .
# . .
# | |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
def lisp_process_igmp_packet(packet):
global lisp_send_sockets
r = lisp.bold("Receive", False)
lisp.lprint("{} {}-byte IGMP packet: {}".format(r, len(packet),
lisp.lisp_format_packet(packet)))
#
# Jump over IP header.
#
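# The low-order nibble of the first IP byte is the header length in 32-bit
# words, so this computes the byte offset of the IGMP payload.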
header_offset = (struct.unpack("B", packet[0])[0] & 0x0f) * 4
#
# Check for IGMPv3 type value 0x22. Or process an IGMPv2 report.
#
igmp = packet[header_offset::]
igmp_type = struct.unpack("B", igmp[0])[0]
group = lisp.lisp_address(lisp.LISP_AFI_IPV4, "", 32, 0)
reports_and_leaves_only = (igmp_type in (0x12, 0x16, 0x17, 0x22))
if (reports_and_leaves_only == False):
igmp_str = "{} ({})".format(igmp_type, igmp_types[igmp_type]) if \
igmp_types.has_key(igmp_type) else igmp_type
lisp.lprint("IGMP type {} not supported".format(igmp_str))
return
#endif
if (len(igmp) < 8):
lisp.lprint("IGMP message too small")
return
#endif
#
# Maybe this is an IGMPv1 or IGMPv2 message so get group address. If
# IGMPv3, we will fix up group address in loop (for each group record).
#
group.address = socket.ntohl(struct.unpack("II", igmp[:8])[1])
group_str = group.print_address_no_iid()
#
# Process either IGMPv1 or IGMPv2 and exit.
#
if (igmp_type == 0x17):
lisp.lprint("IGMPv2 leave (*, {})".format(lisp.bold(group_str, False)))
lisp_send_multicast_map_register(lisp_send_sockets,
[[None, group_str, False]])
return
#endif
if (igmp_type in (0x12, 0x16)):
lisp.lprint("IGMPv{} join (*, {})".format( \
1 if (igmp_type == 0x12) else 2, lisp.bold(group_str, False)))
#
# Suppress for link-local groups.
#
if (group_str.find("224.0.0.") != -1):
lisp.lprint("Suppress registration for link-local groups")
else:
lisp_send_multicast_map_register(lisp_send_sockets,
[[None, group_str, True]])
#endif
#
# Finished with IGMPv1 or IGMPv2 processing.
#
return
#endif
#
# Parse each record for IGMPv3 (igmp_type == 0x22).
#
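# Note: for an IGMPv3 report, the 4 bytes read into group.address above are
# the 16-bit reserved field followed by the 16-bit group-record count, so
# that value doubles as the record count here (the reserved bits are zero).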
record_count = group.address
igmp = igmp[8::]
group_format = "BBHI"
group_size = struct.calcsize(group_format)
source_format = "I"
source_size = struct.calcsize(source_format)
source = lisp.lisp_address(lisp.LISP_AFI_IPV4, "", 32, 0)
#
# Traverse each group record.
#
register_entries = []
for i in range(record_count):
if (len(igmp) < group_size): return
record_type, x, source_count, address = struct.unpack(group_format,
igmp[:group_size])
igmp = igmp[group_size::]
if (lisp_igmp_record_types.has_key(record_type) == False):
lisp.lprint("Invalid record type {}".format(record_type))
continue
#endif
record_type_str = lisp_igmp_record_types[record_type]
source_count = socket.ntohs(source_count)
group.address = socket.ntohl(address)
group_str = group.print_address_no_iid()
lisp.lprint("Record type: {}, group: {}, source-count: {}".format( \
record_type_str, group_str, source_count))
#
# Determine if this is a join or leave. MODE_IS_INCLUDE (1) is a join.
# MODE_TO_EXCLUDE (4) with no sources is a join. CHANGE_TO_INCLUDE (5)
# is a join. Everything else is a leave.
#
joinleave = False
if (record_type in (1, 5)): joinleave = True
if (record_type == 4 and source_count == 0): joinleave = True
j_or_l = "join" if (joinleave) else "leave"
#
# Suppress registration for link-local groups.
#
if (group_str.find("224.0.0.") != -1):
lisp.lprint("Suppress registration for link-local groups")
continue
#endif
#
# (*,G) Join or Leave has been received if source count is 0.
#
# If this is IGMPv2 or just IGMPv3 reporting a group address, encode
# a (*,G) for the element in the register_entries array.
#
if (source_count == 0):
register_entries.append([None, group_str, joinleave])
lisp.lprint("IGMPv3 {} (*, {})".format(lisp.bold(j_or_l, False),
lisp.bold(group_str, False)))
#endif
#
# Process (S,G)s (source records).
#
for j in range(source_count):
if (len(igmp) < source_size): return
address = struct.unpack(source_format, igmp[:source_size])[0]
source.address = socket.ntohl(address)
source_str = source.print_address_no_iid()
register_entries.append([source_str, group_str, joinleave])
lisp.lprint("{} ({}, {})".format(j_or_l,
lisp.green(source_str, False), lisp.bold(group_str, False)))
igmp = igmp[source_size::]
#endfor
#endfor
#
# Build Map-Register for (S,G) entries. Put them in a Multicast Info LCAF
# Type and put ourselves as an RLE. This is draft-farinacci-lisp-signal-
# free-multicast
#
if (len(register_entries) != 0):
lisp_send_multicast_map_register(lisp_send_sockets, register_entries)
#endif
return
#enddef
#
# lisp_etr_data_plane
#
# Capture a LISP encapsulated packet, decap it, process inner header, and
# re-encapsulated it.
#
def lisp_etr_data_plane(parms, not_used, packet):
global lisp_ipc_listen_socket
device = parms[0]
lisp_raw_socket = parms[1]
#
# Jump over the link-layer header if the packet was received on an interface.
# Loopback interfaces have a 4-byte internal header; other capture devices
# have a 16-byte header (see the offsets below).
#
if (lisp.lisp_is_macos() == False):
offset = 4 if device == "lo0" else 16
packet = packet[offset::]
#endif
#
# Check IGMP packet.
#
protocol = struct.unpack("B", packet[9])[0]
if (protocol == 2):
lisp_process_igmp_packet(packet)
return
#endif
#
# Check RLOC-probe Map-Request. We need to grab the TTL from IP header.
#
orig_packet = packet
packet, source, port, ttl = lisp.lisp_is_rloc_probe(packet, 0)
if (orig_packet != packet):
if (source == None): return
lisp.lisp_parse_packet(lisp_send_sockets, packet, source, port, ttl)
return
#endif
#
# First check if we are assembling IPv4 fragments. Do this only when
# not doing NAT-traversal. Otherwise, the kernel will do it when we
# receive the same packet on a raw socket (in lisp_etr_nat_data_plane()).
#
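# The UDP source port is at offset 20, right after a 20-byte IPv4 header
# (this assumes no IP options on the outer header).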
sport = socket.ntohs(struct.unpack("H", packet[20:22])[0])
if (lisp.lisp_nat_traversal and sport == lisp.LISP_DATA_PORT): return
packet = lisp.lisp_reassemble(packet)
if (packet == None): return
packet = lisp.lisp_packet(packet)
status = packet.decode(True, lisp_ipc_listen_socket, lisp.lisp_decap_stats)
if (status == None): return
#
# Print some useful header fields.
#
packet.print_packet("Receive", True)
#
# If we are looping back Map-Registers via encapsulation, overwrite
# multicast address with source address. That means we are sending a
# Map-Register message to the lisp-core process from our local RLOC
# address to our local RLOC address. Also, zero out the UDP checksum
# since the destination address changes that affects the pseudo-header.
#
if (lisp.lisp_decent_push_configured and
packet.inner_dest.is_multicast_address() and \
packet.lisp_header.get_instance_id() == 0xffffff):
source = packet.inner_source.print_address_no_iid()
packet.strip_outer_headers()
packet = packet.packet[28::]
packet = lisp.lisp_packet_ipc(packet, source, sport)
lisp.lisp_ipc(packet, lisp_ipc_listen_socket, "lisp-ms")
return
#endif
#
# Check if inner packet is a LISP control-packet. Typically RLOC-probes
# from RTRs can come through NATs. We want to reply to the global address
# of the RTR which is the outer source RLOC. We don't care about the
# inner source port since the RTR will decapsulate a data encapsulated
# RLOC-probe Map-Reply. The inner LISP header begins at offset 20+16+28=64
# (outer-IPv4 + UDP-outer-LISP + inner-IPv4-UDP).
#
if (packet.lisp_header.get_instance_id() == 0xffffff):
inner_ip = packet.packet[36::]
inner_lisp = inner_ip[28::]
ttl = -1
if (lisp.lisp_is_rloc_probe_request(inner_lisp[0])):
ttl = struct.unpack("B", inner_ip[8])[0] - 1
#endif
source = packet.inner_source.print_address_no_iid()
lisp.lisp_parse_packet(lisp_send_sockets, inner_lisp, source, 0, ttl)
return
#endif
#
# Packets are arriving on pcap interface. Need to check if another data-
# plane is running. If so, don't deliver duplicates.
#
if (lisp.lisp_ipc_data_plane):
lisp.dprint("Drop packet, external data-plane active")
return
#endif
#
# Increment global stats.
#
lisp.lisp_decap_stats["good-packets"].increment(len(packet.packet))
#
# Strip outer headers and start inner header forwarding logic.
#
packet.strip_outer_headers()
f_or_b = lisp.bold("Forward", False)
#
# Process inner header (checksum and decrement ttl).
#
L2 = packet.inner_dest.is_mac()
if (L2):
packet.packet = lisp.lisp_mac_input(packet.packet)
if (packet.packet == None): return
f_or_b = lisp.bold("Bridge", False)
elif (packet.inner_version == 4):
packet.packet = lisp.lisp_ipv4_input(packet.packet)
if (packet.packet == None): return
packet.inner_ttl = packet.outer_ttl
elif (packet.inner_version == 6):
packet.packet = lisp.lisp_ipv6_input(packet)
if (packet.packet == None): return
packet.inner_ttl = packet.outer_ttl
else:
lisp.dprint("Cannot parse inner packet header")
return
#endif
#
# Check if database-mapping exists for our local destination.
#
if (packet.inner_dest.is_multicast_address() == False):
db = lisp.lisp_db_for_lookups.lookup_cache(packet.inner_dest, False)
if (db):
db.increment_decap_stats(packet)
else:
lisp.dprint("No database-mapping found for EID {}".format( \
lisp.green(packet.inner_dest.print_address(), False)))
return
#endif
#endif
#
# If this is a trace packet, lisp_trace_append() will swap addresses
# and send packet back to source. We have no app to forward this decap'ed
# packet to, so return.
#
if (packet.is_trace()):
if (lisp.lisp_trace_append(packet, ed="decap") == False): return
#endif
#
# We are going to forward or bridge the decapsulated packet.
#
addr_str = "{} -> {}".format(packet.inner_source.print_address(),
packet.inner_dest.print_address())
lisp.dprint("{} packet for EIDs {}: {} ...".format(f_or_b, \
lisp.green(addr_str, False),
lisp.lisp_format_packet(packet.packet[0:60])))
#
# If we are decapsulating a MAC frame, then use the L2 socket where
# the MAC header is already in packet.
#
if (L2):
packet.bridge_l2_packet(packet.inner_dest, db)
return
#endif
#
# Send on L2 socket since IPv6 raw sockets do not allow us to send an
# entire IPv6 header in payload. Prepend prebuilt MAC header.
#
if (packet.inner_version == 6):
packet.send_l2_packet(lisp_l2_socket, lisp_mac_header)
return
#endif
#
# Default to global raw socket otherwise get socket based on instance-ID.
#
raw_socket = packet.get_raw_socket()
if (raw_socket == None): raw_socket = lisp_raw_socket
#
# Send out.
#
packet.send_packet(raw_socket, packet.inner_dest)
return
#enddef
#
# lisp_etr_nat_data_plane
#
# Packet came in on a destination ephemeral port from a source port of 4341.
# That is a RTR encapsulated this packet that is coming through a NAT device.
#
# The packet has the outer IP and UDP headers stripped so the first byte of
# this supplied data packet has the LISP data header on it.
#
def lisp_etr_nat_data_plane(lisp_raw_socket, packet, source):
global lisp_ipc_listen_socket, lisp_send_sockets
#
# Decode LISP header.
#
lisp_header = packet
packet = lisp.lisp_packet(packet[8::])
if (packet.lisp_header.decode(lisp_header) == False): return
#
# Store outer source RLOC address so if we are doing lisp-crypto across
# NAT-traversal, we can find the decryption key.
#
packet.outer_source = lisp.lisp_address(lisp.LISP_AFI_IPV4, source,
lisp.LISP_IPV4_HOST_MASK_LEN, 0)
status = packet.decode(False, lisp_ipc_listen_socket,
lisp.lisp_decap_stats)
if (status == None): return
#
# Special case to log packets that have no outer header but are considered
# decapsulated when coming through NATs. Since packets are sent from
# source port 4341, the kernel will strip outer header, so we don't have
# outer header context in lisp_packet().
#
if (lisp.lisp_flow_logging): packet.log_flow(False)
packet.print_packet("Kernel-decap", False)
lisp.dprint(packet.lisp_header.print_header(" "))
#
# If we are looping back Map-Registers via encapsulation, overwrite
# multicast address with source address. That means we are sending a
# Map-Register message to the lisp-core process from our local RLOC
# address to our local RLOC address. Also, zero out the UDP checksum
# since the destination address changes that affects the pseudo-header.
#
if (lisp.lisp_decent_push_configured and
packet.inner_dest.is_multicast_address() and \
packet.lisp_header.get_instance_id() == 0xffffff):
sport = packet.udp_sport
packet = packet.packet[28::]
packet = lisp.lisp_packet_ipc(packet, source, sport)
lisp.lisp_ipc(packet, lisp_ipc_listen_socket, "lisp-ms")
return
#endif
#
# Check if inner packet is a LISP control-packet. Typically RLOC-probes
# from RTRs can come through NATs. We want to reply to the global address
# of the RTR which is the outer source RLOC. We don't care about the
# inner source port since the RTR will decapsulate a data encapsulated
# RLOC-probe Map-Reply.
#
if (packet.lisp_header.get_instance_id() == 0xffffff):
inner_ip = packet.packet
inner_lisp = inner_ip[28::]
ttl = -1
if (lisp.lisp_is_rloc_probe_request(inner_lisp[0])):
ttl = struct.unpack("B", inner_ip[8])[0] - 1
#endif
lisp.lisp_parse_packet(lisp_send_sockets, inner_lisp, source, 0, ttl)
return
#endif
#
# Packets are arriving on ephemeral socket. Need to check if another data-
# plane is running. If so, don't deliver duplicates.
#
if (lisp.lisp_ipc_data_plane):
lisp.dprint("Drop packet, external data-plane active")
return
#endif
#
# Increment global stats.
#
lisp.lisp_decap_stats["good-packets"].increment(len(packet.packet))
#
# Check if database-mapping exists for our local destination.
#
if (packet.inner_dest.is_multicast_address() == False):
db = lisp.lisp_db_for_lookups.lookup_cache(packet.inner_dest, False)
if (db):
db.increment_decap_stats(packet)
else:
lisp.dprint("No database-mapping found for EID {}".format( \
lisp.green(packet.inner_dest.print_address(), False)))
#endif
#endif
#endif
#
# If this is a trace packet, lisp_trace_append() will swap addresses
# and send packet back to source. We have no app to forward this decap'ed
# packet to, so return.
#
if (packet.is_trace()):
if (lisp.lisp_trace_append(packet, ed="decap") == False): return
#endif
addr_str = "{} -> {}".format(packet.inner_source.print_address(),
packet.inner_dest.print_address())
lisp.dprint("{} packet for EIDs {}: {} ...".format( \
lisp.bold("NAT-Forward", False), lisp.green(addr_str, False),
lisp.lisp_format_packet(packet.packet[0:60])))
#
# Send on L2 socket since IPv6 raw sockets do not allow us to send an
# entire IPv6 header in payload. Prepend prebuilt MAC header.
#
if (packet.inner_version == 6):
packet.send_l2_packet(lisp_l2_socket, lisp_mac_header)
return
#endif
#
# Default to global raw socket otherwise get socket based on instance-ID.
#
raw_socket = packet.get_raw_socket()
if (raw_socket == None): raw_socket = lisp_raw_socket
#
# Send out on raw socket.
#
packet.send_packet(raw_socket, packet.inner_dest)
return
#enddef
#
# lisp_register_ipv6_group_entries
#
# Find an IPv6 group-mapping and send a Map-Register for each configured IPv6
# source for the IPv6 group-prefix found.
#
def lisp_register_ipv6_group_entries(group, joinleave):
ms_gm = None
for gm in lisp.lisp_group_mapping_list.values():
mask_len = lisp_is_group_more_specific(group, gm)
if (mask_len == -1): continue
if (ms_gm == None or mask_len > ms_gm.group_prefix.mask_len): ms_gm = gm
#endfor
if (ms_gm == None): return
sg = []
for s in ms_gm.sources:
sg.append([s, group, joinleave])
#endfor
lisp_send_multicast_map_register(lisp_send_sockets, sg)
return
#enddef
#
# lisp_etr_join_leave_process
#
# Look at the file system to see if there is a join or leave to be done. This
# function sends joins by building an IP/IGMPv2 packet that is passed to
# lisp_process_igmp_packet(). The groups that are joined are the ones found
# as filenames in the current directory of the form "join-<group>". The IGMP
# Reports will be sent to lisp_process_igmp_packet() every 10 seconds (see
# the sleep at the bottom of the main loop).
#
# For right now, if the group address is IPv6, send a Map-Register directly.
# We will get to MLD support later.
#
# This is used for testing and not meant for production deployment.
#
def lisp_etr_join_leave_process():
global lisp_send_sockets
lisp.lisp_set_exception()
swap = socket.htonl
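#
# The words below are a prebuilt 24-byte IPv4 header (IHL=6, so it includes
# a 4-byte Router Alert option), TTL 1, protocol 2 (IGMP), destined to
# 224.0.0.251. The IGMP Report/Leave type and the group address are appended
# per group in the loop below.
#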
ipigmp = [swap(0x46000020), swap(0x9fe60000), swap(0x0102d7cc),
swap(0x0acfc15a), swap(0xe00000fb), swap(0x94040000)]
packet = ""
for l in ipigmp: packet += struct.pack("I", l)
#
# Look for files in current directory for "join-<group>" and then send
# an IGMPv2 report to ourselves.
#
while (True):
groups = commands.getoutput("ls join-*").replace("join-", "")
groups = groups.split("\n")
for group in groups:
if (lisp.lisp_valid_address_format("address", group) == False):
continue
#endif
ipv6 = (group.find(":") != -1)
#
# Check if we are leaving group.
#
leavejoin = os.path.exists("leave-{}".format(group))
lisp.lprint("Internal {} group {}".format( \
"leaving" if leavejoin else "joining", group))
#
# Set IGMP message to Report or Leave. Then add group.
#
if (ipv6):
if (group.lower().find("ff02:") != -1):
lisp.lprint("Suppress registration for link-local groups")
continue
#endif
lisp_register_ipv6_group_entries(group, (leavejoin == False))
else:
send_packet = packet
if (leavejoin):
send_packet += struct.pack("I", swap(0x17000000))
else:
send_packet += struct.pack("I", swap(0x16000000))
#endif
octet = group.split(".")
value = int(octet[0]) << 24
value += int(octet[1]) << 16
value += int(octet[2]) << 8
value += int(octet[3])
send_packet += struct.pack("I", swap(value))
lisp_process_igmp_packet(send_packet)
time.sleep(.100)
#endif
#endfor
time.sleep(10)
#endwhile
return
#enddef
#
# lisp_etr_process
#
# This thread is for receiving LISP encapsulated packets addressed to
# destination port 4341, as well as IGMP reports. The IGMP reports can be
# captured on Ubuntu and Fedora but not on MacOS; the former support IGMPv3
# and the latter supports IGMPv2 if we listen on "en0".
#
def lisp_etr_process():
lisp.lisp_set_exception()
if (lisp.lisp_myrlocs[0] == None): return
#
# Find all multicast RLEs so we can receive packets on underlay multicast
# groups.
#
rles = lisp.lisp_get_all_multicast_rles()
#
# We need to listen on en0 when doing IGMP testing on MacOS.
#
device = "any"
# device = "en0" if lisp.lisp_is_macos() else "any"
# device = "lo0" if lisp.lisp_is_macos() else "any"
pcap = pcappy.open_live(device, 1600, 0, 100)
pfilter = "(proto 2) or "
pfilter += "((dst host "
for addr in lisp.lisp_get_all_addresses() + rles:
pfilter += "{} or ".format(addr)
#endfor
pfilter = pfilter[0:-4]
pfilter += ") and ((udp dst port 4341 or 8472 or 4789) or "
pfilter += "(udp src port 4341) or "
pfilter += "(udp dst port 4342 and ip[28] == 0x12) or "
pfilter += "(proto 17 and (ip[6]&0xe0 == 0x20 or " + \
"(ip[6]&0xe0 == 0 and ip[7] != 0)))))"
lisp.lprint("Capturing packets for: '{}' on device {}".format(pfilter,
device))
pcap.filter = pfilter
#
# Enter receive loop.
#
pcap.loop(-1, lisp_etr_data_plane, [device, lisp_raw_socket])
return
#enddef
#
# lisp_etr_startup
#
# Initialize this LISP ETR process. This function returns no values.
#
def lisp_etr_startup():
global lisp_ipc_listen_socket
global lisp_ephem_socket
global lisp_send_sockets
global lisp_raw_socket
global lisp_l2_socket
global lisp_mac_header
lisp.lisp_i_am("etr")
lisp.lisp_set_exception()
lisp.lisp_print_banner("ETR starting up")
#
# Get local address for source RLOC for encapsulation.
#
lisp.lisp_get_local_interfaces()
lisp.lisp_get_local_macs()
if (lisp.lisp_get_local_addresses() == False): return(False)
#
# Prebuild MAC header for lisp_l2_socket sending. Disabled code in favor
# of using pytun. See below.
#
# m = lisp.lisp_mymacs.keys()[0]
# mac = ""
# for i in range(0, 12, 2): mac += chr(int(m[i:i+2], 16))
# lisp_mac_header = mac + mac + "\x86\xdd"
# lisp.dprint("Built MAC header for L2 socket:",
# lisp.lisp_format_packet(lisp_mac_header))
#
# Used for listening for Info-Replies for NAT-traversal support.
#
s = lisp.lisp_open_listen_socket("0.0.0.0", str(lisp_ephem_port))
s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32)
lisp_ephem_socket = s
#
# Open network send socket and internal listen socket.
#
lisp_ipc_listen_socket = lisp.lisp_open_listen_socket("", "lisp-etr")
lisp_send_sockets[0] = lisp_ephem_socket
lisp_send_sockets[1] = lisp.lisp_open_send_socket("", lisp.LISP_AFI_IPV6)
lisp_send_sockets[2] = lisp_ipc_listen_socket
#
# Open up raw socket so we can send with IP headers after decapsulation.
# There is a special case where the RTR's lisp_send_sockets array is of
# size 4 since we need to pass the raw socket through the lisp.py module
# to send a data encapsulated RLOC-probe to an ETR that sits behind a NAT.
# The test is in lisp_send_map_request() for this. This is the case in
# ETRs as well. All other components use an array size of 3 modulo.
#
lisp_raw_socket = socket.socket(socket.AF_INET, socket.SOCK_RAW,
socket.IPPROTO_RAW)
lisp_raw_socket.setsockopt(socket.SOL_IP, socket.IP_HDRINCL, 1)
lisp_send_sockets.append(lisp_raw_socket)
#
# Open an L2 socket so when we decapsulate and have to route an IPv6
# packet, we have the kernel receive a MAC frame on the loopback interface.
# We do this because there is no IP_HDRINCL for IPv6 raw sockets.
#
# Disabling this code in favor of using a tuntap tun interface via the
# pytun module. See code right below.
#
# if ("PF_PACKET" in dir(socket)):
# interface = "lo" if ("lo" in lisp.lisp_myinterfaces.keys()) else \
# "lo0" if ("lo0" in lisp.lisp_myinterfaces.keys()) else None
# if (interface != None):
# lisp_l2_socket = socket.socket(socket.PF_PACKET, socket.SOCK_RAW)
# lisp_l2_socket.bind(("lo", 0x86dd))
# #endif
# #endif
#
# Setup tuntap tunnel interface so when we decap IPv6 packets, we can
# send to kernel to route them.
#
if (pytun != None):
lisp_mac_header = '\x00\x00\x86\xdd'
device = "lispers.net"
try:
lisp_l2_socket = pytun.TunTapDevice(flags=pytun.IFF_TUN,
name=device)
os.system("ip link set dev {} up".format(device))
except:
lisp.lprint("Cannot create tuntap interface")
#endtry
#endif
#
# Start thread to listen on data socket.
#
threading.Thread(target=lisp_etr_process, args=[]).start()
#
# Test code to force IGMPv2 joins and leaves on an airplane. ;-)
#
threading.Thread(target=lisp_etr_join_leave_process, args=[]).start()
return(True)
#enddef
#
# lisp_etr_shutdown
#
# Shut down this process.
#
def lisp_etr_shutdown():
global lisp_register_timer
global lisp_etr_info_timer
#
# Cancel periodic Map-Register and Info timer threads.
#
if (lisp_register_timer): lisp_register_timer.cancel()
if (lisp_etr_info_timer): lisp_etr_info_timer.cancel()
#
# Close sockets.
#
lisp.lisp_close_socket(lisp_send_sockets[0], "")
lisp.lisp_close_socket(lisp_send_sockets[1], "")
lisp.lisp_close_socket(lisp_ipc_listen_socket, "lisp-etr")
return
#enddef
#
# lisp_etr_discover_eid
#
# Process IPC message from the lisp-itr process. It will be in the form of:
#
# "learn%<eid-string>%<interface-name>"
#
def lisp_etr_discover_eid(ipc):
ipc = ipc.split("%")
eid_str = ipc[1]
interface = ipc[2]
if (interface == "None"): interface = None
eid = lisp.lisp_address(lisp.LISP_AFI_NONE, "", 0, 0)
eid.store_address(eid_str)
#
# Do database-mapping lookup.
#
db = lisp.lisp_db_for_lookups.lookup_cache(eid, False)
if (db == None or db.dynamic_eid_configured() == False):
lisp.lprint("ITR/ETR dynamic-EID configuration out of sync for {}". \
format(lisp.green(eid_str, False)))
return
#endif
#
# Do logic checks. That is do not remove an entry if it is not there and
# don't try to add an entry if it is already cached.
#
dyn_eid = None
if (db.dynamic_eids.has_key(eid_str)): dyn_eid = db.dynamic_eids[eid_str]
if (dyn_eid == None and interface == None):
lisp.lprint("ITR/ETR state mismatch for {}".format( \
lisp.green(eid_str, False)))
return
#endif
#
# Check if ITR is changing the interface to the same interface, meaning
# it is confused. Otherwise, the IPC is an interface change. Don't register
# in this case.
#
if (dyn_eid and interface):
if (dyn_eid.interface == interface):
lisp.lprint("ITR sent redundant IPC for {}".format( \
lisp.green(eid_str, False)))
else:
lisp.lprint("Dynamic-EID {} interface change, {} -> {}".format( \
lisp.green(eid_str, False), dyn_eid.interface, interface))
dyn_eid.interface = interface
#endif
return
#endif
#
# Add new entry and register it.
#
if (interface):
dyn_eid = lisp.lisp_dynamic_eid()
dyn_eid.dynamic_eid.copy_address(eid)
dyn_eid.interface = interface
dyn_eid.get_timeout(interface)
db.dynamic_eids[eid_str] = dyn_eid
reg = lisp.bold("Registering", False)
eid_str = lisp.bold(eid_str, False)
lisp.lprint("{} dynamic-EID {} on interface {}, timeout {}".format(reg,
lisp.green(eid_str, False), interface, dyn_eid.timeout))
lisp_build_map_register(lisp_send_sockets, None, eid, None, False)
#
# Add /32 to routing table.
#
if (lisp.lisp_is_macos() == False):
eid_str = eid.print_prefix_no_iid()
cmd = "ip route add {} dev {}".format(eid_str, interface)
os.system(cmd)
#endif
return
#endif
#
# Remove existing entry and deregister it.
#
if (db.dynamic_eids.has_key(eid_str)):
interface = db.dynamic_eids[eid_str].interface
dereg = lisp.bold("Deregistering", False)
lisp.lprint("{} dynamic-EID {}".format(dereg,
lisp.green(eid_str, False)))
lisp_build_map_register(lisp_send_sockets, 0, eid, None, False)
db.dynamic_eids.pop(eid_str)
#
# Delete /32 from routing table.
#
if (lisp.lisp_is_macos() == False):
eid_str = eid.print_prefix_no_iid()
cmd = "ip route delete {} dev {}".format(eid_str, interface)
os.system(cmd)
#endif
#endif
return
#enddef
#
# lisp_etr_process_rtr_updown
#
# Process IPC message from lisp-itr. It tells the lisp-etr process whether
# RLOC-probing has determined that the RTR has gone up or down, and therefore
# whether it should be registered to the mapping system.
#
def lisp_etr_process_rtr_updown(ipc):
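# If we are configured to register all RTRs regardless of reachability,
# ignore RTR up/down IPC messages from the ITR.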
if (lisp.lisp_register_all_rtrs): return
opcode, rtr_str, status = ipc.split("%")
if (lisp.lisp_rtr_list.has_key(rtr_str) == False): return
lisp.lprint("Process ITR IPC message, RTR {} has gone {}".format(
lisp.red(rtr_str, False), lisp.bold(status, False)))
rtr = lisp.lisp_rtr_list[rtr_str]
if (status == "down"):
lisp.lisp_rtr_list[rtr_str] = None
return
#endif
rtr = lisp.lisp_address(lisp.LISP_AFI_IPV4, rtr_str, 32, 0)
lisp.lisp_rtr_list[rtr_str] = rtr
return
#enddef
#
# lisp_etr_process_nonce_ipc
#
# Process a nonce IPC message from the ITR. It wants to know when a nonce
# is echoed from a remote ITR.
#
def lisp_etr_process_nonce_ipc(ipc):
x, opcode, rloc_str, nonce = ipc.split("%")
nonce = int(nonce, 16)
echo_nonce = lisp.lisp_get_echo_nonce(None, rloc_str)
if (echo_nonce == None): echo_nonce = lisp.lisp_echo_nonce(rloc_str)
if (opcode == "R"):
echo_nonce.request_nonce_sent = nonce
lisp.lprint("Waiting for echo-nonce 0x{} from {}".format( \
lisp.lisp_hex_string(nonce), lisp.red(echo_nonce.rloc_str, False)))
elif (opcode == "E"):
echo_nonce.echo_nonce_sent = nonce
lisp.lprint("Sent echo-nonce 0x{} to {}".format( \
lisp.lisp_hex_string(nonce), lisp.red(echo_nonce.rloc_str, False)))
#endif
return
#enddef
#
# ETR commands processed by this process.
#
lisp_etr_commands = {
"lisp xtr-parameters" : [lispconfig.lisp_xtr_command, {
"rloc-probing" : [True, "yes", "no"],
"nonce-echoing" : [True, "yes", "no"],
"data-plane-security" : [True, "yes", "no"],
"data-plane-logging" : [True, "yes", "no"],
"frame-logging" : [True, "yes", "no"],
"flow-logging" : [True, "yes", "no"],
"nat-traversal" : [True, "yes", "no"],
"checkpoint-map-cache" : [True, "yes", "no"],
"ipc-data-plane" : [True, "yes", "no"],
"decentralized-push-xtr" : [True, "yes", "no"],
"decentralized-pull-xtr-modulus" : [True, 1, 0xff],
"decentralized-pull-xtr-dns-suffix" : [True],
"register-reachable-rtrs" : [True, "yes", "no"],
"program-hardware" : [True, "yes", "no"] }],
"lisp interface" : [lispconfig.lisp_interface_command, {
"interface-name" : [True],
"device" : [True],
"instance-id" : [True, 0, 0xffffffff],
"dynamic-eid" : [True],
"dynamic-eid-device" : [True],
"lisp-nat" : [True, "yes", "no"],
"dynamic-eid-timeout" : [True, 0, 0xff] }],
"lisp map-server" : [lisp_map_server_command, {
"ms-name" : [True],
"address" : [True],
"dns-name" : [True],
"authentication-type" : [False, "sha1", "sha2"],
"authentication-key" : [False],
"encryption-key" : [False],
"proxy-reply" : [False, "yes", "no"],
"want-map-notify" : [False, "yes", "no"],
"merge-registrations" : [False, "yes", "no"],
"refresh-registrations" : [False, "yes", "no"],
"site-id" : [False, 1, 0xffffffffffffffff] }],
"lisp database-mapping" : [lisp_etr_database_mapping_command, {
"prefix" : [],
"mr-name" : [True],
"ms-name" : [True],
"instance-id" : [True, 0, 0xffffffff],
"secondary-instance-id" : [True, 0, 0xffffffff],
"eid-prefix" : [True],
"group-prefix" : [True],
"dynamic-eid" : [True, "yes", "no"],
"signature-eid" : [True, "yes", "no"],
"rloc" : [],
"rloc-record-name" : [True],
"elp-name" : [True],
"geo-name" : [True],
"rle-name" : [True],
"json-name" : [True],
"address" : [True],
"interface" : [True],
"priority" : [True, 0, 255],
"weight" : [True, 0, 100] }],
"lisp explicit-locator-path" : [lispconfig.lisp_elp_command, {
"elp-name" : [False],
"elp-node" : [],
"address" : [True],
"probe" : [True, "yes", "no"],
"strict" : [True, "yes", "no"],
"eid" : [True, "yes", "no"] }],
"lisp replication-list-entry" : [lispconfig.lisp_rle_command, {
"rle-name" : [False],
"rle-node" : [],
"address" : [True],
"level" : [True, 0, 255] }],
"lisp geo-coordinates" : [lispconfig.lisp_geo_command, {
"geo-name" : [False],
"geo-tag" : [False] }],
"lisp json" : [lispconfig.lisp_json_command, {
"json-name" : [False],
"json-string" : [False] }],
"lisp group-mapping" : [lisp_group_mapping_command, {
"group-name" : [False],
"ms-name" : [True],
"group-prefix" : [False],
"instance-id" : [True, 0, 0xffffffff],
"rle-address" : [False],
"sources" : [],
"address" : [True] }],
"show database-mapping" : [lisp_etr_show_command, { }],
"show etr-keys" : [lisp_etr_show_keys_command, {}],
"show etr-dynamic-eid" : [lispconfig.lisp_show_dynamic_eid_command, { }]
}
#------------------------------------------------------------------------------
#
# Main entry point for process.
#
if (lisp_etr_startup() == False):
lisp.lprint("lisp_etr_startup() failed")
lisp.lisp_print_banner("ETR abnormal exit")
exit(1)
#endif
socket_list = [lisp_ephem_socket, lisp_ipc_listen_socket]
while (True):
try: ready_list, w, x = select.select(socket_list, [], [])
except: break
#
# Process Info-Reply messages received on ephemeral port.
#
if (lisp_ephem_socket in ready_list):
opcode, source, port, packet = \
lisp.lisp_receive(lisp_ephem_socket, False)
if (source == ""): break
if (port == lisp.LISP_DATA_PORT):
lisp_etr_nat_data_plane(lisp_raw_socket, packet, source)
else:
if (lisp.lisp_is_rloc_probe_request(packet[0])):
lisp.lprint("ETR ignoring RLOC-probe request, using pcap")
continue
#endif
send_register = lisp.lisp_parse_packet(lisp_send_sockets, packet,
source, port)
#
# Info-Reply from map-server has new RTR-list, trigger a
# Map-Register and a Info-Request to the RTR.
#
if (send_register):
lisp_etr_info_timer = threading.Timer(0,
lisp_etr_process_info_timer, [None])
lisp_etr_info_timer.start()
lisp_register_timer = threading.Timer(0,
lisp_process_register_timer, [lisp_send_sockets])
lisp_register_timer.start()
#endif
#endif
#endif
#
# Process either commands, an IPC data-packet (for testing), or any
# protocol message on the IPC listen socket.
#
if (lisp_ipc_listen_socket in ready_list):
opcode, source, port, packet = \
lisp.lisp_receive(lisp_ipc_listen_socket, True)
if (source == ""): break
if (opcode == "command"):
if (packet.find("learn%") != -1):
lisp_etr_discover_eid(packet)
elif (packet.find("nonce%") != -1):
lisp_etr_process_nonce_ipc(packet)
elif (packet.find("clear%") != -1):
lispconfig.lisp_clear_decap_stats(packet)
elif (packet.find("rtr%") != -1):
lisp_etr_process_rtr_updown(packet)
elif (packet.find("stats%") != -1):
packet = packet.split("%")[-1]
lisp.lisp_process_data_plane_decap_stats(packet, None)
else:
lispconfig.lisp_process_command(lisp_ipc_listen_socket,
opcode, packet, "lisp-etr", [lisp_etr_commands])
#endif
elif (opcode == "api"):
lisp.lisp_process_api("lisp-etr", lisp_ipc_listen_socket, packet)
else:
if (lisp.lisp_is_rloc_probe_request(packet[0])):
lisp.lprint("ETR ignoring RLOC-probe request, using pcap")
continue
#endif
lisp.lisp_parse_packet(lisp_send_sockets, packet, source, port)
#endif
#endif
#endwhile
lisp_etr_shutdown()
lisp.lisp_print_banner("ETR normal exit")
exit(0)
#------------------------------------------------------------------------------
| 33.530766
| 79
| 0.582037
|
1c670c36935f6997f4ab13b7c2cc9fe84a2294a4
| 1,948
|
py
|
Python
|
actions/patchBatchV1NamespacedJobStatus.py
|
blinkops/stackstorm-kubernetes
|
3b4a15d42f603f3e700efaf534169e2ec361f5d2
|
[
"Apache-2.0"
] | 20
|
2016-12-24T01:35:41.000Z
|
2022-03-06T08:32:16.000Z
|
actions/patchBatchV1NamespacedJobStatus.py
|
blinkops/stackstorm-kubernetes
|
3b4a15d42f603f3e700efaf534169e2ec361f5d2
|
[
"Apache-2.0"
] | 16
|
2017-05-02T19:38:57.000Z
|
2021-06-17T08:31:17.000Z
|
actions/patchBatchV1NamespacedJobStatus.py
|
blinkops/stackstorm-kubernetes
|
3b4a15d42f603f3e700efaf534169e2ec361f5d2
|
[
"Apache-2.0"
] | 18
|
2017-06-20T00:44:12.000Z
|
2022-03-30T08:41:42.000Z
|
import json
from lib.k8s import K8sClient
class patchBatchV1NamespacedJobStatus(K8sClient):
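# StackStorm action wrapper: builds a PATCH request against the Kubernetes
# batch/v1 Job status subresource and returns (success, response) to the
# caller.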
def run(
self,
body,
name,
namespace,
pretty=None,
config_override=None):
ret = False
args = {}
args['config_override'] = {}
args['params'] = {}
if config_override is not None:
args['config_override'] = config_override
if body is not None:
args['body'] = body
else:
return (False, "body is a required parameter")
if name is not None:
args['name'] = name
else:
return (False, "name is a required parameter")
if namespace is not None:
args['namespace'] = namespace
else:
return (False, "namespace is a required parameter")
if pretty is not None:
args['params'].update({'pretty': pretty})
if 'body' in args:
args['data'] = args['body']
args.pop('body')
args['headers'] = {'Content-type': u'application/json-patch+json, application/merge-patch+json, application/strategic-merge-patch+json', 'Accept': u'application/json, application/yaml, application/vnd.kubernetes.protobuf'} # noqa pylint: disable=line-too-long
args['url'] = "apis/batch/v1/namespaces/{namespace}/jobs/{name}/status".format( # noqa pylint: disable=line-too-long
body=body, name=name, namespace=namespace)
args['method'] = "patch"
self.addArgs(**args)
self.makeRequest()
myresp = {}
myresp['status_code'] = self.resp.status_code
try:
myresp['data'] = json.loads(self.resp.content.rstrip())
except ValueError:
myresp['data'] = self.resp.content
if myresp['status_code'] >= 200 and myresp['status_code'] <= 299:
ret = True
return (ret, myresp)
| 31.934426
| 268
| 0.564168
|
64672782ffc5ad54be6b17fe343305a27f7706f4
| 15,582
|
py
|
Python
|
bitmex_swagger/api/quote_api.py
|
silencewwt/bitmex-swagger-client
|
01403685eeb12eb27d53a0310d3bc7541793aa0f
|
[
"MIT"
] | 1
|
2018-08-04T15:05:43.000Z
|
2018-08-04T15:05:43.000Z
|
bitmex_swagger/api/quote_api.py
|
silencewwt/bitmex-swagger
|
01403685eeb12eb27d53a0310d3bc7541793aa0f
|
[
"MIT"
] | null | null | null |
bitmex_swagger/api/quote_api.py
|
silencewwt/bitmex-swagger
|
01403685eeb12eb27d53a0310d3bc7541793aa0f
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
BitMEX API
## REST API for the BitMEX Trading Platform [View Changelog](/app/apiChangelog) ---- #### Getting Started Base URI: [https://www.bitmex.com/api/v1](/api/v1) ##### Fetching Data All REST endpoints are documented below. You can try out any query right from this interface. Most table queries accept `count`, `start`, and `reverse` params. Set `reverse=true` to get rows newest-first. Additional documentation regarding filters, timestamps, and authentication is available in [the main API documentation](/app/restAPI). *All* table data is available via the [Websocket](/app/wsAPI). We highly recommend using the socket if you want to have the quickest possible data without being subject to ratelimits. ##### Return Types By default, all data is returned as JSON. Send `?_format=csv` to get CSV data or `?_format=xml` to get XML data. ##### Trade Data Queries *This is only a small subset of what is available, to get you started.* Fill in the parameters and click the `Try it out!` button to try any of these queries. * [Pricing Data](#!/Quote/Quote_get) * [Trade Data](#!/Trade/Trade_get) * [OrderBook Data](#!/OrderBook/OrderBook_getL2) * [Settlement Data](#!/Settlement/Settlement_get) * [Exchange Statistics](#!/Stats/Stats_history) Every function of the BitMEX.com platform is exposed here and documented. Many more functions are available. ##### Swagger Specification [⇩ Download Swagger JSON](swagger.json) ---- ## All API Endpoints Click to expand a section. # noqa: E501
OpenAPI spec version: 1.2.0
Contact: support@bitmex.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from bitmex_swagger.api_client import ApiClient
class QuoteApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def quote_get(self, **kwargs): # noqa: E501
"""Get Quotes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.quote_get(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
:param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns that you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Quote]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.quote_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.quote_get_with_http_info(**kwargs) # noqa: E501
return data
def quote_get_with_http_info(self, **kwargs): # noqa: E501
"""Get Quotes. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.quote_get_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
:param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns that you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Quote]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['symbol', 'filter', 'columns', 'count', 'start', 'reverse', 'start_time', 'end_time'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method quote_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'symbol' in params:
query_params.append(('symbol', params['symbol'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'columns' in params:
query_params.append(('columns', params['columns'])) # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'reverse' in params:
query_params.append(('reverse', params['reverse'])) # noqa: E501
if 'start_time' in params:
query_params.append(('startTime', params['start_time'])) # noqa: E501
if 'end_time' in params:
query_params.append(('endTime', params['end_time'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/quote', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Quote]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def quote_get_bucketed(self, **kwargs): # noqa: E501
"""Get previous quotes in time buckets. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.quote_get_bucketed(async=True)
>>> result = thread.get()
:param async bool
:param str bin_size: Time interval to bucket by. Available options: [1m,5m,1h,1d].
:param bool partial: If true, will send in-progress (incomplete) bins for the current time period.
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
:param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns that you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Quote]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.quote_get_bucketed_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.quote_get_bucketed_with_http_info(**kwargs) # noqa: E501
return data
def quote_get_bucketed_with_http_info(self, **kwargs): # noqa: E501
"""Get previous quotes in time buckets. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.quote_get_bucketed_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str bin_size: Time interval to bucket by. Available options: [1m,5m,1h,1d].
:param bool partial: If true, will send in-progress (incomplete) bins for the current time period.
:param str symbol: Instrument symbol. Send a bare series (e.g. XBU) to get data for the nearest expiring contract in that series. You can also send a timeframe, e.g. `XBU:monthly`. Timeframes are `daily`, `weekly`, `monthly`, `quarterly`, and `biquarterly`.
:param str filter: Generic table filter. Send JSON key/value pairs, such as `{\"key\": \"value\"}`. You can key on individual fields, and do more advanced querying on timestamps. See the [Timestamp Docs](https://www.bitmex.com/app/restAPI#Timestamp-Filters) for more details.
:param str columns: Array of column names to fetch. If omitted, will return all columns. Note that this method will always return item keys, even when not specified, so you may receive more columns that you expect.
:param float count: Number of results to fetch.
:param float start: Starting point for results.
:param bool reverse: If true, will sort results newest first.
:param datetime start_time: Starting date filter for results.
:param datetime end_time: Ending date filter for results.
:return: list[Quote]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bin_size', 'partial', 'symbol', 'filter', 'columns', 'count', 'start', 'reverse', 'start_time', 'end_time'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method quote_get_bucketed" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'bin_size' in params:
query_params.append(('binSize', params['bin_size'])) # noqa: E501
if 'partial' in params:
query_params.append(('partial', params['partial'])) # noqa: E501
if 'symbol' in params:
query_params.append(('symbol', params['symbol'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'columns' in params:
query_params.append(('columns', params['columns'])) # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'reverse' in params:
query_params.append(('reverse', params['reverse'])) # noqa: E501
if 'start_time' in params:
query_params.append(('startTime', params['start_time'])) # noqa: E501
if 'end_time' in params:
query_params.append(('endTime', params['end_time'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/x-www-form-urlencoded']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/quote/bucketed', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Quote]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 54.673684
| 1,509
| 0.645617
|
522a50c7d60f12d587956d88a21b0b4a2726f089
| 3,057
|
py
|
Python
|
pytrader/ddl.py
|
irr/python-labs
|
43bb3a528c151653b2be832c7ff13240a10e18a4
|
[
"Apache-2.0"
] | 4
|
2015-11-25T09:06:44.000Z
|
2019-12-11T21:35:21.000Z
|
pytrader/ddl.py
|
irr/python-labs
|
43bb3a528c151653b2be832c7ff13240a10e18a4
|
[
"Apache-2.0"
] | null | null | null |
pytrader/ddl.py
|
irr/python-labs
|
43bb3a528c151653b2be832c7ff13240a10e18a4
|
[
"Apache-2.0"
] | 2
|
2015-11-25T09:19:38.000Z
|
2016-02-26T03:54:06.000Z
|
#!/usr/bin/env python
import sys
# ./ddl.py symbols.sql csv > symbols.csv
# ...|grep UOLL4| awk -F "," '{print $2","$3","$4","$5","$6","$7","$8}' > UOLL4.csv
# sqlite3 symbols.db
# CREATE TABLE symbols (S TEXT NOT NULL, D TEXT NOT NULL, O REAL NOT NULL, H REAL NOT NULL, L REAL NOT NULL, C REAL NOT NULL, V REAL NOT NULL, A REAL NOT NULL);
# CREATE INDEX symbols_idx01 ON symbols (S);
# CREATE INDEX symbols_idx02 ON symbols (S, D);
# .separator ','
# .import symbols.csv symbols
# R commands:
#> library(quantmod)
#> getSymbols("UOLL4", src="csv", dir="/data/bovespa")
#> candleChart(UOLL4,multi.col=TRUE,theme="white")
#> chartSeries(to.weekly(UOLL4),up.col='white',dn.col='blue',theme="white")
#> addMACD()
#> addBBands()
#> addSAR()
#> addSMI()
INSERT = "INSERT INTO symbols VALUES(\"%s\", \"%s\", %s, %s, %s, %s, %s, %s);"
CREATE_SQLITE = """
/* ./ddl.py symbols.sql |sqlite3 bovespa.db */
DROP TABLE IF EXISTS symbols;
CREATE TABLE symbols (S TEXT NOT NULL, D TEXT NOT NULL, O REAL NOT NULL, H REAL NOT NULL, L REAL NOT NULL, C REAL NOT NULL, V REAL NOT NULL, A REAL NOT NULL);
CREATE INDEX symbols_idx01 ON symbols (S);
CREATE INDEX symbols_idx02 ON symbols (S, D);
"""
CREATE_MYSQL = """
DROP DATABASE IF EXISTS bovespa;
CREATE DATABASE bovespa;
USE bovespa;
CREATE TABLE symbols (S varchar(10) COLLATE latin1_bin NOT NULL, D varchar(10) COLLATE latin1_bin NOT NULL, O float NOT NULL, H float NOT NULL, L float NOT NULL, C float NOT NULL, V float NOT NULL, A float NOT NULL) ENGINE=MyISAM DEFAULT CHARSET=latin1 COLLATE=latin1_bin;
CREATE INDEX symbols_idx01 ON symbols (S);
CREATE INDEX symbols_idx02 ON symbols (S, D);
"""
def check(line, fields, n):
if len(fields) != n:
print "ERROR: invalid line! [%s](%s:%d)" % (line, fields, n)
sys.exit(1)
def process(fname, db = None):
with open(fname, 'r') as f:
if db == None or db.lower() == "sqlite":
print CREATE_SQLITE
elif db.lower() == "mysql":
print CREATE_MYSQL
for l in f:
if l.startswith("/*"):
continue
line = l.strip()
f1 = line.split("INSERT INTO")
check(line, f1, 2)
f2 = f1[1].split("VALUES")
check(line, f2, 2)
stock = f2[0].strip().replace(" ", "_")
f3 = f2[1].split("(")
check(line, f3, 2)
f4 = f3[1].split(");")[0]
values = f4.split(",")
values.insert(0, stock)
values[1] = "%s-%s-%s" % (values[1][:4], values[1][4:6], values[1][6:])
values.append(0)
if db != None and db.lower() == "csv":
print "%s,%s,%s,%s,%s,%s,%s,%s" % tuple(values)
else:
print INSERT % tuple(values)
if __name__ == '__main__':
if len(sys.argv) > 1:
if len(sys.argv) > 2:
process(sys.argv[1], sys.argv[2])
else:
process(sys.argv[1])
else:
print "usage: ddl.py <symbols sql file> [sqlite|mysql|csv] default=sqlite"
sys.exit(1)
| 35.137931
| 272
| 0.581289
|
84cb324ee5195f33e9351a5473488f9ab6e35f76
| 586
|
py
|
Python
|
problems/big_data_hdf5_2.py
|
rayanimam/Python-in-the-lab
|
49b9d47e3363fc0b51e8c534e4c016ab7991cd33
|
[
"CC-BY-3.0"
] | 12
|
2017-01-27T09:47:50.000Z
|
2020-06-26T07:08:22.000Z
|
problems/big_data_hdf5_2.py
|
rayanimam/Python-in-the-lab
|
49b9d47e3363fc0b51e8c534e4c016ab7991cd33
|
[
"CC-BY-3.0"
] | null | null | null |
problems/big_data_hdf5_2.py
|
rayanimam/Python-in-the-lab
|
49b9d47e3363fc0b51e8c534e4c016ab7991cd33
|
[
"CC-BY-3.0"
] | 8
|
2017-01-27T09:49:18.000Z
|
2021-03-19T17:10:07.000Z
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import time
t0 = time.time()
fig = plt.figure()
ax = fig.add_subplot(111)
period = 7200        # rows per chunk read from the HDF5 table
i0 = 0
table = "table/t1"
# read the first chunk once to get the Theta axis used for plotting
data = pd.read_hdf("test.hdf5", table, start=0, stop=period)
print(data.shape)
theta = data.Theta
average, N = 0, 100
for i in range(N):
data = pd.read_hdf("test.hdf5", table, start=i*period, stop=(i+1)*period)
data = data.p_Cyl1
average += data.values
ax.plot(theta, data, lw=1, c='gray')
ax.plot(theta, average/N, lw=1, c='r')
print(time.time() - t0)
plt.show()
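# A possible alternative sketch (assumption: "test.hdf5" was written in table
# format, so pandas can hand back an iterator of chunks instead of issuing
# explicit start/stop reads):
#
#     for chunk in pd.read_hdf("test.hdf5", table, chunksize=period):
#         average += chunk.p_Cyl1.values
#
# This avoids recomputing the row offsets by hand for each cycle.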
| 23.44
| 77
| 0.675768
|
83ca7d7bff749b9977274e2c2a5b6879d0dadfbe
| 50,355
|
py
|
Python
|
pyglmnet/pyglmnet.py
|
skabl/zmglmnet
|
82b6f523ab6a5cb90d8f6e9cc147c5ac4f04b209
|
[
"MIT"
] | null | null | null |
pyglmnet/pyglmnet.py
|
skabl/zmglmnet
|
82b6f523ab6a5cb90d8f6e9cc147c5ac4f04b209
|
[
"MIT"
] | null | null | null |
pyglmnet/pyglmnet.py
|
skabl/zmglmnet
|
82b6f523ab6a5cb90d8f6e9cc147c5ac4f04b209
|
[
"MIT"
] | null | null | null |
"""Python implementation of elastic-net regularized GLMs."""
import warnings
from copy import deepcopy
import numpy as np
from scipy.special import expit, loggamma
from scipy.stats import norm
from .utils import logger, set_log_level, _check_params, \
_verbose_iterable, _tqdm_log, softplus
from .base import BaseEstimator, is_classifier, check_version
from .externals.sklearn.utils import check_random_state, check_array, check_X_y
from .externals.sklearn.utils.validation import check_is_fitted
ALLOWED_DISTRS = ['gaussian', 'binomial', 'softplus', 'poisson',
'probit', 'gamma', 'neg-binomial']
def _probit_g1(z, pdfz, cdfz, thresh=5):
res = np.zeros_like(z)
res[z < -thresh] = np.log(-pdfz[z < -thresh] / z[z < -thresh])
res[np.abs(z) <= thresh] = np.log(cdfz[np.abs(z) <= thresh])
res[z > thresh] = -pdfz[z > thresh] / z[z > thresh]
return res
def _probit_g2(z, pdfz, cdfz, thresh=5):
res = np.zeros_like(z)
res[z < -thresh] = pdfz[z < -thresh] / z[z < -thresh]
res[np.abs(z) <= thresh] = np.log(1 - cdfz[np.abs(z) <= thresh])
res[z > thresh] = np.log(pdfz[z > thresh] / z[z > thresh])
return res
def _probit_g3(z, pdfz, cdfz, thresh=5):
res = np.zeros_like(z)
res[z < -thresh] = -z[z < -thresh]
res[np.abs(z) <= thresh] = \
pdfz[np.abs(z) <= thresh] / cdfz[np.abs(z) <= thresh]
res[z > thresh] = pdfz[z > thresh]
return res
def _probit_g4(z, pdfz, cdfz, thresh=5):
res = np.zeros_like(z)
res[z < -thresh] = pdfz[z < -thresh]
res[np.abs(z) <= thresh] = \
pdfz[np.abs(z) <= thresh] / (1 - cdfz[np.abs(z) <= thresh])
res[z > thresh] = z[z > thresh]
return res
def _probit_g5(z, pdfz, cdfz, thresh=5):
res = np.zeros_like(z)
res[z < -thresh] = 0 * z[z < -thresh]
res[np.abs(z) <= thresh] = \
z[np.abs(z) <= thresh] * pdfz[np.abs(z) <= thresh] / \
cdfz[np.abs(z) <= thresh] + (pdfz[np.abs(z) <= thresh] /
cdfz[np.abs(z) <= thresh]) ** 2
res[z > thresh] = z[z > thresh] * pdfz[z > thresh] + pdfz[z > thresh] ** 2
return res
def _probit_g6(z, pdfz, cdfz, thresh=5):
res = np.zeros_like(z)
res[z < -thresh] = \
pdfz[z < -thresh] ** 2 - z[z < -thresh] * pdfz[z < -thresh]
res[np.abs(z) <= thresh] = \
(pdfz[np.abs(z) <= thresh] / (1 - cdfz[np.abs(z) <= thresh])) ** 2 - \
z[np.abs(z) <= thresh] * pdfz[np.abs(z) <= thresh] / \
(1 - cdfz[np.abs(z) <= thresh])
res[z > thresh] = 0 * z[z > thresh]
return res
def _z(beta0, beta, X, fit_intercept):
"""Compute z to be passed through non-linearity."""
if fit_intercept:
z = beta0 + np.dot(X, beta)
else:
z = np.dot(X, np.r_[beta0, beta])
return z
def _lmb(distr, beta0, beta, X, eta, fit_intercept=True):
"""Conditional intensity function."""
z = _z(beta0, beta, X, fit_intercept)
return _mu(distr, z, eta, fit_intercept)
def _mu(distr, z, eta, fit_intercept):
"""The non-linearity (inverse link)."""
if distr in ['softplus', 'gamma', 'neg-binomial']:
mu = softplus(z)
elif distr == 'poisson':
mu = z.copy()
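        # Linearize the exponential above ``eta`` with its first-order Taylor
        # expansion around eta:
        #   exp(z) ~= exp(eta) + exp(eta) * (z - eta)
        #          = z * exp(eta) + (1 - eta) * exp(eta)
        # which keeps mu finite for large z.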
beta0 = (1 - eta) * np.exp(eta) if fit_intercept else 0.
mu[z > eta] = z[z > eta] * np.exp(eta) + beta0
mu[z <= eta] = np.exp(z[z <= eta])
elif distr == 'gaussian':
mu = z
elif distr == 'binomial':
mu = expit(z)
elif distr == 'probit':
mu = norm.cdf(z)
return mu
def _grad_mu(distr, z, eta):
"""Derivative of the non-linearity."""
if distr in ['softplus', 'gamma', 'neg-binomial']:
grad_mu = expit(z)
elif distr == 'poisson':
grad_mu = z.copy()
grad_mu[z > eta] = np.ones_like(z)[z > eta] * np.exp(eta)
grad_mu[z <= eta] = np.exp(z[z <= eta])
elif distr == 'gaussian':
grad_mu = np.ones_like(z)
elif distr == 'binomial':
grad_mu = expit(z) * (1 - expit(z))
    elif distr == 'probit':
grad_mu = norm.pdf(z)
return grad_mu
def _logL(distr, y, y_hat, z=None, theta=1.0):
"""The log likelihood."""
if distr in ['softplus', 'poisson']:
eps = np.spacing(1)
logL = np.sum(y * np.log(y_hat + eps) - y_hat)
elif distr == 'gaussian':
logL = -0.5 * np.sum((y - y_hat)**2)
elif distr == 'binomial':
# prevents underflow
if z is not None:
logL = np.sum(y * z - np.log(1 + np.exp(z)))
# for scoring
else:
logL = np.sum(y * np.log(y_hat) + (1 - y) * np.log(1 - y_hat))
elif distr == 'probit':
if z is not None:
pdfz, cdfz = norm.pdf(z), norm.cdf(z)
logL = np.sum(y * _probit_g1(z, pdfz, cdfz) +
(1 - y) * _probit_g2(z, pdfz, cdfz))
else:
logL = np.sum(y * np.log(y_hat) + (1 - y) * np.log(1 - y_hat))
elif distr == 'gamma':
# see
# https://www.statistics.ma.tum.de/fileadmin/w00bdb/www/czado/lec8.pdf
nu = 1. # shape parameter, exponential for now
logL = np.sum(nu * (-y / y_hat - np.log(y_hat)))
elif distr == 'neg-binomial':
logL = np.sum(loggamma(y + theta) - loggamma(theta) - loggamma(y + 1) +
theta * np.log(theta) + y * np.log(y_hat) - (theta + y) *
np.log(y_hat + theta))
return logL
def _penalty(alpha, beta, Tau, group):
"""The penalty."""
# Combine L1 and L2 penalty terms
P = 0.5 * (1 - alpha) * _L2penalty(beta, Tau) + \
alpha * _L1penalty(beta, group)
return P
def _L2penalty(beta, Tau):
"""The L2 penalty."""
# Compute the L2 penalty
if Tau is None:
        # Ridge-like penalty
L2penalty = np.linalg.norm(beta, 2) ** 2
else:
# Tikhonov penalty
if (Tau.shape[0] != beta.shape[0] or
Tau.shape[1] != beta.shape[0]):
raise ValueError('Tau should be (n_features x n_features)')
else:
L2penalty = np.linalg.norm(np.dot(Tau, beta), 2) ** 2
return L2penalty
def _L1penalty(beta, group=None):
"""The L1 penalty."""
# Compute the L1 penalty
if group is None:
# Lasso-like penalty
L1penalty = np.linalg.norm(beta, 1)
else:
# Group sparsity case: apply group sparsity operator
group_ids = np.unique(group)
L1penalty = 0.0
for group_id in group_ids:
if group_id != 0:
L1penalty += \
np.linalg.norm(beta[group == group_id], 2)
L1penalty += np.linalg.norm(beta[group == 0], 1)
return L1penalty
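# Worked example (hypothetical numbers): with beta = [1.0, -2.0, 3.0, 0.5] and
# group = [1, 1, 2, 0], _L1penalty returns
#   ||[1.0, -2.0]||_2 + ||[3.0]||_2 + |0.5| = sqrt(5) + 3 + 0.5 ~= 5.736,
# i.e. an L2 norm per non-zero group plus a plain lasso term for group 0.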
def _loss(distr, alpha, Tau, reg_lambda, X, y, eta, theta, group, beta,
fit_intercept=True):
"""Define the objective function for elastic net."""
n_samples, n_features = X.shape
z = _z(beta[0], beta[1:], X, fit_intercept)
y_hat = _mu(distr, z, eta, fit_intercept)
L = 1. / n_samples * _logL(distr, y, y_hat, z, theta)
if fit_intercept:
P = _penalty(alpha, beta[1:], Tau, group)
else:
P = _penalty(alpha, beta, Tau, group)
J = -L + reg_lambda * P
return J
def _L2loss(distr, alpha, Tau, reg_lambda, X, y, eta, theta, group, beta,
fit_intercept=True):
"""Define the objective function for elastic net."""
n_samples, n_features = X.shape
z = _z(beta[0], beta[1:], X, fit_intercept)
y_hat = _mu(distr, z, eta, fit_intercept)
L = 1. / n_samples * _logL(distr, y, y_hat, z, theta)
if fit_intercept:
P = 0.5 * (1 - alpha) * _L2penalty(beta[1:], Tau)
else:
P = 0.5 * (1 - alpha) * _L2penalty(beta, Tau)
J = -L + reg_lambda * P
return J
def _grad_L2loss(distr, alpha, Tau, reg_lambda, X, y, eta, theta, beta,
fit_intercept=True):
"""The gradient."""
n_samples, n_features = X.shape
    n_samples = float(n_samples)
if Tau is None:
if fit_intercept:
Tau = np.eye(beta[1:].shape[0])
else:
Tau = np.eye(beta.shape[0])
InvCov = np.dot(Tau.T, Tau)
z = _z(beta[0], beta[1:], X, fit_intercept)
mu = _mu(distr, z, eta, fit_intercept)
grad_mu = _grad_mu(distr, z, eta)
grad_beta0 = 0.
if distr in ['poisson', 'softplus']:
if fit_intercept:
grad_beta0 = np.sum(grad_mu) - np.sum(y * grad_mu / mu)
grad_beta = ((np.dot(grad_mu.T, X) -
np.dot((y * grad_mu / mu).T, X)).T)
elif distr == 'gaussian':
if fit_intercept:
grad_beta0 = np.sum((mu - y) * grad_mu)
grad_beta = np.dot((mu - y).T, X * grad_mu[:, None]).T
elif distr == 'binomial':
if fit_intercept:
grad_beta0 = np.sum(mu - y)
grad_beta = np.dot((mu - y).T, X).T
elif distr == 'probit':
grad_logl = (y * _probit_g3(z, grad_mu, mu) -
(1 - y) * _probit_g4(z, grad_mu, mu))
if fit_intercept:
grad_beta0 = -np.sum(grad_logl)
grad_beta = -np.dot(grad_logl.T, X).T
elif distr == 'gamma':
nu = 1.
grad_logl = (y / mu ** 2 - 1 / mu) * grad_mu
if fit_intercept:
grad_beta0 = -nu * np.sum(grad_logl)
grad_beta = -nu * np.dot(grad_logl.T, X).T
elif distr == 'neg-binomial':
partial_beta_0 = grad_mu * ((theta + y) / (mu + theta) - y / mu)
if fit_intercept:
grad_beta0 = np.sum(partial_beta_0)
grad_beta = np.dot(partial_beta_0.T, X)
grad_beta0 *= 1. / n_samples
grad_beta *= 1. / n_samples
if fit_intercept:
grad_beta += reg_lambda * (1 - alpha) * np.dot(InvCov, beta[1:])
g = np.zeros((n_features + 1, ))
g[0] = grad_beta0
g[1:] = grad_beta
else:
grad_beta += reg_lambda * (1 - alpha) * np.dot(InvCov, beta)
g = grad_beta
return g
def _gradhess_logloss_1d(distr, xk, y, z, eta, theta, fit_intercept=True):
"""
Compute gradient (1st derivative)
and Hessian (2nd derivative)
of log likelihood for a single coordinate.
    Parameters
    ----------
    xk: array
        (n_samples,) column of the design matrix for this coordinate
    y: array
        (n_samples,) target values
    z: array
        (n_samples,) current linear predictor
    Returns
    -------
    gk: float
        gradient of the loss for this coordinate
    hk: float
        second derivative of the loss for this coordinate
    """
n_samples = xk.shape[0]
if distr == 'softplus':
mu = _mu(distr, z, eta, fit_intercept)
s = expit(z)
gk = np.sum(s * xk) - np.sum(y * s / mu * xk)
grad_s = s * (1 - s)
grad_s_by_mu = grad_s / mu - s / (mu ** 2)
hk = np.sum(grad_s * xk ** 2) - np.sum(y * grad_s_by_mu * xk ** 2)
elif distr == 'poisson':
mu = _mu(distr, z, eta, fit_intercept)
s = expit(z)
gk = np.sum((mu[z <= eta] - y[z <= eta]) *
xk[z <= eta]) + \
np.exp(eta) * \
np.sum((1 - y[z > eta] / mu[z > eta]) *
xk[z > eta])
hk = np.sum(mu[z <= eta] * xk[z <= eta] ** 2) + \
np.exp(eta) ** 2 * \
np.sum(y[z > eta] / (mu[z > eta] ** 2) *
(xk[z > eta] ** 2))
elif distr == 'gaussian':
gk = np.sum((z - y) * xk)
hk = np.sum(xk * xk)
elif distr == 'binomial':
mu = _mu(distr, z, eta, fit_intercept)
gk = np.sum((mu - y) * xk)
hk = np.sum(mu * (1.0 - mu) * xk * xk)
elif distr == 'probit':
pdfz = norm.pdf(z)
cdfz = norm.cdf(z)
gk = -np.sum((y * _probit_g3(z, pdfz, cdfz) -
(1 - y) * _probit_g4(z, pdfz, cdfz)) * xk)
hk = np.sum((y * _probit_g5(z, pdfz, cdfz) +
(1 - y) * _probit_g6(z, pdfz, cdfz)) * (xk * xk))
elif distr == 'neg-binomial':
mu = _mu(distr, z, eta, fit_intercept)
grad_mu = _grad_mu(distr, z, eta)
hess_mu = np.exp(-z) * expit(z)**2
gradient_beta_j = -grad_mu * (y / mu - (y + theta) / (mu + theta))
partial_beta_0_1 = hess_mu * (y / mu - (y + theta) / (mu + theta))
partial_beta_0_2 = grad_mu**2 * \
((y + theta) / (mu + theta)**2 - y / mu**2)
partial_beta_0 = -(partial_beta_0_1 + partial_beta_0_2)
gk = np.dot(gradient_beta_j.T, xk)
hk = np.dot(partial_beta_0.T, xk**2)
elif distr == 'gamma':
raise NotImplementedError('cdfast is not implemented for Gamma '
'distribution')
return 1. / n_samples * gk, 1. / n_samples * hk
def simulate_glm(distr, beta0, beta, X, eta=2.0, random_state=None,
sample=False, theta=1.0, fit_intercept=True):
"""Simulate target data under a generative model.
Parameters
----------
distr: str
distribution
beta0: float
intercept coefficient
beta: array
coefficients of shape (n_features,)
X: array
design matrix of shape (n_samples, n_features)
eta: float
parameter for poisson non-linearity
random_state: float
random state
sample: bool
If True, sample from distribution. Otherwise, return
conditional intensity function
Returns
-------
y: array
simulated target data of shape (n_samples,)
"""
if distr not in ALLOWED_DISTRS:
raise ValueError("'distr' must be in %s, got %s"
% (repr(ALLOWED_DISTRS), distr))
if not isinstance(beta0, float):
raise ValueError("'beta0' must be float, got %s" % type(beta0))
if beta.ndim != 1:
raise ValueError("'beta' must be 1D, got %dD" % beta.ndim)
if not sample:
return _lmb(distr, beta0, beta, X, eta, fit_intercept=fit_intercept)
_random_state = check_random_state(random_state)
if distr == 'softplus' or distr == 'poisson':
y = _random_state.poisson(
_lmb(distr, beta0, beta, X, eta,
fit_intercept=fit_intercept))
if distr == 'gaussian':
y = _random_state.normal(
_lmb(distr, beta0, beta, X, eta,
fit_intercept=fit_intercept))
if distr == 'binomial' or distr == 'probit':
y = _random_state.binomial(
1,
_lmb(distr, beta0, beta, X, eta,
fit_intercept=fit_intercept))
if distr == 'gamma':
mu = _lmb(distr, beta0, beta, X, eta, fit_intercept=fit_intercept)
y = np.exp(mu)
if distr == 'neg-binomial':
mu = _lmb(distr, beta0, beta, X, eta, fit_intercept=fit_intercept)
p = theta / (theta + mu) # Probability of success
y = _random_state.negative_binomial(theta, p)
return y
class GLM(BaseEstimator):
"""Class for estimating regularized generalized linear models (GLM).
The regularized GLM minimizes the penalized negative log likelihood:
.. math::
\\min_{\\beta_0, \\beta} \\frac{1}{N}
\\sum_{i = 1}^N \\mathcal{L} (y_i, \\beta_0 + \\beta^T x_i)
+ \\lambda [ \\frac{1}{2}(1 - \\alpha) \\mathcal{P}_2 +
\\alpha \\mathcal{P}_1 ]
where :math:`\\mathcal{P}_2` and :math:`\\mathcal{P}_1` are the generalized
L2 (Tikhonov) and generalized L1 (Group Lasso) penalties, given by:
.. math::
\\mathcal{P}_2 = \\|\\Gamma \\beta \\|_2^2 \\
\\mathcal{P}_1 = \\sum_g \\|\\beta_{j,g}\\|_2
where :math:`\\Gamma` is the Tikhonov matrix: a square factorization
of the inverse covariance matrix and :math:`\\beta_{j,g}` is the
:math:`j` th coefficient of group :math:`g`.
The generalized L2 penalty defaults to the ridge penalty when
:math:`\\Gamma` is identity.
The generalized L1 penalty defaults to the lasso penalty when each
:math:`\\beta` belongs to its own group.
Parameters
----------
distr: str
distribution family can be one of the following
'gaussian' | 'binomial' | 'poisson' | 'softplus'
| 'probit' | 'gamma' | 'neg-binomial'
default: 'poisson'.
alpha: float
the weighting between L1 penalty (alpha=1.) and L2 penalty (alpha=0.)
term of the loss function.
default: 0.5
Tau: array | None
the (n_features, n_features) Tikhonov matrix.
default: None, in which case Tau is identity
and the L2 penalty is ridge-like
group: array | list | None
the (n_features, )
list or array of group identities for each parameter :math:`\\beta`.
Each entry of the list/ array should contain an int from 1 to n_groups
that specify group membership for each parameter
(except :math:`\\beta_0`).
If you do not want to specify a group for a specific parameter,
set it to zero.
default: None, in which case it defaults to L1 regularization
reg_lambda: float
regularization parameter :math:`\\lambda` of penalty term.
default: 0.1
solver: str
optimization method, can be one of the following
'batch-gradient' (vanilla batch gradient descent)
'cdfast' (Newton coordinate gradient descent).
default: 'batch-gradient'
learning_rate: float
learning rate for gradient descent.
default: 2e-1
max_iter: int
maximum number of iterations for the solver.
default: 1000
tol: float
convergence threshold or stopping criteria.
Optimization loop will stop when relative change
in parameter norm is below the threshold.
default: 1e-6
eta: float
a threshold parameter that linearizes the exp() function above eta.
default: 2.0
score_metric: str
specifies the scoring metric.
one of either 'deviance' or 'pseudo_R2'.
default: 'deviance'
fit_intercept: boolean
specifies if a constant (a.k.a. bias or intercept) should be
added to the decision function.
default: True
theta: float
shape parameter of the negative binomial distribution (number of
successes before the first failure). It is used only if distr is
equal to neg-binomial, otherwise it is ignored.
default: 1.0
random_state : int
seed of the random number generator used to initialize the solution.
default: 0
verbose: boolean or int
default: False
Attributes
----------
    beta0_: float
        The intercept
beta_: array, (n_features)
The learned betas
n_iter_: int
The number of iterations
is_fitted_: bool
if True, the model is previously fitted
Examples
--------
>>> import numpy as np
>>> random_state = 1
>>> n_samples, n_features = 100, 4
>>> rng = np.random.RandomState(random_state)
>>> X = rng.normal(0, 1, (n_samples, n_features))
>>> y = 2.2 * X[:, 0] -1.0 * X[:, 1] + 0.3 * X[:, 3] + 1.0
>>> glm = GLM(distr='gaussian', verbose=False, random_state=random_state)
>>> glm = glm.fit(X, y)
>>> glm.beta0_ # The intercept
1.005380485553247
>>> glm.beta_ # The coefficients
array([ 1.90216711, -0.78782533, -0. , 0.03227455])
>>> y_pred = glm.predict(X)
Reference
---------
Friedman, Hastie, Tibshirani (2010). Regularization Paths for Generalized
Linear Models via Coordinate Descent, J Statistical Software.
https://core.ac.uk/download/files/153/6287975.pdf
"""
def __init__(self, distr='poisson', alpha=0.5,
Tau=None, group=None,
reg_lambda=0.1,
solver='batch-gradient',
learning_rate=2e-1, max_iter=1000,
tol=1e-6, eta=2.0, score_metric='deviance',
fit_intercept=True,
random_state=0, theta=1.0, callback=None, verbose=False):
_check_params(distr=distr, max_iter=max_iter,
fit_intercept=fit_intercept)
self.distr = distr
self.alpha = alpha
self.reg_lambda = reg_lambda
self.Tau = Tau
self.group = group
self.solver = solver
self.learning_rate = learning_rate
self.max_iter = max_iter
self.tol = tol
self.eta = eta
self.score_metric = score_metric
self.fit_intercept = fit_intercept
self.random_state = random_state
self.callback = callback
self.verbose = verbose
self.theta = theta
set_log_level(verbose)
def _set_cv(cv, estimator=None, X=None, y=None):
"""Set the default CV depending on whether clf
is classifier/regressor."""
# Detect whether classification or regression
if estimator in ['classifier', 'regressor']:
est_is_classifier = estimator == 'classifier'
else:
est_is_classifier = is_classifier(estimator)
# Setup CV
if check_version('sklearn', '0.18'):
from sklearn import model_selection as models
from sklearn.model_selection import (check_cv,
StratifiedKFold, KFold)
        if isinstance(cv, (int, np.integer)):
XFold = StratifiedKFold if est_is_classifier else KFold
cv = XFold(n_splits=cv)
elif isinstance(cv, str):
if not hasattr(models, cv):
raise ValueError('Unknown cross-validation')
cv = getattr(models, cv)
cv = cv()
cv = check_cv(cv=cv, y=y, classifier=est_is_classifier)
else:
from sklearn import cross_validation as models
from sklearn.cross_validation import (check_cv,
StratifiedKFold, KFold)
        if isinstance(cv, (int, np.integer)):
if est_is_classifier:
cv = StratifiedKFold(y=y, n_folds=cv)
else:
cv = KFold(n=len(y), n_folds=cv)
elif isinstance(cv, str):
if not hasattr(models, cv):
raise ValueError('Unknown cross-validation')
cv = getattr(models, cv)
if cv.__name__ not in ['KFold', 'LeaveOneOut']:
raise NotImplementedError('CV cannot be defined with str'
' for sklearn < .017.')
cv = cv(len(y))
cv = check_cv(cv=cv, X=X, y=y, classifier=est_is_classifier)
# Extract train and test set to retrieve them at predict time
if hasattr(cv, 'split'):
cv_splits = [(train, test) for train, test in
cv.split(X=np.zeros_like(y), y=y)]
else:
# XXX support sklearn.cross_validation cv
cv_splits = [(train, test) for train, test in cv]
if not np.all([len(train) for train, _ in cv_splits]):
raise ValueError('Some folds do not have any train epochs.')
return cv, cv_splits
def __repr__(self):
"""Description of the object."""
reg_lambda = self.reg_lambda
s = '<\nDistribution | %s' % self.distr
s += '\nalpha | %0.2f' % self.alpha
s += '\nmax_iter | %0.2f' % self.max_iter
s += '\nlambda: %0.2f\n>' % reg_lambda
return s
def copy(self):
"""Return a copy of the object.
Parameters
----------
none
Returns
-------
self: instance of GLM
A copy of the GLM instance.
"""
return deepcopy(self)
def _prox(self, beta, thresh):
"""Proximal operator."""
if self.group is None:
# The default case: soft thresholding
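            # i.e. prox_t(beta) = sign(beta) * max(|beta| - t, 0), elementwise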
return np.sign(beta) * (np.abs(beta) - thresh) * \
(np.abs(beta) > thresh)
else:
# Group sparsity case: apply group sparsity operator
group_ids = np.unique(self.group)
group_norms = np.abs(beta)
for group_id in group_ids:
if group_id != 0:
group_norms[self.group == group_id] = \
np.linalg.norm(beta[self.group == group_id], 2)
nzero_norms = group_norms > 0.0
over_thresh = group_norms > thresh
idxs_to_update = nzero_norms & over_thresh
result = beta
result[idxs_to_update] = (beta[idxs_to_update] -
thresh * beta[idxs_to_update] /
group_norms[idxs_to_update])
result[~idxs_to_update] = 0.0
return result
def _cdfast(self, X, y, ActiveSet, beta, rl, fit_intercept=True):
"""
Perform one cycle of Newton updates for all coordinates.
Parameters
----------
X: array
n_samples x n_features
The input data
y: array
Labels to the data
n_samples x 1
ActiveSet: array
n_features + 1 x 1, or n_features
Active set storing which betas are non-zero
beta: array
n_features + 1 x 1, or n_features
Parameters to be updated
rl: float
Regularization lambda
Returns
-------
beta: array
(n_features + 1) x 1, or (n_features)
Updated parameters
"""
n_samples, n_features = X.shape
reg_scale = rl * (1 - self.alpha)
z = _z(beta[0], beta[1:], X, fit_intercept)
for k in range(0, n_features + int(fit_intercept)):
# Only update parameters in active set
if ActiveSet[k] != 0:
if fit_intercept:
if k == 0:
xk = np.ones((n_samples, ))
else:
xk = X[:, k - 1]
else:
xk = X[:, k]
# Calculate grad and hess of log likelihood term
gk, hk = _gradhess_logloss_1d(self.distr, xk, y, z, self.eta,
self.theta, fit_intercept)
# Add grad and hess of regularization term
if self.Tau is None:
if k == 0 and fit_intercept:
gk_reg, hk_reg = 0.0, 0.0
else:
gk_reg, hk_reg = beta[k], 1.0
else:
InvCov = np.dot(self.Tau.T, self.Tau)
if fit_intercept:
gk_reg = np.sum(InvCov[k - 1, :] * beta[1:])
hk_reg = InvCov[k - 1, k - 1]
else:
gk_reg = np.sum(InvCov[k, :] * beta)
hk_reg = InvCov[k, k]
gk += reg_scale * gk_reg
hk += reg_scale * hk_reg
# Ensure that update does not blow up if Hessian is small
update = 1. / hk * gk if hk > 1. else self.learning_rate * gk
# Update parameters, z
beta[k], z = beta[k] - update, z - update * xk
return beta
def fit(self, X, y):
"""The fit function.
Parameters
----------
X: array
The 2D input data of shape (n_samples, n_features)
y: array
The 1D target data of shape (n_samples,)
Returns
-------
self: instance of GLM
The fitted model.
"""
X, y = check_X_y(X, y, accept_sparse=False)
self.beta0_ = None
self.beta_ = None
self.ynull_ = None
self.n_iter_ = 0
self.random_state_ = check_random_state(self.random_state)
# checks for group
if self.group is not None:
self.group = np.array(self.group)
self.group = self.group.astype(np.int64)
# shape check
if self.group.shape[0] != X.shape[1]:
raise ValueError('group should be (n_features,)')
# int check
if not np.all([isinstance(g, np.int64) for g in self.group]):
raise ValueError('all entries of group should be integers')
# type check for data
if not (isinstance(X, np.ndarray) and isinstance(y, np.ndarray)):
msg = ("Input must be ndarray. Got {} and {}"
.format(type(X), type(y)))
raise ValueError(msg)
if X.ndim != 2:
raise ValueError("X must be a 2D array, got %sD" % X.ndim)
if y.ndim != 1:
raise ValueError("y must be 1D, got %sD" % y.ndim)
n_observations, n_features = X.shape
if n_observations != len(y):
raise ValueError('Shape mismatch.' +
'X has {} observations, y has {}.'
.format(n_observations, len(y)))
# Initialize parameters
beta = np.zeros((n_features + int(self.fit_intercept),))
if self.fit_intercept:
if self.beta0_ is None and self.beta_ is None:
beta[0] = 1 / (n_features + 1) * \
self.random_state_.normal(0.0, 1.0, 1)
beta[1:] = 1 / (n_features + 1) * \
self.random_state_.normal(0.0, 1.0, (n_features, ))
else:
beta[0] = self.beta0_
beta[1:] = self.beta_
else:
if self.beta0_ is None and self.beta_ is None:
beta = 1 / (n_features + 1) * \
self.random_state_.normal(0.0, 1.0, (n_features, ))
else:
beta = self.beta_
_tqdm_log('Lambda: %6.4f' % self.reg_lambda)
tol = self.tol
alpha = self.alpha
reg_lambda = self.reg_lambda
if self.solver == 'cdfast':
# init active set
ActiveSet = np.ones_like(beta)
self._convergence = list()
training_iterations = _verbose_iterable(range(self.max_iter))
# Iterative updates
for t in training_iterations:
self.n_iter_ += 1
beta_old = beta.copy()
if self.solver == 'batch-gradient':
grad = _grad_L2loss(self.distr,
alpha, self.Tau,
reg_lambda, X, y, self.eta,
self.theta, beta, self.fit_intercept)
# Update
beta = beta - self.learning_rate * grad
elif self.solver == 'cdfast':
beta = \
self._cdfast(X, y, ActiveSet, beta, reg_lambda,
self.fit_intercept)
else:
raise ValueError("solver must be one of "
"'('batch-gradient', 'cdfast'), got %s."
% (self.solver))
# Apply proximal operator
if self.fit_intercept:
beta[1:] = self._prox(beta[1:],
self.learning_rate * reg_lambda * alpha)
else:
beta = self._prox(beta,
self.learning_rate * reg_lambda * alpha)
# Update active set
if self.solver == 'cdfast':
ActiveSet[beta == 0] = 0
if self.fit_intercept:
ActiveSet[0] = 1.
# Convergence by relative parameter change tolerance
norm_update = np.linalg.norm(beta - beta_old)
norm_update /= np.linalg.norm(beta)
self._convergence.append(norm_update)
if t > 1 and self._convergence[-1] < tol:
msg = ('\tParameter update tolerance. ' +
'Converged in {0:d} iterations'.format(t))
_tqdm_log(msg)
break
# Compute and save loss if callbacks are requested
if callable(self.callback):
self.callback(beta)
if self.n_iter_ == self.max_iter:
warnings.warn(
"Reached max number of iterations without convergence.")
# Update the estimated variables
if self.fit_intercept:
self.beta0_ = beta[0]
self.beta_ = beta[1:]
else:
self.beta0_ = 0
self.beta_ = beta
self.ynull_ = np.mean(y)
self.is_fitted_ = True
return self
def plot_convergence(self, ax=None, show=True):
"""Plot convergence.
Parameters
----------
ax : matplotlib.pyplot.axes object
If not None, plot in this axis.
show : bool
If True, call plt.show()
Returns
-------
fig : matplotlib.Figure
The matplotlib figure handle
"""
import matplotlib.pyplot as plt
if ax is None:
fig, ax = plt.subplots(1, 1)
ax.semilogy(self._convergence)
ax.set_xlim((-20, self.max_iter + 20))
ax.axhline(self.tol, linestyle='--', color='r', label='tol')
ax.set_ylabel(r'$\Vert\beta_{t} - \beta_{t-1}\Vert/\Vert\beta_t\Vert$')
ax.legend()
if show:
plt.show()
return ax.get_figure()
def predict(self, X):
"""Predict targets.
Parameters
----------
X: array
Input data for prediction, of shape (n_samples, n_features)
Returns
-------
yhat: array
The predicted targets of shape (n_samples,)
"""
X = check_array(X, accept_sparse=False)
check_is_fitted(self, 'is_fitted_')
if not isinstance(X, np.ndarray):
raise ValueError('Input data should be of type ndarray (got %s).'
% type(X))
yhat = _lmb(self.distr, self.beta0_, self.beta_, X, self.eta,
fit_intercept=True)
if self.distr == 'binomial':
yhat = (yhat > 0.5).astype(int)
yhat = np.asarray(yhat)
return yhat
def _predict_proba(self, X):
"""Predict class probability for a binomial or probit distribution.
Parameters
----------
X: array
Input data for prediction, of shape (n_samples, n_features)
Returns
-------
yhat: array
The predicted targets of shape (n_samples,).
"""
        if self.distr not in ['binomial', 'probit']:
            raise ValueError('This is only applicable for the binomial '
                             'or probit distributions.')
if not isinstance(X, np.ndarray):
raise ValueError('Input data should be of type ndarray (got %s).'
% type(X))
yhat = _lmb(self.distr, self.beta0_, self.beta_, X, self.eta,
fit_intercept=True)
yhat = np.asarray(yhat)
return yhat
def predict_proba(self, X):
"""Predict class probability for a binomial or probit distribution.
Parameters
----------
X: array
Input data for prediction, of shape (n_samples, n_features)
Returns
-------
        yhat: array
            The predicted class probabilities of shape (n_samples, 2).
        Notes
        -----
        Only defined for the binomial and probit distributions. Otherwise a
        warning is issued and the output of `predict` is returned.
"""
X = check_array(X, accept_sparse=False)
check_is_fitted(self, 'is_fitted_')
if self.distr in ['binomial', 'probit']:
return np.stack((1-self._predict_proba(X), self._predict_proba(X)), axis=1)
else:
            warnings.warn('This is only applicable for the binomial or '
                          'probit distributions. Returning the output of '
                          'predict instead.')
return self.predict(X)
def fit_predict(self, X, y):
"""Fit the model and predict on the same data.
Parameters
----------
X: array
The input data to fit and predict,
of shape (n_samples, n_features)
Returns
-------
yhat: array
The predicted targets of shape (n_samples,).
"""
return self.fit(X, y).predict(X)
def score(self, X, y):
"""Score the model.
Parameters
----------
X: array
The input data whose prediction will be scored,
of shape (n_samples, n_features).
y: array
The true targets against which to score the predicted targets,
of shape (n_samples,).
Returns
-------
score: float
The score metric
"""
check_is_fitted(self, 'is_fitted_')
from . import metrics
valid_metrics = ['deviance', 'pseudo_R2', 'accuracy']
if self.score_metric not in valid_metrics:
raise ValueError("score_metric has to be one of: "
",".join(valid_metrics))
# If the model has not been fit it cannot be scored
if not hasattr(self, 'ynull_'):
raise ValueError('Model must be fit before ' +
'prediction can be scored')
# For f1 as well
if self.score_metric in ['accuracy']:
if self.distr not in ['binomial', 'multinomial']:
raise ValueError(self.score_metric +
' is only defined for binomial ' +
'or multinomial distributions')
y = np.asarray(y).ravel()
if self.distr in ['binomial', 'probit'] and \
self.score_metric != 'accuracy':
yhat = self.predict_proba(X)
else:
yhat = self.predict(X)
# Check whether we have a list of estimators or a single estimator
if self.score_metric == 'deviance':
return metrics.deviance(y, yhat, self.distr, self.theta)
elif self.score_metric == 'pseudo_R2':
return metrics.pseudo_R2(X, y, yhat, self.ynull_,
self.distr, self.theta)
if self.score_metric == 'accuracy':
return metrics.accuracy(y, yhat)
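def _example_gaussian_fit():
    # Minimal usage sketch, not part of the library API: simulate a Gaussian
    # GLM with known weights and recover them with an elastic-net fit. The
    # helper name and the numbers below are illustrative assumptions only.
    rng = np.random.RandomState(0)
    n_samples, n_features = 500, 5
    beta0, beta = 0.5, rng.normal(0.0, 1.0, n_features)
    X = rng.normal(0.0, 1.0, (n_samples, n_features))
    # sample targets from the generative model defined above
    y = simulate_glm('gaussian', beta0, beta, X, sample=True,
                     random_state=rng)
    glm = GLM(distr='gaussian', alpha=0.05, reg_lambda=0.01)
    glm.fit(X, y)
    return glm.beta0_, glm.beta_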
class GLMCV(object):
"""Class for estimating regularized generalized linear models (GLM)
along a regularization path with warm restarts.
The regularized GLM minimizes the penalized negative log likelihood:
.. math::
\\min_{\\beta_0, \\beta} \\frac{1}{N}
\\sum_{i = 1}^N \\mathcal{L} (y_i, \\beta_0 + \\beta^T x_i)
+ \\lambda [ \\frac{1}{2}(1 - \\alpha) \\mathcal{P}_2 +
\\alpha \\mathcal{P}_1 ]
where :math:`\\mathcal{P}_2` and :math:`\\mathcal{P}_1` are the generalized
L2 (Tikhonov) and generalized L1 (Group Lasso) penalties, given by:
.. math::
\\mathcal{P}_2 = \\|\\Gamma \\beta \\|_2^2 \\
\\mathcal{P}_1 = \\sum_g \\|\\beta_{j,g}\\|_2
where :math:`\\Gamma` is the Tikhonov matrix: a square factorization
of the inverse covariance matrix and :math:`\\beta_{j,g}` is the
:math:`j` th coefficient of group :math:`g`.
The generalized L2 penalty defaults to the ridge penalty when
:math:`\\Gamma` is identity.
The generalized L1 penalty defaults to the lasso penalty when each
:math:`\\beta` belongs to its own group.
Parameters
----------
distr: str
distribution family can be one of the following
        'gaussian' | 'binomial' | 'poisson' | 'softplus'
        | 'probit' | 'gamma' | 'neg-binomial'
default: 'poisson'.
alpha: float
the weighting between L1 penalty (alpha=1.) and L2 penalty (alpha=0.)
term of the loss function.
default: 0.5
Tau: array | None
the (n_features, n_features) Tikhonov matrix.
default: None, in which case Tau is identity
and the L2 penalty is ridge-like
group: array | list | None
the (n_features, )
list or array of group identities for each parameter :math:`\\beta`.
Each entry of the list/ array should contain an int from 1 to n_groups
that specify group membership for each parameter
(except :math:`\\beta_0`).
If you do not want to specify a group for a specific parameter,
set it to zero.
default: None, in which case it defaults to L1 regularization
reg_lambda: array | list | None
array of regularized parameters :math:`\\lambda` of penalty term.
default: None, a list of 10 floats spaced logarithmically (base e)
between 0.5 and 0.01.
cv: int
number of cross validation repeats
default: 10
solver: str
optimization method, can be one of the following
'batch-gradient' (vanilla batch gradient descent)
'cdfast' (Newton coordinate gradient descent).
default: 'batch-gradient'
learning_rate: float
learning rate for gradient descent.
default: 2e-1
max_iter: int
maximum number of iterations for the solver.
default: 1000
tol: float
convergence threshold or stopping criteria.
Optimization loop will stop when relative change
in parameter norm is below the threshold.
default: 1e-6
eta: float
a threshold parameter that linearizes the exp() function above eta.
default: 2.0
score_metric: str
specifies the scoring metric.
one of either 'deviance' or 'pseudo_R2'.
default: 'deviance'
fit_intercept: boolean
specifies if a constant (a.k.a. bias or intercept) should be
added to the decision function.
default: True
theta: float
shape parameter of the negative binomial distribution (number of
successes before the first failure). It is used only if distr is
equal to neg-binomial, otherwise it is ignored.
default: 1.0
random_state : int
seed of the random number generator used to initialize the solution.
default: 0
verbose: boolean or int
default: False
Attributes
----------
    beta0_: float
        The intercept
beta_: array, (n_features)
The learned betas
glm_: instance of GLM
The GLM object with best score
reg_lambda_opt_: float
The reg_lambda parameter for best GLM object
n_iter_: int
The number of iterations
Reference
---------
Friedman, Hastie, Tibshirani (2010). Regularization Paths for Generalized
Linear Models via Coordinate Descent, J Statistical Software.
https://core.ac.uk/download/files/153/6287975.pdf
Notes
-----
To select subset of fitted glm models, you can simply do:
glm = glm[1:3]
glm[2].predict(X_test)
"""
def __init__(self, distr='poisson', alpha=0.5,
Tau=None, group=None,
reg_lambda=None, cv=10,
solver='batch-gradient',
learning_rate=2e-1, max_iter=1000,
tol=1e-6, eta=2.0, score_metric='deviance',
fit_intercept=True,
random_state=0, theta=1.0, verbose=False):
if reg_lambda is None:
reg_lambda = np.logspace(np.log(0.5), np.log(0.01), 10,
base=np.exp(1))
if not isinstance(reg_lambda, (list, np.ndarray)):
reg_lambda = [reg_lambda]
_check_params(distr=distr, max_iter=max_iter,
fit_intercept=fit_intercept)
self.distr = distr
self.alpha = alpha
self.reg_lambda = reg_lambda
self.cv = cv
self.Tau = Tau
self.group = group
self.solver = solver
self.learning_rate = learning_rate
self.max_iter = max_iter
self.beta0_ = None
self.beta_ = None
self.reg_lambda_opt_ = None
self.glm_ = None
self.scores_ = None
self.ynull_ = None
self.tol = tol
self.eta = eta
self.theta = theta
self.score_metric = score_metric
self.fit_intercept = fit_intercept
self.random_state = random_state
self.verbose = verbose
set_log_level(verbose)
def __repr__(self):
"""Description of the object."""
reg_lambda = self.reg_lambda
s = '<\nDistribution | %s' % self.distr
s += '\nalpha | %0.2f' % self.alpha
s += '\nmax_iter | %0.2f' % self.max_iter
if len(reg_lambda) > 1:
s += ('\nlambda: %0.2f to %0.2f\n>'
% (reg_lambda[0], reg_lambda[-1]))
else:
s += '\nlambda: %0.2f\n>' % reg_lambda[0]
return s
def copy(self):
"""Return a copy of the object.
Parameters
----------
none
Returns
-------
self: instance of GLM
A copy of the GLM instance.
"""
return deepcopy(self)
def fit(self, X, y):
"""The fit function.
Parameters
----------
X: array
The input data of shape (n_samples, n_features)
y: array
The target data
Returns
-------
self: instance of GLM
The fitted model.
"""
logger.info('Looping through the regularization path')
glms, scores = list(), list()
self.ynull_ = np.mean(y)
        if not isinstance(self.cv, int):
            raise ValueError('cv must be int. We do not support scikit-learn '
                             'cv objects at the moment')
idxs = np.arange(y.shape[0])
np.random.shuffle(idxs)
cv_splits = np.array_split(idxs, self.cv)
cv_training_iterations = _verbose_iterable(self.reg_lambda)
for idx, rl in enumerate(cv_training_iterations):
glm = GLM(distr=self.distr,
alpha=self.alpha,
Tau=self.Tau,
group=self.group,
reg_lambda=0.1,
solver=self.solver,
learning_rate=self.learning_rate,
max_iter=self.max_iter,
tol=self.tol,
eta=self.eta,
theta=self.theta,
score_metric=self.score_metric,
fit_intercept=self.fit_intercept,
random_state=self.random_state,
verbose=self.verbose)
_tqdm_log('Lambda: %6.4f' % rl)
glm.reg_lambda = rl
scores_fold = list()
for fold in range(self.cv):
val = cv_splits[fold]
train = np.setdiff1d(idxs, val)
if idx == 0:
glm.beta0_, glm.beta_ = self.beta0_, self.beta_
else:
glm.beta0_, glm.beta_ = glms[-1].beta0_, glms[-1].beta_
glm.n_iter_ = 0
glm.fit(X[train], y[train])
scores_fold.append(glm.score(X[val], y[val]))
scores.append(np.mean(scores_fold))
if idx == 0:
glm.beta0_, glm.beta_ = self.beta0_, self.beta_
else:
glm.beta0_, glm.beta_ = glms[-1].beta0_, glms[-1].beta_
glm.n_iter_ = 0
glm.fit(X, y)
glms.append(glm)
# Update the estimated variables
if self.score_metric == 'deviance':
opt = np.array(scores).argmin()
elif self.score_metric in ['pseudo_R2', 'accuracy']:
opt = np.array(scores).argmax()
else:
raise ValueError("Unknown score_metric: %s" % (self.score_metric))
self.beta0_, self.beta_ = glms[opt].beta0_, glms[opt].beta_
self.reg_lambda_opt_ = self.reg_lambda[opt]
self.glm_ = glms[opt]
self.scores_ = scores
return self
def predict(self, X):
"""Predict targets.
Parameters
----------
X: array
Input data for prediction, of shape (n_samples, n_features)
Returns
-------
yhat: array
The predicted targets of shape based on the model with optimal
reg_lambda (n_samples,)
"""
return self.glm_.predict(X)
def predict_proba(self, X):
"""Predict class probability for binomial.
Parameters
----------
X: array
Input data for prediction, of shape (n_samples, n_features)
Returns
-------
        yhat: array
            The predicted class probabilities of shape (n_samples, 2).
        Notes
        -----
        Only defined for the binomial and probit distributions; otherwise a
        warning is issued and the output of `predict` is returned.
"""
return self.glm_.predict_proba(X)
def fit_predict(self, X, y):
"""Fit the model and predict on the same data.
Parameters
----------
X: array
The input data to fit and predict,
of shape (n_samples, n_features)
Returns
-------
yhat: array
The predicted targets of shape based on the model with optimal
reg_lambda (n_samples,)
"""
self.fit(X, y)
return self.glm_.predict(X)
def score(self, X, y):
"""Score the model.
Parameters
----------
X: array
The input data whose prediction will be scored,
of shape (n_samples, n_features).
y: array
The true targets against which to score the predicted targets,
of shape (n_samples,).
Returns
-------
score: float
The score metric for the optimal reg_lambda
"""
return self.glm_.score(X, y)
| 33.795302
| 87
| 0.538278
|
1cdc49f2145fbeb7dd4b3a74970da2f4b648975c
| 34,332
|
py
|
Python
|
sbpy/activity/gas/productionrate.py
|
dirac-institute/sbpy
|
9eb0523610f497ba2d068a071aae05ebfd67ed9d
|
[
"BSD-3-Clause"
] | null | null | null |
sbpy/activity/gas/productionrate.py
|
dirac-institute/sbpy
|
9eb0523610f497ba2d068a071aae05ebfd67ed9d
|
[
"BSD-3-Clause"
] | null | null | null |
sbpy/activity/gas/productionrate.py
|
dirac-institute/sbpy
|
9eb0523610f497ba2d068a071aae05ebfd67ed9d
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
===========================
sbpy Production Rate Module
===========================
:author: Giannina Guzman (gguzman2@villanova.edu)
created on June 26, 2019
"""
import tempfile
import numpy as np
import astropy
import astropy.constants as con
import astropy.units as u
from astroquery.jplspec import JPLSpec
from astroquery.lamda import Lamda
from ...bib import register
from ...data import Phys
__all__ = ['LTE', 'NonLTE', 'einstein_coeff',
'intensity_conversion', 'beta_factor', 'total_number',
'from_Haser']
if astropy.__version__ < '5':
__doctest_skip__ = ['LTE.from_Drahus']
def intensity_conversion(mol_data):
"""
Returns conversion of the integrated line intensity at 300 K
(from the JPL molecular spectra catalog) to a chosen temperature
Parameters
----------
mol_data : `~sbpy.data.Phys`
`~sbpy.data.Phys` object that contains the following data,
using `~astropy.units` for the required units:
* Transition frequency in MHz
* Temperature in Kelvins
* Integrated line intensity at 300 K in MHz * nm**2
* Partition function at 300 K (Dimensionless)
* Partition function at designated temperature (Dimensionless)
* Upper state degeneracy (Dimensionless)
* Upper level energy in Joules
* Lower level energy in Joules
* Degrees of freedom (Dimensionless)
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. We recommend the use of the
JPL Molecular Spectral Catalog and the use of
`~sbpy.data.phys.from_jplspec` to obtain these values in order to
maintain consistency and because all calculations can be handled within
`sbpy` scope if JPLSpec is used. Yet, if you wish to use your own
molecular data, it is possible. Make sure to inform yourself on the
values needed for each function, their units, and their interchangeable
keywords as part of the `~sbpy.data.Phys` data class.
Returns
-------
intl : `~astropy.Quantity`
Integrated line intensity at designated temperature in MHz * nm**2,
which can be appended to the original `sbpy.data.Phys` object for
future calculations
References
----------
    Pickett et al. 1998, JQSRT 60, 883-890
"""
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.Phys` instance.')
temp = mol_data['Temperature'][0]
lgint = mol_data['lgint300'][0]
part300 = mol_data['partfn300'][0]
partition = mol_data['partfn'][0]
eup_J = mol_data['eup_j'][0]
elo_J = mol_data['elo_J'][0]
df = mol_data['degfr'][0]
register(intensity_conversion, {'conversion': '1998JQSRT..60..883P'})
k = con.k_B.to('J/K') # Boltzmann constant
if (eup_J - elo_J) < (k * min(temp, 300 * u.K)):
if df in (0, 2):
n = 1
else:
n = 3./2
intl = lgint*(300*u.K/temp)**(n+1)*np.exp(-(1/temp - 1/(300*u.K))
* elo_J/k)
else:
intl = lgint*(part300/partition)*(np.exp(-elo_J/(k*temp)) -
np.exp(-eup_J/(k*temp))) / \
(np.exp(-elo_J/(k*300 * u.K)) - np.exp(-eup_J/(k*300*u.K)))
return intl
def einstein_coeff(mol_data):
"""
Einstein coefficient from molecular data
Parameters
----------
mol_data : `~sbpy.data.phys`
`~sbpy.data.phys` object that contains the following data,
using `astropy.units` for the required units:
* Transition frequency in MHz
* Temperature in Kelvins
* Integrated line intensity at 300 K in MHz * nm**2
* Integrated line intensity at desired temperature
* Partition function at 300 K (Dimensionless)
* Partition function at designated temperature (Dimensionless)
* Upper state degeneracy (Dimensionless)
* Upper level energy in Joules
* Lower level energy in Joules
* Degrees of freedom (Dimensionless)
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. We recommend the use of the
JPL Molecular Spectral Catalog and the use of
`~sbpy.data.phys.from_jplspec` to obtain these values in order to
maintain consistency. Yet, if you wish to use your own molecular data,
it is possible. Make sure to inform yourself on the values needed for
each function, their units, and their interchangeable keywords as part
of the data class.
Returns
-------
einstein_coeff : `~astropy.Quantity`
Spontaneous emission coefficient (1/s), which can be appended
to the original `sbpy.phys` object for future calculations
"""
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.phys` instance.')
temp = mol_data['Temperature'][0]
lgint = mol_data['lgint300'][0]
part300 = mol_data['partfn300'][0]
partition = mol_data['partfn'][0]
eup_J = mol_data['eup_j'][0]
elo_J = mol_data['elo_J'][0]
df = mol_data['degfr'][0]
t_freq = mol_data['t_freq'][0]
gu = mol_data['dgup'][0]
h = con.h.to('J*s') # Planck constant
k = con.k_B.to('J/K') # Boltzmann constant
intl = mol_data['lgint'][0]
    if (h*t_freq/(k*temp)).decompose().value < 1 and \
            (h*t_freq/(k*300*u.K)).decompose().value < 1:
au = (lgint*t_freq
* (part300/gu)*np.exp(eup_J / (k*300*u.K))*(1.748e-9)).value
else:
au = (intl*(t_freq)**2 *
(partition/gu)*(np.exp(-(elo_J/(k*temp)).value) -
np.exp(-(eup_J/(k*temp)).value))**(-1)
* (2.7964e-16)).value
au = au / u.s
return au
def beta_factor(mol_data, ephemobj):
"""
Returns beta factor based on timescales from `~sbpy.activity.gas`
and distance from the Sun using an `~sbpy.data.ephem` object.
The calculation is:
parent photodissociation timescale * (distance from comet to Sun)**2
    If you wish to provide your own beta factor, you can compute it yourself
    (in units of AU**2 * s); all that is needed is the photodissociation
    timescale of the molecule and the distance of the comet from the Sun. Once you
have the beta factor you can append it to your mol_data phys object
with the name 'beta' or any of its alternative names.
Parameters
----------
mol_data : `~sbpy.data.phys`
`sbpy.data.phys` object that contains AT LEAST the following data:
| mol_tag: Molecular identifier (`int` or `str`)
This field can be given by the user directly or found using
`~sbpy.data.phys.from_jplspec`. If the mol_tag is an integer, the
program will assume it is the JPL Spectral Molecular Catalog identifier
of the molecule and will treat it as such. If mol_tag is a string,
then it will be assumed to be the human-readable name of the molecule.
The molecule MUST be defined in `sbpy.activity.gas.timescale`,
otherwise this function cannot be used and the beta factor
will have to be provided by the user directly for calculations. The
user can obtain the beta factor from the formula provided above.
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. We recommend the use of the
JPL Molecular Spectral Catalog and the use of
`~sbpy.data.Phys.from_jplspec` to obtain
these values in order to maintain consistency. Yet, if you wish to
use your own molecular data, it is possible. Make sure to inform
yourself on the values needed for each function, their units, and
their interchangeable keywords as part of the Phys data class.
ephemobj : `~sbpy.data.ephem`
        `sbpy.data.ephem` object holding ephemeris information including
distance from comet to Sun ['r'] and from comet to observer ['delta']
Returns
-------
q : `~astropy.units.Quantity`
Beta factor 'beta', which can be appended
to the original `sbpy.phys` object for future calculations
"""
# imported here to avoid circular dependency with activity.gas
from .core import photo_timescale
from ...data import Ephem
if not isinstance(ephemobj, Ephem):
raise ValueError('ephemobj must be a `sbpy.data.ephem` instance.')
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.phys` instance.')
orb = ephemobj
delta = (orb['delta']).to('m')
r = (orb['r'])
if not isinstance(mol_data['mol_tag'][0], str):
cat = JPLSpec.get_species_table()
mol = cat[cat['TAG'] == mol_data['mol_tag'][0]]
name = mol['NAME'].data[0]
else:
name = mol_data['mol_tag'][0]
timescale = photo_timescale(name)
if timescale.ndim != 0:
# array
timescale = timescale[0]
beta = (timescale) * r**2
return beta
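# Worked example (hypothetical numbers): for a parent photodissociation
# timescale of 5e5 s and a heliocentric distance of 2.8 au, the beta factor
# defined above is 5e5 s * (2.8 au)**2 ~= 3.9e6 AU**2 s, which can then be
# appended to ``mol_data`` under the 'beta' field name.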
def total_number(mol_data, aper, b):
"""
Equation relating number of molecules with column density, the aperture,
and geometry given and accounting for photodissociation, derived from data
provided. Feel free to use your own total number to calculate production
rate or use this function with your own molecular data as long as you are
aware of the needed data.
Parameters
----------
    mol_data : `sbpy.data.phys`
        `sbpy.data.phys` object that contains AT LEAST the following data:
| Transition frequency in MHz
| Einstein Coefficient (1/s)
| Beta: (Timescale (in s) * r^2 (in au))
| Column Density in 1/m^2
The values above can either be given by the user or obtained from the
functions `~sbpy.activity.gas.productionrate.einstein_coeff` and
`~sbpy.activity.gas.productionrate.beta_factor`
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. We recommend the use of the
JPL Molecular Spectral Catalog and the use of
`~sbpy.data.phys.from_jplspec` to obtain
these values in order to maintain consistency. Yet, if you wish to
use your own molecular data, it is possible. Make sure to inform
yourself on the values needed for each function, their units, and
        their interchangeable keywords as part of the Phys data class.
    aper : `~astropy.units.Quantity`
        Telescope aperture in meters.
    b : int
        Dimensionless factor intrinsic to every antenna. See
        `~sbpy.activity.gas.LTE.from_Drahus` for more information.
Returns
-------
total_number: `astropy.units.Quantity`
An astropy Quantity containing the total number of molecules within the
aperture (Dimensionless)
"""
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.phys` instance.')
beta = mol_data['beta'][0]
sigma = (1./2. * beta * b * con.c / (mol_data['t_freq'][0] * aper)).value
tnumber = mol_data['cdensity'][0].decompose() * sigma * u.m**2 / \
np.sqrt(np.log(2))
return tnumber
def from_Haser(coma, mol_data, aper=25 * u.m):
"""
Calculate production rate for `GasComa`
Parameters
----------
coma : `sbpy.activity.gas.GasComa`
Gas coma model for ratio calculation of production rate, the
production rate `Q` that the gas coma model expects should be an
educated first guess. A good way to get this guess would be to
use the function `from_drahus` found under `sbpy.activity.gas.LTE`
The molecule name used for the `parent` argument of the coma model
should be the same name or equivalent JPLSpec identifier used to
calculate the total number of molecules.
mol_data: `sbpy.data.phys`
`sbpy.data.phys` object that contains AT LEAST the following data:
| Total Number of Molecules (See
| `~sbpy.activity.gas.total_number` for a calculation
| of this datum if you don't wish to provide it yourself)
This field can be given by the user directly or calculated using the
necessary combinations of the following functions:
`~sbpy.data.phys.from_jplspec`,
`~sbpy.activity.gas.productionrate.einstein_coeff`,
`~sbpy.activity.gas.productionrate.beta_factor`, and
`~sbpy.activity.gas.productionrate.total_number`.
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. We recommend the use of the
JPL Molecular Spectral Catalog and the use of
`~sbpy.data.Phys.from_jplspec` to obtain
these values in order to maintain consistency. Yet, if you wish to
use your own molecular data, it is possible. Make sure to inform
yourself on the values needed for each function, their units, and
their interchangeable keywords as part of the Phys data class.
aper : `~astropy.units.Quantity`
Telescope aperture in meters. Default is 25 m
Returns
-------
Q : `~astropy.units.Quantity`
production rate
Examples
--------
>>> import astropy.units as u
>>> from astropy.time import Time
>>> from sbpy.data import Ephem, Phys
>>> from sbpy.activity import (Haser, LTE, photo_timescale, einstein_coeff,
... from_Haser)
>>> from sbpy.activity import (intensity_conversion, beta_factor,
... total_number)
>>> aper = 10 * u.m
>>> mol_tag = 28001
>>> temp_estimate = 25. * u.K
>>> target = 'C/2016 R2'
>>> b = 0.74
>>> vgas = 0.5 * u.km / u.s
>>> transition_freq = (230.53799 * u.GHz).to('MHz')
>>> integrated_flux = 0.26 * u.K * u.km / u.s
>>> time = Time('2017-12-22 05:24:20', format = 'iso')
>>> ephemobj = Ephem.from_horizons(target,
... epochs=time) # doctest: +REMOTE_DATA
>>> mol_data = Phys.from_jplspec(temp_estimate, transition_freq,
... mol_tag) # doctest: +REMOTE_DATA
>>> intl = intensity_conversion(mol_data) # doctest: +REMOTE_DATA
>>> mol_data.apply([intl.value] * intl.unit,
... name='intl') # doctest: +REMOTE_DATA
>>> au = einstein_coeff(mol_data) # doctest: +REMOTE_DATA
>>> mol_data.apply([au.value] * au.unit,
... name='eincoeff') # doctest: +REMOTE_DATA
>>> beta = beta_factor(mol_data, ephemobj) # doctest: +REMOTE_DATA
>>> mol_data.apply([beta.value] * beta.unit,
... name='beta') # doctest: +REMOTE_DATA
>>> lte = LTE()
>>> cdensity = lte.cdensity_Bockelee(integrated_flux,
... mol_data) # doctest: +REMOTE_DATA
>>> mol_data.apply([cdensity.value] * cdensity.unit,
... name='cdensity') # doctest: +REMOTE_DATA
>>> tnum = total_number(mol_data, aper, b) # doctest: +REMOTE_DATA
>>> mol_data.apply([tnum.value] * tnum.unit,
... name='total_number') # doctest: +REMOTE_DATA
>>> Q_estimate = 2.8*10**(28) / u.s
>>> parent = photo_timescale('CO') * vgas
>>> coma = Haser(Q_estimate, vgas, parent)
>>> Q = from_Haser(coma, mol_data, aper=aper) # doctest: +REMOTE_DATA
>>> Q # doctest: +REMOTE_DATA +FLOAT_CMP
<Quantity [9.35795579e+27] 1 / s>
References
----------
Haser 1957, Bulletin de la Societe Royale des Sciences de Liege
43, 740.
Newburn and Johnson 1978, Icarus 35, 360-368.
"""
from .core import GasComa
if not isinstance(coma, GasComa):
raise ValueError('coma must be a GasComa instance.')
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.phys` instance.')
register('Spectroscopy', {'Total Number (eq. 15)': '2004come.book..391B'})
# integrated_line = self.integrated_flux(transition_freq) - not yet implemented
molecules = mol_data['total_number']
model_molecules = coma.total_number(aper)
Q = coma.Q * molecules/model_molecules
return Q
class LTE():
""" LTE Methods for calculating production_rate """
def cdensity_Bockelee(self, integrated_flux, mol_data):
"""
Basic equation relating column density with observed integrated flux
without the need for an initial column density to be given.
This is found in equation 10 in
https://ui.adsabs.harvard.edu/abs/2004come.book..391B
and is derived from data from JPLSpec, feel free to use your own column
density to calculate production rate or use this function with your own
molecular data as long as you are aware of the needed data.
Parameters
----------
integrated_flux : `~astropy.units.Quantity`
Integrated flux of emission line.
mol_data : `sbpy.data.phys`
`sbpy.data.phys` object that contains AT LEAST the following data:
| Transition frequency in MHz
| Einstein Coefficient (1/s)
This function will calculate the column
density from Bockelee-Morvan et al. 2004 and append it to the phys
object as 'Column Density' or any of its alternative field names.
The values above can either be given by the user or obtained from
the functions `~sbpy.activity.gas.productionrate.einstein_coeff`
and `~sbpy.activity.gas.productionrate.beta_factor`
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. We recommend the use of
the JPL Molecular Spectral Catalog and the use of
`~sbpy.data.phys.from_jplspec` to obtain
these values in order to maintain consistency. Yet, if you wish to
use your own molecular data, it is possible. Make sure to inform
yourself on the values needed for each function, their units, and
their interchangeable keywords as part of the Phys data class.
Returns
-------
Column Density : `astropy.units.Quantity`
Column density from Bockelee-Morvan et al. 2004 as astropy Quantity
(1/m^2)
"""
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.phys` instance.')
register('Spectroscopy', {
'Total Number (eq. 10)': '2004come.book..391B'})
cdensity = (8*np.pi*con.k_B*mol_data['t_freq'][0]**2 /
(con.h*con.c**3 * mol_data['eincoeff'][0])).decompose()
cdensity *= integrated_flux
return cdensity
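    # The relation implemented above is, in symbols,
    #   N = (8 * pi * k_B * nu**2) / (h * c**3 * A_ul) * integrated_flux,
    # i.e. eq. 10 of Bockelee-Morvan et al. 2004 referenced in the docstring.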
def from_Drahus(self, integrated_flux, mol_data, ephemobj, vgas=1 * u.km/u.s,
aper=25 * u.m, b=1.2):
"""
Returns production rate based on Drahus 2012 model referenced.
Does not include photodissociation, good for first guesses for
more computationally intensive methods or for the Haser model
under `sbpy.activity.gas.productionrate.from_Haser`
Parameters
----------
integrated_flux : `~astropy.units.Quantity`
Line integral derived from spectral data in Kelvins * km/s
mol_data : `sbpy.data.phys`
`sbpy.data.phys` object that contains the following data:
| Transition frequency in MHz
| Temperature in Kelvins
| Partition function at designated temperature (unitless)
| Upper state degeneracy (unitless)
| Upper level energy in Joules
| Degrees of freedom (unitless)
| Einstein Coefficient (1/s)
These fields can be given by the user directly or calculated using
`~sbpy.data.phys.from_jplspec`,
`~sbpy.activity.gas.productionrate.einstein_coeff`,
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. We recommend the use of
the JPL Molecular Spectral Catalog and the use of
`~sbpy.data.Phys.from_jplspec` to obtain
these values in order to maintain consistency. Yet, if you wish to
use your own molecular data, it is possible. Make sure to inform
yourself on the values needed for each function, their units, and
their interchangeable keywords as part of the Phys data class.
ephemobj : `sbpy.data.ephem`
`sbpy.data.ephem` object holding ephemeride information including
distance from comet to Sun ['r'] and from comet to observer
['delta']
vgas : `~astropy.units.Quantity`
Gas velocity approximation in km / s. Default is 1 km / s
aper : `~astropy.units.Quantity`
Telescope aperture in meters. Default is 25 m
        b : int or float
            Dimensionless factor intrinsic to every antenna. A typical
            value is 1.22; the default used by this model is 1.2. See
            references for more information on this parameter.
Returns
-------
q : `~astropy.units.Quantity`
Production rate, not including photodissociation
Examples
--------
>>> import astropy.units as u
>>> from astropy.time import Time
>>> from sbpy.data import Ephem, Phys
>>> from sbpy.activity import LTE, einstein_coeff, intensity_conversion
>>> temp_estimate = 47. * u.K
>>> target = '103P'
>>> vgas = 0.8 * u.km / u.s
>>> aper = 30 * u.m
>>> b = 1.13
>>> mol_tag = 27001
>>> transition_freq = (265.886434 * u.GHz).to('MHz')
>>> integrated_flux = 1.22 * u.K * u.km / u.s
>>> time = Time('2010-11-3 00:48:06', format='iso')
>>> ephemobj = Ephem.from_horizons(
... target, epochs=time, closest_apparition=True,
... id_type='designation') # doctest: +REMOTE_DATA
>>> mol_data = Phys.from_jplspec(temp_estimate, transition_freq,
... mol_tag) # doctest: +REMOTE_DATA
>>> intl = intensity_conversion(mol_data) # doctest: +REMOTE_DATA
>>> mol_data.apply([intl.value] * intl.unit,
... name='intl') # doctest: +REMOTE_DATA
>>> au = einstein_coeff(mol_data) # doctest: +REMOTE_DATA
>>> mol_data.apply([au.value] * au.unit,
... name='eincoeff') # doctest: +REMOTE_DATA
>>> lte = LTE()
>>> q = lte.from_Drahus(integrated_flux, mol_data,
... ephemobj, vgas, aper, b=b) # doctest: +REMOTE_DATA
>>> q # doctest: +REMOTE_DATA +FLOAT_CMP
<MaskedQuantity 1.09899965e+25 1 / s>
References
----------
Drahus et al. September 2012. The Sources of HCN and CH3OH and the
Rotational Temperature in Comet 103P/Hartley 2 from Time-resolved
Millimeter Spectroscopy. The Astrophysical Journal, Volume 756,
Issue 1.
"""
register('Spectroscopy', {'Production Rate (No photodissociation)':
'2012ApJ...756...80D'})
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.phys` instance.')
t_freq = mol_data['t_freq'][0]
temp = mol_data['Temperature'][0]
partition = mol_data['partfn']
gu = mol_data['dgup'][0]
eup_J = mol_data['eup_j'][0]
h = con.h.to('J*s') # Planck constant
k = con.k_B.to('J/K') # Boltzmann constant
c = con.c.to('m/s') # speed of light
vgas = vgas.to('m/s')
au = mol_data['eincoeff'][0]
delta = ephemobj["delta"][0]
calc = ((16*np.pi*k*t_freq.decompose() *
partition*vgas) / (np.sqrt(np.pi*np.log(2))
* h * c**2 * au * gu *
np.exp(-eup_J/(k*temp)))).decompose()
q = integrated_flux*(calc * b * delta / aper)
q = q.to(u.Hz, equivalencies=u.spectral()).decompose()[0]
return q
class NonLTE():
"""
    Class for non-LTE production rate methods.
    Only the column density estimate provided by `from_pyradex` is
    implemented so far; full non-LTE production rate models are not yet
    available.
"""
def from_pyradex(self, integrated_flux, mol_data, line_width=1.0 * u.km / u.s,
escapeProbGeom='lvg', iter=100,
collider_density={'H2': 900*2.2}):
"""
Calculate production rate from the Non-LTE iterative code pyradex
Presently, only the LAMDA catalog is supported by this function.
In the future a function will be provided by sbpy to build your own
molecular data file from JPLSpec for use in this function.
Collider is assumed to be H2O for the default setting since we are only
        considering comet production rates. See the pyradex documentation for
        installation tips.
Parameters
----------
integrated_flux : `~astropy.units.Quantity`
The integrated flux in K * km/s
mol_data : `sbpy.data.phys`
`sbpy.data.phys` object that contains AT LEAST the following data:
| mol_tag: molecule of interest as string or int JPLSpec identifier
| temp: kinetic temperature in gas coma (unit K)
| cdensity : cdensity estimate (can be calculated from cdensity_Bockelee) (unit 1/cm^2)
| temp_back: (optional) background temperature in K (default is 2.730 K)
| lamda_name: (optional) LAMDA molecule identifier to avoid ambiguity. `Lamda.molecule_dict` provides list
Keywords that can be used for these values are found under
`~sbpy.data.Conf.fieldnames` documentation. Make sure to inform
yourself on the values needed for each function, their units, and
their interchangeable keywords as part of the Phys data class.
line_width : `~astropy.units.Quantity`
The FWHM line width (really, the single-zone velocity width to
scale the column density by: this is most sensibly interpreted as a
velocity gradient (dv/length)) in km/s (default is 1.0 km/s)
escapeProbGeom : str
Which escape probability method to use, available choices are
'sphere', 'lvg', and 'slab'
iter : int
            Number of iterations to perform. Default is 100; more iterations
            take longer but sample the range of column density guesses more
            finely. The range is built by taking the column density guess and
            going one order of magnitude below and above it for the start and
            end values of the loop, respectively, i.e. a guess of 1e15
            creates a range between 1e14 and 1e16.
collider_density : dict
Dictionary of colliders and their densities in cm^-3. Allowed
entries are any of the following : h2,oh2,ph2,e,He,H,H+
See `~Pyradex` documentation for more information.
Default dictionary is {'H2' : 900*2.2} where 900 is the
collider density of H2 and 2.2 is the value for the
square root of the ratio of reduced masses of H2O/H2
as follows:
            (mu_H2O/mu_H2)**0.5 = ((18*18/(18+18)) / ((18*2)/(18+2)))**0.5 = (9/1.8)**0.5 ~ 2.2
in order to scale the collisional excitation to H2O as the main
collisional partner. (Walker, et al. 2014; de val Borro, et al.
2017; & Schoier, et al. 2004)
Returns
-------
column density : `~astropy.units.Quantity`
column density to use for the Haser model ratio calculation
Note: it is normal for pyradex/RADEX to output warnings depending
on the setup the user gives it (such as not having found a
molecular data file so it searches for it on LAMDA catalog)
Examples
--------
>>> from sbpy.activity import NonLTE
>>> from sbpy.data import Phys
>>> import astropy.units as u
>>> transition_freq = (177.196 * u.GHz).to(u.MHz)
>>> mol_tag = 29002
>>> cdensity_guess = (1.89*10.**(14) / (u.cm * u.cm))
>>> temp_estimate = 20. * u.K
>>> mol_data = Phys.from_jplspec(temp_estimate, transition_freq,
... mol_tag) # doctest: +REMOTE_DATA
>>> mol_data.apply([cdensity_guess.value] * cdensity_guess.unit,
... name= 'cdensity') # doctest: +REMOTE_DATA
>>> mol_data.apply(['HCO+@xpol'],
... name='lamda_name') # doctest: +REMOTE_DATA
>>> nonLTE = NonLTE()
>>> try: # doctest: +SKIP
... cdensity = nonLTE.from_pyradex(1.234 * u.K * u.km / u.s,
... mol_data, iter=600,
... collider_density={'H2': 900}) # doctest: +REMOTE_DATA
... print(cdensity) # doctest: +REMOTE_DATA
... except ImportError:
... pass
Closest Integrated Flux:[1.24925956] K km / s
Given Integrated Flux: 1.234 K km / s
[1.06363773e+14] 1 / cm2
References
----------
Haser 1957, Bulletin de la Societe Royale des Sciences de Liege
43, 740.
Walker, et al., On the Validity of Collider-mass Scaling for Molecular
Rotational Excitation, APJ, August 2014.
van der Tak, et al., A computer program for fast non-LTE analysis of
interstellar line spectra. With diagnostic plots to interpret observed
line intensity ratios. A&A, February 12 2013.
de Val Borro, et al., Measuring molecular abundances in comet C/2014 Q2
(Lovejoy) using the APEX telescope. Monthly Notices of the Royal
Astronomical Society, October 27 2017.
Schoier, et al., An atomic and molecular database for analysis of
submillimetre line observations. A&A, November 4 2004.
"""
try:
import pyradex
except ImportError:
            raise ImportError(
                'Pyradex not installed. Please see '
                'https://github.com/keflavich/pyradex/blob/master/INSTALL.rst')
if not isinstance(mol_data, Phys):
raise ValueError('mol_data must be a `sbpy.data.phys` instance.')
register('Production Rates', {'Radex': '2007A&A...468..627V'})
# convert mol_tag JPLSpec identifier to verbose name if needed
try:
mol_data['lamda_name']
name = mol_data['lamda_name'][0]
name = name.lower()
except KeyError:
if not isinstance(mol_data['mol_tag'][0], str):
cat = JPLSpec.get_species_table()
mol = cat[cat['TAG'] == mol_data['mol_tag'][0]]
name = mol['NAME'].data[0]
name = name.lower()
else:
name = mol_data['mol_tag'][0]
name = name.lower()
# try various common instances of molecule names and check them against LAMDA before complaining
try:
Lamda.molecule_dict[name]
except KeyError:
try_name = "{}@xpol".format(name)
try:
Lamda.molecule_dict[try_name]
name = try_name
except KeyError:
                print('Molecule name {} not found in LAMDA; the module also '
                      'tried {} and found no molecule with that identifier '
                      'either. Please provide a LAMDA-identifiable name via '
                      'mol_data["lamda_name"]. Use Lamda.molecule_dict to see '
                      'all available options.'.format(name, try_name))
raise
# define Temperature
temp = mol_data['temp']
# check for optional values within mol_data
if 'temp_back' in mol_data:
tbackground = mol_data['temp_back']
else:
tbackground = 2.730 * u.K
# define cdensity and iteration parameters
cdensity = mol_data['cdensity'].to(1 / (u.cm * u.cm))
cdensity_low = cdensity - (cdensity*0.9)
cdensity_high = cdensity + (cdensity*9)
        # build a range of column density guesses spanning roughly an order
        # of magnitude below and above the estimate
cdensity_range = np.linspace(cdensity_low, cdensity_high, iter)
fluxes = []
column_density = []
with tempfile.TemporaryDirectory() as datapath:
for i in cdensity_range:
R = pyradex.Radex(column=i, deltav=line_width,
tbackground=tbackground, species=name,
temperature=temp, datapath=datapath,
escapeProbGeom=escapeProbGeom,
collider_densities=collider_density)
table = R()
# find closest matching frequency to user defined
indx = (np.abs(table['frequency']-mol_data['t_freq'])).argmin()
radexfreq = table['frequency'][indx]
# get table for that frequency
values = table[table['frequency'] == radexfreq]
# use eq in io.f from Pyradex to get integrated flux in K * km/s
int_flux_pyradex = 1.0645 * values['T_B'] * line_width
fluxes.append(int_flux_pyradex)
column_density.append(i)
# closest matching integrated flux from pyradex
fluxes = np.array(fluxes)
index_flux = (
np.abs(fluxes-integrated_flux.to(u.K * u.km / u.s).value)).argmin()
# corresponding column density in 1/cm^2
column_density = column_density[index_flux]
print('Closest Integrated Flux:{}'.format(
fluxes[index_flux] * u.K * u.km / u.s))
print('Given Integrated Flux: {}'.format(integrated_flux))
return column_density
| 39.690173
| 126
| 0.604655
|
c38a6117e901a2a96542b44382fa1b8794d2f037
| 6,384
|
py
|
Python
|
dev/external_subtrees.py
|
dpbaines/vtr-verilog-to-routing
|
cea4b63689db11816027aea23b6690af1c384f2d
|
[
"MIT"
] | 1
|
2020-06-01T19:29:11.000Z
|
2020-06-01T19:29:11.000Z
|
dev/external_subtrees.py
|
dpbaines/vtr-verilog-to-routing
|
cea4b63689db11816027aea23b6690af1c384f2d
|
[
"MIT"
] | 1
|
2020-06-02T21:43:59.000Z
|
2020-06-02T21:55:03.000Z
|
dev/external_subtrees.py
|
dpbaines/vtr-verilog-to-routing
|
cea4b63689db11816027aea23b6690af1c384f2d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import argparse
import os
import xml.etree.ElementTree as ET
from collections import OrderedDict
class ExternalSubtree:
def __init__(self, name, internal_path, external_url, default_external_ref):
self.name = name
self.internal_path = internal_path
self.external_url = external_url
self.default_external_ref = default_external_ref
SCRIPT_DIR=os.path.dirname(os.path.realpath(__file__))
DEFAULT_CONFIG=os.path.join(SCRIPT_DIR, 'subtree_config.xml')
def parse_args():
parser = argparse.ArgumentParser(description="VTR uses external tools/libraries which are developed outside of the VTR source tree. Some of these are included in the VTR source tree using git's subtree feature. This script is used to update git-subtrees in a consistent manner. It must be run from the root of the VTR source tree.")
parser.add_argument("components",
nargs="*",
metavar='COMPONENT',
help="External components to upgrade (See list of components with --list)")
parser.add_argument("--external_ref",
default=None,
help="Specifies the external reference (revision/branch). If unspecified uses the subtree default (usually master).")
parser.add_argument("-m", "--message",
help="Commit comment when updating a component")
exclusive_group = parser.add_mutually_exclusive_group()
exclusive_group.add_argument("--list",
action="store_true",
default=False,
help="List known components")
exclusive_group.add_argument("--update",
action="store_true",
default=False,
help="Update components subtrees")
parser.add_argument("-n", "--dry_run",
action="store_true",
default=False,
help="Show the commands which would be executed, but do not actually perform them.")
parser.add_argument("--subtree_config",
default=DEFAULT_CONFIG,
help="Path to subtree config file (Default: %(default)s)")
args = parser.parse_args()
return args
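# Illustrative invocations (not part of the original script); the component
# name used below is hypothetical. Per the argparse description, the script is
# run from the root of the VTR source tree:
#
#     ./dev/external_subtrees.py --list
#     ./dev/external_subtrees.py some_component --update -m "Pull in upstream fixes"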
def main():
args = parse_args()
config = load_subtree_config(args.subtree_config)
if args.list and len(args.components) == 0:
args.components = list(config.keys())
for component in args.components:
update_component(args, config[component])
def load_subtree_config(config_path):
config = OrderedDict()
tree = ET.parse(config_path)
root = tree.getroot()
assert root.tag == 'subtrees', "Expected root tag to be 'subtrees'"
for child in root:
assert child.tag == 'subtree', "Expected children to be 'subtree'"
name = None
internal_path = None
external_url = None
default_external_ref = None
for attrib, value in child.attrib.items():
if attrib == 'name':
name = value
elif attrib == 'internal_path':
internal_path = value
elif attrib == 'external_url':
external_url = value
elif attrib == 'default_external_ref':
default_external_ref = value
else:
assert False, "Unrecognized subtree attribute {}".format(attrib)
assert name != None, "Missing subtree name"
assert internal_path != None, "Missing subtree internal path"
assert external_url != None, "Missing subtree external url"
assert default_external_ref != None, "Missing subtree default external ref"
assert name not in config, "Duplicate subtree name '{}'".format(name)
config[name] = ExternalSubtree(name, internal_path, external_url, default_external_ref)
return config
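# Illustrative sketch (not part of the original script) of the layout that
# load_subtree_config() expects in subtree_config.xml; the component shown is
# hypothetical:
#
#     <subtrees>
#         <subtree
#             name="some_component"
#             internal_path="libs/EXTERNAL/some_component"
#             external_url="https://github.com/example/some_component.git"
#             default_external_ref="master"/>
#     </subtrees>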
def update_component(args, subtree_info):
    external_ref = args.external_ref
    if not external_ref:
        external_ref = subtree_info.default_external_ref
print("Component: {:<15} Path: {:<30} URL: {:<45} URL_Ref: {}".format(subtree_info.name, subtree_info.internal_path, subtree_info.external_url, external_ref))
if args.list:
return #List only
else:
assert args.update
assert args.message, "Must provide a commit message (-m/--message) describing why component is being updated"
assert external_ref != None
action = None
message = None
if not os.path.exists(subtree_info.internal_path):
#Create
action = 'add'
message = "{msg}\n\n{name}: Adding '{path}/' as an external git subtree from {url} {rev}".format(
name=subtree_info.name,
path=subtree_info.internal_path,
url=subtree_info.external_url,
rev=external_ref,
msg=args.message)
else:
#Pull
action = 'pull'
message = "{msg}\n\n{name}: Updating '{path}/' (external git subtree from {url} {rev})".format(
name=subtree_info.name,
path=subtree_info.internal_path,
url=subtree_info.external_url,
rev=external_ref,
msg=args.message)
assert action != None
assert message != None
cmd = "git subtree {action} --prefix {internal_path} {external_url} {external_branch} --squash --message '{message}'".format(
action=action,
internal_path=subtree_info.internal_path,
external_url=subtree_info.external_url,
external_branch=external_ref,
message=message)
if args.dry_run:
print(cmd)
else:
os.system(cmd)
if __name__ == "__main__":
main()
| 38.690909
| 336
| 0.562187
|
b7e6ecc497ce0034c799caf8431bc9d4673c9754
| 13,562
|
py
|
Python
|
sdks/python/client/openapi_client/model/io_argoproj_workflow_v1alpha1_workflow_template.py
|
Siebjee/argo-workflows
|
1a3b87bdf8edba02ba5e5aed20f3942be1d6f46c
|
[
"Apache-2.0"
] | null | null | null |
sdks/python/client/openapi_client/model/io_argoproj_workflow_v1alpha1_workflow_template.py
|
Siebjee/argo-workflows
|
1a3b87bdf8edba02ba5e5aed20f3942be1d6f46c
|
[
"Apache-2.0"
] | 3
|
2022-03-22T11:49:02.000Z
|
2022-03-24T14:13:59.000Z
|
sdks/python/client/openapi_client/model/io_argoproj_workflow_v1alpha1_workflow_template.py
|
Siebjee/argo-workflows
|
1a3b87bdf8edba02ba5e5aed20f3942be1d6f46c
|
[
"Apache-2.0"
] | null | null | null |
"""
Argo Server API
You can get examples of requests and responses by using the CLI with `--gloglevel=9`, e.g. `argo list --gloglevel=9` # noqa: E501
The version of the OpenAPI document: VERSION
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from openapi_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from openapi_client.exceptions import ApiAttributeError
def lazy_import():
from openapi_client.model.io_argoproj_workflow_v1alpha1_workflow_template_spec import IoArgoprojWorkflowV1alpha1WorkflowTemplateSpec
from openapi_client.model.object_meta import ObjectMeta
globals()['IoArgoprojWorkflowV1alpha1WorkflowTemplateSpec'] = IoArgoprojWorkflowV1alpha1WorkflowTemplateSpec
globals()['ObjectMeta'] = ObjectMeta
class IoArgoprojWorkflowV1alpha1WorkflowTemplate(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'metadata': (ObjectMeta,), # noqa: E501
'spec': (IoArgoprojWorkflowV1alpha1WorkflowTemplateSpec,), # noqa: E501
'api_version': (str,), # noqa: E501
'kind': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'metadata': 'metadata', # noqa: E501
'spec': 'spec', # noqa: E501
'api_version': 'apiVersion', # noqa: E501
'kind': 'kind', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, metadata, spec, *args, **kwargs): # noqa: E501
"""IoArgoprojWorkflowV1alpha1WorkflowTemplate - a model defined in OpenAPI
Args:
metadata (ObjectMeta):
spec (IoArgoprojWorkflowV1alpha1WorkflowTemplateSpec):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
            api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501
            kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.metadata = metadata
self.spec = spec
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, metadata, spec, *args, **kwargs): # noqa: E501
"""IoArgoprojWorkflowV1alpha1WorkflowTemplate - a model defined in OpenAPI
Args:
metadata (ObjectMeta):
spec (IoArgoprojWorkflowV1alpha1WorkflowTemplateSpec):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
            api_version (str): APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources. [optional] # noqa: E501
            kind (str): Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.metadata = metadata
self.spec = spec
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| 47.753521
| 346
| 0.602124
|
788e5e456e540ad803ee45440b5f9474ed79082d
| 623
|
py
|
Python
|
myvenv/lib/python3.5/site-packages/allauth/socialaccount/providers/reddit/provider.py
|
tuvapp/tuvappcom
|
5ca2be19f4b0c86a1d4a9553711a4da9d3f32841
|
[
"MIT"
] | 1
|
2019-06-26T19:23:55.000Z
|
2019-06-26T19:23:55.000Z
|
myvenv/lib/python3.5/site-packages/allauth/socialaccount/providers/reddit/provider.py
|
tuvapp/tuvappcom
|
5ca2be19f4b0c86a1d4a9553711a4da9d3f32841
|
[
"MIT"
] | 6
|
2020-06-05T18:44:19.000Z
|
2022-01-13T00:48:56.000Z
|
myvenv/lib/python3.5/site-packages/allauth/socialaccount/providers/reddit/provider.py
|
tuvapp/tuvappcom
|
5ca2be19f4b0c86a1d4a9553711a4da9d3f32841
|
[
"MIT"
] | 1
|
2022-02-01T17:19:28.000Z
|
2022-02-01T17:19:28.000Z
|
from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class RedditAccount(ProviderAccount):
pass
class RedditProvider(OAuth2Provider):
id = 'reddit'
name = 'Reddit'
account_class = RedditAccount
def extract_uid(self, data):
return data['name']
def extract_common_fields(self, data):
return dict(name=data.get('name'))
def get_default_scope(self):
scope = ['identity']
return scope
providers.registry.register(RedditProvider)
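# Illustrative settings sketch (not part of the original module), assuming the
# usual django-allauth SOCIALACCOUNT_PROVIDERS configuration style; the values
# shown are hypothetical placeholders.
#
#     SOCIALACCOUNT_PROVIDERS = {
#         'reddit': {
#             'AUTH_PARAMS': {'duration': 'permanent'},
#             'SCOPE': ['identity'],
#             'USER_AGENT': 'django:myapp:1.0 (by /u/yourredditusername)',
#         }
#     }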
| 23.961538
| 74
| 0.735152
|
c035ccf3be408c0f7e9eb501656133d29945832b
| 6,948
|
py
|
Python
|
src/jupyter/util.py
|
webis-de/ecir22-anchor-text
|
254ab53fe4a5fca809af0f846f82b3fb6abd2a82
|
[
"MIT"
] | 3
|
2021-11-16T19:52:54.000Z
|
2022-01-20T22:55:01.000Z
|
src/jupyter/util.py
|
webis-de/ecir22-anchor-text
|
254ab53fe4a5fca809af0f846f82b3fb6abd2a82
|
[
"MIT"
] | null | null | null |
src/jupyter/util.py
|
webis-de/ecir22-anchor-text
|
254ab53fe4a5fca809af0f846f82b3fb6abd2a82
|
[
"MIT"
] | null | null | null |
import spacy
from copy import deepcopy
import random
import json
import numpy as np
from nltk.stem.porter import PorterStemmer
stemmer = PorterStemmer()
import itertools
nlp = None
ALL_PERMUTATIONS=set([i for i in itertools.permutations([0,0,0,0,0,1,1,1,1,1])])
def distinct_terms(processed_terms):
return set([i['stemmed'] for i in processed_terms])
def extract_terms(sentence):
global nlp
if nlp is None:
nlp = spacy.load("en_core_web_sm", exclude=["parser", "tagger", "ner", "attribute_ruler", "lemmatizer", "tok2vec"])
ret = []
for i in nlp(sentence):
if not i.is_punct:
ret += [{'term': str(i), 'stemmed': stemmer.stem(str(i))}]
return ret
def json_lines(filename):
from tqdm import tqdm
import json
with open(filename, 'r') as f:
for i in tqdm(f):
yield json.loads(i)
def enrich_presentation(presentation):
ret = deepcopy(presentation)
if 'contents' in ret:
ret['contentsProcessed'] = extract_terms(ret['contents'])
if 'anchorText' in ret:
ret['anchorTextProcessed'] = []
for anchor in ret['anchorText']:
ret['anchorTextProcessed'] += [extract_terms(anchor)]
return ret
def __terms_for_counting(line):
if 'contentsProcessed' in line:
yield line['contentsProcessed']
if 'anchorTextProcessed' in line:
for anchor in line['anchorTextProcessed']:
yield anchor
def __flatten(terms):
ret = {}
for term in terms.keys():
term_with_count = [(i,c) for i,c in terms[term].items()]
most_term = sorted(term_with_count, key=lambda i: i[1], reverse=True)[0][0]
ret[most_term] = sum(i[1] for i in term_with_count)
return ret
def count_terms(lines):
ret = {}
for line in lines:
if type(line) == str:
line = json.loads(line)
for terms in __terms_for_counting(line):
for term in terms:
if term['stemmed'] not in ret:
ret[term['stemmed']] = {}
if term['term'] not in ret[term['stemmed']]:
ret[term['stemmed']][term['term']] = 0
ret[term['stemmed']][term['term']] += 1
return __flatten(ret)
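# Illustrative sketch (not part of the original module): given documents whose
# 'contentsProcessed' entries contain the surface forms "running" (twice) and
# "runs" (once), both sharing the stem "run", count_terms returns {'running': 3}:
# the most frequent surface form becomes the key and the value is the total
# count over all surface forms sharing that stem.
#
#     docs = [
#         {'contentsProcessed': [
#             {'term': 'running', 'stemmed': 'run'},
#             {'term': 'running', 'stemmed': 'run'},
#         ]},
#         {'contentsProcessed': [
#             {'term': 'runs', 'stemmed': 'run'},
#         ]},
#     ]
#     assert count_terms(docs) == {'running': 3}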
def distribution_distinct_terms(lines):
ret = {}
for line in lines:
if type(line) == str:
line = json.loads(line)
terms_in_line = str(len(distinct_terms(line['contentsProcessed'])))
if int(terms_in_line) <= 0:
continue
if terms_in_line not in ret:
ret[terms_in_line] = 0
to_add = 1
if 'target_document' in line and type(line['target_document']) == list:
to_add = len(line['target_document'])
ret[terms_in_line] += to_add
return ret
def anchor_text_distribution_distinct_terms(lines, limit=None):
ret = []
for line in lines:
if type(line) == str:
line = json.loads(line)
        if limit is not None and limit < len(line['anchorTextProcessed']):
continue
for anchor in line['anchorTextProcessed']:
ret += [{'contentsProcessed': anchor}]
return distribution_distinct_terms(ret)
def aggregated_anchor_text_distribution_distinct_terms(lines, limit=None):
ret = []
for line in lines:
if type(line) == str:
line = json.loads(line)
contents = []
        if limit is not None and limit < len(line['anchorTextProcessed']):
continue
for anchor in line['anchorTextProcessed']:
contents += anchor
ret += [{'contentsProcessed': contents}]
return distribution_distinct_terms(ret)
def build_probability_arrays(first, second):
first_ret = []
second_ret = []
first_sum = sum(first.values())
second_sum = sum(second.values())
max_id = max([int(i) for i in first.keys()] + [int(i) for i in second.keys()])
for i in range(1, max_id +1):
first_ret += [first.get(str(i), 0) / first_sum]
second_ret += [second.get(str(i), 0) / second_sum]
return first_ret, second_ret
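# Illustrative sketch (not part of the original module): the two count
# dictionaries are aligned on the integer keys 1..max_id and normalised into
# probability arrays, e.g.
#
#     first, second = build_probability_arrays({'1': 2, '2': 2}, {'1': 1, '3': 3})
#     # first  == [0.5, 0.5, 0.0]
#     # second == [0.25, 0.0, 0.75]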
def __combine_terms(docs):
ret = {}
for doc in docs:
for term, count in doc.items():
if term not in ret:
ret[term] = 0
ret[term] += count
return ret
def build_probability_arrays_terms(first, second):
first_ret = []
second_ret = []
first_sum = sum(first.values())
second_sum = sum(second.values())
terms = set([i for i in first.keys()] + [i for i in second.keys()])
for i in sorted(terms):
first_ret += [first.get(str(i), 0) / first_sum]
second_ret += [second.get(str(i), 0) / second_sum]
return first_ret, second_ret
def run_homogenity_experiments(json_line, num_splits=10):
batches = combine_retrieved_documents_to_batches(json_line, num_splits)
for batch in batches:
if sum(batch.values()) == 0:
return []
topic = json.loads(json_line)['topic']
ret = []
for slice_ids in ALL_PERMUTATIONS:
parts = [[], []]
for i in range(num_splits):
parts[slice_ids[i]] += [batches[i]]
assert len(parts) == 2
assert len(parts[0]) == 5
assert len(parts[1]) == 5
part_0 = __combine_terms(parts[0])
part_1 = __combine_terms(parts[1])
part_0, part_1 = build_probability_arrays_terms(part_0, part_1)
ret += [{
'topic': topic,
'jensen_shannon_distance': jensen_shannon_distance(part_0, part_1),
}]
return ret
def evaluate_retrieval_homogenity(directory, index, retrieval_model, processes=20):
from multiprocessing import Pool
with Pool(processes=processes) as pool:
lines = [i for i in open(directory + '/run.' + index + '.' + retrieval_model + '.txt.jsonl')]
tmp_ret = [i for i in pool.map(run_homogenity_experiments, lines, 1)]
ret = []
for li in tmp_ret:
for i in li:
i['index'] = index
i['retrieval_model'] = retrieval_model
ret += [i]
return ret
def combine_retrieved_documents_to_batches(json_line, num_splits=10):
json_line = json.loads(json_line)
documents = [i['documentRepresentation'] for i in json_line['documents'] if i is not None and 'documentRepresentation' in i and
i['documentRepresentation'] is not None and len(i['documentRepresentation'].keys()) >= 2]
random.Random(num_splits).shuffle(documents)
return [__combine_terms(i) for i in np.array_split(documents, num_splits)]
def jensen_shannon_distance(p, q):
from scipy.spatial.distance import jensenshannon
return jensenshannon(p,q)
def kullback_leibler_divergence(p,q):
from scipy.stats import entropy
    assert p is not None
    assert q is not None  # scipy.stats.entropy computes the KL-divergence when both p and q are given
return entropy(p, q)
| 28.829876
| 131
| 0.615141
|
c356e78a9c350733a5a8e82e7acfe0e212e27625
| 1,842
|
py
|
Python
|
back_end/todo_app/views.py
|
marcelloinfante/todo-giftcard-app
|
cd189ad1caa799b76103e04f7315d57e1a531e0c
|
[
"MIT"
] | null | null | null |
back_end/todo_app/views.py
|
marcelloinfante/todo-giftcard-app
|
cd189ad1caa799b76103e04f7315d57e1a531e0c
|
[
"MIT"
] | null | null | null |
back_end/todo_app/views.py
|
marcelloinfante/todo-giftcard-app
|
cd189ad1caa799b76103e04f7315d57e1a531e0c
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.models import User
from .models import CardInformations, Extract
from .serializers import CardInformationsSerializer, ExtractSerializer, UserSerializer
from rest_framework import status, viewsets, permissions, generics
from rest_framework.decorators import permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
class CurrentUserInfo(generics.ListAPIView):
permission_classes = [IsAuthenticated]
serializer_class = UserSerializer
def get_queryset(self):
user = self.request.user.username
return User.objects.filter(username=user)
class CurrentCardInfo(generics.ListAPIView):
permission_classes = [IsAuthenticated]
serializer_class = CardInformationsSerializer
def get_queryset(self):
user = self.request.user
return CardInformations.objects.filter(user=user)
class CurrentExtractInfo(generics.ListAPIView):
permission_classes = [IsAuthenticated]
serializer_class = ExtractSerializer
def get_queryset(self):
card = self.request.user
return Extract.objects.filter(card=card)
class UserViewSet(viewsets.ModelViewSet):
permission_classes = [IsAuthenticated]
queryset = User.objects.all().order_by('-date_joined')
serializer_class = UserSerializer
def create(self, request):
return Response("Create is not permitted", status=status.HTTP_403_FORBIDDEN)
def update(self, request, pk=None):
return Response("Update is not permitted", status=status.HTTP_403_FORBIDDEN)
def partial_update(self, request, pk=None):
return Response("Update is not permitted", status=status.HTTP_403_FORBIDDEN)
def destroy(self, request, pk=None):
return Response("Delete is not permitted", status=status.HTTP_403_FORBIDDEN)
| 35.423077
| 86
| 0.767644
|
344cb28a239aa915641f90465aadf2e11db4f804
| 14,082
|
py
|
Python
|
angr/analyses/reaching_definitions/rd_state.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | 1
|
2019-03-11T07:42:24.000Z
|
2019-03-11T07:42:24.000Z
|
angr/analyses/reaching_definitions/rd_state.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | null | null | null |
angr/analyses/reaching_definitions/rd_state.py
|
Kyle-Kyle/angr
|
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
|
[
"BSD-2-Clause"
] | null | null | null |
from typing import Optional, Iterable, Set, TYPE_CHECKING
import logging
import archinfo
from ...knowledge_plugins.key_definitions import LiveDefinitions
from ...knowledge_plugins.key_definitions.atoms import Atom, GuardUse, Register, MemoryLocation
from ...knowledge_plugins.key_definitions.definition import Definition
from ...knowledge_plugins.key_definitions.tag import InitialValueTag, ParameterTag, Tag
from ...knowledge_plugins.key_definitions.undefined import UNDEFINED
from ...knowledge_plugins.key_definitions.dataset import DataSet
from ...calling_conventions import SimCC, SimRegArg, SimStackArg
from ...engines.light import SpOffset
from ...code_location import CodeLocation
from .external_codeloc import ExternalCodeLocation
from .heap_allocator import HeapAllocator
from .subject import Subject, SubjectType
if TYPE_CHECKING:
from .reaching_definitions import ReachingDefinitionsAnalysis
l = logging.getLogger(name=__name__)
class ReachingDefinitionsState:
"""
Represents the internal state of the ReachingDefinitionsAnalysis.
It contains a data class LiveDefinitions, which stores both definitions and uses for register, stack, memory, and
temporary variables, uncovered during the analysis.
:param subject: The subject being analysed.
:ivar arch: The architecture targeted by the program.
:param track_tmps: Only tells whether or not temporary variables should be taken into consideration when
representing the state of the analysis.
Should be set to true when the analysis has counted uses and definitions for temporary
variables, false otherwise.
:param analysis: The analysis that generated the state represented by this object.
:param rtoc_value: When the targeted architecture is ppc64, the initial function needs to know the `rtoc_value`.
:param live_definitions:
:param canonical_size:
The sizes (in bytes) that objects with an UNKNOWN_SIZE are treated as for operations where sizes are necessary.
:param heap_allocator: Mechanism to model the management of heap memory.
"""
__slots__ = ('arch', '_subject', '_track_tmps', 'analysis', 'current_codeloc', 'codeloc_uses', 'live_definitions',
'all_definitions', '_canonical_size', 'heap_allocator' )
def __init__(self, arch: archinfo.Arch, subject: Subject, track_tmps: bool=False,
analysis: Optional['ReachingDefinitionsAnalysis']=None, rtoc_value=None,
live_definitions=None, canonical_size: int=8, heap_allocator: HeapAllocator=None):
# handy short-hands
self.arch = arch
self._subject = subject
self._track_tmps = track_tmps
self.analysis = analysis
self._canonical_size: int = canonical_size
if live_definitions is None:
# the first time this state is created. initialize it
self.live_definitions = LiveDefinitions(self.arch, track_tmps=self._track_tmps,
canonical_size=canonical_size)
self._set_initialization_values(subject, rtoc_value)
else:
# this state is a copy from a previous state. skip the initialization
self.live_definitions = live_definitions
self.all_definitions: Set[Definition] = set()
self.heap_allocator = heap_allocator or HeapAllocator(canonical_size)
self.current_codeloc: Optional[CodeLocation] = None
self.codeloc_uses: Set[Definition] = set()
    @property
    def tmp_definitions(self):
        return self.live_definitions.tmp_definitions
    @property
    def tmp_uses(self):
        return self.live_definitions.tmp_uses
    @property
    def register_uses(self):
        return self.live_definitions.register_uses
    @property
    def register_definitions(self):
        return self.live_definitions.register_definitions
    @property
    def stack_definitions(self):
        return self.live_definitions.stack_definitions
    @property
    def stack_uses(self):
        return self.live_definitions.stack_uses
    @property
    def heap_definitions(self):
        return self.live_definitions.heap_definitions
    @property
    def heap_uses(self):
        return self.live_definitions.heap_uses
    @property
    def memory_uses(self):
        return self.live_definitions.memory_uses
    @property
    def memory_definitions(self):
        return self.live_definitions.memory_definitions
    @property
    def uses_by_codeloc(self):
        return self.live_definitions.uses_by_codeloc
    def get_sp(self) -> int:
        return self.live_definitions.get_sp()
@property
def dep_graph(self):
return self.analysis.dep_graph
def __repr__(self):
ctnt = "RDState-%r" % (self.live_definitions)
return "{%s}" % ctnt
def _set_initialization_values(self, subject: Subject, rtoc_value: Optional[int]=None):
if subject.type == SubjectType.Function:
if isinstance(self.arch, archinfo.arch_ppc64.ArchPPC64) and not rtoc_value:
raise ValueError('The architecture being ppc64, the parameter `rtoc_value` should be provided.')
self._initialize_function(
subject.cc,
subject.content.addr,
rtoc_value,
)
elif subject.type == SubjectType.CallTrace:
if isinstance(self.arch, archinfo.arch_ppc64.ArchPPC64) and not rtoc_value:
raise ValueError('The architecture being ppc64, the parameter `rtoc_value` should be provided.')
self._initialize_function(
subject.cc,
subject.content.current_function_address(),
rtoc_value,
)
elif subject.type == SubjectType.Block:
pass
return self
def _initialize_function(self, cc: SimCC, func_addr: int, rtoc_value: Optional[int]=None):
# initialize stack pointer
sp = Register(self.arch.sp_offset, self.arch.bytes)
sp_def = Definition(sp, ExternalCodeLocation(), DataSet(SpOffset(self.arch.bits, 0), self.arch.bits), tags={InitialValueTag()})
self.register_definitions.set_object(sp_def.offset, sp_def, sp_def.size)
if self.arch.name.startswith('MIPS'):
if func_addr is None:
l.warning("func_addr must not be None to initialize a function in mips")
t9 = Register(self.arch.registers['t9'][0], self.arch.bytes)
t9_def = Definition(t9, ExternalCodeLocation(), DataSet(func_addr, self.arch.bits), tags={InitialValueTag()})
self.register_definitions.set_object(t9_def.offset,t9_def,t9_def.size)
if cc is not None and cc.args is not None:
for arg in cc.args:
# initialize register parameters
if isinstance(arg, SimRegArg):
# FIXME: implement reg_offset handling in SimRegArg
reg_offset = self.arch.registers[arg.reg_name][0]
reg = Register(reg_offset, self.arch.bytes)
reg_def = Definition(reg, ExternalCodeLocation(), DataSet(UNDEFINED, self.arch.bits), tags={ParameterTag()})
self.register_definitions.set_object(reg.reg_offset, reg_def, reg.size)
# initialize stack parameters
elif isinstance(arg, SimStackArg):
sp_offset = SpOffset(self.arch.bits, arg.stack_offset)
ml = MemoryLocation(sp_offset, arg.size)
ml_def = Definition(ml, ExternalCodeLocation(), DataSet(UNDEFINED, arg.size * 8), tags={ParameterTag()})
self.stack_definitions.set_object(arg.stack_offset, ml_def, ml.size)
else:
raise TypeError('Unsupported parameter type %s.' % type(arg).__name__)
# architecture dependent initialization
if self.arch.name.lower().find('ppc64') > -1:
if rtoc_value is None:
raise TypeError("rtoc_value must be provided on PPC64.")
offset, size = self.arch.registers['rtoc']
rtoc = Register(offset, size)
            rtoc_def = Definition(rtoc, ExternalCodeLocation(), DataSet(rtoc_value, self.arch.bits), tags={InitialValueTag()})
self.register_definitions.set_object(rtoc.reg_offset, rtoc_def, rtoc.size)
elif self.arch.name.lower().find('mips64') > -1:
offset, size = self.arch.registers['t9']
t9 = Register(offset, size)
            t9_def = Definition(t9, ExternalCodeLocation(), DataSet({func_addr}, self.arch.bits), tags={InitialValueTag()})
self.register_definitions.set_object(t9.reg_offset, t9_def, t9.size)
def copy(self) -> 'ReachingDefinitionsState':
rd = ReachingDefinitionsState(
self.arch,
self._subject,
track_tmps=self._track_tmps,
analysis=self.analysis,
live_definitions=self.live_definitions.copy(),
canonical_size=self._canonical_size,
heap_allocator=self.heap_allocator,
)
return rd
def merge(self, *others):
state = self.copy()
for other in others:
other: 'ReachingDefinitionsState'
state.live_definitions = state.live_definitions.merge(other.live_definitions)
return state
def _cycle(self, code_loc: CodeLocation) -> None:
if code_loc != self.current_codeloc:
self.current_codeloc = code_loc
self.codeloc_uses = set()
def kill_definitions(self, atom: Atom, code_loc: CodeLocation, data: Optional[DataSet]=None, dummy=True, tags: Set[Tag]=None) -> None:
"""
Overwrite existing definitions w.r.t 'atom' with a dummy definition instance. A dummy definition will not be
removed during simplification.
:param atom:
:param CodeLocation code_loc:
:param object data:
:return: None
"""
if data is None:
data = DataSet(UNDEFINED, atom.size)
self.kill_and_add_definition(atom, code_loc, data, dummy=dummy, tags=tags)
def kill_and_add_definition(self, atom: Atom, code_loc: CodeLocation, data: Optional[DataSet],
dummy=False, tags: Set[Tag]=None) -> Optional[Definition]:
self._cycle(code_loc)
definition: Optional[Definition]
definition = self.live_definitions.kill_and_add_definition(atom, code_loc, data, dummy=dummy, tags=tags)
if definition is not None:
self.all_definitions.add(definition)
if self.dep_graph is not None:
stack_use = set(filter(
lambda u: isinstance(u.atom, MemoryLocation) and u.atom.is_on_stack,
self.codeloc_uses
))
sp_offset = self.arch.sp_offset
bp_offset = self.arch.bp_offset
for used in self.codeloc_uses:
# sp is always used as a stack pointer, and we do not track dependencies against stack pointers.
# bp is sometimes used as a base pointer. we recognize such cases by checking if there is a use to
# the stack variable.
#
# There are two cases for which it is superfluous to report a dependency on (a use of) stack/base
# pointers:
# - The `Definition` *uses* a `MemoryLocation` pointing to the stack;
# - The `Definition` *is* a `MemoryLocation` pointing to the stack.
is_using_spbp_while_memory_address_on_stack_is_used = (
isinstance(used.atom, Register) and
used.atom.reg_offset in (sp_offset, bp_offset) and
len(stack_use) > 0
)
is_using_spbp_to_define_memory_location_on_stack = (
isinstance(definition.atom, MemoryLocation) and
definition.atom.is_on_stack and
isinstance(used.atom, Register) and
used.atom.reg_offset in (sp_offset, bp_offset)
)
if not (
is_using_spbp_while_memory_address_on_stack_is_used or
is_using_spbp_to_define_memory_location_on_stack
):
# Moderately confusing misnomers. This is an edge from a def to a use, since the
# "uses" are actually the definitions that we're using and the "definition" is the
# new definition; i.e. The def that the old def is used to construct so this is
# really a graph where nodes are defs and edges are uses.
self.dep_graph.add_edge(used, definition)
self.dep_graph.add_dependencies_for_concrete_pointers_of(
used,
self.analysis.project.kb.cfgs['CFGFast'],
self.analysis.project.loader
)
return definition
def add_use(self, atom: Atom, code_loc) -> None:
self._cycle(code_loc)
self.codeloc_uses.update(self.get_definitions(atom))
self.live_definitions.add_use(atom, code_loc)
def add_use_by_def(self, definition: Definition, code_loc: CodeLocation) -> None:
self._cycle(code_loc)
self.codeloc_uses.add(definition)
self.live_definitions.add_use_by_def(definition, code_loc)
def get_definitions(self, atom: Atom) -> Iterable[Definition]:
return self.live_definitions.get_definitions(atom)
def mark_guard(self, code_loc: CodeLocation, data: DataSet, target):
self._cycle(code_loc)
atom = GuardUse(target)
kinda_definition = Definition(atom, code_loc, data)
if self.dep_graph is not None:
for used in self.codeloc_uses:
self.dep_graph.add_edge(used, kinda_definition)
| 45.134615
| 138
| 0.646002
|
77f41216e79d170488b5cac4bbac78754ccaba21
| 1,697
|
py
|
Python
|
ansible/plugins/lookup/graphfile.py
|
emilmih/sonic-mgmt
|
e4e42ec8028bf51b39587e2b53e526d505fe7938
|
[
"Apache-2.0"
] | 132
|
2016-10-19T12:34:44.000Z
|
2022-03-16T09:00:39.000Z
|
ansible/plugins/lookup/graphfile.py
|
emilmih/sonic-mgmt
|
e4e42ec8028bf51b39587e2b53e526d505fe7938
|
[
"Apache-2.0"
] | 3,152
|
2016-09-21T23:05:58.000Z
|
2022-03-31T23:29:08.000Z
|
ansible/plugins/lookup/graphfile.py
|
emilmih/sonic-mgmt
|
e4e42ec8028bf51b39587e2b53e526d505fe7938
|
[
"Apache-2.0"
] | 563
|
2016-09-20T01:00:15.000Z
|
2022-03-31T22:43:54.000Z
|
from __future__ import (absolute_import, division, print_function)
import os.path
import yaml
import xml.etree.ElementTree as ET
from ansible.utils.display import Display
from ansible.plugins.lookup import LookupBase
from ansible.errors import AnsibleError
__metaclass__ = type
DOCUMENTATION = """
lookup: graphfile
version_added: "1.0"
short_description: find connection graph file that has DUTs listed defined.
description:
- This lookup returns the connection graph file contains the DUTs.
options:
_terms:
description: list of DUT hostnames
required: True
"""
display = Display()
LAB_CONNECTION_GRAPH_FILE = 'graph_files.yml'
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
hostnames = terms[0]
display.debug('Graph file lookup DUTs: %s' % hostnames)
graph_list_file = self.find_file_in_search_path(variables, 'files', LAB_CONNECTION_GRAPH_FILE)
if not graph_list_file:
raise AnsibleError('Unable to locate %s' % LAB_CONNECTION_GRAPH_FILE)
with open(graph_list_file) as fd:
file_list = yaml.safe_load(fd)
for gf in file_list:
display.debug('Looking at conn graph file: %s' % gf)
gf = self.find_file_in_search_path(variables, 'files', gf)
if not gf:
continue
with open(gf) as fd:
root = ET.fromstring(fd.read())
hosts_all = [d.attrib['Hostname'] for d in root.iter('Device')]
if set(hostnames) <= set(hosts_all):
return [os.path.basename(gf)]
return []
| 34.632653
| 102
| 0.64231
|
322c32424ed657a5d8dc6110e9ffb3fcec538620
| 25,201
|
py
|
Python
|
AFQ/tests/test_api.py
|
grotheer/pyAFQ
|
3a531b5bdc3d53f4a76d5d604a26fde488e1aaf6
|
[
"BSD-2-Clause"
] | null | null | null |
AFQ/tests/test_api.py
|
grotheer/pyAFQ
|
3a531b5bdc3d53f4a76d5d604a26fde488e1aaf6
|
[
"BSD-2-Clause"
] | null | null | null |
AFQ/tests/test_api.py
|
grotheer/pyAFQ
|
3a531b5bdc3d53f4a76d5d604a26fde488e1aaf6
|
[
"BSD-2-Clause"
] | null | null | null |
import tempfile
import os
import os.path as op
import shutil
import subprocess
import gc
import toml
import numpy as np
import numpy.testing as npt
import pytest
import pandas as pd
from pandas.testing import assert_series_equal
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.tracking.utils as dtu
import dipy.tracking.streamline as dts
import dipy.data as dpd
from dipy.data import fetcher, get_fnames
from dipy.io.streamline import save_tractogram, load_tractogram
from dipy.io.stateful_tractogram import StatefulTractogram, Space
from dipy.testing.decorators import xvfb_it
from AFQ import api
import AFQ.data as afd
import AFQ.segmentation as seg
import AFQ.utils.streamlines as aus
import AFQ.utils.bin as afb
from AFQ.definitions.mask import RoiMask, ThresholdedScalarMask,\
PFTMask, MaskFile
from AFQ.definitions.mapping import SynMap, AffMap, SlrMap
from AFQ.definitions.scalar import TemplateScalar
def touch(fname, times=None):
with open(fname, 'a'):
os.utime(fname, times)
def get_temp_hardi():
tmpdir = nbtmp.InTemporaryDirectory()
afd.organize_stanford_data(path=tmpdir.name)
bids_path = op.join(tmpdir.name, 'stanford_hardi')
sub_path = op.join(
tmpdir.name,
'stanford_hardi',
'derivatives',
'vistasoft',
'sub-01',
'ses-01',
'dwi')
return tmpdir, bids_path, sub_path
def create_dummy_data(dmriprep_dir, subject, session=None):
aff = np.eye(4)
data = np.ones((10, 10, 10, 6))
bvecs = np.vstack([np.eye(3), np.eye(3)])
bvecs[0] = 0
bvals = np.ones(6) * 1000.
bvals[0] = 0
if session is None:
data_dir = subject
else:
data_dir = op.join(subject, session)
np.savetxt(
op.join(
dmriprep_dir, data_dir, 'dwi', 'dwi.bval'),
bvals)
np.savetxt(
op.join(
dmriprep_dir, data_dir, 'dwi', 'dwi.bvec'),
bvecs)
nib.save(
nib.Nifti1Image(data, aff),
op.join(
dmriprep_dir, data_dir, 'dwi', 'dwi.nii.gz'))
nib.save(
nib.Nifti1Image(data, aff),
op.join(
dmriprep_dir, data_dir, 'anat', 'T1w.nii.gz'))
nib.save(
nib.Nifti1Image(data, aff),
op.join(
dmriprep_dir, data_dir, 'anat', 'seg.nii.gz'))
def create_dummy_bids_path(n_subjects, n_sessions, share_sessions=True):
subjects = ['sub-0%s' % (d + 1) for d in range(n_subjects)]
# Case where there are individual session folders within each subject's
# folder:
if n_sessions > 1:
# create data for n_sessions for each subject
if share_sessions:
sessions = ['ses-0%s' % (d + 1) for d in range(n_sessions)]
bids_dir = tempfile.mkdtemp()
afd.to_bids_description(
bids_dir,
**{"Name": "Dummy",
"Subjects": subjects,
"Sessions": sessions})
dmriprep_dir = op.join(bids_dir, "derivatives", "dmriprep")
os.makedirs(dmriprep_dir)
afd.to_bids_description(
dmriprep_dir,
**{"Name": "Dummy",
"PipelineDescription": {"Name": "synthetic"}})
for subject in subjects:
for session in sessions:
for modality in ['anat', 'dwi']:
os.makedirs(
op.join(dmriprep_dir, subject, session, modality))
# Make some dummy data:
create_dummy_data(dmriprep_dir, subject, session)
else:
# create different sessions for each subject
sessions = ['ses-0%s' % (d + 1) for d in range(n_subjects)]
bids_dir = tempfile.mkdtemp()
afd.to_bids_description(
bids_dir,
**{"Name": "Dummy",
"Subjects": subjects,
"Sessions": sessions})
dmriprep_dir = op.join(bids_dir, "derivatives", "dmriprep")
os.makedirs(dmriprep_dir)
afd.to_bids_description(
dmriprep_dir,
**{"Name": "Dummy",
"PipelineDescription": {"Name": "synthetic"}})
for d in range(n_subjects):
subject = subjects[d]
session = sessions[d]
for modality in ['anat', 'dwi']:
os.makedirs(
op.join(dmriprep_dir, subject, session, modality))
# Make some dummy data:
create_dummy_data(dmriprep_dir, subject, session)
else:
# Don't create session folders at all:
bids_dir = tempfile.mkdtemp()
afd.to_bids_description(
bids_dir,
**{"Name": "Dummy", "Subjects": subjects})
dmriprep_dir = op.join(bids_dir, "derivatives", "dmriprep")
os.makedirs(dmriprep_dir)
afd.to_bids_description(
dmriprep_dir,
**{"Name": "Dummy",
"PipelineDescription": {"Name": "synthetic"}})
for subject in subjects:
for modality in ['anat', 'dwi']:
os.makedirs(op.join(dmriprep_dir, subject, modality))
# Make some dummy data:
create_dummy_data(dmriprep_dir, subject)
return bids_dir
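# Illustrative sketch (not part of the original tests) of the tree produced by
# create_dummy_bids_path(1, 1), assuming afd.to_bids_description writes a
# dataset_description.json at each level it is called on:
#
#     <bids_dir>/
#         dataset_description.json
#         derivatives/
#             dmriprep/
#                 dataset_description.json
#                 sub-01/
#                     anat/T1w.nii.gz
#                     anat/seg.nii.gz
#                     dwi/dwi.bval
#                     dwi/dwi.bvec
#                     dwi/dwi.nii.gz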
def test_BundleDict():
"""
Tests bundle dict
"""
# test defaults
afq_bundles = api.BundleDict()
# bundles restricted within hemisphere
# NOTE: FA and FP cross midline so are removed
# NOTE: all others generate two bundles
num_hemi_bundles = (len(api.BUNDLES)-2)*2
# bundles that cross the midline
num_whole_bundles = 2
assert len(afq_bundles) == num_hemi_bundles + num_whole_bundles
# Arcuate Fasciculus
afq_bundles = api.BundleDict(["ARC"])
assert len(afq_bundles) == 2
# Forceps Minor
afq_bundles = api.BundleDict(["FA"])
assert len(afq_bundles) == 1
# Cingulum Hippocampus
# not included but exists in templates
afq_bundles = api.BundleDict(["HCC"])
assert len(afq_bundles) == 2
# Vertical Occipital Fasciculus
# not included and does not exist in afq templates
with pytest.raises(
ValueError,
match="VOF_R is not in AFQ templates"):
afq_bundles = api.BundleDict(["VOF"])
afq_bundles["VOF_R"]
afq_bundles = api.BundleDict(["VOF"], seg_algo="reco80")
assert len(afq_bundles) == 2
afq_bundles = api.BundleDict(["whole_brain"], seg_algo="reco80")
assert len(afq_bundles) == 1
def test_AFQ_missing_files():
tmpdir = nbtmp.InTemporaryDirectory()
bids_path = tmpdir.name
with pytest.raises(
ValueError,
match="There must be a dataset_description.json in bids_path"):
api.AFQ(bids_path)
afd.to_bids_description(
bids_path,
**{"Name": "Missing", "Subjects": ["sub-01"]})
with pytest.raises(
ValueError,
match=f"No non-json files recognized by pyBIDS in {bids_path}"):
api.AFQ(bids_path)
subses_folder = op.join(
bids_path,
"derivatives",
"otherDeriv",
'sub-01',
'ses-01')
os.makedirs(subses_folder, exist_ok=True)
afd.to_bids_description(
op.join(
bids_path,
"derivatives",
"otherDeriv"),
**{
"Name": "Missing",
"PipelineDescription": {"Name": "otherDeriv"}})
touch(op.join(subses_folder, "sub-01_ses-01_dwi.nii.gz"))
with pytest.raises(
ValueError,
match="No non-json files recognized by pyBIDS"
+ " in the pipeline: missingPipe"):
api.AFQ(bids_path, dmriprep="missingPipe")
os.mkdir(op.join(bids_path, "missingPipe"))
afd.to_bids_description(
op.join(bids_path, "missingPipe"), **{
"Name": "Missing",
"PipelineDescription": {"Name": "missingPipe"}})
with pytest.raises(
ValueError,
match="No non-json files recognized by pyBIDS"
+ " in the pipeline: missingPipe"):
api.AFQ(bids_path, dmriprep="missingPipe")
@pytest.mark.nightly_custom
def test_AFQ_custom_tract():
"""
Test whether AFQ can use tractography from
custom_tractography_bids_filters
"""
_, bids_path, sub_path = get_temp_hardi()
afd.fetch_stanford_hardi_tractography()
bundle_names = ["SLF", "ARC", "CST", "FP"]
# move subsampled tractography into bids folder
os.rename(
op.join(
op.expanduser('~'),
'AFQ_data',
'stanford_hardi_tractography',
'tractography_subsampled.trk'),
op.join(
sub_path,
'subsampled_tractography.trk'
)
)
my_afq = api.AFQ(
bids_path,
dmriprep='vistasoft',
bundle_info=bundle_names,
custom_tractography_bids_filters={
"suffix": "tractography",
"scope": "vistasoft"
})
my_afq.export_streamlines()
@pytest.mark.nightly_basic
def test_AFQ_no_derivs():
"""
Test AFQ initialization when the derivatives dataset description is missing
"""
bids_path = create_dummy_bids_path(1, 1)
os.remove(op.join(
bids_path, "derivatives", "dmriprep", "dataset_description.json"))
with pytest.raises(
ValueError,
match="`bids_path` contains no subjects in derivatives folders."
+ " This could be caused by derivatives folders not"
+ " following the BIDS format."):
api.AFQ(
bids_path,
dmriprep="synthetic")
@pytest.mark.nightly_custom
@xvfb_it
def test_AFQ_fury():
_, bids_path, _ = get_temp_hardi()
myafq = api.AFQ(
bids_path=bids_path,
dmriprep='vistasoft',
viz_backend="fury")
myafq.all_bundles_figure
@pytest.mark.nightly_msmt_and_init
def test_AFQ_init():
"""
Test the initialization of the AFQ object
"""
for n_sessions in [1, 2, 3]:
n_subjects = 3
bids_path = create_dummy_bids_path(n_subjects, n_sessions,
(n_subjects != n_sessions))
my_afq = api.AFQ(bids_path,
dmriprep="synthetic")
for subject in range(n_subjects):
sub = f"0{subject+1}"
if n_subjects == n_sessions:
npt.assert_equal(
len(my_afq.wf_dict[sub][sub]),
40)
else:
for session in range(n_sessions):
if n_sessions == 1:
sess = "None"
else:
sess = f"0{session+1}"
npt.assert_equal(
len(my_afq.wf_dict[sub][sess]),
40)
def test_AFQ_custom_bundle_dict():
bids_path = create_dummy_bids_path(3, 1)
bundle_dict = api.BundleDict()
api.AFQ(
bids_path,
dmriprep="synthetic",
bundle_info=bundle_dict)
@pytest.mark.nightly_basic
def test_AFQ_data():
"""
Test if API can run without prealign and with only pre-align
"""
_, bids_path, _ = get_temp_hardi()
for mapping in [SynMap(use_prealign=False), AffMap()]:
myafq = api.AFQ(
bids_path=bids_path,
dmriprep='vistasoft',
mapping=mapping)
npt.assert_equal(nib.load(myafq.b0["01"]).shape,
nib.load(myafq.dwi_file["01"]).shape[:3])
npt.assert_equal(nib.load(myafq.b0["01"]).shape,
nib.load(myafq.dti_params["01"]).shape[:3])
myafq.rois
shutil.rmtree(op.join(
bids_path,
'derivatives/afq'))
@pytest.mark.nightly_anisotropic
def test_AFQ_anisotropic():
"""
Test if API can run using anisotropic registration
with a specific selection of b vals
"""
_, bids_path, _ = get_temp_hardi()
myafq = api.AFQ(
bids_path=bids_path,
dmriprep='vistasoft',
min_bval=1990,
max_bval=2010,
b0_threshold=50,
reg_template="mni_T1",
reg_subject="power_map")
gtab = myafq.gtab["01"]
# check the b0s mask is correct
b0s_mask = np.zeros(160, dtype=bool)
b0s_mask[0:10] = True
npt.assert_equal(gtab.b0s_mask, b0s_mask)
# check that only b values in the b val range passed
bvals_in_range = \
np.logical_and((gtab.bvals > 1990), (gtab.bvals < 2010))
bvals_in_range_or_0 = \
np.logical_or(bvals_in_range, gtab.b0s_mask)
npt.assert_equal(bvals_in_range_or_0, np.ones(160, dtype=bool))
# check that the apm map was made
myafq.mapping
assert op.exists(op.join(
myafq.results_dir["01"],
'sub-01_ses-01_dwi_model-CSD_APM.nii.gz'))
def test_API_type_checking():
_, bids_path, _ = get_temp_hardi()
with pytest.raises(
TypeError,
match="bids_path must be a string"):
api.AFQ(2)
with pytest.raises(
TypeError,
match="custom_tractography_bids_filters must be"
+ " either a dict or None"):
api.AFQ(
bids_path,
custom_tractography_bids_filters=["dwi"])
with pytest.raises(
TypeError,
match=("brain_mask must be None or a mask defined"
" in `AFQ.definitions.mask`")):
api.AFQ(
bids_path,
brain_mask="not a brain mask")
with pytest.raises(
TypeError,
match="viz_backend must contain either 'fury' or 'plotly'"):
api.AFQ(bids_path, viz_backend="matplotlib")
with pytest.raises(
TypeError,
match=(
"bundle_info must be None, a list of strings,"
" a dict, or a BundleDict")):
api.AFQ(bids_path, bundle_info=[2, 3])
@pytest.mark.nightly_slr
def test_AFQ_slr():
"""
Test if API can run using slr map
"""
_, bids_path, _ = get_temp_hardi()
myafq = api.AFQ(
bids_path=bids_path,
dmriprep='vistasoft',
reg_subject='subject_sls',
reg_template='hcp_atlas',
mapping=SlrMap())
tgram = load_tractogram(myafq.clean_bundles["01"], myafq.img["01"])
bundles = aus.tgram_to_bundles(
tgram, myafq.bundle_dict, myafq.img["01"])
npt.assert_(len(bundles['CST_L']) > 0)
@pytest.mark.nightly_reco
def test_AFQ_reco():
"""
Test if API can run segmentation with recobundles
"""
_, bids_path, _ = get_temp_hardi()
myafq = api.AFQ(
bids_path=bids_path,
dmriprep='vistasoft',
viz_backend="plotly",
profile_weights="median",
segmentation_params={
'seg_algo': 'reco',
'rng': 42})
tgram = load_tractogram(myafq.clean_bundles["01"], myafq.img["01"])
bundles = aus.tgram_to_bundles(tgram, myafq.bundle_dict, myafq.img["01"])
npt.assert_(len(bundles['CCMid']) > 0)
myafq.export_all()
@pytest.mark.nightly_custom
def test_AFQ_reco80():
"""
Test API segmentation with the 80-bundle atlas
"""
_, bids_path, _ = get_temp_hardi()
myafq = api.AFQ(
bids_path=bids_path,
dmriprep='vistasoft',
segmentation_params={
'seg_algo': 'reco80',
'rng': 42})
tgram = load_tractogram(myafq.clean_bundles["01"], myafq.img["01"])
bundles = aus.tgram_to_bundles(tgram, myafq.bundle_dict, myafq.img["01"])
npt.assert_(len(bundles['CCMid']) > 0)
@pytest.mark.nightly_pft
def test_AFQ_pft():
"""
Test pft interface for AFQ
"""
_, bids_path, sub_path = get_temp_hardi()
bundle_names = ["SLF", "ARC", "CST", "FP"]
f_pve_csf, f_pve_gm, f_pve_wm = get_fnames('stanford_pve_maps')
os.rename(f_pve_wm, op.join(sub_path, "sub-01_ses-01_WMprobseg.nii.gz"))
os.rename(f_pve_gm, op.join(sub_path, "sub-01_ses-01_GMprobseg.nii.gz"))
os.rename(f_pve_csf, op.join(sub_path, "sub-01_ses-01_CSFprobseg.nii.gz"))
stop_mask = PFTMask(
MaskFile("WMprobseg"),
MaskFile("GMprobseg"),
MaskFile("CSFprobseg"))
my_afq = api.AFQ(
bids_path,
dmriprep='vistasoft',
bundle_info=bundle_names,
tracking_params={
"stop_mask": stop_mask,
"stop_threshold": "CMC",
"tracker": "pft"
})
my_afq.export_streamlines()
@pytest.mark.nightly_custom
def test_AFQ_custom_subject_reg():
"""
Test custom subject registration using AFQ object
"""
# make first temporary directory to generate b0
_, bids_path, sub_path = get_temp_hardi()
bundle_names = ["SLF", "ARC", "CST", "FP"]
b0_file = api.AFQ(
bids_path,
dmriprep='vistasoft',
bundle_info=bundle_names).b0["01"]
# make a different temporary directory to test this custom file in
_, bids_path, sub_path = get_temp_hardi()
os.rename(b0_file, op.join(sub_path, "sub-01_ses-01_customb0.nii.gz"))
my_afq = api.AFQ(
bids_path,
dmriprep='vistasoft',
bundle_info=bundle_names,
reg_template="mni_T2",
reg_subject={
"suffix": "customb0",
"scope": "vistasoft"})
my_afq.export_rois()
# Requires large download
@pytest.mark.nightly
def test_AFQ_FA():
"""
Test if API can run registration with FA
"""
_, bids_path, _ = get_temp_hardi()
myafq = api.AFQ(
bids_path=bids_path,
dmriprep='vistasoft',
reg_template='dti_fa_template',
reg_subject='dti_fa_subject')
myafq.rois
@pytest.mark.nightly
def test_DKI_profile():
"""
Test using API to profile dki
"""
tmpdir = nbtmp.InTemporaryDirectory()
afd.organize_cfin_data(path=tmpdir.name)
myafq = api.AFQ(bids_path=op.join(tmpdir.name, 'cfin_multib'),
dmriprep='dipy')
myafq.dki_fa
myafq.dki_md
def test_auto_cli():
tmpdir = nbtmp.InTemporaryDirectory()
config_file = op.join(tmpdir.name, 'test.toml')
arg_dict = afb.func_dict_to_arg_dict()
arg_dict['BIDS']['bids_path']['default'] = tmpdir.name
afb.generate_config(config_file, arg_dict, False)
with pytest.raises(
ValueError,
match="There must be a dataset_description.json in bids_path"):
afb.parse_config_run_afq(config_file, arg_dict, False)
@pytest.mark.skip(reason="causes segmentation fault")
def test_run_using_auto_cli():
tmpdir, bids_path, _ = get_temp_hardi()
config_file = op.join(tmpdir.name, 'test.toml')
arg_dict = afb.func_dict_to_arg_dict()
# set our custom defaults for the toml file
# It is easier to edit them here, than to parse the file and edit them
# after the file is written
arg_dict['BIDS']['bids_path']['default'] = bids_path
arg_dict['BIDS']['dmriprep']['default'] = 'vistasoft'
arg_dict['BUNDLES']['bundle_names']['default'] = ["CST"]
arg_dict['TRACTOGRAPHY']['n_seeds']['default'] = 500
arg_dict['TRACTOGRAPHY']['random_seeds']['default'] = True
afb.generate_config(config_file, arg_dict, False)
afb.parse_config_run_afq(config_file, arg_dict, False)
def test_AFQ_data_waypoint():
"""
Test with some actual data again, this time for track segmentation
"""
tmpdir, bids_path, _ = get_temp_hardi()
t1_path = op.join(tmpdir.name, "T1.nii.gz")
nib.save(
afd.read_mni_template(mask=True, weight="T1w"),
t1_path)
bundle_names = ["SLF", "ARC", "CST", "FP"]
tracking_params = dict(odf_model="dti",
seed_mask=RoiMask(),
n_seeds=100,
random_seeds=True,
rng_seed=42)
segmentation_params = dict(filter_by_endpoints=False,
seg_algo="AFQ",
return_idx=True)
clean_params = dict(return_idx=True)
myafq = api.AFQ(bids_path=bids_path,
dmriprep='vistasoft',
bundle_info=bundle_names,
scalars=[
"dti_FA",
"dti_MD",
TemplateScalar("T1", t1_path)],
robust_tensor_fitting=True,
tracking_params=tracking_params,
segmentation_params=segmentation_params,
clean_params=clean_params)
# Replace the mapping and streamlines with precomputed:
file_dict = afd.read_stanford_hardi_tractography()
mapping = file_dict['mapping.nii.gz']
streamlines = file_dict['tractography_subsampled.trk']
streamlines = dts.Streamlines(
dtu.transform_tracking_output(
[s for s in streamlines if s.shape[0] > 100],
np.linalg.inv(myafq.dwi_affine["01"])))
mapping_file = op.join(
myafq.results_dir["01"],
'sub-01_ses-01_dwi_mapping_from-DWI_to_MNI_xfm.nii.gz')
nib.save(mapping, mapping_file)
reg_prealign_file = op.join(
myafq.results_dir["01"],
'sub-01_ses-01_dwi_prealign_from-DWI_to-MNI_xfm.npy')
np.save(reg_prealign_file, np.eye(4))
tgram = load_tractogram(myafq.bundles["01"], myafq.img["01"])
bundles = aus.tgram_to_bundles(
tgram, myafq.bundle_dict, myafq.img["01"])
npt.assert_(len(bundles['CST_L']) > 0)
# Test ROI exporting:
myafq.export_rois()
assert op.exists(op.join(
myafq.results_dir["01"],
'ROIs',
'sub-01_ses-01_dwi_desc-ROI-CST_R-1-include.json'))
# Test bundles exporting:
myafq.export_indiv_bundles()
assert op.exists(op.join(
myafq.results_dir["01"],
'bundles',
'sub-01_ses-01_dwi_space-RASMM_model-DTI_desc-det-AFQ-CST_L_tractography.trk')) # noqa
tract_profile_fname = myafq.profiles["01"]
tract_profiles = pd.read_csv(tract_profile_fname)
assert tract_profiles.shape == (500, 6)
myafq.tract_profile_plots
assert op.exists(op.join(
myafq.results_dir["01"],
'sub-01_ses-01_dwi_space-RASMM_model-DTI_desc-det-AFQ_dti_fa_profile_plots.png')) # noqa
assert op.exists(op.join(
myafq.results_dir["01"],
'sub-01_ses-01_dwi_space-RASMM_model-DTI_desc-det-AFQ_dti_md_profile_plots.png')) # noqa
# Before we run the CLI, we'll remove the bundles and ROI folders, to see
# that the CLI generates them
shutil.rmtree(op.join(myafq.results_dir["01"],
'bundles'))
shutil.rmtree(op.join(myafq.results_dir["01"],
'ROIs'))
os.remove(tract_profile_fname)
# Test the CLI:
print("Running the CLI:")
# Set up config to use the same parameters as above:
# ROI mask needs to be put in quotes in config
tracking_params = dict(odf_model="DTI",
seed_mask="RoiMask()",
n_seeds=100,
random_seeds=True,
rng_seed=42)
config = dict(BIDS=dict(bids_path=bids_path,
dmriprep='vistasoft'),
DATA=dict(
robust_tensor_fitting=True),
BUNDLES=dict(
bundle_info=bundle_names,
scalars=[
"dti_fa",
"dti_md",
f"TemplateScalar('T1', '{t1_path}')"]),
VIZ=dict(
viz_backend="plotly_no_gif"),
TRACTOGRAPHY=tracking_params,
SEGMENTATION=segmentation_params,
CLEANING=clean_params)
config_file = op.join(tmpdir.name, "afq_config.toml")
with open(config_file, 'w') as ff:
toml.dump(config, ff)
# save memory
results_dir = myafq.results_dir["01"]
del myafq
gc.collect()
cmd = "pyAFQ " + config_file
completed_process = subprocess.run(
cmd, shell=True, capture_output=True)
if completed_process.returncode != 0:
print(completed_process.stdout)
print(completed_process.stderr)
assert completed_process.returncode == 0
# The tract profiles should already exist from the CLI Run:
from_file = pd.read_csv(tract_profile_fname)
assert from_file.shape == (500, 6)
assert_series_equal(tract_profiles['dti_fa'], from_file['dti_fa'])
# Make sure the CLI did indeed generate these:
assert op.exists(op.join(
results_dir,
'ROIs',
'sub-01_ses-01_dwi_desc-ROI-CST_R-1-include.json'))
assert op.exists(op.join(
results_dir,
'bundles',
'sub-01_ses-01_dwi_space-RASMM_model-DTI_desc-det-AFQ-CST_L_tractography.trk')) # noqa
@pytest.mark.nightly_msmt_and_init
def test_afq_msmt():
tmpdir = nbtmp.InTemporaryDirectory()
afd.organize_cfin_data(path=tmpdir.name)
myafq = api.AFQ(bids_path=op.join(tmpdir.name, 'cfin_multib'),
dmriprep='dipy', tracking_params={"odf_model": "MSMT"})
npt.assert_equal(
op.split(myafq.streamlines["01"])[-1],
"sub-01_ses-01_dwi_space-RASMM_model-MSMT_desc-det_tractography.trk")
| 30.695493
| 97
| 0.597992
|
68c300426a76004ae9349584b482a764694e1f95
| 2,577
|
py
|
Python
|
pandashells/bin/p_config.py
|
timgates42/pandashells
|
4b565435a25ac713eeeacf28c3e5b52fe94530d8
|
[
"BSD-2-Clause-FreeBSD"
] | 878
|
2015-08-02T02:07:20.000Z
|
2022-01-15T19:06:47.000Z
|
pandashells/bin/p_config.py
|
timgates42/pandashells
|
4b565435a25ac713eeeacf28c3e5b52fe94530d8
|
[
"BSD-2-Clause-FreeBSD"
] | 44
|
2015-05-12T15:56:57.000Z
|
2021-01-13T20:58:29.000Z
|
pandashells/bin/p_config.py
|
timgates42/pandashells
|
4b565435a25ac713eeeacf28c3e5b52fe94530d8
|
[
"BSD-2-Clause-FreeBSD"
] | 31
|
2015-08-02T22:48:36.000Z
|
2021-01-13T20:54:58.000Z
|
#! /usr/bin/env python
# --- standard library imports
import os
import sys # noqa Not used in code, but patched in testing
import argparse
import textwrap
from pandashells.lib import config_lib
def main():
# --- read in the current configuration
default_dict = config_lib.get_config()
msg = textwrap.dedent(
"""
Sets the default IO and plotting behavior for the pandashells toolset
by creating/modifying a hidden configuration file (~/.pandashells)
-----------------------------------------------------------------------
Examples:
* Show the current configuration
p.config
* Set the configuration to "factory defaults"
p.config --force_defaults
* Set default input/output to assume white-space delimited columns
with no headers.
p.config --io_input_header noheader --io_input_type table
p.config --io_output_header noheader --io_output_type table
-----------------------------------------------------------------------
"""
)
# --- populate the arg parser with current configuration
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter, description=msg)
parser.add_argument('--force_defaults', action='store_true',
dest='force_defaults',
help='Force to default settings')
for tup in config_lib.CONFIG_OPTS:
msg = 'opts: {}'.format(str(tup[1]))
parser.add_argument('--%s' % tup[0], nargs=1, type=str,
dest=tup[0], metavar='',
default=[default_dict[tup[0]]],
choices=tup[1], help=msg)
# --- parse arguments
args = parser.parse_args()
# --- set the arguments to the current value of the arg parser
config_dict = {t[0]: t[1][0] for t in args.__dict__.items()
if t[0] not in ['force_defaults']}
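# Illustrative note (not part of the original script): each option above is
# registered with nargs=1, so argparse stores its value as a one-element
# list, which this comprehension unwraps. A made-up example of the
# transformation:
#   {'io_input_type': ['csv'], 'force_defaults': False}
#     -> {'io_input_type': 'csv'}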
if args.force_defaults:
cmd = 'rm {} 2>/dev/null'.format(config_lib.CONFIG_FILE_NAME)
os.system(cmd)
config_dict = config_lib.DEFAULT_DICT
config_lib.set_config(config_dict)
sys.stdout.write('\n Current Config\n')
sys.stdout.write(' ' + '-' * 40 + '\n')
for k in sorted(config_dict.keys()):
if k not in ['force_defaults']:
sys.stdout.write(
' {: <20} {}\n'.format(k + ':', repr(str(config_dict[k]))))
if __name__ == '__main__': # pragma: no cover
main()
| 34.824324
| 79
| 0.558401
|
d6253692f0974b7786b8c2a9b8287b6c04d7c1cd
| 10,345
|
py
|
Python
|
examples/billing/add_billing_setup.py
|
sidakwalia/gads
|
490514da86213c32cf26a43cfb5c479b5c3bb114
|
[
"Apache-2.0"
] | 285
|
2018-10-05T16:47:58.000Z
|
2022-03-31T00:58:39.000Z
|
examples/billing/add_billing_setup.py
|
sidakwalia/gads
|
490514da86213c32cf26a43cfb5c479b5c3bb114
|
[
"Apache-2.0"
] | 425
|
2018-09-10T13:32:41.000Z
|
2022-03-31T14:50:05.000Z
|
examples/billing/add_billing_setup.py
|
sidakwalia/gads
|
490514da86213c32cf26a43cfb5c479b5c3bb114
|
[
"Apache-2.0"
] | 369
|
2018-11-28T07:01:00.000Z
|
2022-03-28T09:53:22.000Z
|
#!/usr/bin/env python
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example creates a billing setup for a customer.
A billing setup is a link between a payments account and a customer. The new
billing setup can either reuse an existing payments account, or create a new
payments account with a given payments profile. Billing setups are applicable
for clients on monthly invoicing only. See here for details about applying for
monthly invoicing: https://support.google.com/google-ads/answer/2375377.
In the case of consolidated billing, a payments account is linked to the
manager account and is linked to a customer account via a billing setup.
"""
import argparse
from datetime import datetime, timedelta
import sys
from uuid import uuid4
from google.ads.googleads.client import GoogleAdsClient
from google.ads.googleads.errors import GoogleAdsException
def main(
client, customer_id, payments_account_id=None, payments_profile_id=None
):
"""The main method that creates all necessary entities for the example.
Args:
client: an initialized GoogleAdsClient instance.
customer_id: a client customer ID.
payments_account_id: payments account ID to attach to the new billing
setup. If provided it must be formatted as "1234-5678-9012-3456".
payments_profile_id: payments profile ID to attach to a new payments
account and to the new billing setup. If provided it must be
formatted as "1234-5678-9012".
"""
billing_setup = _create_billing_setup(
client, customer_id, payments_account_id, payments_profile_id
)
_set_billing_setup_date_times(client, customer_id, billing_setup)
billing_setup_operation = client.get_type("BillingSetupOperation")
client.copy_from(billing_setup_operation.create, billing_setup)
billing_setup_service = client.get_service("BillingSetupService")
response = billing_setup_service.mutate_billing_setup(
customer_id=customer_id, operation=billing_setup_operation
)
print(
"Added new billing setup with resource name "
f"{response.result.resource_name}"
)
def _create_billing_setup(
client, customer_id, payments_account_id=None, payments_profile_id=None
):
"""Creates and returns a new billing setup instance.
The new billing setup will have its payment details populated. One of the
payments_account_id or payments_profile_id must be provided.
Args:
client: an initialized GoogleAdsClient instance.
customer_id: a client customer ID.
payments_account_id: payments account ID to attach to the new billing
setup. If provided it must be formatted as "1234-5678-9012-3456".
payments_profile_id: payments profile ID to attach to a new payments
account and to the new billing setup. If provided it must be
formatted as "1234-5678-9012".
Returns:
A newly created BillingSetup instance.
"""
billing_setup = client.get_type("BillingSetup")
# Sets the appropriate payments account field.
if payments_account_id is not None:
# If a payments account ID has been provided, set the payments_account
# field to the full resource name of the given payments account ID.
# You can list available payments accounts via the
# PaymentsAccountService's ListPaymentsAccounts method.
billing_setup.payments_account = client.get_service(
"BillingSetupService"
).payments_account_path(customer_id, payments_account_id)
elif payments_profile_id is not None:
# Otherwise, create a new payments account by setting the
# payments_account_info field
# See https://support.google.com/google-ads/answer/7268503
# for more information about payments profiles.
billing_setup.payments_account_info.payments_account_name = (
f"Payments Account #{uuid4()}"
)
billing_setup.payments_account_info.payments_profile_id = (
payments_profile_id
)
return billing_setup
def _set_billing_setup_date_times(client, customer_id, billing_setup):
"""Sets the starting and ending date times for the new billing setup.
Queries the customer's account to see if there are any approved billing
setups. If there are any, the new billing setup starting date time is set to
one day after the last. If not, the billing setup is set to start
immediately. The ending date is set to one day after the starting date time.
Args:
client: an initialized GoogleAdsClient instance.
customer_id: a client customer ID.
billing_setup: the billing setup whose starting and ending date times
will be set.
"""
# The query to search existing approved billing setups in the end date time
# descending order. See get_billing_setup.py for a more detailed example of
# how to retrieve billing setups.
query = """
SELECT
billing_setup.end_date_time
FROM billing_setup
WHERE billing_setup.status = APPROVED
ORDER BY billing_setup.end_date_time DESC
LIMIT 1"""
ga_service = client.get_service("GoogleAdsService")
stream = ga_service.search_stream(customer_id=customer_id, query=query)
# Coercing the response iterator to a list causes the stream to be fully
# consumed so that we can easily access the last row in the request.
batches = list(stream)
# Checks if any results were included in the response.
if batches:
# Retrieves the ending_date_time of the last BillingSetup.
last_batch = batches[0]
last_row = last_batch.results[0]
last_ending_date_time = last_row.billing_setup.end_date_time
if not last_ending_date_time:
# A null ending date time indicates that the current billing setup
# is set to run indefinitely. Billing setups cannot overlap, so
# throw an exception in this case.
raise Exception(
"Cannot set starting and ending date times for the new billing "
"setup; the latest existing billing setup is set to run "
"indefinitely."
)
try:
# BillingSetup.end_date_time is a string that can be in the format
# %Y-%m-%d or %Y-%m-%d %H:%M:%S. This checks for the first format.
end_date_time_obj = datetime.strptime(
last_ending_date_time, "%Y-%m-%d"
)
except ValueError:
# If a ValueError is raised then the end_date_time string is in the
# second format that includes hours, minutes and seconds.
end_date_time_obj = datetime.strptime(
last_ending_date_time, "%Y-%m-%d %H:%M:%S"
)
# Sets the new billing setup start date to one day after the end date.
start_date = end_date_time_obj + timedelta(days=1)
else:
# If there are no BillingSetup objects to retrieve, the only acceptable
# start date time is today.
start_date = datetime.now()
billing_setup.start_date_time = start_date.strftime("%Y-%m-%d %H:%M:%S")
billing_setup.end_date_time = (start_date + timedelta(days=1)).strftime(
"%Y-%m-%d %H:%M:%S"
)
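# Illustrative sketch (not part of the original example): the date handling
# above accepts BillingSetup.end_date_time in either "%Y-%m-%d" or
# "%Y-%m-%d %H:%M:%S" form and starts the new setup one day later. The
# helper below shows that parsing logic in isolation; the sample date
# strings are made up.
def _parse_end_date_time_example(end_date_time):
    """Parses an end date time string in either supported format."""
    try:
        return datetime.strptime(end_date_time, "%Y-%m-%d")
    except ValueError:
        return datetime.strptime(end_date_time, "%Y-%m-%d %H:%M:%S")
# Both calls below would yield a new start date of 2021-01-02:
#   _parse_end_date_time_example("2021-01-01") + timedelta(days=1)
#   _parse_end_date_time_example("2021-01-01 00:00:00") + timedelta(days=1)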
if __name__ == "__main__":
# GoogleAdsClient will read the google-ads.yaml configuration file in the
# home directory if none is specified.
googleads_client = GoogleAdsClient.load_from_storage(version="v8")
parser = argparse.ArgumentParser(
description=("Creates a billing setup for a given customer.")
)
# The following argument(s) should be provided to run the example.
parser.add_argument(
"-c",
"--customer_id",
type=str,
required=True,
help="The Google Ads customer ID.",
)
# Creates a mutually exclusive argument group to ensure that only one of the
# following two arguments are given, otherwise it will raise an error.
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
"-a",
"--payments_account_id",
type=str,
help="Either a payments account ID or a payments profile ID must be "
"provided for the example to run successfully. "
"See: https://developers.google.com/google-ads/api/docs/billing/billing-setups#creating_new_billing_setups. "
"Provide an existing payments account ID to link to the new "
"billing setup. Must be formatted as '1234-5678-9012-3456'.",
)
group.add_argument(
"-p",
"--payments_profile_id",
type=str,
help="Either a payments account ID or a payments profile ID must be "
"provided for the example to run successfully. "
"See: https://developers.google.com/google-ads/api/docs/billing/billing-setups#creating_new_billing_setups. "
"Provide an existing payments profile ID to link to a new payments "
"account and the new billing setup. Must be formatted as: "
"'1234-5678-9012-3456'.",
)
args = parser.parse_args()
try:
main(
googleads_client,
args.customer_id,
args.payments_account_id,
args.payments_profile_id,
)
except GoogleAdsException as ex:
print(
f'Request with ID "{ex.request_id}" failed with status '
f'"{ex.error.code().name}" and includes the following errors:'
)
for error in ex.failure.errors:
print(f'\tError with message "{error.message}".')
if error.location:
for field_path_element in error.location.field_path_elements:
print(f"\t\tOn field: {field_path_element.field_name}")
sys.exit(1)
| 42.22449
| 117
| 0.688352
|
8c40a33af332b611f23e38da73b1bfd735e681f2
| 1,488
|
py
|
Python
|
pro1/interviews/admin.py
|
r3trd/StudentPortal
|
561da1c582d5ea921b29a26e7ff912736de23bca
|
[
"MIT"
] | 9
|
2018-12-03T12:51:25.000Z
|
2018-12-19T23:43:07.000Z
|
pro1/interviews/admin.py
|
r3trd/StudentPortal
|
561da1c582d5ea921b29a26e7ff912736de23bca
|
[
"MIT"
] | 97
|
2018-11-17T18:34:18.000Z
|
2018-12-29T15:34:50.000Z
|
pro1/interviews/admin.py
|
itsvrushabh/StudentPortal
|
e10f2aa7b8da021ae8a285160f64695ad5bc7a72
|
[
"MIT"
] | 34
|
2018-12-01T16:30:09.000Z
|
2019-01-09T16:51:04.000Z
|
from django.contrib import admin
from .models import Internship, Department, Placement
# Register your models here.
class InternshipAdmin(admin.ModelAdmin):
def get_status(self, obj):
return obj.get_status_display()
get_status.short_description = 'Status'
list_display = ('company_name',
'departments',
'job_designation',
'qualification_needed',
'last_date_to_apply',
'test_location',
'salary_offered',
'person_of_contact',
'email_id',
'phone_no',
'form_link',
'get_status',
'other_details',)
class PlacementAdmin(admin.ModelAdmin):
def get_status(self, obj):
return obj.get_status_display()
get_status.short_description = 'Status'
list_display = ('company_name',
'expected_salary',
'departments',
'job_designation',
'qualification_needed',
'last_date_to_apply',
'form_link',
'get_status',
'interview_or_test_location',
'additional_documents',
'additional_information',)
admin.site.register(Department)
admin.site.register(Internship, InternshipAdmin)
admin.site.register(Placement, PlacementAdmin)
| 31
| 53
| 0.539651
|
fb20f31148f1dc913016b1666b611154e87affc4
| 5,256
|
py
|
Python
|
thorpy/gamestools/basegrid.py
|
YannThorimbert/PyMap2D
|
c4ddb6bb4651f19b6ad1b4149f38e64de2daf120
|
[
"MIT"
] | 27
|
2018-04-05T13:06:22.000Z
|
2022-01-24T08:14:47.000Z
|
thorpy/gamestools/basegrid.py
|
YannThorimbert/PyMap2D
|
c4ddb6bb4651f19b6ad1b4149f38e64de2daf120
|
[
"MIT"
] | 6
|
2019-11-23T07:02:53.000Z
|
2021-04-12T19:08:35.000Z
|
thorpy/gamestools/basegrid.py
|
YannThorimbert/PyMap2D
|
c4ddb6bb4651f19b6ad1b4149f38e64de2daf120
|
[
"MIT"
] | 7
|
2018-11-20T01:01:41.000Z
|
2022-01-24T08:14:50.000Z
|
"""Module providing several classes for handling grid objects"""
class __GridIterator__(object):
def __init__(self, nx, ny, x, y):
self.x = x-1
self.y = y
self.nx = nx
self.ny = ny
def __next__(self):
if self.x == self.nx-1:
self.x = 0
self.y += 1
if self.y > self.ny-1:
raise StopIteration()
else:
return (self.x, self.y)
else:
self.x += 1
return (self.x, self.y)
def next(self): #for python2 compatibility
return self.__next__()
class BaseGrid(object):
def __init__(self, nx, ny, value=None, periodicity=(False,False)):
self.nx = nx
self.ny = ny
self.cells = [[value for y in range(ny)] for x in range(nx)]
self.default_value = value
self.periodicity = periodicity
self.min_nxy = min(self.nx, self.ny)
self.set_all = self.fill #alias
def copy(self):
grid = BaseGrid(self.nx, self.ny, value=self.default_value, periodicity=self.periodicity)
for x,y in self:
grid[x,y] = self[x,y]
return grid
def __len__(self):
"""Returns the number of cells contained on the grid."""
return self.nx * self.ny
def __getitem__(self, key):
x,y = key
if self.periodicity[0]:
x %= self.nx
if self.periodicity[1]:
y %= self.ny
return self.cells[x][y]
def __setitem__(self, key, value):
self.cells[key[0]][key[1]] = value
def __iter__(self, x=0, y=0):
"""Iterate over coordinates."""
return __GridIterator__(self.nx, self.ny, x, y)
def __repr__(self):
return str(self.cells)
def itercells(self):
"""Iterate over cell values"""
for x,y in self:
yield self[x,y]
def iterline(self, y):
for x in range(self.nx):
yield self[x,y]
def itercolumn(self, x):
for y in range(self.ny):
yield self[x,y]
def iterdiag_up(self, x, y):
n = min(self.nx-x, self.ny-y)  # possible optimization: avoid min?
## print("up", x, y, n)
for i in range(n):
## print(self[x+i,y+i], x+i, y+i)
yield self[x+i,y+i]
def iterdiag_down(self, x, y):
n = min(self.nx-x, y+1)  # possible optimization: avoid min?
## print("down", x, y, n)
for i in range(n):
## print(self[x+i,y-i], x+i, y-i)
yield self[x+i,y-i]
def fill(self, value):
for x,y in self:
self[x,y] = value
def is_inside(self, coord):
"""returns True if <coord> is contained into the domain (not pixels!)"""
return (0 <= coord[0] < self.nx) and (0 <= coord[1] < self.ny)
def shift_values_x(self, amount): # TODO: fill new cells (works only for |amount| = 1!)
if amount > 0: #shift to the right, new column on the left
for x in range(self.nx-1,amount-1,-1):
for y in range(self.ny):
self[x,y] = self[x-1,y]
for y in range(self.ny): # wrap around!
self[0,y] = self.default_value
else:
for x in range(self.nx+amount): #amount is negative!
for y in range(self.ny):
self[x,y] = self[x+1,y]
for y in range(self.ny): # wrap around!
self[self.nx-1,y] = self.default_value
def shift_values_y(self, amount): # TODO: fill new cells (works only for |amount| = 1!)
if amount > 0: # shift toward larger y, new row at y = 0
for y in range(self.ny-1,amount-1,-1):
for x in range(self.nx):
self[x,y] = self[x,y-1]
for x in range(self.nx): # wrap around!
self[x,0] = self.default_value
else:
for y in range(self.ny+amount): #amount is negative!
for x in range(self.nx):
self[x,y] = self[x,y+1]
for x in range(self.nx): # wrap around!
self[x,self.ny-1] = self.default_value
class DiagonalHelper(object):
def __init__(self, grid):
self.grid = grid
self.diags_up = [[None for y in range(grid.ny)] for x in range(grid.nx)]
self.diags_down = [[None for y in range(grid.ny)] for x in range(grid.nx)]
self._build_diags()
def _build_diags(self):
nx, ny = self.grid.nx, self.grid.ny
for x in range(nx): #coming from up and down lines
n = min(nx-x, ny)
for i in range(n):
self.diags_down[x+i][ny-1-i] = (x, ny-1, n)
self.diags_up[x+i][0+i] = (x, 0, n)
for y in range(ny-1): #down coming from left column
n = min(y+1, nx)
for i in range(n):
self.diags_down[0+i][y-i] = (0, y, n)
for y in range(1, ny): #up coming from left column
n = min(ny-y, nx)
for i in range(n):
self.diags_up[0+i][y+i] = (0, y, n)
def iterdiag_up(self, x, y):
x, y, n = self.diags_up[x][y]
for i in range(n):
yield self.grid[x+i,y+i]
def iterdiag_down(self, x, y):
x, y, n = self.diags_down[x][y]
for i in range(n):
yield self.grid[x+i,y-i]
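# Illustrative usage sketch (not part of the original module), using only the
# classes defined above; the grid size and values are arbitrary examples.
if __name__ == "__main__":
    grid = BaseGrid(4, 3, value=0, periodicity=(True, False))
    for x, y in grid:  # __iter__ yields (x, y) coordinates
        grid[x, y] = x + y
    print(grid[5, 1] == grid[1, 1])  # True: x wraps around (periodic in x)
    print(grid.is_inside((3, 2)))  # True: (3, 2) lies in the 4x3 domain
    print(list(grid.iterline(0)))  # values along the row y=0: [0, 1, 2, 3]
    diag = DiagonalHelper(grid)
    print(list(diag.iterdiag_up(0, 0)))  # up-diagonal starting at (0, 0)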
| 31.854545
| 91
| 0.513508
|
623b7eb30485feb237cc67ddbe76a21b631b0df1
| 1,656
|
py
|
Python
|
rh/apps/content/tests/test_contact_form.py
|
rapidpro/chpro-microsite
|
4e1d1210b49ec60ab0711d78235bf45eeb5c0275
|
[
"BSD-3-Clause"
] | null | null | null |
rh/apps/content/tests/test_contact_form.py
|
rapidpro/chpro-microsite
|
4e1d1210b49ec60ab0711d78235bf45eeb5c0275
|
[
"BSD-3-Clause"
] | 108
|
2018-01-30T15:26:18.000Z
|
2021-06-10T17:29:57.000Z
|
rh/apps/content/tests/test_contact_form.py
|
rapidpro/chpro-microsite
|
4e1d1210b49ec60ab0711d78235bf45eeb5c0275
|
[
"BSD-3-Clause"
] | null | null | null |
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import TestCase
class TestContactForm(TestCase):
def setUp(self):
self.url = reverse('contact_form')
self.data = {
'name': 'Lincoln Clay',
'email': 'lincoln.clay@cia.gov',
'message': "Family isn't who you're born with, it's who you die "
"for.",
}.copy()
def test_good(self):
response = self.client.post(self.url, self.data)
self.assertRedirects(
response, '/thank-you/', fetch_redirect_response=False)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0]
self.assertIn(self.data['name'], message.body)
self.assertIn(self.data['email'], message.body)
self.assertIn(self.data['message'], message.body)
def test_missing_data(self):
del self.data['message']
response = self.client.post(self.url, self.data, HTTP_REFERER='/origin/')
self.assertRedirects(
response, '/origin/', fetch_redirect_response=False)
def test_missing_data_no_referer(self):
del self.data['message']
response = self.client.post(self.url, self.data)
self.assertRedirects(
response, '/', fetch_redirect_response=False)
def test_missing_data_ajax(self):
del self.data['message']
response = self.client.post(
self.url, self.data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.content, b'{"message": ["This field is required."]}')
| 36
| 81
| 0.629227
|
61c4eabf11ea4a89412eaaa8e5b3b4cb831a6046
| 519
|
py
|
Python
|
imlib/IO/yaml.py
|
noisysky/imlib
|
625193be4a586d9040a48df9d51dbdd3a17c7d06
|
[
"MIT"
] | null | null | null |
imlib/IO/yaml.py
|
noisysky/imlib
|
625193be4a586d9040a48df9d51dbdd3a17c7d06
|
[
"MIT"
] | 6
|
2020-04-17T12:02:56.000Z
|
2020-05-12T15:20:18.000Z
|
imlib/IO/yaml.py
|
noisysky/imlib
|
625193be4a586d9040a48df9d51dbdd3a17c7d06
|
[
"MIT"
] | 4
|
2020-02-05T18:53:30.000Z
|
2022-02-21T18:50:14.000Z
|
import yaml
def read_yaml_section(yaml_file, section):
yaml_contents = open_yaml(yaml_file)
contents = yaml_contents[section]
return contents
def open_yaml(yaml_file):
with open(yaml_file) as f:
yaml_contents = yaml.load(f, Loader=yaml.FullLoader)
return yaml_contents
def save_yaml(yaml_contents, output_file, default_flow_style=False):
with open(output_file, "w") as outfile:
yaml.dump(
yaml_contents, outfile, default_flow_style=default_flow_style
)
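# Illustrative usage sketch (not part of the original module); the file name
# and contents below are made-up examples.
if __name__ == "__main__":
    contents = {"experiment": {"name": "demo", "n_trials": 3}}
    save_yaml(contents, "example.yml")
    print(read_yaml_section("example.yml", "experiment"))
    # expected output: {'name': 'demo', 'n_trials': 3}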
| 24.714286
| 73
| 0.71869
|
10406b139f731103e24033fb59263c2275b9416c
| 6,873
|
py
|
Python
|
jwql/website/apps/jwql/oauth.py
|
laurenmarietta/jwql
|
0edc027778da4232d8ca82abf3a8bf85fafd7e4e
|
[
"BSD-3-Clause"
] | null | null | null |
jwql/website/apps/jwql/oauth.py
|
laurenmarietta/jwql
|
0edc027778da4232d8ca82abf3a8bf85fafd7e4e
|
[
"BSD-3-Clause"
] | null | null | null |
jwql/website/apps/jwql/oauth.py
|
laurenmarietta/jwql
|
0edc027778da4232d8ca82abf3a8bf85fafd7e4e
|
[
"BSD-3-Clause"
] | null | null | null |
"""Provides an OAuth object for authentication of the ``jwql`` web app,
as well as decorator functions to require user authentication in other
views of the web application.
Authors
-------
- Matthew Bourque
- Christian Mesh
Use
---
This module is intended to be imported and used as such:
::
from .oauth import auth_info
from .oauth import auth_required
from .oauth import JWQL_OAUTH
@auth_info
def some_view(request):
pass
@auth_required
def login(request):
pass
References
----------
Much of this code was taken from the ``authlib`` documentation,
found here: ``http://docs.authlib.org/en/latest/client/django.html``
Dependencies
------------
The user must have a configuration file named ``config.json``
placed in the ``jwql/utils/`` directory.
"""
import os
import requests
from authlib.django.client import OAuth
from django.shortcuts import redirect, render
import jwql
from jwql.utils.constants import MONITORS
from jwql.utils.utils import get_base_url, get_config
def register_oauth():
"""Register the ``jwql`` application with the ``auth.mast``
authentication service.
Returns
-------
oauth : Object
An object containing methods to authenticate a user, provided
by the ``auth.mast`` service.
"""
# Get configuration parameters
client_id = get_config()['client_id']
client_secret = get_config()['client_secret']
auth_mast = get_config()['auth_mast']
# Register with auth.mast
oauth = OAuth()
client_kwargs = {'scope': 'mast:user:info'}
oauth.register(
'mast_auth',
client_id='{}'.format(client_id),
client_secret='{}'.format(client_secret),
access_token_url='https://{}/oauth/access_token?client_secret={}'.format(auth_mast, client_secret),
access_token_params=None,
refresh_token_url=None,
authorize_url='https://{}/oauth/authorize'.format(auth_mast),
api_base_url='https://{}/1.1/'.format(auth_mast),
client_kwargs=client_kwargs)
return oauth
JWQL_OAUTH = register_oauth()
def authorize(request):
"""Spawn the authentication process for the user
The authentication process involves retrieving an access token
from ``auth.mast`` and porting the data to a cookie.
Parameters
----------
request : HttpRequest object
Incoming request from the webpage
Returns
-------
HttpResponse object
Outgoing response sent to the webpage
"""
# Get auth.mast token
token = JWQL_OAUTH.mast_auth.authorize_access_token(request, headers={'Accept': 'application/json'})
# Determine domain
base_url = get_base_url()
if '127' in base_url:
domain = '127.0.0.1'
else:
domain = base_url.split('//')[-1]
# Set secure cookie parameters
cookie_args = {}
# cookie_args['domain'] = domain # Currently broken
# cookie_args['secure'] = True # Currently broken
cookie_args['httponly'] = True
# Set the cookie
response = redirect("/")
response.set_cookie("ASB-AUTH", token["access_token"], **cookie_args)
return response
def auth_info(fn):
"""A decorator function that will return user credentials along
with what is returned by the original function.
Parameters
----------
fn : function
The function to decorate
Returns
-------
user_info : function
The decorated function
"""
def user_info(request, **kwargs):
"""Store authenticated user credentials in a cookie and return
it. If the user is not authenticated, store no credentials in
the cookie.
Parameters
----------
request : HttpRequest object
Incoming request from the webpage
Returns
-------
fn : function
The decorated function
"""
cookie = request.COOKIES.get("ASB-AUTH")
# If user is authenticated, return user credentials
if cookie is not None:
response = requests.get(
'https://{}/info'.format(get_config()['auth_mast']),
headers={'Accept': 'application/json',
'Authorization': 'token {}'.format(cookie)})
response = response.json()
# If user is not authenticated, return no credentials
else:
response = {'ezid': None, "anon": True}
return fn(request, response, **kwargs)
return user_info
def auth_required(fn):
"""A decorator function that requires the given function to have
authentication through ``auth.mast`` set up.
Parameters
----------
fn : function
The function to decorate
Returns
-------
check_auth : function
The decorated function
"""
@auth_info
def check_auth(request, user, **kwargs):
"""Check if the user is authenticated through ``auth.mast``.
If not, perform the authorization.
Parameters
----------
request : HttpRequest object
Incoming request from the webpage
user : dict
A dictionary of user credentials
Returns
-------
fn : function
The decorated function
"""
# If user is currently anonymous, require a login
if user['ezid']:
return fn(request, user, **kwargs)
else:
template = 'not_authenticated.html'
context = {'inst': ''}
return render(request, template, context)
return check_auth
@auth_info
def login(request, user):
"""Spawn a login process for the user
The ``auth_required`` decorator is used to require that the user
authenticate through ``auth.mast``, then the user is redirected
back to the homepage.
Parameters
----------
request : HttpRequest object
Incoming request from the webpage
user : dict
A dictionary of user credentials.
Returns
-------
HttpResponse object
Outgoing response sent to the webpage
"""
# Redirect to oauth login
redirect_uri = os.path.join(get_base_url(), 'authorize')
return JWQL_OAUTH.mast_auth.authorize_redirect(request, redirect_uri)
def logout(request):
"""Spawn a logout process for the user
Upon logout, the user's ``auth.mast`` credentials are removed and
the user is redirected back to the homepage.
Parameters
----------
request : HttpRequest object
Incoming request from the webpage
user : dict
A dictionary of user credentials.
Returns
-------
HttpResponse object
Outgoing response sent to the webpage
"""
response = redirect("/")
response.delete_cookie("ASB-AUTH")
return response
| 25.175824
| 107
| 0.623018
|
3a2305a1ffb6fa26bac9b2fe84c9e450b86b9529
| 1,299
|
py
|
Python
|
normatrix/normatrix/plugged/nb_params.py
|
romainpanno/NorMatrix
|
33bbf02b1881853088c8350f9a3c9d22ba3aa704
|
[
"MIT"
] | null | null | null |
normatrix/normatrix/plugged/nb_params.py
|
romainpanno/NorMatrix
|
33bbf02b1881853088c8350f9a3c9d22ba3aa704
|
[
"MIT"
] | null | null | null |
normatrix/normatrix/plugged/nb_params.py
|
romainpanno/NorMatrix
|
33bbf02b1881853088c8350f9a3c9d22ba3aa704
|
[
"MIT"
] | null | null | null |
try:
from normatrix.source.file_parser import CFileParse
from normatrix.source.config import TypeLine
except ModuleNotFoundError:
from normatrix.normatrix.source.file_parser import CFileParse
from normatrix.normatrix.source.config import TypeLine
import re
reg = re.compile(r'^(?!.*=)(\w{1,} {0,1}){2,}\((.*?\n{0,1}){0,}?\) {0,1}\n{0,1}\{')
def get_only_func_decl(rest: str):
res = reg.match(rest)
if res is not None:
only_decl = rest[res.start():res.end()]
if "=" in only_decl:
return ''
return only_decl
return ''
def check(context, file: CFileParse) -> (int, int, list):
nb_error = 0
list_error = []
if file.basename.endswith('.h') or file.filepath.endswith("Makefile"):
return (0, 0, [])
for i, line in enumerate(file.sub_parsedline):
if line[0] == TypeLine.COMMENT:
continue
all_lines = file.sub_parsedline[i:]
rest_lines = "\n".join([x[1] for x in all_lines])
only_decl = get_only_func_decl(rest_lines)
only_decl = re.sub(r"\(\*\w*?\)\((.|\n)*?\)", "", only_decl)
n = only_decl.count(',') + 1
if n > 4:
list_error.append((i + 1, f"too many arguments ({n} > 4)"))
nb_error += 1
return (nb_error, 0, list_error)
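# Illustrative sketch (not part of the original module): get_only_func_decl()
# is meant to capture a C function definition header up to its opening brace,
# so that check() can count parameters via commas. The C fragment below is
# made up purely to exercise the regex.
if __name__ == "__main__":
    sample = "int add_numbers(int a, int b, int c, int d, int e)\n{\n    return a;\n}\n"
    decl = get_only_func_decl(sample)
    print(decl)  # expected: the declaration text up to and including '{'
    print(decl.count(',') + 1)  # naive parameter count, as used in check()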
| 33.307692
| 82
| 0.591224
|
37847b781c0aed0d06cf5a1c44379c076d9d7104
| 34,958
|
py
|
Python
|
ds_catalog_service/backends/schemaevaluation.py
|
TheDataStation/catalog-service
|
de7113288cc3e6d5b856656f8b7be4dfdf5de4be
|
[
"MIT"
] | null | null | null |
ds_catalog_service/backends/schemaevaluation.py
|
TheDataStation/catalog-service
|
de7113288cc3e6d5b856656f8b7be4dfdf5de4be
|
[
"MIT"
] | null | null | null |
ds_catalog_service/backends/schemaevaluation.py
|
TheDataStation/catalog-service
|
de7113288cc3e6d5b856656f8b7be4dfdf5de4be
|
[
"MIT"
] | null | null | null |
import pandas as pd
import datetime
from ds_catalog_service.api.base import CatalogService
from ds_catalog_service.backends.backend_api import NormalizedSQLiteBackend
class SchemaEvaluation:
def __init__(self, datafile, trim_num, num_reps):
self.df = self.convert_to_pd(datafile, trim_num)
self.trim_num = trim_num
self.num_reps = num_reps
self.version = 1
self.asset_idx = 1
self.normSQL = None
self.dvSQL = None
self.normGraph = None
self.dvGraph = None
self.inserts = [] # inserts we've performed
self.queries = [] # queries we've performed
self.concept_map = {'url': {'WhereProfile': 'access_path'},
'name': {'Asset': 'name'},
'alternateName': {'Asset': 'name'},
'description': {'WhatProfile': {'schema': 'instanceMeaning'}},
'variablesMeasured': {'WhatProfile': {'schema': 'variablesRecorded'}},
'measurementTechnique': {'HowProfile': {'schema': 'measurementTechnique'}},
'sameAs': {'Relationship': {'schema': 'sameImageAs'}},
'doi': {'WhereProfile': {'Source': {'SourceType': {'name': 'Image_Repo',
'description': 'A repository or website of images'},
'schema': 'doi'}}},
'identifier': {'WhereProfile': {'Source': {'SourceType': {'name': 'Image_Repo',
'description': 'A repository or website of images'},
'schema': 'identifier'}}},
'author': {'WhoProfile': {'schema': 'author'}},
'isAccessibleForFree': {
'WhereProfile': {'Source': {'SourceType': {'connector': 'Image_Repo',
'serde': 'none', 'datamodel': 'None'},
'schema': 'isAccessibleForFree'}}},
'dateModified': {'WhenProfile': 'Asset_timestamp'},
'distribution': {'WhereProfile': {'Configuration': {'schema': 'distribution'}}},
'spatialCoverage': {'WhatProfile': {'schema': 'spatialCoverage'}},
'provider': {'WhoProfile': {'schema': 'provider'}},
'funder': {'WhoProfile': {'schema': 'funder'}},
'temporalCoverage': {'WhatProfile': {'schema': 'temporalCoverage'}}}
def convert_to_pd(self, datafile, trim_num):
df = pd.read_csv(datafile,
delimiter=',', chunksize=trim_num,
dtype={'url': str, 'name': str,
'alternateName': str,
'description': str,
'variablesMeasured': str,
'measurementTechnique': str,
'sameAs': str,
'doi': str,
'identifier': str,
'author': str,
'isAccessibleForFree': str,
'dateModified': str,
'distribution': str,
'spatialCoverage': str,
'provider': str,
'funder': str,
'temporalCoverage': str})
# print(df.shape)
return df
def init_NSQLsetup(self):
self.normSQL = CatalogService(NormalizedSQLiteBackend('normalized_catalog.db'))
self.normSQL.insert_profile("User", {"name": "admin", "user_type": 1,
"version": 1, "timestamp": str(datetime.datetime.now()),
"schema": {"addr": {"home": "westlake", "company": "bank"},
"phone": 1234567}})
def insert_data_normalized_SQLite(self, chunk):
insertedAssetType = False
insertedRelType = False
insertedSourceType = False
self.init_NSQLsetup()
# convert all nan's to nulls
chunk = chunk.fillna('NULL')
for index, row in chunk.iterrows():
for key in self.concept_map:
i_val = row[key]
if i_val == 'NULL':
continue
# I guess we're really doing this...
# just implement the 17 insert queries...
if key == 'url':
self.normSQL.insert_profile('WhereProfile', {'access_path': i_val,
'configuration': None,
'source': None,
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'access_path': i_val,
'configuration': None,
'source': None,
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx}})
elif key == 'name':
if insertedAssetType:
self.normSQL.insert_profile('Asset', {'name': i_val,
'asset_type': 1,
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1})
self.inserts.append({'Asset': {'name': i_val,
'asset_type': 1,
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1}})
else:
# in this case, there's only one asset type, and that's an image
self.normSQL.insert_profile('AssetType', {'name': 'Image',
'description': 'A file consisting of bytes that represent pixels'})
self.inserts.append({'AssetType': {'name': 'Image',
'description': 'A file consisting of bytes that represent pixels'}})
insertedAssetType = True
self.normSQL.insert_profile('Asset', {'name': i_val,
'asset_type': 1,
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1})
self.inserts.append({'Asset': {'name': i_val,
'asset_type': 1,
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1}})
elif key == 'description':
self.normSQL.insert_profile('WhatProfile', {'schema': {'instanceMeaning': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhatProfile': {'schema': {'instanceMeaning': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx}})
elif key == 'variablesMeasured':
# print("i_val is: " + str(i_val))
print(i_val)
self.normSQL.insert_profile('WhatProfile', {'schema': {'variablesRecorded': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhatProfile': {'schema': {'variablesRecorded': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx}})
elif key == 'measurementTechnique':
self.normSQL.insert_profile('HowProfile', {'schema': {'measurementTechnique': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'HowProfile': {'schema': {'measurementTechnique': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'asset': self.asset_idx}})
elif key == 'sameAs':
# NOTE: this part highlights a very important problem the catalog service
# will have to do its best to solve: when we're only told that there should
# be a relationship between assets, but we're not given clear links to each,
# how do we establish the relationship? We need to find the asset that matches
# the user's descriptions, otherwise our relationship schema won't be useful.
if insertedRelType:
rel_key = self.normSQL.insert_profile('Relationship', {'schema': {'sameImageAs': i_val},
'version': self.version,
'timestamp': str(
datetime.datetime.now()),
'user': 1,
'relationship_type': 1})
self.inserts.append({'Relationship': {'schema': {'sameImageAs': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'relationship_type': 1}})
if rel_key is None:
print("Rel_key is None!")
print(i_val)
self.normSQL.insert_profile('Asset_Relationships',
{'asset': self.asset_idx,
'relationship': rel_key})
self.inserts.append({'Asset_Relationships':
{'asset': self.asset_idx,
'relationship': rel_key}})
else:
self.normSQL.insert_profile('RelationshipType', {'name': 'Identical Images',
'description': 'The images are of exactly the same thing.'})
self.inserts.append({'RelationshipType': {'name': 'Identical Images',
'description': 'The images are of exactly the same thing.'}})
insertedRelType = True
rel_key = self.normSQL.insert_profile('Relationship', {'schema': {'sameImageAs': i_val},
'version': self.version,
'timestamp': str(
datetime.datetime.now()),
'user': 1,
'relationship_type': 1})
self.inserts.append({'Relationship': {'schema': {'sameImageAs': i_val},
'version': self.version,
'timestamp': str(datetime.datetime.now()),
'user': 1,
'relationship_type': 1}})
if rel_key is None:
print("Rel_key is None!")
print(i_val)
self.normSQL.insert_profile('Asset_Relationships',
{'asset': self.asset_idx,
'relationship': rel_key})
self.inserts.append({'Asset_Relationships':
{'asset': self.asset_idx,
'relationship': rel_key}})
elif key == 'doi':
if insertedSourceType:
# we know there's only one kind of source here
# but the question is: should we assume every row comes from a distinct source,
# or should we assume they all come from the same source?
# again, if the name is all that's given, we don't know
# ...let's assume they're all different
source_key = self.normSQL.insert_profile('Source', {'name': 'PNG Repository',
'source_type': 1,
'schema': {'doi': i_val},
'user': 1,
'version': self.version,
'timestamp': datetime.datetime.now()})
self.normSQL.insert_profile('WhereProfile', {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
else:
self.normSQL.insert_profile('SourceType', {'connector': 'web browser',
'serde': 'PNG',
'datamodel': 'Regular Image'})
self.inserts.append({'SourceType': {'connector': 'web browser',
'serde': 'PNG',
'datamodel': 'Regular Image'}})
insertedSourceType = True
source_key = self.normSQL.insert_profile('Source', {'name': 'PNG Repository',
'source_type': 1,
'schema': {'doi': i_val},
'user': 1,
'version': self.version,
'timestamp': datetime.datetime.now()})
self.normSQL.insert_profile('WhereProfile', {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
elif key == 'identifier':
if insertedSourceType:
# we know there's only one kind of source here
# but the question is: should we assume every row comes from a distinct source,
# or should we assume they all come from the same source?
# again, if the name is all that's given, we don't know
# ...let's assume they're all different
source_key = self.normSQL.insert_profile('Source', {'name': 'PNG Repository',
'source_type': 1,
'schema': {'identifier': i_val},
'user': 1,
'version': self.version,
'timestamp': datetime.datetime.now()})
self.normSQL.insert_profile('WhereProfile', {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
else:
self.normSQL.insert_profile('SourceType', {'connector': 'web browser',
'serde': 'PNG',
'datamodel': 'Regular Image'})
self.inserts.append({'SourceType': {'connector': 'web browser',
'serde': 'PNG',
'datamodel': 'Regular Image'}})
insertedSourceType = True
source_key = self.normSQL.insert_profile('Source', {'name': 'PNG Repository',
'source_type': 1,
'schema': {'identifier': i_val},
'user': 1,
'version': self.version,
'timestamp': datetime.datetime.now()})
self.normSQL.insert_profile('WhereProfile', {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
elif key == 'author':
self.normSQL.insert_profile('WhoProfile', {'schema': {'author': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'write_user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhoProfile': {'schema': {'author': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'write_user': 1,
'asset': self.asset_idx}})
elif key == 'isAccessibleForFree':
if insertedSourceType:
# we know there's only one kind of source here
# but the question is: should we assume every row comes from a distinct source,
# or should we assume they all come from the same source?
# again, if the name is all that's given, we don't know
# ...let's assume they're all different
source_key = self.normSQL.insert_profile('Source', {'name': 'PNG Repository',
'source_type': 1,
'schema': {'isAccessibleForFree': i_val},
'user': 1,
'version': self.version,
'timestamp': datetime.datetime.now()})
self.normSQL.insert_profile('WhereProfile', {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
else:
self.normSQL.insert_profile('SourceType', {'connector': 'web browser',
'serde': 'PNG',
'datamodel': 'Regular Image'})
self.inserts.append({'SourceType': {'connector': 'web browser',
'serde': 'PNG',
'datamodel': 'Regular Image'}})
insertedSourceType = True
source_key = self.normSQL.insert_profile('Source', {'name': 'PNG Repository',
'source_type': 1,
'schema': {'isAccessibleForFree': i_val},
'user': 1,
'version': self.version,
'timestamp': datetime.datetime.now()})
self.normSQL.insert_profile('WhereProfile', {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'source': source_key,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
elif key == 'dateModified':
self.normSQL.insert_profile('WhenProfile', {'asset_timestamp': i_val,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhenProfile': {'asset_timestamp': i_val,
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
elif key == 'distribution':
self.normSQL.insert_profile('WhereProfile', {'configuration': {'distribution': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhereProfile': {'configuration': {'distribution': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
elif key == 'spatialCoverage':
self.normSQL.insert_profile('WhatProfile',
{'schema': {'spatialCoverage': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhatProfile': {'schema': {'spatialCoverage': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
elif key == 'provider':
self.normSQL.insert_profile('WhoProfile', {'schema': {'provider': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'write_user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhoProfile': {'schema': {'provider': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'write_user': 1,
'asset': self.asset_idx}})
elif key == 'funder':
self.normSQL.insert_profile('WhoProfile', {'schema': {'funder': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'write_user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhoProfile': {'schema': {'funder': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'write_user': 1,
'asset': self.asset_idx}})
elif key == 'temporalCoverage':
self.normSQL.insert_profile('WhatProfile',
{'schema': {'temporalCoverage': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx})
self.inserts.append({'WhatProfile': {'schema': {'temporalCoverage': i_val},
'version': self.version,
'timestamp': datetime.datetime.now(),
'user': 1,
'asset': self.asset_idx}})
self.asset_idx += 1
def insert_full_NSonce(self):
for chunk in self.df:
self.insert_data_normalized_SQLite(chunk)
if __name__ == "__main__":
schema_eval = SchemaEvaluation(
'/Users/psubramaniam/Documents/Fall2020/testcatalogdata/dataset_metadata_2020_08_17.csv',
100000, 1)
# schema_eval = Schema_Evaluation('/home/pranav/dataset_metadata_2020_08_17.csv',
# 1000, 1)
schema_eval.insert_full_NSonce()
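# Hedged sketch (added for illustration, not part of the original script): the
# Source/WhereProfile insertion pattern repeated for each metadata key above
# could be expressed as one helper. The only behaviour assumed here is that
# insert_profile returns the key of the new row, which is visible in the calls
# above; the literal values mirror the originals.
def _insert_source_and_where(norm_sql, inserts, version, asset_idx, schema):
    """Insert one Source row plus its WhereProfile, mirroring the pattern above."""
    source_key = norm_sql.insert_profile('Source', {'name': 'PNG Repository',
                                                    'source_type': 1,
                                                    'schema': schema,
                                                    'user': 1,
                                                    'version': version,
                                                    'timestamp': datetime.datetime.now()})
    where_profile = {'source': source_key,
                     'version': version,
                     'timestamp': datetime.datetime.now(),
                     'user': 1,
                     'asset': asset_idx}
    norm_sql.insert_profile('WhereProfile', where_profile)
    inserts.append({'WhereProfile': where_profile})
    return source_key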
avg_line_length: 75.340517 | max_line_length: 138 | alphanum_fraction: 0.322101

hexsha: a9fd4b4428b2a158a2e743476792960856061f5c | size: 8,209 | ext: py | lang: Python
max_stars_repo_path: anpcp/models/solver.py | max_stars_repo_name: netotz/alpha-neighbor-p-center-problem | max_stars_repo_head_hexsha: 9ebe2718b749dcfc07884063bc118734457e36a9 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 2 | max_stars_repo_stars_event_min_datetime: 2021-06-26T22:16:37.000Z | max_stars_repo_stars_event_max_datetime: 2022-02-04T16:44:38.000Z
max_issues_repo_path: anpcp/models/solver.py | max_issues_repo_name: netotz/alpha-neighbor-p-center-problem | max_issues_repo_head_hexsha: 9ebe2718b749dcfc07884063bc118734457e36a9 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 11 | max_issues_repo_issues_event_min_datetime: 2021-08-31T15:09:28.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-26T06:55:25.000Z
max_forks_repo_path: anpcp/models/solver.py | max_forks_repo_name: netotz/alpha-neighbor-p-center-problem | max_forks_repo_head_hexsha: 9ebe2718b749dcfc07884063bc118734457e36a9 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
from dataclasses import dataclass, field
import sys
import random
from typing import List, Sequence, Set, Tuple
from itertools import combinations, product
import timeit
import matplotlib.pyplot as plt
from models import Instance
@dataclass
class Solver:
@dataclass
class Solution:
_solver: 'Solver' = field(repr=False)
indexes: Set[int] = field(default_factory=set)
objective_function: int = field(init=False, default=sys.maxsize)
max_alphath: int = field(init=False, default=-1)
time: float = field(init=False, repr=False, default=-1)
def __post_init__(self):
if self.indexes and len(self.indexes) >= self._solver.alpha:
self.update_obj_func()
def set_random(self) -> None:
self.indexes = set(
random.sample(
self._solver.instance.indexes,
self._solver.p
)
)
self.update_obj_func()
def get_alphath(self, fromindex: int) -> Tuple[int, int]:
alphath = self._solver.alpha
for node, dist in self._solver.instance.sorted_dist[fromindex]:
if node in self.indexes:
alphath -= 1
if alphath == 0:
return node, dist
def eval_obj_func(self) -> Tuple[int, int]:
return max(
(
self.get_alphath(v)
for v in self._solver.instance.indexes - self.indexes
),
key=lambda a: a[1]
)
def update_obj_func(self) -> None:
self.max_alphath, self.objective_function = self.eval_obj_func()
instance: Instance
p: int
alpha: int
with_random_solution: bool = field(repr=False, default=False)
solution: Solution = field(init=False)
history: List[Solution] = field(init=False, repr=False, default_factory=list)
def __post_init__(self):
self.solution = Solver.Solution(self)
if self.with_random_solution:
self.solution.set_random()
def pdp(self, use_alpha_as_p: bool = False, beta: float = 0, update: bool = True) -> Solution:
solution = Solver.Solution(
self,
set(self.instance.get_farthest_indexes())
)
p = self.alpha if use_alpha_as_p else self.p
remaining = self.instance.indexes - solution.indexes
while len(solution.indexes) < p:
costs = [
(v, min(
self.instance.get_dist(v, s)
for s in solution.indexes
))
for v in remaining
]
min_cost = min(costs, key=lambda c: c[1])[1]
max_cost = max(costs, key=lambda c: c[1])[1]
candidates = [
v for v, c in costs
if c >= max_cost - beta * (max_cost - min_cost)
]
chosen = random.choice(candidates)
solution.indexes.add(chosen)
remaining.discard(chosen)
solution.update_obj_func()
if update:
self.solution = solution
return solution
def greedy(self, update: bool = True) -> Solution:
solution = self.pdp(use_alpha_as_p=True, update=False)
remaining = self.instance.indexes - solution.indexes
while len(solution.indexes) < self.p:
            index, dist = min(
                (
                    (
                        v,
                        # evaluate the tentative solution that adds v;
                        # eval_obj_func takes no arguments, so build a
                        # temporary Solution instead (TODO: refactor method)
                        Solver.Solution(self, solution.indexes | {v}).objective_function
                    )
                    for v in remaining
                ),
                key=lambda m: m[1]
            )
solution.indexes.add(index)
remaining.discard(index)
solution.update_obj_func()
if update:
self.solution = solution
return solution
def interchange(
self,
is_first: bool,
k: int = 1,
another_solution: Solution = None,
update: bool = True) -> Solution:
if another_solution:
best_solution = another_solution
update = False
else:
best_solution = self.solution
current_solution = best_solution
is_improved = True
while is_improved:
for selecteds in combinations(best_solution.indexes, k):
unselecteds = self.instance.indexes - best_solution.indexes
for indexes in combinations(unselecteds, k):
new_solution = Solver.Solution(
self,
best_solution.indexes - set(selecteds) | set(indexes)
)
if new_solution.objective_function < current_solution.objective_function:
current_solution = new_solution
if is_first:
break
is_improved = current_solution.objective_function < best_solution.objective_function
if is_improved:
best_solution = current_solution
# explore another neighborhood
break
if update:
self.solution = best_solution
return best_solution
def grasp(self, max_iters: int, beta: float = 0, update: bool = True) -> Set[int]:
'''
Applies the GRASP metaheuristic to the current solver.
`max_iters`: Maximum number of iterations until returning the best found solution.
`beta`: Value between 0 and 1 for the RCL in the constructive heuristic.
'''
best_solution = Solver.Solution(self)
i = 0
while i < max_iters:
start = timeit.default_timer()
current_solution = self.pdp(beta=beta, update=False)
current_solution = self.interchange(
is_first=True,
another_solution=current_solution
)
current_solution.time = timeit.default_timer() - start
self.history.append(current_solution)
if current_solution.objective_function < best_solution.objective_function:
best_solution = current_solution
i += 1
if update:
self.solution = best_solution
return best_solution
def plot(self, axis: bool = True) -> None:
fig, ax = plt.subplots()
clients = list()
facilities = list()
for v in self.instance.vertexes:
if v.index in self.solution.indexes:
facilities.append(v)
else:
clients.append(v)
ax.scatter(
[c.x for c in clients],
[c.y for c in clients],
color='tab:blue',
label='Demand points',
linewidths=0.3,
alpha=0.8,
edgecolors='black'
)
ax.scatter(
[f.x for f in facilities],
[f.y for f in facilities],
color='red',
label='Centers',
linewidths=0.3,
alpha=0.8,
edgecolors='black'
)
for c in clients:
fi, dist = self.solution.get_alphath(c.index)
facility = next(f for f in facilities if f.index == fi)
color = ('orange'
if fi == self.solution.max_alphath and
dist == self.solution.objective_function
else 'gray')
ax.plot(
(c.x, facility.x),
(c.y, facility.y),
color=color,
linestyle=':',
alpha=0.5
)
ax.legend(loc=(1.01, 0))
fig.set_dpi(250)
if not axis:
ax.set_axis_off()
plt.show()
def generate_solvers(
instances: Sequence[Instance],
p_percentages: Sequence[float],
alpha_values: Sequence[int]) -> List[Solver]:
return [
Solver(instance, int(instance.n * p), alpha)
for instance, p, alpha in product(instances, p_percentages, alpha_values)
]
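# Hedged usage sketch (added for illustration, not part of the original
# module). How an `Instance` is built is defined elsewhere in the package and
# is assumed here, so the loader call below is hypothetical:
#
#     instance = Instance(...)            # hypothetical: load/build an instance
#     solver = Solver(instance, p=5, alpha=2)
#     best = solver.grasp(max_iters=50, beta=0.2)
#     print(best.objective_function, best.indexes)
#     solver.plot()
#
# `generate_solvers` above builds the same objects in bulk, taking p as a
# fraction of the instance size for every (instance, p, alpha) combination.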
avg_line_length: 30.069597 | max_line_length: 100 | alphanum_fraction: 0.53149

hexsha: b5a015269051577062f7b2cf8e834f3ce06c42c7 | size: 2,913 | ext: py | lang: Python
max_stars_repo_path: Supplement/comparison_tools/plots_zircon.py | max_stars_repo_name: nsekhon91/latools | max_stars_repo_head_hexsha: 593bdb5f4874ec5a9692db440722bd1f8ce96246 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 10 | max_stars_repo_stars_event_min_datetime: 2019-01-25T01:37:14.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-14T11:27:01.000Z
max_issues_repo_path: Supplement/comparison_tools/plots_zircon.py | max_issues_repo_name: nsekhon91/latools | max_issues_repo_head_hexsha: 593bdb5f4874ec5a9692db440722bd1f8ce96246 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 51 | max_issues_repo_issues_event_min_datetime: 2017-04-26T12:45:48.000Z | max_issues_repo_issues_event_max_datetime: 2022-02-19T06:31:44.000Z
max_forks_repo_path: Supplement/comparison_tools/plots_zircon.py | max_forks_repo_name: nsekhon91/latools | max_forks_repo_head_hexsha: 593bdb5f4874ec5a9692db440722bd1f8ce96246 | max_forks_repo_licenses: ["MIT"] | max_forks_count: 10 | max_forks_repo_forks_event_min_datetime: 2017-05-23T22:11:47.000Z | max_forks_repo_forks_event_max_datetime: 2022-02-15T06:09:52.000Z
import re
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from .stats import fmt_RSS
from .plots import rangecalcx, bland_altman, get_panel_bounds
def fmt_el(el):
e = re.match('.*?([A-z]+).*?', el).groups()[0]
m = re.match('.*?([0-9]+).*?', el).groups()[0]
return e + m
def bland_altman_plots(df, rep_stats=None, els=None, c=(0,0,0,0.6)):
if els is None:
els = ['Li', 'Mg', 'Al', 'P', 'Ti', 'Y', 'La', 'Ce', 'Pr', 'Nd', 'Sm',
'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu', 'Hf', 'Pb', 'Th',
'U']
cols = 4
rows = len(els) // cols
bounds = [.05,.05,.9,.9]
p = 0.66
frame = [.25, .25, .75, .75]
baframe = [frame[0], frame[1], frame[2] * p, frame[3]]
rframe = [frame[0] + frame[2] * p * 1.02, frame[1], frame[2] * (1 - p * 1.02), frame[3]]
fig = plt.figure(figsize=(cols * 2.8, rows * 1.5))
axs = []
for i, e in enumerate(els):
# lax = axs.flatten()[i]
u = 'ppm'
row = i // cols
col = i % cols
lax = fig.add_axes(get_panel_bounds(row, col, bounds, rows, cols, baframe))
dax = fig.add_axes(get_panel_bounds(row, col, bounds, rows, cols, rframe))
axs.append([lax, dax])
dax.set_yticklabels([])
dax.set_xticklabels([])
lax.text(.03, .02, e, transform=lax.transAxes, ha='left', va='bottom')
x1 = df.loc[:, e + '_la'].values
x2 = df.loc[:, e + '_rd'].values
r = x2 - x1
ylim = rangecalcx(r, pad=0.5)
lax.set_ylim(ylim)
dax.set_ylim(ylim)
# draw Bland-Altman plots
if rep_stats is None:
CI = None
else:
CI = rep_stats[e][0]
bland_altman(x1, x2, interval=.75, indep_conf=CI, ax=lax, c=c)
# lax.set_ylabel(e + ' ('+ u + ')\nResidual')
if row == (rows - 1):
lax.set_xlabel('Mean')
else:
lax.set_xlabel('')
if col == 0:
lax.set_ylabel('Residual')
else:
lax.set_ylabel('')
# draw residual PDFs
# remove NaNs
r = r[~np.isnan(r)]
# calculate bins
bins = np.linspace(*ylim, 100)
# calculate KDEs
kde = stats.gaussian_kde(r, .4)
# draw KDEs
dax.fill_betweenx(bins, kde(bins), facecolor=c, alpha=0.5, edgecolor='k', lw=0.75)
# limits and horizontal line
dax.set_xlim([0, dax.get_xlim()[-1] * 1.1])
dax.axhline(0, ls='dashed', c='k', alpha=0.6, zorder=-1)
for ax in axs[-4:]:
ax[0].set_xlabel('[X]')
ax[1].set_xlabel('Resid.\nDens')
# lax.set_title(e[-3:], loc='left')
# # if lax.is_first_row() and lax.is_first_col():
# # lax.set_title('LAtools', loc='left')
# fig.tight_layout()
return fig, np.array(axs)
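if __name__ == '__main__':
    # Hedged usage sketch (added for illustration, not part of the original
    # module): bland_altman_plots() expects a DataFrame with paired
    # '<element>_la' and '<element>_rd' columns. The synthetic data and the
    # element subset below are assumptions, not values from the study.
    import pandas as pd
    rng = np.random.default_rng(0)
    demo_els = ['Li', 'Mg', 'Al', 'P']  # one full row of panels (cols = 4)
    demo = pd.DataFrame({
        f'{el}_{suffix}': rng.normal(10, 2, 50)
        for el in demo_els
        for suffix in ('la', 'rd')
    })
    fig, axs = bland_altman_plots(demo, els=demo_els)
    fig.savefig('bland_altman_demo.png', dpi=200)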
avg_line_length: 29.13 | max_line_length: 92 | alphanum_fraction: 0.500858

hexsha: 00ea2bde1c3ed7ae15ae2b5948dc36a0120948ab | size: 10,307 | ext: py | lang: Python
max_stars_repo_path: chrome/tools/webforms_aggregator_unittests.py | max_stars_repo_name: zealoussnow/chromium | max_stars_repo_head_hexsha: fd8a8914ca0183f0add65ae55f04e287543c7d4a | max_stars_repo_licenses: ["BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause"] | max_stars_count: 14,668 | max_stars_repo_stars_event_min_datetime: 2015-01-01T01:57:10.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-31T23:33:32.000Z
max_issues_repo_path: chrome/tools/webforms_aggregator_unittests.py | max_issues_repo_name: 1065672644894730302/Chromium | max_issues_repo_head_hexsha: 239dd49e906be4909e293d8991e998c9816eaa35 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: 395 | max_issues_repo_issues_event_min_datetime: 2020-04-18T08:22:18.000Z | max_issues_repo_issues_event_max_datetime: 2021-12-08T13:04:49.000Z
max_forks_repo_path: chrome/tools/webforms_aggregator_unittests.py | max_forks_repo_name: 1065672644894730302/Chromium | max_forks_repo_head_hexsha: 239dd49e906be4909e293d8991e998c9816eaa35 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: 5,941 | max_forks_repo_forks_event_min_datetime: 2015-01-02T11:32:21.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-31T16:35:46.000Z
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import subprocess
import tempfile
import unittest
# Same name as the aggregator module name.
import webforms_aggregator
logger = logging.getLogger(webforms_aggregator.__name__)
console = logging.StreamHandler()
logger.addHandler(console)
# Commenting out the following line will set logger level to default: WARNING
logger.setLevel(logging.INFO)
class WebformsAggregatorTest(unittest.TestCase):
"""Unit tests for the webforms_aggregator module."""
PORT1 = 8002
PORT2 = 8003
HOME_CONTENT = """
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" \
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>%s</title>
</head>
<body>
<h1>%s</h1>
<p>This is a mock site. Its mere purpose is to contribute towards testing \
the aggregator crawler.</p>
<ul>
<li><a href="%s">page1</a></li>
<li><a href="%s">page2</a></li>
<li><a href="%s">page3</a></li>
</ul>
<hr>
<p>
<a href="%s">sign in</a>
</p>
</body>
</html>
"""
SIMPLE_PAGE_CONTENT = """
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" \
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>%s</title>
</head>
<body>
<h1>%s</h1>
<p>%s</p>
<ul>
<li><a href="%s">%s</a></li>
<li><a href="%s">%s</a></li>
</ul>
<hr>
<p>
<a href="%s">return to home page</a>
</p>
</body>
</html>
"""
SIGNIN_CONTENT = """
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" \
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>%s</title>
</head>
<body>
<h1>Sign in!</h1>
<h3>%s</h3>
<form>
<label>User name: </label><input type="text"><br><br>
<label>password: </label><input type="password"><br><br>
<input type="submit" value="Sign in">
</form>
<hr>
<p><a href="%s">return to home page</a></p>
</body>
</html>
"""
REG_CONTENT = """
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" \
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>%s</title>
</head>
<body>
<h1>Create a user account!</h1>
<h3>Enter your data below:</h3>
<form method="get">
<label>First name: </label><input type="text"><br><br>
<label>Surname: </label><input type="text"><br><br>
<label>User name: </label><input type="text"><br><br>
<label>password: </label><input type="password"><br><br>
<label>retype password: </label><input type="password"><br><br>
<input type="submit" value="Register">
</form>
<hr>
<p><a href="%s">return to home page</a></p>
</body>
</html>
"""
def CreateMockSiteOne(self):
"""Site One has a registration form.
"""
self.files['site1_home'] = 'site1_index.html'
self.files['site1_page1'] = 'site1_page1.html'
self.files['site1_page2'] = 'site1_page2.html'
self.files['site1_page3'] = 'site1_page3.html'
self.files['site1_signin'] = 'site1_signin.html'
self.files['site1_reg'] = 'site1_register.html'
file_content = {}
file_content[self.files['site1_home']] = self.HOME_CONTENT % (
'Site One home page', 'Welcome to site one. It has a reg page!',
self.files['site1_page1'], self.files['site1_page2'],
self.files['site1_page3'], self.files['site1_signin'])
file_content[self.files['site1_page1']] = self.SIMPLE_PAGE_CONTENT % (
'Site One page 1',
'Page 1!', 'This is a useless page. It does almost nothing.',
self.files['site1_page2'], 'page 2', self.files['site1_page3'],
'page 3', self.files['site1_home'])
file_content[self.files['site1_page2']] = self.SIMPLE_PAGE_CONTENT % (
'Site One page 2', 'Page 2!',
'This is another useless page. It does almost what the page 1 does.',
self.files['site1_page1'], 'page 1', self.files['site1_page3'],
'page 3', self.files['site1_home'])
file_content[self.files['site1_page3']] = self.SIMPLE_PAGE_CONTENT % (
'Site One page 3', 'Page 3!',
"This is the last useless page. It doesn't do anything useful at all.",
self.files['site1_page1'], 'page 1', self.files['site1_page2'],
'page 2', self.files['site1_home'])
file_content[self.files['site1_signin']] = self.SIGNIN_CONTENT % (
'Site One signin',
'If you don\'t have a user account click <a href="%s">here</a>.' \
% self.files['site1_reg'],
self.files['site1_home'])
file_content[self.files['site1_reg']] = self.REG_CONTENT % (
'Site One signin', self.files['site1_home'])
for filename, content in file_content.iteritems():
f = open(filename, 'w')
try:
f.write(content)
finally:
f.close()
def CreateMockSiteTwo(self):
""" Site Two has no registration page."""
self.files['site2_home'] = 'site2_index.html'
self.files['site2_page1'] = 'site2_page1.html'
self.files['site2_page2'] = 'site2_page2.html'
self.files['site2_page3'] = 'site2_page3.html'
self.files['site2_signin'] = 'site2_signin.html'
file_content = {}
file_content[self.files['site2_home']] = self.HOME_CONTENT % (
'Site Two home page', 'Welcome to site two. It has no reg page!',
self.files['site2_page1'], self.files['site2_page2'],
self.files['site2_page3'], self.files['site2_signin'])
file_content[self.files['site2_page1']] = self.SIMPLE_PAGE_CONTENT % (
'Site Two page 1',
'Page 1!', 'This is a useless page. It does almost nothing.',
self.files['site2_page2'], 'page 2', self.files['site2_page3'],
'page 3', self.files['site2_home'])
file_content[self.files['site2_page2']] = self.SIMPLE_PAGE_CONTENT % (
'Site Two page 2', 'Page 2!',
'This is another useless page. It does almost what the page 1 does.',
self.files['site2_page1'], 'page 1', self.files['site2_page3'],
'page 3', self.files['site2_home'])
file_content[self.files['site2_page3']] = self.SIMPLE_PAGE_CONTENT % (
'Site Two page 3', 'Page 3!',
"This is the last useless page. It doesn't do anything useful at all.",
self.files['site2_page1'], 'page 1', self.files['site2_page2'],
'page 2', self.files['site2_home'])
file_content[self.files['site2_signin']] = self.SIGNIN_CONTENT % (
'Site Two signin', 'You cannot register online with this site.',
self.files['site2_home'])
for filename, content in file_content.iteritems():
f = open(filename, 'w')
try:
f.write(content)
finally:
f.close()
def setUp(self):
self.cwd = os.getcwdu()
self.temp_dir = tempfile.mkdtemp()
os.chdir(self.temp_dir)
self.files = {}
self.CreateMockSiteOne()
self.CreateMockSiteTwo()
self.files['cookie'] = 'test.cookie'
self.url1 = 'http://localhost:%s/%s' % (self.PORT1,
self.files['site1_home'])
self.url2 = 'http://localhost:%s/%s' % (self.PORT2,
self.files['site2_home'])
self.domain1 = 'localhost:%s' %self.PORT1
self.files['url'] = 'urls.txt'
url_file_handler = open(self.files['url'], 'w')
try:
url_file_handler.write('URLs to crawl:')
url_file_handler.write(os.linesep)
for url in (self.url1, self.url2):
url_file_handler.write(url)
url_file_handler.write(os.linesep)
finally:
url_file_handler.close()
command_line = 'python -u -m SimpleHTTPServer %s' % self.PORT1
args = command_line.split()
self.server1 = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
self.server1.stdout.readline() # Needed in order for the server to start up
command_line = 'python -u -m SimpleHTTPServer %s' % self.PORT2
args = command_line.split()
self.server2 = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
self.server2.stdout.readline() # Needed in order for the server to start up
def tearDown(self):
self.server1.terminate()
self.server2.terminate()
for filename in self.files.values():
if os.path.isfile(filename):
os.unlink(filename)
os.chdir(self.cwd)
os.rmdir(self.temp_dir)
def testRetrieverDownloadsPage(self):
"""Verify the retriever can download a page."""
r = webforms_aggregator.Retriever(self.url1, self.domain1,
self.files['cookie'])
self.assertTrue(r.Download(),
msg='Retriever could not download "%s"' % self.url1)
def testCrawlerFindsRegPageFromUrl(self):
"""Verify that the crawler is able to find a reg page from the given URL."""
c = webforms_aggregator.Crawler(self.url1)
self.assertTrue(
c.Run(), msg='Crawler could not find the reg page of "%s"' % self.url1)
def testCrawlerCannotFindNonExistentRegPageFromUrl(self):
"""Verify that the crawler won't find a non existent reg page
from the given URL."""
c = webforms_aggregator.Crawler(self.url2)
self.assertFalse(
c.Run(),
        msg='Crawler found a non-existent reg page of "%s"' % self.url2)
def testThreadedCrawlerFindsRegPageFromUrlsFile(self):
"""Verify the threaded crawler finds reg page from a file of URLs."""
c = webforms_aggregator.ThreadedCrawler(self.files['url'])
self.assertNotEqual(
c.Run(), -1,
msg='Threaded crawler could not find the reg page from the URLs file')
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(
WebformsAggregatorTest)
unittest.TextTestRunner(verbosity=2).run(suite)
avg_line_length: 34.471572 | max_line_length: 80 | alphanum_fraction: 0.620452

hexsha: 86783ebab803f1ebd38f54e9b64fcd86a3de7115 | size: 17,996 | ext: py | lang: Python
max_stars_repo_path: MAPS/Latant_Space_Constrained_VAEs/train_stats_latent_constrained.py | max_stars_repo_name: gmooers96/CBRAIN-CAM | max_stars_repo_head_hexsha: c5a26e415c031dea011d7cb0b8b4c1ca00751e2a | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: MAPS/Latant_Space_Constrained_VAEs/train_stats_latent_constrained.py | max_issues_repo_name: gmooers96/CBRAIN-CAM | max_issues_repo_head_hexsha: c5a26e415c031dea011d7cb0b8b4c1ca00751e2a | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: MAPS/Latant_Space_Constrained_VAEs/train_stats_latent_constrained.py | max_forks_repo_name: gmooers96/CBRAIN-CAM | max_forks_repo_head_hexsha: c5a26e415c031dea011d7cb0b8b4c1ca00751e2a | max_forks_repo_licenses: ["MIT"] | max_forks_count: 5 | max_forks_repo_forks_event_min_datetime: 2019-09-30T20:17:13.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-01T07:03:30.000Z
import math
import os
os.environ["CUDA_VISIBLE_DEVICES"]="2"
import argparse
import json
import tensorflow as tf
import tensorflow_probability as tfp
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import netCDF4
import keras
from keras import layers
from keras import backend as K
from keras.models import Model
from keras.losses import binary_crossentropy, mse
from keras.utils import plot_model
from keras.callbacks import ModelCheckpoint
class AnnealingCallback(keras.callbacks.Callback):
def __init__(self, epochs):
super(AnnealingCallback, self).__init__()
self.epochs = epochs
def on_epoch_begin(self, epoch, logs={}):
new_kl_weight = epoch/self.epochs
K.set_value(self.model.kl_weight, new_kl_weight)
print("Using updated KL Weight:", K.get_value(self.model.kl_weight))
class Sampling(keras.layers.Layer):
def call(self, inputs):
"""
TODO
"""
mean, log_var = inputs
return K.random_normal(tf.shape(log_var)) * K.exp(log_var/2) + mean
def kl_reconstruction_loss(latent_space_value, z_log_var, z_mean, z, vae, lambda_weight, latent_constraint_value):
def _kl_reconstruction_loss(true, pred):
"""
TODO
"""
print("z is",tf.shape(z))
print("z is",z.get_shape)
print("z is", K.int_shape(z))
print("z_log_var is",tf.shape(z_log_var))
print("z_log_var is",z_log_var.get_shape)
print("z_log_var is", K.int_shape(z_log_var))
print("z_mean is",tf.shape(z_mean))
print("z_mean is",z_mean.get_shape)
print("z_mean is", K.int_shape(z_mean))
print("true is",tf.shape(true))
print("true is",true.get_shape)
print("true is", K.int_shape(true))
true = tf.reshape(true, [-1, 128 * 30])
x_mu = pred[:, :128*30]
x_log_var = pred[:, 128*30:]
# Gaussian reconstruction loss
mse = -0.5 * K.sum(K.square(true - x_mu)/K.exp(x_log_var), axis=1)
var_trace = -0.5 * K.sum(x_log_var, axis=1)
log2pi = -0.5 * 128 * 30 * np.log(2 * np.pi)
log_likelihood = mse + var_trace + log2pi
#print("log likelihood shape", log_likelihood.shape)
# NOTE: We don't take a mean here, since we first want to add the KL term
reconstruction_loss = -log_likelihood
# KL divergence loss
kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
kl_loss = K.sum(kl_loss, axis=1)
kl_loss *= -0.5
print("true is",tf.shape(true))
print("true is",true.get_shape)
print("true is", K.int_shape(true))
print("x_mu is",tf.shape(x_mu))
print("x_mu is",x_mu.get_shape)
print("x_mu is", K.int_shape(x_mu))
print("true is",tf.shape(true))
print("true is",true.get_shape)
print("true is", K.int_shape(true))
wprime_wprime = tf.math.reduce_mean(true, axis=1, keepdims=False, name=None)
print("x_mu is",tf.shape(x_mu))
print("x_mu is",x_mu.get_shape)
print("x_mu is", K.int_shape(x_mu))
print("reconstruction_loss is",tf.shape(reconstruction_loss))
print("reconstruction_loss is",reconstruction_loss.get_shape)
print("reconstruction_loss is", K.int_shape(reconstruction_loss))
print("kl_loss is",tf.shape(kl_loss))
print("kl_loss is",kl_loss.get_shape)
print("kl_loss is", K.int_shape(kl_loss))
#wsum = tf.math.reduce_mean(wprime_wprime, axis=0, keepdims=False, name=None)
z_0 = z[:,0]
print("z_0 is",tf.shape(z_0))
print("z_0 is",z_0.get_shape)
print("z_0 is", K.int_shape(z_0))
theta_0 = tf.convert_to_tensor(1.0, dtype=None, dtype_hint=None, name=None)
linear_loss = latent_constraint_value*(wprime_wprime-theta_0*z_0)
print("linear_loss is",tf.shape(linear_loss))
print("linear_loss is",linear_loss.get_shape)
print("linear_loss is", K.int_shape(linear_loss))
#print(gsdgsgs)
#Frobenius_norm = K.sum(Frobenius_norm, axis = 1)
#####################################################################################
#return K.mean(reconstruction_loss + vae.kl_weight*kl_loss + lambda_weight*Frobenius_norm)
#return K.mean(reconstruction_loss + vae.kl_weight*kl_loss+linear_loss)
return K.mean(reconstruction_loss + vae.kl_weight*kl_loss)
return _kl_reconstruction_loss
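# Note (added): the closure above returns, per batch,
#   mean( -log N(x | x_mu, exp(x_log_var)) + kl_weight * KL(q(z|x) || N(0, I)) ),
# i.e. a negative ELBO with an annealed KL weight; the Frobenius-norm and the
# linear latent-constraint terms are computed but commented out of the
# returned loss in this version.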
def kl(z_log_var, z_mean):
def _kl(true, pred):
"""
TODO
"""
kl_loss = 1 + z_log_var - K.square(z_mean) - K.exp(z_log_var)
kl_loss = K.sum(kl_loss, axis=-1)
kl_loss *= -0.5
return K.mean(kl_loss)
return _kl
def reconstruction(true, pred):
"""
TODO
"""
true = tf.reshape(true, [-1, 128 * 30])
x_mu = pred[:, :128*30]
x_log_var = pred[:, 128*30:]
mse = -0.5 * K.sum(K.square(true - x_mu)/K.exp(x_log_var), axis=1)
var_trace = -0.5 * K.sum(x_log_var, axis=1)
log2pi = -0.5 * 128 * 30 * np.log(2 * np.pi)
log_likelihood = mse + var_trace + log2pi
print("log likelihood shape", log_likelihood.shape)
return K.mean(-log_likelihood)
def constrainer(z_log_var, z_mean, lambda_weight):
def _constrainer(true, pred):
true = tf.reshape(true, [-1, 128 * 30])
x_mu = pred[:, :128*30]
covariance_truth = tfp.stats.covariance(true)
covariance_prediction = tfp.stats.covariance(x_mu)
Frobenius_norm = tf.norm(covariance_prediction-covariance_truth, ord="euclidean")
return lambda_weight*Frobenius_norm
#return 1000000.0*Frobenius_norm
return _constrainer
def linear_regression(latent_space_value, z_log_var, z_mean, z, latent_constraint_value):
def _linear_regression(true, pred):
true = tf.reshape(true, [-1, 128 * 30])
z_0 = z[:,0]
wprime_wprime = tf.math.reduce_mean(true, axis=1, keepdims=False, name=None)
theta_0 = tf.convert_to_tensor(1.0, dtype=None, dtype_hint=None, name=None)
linear_loss = latent_constraint_value*(wprime_wprime-theta_0*z_0)
return K.mean(linear_loss)
return _linear_regression
def encoder_gen(input_shape: tuple, encoder_config: dict, id):
"""
Create the architecture for the VAE encoder.
"""
class EncoderResult():
pass
encoder_result = EncoderResult()
# Construct VAE Encoder layers
inputs = keras.layers.Input(shape=[input_shape[0], input_shape[1], 1])
zero_padded_inputs = keras.layers.ZeroPadding2D(padding=(1, 0))(inputs)
print("shape of input after padding", inputs.shape)
z = keras.layers.convolutional.Conv2D(
encoder_config["conv_1"]["filter_num"],
tuple(encoder_config["conv_1"]["kernel_size"]),
padding='same',
activation=encoder_config["activation"],
strides=encoder_config["conv_1"]["stride"]
)(zero_padded_inputs)
print("shape after first convolutional layer", z.shape)
z = keras.layers.convolutional.Conv2D(
encoder_config["conv_2"]["filter_num"],
tuple(encoder_config["conv_2"]["kernel_size"]),
padding='same',
activation=encoder_config["activation"],
strides=encoder_config["conv_2"]["stride"]
)(z)
print("shape after second convolutional layer", z.shape)
z = keras.layers.convolutional.Conv2D(
encoder_config["conv_3"]["filter_num"],
tuple(encoder_config["conv_3"]["kernel_size"]),
padding='same',
activation=encoder_config["activation"],
strides=encoder_config["conv_3"]["stride"]
)(z)
print("shape after third convolutional layer", z.shape)
z_mean = keras.layers.convolutional.Conv2D(
encoder_config["conv_mu"]["filter_num"],
tuple(encoder_config["conv_mu"]["kernel_size"]),
padding='same',
strides=encoder_config["conv_mu"]["stride"]
)(z)
z_log_var = keras.layers.convolutional.Conv2D(
encoder_config["conv_log_var"]["filter_num"],
tuple(encoder_config["conv_log_var"]["kernel_size"]),
padding='same',
strides=encoder_config["conv_log_var"]["stride"]
)(z)
z_mean = keras.layers.Flatten()(z_mean)
z_log_var = keras.layers.Flatten()(z_log_var)
print("z mean shape", z_mean._keras_shape)
print("z log var shape", z_log_var._keras_shape)
z = Sampling()([z_mean, z_log_var])
# Instantiate Keras model for VAE encoder
vae_encoder = keras.Model(inputs=[inputs], outputs=[z_mean, z_log_var, z])
plot_model(vae_encoder, to_file='./model_graphs/model_diagrams/encoder_{}.png'.format(id), show_shapes=True)
# Package up everything for the encoder
encoder_result.inputs = inputs
encoder_result.z_mean = z_mean
encoder_result.z_log_var = z_log_var
encoder_result.z = z
encoder_result.vae_encoder = vae_encoder
return encoder_result
def decoder_gen(
original_input: tuple,
decoder_config: dict
):
"""
Create the architecture for the VAE decoder
"""
decoder_inputs = keras.layers.Input(shape=[decoder_config["latent_dim"]])
print("decoder_inputs", decoder_inputs._keras_shape)
# Reshape input to be an image
#for original case - for latent space size 64 - config 38
#x = keras.layers.Reshape((2, 8, 4))(decoder_inputs)
#superior for 1024 - works best - config 35
x = keras.layers.Reshape((decoder_config["latent_reshape"]["dim_1"], decoder_config["latent_reshape"]["dim_2"], decoder_config["latent_reshape"]["dim_3"]))(decoder_inputs)
#for foster arch - config 34
#x = keras.layers.Reshape((2, 8, 32))(decoder_inputs)
x = keras.layers.convolutional.Conv2DTranspose(
decoder_config["conv_t_0"]["filter_num"],
tuple(decoder_config["conv_t_0"]["kernel_size"]),
padding='same',
activation=decoder_config["activation"],
strides=decoder_config["conv_t_0"]["stride"]
)(x)
    # Start transpose convolutional layers that upsample the image
print("shape at beginning of decoder", x.shape)
x = keras.layers.Conv2DTranspose(
decoder_config["conv_t_1"]["filter_num"],
tuple(decoder_config["conv_t_1"]["kernel_size"]),
padding='same',
activation=decoder_config["activation"],
strides=decoder_config["conv_t_1"]["stride"]
)(x)
print("shape after first convolutional transpose layer", x._keras_shape)
x = keras.layers.Conv2DTranspose(
decoder_config["conv_t_2"]["filter_num"],
tuple(decoder_config["conv_t_2"]["kernel_size"]),
padding='same',
strides=decoder_config["conv_t_2"]["stride"],
activation=decoder_config["activation"]
)(x)
print("shape after second convolutional transpose layer", x._keras_shape)
x_mu = keras.layers.Conv2DTranspose(
decoder_config["conv_mu"]["filter_num"],
tuple(decoder_config["conv_mu"]["kernel_size"]),
padding='same',
strides=decoder_config["conv_mu"]["stride"],
activation=decoder_config["conv_mu"]["activation"]
)(x)
print("shape after convolutional mu layer", x_mu._keras_shape)
x_log_var = keras.layers.Conv2DTranspose(
decoder_config["conv_log_var"]["filter_num"],
tuple(decoder_config["conv_log_var"]["kernel_size"]),
padding='same',
strides=decoder_config["conv_log_var"]["stride"],
activation=decoder_config["conv_log_var"]["activation"]
)(x)
print("shape after convolutional log var layer", x_mu._keras_shape)
x_mu = keras.layers.Cropping2D(cropping=(1, 0))(x_mu)
print("shape after cropping", x_mu._keras_shape)
x_log_var = keras.layers.Cropping2D(cropping=(1, 0))(x_log_var)
print("shape after cropping", x_log_var._keras_shape)
x_mu = keras.layers.Flatten()(x_mu)
x_log_var = keras.layers.Flatten()(x_log_var)
x_mu_log_var = keras.layers.Concatenate(axis=1)([x_mu, x_log_var])
variational_decoder = keras.Model(inputs=[decoder_inputs], outputs=[x_mu_log_var])
return variational_decoder
def plot_training_losses(h, id):
"""
Plot training loss graphs for
(1) KL term
(2) Reconstruction term
(3) Total ELBO loss
"""
hdict = h.history
print(hdict)
train_reconstruction_losses = hdict['reconstruction']
valid_reconstruction_losses = hdict['val_reconstruction']
kl_train_losses = hdict['_kl']
kl_valid_losses = hdict['val__kl']
constraint_train_losses = hdict['_linear_regression']
constraint_valid_losses = hdict['val__linear_regression']
total_train_losses = hdict['_kl_reconstruction_loss']
total_valid_losses = hdict['val__kl_reconstruction_loss']
epochs = range(1, len(train_reconstruction_losses) + 1)
fig, (ax1, ax2, ax3, ax4) = plt.subplots(1, 4, figsize=(12.8, 4.8))
# Plot combined loss
ax1.plot(epochs, total_train_losses, 'b', label='Train')
ax1.plot(epochs, total_valid_losses, 'r', label='Valid')
ax1.set(xlabel="Epochs", ylabel="Loss")
ax1.legend(prop={'size': 10})
ax1.set_title("Combined Loss")
# Plot KL
ax2.plot(epochs, kl_train_losses, 'b', label='Train')
ax2.plot(epochs, kl_valid_losses, 'r', label='Valid')
ax2.set(xlabel="Epochs", ylabel="Loss")
ax2.legend(prop={'size': 10})
ax2.set_title("KL Loss")
# Plot reconstruction loss
ax3.plot(epochs, train_reconstruction_losses, 'b', label='Train')
ax3.plot(epochs, valid_reconstruction_losses, 'r', label='Valid')
ax3.set(xlabel="Epochs", ylabel="Loss")
ax3.legend(prop={'size': 10})
ax3.set_title("Reconstruction Loss")
ax3.set_ylim(-25000, 10000)
# Plot constraint loss
ax4.plot(epochs, constraint_train_losses, 'b', label='Train')
ax4.plot(epochs, constraint_valid_losses, 'r', label='Valid')
ax4.set(xlabel="Epochs", ylabel="Loss")
ax4.legend(prop={'size': 10})
ax4.set_title("Semisupervized Linear Term")
plt.tight_layout()
plt.savefig('./model_graphs/losses/model_losses_{}.png'.format(id))
def main():
args = argument_parsing()
print("Command line args:", args)
f = open("./model_config/config_{}.json".format(args.id))
model_config = json.load(f)
f.close()
train_data = np.load(model_config["data"]["training_data_path"])
test_data = np.load(model_config["data"]["test_data_path"])
img_width = train_data.shape[1]
img_height = train_data.shape[2]
print("Image shape:", img_width, img_height)
# Construct VAE Encoder
encoder_result = encoder_gen((img_width, img_height), model_config["encoder"], args.id)
# Construct VAE Decoder
vae_decoder = decoder_gen(
(img_width, img_height),
model_config["decoder"]
)
plot_model(vae_decoder, to_file='./model_graphs/model_diagrams/decoder_{}.png'.format(args.id), show_shapes=True)
_, _, z = encoder_result.vae_encoder(encoder_result.inputs)
x_mu_log_var = vae_decoder(z)
vae = keras.Model(inputs=[encoder_result.inputs], outputs=[x_mu_log_var])
plot_model(vae, to_file='./model_graphs/model_diagrams/full_vae_{}.png'.format(args.id), show_shapes=True)
vae.kl_weight = K.variable(model_config["kl_weight"])
# Specify the optimizer
optimizer = keras.optimizers.Adam(lr=model_config['optimizer']['lr'])
stat_weight = model_config['contraint_weight']['lambda']
latent_weight = model_config['contraint_weight']['latent_constraint']
latent_size = model_config['encoder']['latent_dim']
# Compile model
vae.compile(
# loss=reconstruction,
loss=kl_reconstruction_loss(
latent_size,
encoder_result.z_log_var,
encoder_result.z_mean,
encoder_result.z,
vae,
stat_weight,
latent_weight
),
optimizer=optimizer,
metrics=[
reconstruction,
kl(
encoder_result.z_log_var,
encoder_result.z_mean
),
kl_reconstruction_loss(
latent_size,
encoder_result.z_log_var,
encoder_result.z_mean,
encoder_result.z,
vae,
stat_weight,
latent_weight
),
linear_regression(
latent_size,
encoder_result.z_log_var,
encoder_result.z_mean,
encoder_result.z,
latent_weight
)
]
)
vae.summary()
train_data = train_data.reshape(train_data.shape+(1,))
test_data = test_data.reshape(test_data.shape+(1,))
print("train data shape", train_data.shape)
print("test data shape", test_data.shape)
checkpoint = ModelCheckpoint(
'./models/model_{}.th'.format(args.id),
monitor='val_loss',
verbose=1,
save_best_only=True,
save_weights_only=True
)
callbacks_list = [checkpoint]
if model_config["annealing"]:
kl_weight_annealing = AnnealingCallback(model_config["train_epochs"])
callbacks_list.append(kl_weight_annealing)
h = vae.fit(
x=train_data,
y=train_data,
epochs=model_config["train_epochs"],
batch_size=model_config["batch_size"],
validation_data=[test_data, test_data],
callbacks=callbacks_list
)
plot_training_losses(h, args.id)
def argument_parsing():
parser = argparse.ArgumentParser()
parser.add_argument('--id', type=int, help='This option specifies the config file to use to construct and train the VAE.')
args = parser.parse_args()
return args
if __name__ == "__main__":
main()
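# Hedged reference (added for illustration): main() assumes a JSON config at
# ./model_config/config_<id>.json containing at least the keys read above.
# The placeholder values below are assumptions, not settings from the original
# experiments:
#
# {
#   "data": {"training_data_path": "...", "test_data_path": "..."},
#   "encoder": {"activation": "relu", "latent_dim": 1024,
#               "conv_1": {"filter_num": 32, "kernel_size": [3, 3], "stride": 2},
#               "conv_2": {...}, "conv_3": {...},
#               "conv_mu": {...}, "conv_log_var": {...}},
#   "decoder": {"latent_dim": 1024, "activation": "relu",
#               "latent_reshape": {"dim_1": 8, "dim_2": 32, "dim_3": 4},
#               "conv_t_0": {...}, "conv_t_1": {...}, "conv_t_2": {...},
#               "conv_mu": {...}, "conv_log_var": {...}},
#   "optimizer": {"lr": 0.001},
#   "contraint_weight": {"lambda": 1.0, "latent_constraint": 1.0},
#   "kl_weight": 0.0,
#   "annealing": true,
#   "train_epochs": 100,
#   "batch_size": 128
# }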
avg_line_length: 35.425197 | max_line_length: 175 | alphanum_fraction: 0.644365