| column | type | range / notes |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 - 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 - 209 |
| max_stars_repo_name | string | length 5 - 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 - 10 |
| max_stars_count | int64 | 1 - 191k, nullable (⌀) |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_path | string | length 4 - 209 |
| max_issues_repo_name | string | length 5 - 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 - 10 |
| max_issues_count | int64 | 1 - 67k, nullable (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_path | string | length 4 - 209 |
| max_forks_repo_name | string | length 5 - 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 - 10 |
| max_forks_count | int64 | 1 - 105k, nullable (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable (⌀) |
| content | string | length 4 - 1.02M |
| avg_line_length | float64 | 1.07 - 66.1k |
| max_line_length | int64 | 4 - 266k |
| alphanum_fraction | float64 | 0.01 - 1 |
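The rows that follow use this schema verbatim. As a quick illustration of working with it (a minimal sketch, assuming the dump has been loaded into a pandas DataFrame named `df` with exactly these column names; none of the names below beyond the schema columns come from the dataset itself):

```python
import pandas as pd


def mit_python_under_10kb(df: pd.DataFrame) -> pd.DataFrame:
    """Filter the dump to small, MIT-licensed Python files with at least one star event."""
    mask = (
        (df["ext"] == "py")
        & (df["size"] < 10_000)
        & df["max_stars_count"].notna()
        & df["max_stars_repo_licenses"].apply(lambda licenses: "MIT" in licenses)
    )
    columns = ["max_stars_repo_name", "max_stars_repo_path", "size",
               "max_stars_count", "avg_line_length", "alphanum_fraction"]
    # Most-starred files first; only the selected metadata columns are kept.
    return df.loc[mask, columns].sort_values("max_stars_count", ascending=False)
```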
hexsha ec6726113587e1a0aef7d4b9d7aedb437406729a | size 1,295 | ext py | lang Python
repo CreativeWurks/emailerpro @ 5f8d668d1b98f5add8123794a1802b82381560eb | path colossus/apps/lists/mixins.py | licenses ["MIT"]
stars 372 (2018-08-13T20:51:32.000Z to 2022-03-21T12:55:58.000Z) | issues 30 (2018-08-13T19:34:17.000Z to 2022-03-20T21:28:49.000Z) | forks 117 (2018-08-13T21:54:42.000Z to 2022-03-24T16:45:48.000Z)
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.views.generic.base import ContextMixin
from colossus.apps.subscribers.constants import TemplateKeys
from colossus.apps.subscribers.models import SubscriptionFormTemplate
from .models import MailingList
class MailingListMixin(ContextMixin):
__mailing_list = None
@property
def mailing_list(self):
if self.__mailing_list is None:
self.__mailing_list = get_object_or_404(MailingList, pk=self.kwargs.get('pk'))
return self.__mailing_list
def get_context_data(self, **kwargs):
if 'menu' not in kwargs:
kwargs['menu'] = 'lists'
if 'mailing_list' not in kwargs:
kwargs['mailing_list'] = self.mailing_list
return super().get_context_data(**kwargs)
class FormTemplateMixin:
def get_object(self):
mailing_list_id = self.kwargs.get('pk')
key = self.kwargs.get('form_key')
if key not in TemplateKeys.LABELS.keys():
raise Http404
form_template, created = SubscriptionFormTemplate.objects.get_or_create(
key=key,
mailing_list_id=mailing_list_id
)
if created:
form_template.load_defaults()
return form_template
avg_line_length 31.585366 | max_line_length 90 | alphanum_fraction 0.688803
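For orientation, a minimal sketch of how the two mixins above are meant to be combined with Django's generic views (the view classes and template names are hypothetical, not taken from the repository):

```python
from django.views.generic import DetailView, TemplateView

from colossus.apps.lists.mixins import FormTemplateMixin, MailingListMixin


class MailingListOverviewView(MailingListMixin, TemplateView):
    """Hypothetical view: `self.mailing_list` is resolved lazily from the URL's `pk`
    and injected into the template context by MailingListMixin.get_context_data()."""
    template_name = 'lists/overview.html'


class FormTemplateEditView(FormTemplateMixin, MailingListMixin, DetailView):
    """Hypothetical view: FormTemplateMixin.get_object() maps `form_key` to a
    SubscriptionFormTemplate, creating it with defaults on first access."""
    context_object_name = 'form_template'
    template_name = 'lists/form_template_edit.html'
```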
hexsha c154fe1155b8499e0d3fdbd6be64ead53aadc801 | size 4,987 | ext py | lang Python
repo dacut/Assemyaml @ 7d23ce708822f998df51f348df02f8ecc756c919 | path assemyaml/types.py | licenses ["Apache-2.0"]
stars 2 (2017-08-11T00:14:38.000Z to 2017-08-17T00:51:49.000Z) | issues null | forks null
from __future__ import absolute_import, print_function
from logging import getLogger
from six.moves import range
from yaml.nodes import (
CollectionNode, MappingNode, Node, ScalarNode, SequenceNode,
)
log = getLogger("assemyaml.types")
# Assemyaml-specific tags
ASSEMYAML_NS = u"tag:assemyaml.nz,2017:"
GLOBAL_ASSEMBLY_TAG = ASSEMYAML_NS + u"Assembly"
GLOBAL_TRANSCLUDE_TAG = ASSEMYAML_NS + u"Transclude"
LOCAL_ASSEMBLY_TAG = u"!Assembly"
LOCAL_TRANSCLUDE_TAG = u"!Transclude"
# YAML native types
YAML_NS = u"tag:yaml.org,2002:"
YAML_BINARY_TAG = YAML_NS + u"binary"
YAML_BOOL_TAG = YAML_NS + u"bool"
YAML_FLOAT_TAG = YAML_NS + u"float"
YAML_INT_TAG = YAML_NS + u"int"
YAML_NULL_TAG = YAML_NS + u"null"
YAML_MAP_TAG = YAML_NS + u"map"
YAML_OMAP_TAG = YAML_NS + u"omap"
YAML_PAIRS_TAG = YAML_NS + u"pairs"
YAML_SEQ_TAG = YAML_NS + u"seq"
YAML_SET_TAG = YAML_NS + u"set"
YAML_STR_TAG = YAML_NS + u"str"
YAML_TIMESTAMP_TAG = YAML_NS + u"timestamp"
# Because Python3 removed this from types <sigh>
NoneType = type(None)
# tag-to-function mapping for comparing nodes
comparison_functions = {}
def copy_node(node):
"""
copy_node(node) -> node
Create a deep copy of the specified node or list/tuple of nodes.
If node is not a list, tuple, or Node, it is returned unchanged.
"""
if isinstance(node, tuple):
return tuple([copy_node(el) for el in node])
elif isinstance(node, list):
return [copy_node(el) for el in node]
elif not isinstance(node, Node):
return node
kw = {
"tag": node.tag,
"start_mark": node.start_mark,
"end_mark": node.end_mark,
"value": copy_node(node.value),
}
if isinstance(node, ScalarNode):
kw["style"] = node.style
elif isinstance(node, CollectionNode):
kw["flow_style"] = node.flow_style
return type(node)(**kw)
def comparison_function(*tags):
def add_function(f):
for tag in tags:
comparison_functions[tag] = f
return f
return add_function
def nodes_equal(a, b):
"""
nodes_equal(a, b) -> bool
Indicates whether two nodes are equal (examining both tags and values).
"""
global comparison_functions
if a.tag != b.tag:
return False
try:
return comparison_functions[a.tag](a, b)
except KeyError:
log.info("No comparison function found for %s", a.tag)
if type(a) is not type(b):
return False
if isinstance(a, ScalarNode):
return scalar_compare(a, b)
elif isinstance(a, SequenceNode):
return seq_compare(a, b)
elif isinstance(a, MappingNode):
return map_compare(a, b)
return False
@comparison_function(YAML_BINARY_TAG, YAML_BOOL_TAG, YAML_FLOAT_TAG,
YAML_INT_TAG, YAML_STR_TAG, YAML_TIMESTAMP_TAG)
def scalar_compare(a, b):
return a.value == b.value
@comparison_function(YAML_NULL_TAG)
def null_compare(a, b):
return True
@comparison_function(YAML_OMAP_TAG, YAML_PAIRS_TAG, YAML_SEQ_TAG)
def seq_compare(a, b):
if len(a.value) != len(b.value):
return False
    for a_el, b_el in zip(a.value, b.value):
        # every corresponding pair of elements must compare equal
        if not nodes_equal(a_el, b_el):
            return False
    return True
@comparison_function(YAML_SET_TAG)
def set_compare(a, b):
# We need to do an unordered comparison. Since we can't put this into a
# Python datastructure, the comparison is O(n^2).
if len(a.value) != len(b.value):
return False
a_values = [key for key, _ in a.value]
b_values = [key for key, _ in b.value]
for a_el in a_values:
# Look for this value anywhere in the b_values
for i in range(len(b_values)):
b_el = b_values[i]
if nodes_equal(a_el, b_el):
# Found a match. Mark it as seen from b_values by deleting it.
del b_values[i]
break
else:
# Not found. We're done.
return False
assert len(b_values) == 0
return True
@comparison_function(YAML_MAP_TAG)
def map_compare(a, b):
# This is similar to set_compare, except the values are 2-tuples in the
# form (key, value).
if len(a.value) != len(b.value):
return False
b_values = list(b.value)
for a_key, a_value in a.value:
# Look for this key anywhere in the b_values
for i in range(len(b_values)):
b_key, b_value = b_values[i]
if nodes_equal(a_key, b_key):
if not nodes_equal(a_value, b_value):
return False
# Found a match. Mark it as seen from b_values by deleting it.
del b_values[i]
break
else:
# Not found. We're done.
return False
assert len(b_values) == 0
return True
def mapping_find(mapping, node):
for i, kv in enumerate(mapping.value):
if nodes_equal(kv[0], node):
return (i, kv[0], kv[1])
return None
avg_line_length 26.668449 | max_line_length 78 | alphanum_fraction 0.636856
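A small sanity-check sketch for `nodes_equal` and `copy_node`, using PyYAML's `yaml.compose` to build the node graphs they operate on (the sample documents are illustrative; assumes the `assemyaml` package above is importable):

```python
import yaml

from assemyaml.types import copy_node, nodes_equal

# Compose two YAML documents into node graphs (tags + values), which is the
# representation nodes_equal() compares; mapping key order should not matter.
doc_a = yaml.compose("name: demo\nitems: [1, 2, 3]\n")
doc_b = yaml.compose("items: [1, 2, 3]\nname: demo\n")

assert nodes_equal(doc_a, doc_b)              # unordered mapping comparison
assert nodes_equal(doc_a, copy_node(doc_a))   # a deep copy compares equal
assert not nodes_equal(doc_a, yaml.compose("name: other\nitems: [1, 2, 3]\n"))
```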
hexsha 17652335786c89817cd65699bf4555542f1927cd | size 704 | ext py | lang Python
repo evhart/energyuse @ be76bac535bfea33d30867e232c2dcb35e1c7740 | path energyuse/apps/concepts/management/commands/fixtags.py | licenses ["MIT"]
stars null | issues 14 (2019-12-26T17:01:14.000Z to 2022-03-21T22:16:52.000Z) | forks null
from django.core.management.base import BaseCommand, CommandError
import json
import re
import urllib
from biostar.apps.posts.models import Vote, Post, Tag
from energyuse.apps.concepts.models import Concept
from energyuse.apps.eusers.models import User
class Command(BaseCommand):
help = 'Fix missing tags from field'
def handle(self, *args, **options):
for post in Post.objects.filter(type__in=Post.TOP_LEVEL):
try:
tags = ",".join(map(lambda x: x.name, post.tag_set.all()))
post.add_tags(tags)
post.tag_val = tags
post.save()
except:
pass
self.stdout.write('Done...')
avg_line_length 25.142857 | max_line_length 73 | alphanum_fraction 0.627841
hexsha 10ca25647801dd52cd620e39087dc9333dc7b095 | size 1,311 | ext py | lang Python
repo LeandroFrazao/devsearch-Django-Website @ e567bd2649bf9c362c4d588b54acba31a499281c | path users/utils.py | licenses ["Apache-2.0"]
stars 1 (2021-09-02T16:30:05.000Z to 2021-09-02T16:30:05.000Z) | issues null | forks null
from django.db.models import Q
from .models import Profile, Skill
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
def paginateProfiles(request, profiles, results):
page = request.GET.get('page')
paginator = Paginator(profiles,results)
try:
profiles = paginator.page(page)
except PageNotAnInteger:
page = 1
profiles = paginator.page(page)
except EmptyPage:
page = paginator.num_pages
profiles = paginator.page(page)
leftIndex = (int(page) -4)
if leftIndex<1:
leftIndex=1
rightIndex = (int(page)+5)
if rightIndex > paginator.num_pages :
rightIndex = paginator.num_pages+1
custom_range = range(leftIndex, rightIndex)
return custom_range, profiles
def searchProfiles(request):
search_query = ''
if request.GET.get('search_query'):
search_query = request.GET.get('search_query')
skills = Skill.objects.filter(name__icontains=search_query)
profiles = Profile.objects.distinct().filter(
Q(name__icontains=search_query) |
Q(short_intro__icontains=search_query) |
Q(bio__icontains = search_query)|
Q(skill__in=skills)
)
#else:
# profiles = Profile.objects.all()
return profiles, search_query
avg_line_length 26.22 | max_line_length 72 | alphanum_fraction 0.670481
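A minimal sketch of how `searchProfiles` and `paginateProfiles` are typically chained in a view (the view function and template path are hypothetical, not taken from the repository):

```python
from django.shortcuts import render

from users.utils import paginateProfiles, searchProfiles


def profiles(request):
    # Hypothetical view: search first, then paginate the result set, 3 profiles per page.
    profiles, search_query = searchProfiles(request)
    custom_range, profiles = paginateProfiles(request, profiles, 3)
    context = {
        'profiles': profiles,
        'search_query': search_query,
        'custom_range': custom_range,  # page-number window for the template's pager
    }
    return render(request, 'users/profiles.html', context)
```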
hexsha 4398aab2cd7ded774c9eceeabb642cd8318aa026 | size 2,467 | ext py | lang Python
repo ElliotShang/numerical-analysis @ 769610dc45cc4498b49f8311d7023b725c1c7bc2 | path chapter2/Doolittle.py | licenses ["MIT"]
stars null | issues null | forks null
import numpy as np
from scipy.linalg import lu, lu_factor
def Doolittle_withoutprivot(A):
n = len(A)
M = np.zeros((n, n))
# U = np.zeros((n, n))
for k in range(n):
# upper triangular
s = 0
for j in range(k, n):
s = sum(M[k][t] * M[t][j] for t in range(k))
M[k][j] = (A[k][j] - s)
# lower triangular
for i in range(k + 1, n):
# if i == k:
# A[i][i] = 1
# else:
s = sum(M[i][t] * M[t][k] for t in range(k))
M[i][k] = ((A[i][k] - s) / M[k][k])
print(M)
return M
def Doolittleprivot(A):
n = len(A)
lu = np.zeros((n, n))
M = np.zeros(n)
for k in range(n):
s = np.zeros(n)
max = -1e100
for i in range(k, n):
s[i] = A[i][k] - sum(lu[i][t] * lu[t][k] for t in range(k))
for i in range(n):
if max < abs(s[i]):
maxindex = i
max = abs(s[i])
for t in range(k):
tmp = lu[k][t]
lu[k][t] = lu[maxindex][t]
lu[maxindex][t] = tmp
# lu[k], lu[maxindex] = lu[maxindex], lu[k]
# lu[[k, maxindex]] = lu[[maxindex, k]]
for t in range(k, n):
tmp = A[k][t]
A[k][t] = A[maxindex][t]
A[maxindex][t] = tmp
# A[k], A[maxindex] = A[maxindex], A[k]
# A[[k, maxindex]] = A[[maxindex, k]]
tmp = s[k]
s[k] = s[maxindex]
s[maxindex] = tmp
lu[k][k] = s[k]
for j in range(k+1, n):
lu[k][j] = A[k][j] - sum(lu[k][t]*lu[t][j] for t in range(k))
for i in range(k+1, n):
lu[i][k] = s[i]/lu[k][k]
print(lu)
return lu
def SolveLU(A, b):
n = len(b)
x = np.zeros(n)
y = np.zeros(n)
y[0] = b[0]
for i in range(1, n):
y[i] = b[i] - sum(A[i][t] * y[t] for t in range(i))
x[n - 1] = y[n - 1] / A[n - 1][n - 1]
for i in range(n - 2, -1, -1):
x[i] = (y[i] - sum(A[i][t] * x[t] for t in range(i + 1, n))) / A[i][i]
print(x)
return x
mat = [[1, 8, 2, 3],
[-6, -3, 8, 1],
[2, 4, 4, 2],
[10, 5, -5, 6]]
#b = [2.5, 1.8, 7.2]
print('With pivoting')
Doolittleprivot(mat)
print('Without pivoting')
Doolittle_withoutprivot(mat)
print('Standard library (scipy)')
P,L,U = lu(mat)
print(P)
print(L)
print(U)
#SolveLU(A, b)
avg_line_length 26.244681 | max_line_length 79 | alphanum_fraction 0.398054
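As a sanity check of the factorization above (a minimal sketch, assuming the file is importable and that no zero pivots arise for this matrix): `Doolittle_withoutprivot` returns a single matrix holding U on and above the diagonal and the unit-lower-triangular multipliers below it, so splitting it back into L and U should reproduce A.

```python
import numpy as np

# Assumption: chapter2/Doolittle.py is on the import path; importing it also runs
# the module-level demo at the bottom of that file.
from Doolittle import Doolittle_withoutprivot

A = np.array([[1, 8, 2, 3],
              [-6, -3, 8, 1],
              [2, 4, 4, 2],
              [10, 5, -5, 6]], dtype=float)

M = Doolittle_withoutprivot(A)
L = np.tril(M, -1) + np.eye(len(A))   # unit lower-triangular factor
U = np.triu(M)                        # upper-triangular factor
assert np.allclose(L @ U, A)          # L @ U reconstructs A (no pivoting used)
```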
hexsha fd3c81402e7fcc92e7ef472c285fc0aa68138a22 | size 4,301 | ext py | lang Python
repo 8dspaces/Wooey-Flask @ 44d3ce02474859cdd8d6f1138ba48ce62b739524 | path wooey/tests/test_models.py | licenses ["BSD-3-Clause"]
stars 1 (2020-11-05T15:04:33.000Z to 2020-11-05T15:04:33.000Z) | issues null | forks null
import os
import uuid
from django.test import TestCase, Client
from . import factories, config, mixins
from .. import version
class ScriptTestCase(mixins.ScriptFactoryMixin, TestCase):
def test_multiple_choices(self):
# load our choice script
script = self.choice_script
multiple_choice_param = 'two_choices'
single_choice_param = 'one_choice'
optional_choice_param = 'all_choices'
# test that we are a multiple choice entry
from ..models import ScriptParameter
param = ScriptParameter.objects.get(slug=multiple_choice_param)
self.assertTrue(param.multiple_choice)
# test our limit
self.assertEqual(param.max_choices, 2)
# test with a singular case
param = ScriptParameter.objects.get(slug=single_choice_param)
self.assertFalse(param.multiple_choice)
self.assertEqual(param.max_choices, 1)
# test cases that have variable requirements
param = ScriptParameter.objects.get(slug=optional_choice_param)
self.assertTrue(param.multiple_choice)
self.assertEqual(param.max_choices, -1)
class ScriptGroupTestCase(TestCase):
def test_script_group_creation(self):
group = factories.ScriptGroupFactory()
class TestJob(mixins.ScriptFactoryMixin, mixins.FileCleanupMixin, TestCase):
urls = 'wooey.test_urls'
def get_local_url(self, fileinfo):
from ..backend import utils
local_storage = utils.get_storage(local=True)
return local_storage.url(fileinfo['object'].filepath.name)
def test_jobs(self):
script = self.translate_script
from ..backend import utils
job = utils.create_wooey_job(script_version_pk=script.pk, data={'job_name': 'abc', 'sequence': 'aaa', 'out': 'abc'})
job = job.submit_to_celery()
old_pk = job.pk
new_job = job.submit_to_celery(resubmit=True)
self.assertNotEqual(old_pk, new_job.pk)
# test rerunning, our output should be removed
from ..models import WooeyFile
old_output = sorted([i.pk for i in WooeyFile.objects.filter(job=new_job)])
job.submit_to_celery(rerun=True)
# check that we overwrite our output
new_output = sorted([i.pk for i in WooeyFile.objects.filter(job=new_job)])
# Django 1.6 has a bug where they are reusing pk numbers
if version.DJANGO_VERSION >= version.DJ17:
self.assertNotEqual(old_output, new_output)
self.assertEqual(len(old_output), len(new_output))
# check the old entries are gone
if version.DJANGO_VERSION >= version.DJ17:
# Django 1.6 has a bug where they are reusing pk numbers, so once again we cannot use this check
self.assertEqual([], list(WooeyFile.objects.filter(pk__in=old_output)))
file_previews = utils.get_file_previews(job)
for group, files in file_previews.items():
for fileinfo in files:
# for testing, we use the local url
response = Client().get(self.get_local_url(fileinfo))
self.assertEqual(response.status_code, 200)
# check our download links are ok
job = utils.create_wooey_job(script_version_pk=script.pk,
data={'fasta': open(os.path.join(config.WOOEY_TEST_DATA, 'fasta.fasta')),
'out': 'abc', 'job_name': 'abc'})
# check our upload link is ok
file_previews = utils.get_file_previews(job)
for group, files in file_previews.items():
for fileinfo in files:
response = Client().get(self.get_local_url(fileinfo))
self.assertEqual(response.status_code, 200)
def test_multiplechoices(self):
script = self.choice_script
choices = ['2', '1', '3']
choice_param = 'two_choices'
from ..backend import utils
job = utils.create_wooey_job(script_version_pk=script.pk, data={'job_name': 'abc', choice_param: choices})
# make sure we have our choices in the parameters
choice_params = [i.value for i in job.get_parameters() if i.parameter.slug == choice_param]
self.assertEqual(choices, choice_params)
job = job.submit_to_celery()
avg_line_length 40.961905 | max_line_length 124 | alphanum_fraction 0.660079
hexsha 891a1b37b4591a867d00f12111c8535c5960f062 | size 4,382 | ext py | lang Python
stars repo Matt-Gleich/Photo-Merge @ 1299378db4e2f9d6011e8a757d0f4b228424de6d | path utility_functions.py | licenses ["MIT"] | stars 3 (2019-11-06T22:57:45.000Z to 2021-05-31T03:26:16.000Z)
issues repo gleich/Photo-Sort @ 1299378db4e2f9d6011e8a757d0f4b228424de6d | path utility_functions.py | licenses ["MIT"] | issues 17 (2019-06-01T19:03:35.000Z to 2019-11-29T03:50:30.000Z)
forks repo gleich/Photo-Sort @ 1299378db4e2f9d6011e8a757d0f4b228424de6d | path utility_functions.py | licenses ["MIT"] | forks 1 (2019-05-19T22:06:28.000Z to 2019-05-19T22:06:28.000Z)
import subprocess
from termcolor import colored
#######################
#Subprocess functions:#
#######################
def get_subprocess_output(subprocess_command):
"""
Will get the output of a subprocess command that has been run.
:param subprocess_command: the command that was recorded.
:return: the command's output
"""
string_command = str(subprocess_command)
stdout_position = string_command.find("stdout")
stderr_position = string_command.find("stderr")
relative_string = string_command[stdout_position:stderr_position]
final_string = relative_string[relative_string.find("'") + 1:-3]
return final_string
# Testing:
# command = str((subprocess.run(['pwd'], capture_output=True)))
# print(command)
# print(get_subprocess_output(command))
def run_command(shell_command, get_output):
"""
Will run a shell command using the subprocess module
:param shell_command: The command that is going to be run
:param get_output: Will capture the output of the command
:return: the command output
"""
command_ran = subprocess.run(shell_command, capture_output=get_output)
return command_ran
# Testing:
# print(run_command(["find", ".", "-type", "f"], True))
# Testing with get_subprocess_output:
# print(get_subprocess_output(run_command("pwd", True)))
def file_creation_date(file_path):
"""
Finds when the photo was created.
:param file_path: The path of the file that the date will be gotten for.
:return: string that says what the date of creation is for the file using the ISO format.
"""
ran_command = run_command(["stat", "-f", "%SB", file_path], True)
command_output = get_subprocess_output(ran_command).strip("\\n")
elements = command_output.split(" ")
if len(elements) == 4:
month = elements[0]
day = elements[1]
year = elements[3]
elif len(elements) == 5:
month = elements[0]
day = elements[2]
year = elements[4]
if month in "January":
month = "January"
elif month in "February":
month = "February"
elif month in "March":
month = "March"
elif month in "April":
month = "April"
elif month in "May":
month = "May"
elif month in "June":
month = "June"
elif month in "July":
month = "July"
elif month in "August":
month = "August"
elif month in "September":
month = "September"
elif month in "October":
month = "October"
elif month in "November":
month = "November"
elif month in "December":
month = "December"
return [month, day, year]
# Testing:
# print(file_creation_date('./photos/test_image.jpg'))
#########################
#General Purpose python:#
#########################
def list_to_dict(lst):
"""
Takes a list and turns it into a dict
:param lst: the list that will be turned into a dict
"""
if len(lst) % 2 != 1:
odd_indexes = []
even_indexes = []
for i in range(len(lst)):
if i % 2 == 0:
odd_indexes.append(lst[i])
elif i % 2 == 1 or i == 0:
even_indexes.append(lst[i])
final_dict = dict(zip(odd_indexes, even_indexes))
return final_dict
else:
print("The list needs to have an even amount of")
# Testing
# print(list_to_dict(["a", "b", "c", "d"]))
def clear_output(line_number):
"""
Will clear the output screen
:param line_number: number of blank lines that will be printed
:return: none
"""
fix_line_number = line_number / 2
rounded_line_number = round(fix_line_number)
for i in range(rounded_line_number):
print('\n')
def print_colored(string, color):
"""
Will print to the terminal a string in a certain color
:param string: string that will be printed
:param color: color that the string will be printed as
:return: none
"""
print_text = colored(string, color, attrs=['bold'])
print(print_text)
def print_txt_content(file_name):
"""
Will print the contents of a text file to the console.
:param file_name: The name of the text file.
:return: none
"""
with open(file_name) as file:
contents = file.read()
lines = contents.split("\n")
for line in lines:
print(line)
avg_line_length 27.559748 | max_line_length 93 | alphanum_fraction 0.620949
hexsha abcabdb4ea6ef2352e5fb6e5893ae8f0426e4ad5 | size 7,489 | ext py | lang Python
repo marami52/sweetviz @ d06df4a2741c73985c574eb63e913a98c4066592 | path sweetviz/series_analyzer.py | licenses ["MIT"]
stars 1 (2020-12-06T18:15:13.000Z to 2020-12-06T18:15:13.000Z) | issues null | forks null
import pandas as pd
from sweetviz.sv_types import NumWithPercent, FeatureType, FeatureToProcess
from sweetviz.type_detection import determine_feature_type
import sweetviz.series_analyzer_numeric
import sweetviz.series_analyzer_cat
import sweetviz.series_analyzer_text
def get_counts(series: pd.Series) -> dict:
# The value_counts() function is used to get a Series containing counts of unique values.
value_counts_with_nan = series.value_counts(dropna=False)
# Fix for data with only a single value; reset_index was flipping the data returned
if len(value_counts_with_nan) == 1:
if pd.isna(value_counts_with_nan.index[0]):
value_counts_without_nan = pd.Series()
else:
value_counts_without_nan = value_counts_with_nan
else:
value_counts_without_nan = (value_counts_with_nan.reset_index().dropna().set_index("index").iloc[:, 0])
# print(value_counts_without_nan.index.dtype.name)
# IGNORING NAN FOR NOW AS IT CAUSES ISSUES [FIX]
# distinct_count_with_nan = value_counts_with_nan.count()
distinct_count_without_nan = value_counts_without_nan.count()
return {
"value_counts_without_nan": value_counts_without_nan,
"distinct_count_without_nan": distinct_count_without_nan,
"num_rows_with_data": series.count(),
"num_rows_total": len(series),
# IGNORING NAN FOR NOW AS IT CAUSES ISSUES [FIX]:
# "value_counts_with_nan": value_counts_with_nan,
# "distinct_count_with_nan": distinct_count_with_nan,
}
def fill_out_missing_counts_in_other_series(my_counts:dict, other_counts:dict):
# IGNORING NAN FOR NOW AS IT CAUSES ISSUES [FIX]
# to_fill_list = ["value_counts_with_nan", "value_counts_without_nan"]
to_fill_list = ["value_counts_without_nan"]
for to_fill in to_fill_list:
fill_using_strings = True if my_counts[to_fill].index.dtype.name in ('category', 'object') else False
for key, value in other_counts[to_fill].items():
if key not in my_counts[to_fill]:
# If categorical, must do this hack to add new value
if my_counts[to_fill].index.dtype.name == 'category':
my_counts[to_fill] = my_counts[to_fill].reindex(my_counts[to_fill].index.add_categories(key))
# Add empty value at new index, but make sure we are using the right index type
if fill_using_strings:
my_counts[to_fill].at[str(key)] = 0
else:
my_counts[to_fill].at[key] = 0
def add_series_base_stats_to_dict(series: pd.Series, counts: dict, updated_dict: dict) -> dict:
updated_dict["stats"] = dict()
updated_dict["base_stats"] = dict()
base_stats = updated_dict["base_stats"]
num_total = counts["num_rows_total"]
try:
num_zeros = series[series == 0].sum()
except TypeError:
num_zeros = 0
non_nan = counts["num_rows_with_data"]
base_stats["total_rows"] = num_total
base_stats["num_values"] = NumWithPercent(non_nan, num_total)
base_stats["num_missing"] = NumWithPercent(num_total - non_nan, num_total)
base_stats["num_zeroes"] = NumWithPercent(num_zeros, num_total)
base_stats["num_distinct"] = NumWithPercent(counts["distinct_count_without_nan"], num_total)
# This generates everything EXCEPT the "detail pane"
def analyze_feature_to_dictionary(to_process: FeatureToProcess) -> dict:
# start = time.perf_counter()
# Validation: Make sure the targets are the same length as the series
if to_process.source_target is not None and to_process.source is not None:
if len(to_process.source_target) != len(to_process.source):
raise ValueError
if to_process.compare_target is not None and to_process.compare is not None:
if len(to_process.compare_target) != len(to_process.compare):
raise ValueError
# Initialize some dictionary values
returned_feature_dict = dict()
returned_feature_dict["name"] = to_process.source.name
returned_feature_dict["order_index"] = to_process.order
returned_feature_dict["is_target"] = True if to_process.order == -1 else False
# Determine SOURCE feature type
to_process.source_counts = get_counts(to_process.source)
returned_feature_dict["type"] = determine_feature_type(to_process.source, to_process.source_counts,
to_process.predetermined_type, "SOURCE")
source_type = returned_feature_dict["type"]
# Determine COMPARED feature type & initialize
compare_dict = None
if to_process.compare is not None:
to_process.compare_counts = get_counts(to_process.compare)
compare_type = determine_feature_type(to_process.compare,
to_process.compare_counts,
returned_feature_dict["type"], "COMPARED")
if compare_type != FeatureType.TYPE_ALL_NAN and \
source_type != FeatureType.TYPE_ALL_NAN:
# Explicitly show missing categories on each set
if compare_type == FeatureType.TYPE_CAT or compare_type == FeatureType.TYPE_BOOL:
fill_out_missing_counts_in_other_series(to_process.compare_counts, to_process.source_counts)
fill_out_missing_counts_in_other_series(to_process.source_counts, to_process.compare_counts)
returned_feature_dict["compare"] = dict()
compare_dict = returned_feature_dict["compare"]
compare_dict["type"] = compare_type
# Settle all-NaN series, depending on source versus compared
if to_process.compare is not None:
# Settle all-Nan WITH COMPARE: Must consider all cases between source and compare
if compare_type == FeatureType.TYPE_ALL_NAN and source_type == FeatureType.TYPE_ALL_NAN:
returned_feature_dict["type"] = FeatureType.TYPE_TEXT
compare_dict["type"] = FeatureType.TYPE_TEXT
elif compare_type == FeatureType.TYPE_ALL_NAN:
compare_dict["type"] = source_type
elif source_type == FeatureType.TYPE_ALL_NAN:
returned_feature_dict["type"] = compare_type
else:
# Settle all-Nan WITHOUT COMPARE ( trivial: consider as TEXT )
if source_type == FeatureType.TYPE_ALL_NAN:
returned_feature_dict["type"] = FeatureType.TYPE_TEXT
# Establish base stats
add_series_base_stats_to_dict(to_process.source, to_process.source_counts, returned_feature_dict)
if to_process.compare is not None:
add_series_base_stats_to_dict(to_process.compare, to_process.compare_counts, compare_dict)
# Perform full analysis on source/compare/target
if returned_feature_dict["type"] == FeatureType.TYPE_NUM:
sweetviz.series_analyzer_numeric.analyze(to_process, returned_feature_dict)
elif returned_feature_dict["type"] == FeatureType.TYPE_CAT:
sweetviz.series_analyzer_cat.analyze(to_process, returned_feature_dict)
elif returned_feature_dict["type"] == FeatureType.TYPE_BOOL:
sweetviz.series_analyzer_cat.analyze(to_process, returned_feature_dict)
elif returned_feature_dict["type"] == FeatureType.TYPE_TEXT:
sweetviz.series_analyzer_text.analyze(to_process, returned_feature_dict)
else:
raise ValueError
# print(f"{to_process.source.name} PROCESSED ------> "
# f" {time.perf_counter() - start}")
return returned_feature_dict
avg_line_length 49.926667 | max_line_length 113 | alphanum_fraction 0.707304
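A small sketch of what `get_counts` reports for a plain pandas Series (sample data is illustrative; assumes the module above is importable and a pandas version compatible with it):

```python
import numpy as np
import pandas as pd

from sweetviz.series_analyzer import get_counts

s = pd.Series(["a", "b", "b", np.nan, "c", "b"])
counts = get_counts(s)

assert counts["num_rows_total"] == 6               # all rows, including the NaN
assert counts["num_rows_with_data"] == 5           # non-NaN rows
assert counts["distinct_count_without_nan"] == 3   # {"a", "b", "c"}
assert counts["value_counts_without_nan"]["b"] == 3
```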
hexsha 9eea7c820aeb5f7d89bb6698436bbb98fb2818c4 | size 150,117 | ext py | lang Python
repo yasics/vpp @ a4d0956082f12ac8269fd415134af7f605c1f3c9 | path test/test_nat44_ed.py | licenses ["Apache-2.0"]
stars 1 (2021-07-01T02:47:27.000Z to 2021-07-01T02:47:27.000Z) | issues null | forks null
#!/usr/bin/env python3
import unittest
from io import BytesIO
from random import randint, shuffle, choice
import scapy.compat
from framework import VppTestCase, VppTestRunner
from scapy.data import IP_PROTOS
from scapy.layers.inet import IP, TCP, UDP, ICMP, GRE
from scapy.layers.inet import IPerror, TCPerror
from scapy.layers.l2 import Ether
from scapy.packet import Raw
from syslog_rfc5424_parser import SyslogMessage, ParseError
from syslog_rfc5424_parser.constants import SyslogSeverity
from util import ppp, ip4_range
from vpp_acl import AclRule, VppAcl, VppAclInterface
from vpp_ip_route import VppIpRoute, VppRoutePath
from vpp_papi import VppEnum
class NAT44EDTestCase(VppTestCase):
nat_addr = '10.0.0.3'
tcp_port_in = 6303
tcp_port_out = 6303
udp_port_in = 6304
udp_port_out = 6304
icmp_id_in = 6305
icmp_id_out = 6305
tcp_external_port = 80
max_sessions = 100
def setUp(self):
super(NAT44EDTestCase, self).setUp()
self.plugin_enable()
def tearDown(self):
super(NAT44EDTestCase, self).tearDown()
if not self.vpp_dead:
self.plugin_disable()
def plugin_enable(self):
self.vapi.nat44_ed_plugin_enable_disable(
sessions=self.max_sessions, enable=1)
def plugin_disable(self):
self.vapi.nat44_ed_plugin_enable_disable(enable=0)
@property
def config_flags(self):
return VppEnum.vl_api_nat_config_flags_t
@property
def nat44_config_flags(self):
return VppEnum.vl_api_nat44_config_flags_t
@property
def syslog_severity(self):
return VppEnum.vl_api_syslog_severity_t
@property
def server_addr(self):
return self.pg1.remote_hosts[0].ip4
@staticmethod
def random_port():
return randint(1025, 65535)
@staticmethod
def proto2layer(proto):
if proto == IP_PROTOS.tcp:
return TCP
elif proto == IP_PROTOS.udp:
return UDP
elif proto == IP_PROTOS.icmp:
return ICMP
else:
raise Exception("Unsupported protocol")
@classmethod
def create_and_add_ip4_table(cls, i, table_id=0):
cls.vapi.ip_table_add_del(is_add=1, table={'table_id': table_id})
i.set_table_ip4(table_id)
@classmethod
def configure_ip4_interface(cls, i, hosts=0, table_id=None):
if table_id:
cls.create_and_add_ip4_table(i, table_id)
i.admin_up()
i.config_ip4()
i.resolve_arp()
if hosts:
i.generate_remote_hosts(hosts)
i.configure_ipv4_neighbors()
@classmethod
def nat_add_interface_address(cls, i):
cls.vapi.nat44_add_del_interface_addr(
sw_if_index=i.sw_if_index, is_add=1)
def nat_add_inside_interface(self, i):
self.vapi.nat44_interface_add_del_feature(
flags=self.config_flags.NAT_IS_INSIDE,
sw_if_index=i.sw_if_index, is_add=1)
def nat_add_outside_interface(self, i):
self.vapi.nat44_interface_add_del_feature(
flags=self.config_flags.NAT_IS_OUTSIDE,
sw_if_index=i.sw_if_index, is_add=1)
def nat_add_address(self, address, twice_nat=0,
vrf_id=0xFFFFFFFF, is_add=1):
flags = self.config_flags.NAT_IS_TWICE_NAT if twice_nat else 0
self.vapi.nat44_add_del_address_range(first_ip_address=address,
last_ip_address=address,
vrf_id=vrf_id,
is_add=is_add,
flags=flags)
def nat_add_static_mapping(self, local_ip, external_ip='0.0.0.0',
local_port=0, external_port=0, vrf_id=0,
is_add=1, external_sw_if_index=0xFFFFFFFF,
proto=0, tag="", flags=0):
if not (local_port and external_port):
flags |= self.config_flags.NAT_IS_ADDR_ONLY
self.vapi.nat44_add_del_static_mapping(
is_add=is_add,
local_ip_address=local_ip,
external_ip_address=external_ip,
external_sw_if_index=external_sw_if_index,
local_port=local_port,
external_port=external_port,
vrf_id=vrf_id, protocol=proto,
flags=flags,
tag=tag)
@classmethod
def setUpClass(cls):
super(NAT44EDTestCase, cls).setUpClass()
cls.create_pg_interfaces(range(12))
cls.interfaces = list(cls.pg_interfaces[:4])
cls.create_and_add_ip4_table(cls.pg2, 10)
for i in cls.interfaces:
cls.configure_ip4_interface(i, hosts=3)
# test specific (test-multiple-vrf)
cls.vapi.ip_table_add_del(is_add=1, table={'table_id': 1})
# test specific (test-one-armed-nat44-static)
cls.pg4.generate_remote_hosts(2)
cls.pg4.config_ip4()
cls.vapi.sw_interface_add_del_address(
sw_if_index=cls.pg4.sw_if_index,
prefix="10.0.0.1/24")
cls.pg4.admin_up()
cls.pg4.resolve_arp()
cls.pg4._remote_hosts[1]._ip4 = cls.pg4._remote_hosts[0]._ip4
cls.pg4.resolve_arp()
# test specific interface (pg5)
cls.pg5._local_ip4 = "10.1.1.1"
cls.pg5._remote_hosts[0]._ip4 = "10.1.1.2"
cls.pg5.set_table_ip4(1)
cls.pg5.config_ip4()
cls.pg5.admin_up()
cls.pg5.resolve_arp()
# test specific interface (pg6)
cls.pg6._local_ip4 = "10.1.2.1"
cls.pg6._remote_hosts[0]._ip4 = "10.1.2.2"
cls.pg6.set_table_ip4(1)
cls.pg6.config_ip4()
cls.pg6.admin_up()
cls.pg6.resolve_arp()
rl = list()
rl.append(VppIpRoute(cls, "0.0.0.0", 0,
[VppRoutePath("0.0.0.0", 0xffffffff,
nh_table_id=0)],
register=False, table_id=1))
rl.append(VppIpRoute(cls, "0.0.0.0", 0,
[VppRoutePath(cls.pg1.local_ip4,
cls.pg1.sw_if_index)],
register=False))
rl.append(VppIpRoute(cls, cls.pg5.remote_ip4, 32,
[VppRoutePath("0.0.0.0",
cls.pg5.sw_if_index)],
register=False, table_id=1))
rl.append(VppIpRoute(cls, cls.pg6.remote_ip4, 32,
[VppRoutePath("0.0.0.0",
cls.pg6.sw_if_index)],
register=False, table_id=1))
rl.append(VppIpRoute(cls, cls.pg6.remote_ip4, 16,
[VppRoutePath("0.0.0.0", 0xffffffff,
nh_table_id=1)],
register=False, table_id=0))
for r in rl:
r.add_vpp_config()
def get_err_counter(self, path):
return self.statistics.get_err_counter(path)
def reass_hairpinning(self, server_addr, server_in_port, server_out_port,
host_in_port, proto=IP_PROTOS.tcp,
ignore_port=False):
layer = self.proto2layer(proto)
if proto == IP_PROTOS.tcp:
data = b"A" * 4 + b"B" * 16 + b"C" * 3
else:
data = b"A" * 16 + b"B" * 16 + b"C" * 3
# send packet from host to server
pkts = self.create_stream_frag(self.pg0,
self.nat_addr,
host_in_port,
server_out_port,
data,
proto)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg0.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
self.nat_addr,
server_addr)
if proto != IP_PROTOS.icmp:
if not ignore_port:
self.assertNotEqual(p[layer].sport, host_in_port)
self.assertEqual(p[layer].dport, server_in_port)
else:
if not ignore_port:
self.assertNotEqual(p[layer].id, host_in_port)
self.assertEqual(data, p[Raw].load)
def frag_out_of_order(self, proto=IP_PROTOS.tcp, dont_translate=False,
ignore_port=False):
layer = self.proto2layer(proto)
if proto == IP_PROTOS.tcp:
data = b"A" * 4 + b"B" * 16 + b"C" * 3
else:
data = b"A" * 16 + b"B" * 16 + b"C" * 3
self.port_in = self.random_port()
for i in range(2):
# in2out
pkts = self.create_stream_frag(self.pg0, self.pg1.remote_ip4,
self.port_in, 20, data, proto)
pkts.reverse()
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg1.get_capture(len(pkts))
if not dont_translate:
p = self.reass_frags_and_verify(frags,
self.nat_addr,
self.pg1.remote_ip4)
else:
p = self.reass_frags_and_verify(frags,
self.pg0.remote_ip4,
self.pg1.remote_ip4)
if proto != IP_PROTOS.icmp:
if not dont_translate:
self.assertEqual(p[layer].dport, 20)
if not ignore_port:
self.assertNotEqual(p[layer].sport, self.port_in)
else:
self.assertEqual(p[layer].sport, self.port_in)
else:
if not ignore_port:
if not dont_translate:
self.assertNotEqual(p[layer].id, self.port_in)
else:
self.assertEqual(p[layer].id, self.port_in)
self.assertEqual(data, p[Raw].load)
# out2in
if not dont_translate:
dst_addr = self.nat_addr
else:
dst_addr = self.pg0.remote_ip4
if proto != IP_PROTOS.icmp:
sport = 20
dport = p[layer].sport
else:
sport = p[layer].id
dport = 0
pkts = self.create_stream_frag(self.pg1, dst_addr, sport, dport,
data, proto, echo_reply=True)
pkts.reverse()
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.logger.info(self.vapi.cli("show trace"))
self.pg_start()
frags = self.pg0.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
self.pg1.remote_ip4,
self.pg0.remote_ip4)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].sport, 20)
self.assertEqual(p[layer].dport, self.port_in)
else:
self.assertEqual(p[layer].id, self.port_in)
self.assertEqual(data, p[Raw].load)
def reass_frags_and_verify(self, frags, src, dst):
buffer = BytesIO()
for p in frags:
self.assertEqual(p[IP].src, src)
self.assertEqual(p[IP].dst, dst)
self.assert_ip_checksum_valid(p)
buffer.seek(p[IP].frag * 8)
buffer.write(bytes(p[IP].payload))
ip = IP(src=frags[0][IP].src, dst=frags[0][IP].dst,
proto=frags[0][IP].proto)
if ip.proto == IP_PROTOS.tcp:
p = (ip / TCP(buffer.getvalue()))
self.logger.debug(ppp("Reassembled:", p))
self.assert_tcp_checksum_valid(p)
elif ip.proto == IP_PROTOS.udp:
p = (ip / UDP(buffer.getvalue()[:8]) /
Raw(buffer.getvalue()[8:]))
elif ip.proto == IP_PROTOS.icmp:
p = (ip / ICMP(buffer.getvalue()))
return p
def frag_in_order(self, proto=IP_PROTOS.tcp, dont_translate=False,
ignore_port=False):
layer = self.proto2layer(proto)
if proto == IP_PROTOS.tcp:
data = b"A" * 4 + b"B" * 16 + b"C" * 3
else:
data = b"A" * 16 + b"B" * 16 + b"C" * 3
self.port_in = self.random_port()
# in2out
pkts = self.create_stream_frag(self.pg0, self.pg1.remote_ip4,
self.port_in, 20, data, proto)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg1.get_capture(len(pkts))
if not dont_translate:
p = self.reass_frags_and_verify(frags,
self.nat_addr,
self.pg1.remote_ip4)
else:
p = self.reass_frags_and_verify(frags,
self.pg0.remote_ip4,
self.pg1.remote_ip4)
if proto != IP_PROTOS.icmp:
if not dont_translate:
self.assertEqual(p[layer].dport, 20)
if not ignore_port:
self.assertNotEqual(p[layer].sport, self.port_in)
else:
self.assertEqual(p[layer].sport, self.port_in)
else:
if not ignore_port:
if not dont_translate:
self.assertNotEqual(p[layer].id, self.port_in)
else:
self.assertEqual(p[layer].id, self.port_in)
self.assertEqual(data, p[Raw].load)
# out2in
if not dont_translate:
dst_addr = self.nat_addr
else:
dst_addr = self.pg0.remote_ip4
if proto != IP_PROTOS.icmp:
sport = 20
dport = p[layer].sport
else:
sport = p[layer].id
dport = 0
pkts = self.create_stream_frag(self.pg1, dst_addr, sport, dport, data,
proto, echo_reply=True)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg0.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
self.pg1.remote_ip4,
self.pg0.remote_ip4)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].sport, 20)
self.assertEqual(p[layer].dport, self.port_in)
else:
self.assertEqual(p[layer].id, self.port_in)
self.assertEqual(data, p[Raw].load)
def verify_capture_out(self, capture, nat_ip=None, same_port=False,
dst_ip=None, ignore_port=False):
if nat_ip is None:
nat_ip = self.nat_addr
for packet in capture:
try:
self.assert_packet_checksums_valid(packet)
self.assertEqual(packet[IP].src, nat_ip)
if dst_ip is not None:
self.assertEqual(packet[IP].dst, dst_ip)
if packet.haslayer(TCP):
if not ignore_port:
if same_port:
self.assertEqual(
packet[TCP].sport, self.tcp_port_in)
else:
self.assertNotEqual(
packet[TCP].sport, self.tcp_port_in)
self.tcp_port_out = packet[TCP].sport
self.assert_packet_checksums_valid(packet)
elif packet.haslayer(UDP):
if not ignore_port:
if same_port:
self.assertEqual(
packet[UDP].sport, self.udp_port_in)
else:
self.assertNotEqual(
packet[UDP].sport, self.udp_port_in)
self.udp_port_out = packet[UDP].sport
else:
if not ignore_port:
if same_port:
self.assertEqual(
packet[ICMP].id, self.icmp_id_in)
else:
self.assertNotEqual(
packet[ICMP].id, self.icmp_id_in)
self.icmp_id_out = packet[ICMP].id
self.assert_packet_checksums_valid(packet)
except:
self.logger.error(ppp("Unexpected or invalid packet "
"(outside network):", packet))
raise
def verify_capture_in(self, capture, in_if):
for packet in capture:
try:
self.assert_packet_checksums_valid(packet)
self.assertEqual(packet[IP].dst, in_if.remote_ip4)
if packet.haslayer(TCP):
self.assertEqual(packet[TCP].dport, self.tcp_port_in)
elif packet.haslayer(UDP):
self.assertEqual(packet[UDP].dport, self.udp_port_in)
else:
self.assertEqual(packet[ICMP].id, self.icmp_id_in)
except:
self.logger.error(ppp("Unexpected or invalid packet "
"(inside network):", packet))
raise
def create_stream_in(self, in_if, out_if, dst_ip=None, ttl=64):
if dst_ip is None:
dst_ip = out_if.remote_ip4
pkts = []
# TCP
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=dst_ip, ttl=ttl) /
TCP(sport=self.tcp_port_in, dport=20))
pkts.extend([p, p])
# UDP
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=dst_ip, ttl=ttl) /
UDP(sport=self.udp_port_in, dport=20))
pkts.append(p)
# ICMP
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=dst_ip, ttl=ttl) /
ICMP(id=self.icmp_id_in, type='echo-request'))
pkts.append(p)
return pkts
def create_stream_out(self, out_if, dst_ip=None, ttl=64,
use_inside_ports=False):
if dst_ip is None:
dst_ip = self.nat_addr
if not use_inside_ports:
tcp_port = self.tcp_port_out
udp_port = self.udp_port_out
icmp_id = self.icmp_id_out
else:
tcp_port = self.tcp_port_in
udp_port = self.udp_port_in
icmp_id = self.icmp_id_in
pkts = []
# TCP
p = (Ether(dst=out_if.local_mac, src=out_if.remote_mac) /
IP(src=out_if.remote_ip4, dst=dst_ip, ttl=ttl) /
TCP(dport=tcp_port, sport=20))
pkts.extend([p, p])
# UDP
p = (Ether(dst=out_if.local_mac, src=out_if.remote_mac) /
IP(src=out_if.remote_ip4, dst=dst_ip, ttl=ttl) /
UDP(dport=udp_port, sport=20))
pkts.append(p)
# ICMP
p = (Ether(dst=out_if.local_mac, src=out_if.remote_mac) /
IP(src=out_if.remote_ip4, dst=dst_ip, ttl=ttl) /
ICMP(id=icmp_id, type='echo-reply'))
pkts.append(p)
return pkts
def create_tcp_stream(self, in_if, out_if, count):
pkts = []
port = 6303
for i in range(count):
p = (Ether(dst=in_if.local_mac, src=in_if.remote_mac) /
IP(src=in_if.remote_ip4, dst=out_if.remote_ip4, ttl=64) /
TCP(sport=port + i, dport=20))
pkts.append(p)
return pkts
def create_stream_frag(self, src_if, dst, sport, dport, data,
proto=IP_PROTOS.tcp, echo_reply=False):
if proto == IP_PROTOS.tcp:
p = (IP(src=src_if.remote_ip4, dst=dst) /
TCP(sport=sport, dport=dport) /
Raw(data))
p = p.__class__(scapy.compat.raw(p))
chksum = p[TCP].chksum
proto_header = TCP(sport=sport, dport=dport, chksum=chksum)
elif proto == IP_PROTOS.udp:
proto_header = UDP(sport=sport, dport=dport)
elif proto == IP_PROTOS.icmp:
if not echo_reply:
proto_header = ICMP(id=sport, type='echo-request')
else:
proto_header = ICMP(id=sport, type='echo-reply')
else:
raise Exception("Unsupported protocol")
id = self.random_port()
pkts = []
if proto == IP_PROTOS.tcp:
raw = Raw(data[0:4])
else:
raw = Raw(data[0:16])
p = (Ether(src=src_if.remote_mac, dst=src_if.local_mac) /
IP(src=src_if.remote_ip4, dst=dst, flags="MF", frag=0, id=id) /
proto_header /
raw)
pkts.append(p)
if proto == IP_PROTOS.tcp:
raw = Raw(data[4:20])
else:
raw = Raw(data[16:32])
p = (Ether(src=src_if.remote_mac, dst=src_if.local_mac) /
IP(src=src_if.remote_ip4, dst=dst, flags="MF", frag=3, id=id,
proto=proto) /
raw)
pkts.append(p)
if proto == IP_PROTOS.tcp:
raw = Raw(data[20:])
else:
raw = Raw(data[32:])
p = (Ether(src=src_if.remote_mac, dst=src_if.local_mac) /
IP(src=src_if.remote_ip4, dst=dst, frag=5, proto=proto,
id=id) /
raw)
pkts.append(p)
return pkts
def frag_in_order_in_plus_out(self, in_addr, out_addr, in_port, out_port,
proto=IP_PROTOS.tcp):
layer = self.proto2layer(proto)
if proto == IP_PROTOS.tcp:
data = b"A" * 4 + b"B" * 16 + b"C" * 3
else:
data = b"A" * 16 + b"B" * 16 + b"C" * 3
port_in = self.random_port()
for i in range(2):
# out2in
pkts = self.create_stream_frag(self.pg0, out_addr,
port_in, out_port,
data, proto)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg1.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
self.pg0.remote_ip4,
in_addr)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].sport, port_in)
self.assertEqual(p[layer].dport, in_port)
else:
self.assertEqual(p[layer].id, port_in)
self.assertEqual(data, p[Raw].load)
# in2out
if proto != IP_PROTOS.icmp:
pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
in_port,
p[layer].sport, data, proto)
else:
pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
p[layer].id, 0, data, proto,
echo_reply=True)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg0.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
out_addr,
self.pg0.remote_ip4)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].sport, out_port)
self.assertEqual(p[layer].dport, port_in)
else:
self.assertEqual(p[layer].id, port_in)
self.assertEqual(data, p[Raw].load)
def frag_out_of_order_in_plus_out(self, in_addr, out_addr, in_port,
out_port, proto=IP_PROTOS.tcp):
layer = self.proto2layer(proto)
if proto == IP_PROTOS.tcp:
data = b"A" * 4 + b"B" * 16 + b"C" * 3
else:
data = b"A" * 16 + b"B" * 16 + b"C" * 3
port_in = self.random_port()
for i in range(2):
# out2in
pkts = self.create_stream_frag(self.pg0, out_addr,
port_in, out_port,
data, proto)
pkts.reverse()
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg1.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
self.pg0.remote_ip4,
in_addr)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].dport, in_port)
self.assertEqual(p[layer].sport, port_in)
self.assertEqual(p[layer].dport, in_port)
else:
self.assertEqual(p[layer].id, port_in)
self.assertEqual(data, p[Raw].load)
# in2out
if proto != IP_PROTOS.icmp:
pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
in_port,
p[layer].sport, data, proto)
else:
pkts = self.create_stream_frag(self.pg1, self.pg0.remote_ip4,
p[layer].id, 0, data, proto,
echo_reply=True)
pkts.reverse()
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
frags = self.pg0.get_capture(len(pkts))
p = self.reass_frags_and_verify(frags,
out_addr,
self.pg0.remote_ip4)
if proto != IP_PROTOS.icmp:
self.assertEqual(p[layer].sport, out_port)
self.assertEqual(p[layer].dport, port_in)
else:
self.assertEqual(p[layer].id, port_in)
self.assertEqual(data, p[Raw].load)
def init_tcp_session(self, in_if, out_if, in_port, ext_port):
# SYN packet in->out
p = (Ether(src=in_if.remote_mac, dst=in_if.local_mac) /
IP(src=in_if.remote_ip4, dst=out_if.remote_ip4) /
TCP(sport=in_port, dport=ext_port, flags="S"))
in_if.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = out_if.get_capture(1)
p = capture[0]
out_port = p[TCP].sport
# SYN + ACK packet out->in
p = (Ether(src=out_if.remote_mac, dst=out_if.local_mac) /
IP(src=out_if.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port, flags="SA"))
out_if.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
in_if.get_capture(1)
# ACK packet in->out
p = (Ether(src=in_if.remote_mac, dst=in_if.local_mac) /
IP(src=in_if.remote_ip4, dst=out_if.remote_ip4) /
TCP(sport=in_port, dport=ext_port, flags="A"))
in_if.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
out_if.get_capture(1)
return out_port
def twice_nat_common(self, self_twice_nat=False, same_pg=False, lb=False,
client_id=None):
twice_nat_addr = '10.0.1.3'
port_in = 8080
if lb:
if not same_pg:
port_in1 = port_in
port_in2 = port_in
else:
port_in1 = port_in + 1
port_in2 = port_in + 2
port_out = 80
eh_port_out = 4567
server1 = self.pg0.remote_hosts[0]
server2 = self.pg0.remote_hosts[1]
if lb and same_pg:
server2 = server1
if not lb:
server = server1
pg0 = self.pg0
if same_pg:
pg1 = self.pg0
else:
pg1 = self.pg1
eh_translate = ((not self_twice_nat) or (not lb and same_pg) or
client_id == 1)
self.nat_add_address(self.nat_addr)
self.nat_add_address(twice_nat_addr, twice_nat=1)
flags = 0
if self_twice_nat:
flags |= self.config_flags.NAT_IS_SELF_TWICE_NAT
else:
flags |= self.config_flags.NAT_IS_TWICE_NAT
if not lb:
self.nat_add_static_mapping(pg0.remote_ip4, self.nat_addr,
port_in, port_out,
proto=IP_PROTOS.tcp,
flags=flags)
else:
locals = [{'addr': server1.ip4,
'port': port_in1,
'probability': 50,
'vrf_id': 0},
{'addr': server2.ip4,
'port': port_in2,
'probability': 50,
'vrf_id': 0}]
out_addr = self.nat_addr
self.vapi.nat44_add_del_lb_static_mapping(is_add=1, flags=flags,
external_addr=out_addr,
external_port=port_out,
protocol=IP_PROTOS.tcp,
local_num=len(locals),
locals=locals)
self.nat_add_inside_interface(pg0)
self.nat_add_outside_interface(pg1)
if same_pg:
if not lb:
client = server
else:
assert client_id is not None
if client_id == 1:
client = self.pg0.remote_hosts[0]
elif client_id == 2:
client = self.pg0.remote_hosts[1]
else:
client = pg1.remote_hosts[0]
p = (Ether(src=pg1.remote_mac, dst=pg1.local_mac) /
IP(src=client.ip4, dst=self.nat_addr) /
TCP(sport=eh_port_out, dport=port_out))
pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
if lb:
if ip.dst == server1.ip4:
server = server1
port_in = port_in1
else:
server = server2
port_in = port_in2
self.assertEqual(ip.dst, server.ip4)
if lb and same_pg:
self.assertIn(tcp.dport, [port_in1, port_in2])
else:
self.assertEqual(tcp.dport, port_in)
if eh_translate:
self.assertEqual(ip.src, twice_nat_addr)
self.assertNotEqual(tcp.sport, eh_port_out)
else:
self.assertEqual(ip.src, client.ip4)
self.assertEqual(tcp.sport, eh_port_out)
eh_addr_in = ip.src
eh_port_in = tcp.sport
saved_port_in = tcp.dport
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
p = (Ether(src=server.mac, dst=pg0.local_mac) /
IP(src=server.ip4, dst=eh_addr_in) /
TCP(sport=saved_port_in, dport=eh_port_in))
pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, client.ip4)
self.assertEqual(ip.src, self.nat_addr)
self.assertEqual(tcp.dport, eh_port_out)
self.assertEqual(tcp.sport, port_out)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
if eh_translate:
sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
self.assertEqual(len(sessions), 1)
self.assertTrue(sessions[0].flags &
self.config_flags.NAT_IS_EXT_HOST_VALID)
self.assertTrue(sessions[0].flags &
self.config_flags.NAT_IS_TWICE_NAT)
self.logger.info(self.vapi.cli("show nat44 sessions"))
self.vapi.nat44_del_session(
address=sessions[0].inside_ip_address,
port=sessions[0].inside_port,
protocol=sessions[0].protocol,
flags=(self.config_flags.NAT_IS_INSIDE |
self.config_flags.NAT_IS_EXT_HOST_VALID),
ext_host_address=sessions[0].ext_host_nat_address,
ext_host_port=sessions[0].ext_host_nat_port)
sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
self.assertEqual(len(sessions), 0)
def verify_syslog_sess(self, data, is_add=True, is_ip6=False):
message = data.decode('utf-8')
try:
message = SyslogMessage.parse(message)
except ParseError as e:
self.logger.error(e)
raise
else:
self.assertEqual(message.severity, SyslogSeverity.info)
self.assertEqual(message.appname, 'NAT')
self.assertEqual(message.msgid, 'SADD' if is_add else 'SDEL')
sd_params = message.sd.get('nsess')
self.assertTrue(sd_params is not None)
if is_ip6:
self.assertEqual(sd_params.get('IATYP'), 'IPv6')
self.assertEqual(sd_params.get('ISADDR'), self.pg0.remote_ip6)
else:
self.assertEqual(sd_params.get('IATYP'), 'IPv4')
self.assertEqual(sd_params.get('ISADDR'), self.pg0.remote_ip4)
self.assertTrue(sd_params.get('SSUBIX') is not None)
self.assertEqual(sd_params.get('ISPORT'), "%d" % self.tcp_port_in)
self.assertEqual(sd_params.get('XATYP'), 'IPv4')
self.assertEqual(sd_params.get('XSADDR'), self.nat_addr)
self.assertEqual(sd_params.get('XSPORT'), "%d" % self.tcp_port_out)
self.assertEqual(sd_params.get('PROTO'), "%d" % IP_PROTOS.tcp)
self.assertEqual(sd_params.get('SVLAN'), '0')
self.assertEqual(sd_params.get('XDADDR'), self.pg1.remote_ip4)
self.assertEqual(sd_params.get('XDPORT'),
"%d" % self.tcp_external_port)
class TestNAT44ED(NAT44EDTestCase):
""" NAT44ED Test Case """
def test_users_dump(self):
""" NAT44ED API test - nat44_user_dump """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat44_forwarding_enable_disable(enable=1)
local_ip = self.pg0.remote_ip4
external_ip = self.nat_addr
self.nat_add_static_mapping(local_ip, external_ip)
users = self.vapi.nat44_user_dump()
self.assertEqual(len(users), 0)
# in2out - static mapping match
pkts = self.create_stream_out(self.pg1)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, same_port=True)
users = self.vapi.nat44_user_dump()
self.assertEqual(len(users), 1)
static_user = users[0]
self.assertEqual(static_user.nstaticsessions, 3)
self.assertEqual(static_user.nsessions, 0)
# in2out - no static mapping match (forwarding test)
host0 = self.pg0.remote_hosts[0]
self.pg0.remote_hosts[0] = self.pg0.remote_hosts[1]
try:
pkts = self.create_stream_out(self.pg1,
dst_ip=self.pg0.remote_ip4,
use_inside_ports=True)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, nat_ip=self.pg0.remote_ip4,
same_port=True)
finally:
self.pg0.remote_hosts[0] = host0
users = self.vapi.nat44_user_dump()
self.assertEqual(len(users), 2)
if str(users[0].ip_address) == self.pg0.remote_hosts[0].ip4:
non_static_user = users[1]
static_user = users[0]
else:
non_static_user = users[0]
static_user = users[1]
self.assertEqual(static_user.nstaticsessions, 3)
self.assertEqual(static_user.nsessions, 0)
self.assertEqual(non_static_user.nstaticsessions, 0)
self.assertEqual(non_static_user.nsessions, 3)
users = self.vapi.nat44_user_dump()
self.assertEqual(len(users), 2)
if str(users[0].ip_address) == self.pg0.remote_hosts[0].ip4:
non_static_user = users[1]
static_user = users[0]
else:
non_static_user = users[0]
static_user = users[1]
self.assertEqual(static_user.nstaticsessions, 3)
self.assertEqual(static_user.nsessions, 0)
self.assertEqual(non_static_user.nstaticsessions, 0)
self.assertEqual(non_static_user.nsessions, 3)
def test_frag_out_of_order_do_not_translate(self):
""" NAT44ED don't translate fragments arriving out of order """
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat44_forwarding_enable_disable(enable=True)
self.frag_out_of_order(proto=IP_PROTOS.tcp, dont_translate=True)
def test_forwarding(self):
""" NAT44ED forwarding test """
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat44_forwarding_enable_disable(enable=1)
real_ip = self.pg0.remote_ip4
alias_ip = self.nat_addr
flags = self.config_flags.NAT_IS_ADDR_ONLY
self.vapi.nat44_add_del_static_mapping(is_add=1,
local_ip_address=real_ip,
external_ip_address=alias_ip,
external_sw_if_index=0xFFFFFFFF,
flags=flags)
try:
# in2out - static mapping match
pkts = self.create_stream_out(self.pg1)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, same_port=True)
# in2out - no static mapping match
host0 = self.pg0.remote_hosts[0]
self.pg0.remote_hosts[0] = self.pg0.remote_hosts[1]
try:
pkts = self.create_stream_out(self.pg1,
dst_ip=self.pg0.remote_ip4,
use_inside_ports=True)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, nat_ip=self.pg0.remote_ip4,
same_port=True)
finally:
self.pg0.remote_hosts[0] = host0
user = self.pg0.remote_hosts[1]
sessions = self.vapi.nat44_user_session_dump(user.ip4, 0)
self.assertEqual(len(sessions), 3)
self.assertTrue(sessions[0].flags &
self.config_flags.NAT_IS_EXT_HOST_VALID)
self.vapi.nat44_del_session(
address=sessions[0].inside_ip_address,
port=sessions[0].inside_port,
protocol=sessions[0].protocol,
flags=(self.config_flags.NAT_IS_INSIDE |
self.config_flags.NAT_IS_EXT_HOST_VALID),
ext_host_address=sessions[0].ext_host_address,
ext_host_port=sessions[0].ext_host_port)
sessions = self.vapi.nat44_user_session_dump(user.ip4, 0)
self.assertEqual(len(sessions), 2)
finally:
self.vapi.nat44_forwarding_enable_disable(enable=0)
flags = self.config_flags.NAT_IS_ADDR_ONLY
self.vapi.nat44_add_del_static_mapping(
is_add=0,
local_ip_address=real_ip,
external_ip_address=alias_ip,
external_sw_if_index=0xFFFFFFFF,
flags=flags)
def test_output_feature_and_service2(self):
""" NAT44ED interface output feature and service host direct access """
self.vapi.nat44_forwarding_enable_disable(enable=1)
self.nat_add_address(self.nat_addr)
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg1.sw_if_index, is_add=1,)
# session initiated from service host - translate
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
pkts = self.create_stream_out(self.pg1)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
# session initiated from remote host - do not translate
tcp_port_in = self.tcp_port_in
udp_port_in = self.udp_port_in
icmp_id_in = self.icmp_id_in
self.tcp_port_in = 60303
self.udp_port_in = 60304
self.icmp_id_in = 60305
try:
pkts = self.create_stream_out(self.pg1,
self.pg0.remote_ip4,
use_inside_ports=True)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, nat_ip=self.pg0.remote_ip4,
same_port=True)
finally:
self.tcp_port_in = tcp_port_in
self.udp_port_in = udp_port_in
self.icmp_id_in = icmp_id_in
def test_twice_nat(self):
""" NAT44ED Twice NAT """
self.twice_nat_common()
def test_self_twice_nat_positive(self):
""" NAT44ED Self Twice NAT (positive test) """
self.twice_nat_common(self_twice_nat=True, same_pg=True)
def test_self_twice_nat_lb_positive(self):
""" NAT44ED Self Twice NAT local service load balancing (positive test)
"""
self.twice_nat_common(lb=True, self_twice_nat=True, same_pg=True,
client_id=1)
def test_twice_nat_lb(self):
""" NAT44ED Twice NAT local service load balancing """
self.twice_nat_common(lb=True)
def test_output_feature(self):
""" NAT44ED interface output feature (in2out postrouting) """
self.vapi.nat44_forwarding_enable_disable(enable=1)
self.nat_add_address(self.nat_addr)
self.nat_add_outside_interface(self.pg0)
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg1.sw_if_index, is_add=1)
# in2out
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
# out2in
pkts = self.create_stream_out(self.pg1)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
# in2out
pkts = self.create_stream_in(self.pg0, self.pg1, ttl=2)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
# out2in
pkts = self.create_stream_out(self.pg1, ttl=2)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
# in2out
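        # ttl=1: packets must not be forwarded; expect ICMP time-exceeded back on pg0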
pkts = self.create_stream_in(self.pg0, self.pg1, ttl=1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
self.assertIn(ICMP, p)
self.assertEqual(p[ICMP].type, 11) # 11 == time-exceeded
def test_static_with_port_out2(self):
""" NAT44ED 1:1 NAPT asymmetrical rule """
external_port = 80
local_port = 8080
self.vapi.nat44_forwarding_enable_disable(enable=1)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
local_port, external_port,
proto=IP_PROTOS.tcp, flags=flags)
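        # out2in-only mapping: sessions are created only for traffic initiated from outside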
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# from client to service
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=12345, dport=external_port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg0.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# ICMP error
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
ICMP(type=11) / capture[0][IP])
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
self.assertEqual(p[IP].src, self.nat_addr)
inner = p[IPerror]
self.assertEqual(inner.dst, self.nat_addr)
self.assertEqual(inner[TCPerror].dport, external_port)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service back to client
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=local_port, dport=12345))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.nat_addr)
self.assertEqual(tcp.sport, external_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# ICMP error
p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
ICMP(type=11) / capture[0][IP])
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
self.assertEqual(p[IP].dst, self.pg0.remote_ip4)
inner = p[IPerror]
self.assertEqual(inner.src, self.pg0.remote_ip4)
self.assertEqual(inner[TCPerror].sport, local_port)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from client to server (no translation)
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.pg0.remote_ip4) /
TCP(sport=12346, dport=local_port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg0.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service back to client (no translation)
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=local_port, dport=12346))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.pg0.remote_ip4)
self.assertEqual(tcp.sport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_static_lb(self):
""" NAT44ED local service load balancing """
external_addr_n = self.nat_addr
external_port = 80
local_port = 8080
server1 = self.pg0.remote_hosts[0]
server2 = self.pg0.remote_hosts[1]
locals = [{'addr': server1.ip4,
'port': local_port,
'probability': 70,
'vrf_id': 0},
{'addr': server2.ip4,
'port': local_port,
'probability': 30,
'vrf_id': 0}]
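        # two local backends behind one external address:port, weighted 70/30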
self.nat_add_address(self.nat_addr)
self.vapi.nat44_add_del_lb_static_mapping(
is_add=1,
external_addr=external_addr_n,
external_port=external_port,
protocol=IP_PROTOS.tcp,
local_num=len(locals),
locals=locals)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg1.sw_if_index,
is_add=1)
# from client to service
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=12345, dport=external_port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
server = None
try:
ip = p[IP]
tcp = p[TCP]
self.assertIn(ip.dst, [server1.ip4, server2.ip4])
if ip.dst == server1.ip4:
server = server1
else:
server = server2
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service back to client
p = (Ether(src=server.mac, dst=self.pg0.local_mac) /
IP(src=server.ip4, dst=self.pg1.remote_ip4) /
TCP(sport=local_port, dport=12345))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.nat_addr)
self.assertEqual(tcp.sport, external_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
self.assertEqual(len(sessions), 1)
self.assertTrue(sessions[0].flags &
self.config_flags.NAT_IS_EXT_HOST_VALID)
self.vapi.nat44_del_session(
address=sessions[0].inside_ip_address,
port=sessions[0].inside_port,
protocol=sessions[0].protocol,
flags=(self.config_flags.NAT_IS_INSIDE |
self.config_flags.NAT_IS_EXT_HOST_VALID),
ext_host_address=sessions[0].ext_host_address,
ext_host_port=sessions[0].ext_host_port)
sessions = self.vapi.nat44_user_session_dump(server.ip4, 0)
self.assertEqual(len(sessions), 0)
def test_static_lb_2(self):
""" NAT44ED local service load balancing (asymmetrical rule) """
external_addr = self.nat_addr
external_port = 80
local_port = 8080
server1 = self.pg0.remote_hosts[0]
server2 = self.pg0.remote_hosts[1]
locals = [{'addr': server1.ip4,
'port': local_port,
'probability': 70,
'vrf_id': 0},
{'addr': server2.ip4,
'port': local_port,
'probability': 30,
'vrf_id': 0}]
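        # asymmetric (out2in-only) LB mapping with 70/30 backend weights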
self.vapi.nat44_forwarding_enable_disable(enable=1)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.vapi.nat44_add_del_lb_static_mapping(is_add=1, flags=flags,
external_addr=external_addr,
external_port=external_port,
protocol=IP_PROTOS.tcp,
local_num=len(locals),
locals=locals)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg1.sw_if_index,
is_add=1)
# from client to service
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=12345, dport=external_port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
server = None
try:
ip = p[IP]
tcp = p[TCP]
self.assertIn(ip.dst, [server1.ip4, server2.ip4])
if ip.dst == server1.ip4:
server = server1
else:
server = server2
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service back to client
p = (Ether(src=server.mac, dst=self.pg0.local_mac) /
IP(src=server.ip4, dst=self.pg1.remote_ip4) /
TCP(sport=local_port, dport=12345))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.nat_addr)
self.assertEqual(tcp.sport, external_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from client to server (no translation)
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=server1.ip4) /
TCP(sport=12346, dport=local_port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
server = None
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, server1.ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service back to client (no translation)
p = (Ether(src=server1.mac, dst=self.pg0.local_mac) /
IP(src=server1.ip4, dst=self.pg1.remote_ip4) /
TCP(sport=local_port, dport=12346))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, server1.ip4)
self.assertEqual(tcp.sport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_lb_affinity(self):
""" NAT44ED local service load balancing affinity """
external_addr = self.nat_addr
external_port = 80
local_port = 8080
server1 = self.pg0.remote_hosts[0]
server2 = self.pg0.remote_hosts[1]
locals = [{'addr': server1.ip4,
'port': local_port,
'probability': 50,
'vrf_id': 0},
{'addr': server2.ip4,
'port': local_port,
'probability': 50,
'vrf_id': 0}]
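        # affinity keeps follow-up connections from the same client on the same backend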
self.nat_add_address(self.nat_addr)
self.vapi.nat44_add_del_lb_static_mapping(is_add=1,
external_addr=external_addr,
external_port=external_port,
protocol=IP_PROTOS.tcp,
affinity=10800,
local_num=len(locals),
locals=locals)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg1.sw_if_index,
is_add=1)
p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=1025, dport=external_port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
backend = capture[0][IP].dst
sessions = self.vapi.nat44_user_session_dump(backend, 0)
self.assertEqual(len(sessions), 1)
self.assertTrue(sessions[0].flags &
self.config_flags.NAT_IS_EXT_HOST_VALID)
self.vapi.nat44_del_session(
address=sessions[0].inside_ip_address,
port=sessions[0].inside_port,
protocol=sessions[0].protocol,
flags=(self.config_flags.NAT_IS_INSIDE |
self.config_flags.NAT_IS_EXT_HOST_VALID),
ext_host_address=sessions[0].ext_host_address,
ext_host_port=sessions[0].ext_host_port)
pkts = []
for port in range(1030, 1100):
p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=port, dport=external_port))
pkts.append(p)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
self.assertEqual(p[IP].dst, backend)
def test_multiple_vrf_1(self):
""" Multiple VRF - both client & service in VRF1 """
external_addr = '1.2.3.4'
external_port = 80
local_port = 8080
port = 0
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1, flags=flags)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg6.sw_if_index,
is_add=1)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
local_port, external_port, vrf_id=1,
proto=IP_PROTOS.tcp, flags=flags)
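        # client (pg6, VRF1) reaches the service (pg5, VRF1) via its external address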
p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
IP(src=self.pg6.remote_ip4, dst=external_addr) /
TCP(sport=12345, dport=external_port))
self.pg6.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg5.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg5.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
IP(src=self.pg5.remote_ip4, dst=self.pg6.remote_ip4) /
TCP(sport=local_port, dport=12345))
self.pg5.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg6.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, external_addr)
self.assertEqual(tcp.sport, external_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_multiple_vrf_2(self):
""" Multiple VRF - dynamic NAT from VRF1 to VRF0 (output-feature) """
external_addr = '1.2.3.4'
external_port = 80
local_port = 8080
port = 0
self.nat_add_address(self.nat_addr)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg1.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1, flags=flags)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
local_port, external_port, vrf_id=1,
proto=IP_PROTOS.tcp, flags=flags)
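        # dynamic in2out: VRF1 client (pg5) reaches a VRF0 host (pg1) via the output feature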
p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
IP(src=self.pg5.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=2345, dport=22))
self.pg5.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.nat_addr)
self.assert_packet_checksums_valid(p)
port = tcp.sport
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=22, dport=port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg5.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg5.remote_ip4)
self.assertEqual(tcp.dport, 2345)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_multiple_vrf_3(self):
""" Multiple VRF - client in VRF1, service in VRF0 """
external_addr = '1.2.3.4'
external_port = 80
local_port = 8080
port = 0
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
is_add=1, flags=flags)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg6.sw_if_index,
is_add=1)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(
self.pg0.remote_ip4,
external_sw_if_index=self.pg0.sw_if_index,
local_port=local_port,
vrf_id=0,
external_port=external_port,
proto=IP_PROTOS.tcp,
flags=flags
)
# from client VRF1 to service VRF0
p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
IP(src=self.pg6.remote_ip4, dst=self.pg0.local_ip4) /
TCP(sport=12346, dport=external_port))
self.pg6.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg0.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service VRF0 back to client VRF1
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg6.remote_ip4) /
TCP(sport=local_port, dport=12346))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg6.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.pg0.local_ip4)
self.assertEqual(tcp.sport, external_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_multiple_vrf_4(self):
""" Multiple VRF - client in VRF0, service in VRF1 """
external_addr = '1.2.3.4'
external_port = 80
local_port = 8080
port = 0
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
is_add=1, flags=flags)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1, flags=flags)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
local_port, external_port, vrf_id=1,
proto=IP_PROTOS.tcp, flags=flags)
# from client VRF0 to service VRF1
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=external_addr) /
TCP(sport=12347, dport=external_port))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg5.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg5.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service VRF1 back to client VRF0
p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
IP(src=self.pg5.remote_ip4, dst=self.pg0.remote_ip4) /
TCP(sport=local_port, dport=12347))
self.pg5.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, external_addr)
self.assertEqual(tcp.sport, external_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_multiple_vrf_5(self):
""" Multiple VRF - forwarding - no translation """
external_addr = '1.2.3.4'
external_port = 80
local_port = 8080
port = 0
self.vapi.nat44_forwarding_enable_disable(enable=1)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
is_add=1, flags=flags)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg5.sw_if_index,
is_add=1, flags=flags)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg6.sw_if_index,
is_add=1)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(self.pg5.remote_ip4, external_addr,
local_port, external_port, vrf_id=1,
proto=IP_PROTOS.tcp, flags=flags)
self.nat_add_static_mapping(
self.pg0.remote_ip4,
external_sw_if_index=self.pg0.sw_if_index,
local_port=local_port,
vrf_id=0,
external_port=external_port,
proto=IP_PROTOS.tcp,
flags=flags
)
# from client to server (both VRF1, no translation)
p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
IP(src=self.pg6.remote_ip4, dst=self.pg5.remote_ip4) /
TCP(sport=12348, dport=local_port))
self.pg6.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg5.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg5.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from server back to client (both VRF1, no translation)
p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
IP(src=self.pg5.remote_ip4, dst=self.pg6.remote_ip4) /
TCP(sport=local_port, dport=12348))
self.pg5.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg6.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.pg5.remote_ip4)
self.assertEqual(tcp.sport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from client VRF1 to server VRF0 (no translation)
        p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
             IP(src=self.pg6.remote_ip4, dst=self.pg0.remote_ip4) /
             TCP(sport=12349, dport=local_port))
        self.pg6.add_stream(p)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        capture = self.pg0.get_capture(1)
        p = capture[0]
        try:
            ip = p[IP]
            tcp = p[TCP]
            self.assertEqual(ip.dst, self.pg0.remote_ip4)
            self.assertEqual(tcp.dport, local_port)
            self.assert_packet_checksums_valid(p)
        except:
            self.logger.error(ppp("Unexpected or invalid packet:", p))
            raise
# from server VRF0 back to client VRF1 (no translation)
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg6.remote_ip4) /
TCP(sport=local_port, dport=12349))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg6.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.pg0.remote_ip4)
self.assertEqual(tcp.sport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from client VRF0 to server VRF1 (no translation)
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg5.remote_ip4) /
TCP(sport=12344, dport=local_port))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg5.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg5.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from server VRF1 back to client VRF0 (no translation)
p = (Ether(src=self.pg5.remote_mac, dst=self.pg5.local_mac) /
IP(src=self.pg5.remote_ip4, dst=self.pg0.remote_ip4) /
TCP(sport=local_port, dport=12344))
self.pg5.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.pg5.remote_ip4)
self.assertEqual(tcp.sport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_outside_address_distribution(self):
""" Outside address distribution based on source address """
x = 100
nat_addresses = []
for i in range(1, x):
a = "10.0.0.%d" % i
nat_addresses.append(a)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat44_add_del_address_range(
first_ip_address=nat_addresses[0],
last_ip_address=nat_addresses[-1],
vrf_id=0xFFFFFFFF, is_add=1, flags=0)
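        # each inside host should map deterministically to one pool address based on its source IP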
self.pg0.generate_remote_hosts(x)
pkts = []
for i in range(x):
info = self.create_packet_info(self.pg0, self.pg1)
payload = self.info_to_payload(info)
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_hosts[i].ip4,
dst=self.pg1.remote_ip4) /
UDP(sport=7000+i, dport=8000+i) /
Raw(payload))
info.data = p
pkts.append(p)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
recvd = self.pg1.get_capture(len(pkts))
for p_recvd in recvd:
payload_info = self.payload_to_info(p_recvd[Raw])
packet_index = payload_info.index
info = self._packet_infos[packet_index]
self.assertTrue(info is not None)
self.assertEqual(packet_index, info.index)
p_sent = info.data
packed = socket.inet_aton(p_sent[IP].src)
numeric = struct.unpack("!L", packed)[0]
numeric = socket.htonl(numeric)
a = nat_addresses[(numeric-1) % len(nat_addresses)]
self.assertEqual(
a, p_recvd[IP].src,
"Invalid packet (src IP %s translated to %s, but expected %s)"
% (p_sent[IP].src, p_recvd[IP].src, a))
class TestNAT44EDMW(TestNAT44ED):
""" NAT44ED MW Test Case """
vpp_worker_count = 4
max_sessions = 5000
@unittest.skip('MW fix required')
def test_users_dump(self):
""" NAT44ED API test - nat44_user_dump """
@unittest.skip('MW fix required')
def test_frag_out_of_order_do_not_translate(self):
""" NAT44ED don't translate fragments arriving out of order """
@unittest.skip('MW fix required')
def test_forwarding(self):
""" NAT44ED forwarding test """
@unittest.skip('MW fix required')
def test_twice_nat(self):
""" NAT44ED Twice NAT """
@unittest.skip('MW fix required')
def test_twice_nat_lb(self):
""" NAT44ED Twice NAT local service load balancing """
@unittest.skip('MW fix required')
def test_output_feature(self):
""" NAT44ED interface output feature (in2out postrouting) """
@unittest.skip('MW fix required')
def test_static_with_port_out2(self):
""" NAT44ED 1:1 NAPT asymmetrical rule """
@unittest.skip('MW fix required')
def test_output_feature_and_service2(self):
""" NAT44ED interface output feature and service host direct access """
@unittest.skip('MW fix required')
def test_static_lb(self):
""" NAT44ED local service load balancing """
@unittest.skip('MW fix required')
def test_static_lb_2(self):
""" NAT44ED local service load balancing (asymmetrical rule) """
@unittest.skip('MW fix required')
def test_lb_affinity(self):
""" NAT44ED local service load balancing affinity """
@unittest.skip('MW fix required')
def test_multiple_vrf(self):
""" NAT44ED Multiple VRF setup """
@unittest.skip('MW fix required')
def test_self_twice_nat_positive(self):
""" NAT44ED Self Twice NAT (positive test) """
@unittest.skip('MW fix required')
def test_self_twice_nat_lb_positive(self):
""" NAT44ED Self Twice NAT local service load balancing (positive test)
"""
def test_dynamic(self):
""" NAT44ED dynamic translation test """
pkt_count = 1500
tcp_port_offset = 20
udp_port_offset = 20
icmp_id_offset = 20
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# in2out
tc1 = self.statistics['/nat44-ed/in2out/slowpath/tcp']
uc1 = self.statistics['/nat44-ed/in2out/slowpath/udp']
ic1 = self.statistics['/nat44-ed/in2out/slowpath/icmp']
dc1 = self.statistics['/nat44-ed/in2out/slowpath/drops']
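        # build per-worker packet lists; the worker is selected by source port / ICMP id modulo worker count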
i2o_pkts = [[] for x in range(0, self.vpp_worker_count)]
for i in range(pkt_count):
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=tcp_port_offset + i, dport=20))
i2o_pkts[p[TCP].sport % self.vpp_worker_count].append(p)
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
UDP(sport=udp_port_offset + i, dport=20))
i2o_pkts[p[UDP].sport % self.vpp_worker_count].append(p)
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
ICMP(id=icmp_id_offset + i, type='echo-request'))
i2o_pkts[p[ICMP].id % self.vpp_worker_count].append(p)
for i in range(0, self.vpp_worker_count):
if len(i2o_pkts[i]) > 0:
self.pg0.add_stream(i2o_pkts[i], worker=i)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(pkt_count * 3)
if_idx = self.pg0.sw_if_index
tc2 = self.statistics['/nat44-ed/in2out/slowpath/tcp']
uc2 = self.statistics['/nat44-ed/in2out/slowpath/udp']
ic2 = self.statistics['/nat44-ed/in2out/slowpath/icmp']
dc2 = self.statistics['/nat44-ed/in2out/slowpath/drops']
self.assertEqual(
tc2[:, if_idx].sum() - tc1[:, if_idx].sum(), pkt_count)
self.assertEqual(
uc2[:, if_idx].sum() - uc1[:, if_idx].sum(), pkt_count)
self.assertEqual(
ic2[:, if_idx].sum() - ic1[:, if_idx].sum(), pkt_count)
self.assertEqual(dc2[:, if_idx].sum() - dc1[:, if_idx].sum(), 0)
self.logger.info(self.vapi.cli("show trace"))
# out2in
tc1 = self.statistics['/nat44-ed/out2in/fastpath/tcp']
uc1 = self.statistics['/nat44-ed/out2in/fastpath/udp']
ic1 = self.statistics['/nat44-ed/out2in/fastpath/icmp']
dc1 = self.statistics['/nat44-ed/out2in/fastpath/drops']
recvd_tcp_ports = set()
recvd_udp_ports = set()
recvd_icmp_ids = set()
for p in capture:
if TCP in p:
recvd_tcp_ports.add(p[TCP].sport)
if UDP in p:
recvd_udp_ports.add(p[UDP].sport)
if ICMP in p:
recvd_icmp_ids.add(p[ICMP].id)
recvd_tcp_ports = list(recvd_tcp_ports)
recvd_udp_ports = list(recvd_udp_ports)
recvd_icmp_ids = list(recvd_icmp_ids)
o2i_pkts = [[] for x in range(0, self.vpp_worker_count)]
for i in range(pkt_count):
p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(dport=choice(recvd_tcp_ports), sport=20))
o2i_pkts[p[TCP].dport % self.vpp_worker_count].append(p)
p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
UDP(dport=choice(recvd_udp_ports), sport=20))
o2i_pkts[p[UDP].dport % self.vpp_worker_count].append(p)
p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
ICMP(id=choice(recvd_icmp_ids), type='echo-reply'))
o2i_pkts[p[ICMP].id % self.vpp_worker_count].append(p)
for i in range(0, self.vpp_worker_count):
if len(o2i_pkts[i]) > 0:
self.pg1.add_stream(o2i_pkts[i], worker=i)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(pkt_count * 3)
for packet in capture:
try:
self.assert_packet_checksums_valid(packet)
self.assertEqual(packet[IP].dst, self.pg0.remote_ip4)
if packet.haslayer(TCP):
self.assert_in_range(
packet[TCP].dport, tcp_port_offset,
tcp_port_offset + pkt_count, "dst TCP port")
elif packet.haslayer(UDP):
self.assert_in_range(
packet[UDP].dport, udp_port_offset,
udp_port_offset + pkt_count, "dst UDP port")
else:
self.assert_in_range(
packet[ICMP].id, icmp_id_offset,
icmp_id_offset + pkt_count, "ICMP id")
except:
self.logger.error(ppp("Unexpected or invalid packet "
"(inside network):", packet))
raise
if_idx = self.pg1.sw_if_index
tc2 = self.statistics['/nat44-ed/out2in/fastpath/tcp']
uc2 = self.statistics['/nat44-ed/out2in/fastpath/udp']
ic2 = self.statistics['/nat44-ed/out2in/fastpath/icmp']
dc2 = self.statistics['/nat44-ed/out2in/fastpath/drops']
self.assertEqual(
tc2[:, if_idx].sum() - tc1[:, if_idx].sum(), pkt_count)
self.assertEqual(
uc2[:, if_idx].sum() - uc1[:, if_idx].sum(), pkt_count)
self.assertEqual(
ic2[:, if_idx].sum() - ic1[:, if_idx].sum(), pkt_count)
self.assertEqual(dc2[:, if_idx].sum() - dc1[:, if_idx].sum(), 0)
sc = self.statistics['/nat44-ed/total-sessions']
self.assertEqual(sc[:, 0].sum(), len(recvd_tcp_ports) +
len(recvd_udp_ports) + len(recvd_icmp_ids))
def test_frag_in_order(self):
""" NAT44ED translate fragments arriving in order """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.frag_in_order(proto=IP_PROTOS.tcp, ignore_port=True)
self.frag_in_order(proto=IP_PROTOS.udp, ignore_port=True)
self.frag_in_order(proto=IP_PROTOS.icmp, ignore_port=True)
def test_frag_in_order_do_not_translate(self):
""" NAT44ED don't translate fragments arriving in order """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat44_forwarding_enable_disable(enable=True)
self.frag_in_order(proto=IP_PROTOS.tcp, dont_translate=True)
def test_frag_out_of_order(self):
""" NAT44ED translate fragments arriving out of order """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.frag_out_of_order(proto=IP_PROTOS.tcp, ignore_port=True)
self.frag_out_of_order(proto=IP_PROTOS.udp, ignore_port=True)
self.frag_out_of_order(proto=IP_PROTOS.icmp, ignore_port=True)
def test_frag_in_order_in_plus_out(self):
""" NAT44ED in+out interface fragments in order """
in_port = self.random_port()
out_port = self.random_port()
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg0)
self.nat_add_inside_interface(self.pg1)
self.nat_add_outside_interface(self.pg1)
# add static mappings for server
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.tcp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.udp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
proto=IP_PROTOS.icmp)
# run tests for each protocol
self.frag_in_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.tcp)
self.frag_in_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.udp)
self.frag_in_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.icmp)
def test_frag_out_of_order_in_plus_out(self):
""" NAT44ED in+out interface fragments out of order """
in_port = self.random_port()
out_port = self.random_port()
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg0)
self.nat_add_inside_interface(self.pg1)
self.nat_add_outside_interface(self.pg1)
# add static mappings for server
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.tcp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
in_port,
out_port,
proto=IP_PROTOS.udp)
self.nat_add_static_mapping(self.server_addr,
self.nat_addr,
proto=IP_PROTOS.icmp)
# run tests for each protocol
self.frag_out_of_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.tcp)
self.frag_out_of_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.udp)
self.frag_out_of_order_in_plus_out(self.server_addr,
self.nat_addr,
in_port,
out_port,
IP_PROTOS.icmp)
def test_reass_hairpinning(self):
""" NAT44ED fragments hairpinning """
server_addr = self.pg0.remote_hosts[1].ip4
host_in_port = self.random_port()
server_in_port = self.random_port()
server_out_port = self.random_port()
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# add static mapping for server
self.nat_add_static_mapping(server_addr, self.nat_addr,
server_in_port, server_out_port,
proto=IP_PROTOS.tcp)
self.nat_add_static_mapping(server_addr, self.nat_addr,
server_in_port, server_out_port,
proto=IP_PROTOS.udp)
self.nat_add_static_mapping(server_addr, self.nat_addr)
self.reass_hairpinning(server_addr, server_in_port, server_out_port,
host_in_port, proto=IP_PROTOS.tcp,
ignore_port=True)
self.reass_hairpinning(server_addr, server_in_port, server_out_port,
host_in_port, proto=IP_PROTOS.udp,
ignore_port=True)
self.reass_hairpinning(server_addr, server_in_port, server_out_port,
host_in_port, proto=IP_PROTOS.icmp,
ignore_port=True)
def test_session_limit_per_vrf(self):
""" NAT44ED per vrf session limit """
inside = self.pg0
inside_vrf10 = self.pg2
outside = self.pg1
limit = 5
        # session limit applies only to vrf 10 (pg2 inside interface); pg0 (vrf 0) is not limited
        # note: a non-existent vrf_id makes the process core dump
self.vapi.nat44_set_session_limit(session_limit=limit, vrf_id=10)
self.nat_add_inside_interface(inside)
self.nat_add_inside_interface(inside_vrf10)
self.nat_add_outside_interface(outside)
# vrf independent
self.nat_add_interface_address(outside)
# BUG: causing core dump - when bad vrf_id is specified
# self.nat_add_address(outside.local_ip4, vrf_id=20)
stream = self.create_tcp_stream(inside_vrf10, outside, limit * 2)
inside_vrf10.add_stream(stream)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = outside.get_capture(limit)
stream = self.create_tcp_stream(inside, outside, limit * 2)
inside.add_stream(stream)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = outside.get_capture(len(stream))
def test_show_max_translations(self):
""" NAT44ED API test - max translations per thread """
nat_config = self.vapi.nat_show_config_2()
self.assertEqual(self.max_sessions,
nat_config.max_translations_per_thread)
def test_lru_cleanup(self):
""" NAT44ED LRU cleanup algorithm """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat_set_timeouts(
udp=1, tcp_established=7440, tcp_transitory=30, icmp=1)
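        # 1s UDP/ICMP timeouts let those sessions expire so LRU cleanup can reclaim them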
tcp_port_out = self.init_tcp_session(self.pg0, self.pg1, 2000, 80)
pkts = []
for i in range(0, self.max_sessions - 1):
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4, ttl=64) /
UDP(sport=7000+i, dport=80))
pkts.append(p)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(len(pkts))
self.sleep(1.5, "wait for timeouts")
pkts = []
for i in range(0, self.max_sessions - 1):
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4, ttl=64) /
ICMP(id=8000+i, type='echo-request'))
pkts.append(p)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(len(pkts))
def test_session_rst_timeout(self):
""" NAT44ED session RST timeouts """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
tcp_transitory=5, icmp=60)
self.init_tcp_session(self.pg0, self.pg1, self.tcp_port_in,
self.tcp_external_port)
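        # RST moves the established session into transitory state (5s timeout)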
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=self.tcp_port_in, dport=self.tcp_external_port,
flags="R"))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
self.sleep(6)
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=self.tcp_port_in + 1, dport=self.tcp_external_port + 1,
flags="S"))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
def test_dynamic_out_of_ports(self):
""" NAT44ED dynamic translation test: out of ports """
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# in2out and no NAT addresses added
err_old = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(0, timeout=1)
err_new = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
self.assertEqual(err_new - err_old, len(pkts))
# in2out after NAT addresses added
self.nat_add_address(self.nat_addr)
err_old = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
err_new = self.statistics.get_err_counter(
'/err/nat44-ed-in2out-slowpath/out of ports')
self.assertEqual(err_new, err_old)
def test_unknown_proto(self):
""" NAT44ED translate packet with unknown protocol """
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# in2out
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=self.tcp_port_in, dport=20))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
p = self.pg1.get_capture(1)
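        # GRE carries no ports; the packet should still be translated to the NAT address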
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
GRE() /
IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
TCP(sport=1234, dport=1234))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
p = self.pg1.get_capture(1)
packet = p[0]
try:
self.assertEqual(packet[IP].src, self.nat_addr)
self.assertEqual(packet[IP].dst, self.pg1.remote_ip4)
self.assertEqual(packet.haslayer(GRE), 1)
self.assert_packet_checksums_valid(packet)
except:
self.logger.error(ppp("Unexpected or invalid packet:", packet))
raise
# out2in
p = (Ether(dst=self.pg1.local_mac, src=self.pg1.remote_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
GRE() /
IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
TCP(sport=1234, dport=1234))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
p = self.pg0.get_capture(1)
packet = p[0]
try:
self.assertEqual(packet[IP].src, self.pg1.remote_ip4)
self.assertEqual(packet[IP].dst, self.pg0.remote_ip4)
self.assertEqual(packet.haslayer(GRE), 1)
self.assert_packet_checksums_valid(packet)
except:
self.logger.error(ppp("Unexpected or invalid packet:", packet))
raise
def test_hairpinning_unknown_proto(self):
""" NAT44ED translate packet with unknown protocol - hairpinning """
host = self.pg0.remote_hosts[0]
server = self.pg0.remote_hosts[1]
host_in_port = 1234
server_out_port = 8765
server_nat_ip = "10.0.0.11"
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
# add static mapping for server
self.nat_add_static_mapping(server.ip4, server_nat_ip)
# host to server
p = (Ether(src=host.mac, dst=self.pg0.local_mac) /
IP(src=host.ip4, dst=server_nat_ip) /
TCP(sport=host_in_port, dport=server_out_port))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(1)
p = (Ether(dst=self.pg0.local_mac, src=host.mac) /
IP(src=host.ip4, dst=server_nat_ip) /
GRE() /
IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
TCP(sport=1234, dport=1234))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
p = self.pg0.get_capture(1)
packet = p[0]
try:
self.assertEqual(packet[IP].src, self.nat_addr)
self.assertEqual(packet[IP].dst, server.ip4)
self.assertEqual(packet.haslayer(GRE), 1)
self.assert_packet_checksums_valid(packet)
except:
self.logger.error(ppp("Unexpected or invalid packet:", packet))
raise
# server to host
p = (Ether(dst=self.pg0.local_mac, src=server.mac) /
IP(src=server.ip4, dst=self.nat_addr) /
GRE() /
IP(src=self.pg2.remote_ip4, dst=self.pg2.remote_ip4) /
TCP(sport=1234, dport=1234))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
p = self.pg0.get_capture(1)
packet = p[0]
try:
self.assertEqual(packet[IP].src, server_nat_ip)
self.assertEqual(packet[IP].dst, host.ip4)
self.assertEqual(packet.haslayer(GRE), 1)
self.assert_packet_checksums_valid(packet)
except:
self.logger.error(ppp("Unexpected or invalid packet:", packet))
raise
def test_output_feature_and_service(self):
""" NAT44ED interface output feature and services """
external_addr = '1.2.3.4'
external_port = 80
local_port = 8080
self.vapi.nat44_forwarding_enable_disable(enable=1)
self.nat_add_address(self.nat_addr)
flags = self.config_flags.NAT_IS_ADDR_ONLY
self.vapi.nat44_add_del_identity_mapping(
ip_address=self.pg1.remote_ip4, sw_if_index=0xFFFFFFFF,
flags=flags, is_add=1)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(self.pg0.remote_ip4, external_addr,
local_port, external_port,
proto=IP_PROTOS.tcp, flags=flags)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg0)
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg1.sw_if_index, is_add=1)
# from client to service
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=external_addr) /
TCP(sport=12345, dport=external_port))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, self.pg0.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service back to client
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=local_port, dport=12345))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, external_addr)
self.assertEqual(tcp.sport, external_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from local network host to external network
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
pkts = self.create_stream_in(self.pg0, self.pg1)
self.pg0.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
# from external network back to local network host
pkts = self.create_stream_out(self.pg1)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg0)
def test_output_feature_and_service3(self):
""" NAT44ED interface output feature and DST NAT """
external_addr = '1.2.3.4'
external_port = 80
local_port = 8080
self.vapi.nat44_forwarding_enable_disable(enable=1)
self.nat_add_address(self.nat_addr)
flags = self.config_flags.NAT_IS_OUT2IN_ONLY
self.nat_add_static_mapping(self.pg1.remote_ip4, external_addr,
local_port, external_port,
proto=IP_PROTOS.tcp, flags=flags)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg0)
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg1.sw_if_index, is_add=1)
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=external_addr) /
TCP(sport=12345, dport=external_port))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.pg0.remote_ip4)
self.assertEqual(tcp.sport, 12345)
self.assertEqual(ip.dst, self.pg1.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.pg0.remote_ip4) /
TCP(sport=local_port, dport=12345))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, external_addr)
self.assertEqual(tcp.sport, external_port)
self.assertEqual(ip.dst, self.pg0.remote_ip4)
self.assertEqual(tcp.dport, 12345)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_self_twice_nat_lb_negative(self):
""" NAT44ED Self Twice NAT local service load balancing (negative test)
"""
self.twice_nat_common(lb=True, self_twice_nat=True, same_pg=True,
client_id=2)
def test_self_twice_nat_negative(self):
""" NAT44ED Self Twice NAT (negative test) """
self.twice_nat_common(self_twice_nat=True)
def test_static_lb_multi_clients(self):
""" NAT44ED local service load balancing - multiple clients"""
external_addr = self.nat_addr
external_port = 80
local_port = 8080
server1 = self.pg0.remote_hosts[0]
server2 = self.pg0.remote_hosts[1]
server3 = self.pg0.remote_hosts[2]
locals = [{'addr': server1.ip4,
'port': local_port,
'probability': 90,
'vrf_id': 0},
{'addr': server2.ip4,
'port': local_port,
'probability': 10,
'vrf_id': 0}]
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg1.sw_if_index,
is_add=1)
self.nat_add_address(self.nat_addr)
self.vapi.nat44_add_del_lb_static_mapping(is_add=1,
external_addr=external_addr,
external_port=external_port,
protocol=IP_PROTOS.tcp,
local_num=len(locals),
locals=locals)
server1_n = 0
server2_n = 0
clients = ip4_range(self.pg1.remote_ip4, 10, 50)
pkts = []
for client in clients:
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=client, dst=self.nat_addr) /
TCP(sport=12345, dport=external_port))
pkts.append(p)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
if p[IP].dst == server1.ip4:
server1_n += 1
else:
server2_n += 1
self.assertGreaterEqual(server1_n, server2_n)
local = {
'addr': server3.ip4,
'port': local_port,
'probability': 20,
'vrf_id': 0
}
# add new back-end
self.vapi.nat44_lb_static_mapping_add_del_local(
is_add=1,
external_addr=external_addr,
external_port=external_port,
local=local,
protocol=IP_PROTOS.tcp)
server1_n = 0
server2_n = 0
server3_n = 0
clients = ip4_range(self.pg1.remote_ip4, 60, 110)
pkts = []
for client in clients:
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=client, dst=self.nat_addr) /
TCP(sport=12346, dport=external_port))
pkts.append(p)
self.assertGreater(len(pkts), 0)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
if p[IP].dst == server1.ip4:
server1_n += 1
elif p[IP].dst == server2.ip4:
server2_n += 1
else:
server3_n += 1
self.assertGreater(server1_n, 0)
self.assertGreater(server2_n, 0)
self.assertGreater(server3_n, 0)
local = {
'addr': server2.ip4,
'port': local_port,
'probability': 10,
'vrf_id': 0
}
# remove one back-end
self.vapi.nat44_lb_static_mapping_add_del_local(
is_add=0,
external_addr=external_addr,
external_port=external_port,
local=local,
protocol=IP_PROTOS.tcp)
server1_n = 0
server2_n = 0
server3_n = 0
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(len(pkts))
for p in capture:
if p[IP].dst == server1.ip4:
server1_n += 1
elif p[IP].dst == server2.ip4:
server2_n += 1
else:
server3_n += 1
self.assertGreater(server1_n, 0)
self.assertEqual(server2_n, 0)
self.assertGreater(server3_n, 0)
def test_syslog_sess(self):
""" NAT44ED Test syslog session creation and deletion """
self.vapi.syslog_set_filter(
self.syslog_severity.SYSLOG_API_SEVERITY_INFO)
self.vapi.syslog_set_sender(self.pg3.local_ip4, self.pg3.remote_ip4)
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
p = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=self.tcp_port_in, dport=self.tcp_external_port))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg1.get_capture(1)
self.tcp_port_out = capture[0][TCP].sport
capture = self.pg3.get_capture(1)
self.verify_syslog_sess(capture[0][Raw].load)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
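        # removing the NAT address deletes the session; expect a session-delete syslog message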
self.nat_add_address(self.nat_addr, is_add=0)
capture = self.pg3.get_capture(1)
self.verify_syslog_sess(capture[0][Raw].load, False)
def test_twice_nat_interface_addr(self):
""" NAT44ED Acquire twice NAT addresses from interface """
flags = self.config_flags.NAT_IS_TWICE_NAT
self.vapi.nat44_add_del_interface_addr(
sw_if_index=self.pg11.sw_if_index,
flags=flags, is_add=1)
# no address in NAT pool
        addresses = self.vapi.nat44_address_dump()
        self.assertEqual(0, len(addresses))
        # configure interface address and check NAT address pool
        self.pg11.config_ip4()
        addresses = self.vapi.nat44_address_dump()
        self.assertEqual(1, len(addresses))
        self.assertEqual(str(addresses[0].ip_address),
                         self.pg11.local_ip4)
        self.assertEqual(addresses[0].flags, flags)
        # remove interface address and check NAT address pool
        self.pg11.unconfig_ip4()
        addresses = self.vapi.nat44_address_dump()
        self.assertEqual(0, len(addresses))
def test_output_feature_stateful_acl(self):
""" NAT44ED output feature works with stateful ACL """
self.nat_add_address(self.nat_addr)
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg0.sw_if_index,
flags=self.config_flags.NAT_IS_INSIDE, is_add=1)
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg1.sw_if_index,
flags=self.config_flags.NAT_IS_OUTSIDE, is_add=1)
# First ensure that the NAT is working sans ACL
# send packets out2in, no sessions yet so packets should drop
pkts_out2in = self.create_stream_out(self.pg1)
self.send_and_assert_no_replies(self.pg1, pkts_out2in)
# send packets into inside intf, ensure received via outside intf
pkts_in2out = self.create_stream_in(self.pg0, self.pg1)
capture = self.send_and_expect(self.pg0, pkts_in2out, self.pg1,
len(pkts_in2out))
self.verify_capture_out(capture, ignore_port=True)
# send out2in again, with sessions created it should work now
pkts_out2in = self.create_stream_out(self.pg1)
capture = self.send_and_expect(self.pg1, pkts_out2in, self.pg0,
len(pkts_out2in))
self.verify_capture_in(capture, self.pg0)
# Create an ACL blocking everything
out2in_deny_rule = AclRule(is_permit=0)
out2in_acl = VppAcl(self, rules=[out2in_deny_rule])
out2in_acl.add_vpp_config()
# create an ACL to permit/reflect everything
in2out_reflect_rule = AclRule(is_permit=2)
in2out_acl = VppAcl(self, rules=[in2out_reflect_rule])
in2out_acl.add_vpp_config()
# apply as input acl on interface and confirm it blocks everything
acl_if = VppAclInterface(self, sw_if_index=self.pg1.sw_if_index,
n_input=1, acls=[out2in_acl])
acl_if.add_vpp_config()
self.send_and_assert_no_replies(self.pg1, pkts_out2in)
# apply output acl
acl_if.acls = [out2in_acl, in2out_acl]
acl_if.add_vpp_config()
# send in2out to generate ACL state (NAT state was created earlier)
capture = self.send_and_expect(self.pg0, pkts_in2out, self.pg1,
len(pkts_in2out))
self.verify_capture_out(capture, ignore_port=True)
# send out2in again. ACL state exists so it should work now.
# TCP packets with the syn flag set also need the ack flag
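# (scapy TCP flag bits used below: 0x02 = SYN, 0x10 = ACK, so a bare SYN
# probe is rewritten into SYN+ACK before being replayed)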
for p in pkts_out2in:
if p.haslayer(TCP) and p[TCP].flags & 0x02:
p[TCP].flags |= 0x10
capture = self.send_and_expect(self.pg1, pkts_out2in, self.pg0,
len(pkts_out2in))
self.verify_capture_in(capture, self.pg0)
self.logger.info(self.vapi.cli("show trace"))
def test_tcp_close(self):
""" NAT44ED Close TCP session from inside network - output feature """
old_timeouts = self.vapi.nat_get_timeouts()
new_transitory = 2
self.vapi.nat_set_timeouts(
udp=old_timeouts.udp,
tcp_established=old_timeouts.tcp_established,
icmp=old_timeouts.icmp,
tcp_transitory=new_transitory)
self.vapi.nat44_forwarding_enable_disable(enable=1)
self.nat_add_address(self.pg1.local_ip4)
twice_nat_addr = '10.0.1.3'
service_ip = '192.168.16.150'
self.nat_add_address(twice_nat_addr, twice_nat=1)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg0.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat44_interface_add_del_output_feature(
is_add=1,
sw_if_index=self.pg1.sw_if_index)
flags = (self.config_flags.NAT_IS_OUT2IN_ONLY |
self.config_flags.NAT_IS_TWICE_NAT)
self.nat_add_static_mapping(self.pg0.remote_ip4,
service_ip, 80, 80,
proto=IP_PROTOS.tcp,
flags=flags)
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
start_sessnum = len(sessions)
# SYN packet out->in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=service_ip) /
TCP(sport=33898, dport=80, flags="S"))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg0.get_capture(1)
p = capture[0]
tcp_port = p[TCP].sport
# SYN + ACK packet in->out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=twice_nat_addr) /
TCP(sport=80, dport=tcp_port, flags="SA"))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
# ACK packet out->in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=service_ip) /
TCP(sport=33898, dport=80, flags="A"))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(1)
# FIN packet in -> out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=twice_nat_addr) /
TCP(sport=80, dport=tcp_port, flags="FA", seq=100, ack=300))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
# FIN+ACK packet out -> in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=service_ip) /
TCP(sport=33898, dport=80, flags="FA", seq=300, ack=101))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(1)
# ACK packet in -> out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=twice_nat_addr) /
TCP(sport=80, dport=tcp_port, flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
# session now in transitory timeout
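# (the FIN/FIN+ACK/ACK exchange above moved the session into the transitory
# state, whose timeout was shortened to new_transitory seconds at the start
# of this test)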
# try SYN packet out->in - should be dropped
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=service_ip) /
TCP(sport=33898, dport=80, flags="S"))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.sleep(new_transitory, "wait for transitory timeout")
self.pg0.assert_nothing_captured(0)
# session should still exist
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - start_sessnum, 1)
# send FIN+ACK packet out -> in - will cause session to be wiped
# but won't create a new session
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=service_ip) /
TCP(sport=33898, dport=80, flags="FA", seq=300, ack=101))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - start_sessnum, 0)
self.pg0.assert_nothing_captured(0)
def test_tcp_session_close_in(self):
""" NAT44ED Close TCP session from inside network """
in_port = self.tcp_port_in
out_port = 10505
ext_port = self.tcp_external_port
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
in_port, out_port, proto=IP_PROTOS.tcp,
flags=self.config_flags.NAT_IS_TWICE_NAT)
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
session_n = len(sessions)
self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
tcp_transitory=2, icmp=5)
self.init_tcp_session(self.pg0, self.pg1, in_port, ext_port)
# FIN packet in -> out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="FA", seq=100, ack=300))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
pkts = []
# ACK packet out -> in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="A", seq=300, ack=101))
pkts.append(p)
# FIN packet out -> in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="FA", seq=300, ack=101))
pkts.append(p)
self.pg1.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(2)
# ACK packet in -> out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - session_n, 1)
out2in_drops = self.get_err_counter(
'/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
in2out_drops = self.get_err_counter(
'/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
# extra FIN packet out -> in - this should be dropped
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="FA", seq=300, ack=101))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.assert_nothing_captured()
# extra ACK packet in -> out - this should be dropped
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.assert_nothing_captured()
stats = self.get_err_counter(
'/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
self.assertEqual(stats - out2in_drops, 1)
stats = self.get_err_counter(
'/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
self.assertEqual(stats - in2out_drops, 1)
self.sleep(3)
# extra ACK packet in -> out - this will cause session to be wiped
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.assert_nothing_captured()
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - session_n, 0)
def test_tcp_session_close_out(self):
""" NAT44ED Close TCP session from outside network """
in_port = self.tcp_port_in
out_port = 10505
ext_port = self.tcp_external_port
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
in_port, out_port, proto=IP_PROTOS.tcp,
flags=self.config_flags.NAT_IS_TWICE_NAT)
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
session_n = len(sessions)
self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
tcp_transitory=2, icmp=5)
_ = self.init_tcp_session(self.pg0, self.pg1, in_port, ext_port)
# FIN packet out -> in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="FA", seq=100, ack=300))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(1)
# FIN+ACK packet in -> out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="FA", seq=300, ack=101))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
# ACK packet out -> in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="A", seq=101, ack=301))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(1)
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - session_n, 1)
out2in_drops = self.get_err_counter(
'/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
in2out_drops = self.get_err_counter(
'/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
# extra FIN packet out -> in - this should be dropped
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="FA", seq=300, ack=101))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.assert_nothing_captured()
# extra ACK packet in -> out - this should be dropped
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.assert_nothing_captured()
stats = self.get_err_counter(
'/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
self.assertEqual(stats - out2in_drops, 1)
stats = self.get_err_counter(
'/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
self.assertEqual(stats - in2out_drops, 1)
self.sleep(3)
# extra ACK packet in -> out - this will cause session to be wiped
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.assert_nothing_captured()
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - session_n, 0)
def test_tcp_session_close_simultaneous(self):
""" NAT44ED Close TCP session from inside network """
in_port = self.tcp_port_in
ext_port = 10505
self.nat_add_address(self.nat_addr)
self.nat_add_inside_interface(self.pg0)
self.nat_add_outside_interface(self.pg1)
self.nat_add_static_mapping(self.pg0.remote_ip4, self.nat_addr,
in_port, ext_port, proto=IP_PROTOS.tcp,
flags=self.config_flags.NAT_IS_TWICE_NAT)
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
session_n = len(sessions)
self.vapi.nat_set_timeouts(udp=300, tcp_established=7440,
tcp_transitory=2, icmp=5)
out_port = self.init_tcp_session(self.pg0, self.pg1, in_port, ext_port)
# FIN packet in -> out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="FA", seq=100, ack=300))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
# FIN packet out -> in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="FA", seq=300, ack=100))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(1)
# ACK packet in -> out
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.get_capture(1)
# ACK packet out -> in
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="A", seq=301, ack=101))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.get_capture(1)
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - session_n, 1)
out2in_drops = self.get_err_counter(
'/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
in2out_drops = self.get_err_counter(
'/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
# extra FIN packet out -> in - this should be dropped
p = (Ether(src=self.pg1.remote_mac, dst=self.pg1.local_mac) /
IP(src=self.pg1.remote_ip4, dst=self.nat_addr) /
TCP(sport=ext_port, dport=out_port,
flags="FA", seq=300, ack=101))
self.pg1.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg0.assert_nothing_captured()
# extra ACK packet in -> out - this should be dropped
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.assert_nothing_captured()
stats = self.get_err_counter(
'/err/nat44-ed-out2in/drops due to TCP in transitory timeout')
self.assertEqual(stats - out2in_drops, 1)
stats = self.get_err_counter(
'/err/nat44-ed-in2out/drops due to TCP in transitory timeout')
self.assertEqual(stats - in2out_drops, 1)
self.sleep(3)
# extra ACK packet in -> out - this will cause session to be wiped
p = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=in_port, dport=ext_port,
flags="A", seq=101, ack=301))
self.pg0.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.pg1.assert_nothing_captured()
sessions = self.vapi.nat44_user_session_dump(self.pg0.remote_ip4, 0)
self.assertEqual(len(sessions) - session_n, 0)
def test_dynamic_vrf(self):
""" NAT44ED dynamic translation test: different VRF"""
vrf_id_in = 33
vrf_id_out = 34
self.nat_add_address(self.nat_addr, vrf_id=vrf_id_in)
try:
self.configure_ip4_interface(self.pg7, table_id=vrf_id_in)
self.configure_ip4_interface(self.pg8, table_id=vrf_id_out)
self.nat_add_inside_interface(self.pg7)
self.nat_add_outside_interface(self.pg8)
# just basic stuff nothing special
pkts = self.create_stream_in(self.pg7, self.pg8)
self.pg7.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg8.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
pkts = self.create_stream_out(self.pg8)
self.pg8.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg7.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg7)
finally:
self.pg7.unconfig()
self.pg8.unconfig()
self.vapi.ip_table_add_del(is_add=0,
table={'table_id': vrf_id_in})
self.vapi.ip_table_add_del(is_add=0,
table={'table_id': vrf_id_out})
def test_dynamic_output_feature_vrf(self):
""" NAT44ED dynamic translation test: output-feature, VRF"""
# use a VRF other than the default (0)
new_vrf_id = 22
self.nat_add_address(self.nat_addr)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg7.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat44_interface_add_del_output_feature(
sw_if_index=self.pg8.sw_if_index,
is_add=1)
try:
self.configure_ip4_interface(self.pg7, table_id=new_vrf_id)
self.configure_ip4_interface(self.pg8, table_id=new_vrf_id)
# in2out
tcpn = self.statistics['/nat44-ed/in2out/slowpath/tcp']
udpn = self.statistics['/nat44-ed/in2out/slowpath/udp']
icmpn = self.statistics['/nat44-ed/in2out/slowpath/icmp']
drops = self.statistics['/nat44-ed/in2out/slowpath/drops']
pkts = self.create_stream_in(self.pg7, self.pg8)
self.pg7.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg8.get_capture(len(pkts))
self.verify_capture_out(capture, ignore_port=True)
if_idx = self.pg7.sw_if_index
cnt = self.statistics['/nat44-ed/in2out/slowpath/tcp']
self.assertEqual(cnt[:, if_idx].sum() - tcpn[:, if_idx].sum(), 2)
cnt = self.statistics['/nat44-ed/in2out/slowpath/udp']
self.assertEqual(cnt[:, if_idx].sum() - udpn[:, if_idx].sum(), 1)
cnt = self.statistics['/nat44-ed/in2out/slowpath/icmp']
self.assertEqual(cnt[:, if_idx].sum() - icmpn[:, if_idx].sum(), 1)
cnt = self.statistics['/nat44-ed/in2out/slowpath/drops']
self.assertEqual(cnt[:, if_idx].sum() - drops[:, if_idx].sum(), 0)
# out2in
tcpn = self.statistics['/nat44-ed/out2in/fastpath/tcp']
udpn = self.statistics['/nat44-ed/out2in/fastpath/udp']
icmpn = self.statistics['/nat44-ed/out2in/fastpath/icmp']
drops = self.statistics['/nat44-ed/out2in/fastpath/drops']
pkts = self.create_stream_out(self.pg8)
self.pg8.add_stream(pkts)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg7.get_capture(len(pkts))
self.verify_capture_in(capture, self.pg7)
if_idx = self.pg8.sw_if_index
cnt = self.statistics['/nat44-ed/out2in/fastpath/tcp']
self.assertEqual(cnt[:, if_idx].sum() - tcpn[:, if_idx].sum(), 2)
cnt = self.statistics['/nat44-ed/out2in/fastpath/udp']
self.assertEqual(cnt[:, if_idx].sum() - udpn[:, if_idx].sum(), 1)
cnt = self.statistics['/nat44-ed/out2in/fastpath/icmp']
self.assertEqual(cnt[:, if_idx].sum() - icmpn[:, if_idx].sum(), 1)
cnt = self.statistics['/nat44-ed/out2in/fastpath/drops']
self.assertEqual(cnt[:, if_idx].sum() - drops[:, if_idx].sum(), 0)
sessions = self.statistics['/nat44-ed/total-sessions']
self.assertEqual(sessions[:, 0].sum(), 3)
finally:
self.pg7.unconfig()
self.pg8.unconfig()
self.vapi.ip_table_add_del(is_add=0,
table={'table_id': new_vrf_id})
def test_next_src_nat(self):
""" NAT44ED On way back forward packet to nat44-in2out node. """
twice_nat_addr = '10.0.1.3'
external_port = 80
local_port = 8080
post_twice_nat_port = 0
self.vapi.nat44_forwarding_enable_disable(enable=1)
self.nat_add_address(twice_nat_addr, twice_nat=1)
flags = (self.config_flags.NAT_IS_OUT2IN_ONLY |
self.config_flags.NAT_IS_SELF_TWICE_NAT)
self.nat_add_static_mapping(self.pg6.remote_ip4, self.pg1.remote_ip4,
local_port, external_port,
proto=IP_PROTOS.tcp, vrf_id=1,
flags=flags)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg6.sw_if_index,
is_add=1)
p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
IP(src=self.pg6.remote_ip4, dst=self.pg1.remote_ip4) /
TCP(sport=12345, dport=external_port))
self.pg6.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg6.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, twice_nat_addr)
self.assertNotEqual(tcp.sport, 12345)
post_twice_nat_port = tcp.sport
self.assertEqual(ip.dst, self.pg6.remote_ip4)
self.assertEqual(tcp.dport, local_port)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
p = (Ether(src=self.pg6.remote_mac, dst=self.pg6.local_mac) /
IP(src=self.pg6.remote_ip4, dst=twice_nat_addr) /
TCP(sport=local_port, dport=post_twice_nat_port))
self.pg6.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg6.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.pg1.remote_ip4)
self.assertEqual(tcp.sport, external_port)
self.assertEqual(ip.dst, self.pg6.remote_ip4)
self.assertEqual(tcp.dport, 12345)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
def test_one_armed_nat44_static(self):
""" NAT44ED One armed NAT and 1:1 NAPT asymmetrical rule """
remote_host = self.pg4.remote_hosts[0]
local_host = self.pg4.remote_hosts[1]
external_port = 80
local_port = 8080
eh_port_in = 0
self.vapi.nat44_forwarding_enable_disable(enable=1)
self.nat_add_address(self.nat_addr, twice_nat=1)
flags = (self.config_flags.NAT_IS_OUT2IN_ONLY |
self.config_flags.NAT_IS_TWICE_NAT)
self.nat_add_static_mapping(local_host.ip4, self.nat_addr,
local_port, external_port,
proto=IP_PROTOS.tcp, flags=flags)
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg4.sw_if_index,
is_add=1)
self.vapi.nat44_interface_add_del_feature(
sw_if_index=self.pg4.sw_if_index,
flags=flags, is_add=1)
# from client to service
p = (Ether(src=self.pg4.remote_mac, dst=self.pg4.local_mac) /
IP(src=remote_host.ip4, dst=self.nat_addr) /
TCP(sport=12345, dport=external_port))
self.pg4.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg4.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.dst, local_host.ip4)
self.assertEqual(ip.src, self.nat_addr)
self.assertEqual(tcp.dport, local_port)
self.assertNotEqual(tcp.sport, 12345)
eh_port_in = tcp.sport
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
# from service back to client
p = (Ether(src=self.pg4.remote_mac, dst=self.pg4.local_mac) /
IP(src=local_host.ip4, dst=self.nat_addr) /
TCP(sport=local_port, dport=eh_port_in))
self.pg4.add_stream(p)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
capture = self.pg4.get_capture(1)
p = capture[0]
try:
ip = p[IP]
tcp = p[TCP]
self.assertEqual(ip.src, self.nat_addr)
self.assertEqual(ip.dst, remote_host.ip4)
self.assertEqual(tcp.sport, external_port)
self.assertEqual(tcp.dport, 12345)
self.assert_packet_checksums_valid(p)
except:
self.logger.error(ppp("Unexpected or invalid packet:", p))
raise
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)
| 39.671512
| 79
| 0.572553
|
daed0000c6137c31b6fc25867909c0032976bb4f
| 48
|
py
|
Python
|
evaluate/__init__.py
|
YantaoShen/openBCT
|
69e798c2dd6380572da7a88b68e0e9d31d9b08a4
|
[
"BSD-2-Clause"
] | 64
|
2020-10-13T06:24:41.000Z
|
2022-03-08T11:23:22.000Z
|
evaluate/__init__.py
|
YantaoShen/openBCT
|
69e798c2dd6380572da7a88b68e0e9d31d9b08a4
|
[
"BSD-2-Clause"
] | 4
|
2020-12-29T05:57:34.000Z
|
2022-01-13T18:07:05.000Z
|
evaluate/__init__.py
|
YantaoShen/openBCT
|
69e798c2dd6380572da7a88b68e0e9d31d9b08a4
|
[
"BSD-2-Clause"
] | 10
|
2020-10-13T06:25:51.000Z
|
2022-03-03T00:06:06.000Z
|
from .evaluators import *
from .ranking import *
| 24
| 25
| 0.770833
|
71f88d312aa65f720ee0fcd653b3c5dfa6c07869
| 604
|
py
|
Python
|
setup.py
|
blakeaw/ORBILT
|
ed402dd496534dccd00f3e75b57007d944c58c1d
|
[
"MIT"
] | 11
|
2019-07-29T16:21:53.000Z
|
2022-02-02T11:44:57.000Z
|
setup.py
|
blakeaw/ORBILT
|
ed402dd496534dccd00f3e75b57007d944c58c1d
|
[
"MIT"
] | 11
|
2019-05-15T09:30:05.000Z
|
2021-07-19T16:49:59.000Z
|
setup.py
|
blakeaw/ORBILT
|
ed402dd496534dccd00f3e75b57007d944c58c1d
|
[
"MIT"
] | 9
|
2019-08-12T11:14:45.000Z
|
2020-12-22T18:22:55.000Z
|
from distutils.core import setup
setup(name='pybilt',
version='0.3.0',
description='Lipid bilayer analysis toolkit.',
author='Blake A. Wilson',
author_email='blake.a.wilson@vanderbilt.edu',
url='http://pybilt.readthedocs.io/en/latest/index.html',
packages=['pybilt', 'pybilt.bilayer_analyzer', 'pybilt.common',
'pybilt.com_trajectory', 'pybilt.diffusion',
'pybilt.lipid_grid', 'pybilt.mda_tools',
'pybilt.plot_generation'],
license='MIT',
keywords=['lipid bilayer', 'molecular dynamics', 'analysis']
)
| 37.75
| 69
| 0.627483
|
a2663592a3e7f6ad58f2d7ad6d9c3c4c621516a6
| 16,124
|
py
|
Python
|
DRACO/inv_changed.py
|
RahulSajnani/DRACO-Weakly-Supervised-Dense-Reconstruction-And-Canonicalization-of-Objects
|
d697905da990487589f88068c886a32d2ef57118
|
[
"MIT"
] | 3
|
2021-06-20T17:46:32.000Z
|
2021-12-17T16:55:00.000Z
|
DRACO/inv_changed.py
|
RahulSajnani/DRACO-Weakly-Supervised-Dense-Reconstruction-And-Canonicalization-of-Objects
|
d697905da990487589f88068c886a32d2ef57118
|
[
"MIT"
] | 1
|
2022-01-13T01:41:00.000Z
|
2022-01-13T06:54:29.000Z
|
DRACO/inv_changed.py
|
RahulSajnani/DRACO-Weakly-Supervised-Dense-Reconstruction-And-Canonicalization-of-Objects
|
d697905da990487589f88068c886a32d2ef57118
|
[
"MIT"
] | 1
|
2021-09-14T06:17:55.000Z
|
2021-09-14T06:17:55.000Z
|
# Code adapted from https://github.com/ClementPinard/SfmLearner-Pytorch/blob/master/inverse_warp.py
from __future__ import division
from pytorch3d.ops.knn import knn_points
import torch
import torch.nn.functional as F
import cv2
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
import helper_functions
FLOAT_EPS = np.finfo(np.float64).eps
pixel_coords = None
import kornia
from scipy.spatial.transform import Rotation as R
def preprocess_depth_output_2_point_cloud_all(depth_maps, masks, intrinsics):
'''
Pre process data for pose network
Function mean subtracts the point cloud to bring it to origin and downsamples it to 2048 points
'''
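# Pipeline sketch for each view (as implemented below): back-project the depth
# map to camera coordinates, keep only the points inside the object mask,
# subtract the per-cloud mean so every cloud is centred at the origin,
# resample each cloud to 2048 points, and also return a copy rotated by a
# per-batch-element random rotation drawn with a fixed seed (random_state=1024).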
batch_size, num_views, height, width = depth_maps.size()
depth_maps = helper_functions.sigmoid_2_depth(depth_maps)
point_cloud_list_all_views = []
rotated_point_cloud_list_all_views = []
for view in range(num_views):
src_camera_coords = pixel2cam(depth_maps[:, view].unsqueeze(0), intrinsics.inverse())
src_camera_coords = src_camera_coords.reshape(batch_size, 3, height*width) # [B 3 H*W]
if torch.cuda.is_available():
random_rotation = torch.from_numpy(R.random(batch_size, random_state=1024).as_matrix()).cuda().float() # [B 3 3]
else:
random_rotation = torch.from_numpy(R.random(batch_size, random_state=1024).as_matrix()).float() # [B 3 3]
point_cloud_list = []
rotated_point_cloud_list = []
masks_batch = masks[:, view]
for i in range(batch_size):
src_camera_coords_view = src_camera_coords[i] # [3 H*W]
mask = masks_batch[i] # [H W]
mask = mask.reshape(1, -1).squeeze() # [H*W]
# Extracting the points only within mask region
src_camera_coords_view = src_camera_coords_view[:, (mask == 1.0)]
# Mean center value
src_camera_coords_view = src_camera_coords_view - src_camera_coords_view.mean(axis = 1).unsqueeze(1).repeat(1, src_camera_coords_view.size(1)) #[3 masksize]
# Downsample to 2048 points
src_camera_coords_view = torch.nn.functional.interpolate(src_camera_coords_view.unsqueeze(0), size = 2048).squeeze(0)
point_cloud_list.append(src_camera_coords_view)
src_camera_coords_downsampled = torch.stack(point_cloud_list) # [B 3 2048]
rot_src_camera_coords = random_rotation @ src_camera_coords_downsampled # [B 3 2048]
point_cloud_list_all_views.append(src_camera_coords_downsampled)
rotated_point_cloud_list_all_views.append(rot_src_camera_coords)
camera_point_clouds_downsampled = torch.stack(point_cloud_list_all_views, dim = 1) # [B, views, 3, 2048]
rotated_camera_point_clouds_downsampled = torch.stack(rotated_point_cloud_list_all_views, dim = 1) # [B, views, 3, 2048]
return camera_point_clouds_downsampled, rotated_camera_point_clouds_downsampled
def preprocess_depth_output_2_point_cloud(depth_maps, masks_batch, intrinsics):
'''
Pre process data for pose network
Function mean subtracts the point cloud to bring it to origin and downsamples it to 2048 points
'''
batch_size, _, height, width = depth_maps.size()
depth_maps = helper_functions.sigmoid_2_depth(depth_maps)
src_camera_coords = pixel2cam(depth_maps[:, 0].unsqueeze(0), intrinsics.inverse())
src_camera_coords = src_camera_coords.reshape(batch_size, 3, height*width) # [B 3 H*W]
if torch.cuda.is_available():
random_rotation = torch.from_numpy(R.random(batch_size, random_state=1024).as_matrix()).cuda().float() # [B 3 3]
else:
random_rotation = torch.from_numpy(R.random(batch_size, random_state=1024).as_matrix()).float() # [B 3 3]
point_cloud_list = []
rotated_point_cloud_list = []
for i in range(batch_size):
src_camera_coords_view = src_camera_coords[i] # [3 H*W]
mask = masks_batch[i] # [H W]
mask = mask.reshape(1, -1).squeeze() # [H*W]
# Extracting the points only within mask region
src_camera_coords_view = src_camera_coords_view[:, (mask == 1.0)]
# mean center value
src_camera_coords_view = src_camera_coords_view - src_camera_coords_view.mean(axis = 1).unsqueeze(1).repeat(1, src_camera_coords_view.size(1)) #[3 masksize]
# Downsample to 2048 points
src_camera_coords_view = torch.nn.functional.interpolate(src_camera_coords_view.unsqueeze(0), size = 2048).squeeze(0)
point_cloud_list.append(src_camera_coords_view)
src_camera_coords_downsampled = torch.stack(point_cloud_list) # [B 3 2048]
rot_src_camera_coords = random_rotation @ src_camera_coords_downsampled # [B 3 2048]
return src_camera_coords_downsampled, rot_src_camera_coords
def depth_decode(depth_image):
# the first 16 bits (first 2 channels) encode a 16-bit depth value:
# R holds the 8 LSB and G holds the 8 MSB
depth_image_16 = depth_image[:,:,[1, 0]]
# B are 8-bit version
depth_image_8 = depth_image[:,:,2]
# last 8 are empty
depth_single_channel = np.zeros((depth_image_16.shape[0], depth_image_16.shape[1]))
# convert 16 bit to actual depth values
for i in range(depth_single_channel.shape[0]):
for j in range(depth_single_channel.shape[1]):
bit_str = '{0:08b}'.format(depth_image_16[i, j, 0]) + '{0:08b}'.format(depth_image_16[i, j, 1])
depth_single_channel[i, j] = int(bit_str, 2)
return depth_single_channel
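# A vectorized equivalent of the loop above (sketch; not used elsewhere): the
# 16-bit value is G (high byte) * 256 + R (low byte), so
#   depth = depth_image[:, :, 1].astype(np.uint32) * 256 + depth_image[:, :, 0]
# yields the same result without the per-pixel Python loop.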
def set_id_grid(depth):
global pixel_coords
b, _, h, w = depth.size()
i_range = torch.arange(0, h).view(1, h, 1).expand(1,h,w).type_as(depth) # [1, H, W]
j_range = torch.arange(0, w).view(1, 1, w).expand(1,h,w).type_as(depth) # [1, H, W]
ones = torch.ones(1,h,w).type_as(depth)
#print("i_range",i_range.device)
#print("j_range",j_range.device)
#print("ones",ones.device)
pixel_coords = torch.stack((j_range, i_range, ones), dim=1).type_as(depth) # [1, 3, H, W]
pixel_coords.to(depth.device)
def cam2pixel(cam_coords, proj_c2p_rot, proj_c2p_tr):
b, _, h, w = cam_coords.size()
cam_coords_flat = cam_coords.reshape(b, 3, -1) # [B, 3, H*W]
if proj_c2p_rot is not None:
pcoords = proj_c2p_rot.float() @ cam_coords_flat
else:
pcoords = cam_coords_flat
if proj_c2p_tr is not None:
pcoords = pcoords + proj_c2p_tr.float() # [B, 3, H*W]
X = pcoords[:, 0]
Y = pcoords[:, 1]
Z = pcoords[:, 2].clamp(min=1e-4)
X_norm = 2*(X / Z)/(w-1) - 1 # Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1) [B, H*W]
Y_norm = 2*(Y / Z)/(h-1) - 1 # Idem [B, H*W]
pixel_coords = torch.stack([X_norm, Y_norm], dim=2) # [B, H*W, 2]
# print(pixel_coords.reshape(b,h,w,2).shape)
return pixel_coords.reshape(b,h,w,2)
def pixel2cam(depth, intrinsics_inv):
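# Back-projection (as implemented below): for every pixel (u, v) the
# camera-space point is depth(u, v) * K^-1 @ [u, v, 1]^T; the result is a
# [B, 3, H, W] tensor and depth is clamped to at least 0.1 to avoid
# degenerate points.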
global pixel_coords
b, _, h, w = depth.size()
if (pixel_coords is None) or pixel_coords.size(2) < h:
set_id_grid(depth)
pixel_coords = pixel_coords.to(depth.device)
current_pixel_coords = pixel_coords[:,:,:h,:w].expand(b,3,h,w).reshape(b, 3, -1) # [B, 3, H*W]
#print("-"*10)
#print("Pixel", pixel_coords.device)
#print("Depth", depth.device)
#print("intrinsics_inv",intrinsics_inv.device)
#print("current_pixel_coords",current_pixel_coords.device)
#print("-"*10)
cam_coords = (intrinsics_inv.float() @ current_pixel_coords.float())
cam_coords = cam_coords.reshape(b, 3, h, w)
return cam_coords * depth.clamp(min=1e-1)
def quat2mat(quat):
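# Converts (x, y, z, w) quaternions to 3x3 rotation matrices. Quick sanity
# check (sketch): the identity quaternion [0, 0, 0, 1] maps to the 3x3
# identity matrix.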
x, y, z, w = quat[:,0], quat[:,1], quat[:,2], quat[:,3]
B = quat.size(0)
w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
n = (w2 + x2 + y2 + z2).sqrt()  # L2 norm; the rotation formula below assumes a unit quaternion
x = x / n
y = y / n
z = z / n
w = w / n
w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
wx, wy, wz = w*x, w*y, w*z
xy, xz, yz = x*y, x*z, y*z
rotMat = torch.stack([1 - 2*y2 - 2*z2, 2*xy - 2*wz, 2*wy + 2*xz,
2*wz + 2*xy, 1 - 2*x2 - 2*z2, 2*yz - 2*wx,
2*xz - 2*wy, 2*wx + 2*yz, 1 - 2*x2 - 2*y2], dim=1).reshape(B, 3, 3)
return rotMat
def pose_vec2mat(vec):
size_list = list(vec.size())
if len(size_list) == 3:
# if dimension is [B 4 4] for multiview blender dataset
return vec
else:
# If dimension is [B 7] for multiview nocs dataset
b = vec.size(0)
translation = vec[:, :3].unsqueeze(-1) # [B, 3, 1]
rot = vec[:,3:]
rot_mat = quat2mat(rot) # [B, 3, 3]
invert_mat = torch.eye(4)
invert_mat[0, 0] *= -1
invert_mat[1, 1] *= -1
# Adding 0.5 offset for dataset
transform_mat = torch.cat([rot_mat, (translation) + 0.5], dim=2) # [B, 3, 4]
transform_mat = torch.cat([transform_mat, torch.tensor([[0,0,0,1]]).unsqueeze(0).expand(1,1,4).type_as(transform_mat).repeat(b, 1, 1)], dim=1) # [B, 4, 4]
return transform_mat @ invert_mat.type_as(transform_mat)
def inverse_warp(tgt_image, depth, intrinsics, src_pose, tgt_pose):
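# Warping pipeline (as implemented below): back-project the source-view depth
# to camera-space points, move them through tgt_pose^-1 @ src_pose, project
# them with the intrinsics into normalized pixel coordinates, and bilinearly
# sample tgt_image at those locations; valid_points flags samples that land
# inside the image.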
src_camera_coords = pixel2cam(depth, intrinsics.inverse())
src_pose_mat = pose_vec2mat(src_pose)
tgt_pose_mat = pose_vec2mat(tgt_pose)
src_cam_to_tgt_cam = tgt_pose_mat.inverse() @ src_pose_mat
tgt_cam_2_proj = intrinsics @ src_cam_to_tgt_cam[:, :3, :] # Bx3x3 Bx3x4
rot, tr = tgt_cam_2_proj[:,:,:3], tgt_cam_2_proj[:,:,-1:]
tgt_pix_coords = cam2pixel(src_camera_coords, rot, tr)
tgt_image = tgt_image.type_as(tgt_pix_coords)
projected_img = F.grid_sample(tgt_image, tgt_pix_coords, padding_mode='zeros', align_corners=False)
valid_points = tgt_pix_coords.abs().max(dim=-1)[0] <= 1
return projected_img, valid_points
def inverse_warp_2(tgt_image, depth, intrinsics, src_pose, tgt_pose):
'''
Inverse warp function using Kornia
'''
src_pose_mat = pose_vec2mat(src_pose)
tgt_pose_mat = pose_vec2mat(tgt_pose)
b = tgt_image.size(0)
h = torch.tensor(tgt_image.size(2)).repeat(b)
w = torch.tensor(tgt_image.size(3)).repeat(b)
intrinsics = torch.cat([intrinsics.float(), torch.tensor([[0, 0, 0]]).unsqueeze(2).expand(1, 3, 1).type_as(intrinsics).repeat(b, 1, 1).float()], dim = 2)
intrinsics = torch.cat([intrinsics, torch.tensor([[0, 0, 0, 1]]).expand(1, 1, 4).type_as(intrinsics).repeat(b, 1, 1).float() ], dim = 1)
pinhole_tgt = kornia.geometry.PinholeCamera(intrinsics, tgt_pose_mat.float(), h, w)
pinhole_src = kornia.geometry.PinholeCamera(intrinsics, src_pose_mat.float(), h, w)
image_src = kornia.geometry.depth_warp(pinhole_tgt, pinhole_src, depth.float(), tgt_image.float(), tgt_image.size(2), tgt_image.size(3))
return image_src, image_src
def project_depth_point_cloud(depth, intrinsics, src_pose, tgt_pose):
'''
Project point cloud from src to tgt pose
'''
src_camera_coords = pixel2cam(depth, intrinsics.inverse()) # [B, 3, H, W]
b, _, h, w = src_camera_coords.size()
src_pose_mat = pose_vec2mat(src_pose)
tgt_pose_mat = pose_vec2mat(tgt_pose)
# source camera coordinates
src_camera_coords = src_camera_coords.reshape(b, 3, h*w)
src_cam_to_tgt_cam = tgt_pose_mat.inverse() @ src_pose_mat
ones = torch.ones((b, 1, h*w), device=src_camera_coords.device)
#print("ones",ones.device)
#print("src_camera_coords",src_camera_coords.device)
src_camera_coords_homogeneous = torch.cat([src_camera_coords, ones], dim = 1) # [B, 4, H*W]
# destination camera coordinates
projected_coords = src_cam_to_tgt_cam.float() @ src_camera_coords_homogeneous.float() # [B, 4, H*W]
projected_coords = projected_coords[:, :3, :]
return src_camera_coords, projected_coords
def NOCS_map_2_point_cloud(nocs_image_tensor, mask):
'''
Convert NOCS maps to point cloud
Input:
nocs_image_tensor - [B, 3, H, W] - torch tensor
mask - [B, H, W] - torch tensor
Returns:
nocs_point_cloud_list - B element list - [3, masked dims]
indices_list - B element list - [2, masked dims]
'''
indices_list = []
nocs_point_cloud_list = []
B, views, H, W = nocs_image_tensor.shape
for i in range(nocs_image_tensor.shape[0]):
ind = torch.from_numpy(((mask[i, :, :] > 0.5).nonzero().cpu()).numpy())
h = ind[:, 0]
w = ind[:, 1]
#torch.sigmoid((mask[i, :, :] - 0.5)* 100)
#h = h.detach()
#w = w.detach()
#print(h.max(), w.max(), h.min(), w.min())
nocs_point_cloud = nocs_image_tensor[i, :, h, w] # [3, mask]
nocs_point_cloud.detach_()
nocs_point_cloud_list.append(nocs_point_cloud)
indices_list.append(torch.stack([h, w]).detach()) # [2, mask]
return nocs_point_cloud_list, indices_list
def get_NOCS_correspondences(nocs_image_tensor_source, mask_source, nocs_image_tensor_target, mask_target):
'''
Get NOCS correspondences
Input:
nocs_image_tensor_source - [B, 3, H, W]
mask_source - [B, H, W]
nocs_image_tensor_target - [B, 3, H, W]
mask_target - [B, H, W]
Returns:
indices_depth_list - list of tensors with indices of shape [2, masked_dim]
'''
B, views, H, W = nocs_image_tensor_source.shape
indices_depth_list_target = []
indices_depth_list_source = []
for i in range(B):
nocs_point_cloud_list_source, indices_list_source = NOCS_map_2_point_cloud(nocs_image_tensor_source[i, :, :, :].unsqueeze(0), mask_source[i, 0, :, :].unsqueeze(0))
nocs_point_cloud_list_target, indices_list_target = NOCS_map_2_point_cloud(nocs_image_tensor_target[i, :, :, :].unsqueeze(0), mask_target[i, 0, :, :].unsqueeze(0))
pc_1, ind_1 = nocs_point_cloud_list_source[0], indices_list_source[0] # [3, mask_size], [2, mask_size]
pc_2, ind_2 = nocs_point_cloud_list_target[0], indices_list_target[0] # [3, mask_size]
# Perform NOCS KNN matching
out = knn_points(pc_1.transpose(0, 1).unsqueeze(0), pc_2.transpose(0, 1).unsqueeze(0)) # [1, masked_dim, 3]
corresponding_idx = out.idx[0, :, 0] # [masked_dim]
corresponding_idx = ind_2[:, corresponding_idx]
indices_depth_list_source.append(ind_1)
indices_depth_list_target.append(corresponding_idx)
return indices_depth_list_source, indices_depth_list_target
if __name__ == "__main__":
src_pose = torch.tensor([[1663.45703125, 46.258087158203128, -2127.346435546875, 0.008096654899418354, -0.3257482051849365, 0.0027897413820028307, 0.9454177618026733]])
tgt_pose = torch.tensor([[1889.214599609375, 221.49795532226563, -1699.667724609375, 0.039696164429187778, -0.4065377712249756, 0.01768353208899498, 0.9125999212265015]])
src_pose_2 = torch.tensor([[2011.62060546875, 374.8108215332031, -1255.8643798828125,0.06847226619720459, -0.48349833488464358, 0.03797297552227974, 0.8718366026878357]])
depth = Image.open('./test-images/depth.png')
depth = np.array(depth)
depth = depth_decode(depth)
depth = torch.tensor(depth).unsqueeze(0).unsqueeze(1).float()
# print(depth)
# plt.imshow(depth[0][0])
# plt.show()
tgt_image = cv2.imread('./test-images/rgb.png')
tgt_image = torch.tensor(tgt_image).unsqueeze(0).permute(0, 3, 1, 2).float() / 255.0
intrinsics = torch.tensor([
[617.1,0.0,320.0],
[0.0,617.1,240.0],
[0.0,0.0,1.0],
])
scale_factor = 1
src_pose[0, :3] = src_pose[0, :3] / scale_factor
tgt_pose[0, :3] = tgt_pose[0, :3] / scale_factor
src_pose_2[0, :3] = src_pose_2[0, :3] / scale_factor
x_factor = -1
src_pose[0, 0] = src_pose[0, 0] * x_factor
tgt_pose[0, 0] = tgt_pose[0, 0] * x_factor
src_pose_2[0, 0] = src_pose_2[0, 0] * x_factor
src_pose[0, 4:6] = src_pose[0, 4:6] * -1
tgt_pose[0, 4:6] = tgt_pose[0, 4:6] * -1
src_pose_2[0, 4:6] = src_pose_2[0, 4:6] * -1
intrinsics = intrinsics.unsqueeze(0)
warp=inverse_warp(tgt_image, depth, intrinsics, tgt_pose, src_pose)
warp=warp[0].permute(0,2,3,1)
plt.imshow(warp[0])
plt.show()
| 38.028302
| 174
| 0.661808
|
f073caa0a15c3edb6d2672b3bf00e7ef7bde7f5e
| 3,436
|
py
|
Python
|
orquesta/tests/unit/conducting/test_workflow_conductor_data_flow.py
|
batk0/orquesta
|
f03f3f2f3820bf111a9277f4f6c5d6c83a89d004
|
[
"Apache-2.0"
] | null | null | null |
orquesta/tests/unit/conducting/test_workflow_conductor_data_flow.py
|
batk0/orquesta
|
f03f3f2f3820bf111a9277f4f6c5d6c83a89d004
|
[
"Apache-2.0"
] | null | null | null |
orquesta/tests/unit/conducting/test_workflow_conductor_data_flow.py
|
batk0/orquesta
|
f03f3f2f3820bf111a9277f4f6c5d6c83a89d004
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from orquesta import conducting
from orquesta import events
from orquesta.specs import native as specs
from orquesta import states
from orquesta.tests.unit import base
class WorkflowConductorDataFlowTest(base.WorkflowConductorTest):
def _prep_conductor(self, context=None, inputs=None, state=None):
wf_def = """
version: 1.0
description: A basic sequential workflow.
input:
- a1
- b1: <% ctx().a1 %>
vars:
- a2: <% ctx().b1 %>
- b2: <% ctx().a2 %>
output:
- a5: <% ctx().b4 %>
- b5: <% ctx().a5 %>
tasks:
task1:
action: core.noop
next:
- when: <% succeeded() %>
publish:
- a3: <% ctx().b2 %>
- b3: <% ctx().a3 %>
do: task2
task2:
action: core.noop
next:
- when: <% succeeded() %>
publish: a4=<% ctx().b3 %> b4=<% ctx().a4 %>
do: task3
task3:
action: core.noop
"""
spec = specs.WorkflowSpec(wf_def)
self.assertDictEqual(spec.inspect(), {})
kwargs = {
'context': context,
'inputs': inputs
}
conductor = conducting.WorkflowConductor(spec, **kwargs)
if state:
conductor.request_workflow_state(state)
return conductor
def assert_data_flow(self, input_value):
inputs = {'a1': input_value}
expected_output = {'a5': inputs['a1'], 'b5': inputs['a1']}
conductor = self._prep_conductor(inputs=inputs, state=states.RUNNING)
for i in range(1, len(conductor.spec.tasks) + 1):
task_name = 'task' + str(i)
conductor.update_task_flow(task_name, events.ActionExecutionEvent(states.RUNNING))
conductor.update_task_flow(task_name, events.ActionExecutionEvent(states.SUCCEEDED))
self.assertEqual(conductor.get_workflow_state(), states.SUCCEEDED)
self.assertDictEqual(conductor.get_workflow_output(), expected_output)
def test_data_flow_string(self):
self.assert_data_flow('xyz')
def test_data_flow_integer(self):
self.assert_data_flow(123)
self.assert_data_flow(-123)
def test_data_flow_float(self):
self.assert_data_flow(99.99)
self.assert_data_flow(-99.99)
def test_data_flow_boolean(self):
self.assert_data_flow(True)
self.assert_data_flow(False)
def test_data_flow_dict(self):
self.assert_data_flow({'x': 123, 'y': 'abc'})
def test_data_flow_list(self):
self.assert_data_flow([123, 'abc', True])
def test_data_flow_unicode(self):
self.assert_data_flow('光合作用')
| 30.40708
| 96
| 0.606519
|
9b22197c3b0915e6f764e2d9fc2f420f0b3f043b
| 2,576
|
py
|
Python
|
caffe2/python/operator_test/listwise_l2r_operator_test.py
|
KevinKecc/caffe2
|
a2b6c6e2f0686358a84277df65e9489fb7d9ddb2
|
[
"Apache-2.0"
] | 58
|
2019-01-03T02:20:41.000Z
|
2022-02-25T14:24:13.000Z
|
caffe2/python/operator_test/listwise_l2r_operator_test.py
|
mingzhe09088/caffe2
|
8f41717c46d214aaf62b53e5b3b9b308b5b8db91
|
[
"Apache-2.0"
] | 27
|
2018-04-14T06:44:22.000Z
|
2018-08-01T18:02:39.000Z
|
caffe2/python/operator_test/listwise_l2r_operator_test.py
|
mingzhe09088/caffe2
|
8f41717c46d214aaf62b53e5b3b9b308b5b8db91
|
[
"Apache-2.0"
] | 23
|
2018-04-13T10:47:31.000Z
|
2021-05-06T08:38:06.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, workspace
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
class TestListwiseL2rOps(hu.HypothesisTestCase):
def ref_lambda_rank_ndcg_loss(self, y, r):
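# Reference LambdaRank NDCG loss (as implemented below): gains g_i = 2**r_i,
# discounts d_i = 1 / log2(rank_i + 1) (computed from the predicted scores y
# for the loss terms, and from the labels r for the IDCG normalizer), pair
# weights lambda_ij = |(g_i - g_j) * (d_i - d_j)|. The loss accumulates
# lambda_ij * log_sigm(y_i - y_j if r_i > r_j else y_j - y_i) / idcg and the
# gradient dy_i accumulates lambda_ij * ((r_i > r_j) - sigm(y_i - y_j)) / idcg.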
n = len(y)
def get_discounts(v):
x = np.argsort(v)
d = [0 for _ in range(n)]
for i in range(n):
d[x[i]] = 1. / np.log2(n - i + 1.)
return d
def sigm(x):
return 1 / (1 + np.exp(-x))
def log_sigm(x):
return -np.log(1 + np.exp(-x))
g = [2**r[i] for i in range(n)]
d = get_discounts(r)
idcg = sum([g[i] * d[i] for i in range(n)])
d = get_discounts(y)
loss = 0
dy = np.zeros(n)
for i in range(n):
for j in range(n):
if i == j:
continue
lambda_weight = np.abs((2**r[i] - 2**r[j]) * (d[i] - d[j]))
rank_loss = log_sigm(
y[i] - y[j] if r[i] > r[j] else y[j] - y[i]
)
rank_dy = (1. if r[i] > r[j] else 0.) - sigm(y[i] - y[j])
loss += lambda_weight * rank_loss / idcg
dy[i] += lambda_weight * rank_dy / idcg
return loss, dy
@given(n=st.integers(1, 20), k=st.integers(2, 5))
def test_lambda_rank_ndcg_loss(self, n, k):
y = np.random.rand(n).astype(np.float32)
r = np.random.randint(k, size=n).astype(np.float32)
dloss = np.random.random(1).astype(np.float32)
workspace.blobs['y'] = y
workspace.blobs['r'] = r
workspace.blobs['dloss'] = dloss
op = core.CreateOperator('LambdaRankNdcg', ['y', 'r'], ['loss', 'dy'])
workspace.RunOperatorOnce(op)
loss = workspace.blobs['loss']
dy = workspace.blobs['dy']
ref_loss, ref_dy = self.ref_lambda_rank_ndcg_loss(y, r)
self.assertAlmostEqual(np.asscalar(loss), ref_loss, delta=1e-4)
np.testing.assert_allclose(dy, ref_dy, rtol=1e-5, atol=1e-6)
op = core.CreateOperator(
'LambdaRankNdcgGradient', ['y', 'dy', 'dloss'], ['dy_back']
)
workspace.RunOperatorOnce(op)
dy_back = workspace.blobs['dy_back']
np.testing.assert_allclose(
dy_back, np.asscalar(dloss) * ref_dy, rtol=1e-5, atol=1e-6
)
| 34.346667
| 78
| 0.550466
|
ccbe8a9d93495091e7ed3afe8dc730a382529ffe
| 5,823
|
py
|
Python
|
sdk/python/pulumi_google_native/binaryauthorization/v1/get_policy_iam_policy.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/binaryauthorization/v1/get_policy_iam_policy.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/binaryauthorization/v1/get_policy_iam_policy.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetPolicyIamPolicyResult',
'AwaitableGetPolicyIamPolicyResult',
'get_policy_iam_policy',
'get_policy_iam_policy_output',
]
@pulumi.output_type
class GetPolicyIamPolicyResult:
def __init__(__self__, bindings=None, etag=None, version=None):
if bindings and not isinstance(bindings, list):
raise TypeError("Expected argument 'bindings' to be a list")
pulumi.set(__self__, "bindings", bindings)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if version and not isinstance(version, int):
raise TypeError("Expected argument 'version' to be a int")
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def bindings(self) -> Sequence['outputs.BindingResponse']:
"""
Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:alice@example.com`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.
"""
return pulumi.get(self, "bindings")
@property
@pulumi.getter
def etag(self) -> str:
"""
`etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def version(self) -> int:
"""
Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
"""
return pulumi.get(self, "version")
class AwaitableGetPolicyIamPolicyResult(GetPolicyIamPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetPolicyIamPolicyResult(
bindings=self.bindings,
etag=self.etag,
version=self.version)
def get_policy_iam_policy(options_requested_policy_version: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPolicyIamPolicyResult:
"""
Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.
"""
__args__ = dict()
__args__['optionsRequestedPolicyVersion'] = options_requested_policy_version
__args__['project'] = project
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('google-native:binaryauthorization/v1:getPolicyIamPolicy', __args__, opts=opts, typ=GetPolicyIamPolicyResult).value
return AwaitableGetPolicyIamPolicyResult(
bindings=__ret__.bindings,
etag=__ret__.etag,
version=__ret__.version)
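# Example invocation (sketch; the project id and policy version below are
# illustrative placeholders only):
#
#   policy = get_policy_iam_policy(options_requested_policy_version='3',
#                                  project='my-project')
#   pulumi.export('binauthz_policy_etag', policy.etag)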
@_utilities.lift_output_func(get_policy_iam_policy)
def get_policy_iam_policy_output(options_requested_policy_version: Optional[pulumi.Input[Optional[str]]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetPolicyIamPolicyResult]:
"""
Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.
"""
...
| 60.030928
| 1,084
| 0.712348
|
92791df3ac0af78b20b7eec4c5bbe219048c526c
| 5,983
|
py
|
Python
|
src/generator/AutoRest.Python.Tests/AcceptanceTests/string_tests.py
|
yugangw-msft/AutoRest
|
32775a902b522e5e7fda9debb8dfc1d40cfb6de0
|
[
"MIT"
] | 3
|
2018-03-20T22:36:32.000Z
|
2021-07-15T02:36:51.000Z
|
src/generator/AutoRest.Python.Tests/AcceptanceTests/string_tests.py
|
yugangw-msft/AutoRest
|
32775a902b522e5e7fda9debb8dfc1d40cfb6de0
|
[
"MIT"
] | null | null | null |
src/generator/AutoRest.Python.Tests/AcceptanceTests/string_tests.py
|
yugangw-msft/AutoRest
|
32775a902b522e5e7fda9debb8dfc1d40cfb6de0
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import unittest
import subprocess
import sys
import isodate
import tempfile
import json
from datetime import date, datetime, timedelta
import os
from os.path import dirname, pardir, join, realpath
cwd = dirname(realpath(__file__))
log_level = int(os.environ.get('PythonLogLevel', 30))
tests = realpath(join(cwd, pardir, "Expected", "AcceptanceTests"))
sys.path.append(join(tests, "BodyString"))
from msrest.serialization import Deserializer
from msrest.exceptions import DeserializationError, SerializationError
from autorestswaggerbatservice import AutoRestSwaggerBATService
from autorestswaggerbatservice.models.auto_rest_swagger_bat_service_enums import *
class StringTests(unittest.TestCase):
def test_string(self):
client = AutoRestSwaggerBATService(base_url="http://localhost:3000")
self.assertIsNone(client.string.get_null())
client.string.put_null(None)
self.assertEqual("", client.string.get_empty())
client.string.put_empty("")
try:
test_str = (
"\xe5\x95\x8a\xe9\xbd\x84\xe4\xb8\x82\xe7\x8b\x9b\xe7\x8b"
"\x9c\xef\xa7\xb1\xef\xa4\xac\xef\xa7\xb1\xef\xa8\x8c\xef"
"\xa8\xa9\xcb\x8a\xe3\x80\x9e\xe3\x80\xa1\xef\xbf\xa4\xe2"
"\x84\xa1\xe3\x88\xb1\xe2\x80\x90\xe3\x83\xbc\xef\xb9\xa1"
"\xef\xb9\xa2\xef\xb9\xab\xe3\x80\x81\xe3\x80\x93\xe2\x85"
"\xb0\xe2\x85\xb9\xe2\x92\x88\xe2\x82\xac\xe3\x88\xa0\xe3"
"\x88\xa9\xe2\x85\xa0\xe2\x85\xab\xef\xbc\x81\xef\xbf\xa3"
"\xe3\x81\x81\xe3\x82\x93\xe3\x82\xa1\xe3\x83\xb6\xce\x91"
"\xef\xb8\xb4\xd0\x90\xd0\xaf\xd0\xb0\xd1\x8f\xc4\x81\xc9"
"\xa1\xe3\x84\x85\xe3\x84\xa9\xe2\x94\x80\xe2\x95\x8b\xef"
"\xb8\xb5\xef\xb9\x84\xef\xb8\xbb\xef\xb8\xb1\xef\xb8\xb3"
"\xef\xb8\xb4\xe2\x85\xb0\xe2\x85\xb9\xc9\x91\xee\x9f\x87"
"\xc9\xa1\xe3\x80\x87\xe3\x80\xbe\xe2\xbf\xbb\xe2\xba\x81"
"\xee\xa1\x83\xe4\x9c\xa3\xee\xa1\xa4\xe2\x82\xac").decode('utf-8')
except AttributeError:
test_str = (
b"\xe5\x95\x8a\xe9\xbd\x84\xe4\xb8\x82\xe7\x8b\x9b\xe7\x8b"
b"\x9c\xef\xa7\xb1\xef\xa4\xac\xef\xa7\xb1\xef\xa8\x8c\xef"
b"\xa8\xa9\xcb\x8a\xe3\x80\x9e\xe3\x80\xa1\xef\xbf\xa4\xe2"
b"\x84\xa1\xe3\x88\xb1\xe2\x80\x90\xe3\x83\xbc\xef\xb9\xa1"
b"\xef\xb9\xa2\xef\xb9\xab\xe3\x80\x81\xe3\x80\x93\xe2\x85"
b"\xb0\xe2\x85\xb9\xe2\x92\x88\xe2\x82\xac\xe3\x88\xa0\xe3"
b"\x88\xa9\xe2\x85\xa0\xe2\x85\xab\xef\xbc\x81\xef\xbf\xa3"
b"\xe3\x81\x81\xe3\x82\x93\xe3\x82\xa1\xe3\x83\xb6\xce\x91"
b"\xef\xb8\xb4\xd0\x90\xd0\xaf\xd0\xb0\xd1\x8f\xc4\x81\xc9"
b"\xa1\xe3\x84\x85\xe3\x84\xa9\xe2\x94\x80\xe2\x95\x8b\xef"
b"\xb8\xb5\xef\xb9\x84\xef\xb8\xbb\xef\xb8\xb1\xef\xb8\xb3"
b"\xef\xb8\xb4\xe2\x85\xb0\xe2\x85\xb9\xc9\x91\xee\x9f\x87"
b"\xc9\xa1\xe3\x80\x87\xe3\x80\xbe\xe2\xbf\xbb\xe2\xba\x81"
b"\xee\xa1\x83\xe4\x9c\xa3\xee\xa1\xa4\xe2\x82\xac").decode('utf-8')
self.assertEqual(test_str, client.string.get_mbcs())
client.string.put_mbcs(test_str)
test_str = " Now is the time for all good men to come to the aid of their country "
self.assertEqual(test_str, client.string.get_whitespace())
client.string.put_whitespace(test_str)
self.assertIsNone(client.string.get_not_provided())
self.assertEqual(Colors.redcolor, client.enum.get_not_expandable())
client.enum.put_not_expandable('red color')
client.enum.put_not_expandable(Colors.redcolor)
with self.assertRaises(SerializationError):
client.enum.put_not_expandable('not a colour')
self.assertEqual(client.string.get_base64_encoded(), 'a string that gets encoded with base64'.encode())
self.assertEqual(client.string.get_base64_url_encoded(), 'a string that gets encoded with base64url'.encode())
self.assertIsNone(client.string.get_null_base64_url_encoded())
client.string.put_base64_url_encoded('a string that gets encoded with base64url'.encode())
client.enum.put_referenced(Colors.redcolor)
client.enum.put_referenced("red color")
client.enum.put_referenced_constant()
self.assertEqual(client.enum.get_referenced(), Colors.redcolor)
self.assertEqual(client.enum.get_referenced_constant().color_constant, Colors.green_color.value)
if __name__ == '__main__':
unittest.main()
| 49.040984
| 118
| 0.660872
|
201c3b01dc9a473c0cc594946b6bf0c1fdfb00c4
| 8,249
|
py
|
Python
|
sfx-restaurant/input/convert.py
|
leotilli/tgen
|
cbc13df5dc1c6b343d5b17a8d5bc6f63008003f3
|
[
"Apache-2.0"
] | null | null | null |
sfx-restaurant/input/convert.py
|
leotilli/tgen
|
cbc13df5dc1c6b343d5b17a8d5bc6f63008003f3
|
[
"Apache-2.0"
] | null | null | null |
sfx-restaurant/input/convert.py
|
leotilli/tgen
|
cbc13df5dc1c6b343d5b17a8d5bc6f63008003f3
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Converting the SFX data sets (Cambridge, Wen et al. NAACL 2015) to our data format.
"""
from __future__ import unicode_literals
import json
import re
import argparse
from math import ceil
import sys
import os
sys.path.insert(0, os.path.abspath('../../')) # add tgen main directory to modules path
from tgen.data import Abst, DA, DAI
from tgen.delex import delex_sent
def postprocess_sent(sent):
"""Postprocess a sentence from the format used in Cambridge NN into plain English."""
# TODO remove ?
#sent = re.sub(r'child -s', 'children', sent)
#sent = re.sub(r' -s', 's', sent)
#sent = re.sub(r' -ly', 'ly', sent)
sent = re.sub(r'\s+', ' ', sent)
return sent
def fix_capitalization(sent):
# TODO remove ?
#sent = re.sub(r'( [.?!] [a-z])', lambda m: m.group(1).upper(), sent)
#sent = re.sub(r'\b(Ok|ok|i)\b', lambda m: m.group(1).upper(), sent)
#sent = sent[0].upper() + sent[1:]
return sent
def relexicalize(texts, cur_abst):
"""Lexicalize given texts (list of pairs abstracted text -- abstraction instructions) based on
the current slot values (stored in abstraction instructions)."""
ret = []
for text, abst in texts:
abst.sort(key=lambda a: a.slot)
cur_abst.sort(key=lambda a: a.slot)
assert len(abst) == len(cur_abst)
toks = text.split(' ')
for a, c in zip(abst, cur_abst):
assert a.slot == c.slot
if a.start < 0: # skip values that are actually not realized on the surface
continue
toks[a.start] = c.value
ret.append(' '.join(toks))
return ret
def filter_abst(abst, slots_to_abstract):
"""Filter abstraction instruction to only contain slots that are actually to be abstracted."""
return [a for a in abst if a.slot in slots_to_abstract]
def convert(args):
"""Main function – read in the JSON data and output TGEN-specific files."""
# find out which slots should be abstracted (from command-line argument)
slots_to_abstract = set()
if args.abstract is not None:
slots_to_abstract.update(re.split(r'[, ]+', args.abstract))
# initialize storage
conc_das = []
das = [] # abstracted DAs
concs = [] # concrete sentences
texts = [] # abstracted sentences
absts = [] # abstraction descriptions
# statistics about different DAs
da_keys = {}
turns = 0
def process_instance(da, conc):
da.sort()
conc_das.append(da) # store the non-delexicalized version of the DA
# delexicalize
text, da, abst = delex_sent(da, conc, slots_to_abstract, args.slot_names)
da.sort() # delexicalization does not keep DAI order, need to sort again
# store the DA
text = fix_capitalization(text)
conc = fix_capitalization(conc)
da_keys[unicode(da)] = da_keys.get(unicode(da), 0) + 1
das.append(da)
concs.append(conc)
absts.append(abst)
texts.append(text)
# process the input data and store it in memory
with open(args.in_file, 'r') as fh:
data = json.load(fh, encoding='UTF-8')
for dialogue in data:
if isinstance(dialogue, dict):
for turn in dialogue['dial']:
da = DA.parse_cambridge_da(turn['S']['dact'])
if args.skip_hello and len(da) == 1 and da[0].da_type == 'hello':
continue # skip hello() DAs
conc = postprocess_sent(turn['S']['ref'])
process_instance(da, conc)
turns += 1
else:
da = DA.parse_cambridge_da(dialogue[0])
conc = postprocess_sent(dialogue[1])
process_instance(da, conc)
turns += 1
print 'Processed', turns, 'turns.'
print '%d different DAs.' % len(da_keys)
print '%.2f average DAIs per DA' % (sum([len(d) for d in das]) / float(len(das)))
if args.split:
# get file name prefixes and compute data sizes for all the parts to be split
out_names = re.split(r'[, ]+', args.out_name)
data_sizes = [int(part_size) for part_size in args.split.split(':')]
assert len(out_names) == len(data_sizes)
# compute sizes for all but the 1st part (+ round them up, as Wen does)
total = float(sum(data_sizes))
remain = turns
for part_no in xrange(len(data_sizes) - 1, 0, -1):
part_size = int(ceil(turns * (data_sizes[part_no] / total)))
data_sizes[part_no] = part_size
remain -= part_size
# put whatever remained into the 1st part
data_sizes[0] = remain
else:
# use just one part -- containing all the data
data_sizes = [turns]
out_names = [args.out_name]
# write all data parts
for part_size, part_name in zip(data_sizes, out_names):
# create multiple lexicalized references for each instance by relexicalizing sentences
# with the same DA from the same part
if args.multi_ref and part_name in ['devel', 'test', 'dtest', 'etest']:
# group sentences with the same DA
da_groups = {}
for da, text, abst in zip(das[0:part_size], texts[0:part_size], absts[0:part_size]):
da_groups[unicode(da)] = da_groups.get(unicode(da), [])
da_groups[unicode(da)].append((text, filter_abst(abst, slots_to_abstract)))
for da_str in da_groups.keys():
seen = set()
uniq = []
for text, abst in da_groups[da_str]:
sig = text + "\n" + ' '.join([a.slot + str(a.start) for a in abst])
if sig not in seen:
seen.add(sig)
uniq.append((text, abst))
da_groups[da_str] = uniq
# relexicalize all abstract sentences for each DA
relex = []
for da, abst in zip(das[0:part_size], absts[0:part_size]):
relex.append(relexicalize(da_groups[unicode(da)],
filter_abst(abst, slots_to_abstract)))
with open(part_name + '-ref.txt', 'w') as fh:
for relex_pars in relex:
fh.write("\n".join(relex_pars).encode('utf-8') + "\n\n")
with open(part_name + '-das.txt', 'w') as fh:
for da in das[0:part_size]:
fh.write(unicode(da).encode('utf-8') + "\n")
del das[0:part_size]
with open(part_name + '-conc_das.txt', 'w') as fh:
for conc_da in conc_das[0:part_size]:
fh.write(unicode(conc_da).encode('utf-8') + "\n")
del conc_das[0:part_size]
with open(part_name + '-conc.txt', 'w') as fh:
for conc in concs[0:part_size]:
fh.write(conc.encode('utf-8') + "\n")
del concs[0:part_size]
with open(part_name + '-abst.txt', 'w') as fh:
for abst in absts[0:part_size]:
fh.write("\t".join([unicode(a) for a in abst]).encode('utf-8') + "\n")
del absts[0:part_size]
with open(part_name + '-text.txt', 'w') as fh:
for text in texts[0:part_size]:
fh.write(text.encode('utf-8') + "\n")
del texts[0:part_size]
if __name__ == '__main__':
argp = argparse.ArgumentParser()
argp.add_argument('in_file', help='Input JSON file')
argp.add_argument('out_name', help='Output files name prefix(es - when used with -s, comma-separated)')
argp.add_argument('-a', '--abstract', help='Comma-separated list of slots to be abstracted')
argp.add_argument('-s', '--split', help='Colon-separated sizes of splits (e.g.: 3:1:1)')
argp.add_argument('-m', '--multi-ref',
help='Multiple reference mode: relexicalize all possible references', action='store_true')
argp.add_argument('-n', '--slot-names', help='Include slot names in delexicalized texts', action='store_true')
argp.add_argument('-i', '--skip-hello', help='Ignore hello() DAs', action='store_true')
args = argp.parse_args()
convert(args)
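# A hedged usage sketch of this converter (file and prefix names below are hypothetical,
# not taken from the original repository):
#
#   python convert.py -a name,food,area -s 3:1:1 -m sfxrestaurant.json train,devel,test
#
# This would delexicalize the listed slots, split the data 3:1:1 into the three output
# prefixes, and (because of -m) write multi-reference files for the devel/test parts.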
| 38.910377
| 114
| 0.582252
|
527bb2d3efebe9fc16220773a6f0dade6177c97c
| 529
|
py
|
Python
|
api/migrations/0025_activity_creation.py
|
eiling/SchoolIdolAPI
|
a05980fdb33b143dbe2febfc1ad6cf723f025c8d
|
[
"Apache-2.0"
] | 65
|
2017-12-29T12:28:11.000Z
|
2022-03-15T06:42:26.000Z
|
api/migrations/0025_activity_creation.py
|
eiling/SchoolIdolAPI
|
a05980fdb33b143dbe2febfc1ad6cf723f025c8d
|
[
"Apache-2.0"
] | 31
|
2017-12-18T02:03:09.000Z
|
2022-01-13T00:43:35.000Z
|
api/migrations/0025_activity_creation.py
|
eiling/SchoolIdolAPI
|
a05980fdb33b143dbe2febfc1ad6cf723f025c8d
|
[
"Apache-2.0"
] | 7
|
2018-08-27T15:11:01.000Z
|
2021-08-16T05:15:13.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.datetime_safe
class Migration(migrations.Migration):
dependencies = [
('api', '0024_auto_20150227_0315'),
]
operations = [
migrations.AddField(
model_name='activity',
name='creation',
field=models.DateTimeField(default=django.utils.datetime_safe.datetime.today, auto_now_add=True),
preserve_default=False,
),
]
| 24.045455
| 109
| 0.650284
|
f4a7f0179c7330dbf97a203b02591a965ad4530f
| 5,091
|
py
|
Python
|
google-cloud-sdk/lib/surface/compute/instances/detach_disk.py
|
bopopescu/searchparty
|
afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6
|
[
"Apache-2.0"
] | null | null | null |
google-cloud-sdk/lib/surface/compute/instances/detach_disk.py
|
bopopescu/searchparty
|
afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6
|
[
"Apache-2.0"
] | null | null | null |
google-cloud-sdk/lib/surface/compute/instances/detach_disk.py
|
bopopescu/searchparty
|
afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6
|
[
"Apache-2.0"
] | 3
|
2017-07-27T18:44:13.000Z
|
2020-07-25T17:48:53.000Z
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for detaching a disk from an instance."""
from apitools.base.py import encoding
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.compute.instances import flags
from googlecloudsdk.core import log
class DetachDisk(base.UpdateCommand):
"""Detach disks from Compute Engine virtual machine instances.
*{command}* is used to detach disks from virtual machines.
Detaching a disk without first unmounting it may result in
incomplete I/O operations and data corruption.
To unmount a persistent disk on a Linux-based image,
ssh into the instance and run:
$ sudo umount /dev/disk/by-id/google-DEVICE_NAME
"""
@staticmethod
def Args(parser):
flags.INSTANCE_ARG.AddArgument(parser)
disk_group = parser.add_mutually_exclusive_group(required=True)
disk_group.add_argument(
'--disk',
help="""\
Specifies a disk to detach by its resource name. If you specify a
disk to remove by persistent disk name, then you must not specify its
device name using the ``--device-name'' flag.
""")
disk_group.add_argument(
'--device-name',
help="""\
Specifies a disk to detach by its device name, which is the name
that the guest operating system sees. The device name is set
      at the time that the disk is attached to the instance, and need not be
the same as the persistent disk name. If the disk's device name is
specified, then its persistent disk name must not be specified
using the ``--disk'' flag.
""")
def CreateReference(self, client, resources, args):
return flags.INSTANCE_ARG.ResolveAsResource(
args, resources, scope_lister=flags.GetInstanceZoneScopeLister(client))
def GetGetRequest(self, client, instance_ref):
return (client.apitools_client.instances,
'Get',
client.messages.ComputeInstancesGetRequest(**instance_ref.AsDict()))
def GetSetRequest(self, client, instance_ref, replacement, existing):
removed_disk = list(
set(disk.deviceName for disk in existing.disks) -
set(disk.deviceName for disk in replacement.disks))[0]
return (client.apitools_client.instances,
'DetachDisk',
client.messages.ComputeInstancesDetachDiskRequest(
deviceName=removed_disk,
**instance_ref.AsDict()))
def Modify(self, resources, args, instance_ref, existing):
replacement = encoding.CopyProtoMessage(existing)
if args.disk:
disk_ref = resources.Parse(
args.disk, collection='compute.disks',
params={
'project': instance_ref.project,
'zone': instance_ref.zone
})
replacement.disks = [disk for disk in existing.disks
if disk.source != disk_ref.SelfLink()]
if len(existing.disks) == len(replacement.disks):
raise exceptions.ToolException(
'Disk [{0}] is not attached to instance [{1}] in zone [{2}].'
.format(disk_ref.Name(), instance_ref.instance, instance_ref.zone))
else:
replacement.disks = [disk for disk in existing.disks
if disk.deviceName != args.device_name]
if len(existing.disks) == len(replacement.disks):
raise exceptions.ToolException(
'No disk with device name [{0}] is attached to instance [{1}] in '
'zone [{2}].'
.format(args.device_name, instance_ref.instance, instance_ref.zone))
return replacement
def Run(self, args):
holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
client = holder.client
instance_ref = self.CreateReference(client, holder.resources, args)
get_request = self.GetGetRequest(client, instance_ref)
objects = client.MakeRequests([get_request])
new_object = self.Modify(holder.resources, args, instance_ref, objects[0])
# If existing object is equal to the proposed object or if
# Modify() returns None, then there is no work to be done, so we
# print the resource and return.
if objects[0] == new_object:
log.status.Print(
'No change requested; skipping update for [{0}].'.format(
objects[0].name))
return objects
return client.MakeRequests(
[self.GetSetRequest(client, instance_ref, new_object, objects[0])])
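# Illustrative invocations of the command implemented above (instance, zone and disk
# names are placeholders; --zone comes from the standard instance resource argument):
#
#   gcloud compute instances detach-disk my-instance --zone us-central1-a --disk my-data-disk
#   gcloud compute instances detach-disk my-instance --zone us-central1-a --device-name persistent-disk-1
#
# Exactly one of --disk / --device-name must be given, matching the mutually exclusive group in Args().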
| 37.711111
| 80
| 0.685327
|
35bb5ac1200646e41f3275a250552b234b800421
| 2,653
|
py
|
Python
|
sunpos.py
|
marcocipriani01/AllSky-new
|
8e3951bccdb4709ef3abfeb7c5bf8961e9c8d3f1
|
[
"MIT"
] | null | null | null |
sunpos.py
|
marcocipriani01/AllSky-new
|
8e3951bccdb4709ef3abfeb7c5bf8961e9c8d3f1
|
[
"MIT"
] | null | null | null |
sunpos.py
|
marcocipriani01/AllSky-new
|
8e3951bccdb4709ef3abfeb7c5bf8961e9c8d3f1
|
[
"MIT"
] | null | null | null |
# Credits:
# https://levelup.gitconnected.com/python-sun-position-for-solar-energy-and-research-7a4ead801777
import datetime
from dateutil.tz import tzutc
from math import sin, cos, tan, asin, atan2, radians as rad, degrees as deg
def sun_position(location, utc=None, refraction=True):
if utc is None:
utc = datetime.datetime.now(tzutc())
latitude, longitude = location
# Convert latitude and longitude to radians
rlat = rad(latitude)
rlon = rad(longitude)
# Decimal hour of the day at Greenwich
greenwichtime = utc.hour + utc.minute / 60 + utc.second / 3600
# Days from J2000, accurate from 1901 to 2099
daynum = (
367 * utc.year
- 7 * (utc.year + (utc.month + 9) // 12) // 4
+ 275 * utc.month // 9
+ utc.day
- 730531.5
+ greenwichtime / 24
)
# Mean longitude of the sun
mean_long = daynum * 0.01720279239 + 4.894967873
# Mean anomaly of the Sun
mean_anom = daynum * 0.01720197034 + 6.240040768
# Ecliptic longitude of the sun
eclip_long = (
mean_long
+ 0.03342305518 * sin(mean_anom)
+ 0.0003490658504 * sin(2 * mean_anom)
)
# Obliquity of the ecliptic
obliquity = 0.4090877234 - 0.000000006981317008 * daynum
# Right ascension of the sun
rasc = atan2(cos(obliquity) * sin(eclip_long), cos(eclip_long))
# Declination of the sun
decl = asin(sin(obliquity) * sin(eclip_long))
# Local sidereal time
sidereal = 4.894961213 + 6.300388099 * daynum + rlon
# Hour angle of the sun
hour_ang = sidereal - rasc
# Local elevation of the sun
elevation = asin(sin(decl) * sin(rlat) + cos(decl) * cos(rlat) * cos(hour_ang))
# Local azimuth of the sun
azimuth = atan2(
-cos(decl) * cos(rlat) * sin(hour_ang),
sin(decl) - sin(rlat) * sin(elevation),
)
# Convert azimuth and elevation to degrees
azimuth = into_range(deg(azimuth), 0, 360)
elevation = into_range(deg(elevation), -180, 180)
# Refraction correction (optional)
if refraction:
targ = rad((elevation + (10.3 / (elevation + 5.11))))
elevation += (1.02 / tan(targ)) / 60
# Return azimuth and elevation in degrees
return (round(azimuth, 2), round(elevation, 2))
def into_range(x, range_min, range_max):
shiftedx = x - range_min
delta = range_max - range_min
return (((shiftedx % delta) + delta) % delta) + range_min
if __name__ == "__main__":
location = (41.902782, 12.496366) # Rome, Italy
azimuth, elevation = sun_position(location)
print("Azimuth: ", azimuth)
print("Elevation: ", elevation)
| 34.907895
| 97
| 0.637392
|
084bbd62b70b208da78b085a71cadd9fbb907ca7
| 77,606
|
py
|
Python
|
gtfspy/gtfs.py
|
Leo-Ryu/gtfspy
|
732abdf6bfb6427454ac4c0a676dc3f8fc838cf4
|
[
"MIT"
] | 118
|
2017-03-14T11:17:54.000Z
|
2022-03-31T07:46:31.000Z
|
gtfspy/gtfs.py
|
Leo-Ryu/gtfspy
|
732abdf6bfb6427454ac4c0a676dc3f8fc838cf4
|
[
"MIT"
] | 27
|
2017-05-02T12:39:36.000Z
|
2020-03-24T18:29:20.000Z
|
gtfspy/gtfs.py
|
Leo-Ryu/gtfspy
|
732abdf6bfb6427454ac4c0a676dc3f8fc838cf4
|
[
"MIT"
] | 29
|
2017-08-21T15:22:41.000Z
|
2022-03-13T07:27:52.000Z
|
import calendar
import datetime
import logging
import os
import sqlite3
import sys
import time
import warnings
from collections import Counter, defaultdict
from datetime import timedelta
import numpy
import pandas as pd
import pytz
from six import string_types
from gtfspy import shapes
from gtfspy.route_types import ALL_ROUTE_TYPES
from gtfspy.route_types import WALK
from gtfspy.util import wgs84_distance, wgs84_width, wgs84_height, set_process_timezone
class GTFS(object):
def __init__(self, fname_or_conn):
"""Open a GTFS object
Parameters
----------
fname_or_conn: str | sqlite3.Connection
path to the preprocessed gtfs database or a connection to a gtfs database
"""
if isinstance(fname_or_conn, string_types):
if os.path.isfile(fname_or_conn):
self.conn = sqlite3.connect(fname_or_conn)
self.fname = fname_or_conn
# memory-mapped IO size, in bytes
self.conn.execute('PRAGMA mmap_size = 1000000000;')
# page cache size, in negative KiB.
self.conn.execute('PRAGMA cache_size = -2000000;')
else:
raise FileNotFoundError("File " + fname_or_conn + " missing")
elif isinstance(fname_or_conn, sqlite3.Connection):
self.conn = fname_or_conn
self._dont_close = True
else:
raise NotImplementedError(
"Initiating GTFS using an object with type " + str(type(fname_or_conn)) + " is not supported")
assert self.conn.execute("SELECT name FROM sqlite_master WHERE type='table';").fetchone() is not None
self.meta = GTFSMetadata(self.conn)
# Bind functions
self.conn.create_function("find_distance", 4, wgs84_distance)
# Set timezones
self._timezone = pytz.timezone(self.get_timezone_name())
def __del__(self):
if not getattr(self, '_dont_close', False) and hasattr(self, "conn"):
self.conn.close()
@classmethod
def from_directory_as_inmemory_db(cls, gtfs_directory):
"""
Instantiate a GTFS object by computing
Parameters
----------
gtfs_directory: str
path to the directory for importing the database
"""
# this import is here to avoid circular imports (which turned out to be a problem)
from gtfspy.import_gtfs import import_gtfs
conn = sqlite3.connect(":memory:")
import_gtfs(gtfs_directory,
conn,
preserve_connection=True,
print_progress=False)
return cls(conn)
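# Usage sketch (paths are hypothetical; not part of the original module):
# g = GTFS("/data/city.sqlite")                                  # open an already imported database file
# g = GTFS.from_directory_as_inmemory_db("/data/city_gtfs/")     # or import a raw feed into :memory:
# print(g.get_timezone_name())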
def get_main_database_path(self):
"""
Should return the path to the database
Returns
-------
path : unicode
path to the database, empty string for in-memory databases
"""
cur = self.conn.cursor()
cur.execute("PRAGMA database_list")
rows = cur.fetchall()
for row in rows:
if row[1] == str("main"):
return row[2]
def get_location_name(self):
return self.meta.get('location_name', "location_unknown")
def get_shape_distance_between_stops(self, trip_I, from_stop_seq, to_stop_seq):
"""
Get the distance along a shape between stops
Parameters
----------
trip_I : int
trip_ID along which we travel
from_stop_seq : int
the sequence number of the 'origin' stop
to_stop_seq : int
the sequence number of the 'destination' stop
Returns
-------
distance : float, None
If the shape calculation succeeded, return a float, otherwise return None
(i.e. in the case where the shapes table is empty)
"""
query_template = "SELECT shape_break FROM stop_times WHERE trip_I={trip_I} AND seq={seq} "
stop_seqs = [from_stop_seq, to_stop_seq]
shape_breaks = []
for seq in stop_seqs:
q = query_template.format(seq=seq, trip_I=trip_I)
shape_breaks.append(self.conn.execute(q).fetchone())
query_template = "SELECT max(d) - min(d) " \
"FROM shapes JOIN trips ON(trips.shape_id=shapes.shape_id) " \
"WHERE trip_I={trip_I} AND shapes.seq>={from_stop_seq} AND shapes.seq<={to_stop_seq};"
distance_query = query_template.format(trip_I=trip_I, from_stop_seq=from_stop_seq, to_stop_seq=to_stop_seq)
return self.conn.execute(distance_query).fetchone()[0]
def get_stop_distance(self, from_stop_I, to_stop_I):
query_template = "SELECT d_walk FROM stop_distances WHERE from_stop_I={from_stop_I} AND to_stop_I={to_stop_I} "
q = query_template.format(from_stop_I=int(from_stop_I), to_stop_I=int(to_stop_I))
row = self.conn.execute(q).fetchone()
if row:
return row[0]
else:
return None
def get_stops_within_distance(self, stop, distance):
query = """SELECT stops.* FROM stop_distances, stops
WHERE stop_distances.to_stop_I = stops.stop_I
AND d < %s AND from_stop_I = %s""" % (distance, stop)
return pd.read_sql_query(query, self.conn)
def get_directly_accessible_stops_within_distance(self, stop, distance):
"""
Returns stops that are accessible without transfer from the stops that are within a specific walking distance
:param stop: int
:param distance: int
:return:
"""
query = """SELECT stop.* FROM
(SELECT st2.* FROM
(SELECT * FROM stop_distances
WHERE from_stop_I = %s) sd,
(SELECT * FROM stop_times) st1,
(SELECT * FROM stop_times) st2
WHERE sd.d < %s AND sd.to_stop_I = st1.stop_I AND st1.trip_I = st2.trip_I
GROUP BY st2.stop_I) sq,
(SELECT * FROM stops) stop
WHERE sq.stop_I = stop.stop_I""" % (stop, distance)
return pd.read_sql_query(query, self.conn)
def get_cursor(self):
"""
Return a cursor to the underlying sqlite3 object
"""
return self.conn.cursor()
def get_table(self, table_name):
"""
Return a pandas.DataFrame object corresponding to the sql table
Parameters
----------
table_name: str
name of the table in the database
Returns
-------
df : pandas.DataFrame
"""
return pd.read_sql("SELECT * FROM " + table_name, self.conn)
def get_row_count(self, table):
"""
Get number of rows in a table
"""
return self.conn.cursor().execute("SELECT count(*) FROM " + table).fetchone()[0]
def get_table_names(self):
"""
Return a list of the underlying tables in the database.
Returns
-------
table_names: list[str]
"""
return list(pd.read_sql("SELECT * FROM main.sqlite_master WHERE type='table'", self.conn)["name"])
def set_current_process_time_zone(self):
"""
This function queries a GTFS connection, finds the timezone of this
database, and sets it in the TZ environment variable. This is a
process-global configuration, by the nature of the C library!
Returns
-------
None
Alters os.environ['TZ']
"""
TZ = self.conn.execute('SELECT timezone FROM agencies LIMIT 1').fetchall()[0][0]
# TODO!: This is dangerous (?).
# In my opinion, we should get rid of this at some point (RK):
return set_process_timezone(TZ)
def get_timezone_pytz(self):
return self._timezone
def get_timezone_name(self):
"""
Get name of the GTFS timezone
Returns
-------
timezone_name : str
name of the time zone, e.g. "Europe/Helsinki"
"""
tz_name = self.conn.execute('SELECT timezone FROM agencies LIMIT 1').fetchone()
if tz_name is None:
raise ValueError("This database does not have a timezone defined.")
return tz_name[0]
def get_timezone_string(self, dt=None):
"""
Return the timezone of the GTFS database object as a string.
The assumed time when the timezone (difference) is computed
is the download date of the file.
This might not be optimal in all cases.
So this function should return values like:
"+0200" or "-1100"
Parameters
----------
dt : datetime.datetime, optional
The (unlocalized) date when the timezone should be computed.
Defaults first to download_date, and then to the runtime date.
Returns
-------
timezone_string : str
"""
if dt is None:
download_date = self.meta.get('download_date')
if download_date:
dt = datetime.datetime.strptime(download_date, '%Y-%m-%d')
else:
dt = datetime.datetime.today()
loc_dt = self._timezone.localize(dt)
# get the timezone
timezone_string = loc_dt.strftime("%z")
return timezone_string
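# Illustrative values (assuming a feed whose agency timezone is Europe/Helsinki; `gtfs` is an opened GTFS instance):
# gtfs.get_timezone_string(datetime.datetime(2016, 1, 15)) -> "+0200"  (standard time)
# gtfs.get_timezone_string(datetime.datetime(2016, 7, 15)) -> "+0300"  (daylight saving time)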
def unixtime_seconds_to_gtfs_datetime(self, unixtime):
"""
Convert unixtime to localized datetime
Parameters
----------
unixtime : int
Returns
-------
gtfs_datetime: datetime.datetime
time localized to gtfs_datetime's timezone
"""
return datetime.datetime.fromtimestamp(unixtime, self._timezone)
def unlocalized_datetime_to_ut_seconds(self, unlocalized_datetime):
"""
Convert datetime (in GTFS timezone) to unixtime
Parameters
----------
unlocalized_datetime : datetime.datetime
(tz coerced to GTFS timezone, should NOT be UTC.)
Returns
-------
output : int (unixtime)
"""
loc_dt = self._timezone.localize(unlocalized_datetime)
unixtime_seconds = calendar.timegm(loc_dt.utctimetuple())
return unixtime_seconds
def get_day_start_ut(self, date):
"""
Get day start time (as specified by GTFS) as unix time in seconds
Parameters
----------
date : str | unicode | datetime.datetime
something describing the date
Returns
-------
day_start_ut : int
start time of the day in unixtime
"""
if isinstance(date, string_types):
date = datetime.datetime.strptime(date, '%Y-%m-%d')
date_noon = datetime.datetime(date.year, date.month, date.day, 12, 0, 0)
ut_noon = self.unlocalized_datetime_to_ut_seconds(date_noon)
return ut_noon - 12 * 60 * 60 # this comes from GTFS: noon-12 hrs
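# Example of the GTFS "noon minus 12 hours" convention used above (date illustrative):
# gtfs.get_day_start_ut("2016-03-27") localizes 2016-03-27 12:00, converts it to unixtime,
# and subtracts 12 * 3600, so the result stays consistent even on DST-change days.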
def get_trip_trajectories_within_timespan(self, start, end, use_shapes=True, filter_name=None):
"""
Get complete trip data for visualizing public transport operation based on gtfs.
Parameters
----------
start: number
Earliest position data to return (in unix time)
end: number
Latest position data to return (in unix time)
use_shapes: bool, optional
Whether or not shapes should be included
filter_name: str
Pick only routes having this name.
Returns
-------
trips: dict
trips['trips'] is a list whose each element (e.g. el = trips['trips'][0])
is a dict with the following properties:
el['lats'] -- list of latitudes
el['lons'] -- list of longitudes
el['times'] -- list of passage_times
el['route_type'] -- type of vehicle as specified by GTFS
el['name'] -- name of the route
"""
trips = []
trip_df = self.get_tripIs_active_in_range(start, end)
print("gtfs_viz.py: fetched " + str(len(trip_df)) + " trip ids")
shape_cache = {}
# loop over all trips:
for row in trip_df.itertuples():
trip_I = row.trip_I
day_start_ut = row.day_start_ut
shape_id = row.shape_id
trip = {}
name, route_type = self.get_route_name_and_type_of_tripI(trip_I)
trip['route_type'] = int(route_type)
trip['name'] = str(name)
if filter_name and (name != filter_name):
continue
stop_lats = []
stop_lons = []
stop_dep_times = []
shape_breaks = []
stop_seqs = []
# get stop_data and store it:
stop_time_df = self.get_trip_stop_time_data(trip_I, day_start_ut)
for stop_row in stop_time_df.itertuples():
stop_lats.append(float(stop_row.lat))
stop_lons.append(float(stop_row.lon))
stop_dep_times.append(float(stop_row.dep_time_ut))
try:
stop_seqs.append(int(stop_row.seq))
except TypeError:
stop_seqs.append(None)
if use_shapes:
try:
shape_breaks.append(int(stop_row.shape_break))
except (TypeError, ValueError):
shape_breaks.append(None)
if use_shapes:
# get shape data (from cache, if possible)
if shape_id not in shape_cache:
shape_cache[shape_id] = shapes.get_shape_points2(self.conn.cursor(), shape_id)
shape_data = shape_cache[shape_id]
# noinspection PyBroadException
try:
trip['times'] = shapes.interpolate_shape_times(shape_data['d'], shape_breaks, stop_dep_times)
trip['lats'] = shape_data['lats']
trip['lons'] = shape_data['lons']
start_break = shape_breaks[0]
end_break = shape_breaks[-1]
trip['times'] = trip['times'][start_break:end_break + 1]
trip['lats'] = trip['lats'][start_break:end_break + 1]
trip['lons'] = trip['lons'][start_break:end_break + 1]
except:
# In case interpolation fails:
trip['times'] = stop_dep_times
trip['lats'] = stop_lats
trip['lons'] = stop_lons
else:
trip['times'] = stop_dep_times
trip['lats'] = stop_lats
trip['lons'] = stop_lons
trips.append(trip)
return {"trips": trips}
def get_stop_count_data(self, start_ut, end_ut):
"""
Get stop count data.
Parameters
----------
start_ut : int
start time in unixtime
end_ut : int
end time in unixtime
Returns
-------
stopData : pandas.DataFrame
each row in the stopData dataFrame is a dictionary with the following elements
stop_I, count, lat, lon, name
with data types
(int, int, float, float, str)
"""
# TODO! this function could perhaps be made a single sql query now with the new tables?
trips_df = self.get_tripIs_active_in_range(start_ut, end_ut)
# stop_I -> count, lat, lon, name
stop_counts = Counter()
# loop over all trips:
for row in trips_df.itertuples():
# get stop_data and store it:
stops_seq = self.get_trip_stop_time_data(row.trip_I, row.day_start_ut)
for stop_time_row in stops_seq.itertuples(index=False):
if (stop_time_row.dep_time_ut >= start_ut) and (stop_time_row.dep_time_ut <= end_ut):
stop_counts[stop_time_row.stop_I] += 1
all_stop_data = self.stops()
counts = [stop_counts[stop_I] for stop_I in all_stop_data["stop_I"].values]
all_stop_data.loc[:, "count"] = pd.Series(counts, index=all_stop_data.index)
return all_stop_data
def get_segment_count_data(self, start, end, use_shapes=True):
"""
Get segment data including PTN vehicle counts per segment that are
fully _contained_ within the interval (start, end)
Parameters
----------
start : int
start time of the simulation in unix time
end : int
end time of the simulation in unix time
use_shapes : bool, optional
whether to include shapes (if available)
Returns
-------
seg_data : list
each element in the list is a dict containing keys:
"trip_I", "lats", "lons", "shape_id", "stop_seqs", "shape_breaks"
"""
cur = self.conn.cursor()
# get all possible trip_ids that take place between start and end
trips_df = self.get_tripIs_active_in_range(start, end)
# stop_I -> count, lat, lon, name
segment_counts = Counter()
seg_to_info = {}
# tripI_to_seq = "inverted segToShapeData"
tripI_to_seq = defaultdict(list)
# loop over all trips:
for row in trips_df.itertuples():
# get stop_data and store it:
stops_df = self.get_trip_stop_time_data(row.trip_I, row.day_start_ut)
for i in range(len(stops_df) - 1):
(stop_I, dep_time_ut, s_lat, s_lon, s_seq, shape_break) = stops_df.iloc[i]
(stop_I_n, dep_time_ut_n, s_lat_n, s_lon_n, s_seq_n, shape_break_n) = stops_df.iloc[i + 1]
# test if _contained_ in the interval
# overlap would read:
# (dep_time_ut <= end) and (start <= dep_time_ut_n)
if (dep_time_ut >= start) and (dep_time_ut_n <= end):
seg = (stop_I, stop_I_n)
segment_counts[seg] += 1
if seg not in seg_to_info:
seg_to_info[seg] = {
u"trip_I": row.trip_I,
u"lats": [s_lat, s_lat_n],
u"lons": [s_lon, s_lon_n],
u"shape_id": row.shape_id,
u"stop_seqs": [s_seq, s_seq_n],
u"shape_breaks": [shape_break, shape_break_n]
}
tripI_to_seq[row.trip_I].append(seg)
stop_names = {}
for (stop_I, stop_J) in segment_counts.keys():
for s in [stop_I, stop_J]:
if s not in stop_names:
stop_names[s] = self.stop(s)[u'name'].values[0]
seg_data = []
for seg, count in segment_counts.items():
segInfo = seg_to_info[seg]
shape_breaks = segInfo[u"shape_breaks"]
seg_el = {}
if use_shapes and shape_breaks and shape_breaks[0] and shape_breaks[1]:
shape = shapes.get_shape_between_stops(
cur,
segInfo[u'trip_I'],
shape_breaks=shape_breaks
)
seg_el[u'lats'] = segInfo[u'lats'][:1] + shape[u'lat'] + segInfo[u'lats'][1:]
seg_el[u'lons'] = segInfo[u'lons'][:1] + shape[u'lon'] + segInfo[u'lons'][1:]
else:
seg_el[u'lats'] = segInfo[u'lats']
seg_el[u'lons'] = segInfo[u'lons']
seg_el[u'name'] = stop_names[seg[0]] + u"-" + stop_names[seg[1]]
seg_el[u'count'] = count
seg_data.append(seg_el)
return seg_data
def get_all_route_shapes(self, use_shapes=True):
"""
Get the shapes of all routes.
Parameters
----------
use_shapes : bool, optional
by default True (i.e. use shapes as the name of the function indicates)
if False, fall back to the stop latitudes and longitudes
Returns
-------
routeShapes: list of dicts that should have the following keys
name, type, agency, lats, lons
with types
list, list, str, list, list
"""
cur = self.conn.cursor()
# all shape_id:s corresponding to a route_I:
# query = "SELECT DISTINCT name, shape_id, trips.route_I, route_type
# FROM trips LEFT JOIN routes USING(route_I)"
# data1 = pd.read_sql_query(query, self.conn)
# one (arbitrary) shape_id per route_I ("one direction") -> less than half of the routes
query = "SELECT routes.name as name, shape_id, route_I, trip_I, routes.type, " \
" agency_id, agencies.name as agency_name, max(end_time_ds-start_time_ds) as trip_duration " \
"FROM trips " \
"LEFT JOIN routes " \
"USING(route_I) " \
"LEFT JOIN agencies " \
"USING(agency_I) " \
"GROUP BY routes.route_I"
data = pd.read_sql_query(query, self.conn)
routeShapes = []
for i, row in enumerate(data.itertuples()):
datum = {"name": str(row.name), "type": int(row.type), "route_I": row.route_I, "agency": str(row.agency_id),
"agency_name": str(row.agency_name)}
# this function should be made also non-shape friendly (at this point)
if use_shapes and row.shape_id:
shape = shapes.get_shape_points2(cur, row.shape_id)
lats = shape['lats']
lons = shape['lons']
else:
stop_shape = self.get_trip_stop_coordinates(row.trip_I)
lats = list(stop_shape['lat'])
lons = list(stop_shape['lon'])
datum['lats'] = [float(lat) for lat in lats]
datum['lons'] = [float(lon) for lon in lons]
routeShapes.append(datum)
return routeShapes
def get_tripIs_active_in_range(self, start, end):
"""
Obtain from the (standard) GTFS database, list of trip_IDs (and other trip_related info)
that are active between given 'start' and 'end' times.
The start time of a trip is determined by the departure time at the first stop of the trip.
The end time of a trip is determined by the arrival time at the last stop of the trip.
Parameters
----------
start, end : int
the start and end of the time interval in unix time seconds
Returns
-------
active_trips : pandas.DataFrame with columns
trip_I, day_start_ut, start_time_ut, end_time_ut, shape_id
"""
to_select = "trip_I, day_start_ut, start_time_ut, end_time_ut, shape_id "
query = "SELECT " + to_select + \
"FROM day_trips " \
"WHERE " \
"(end_time_ut > {start_ut} AND start_time_ut < {end_ut})".format(start_ut=start, end_ut=end)
return pd.read_sql_query(query, self.conn)
def get_trip_counts_per_day(self):
"""
Get trip counts per day between the start and end day of the feed.
Returns
-------
trip_counts : pandas.DataFrame
Has columns "date_str" (dtype str) "trip_counts" (dtype int)
"""
query = "SELECT date, count(*) AS number_of_trips FROM day_trips GROUP BY date"
# this yields the actual data
trip_counts_per_day = pd.read_sql_query(query, self.conn, index_col="date")
# the rest is simply code for filling out "gaps" in the time span
# (necessary for some visualizations)
max_day = trip_counts_per_day.index.max()
min_day = trip_counts_per_day.index.min()
min_date = datetime.datetime.strptime(min_day, '%Y-%m-%d')
max_date = datetime.datetime.strptime(max_day, '%Y-%m-%d')
num_days = (max_date - min_date).days
dates = [min_date + datetime.timedelta(days=x) for x in range(num_days + 1)]
trip_counts = []
date_strings = []
for date in dates:
date_string = date.strftime("%Y-%m-%d")
date_strings.append(date_string)
try:
value = trip_counts_per_day.loc[date_string, 'number_of_trips']
except KeyError:
# set value to 0 if dsut is not present, i.e. when no trips
# take place on that day
value = 0
trip_counts.append(value)
# check that all date_strings are included (move this to tests?)
for date_string in trip_counts_per_day.index:
assert date_string in date_strings
data = {"date": dates, "date_str": date_strings, "trip_counts": trip_counts}
return pd.DataFrame(data)
def get_suitable_date_for_daily_extract(self, date=None, ut=False):
"""
Parameters
----------
date : str
ut : bool
Whether to return the date as a string or as an int (seconds after epoch).
Returns
-------
date : str | int
A suitable date for the daily extract (a date string, or unixtime seconds if ``ut`` is True).
Iterates through the available dates forward and backward from the download date accepting the first day that has
at least 90 percent of the number of trips of the maximum date. The condition can be changed to something else.
If the download date is out of range, the process will look through the dates from first to last.
"""
daily_trips = self.get_trip_counts_per_day()
max_daily_trips = daily_trips[u'trip_counts'].max(axis=0)
if date in daily_trips[u'date_str'].values:
start_index = daily_trips[daily_trips[u'date_str'] == date].index.tolist()[0]
daily_trips[u'old_index'] = daily_trips.index
daily_trips[u'date_dist'] = abs(start_index - daily_trips.index)
daily_trips = daily_trips.sort_values(by=[u'date_dist', u'old_index']).reindex()
for row in daily_trips.itertuples():
if row.trip_counts >= 0.9 * max_daily_trips:
if ut:
return self.get_day_start_ut(row.date_str)
else:
return row.date_str
def get_weekly_extract_start_date(self, ut=False, weekdays_at_least_of_max=0.9,
verbose=False, download_date_override=None):
"""
Find a suitable weekly extract start date (monday).
The goal is to obtain as 'usual' week as possible.
The weekdays of the weekly extract week should contain
at least 0.9 of the total maximum of trips.
Parameters
----------
ut: bool, whether to return the date as unixtime seconds instead of a date
weekdays_at_least_of_max: float
download_date_override: str, semi-optional
Download date in format %Y-%m-%d; the search considers weeks close to this date.
Overrides the (possibly) recorded downloaded date in the database
Returns
-------
date: int or str
Raises
------
error: RuntimeError
If no download date could be found.
"""
daily_trip_counts = self.get_trip_counts_per_day()
if isinstance(download_date_override, str):
search_start_date = datetime.datetime.strptime(download_date_override, "%Y-%m-%d")
elif isinstance(download_date_override, datetime.datetime):
search_start_date = download_date_override
else:
assert download_date_override is None
download_date_str = self.meta['download_date']
if download_date_str == "":
warnings.warn("Download date is not speficied in the database. "
"Download date used in GTFS." + self.get_weekly_extract_start_date.__name__ +
"() defaults to the smallest date when any operations take place.")
search_start_date = daily_trip_counts['date'].min()
else:
search_start_date = datetime.datetime.strptime(download_date_str, "%Y-%m-%d")
feed_min_date = daily_trip_counts['date'].min()
feed_max_date = daily_trip_counts['date'].max()
assert (feed_max_date - feed_min_date >= datetime.timedelta(days=7)), \
"Dataset is not long enough for providing week long extracts"
# get first a valid monday where the search for the week can be started:
next_monday_from_search_start_date = search_start_date + timedelta(days=(7 - search_start_date.weekday()))
if not (feed_min_date <= next_monday_from_search_start_date <= feed_max_date):
warnings.warn("The next monday after the (possibly user) specified download date is not present in the database."
"Resorting to first monday after the beginning of operations instead.")
next_monday_from_search_start_date = feed_min_date + timedelta(days=(7 - feed_min_date.weekday()))
max_trip_count = daily_trip_counts['trip_counts'].quantile(0.95)
# Take 95th percentile to omit special days, if any exist.
threshold = weekdays_at_least_of_max * max_trip_count
threshold_fulfilling_days = daily_trip_counts['trip_counts'] > threshold
# look forward first
# get the index of the trip:
search_start_monday_index = daily_trip_counts[daily_trip_counts['date'] == next_monday_from_search_start_date].index[0]
# get starting point
while_loop_monday_index = search_start_monday_index
while len(daily_trip_counts.index) >= while_loop_monday_index + 7:
if all(threshold_fulfilling_days[while_loop_monday_index:while_loop_monday_index + 5]):
row = daily_trip_counts.iloc[while_loop_monday_index]
if ut:
return self.get_day_start_ut(row.date_str)
else:
return row['date']
while_loop_monday_index += 7
while_loop_monday_index = search_start_monday_index - 7
# then backwards
while while_loop_monday_index >= 0:
if all(threshold_fulfilling_days[while_loop_monday_index:while_loop_monday_index + 5]):
row = daily_trip_counts.iloc[while_loop_monday_index]
if ut:
return self.get_day_start_ut(row.date_str)
else:
return row['date']
while_loop_monday_index -= 7
raise RuntimeError("No suitable weekly extract start date could be determined!")
def get_spreading_trips(self, start_time_ut, lat, lon,
max_duration_ut=4 * 3600,
min_transfer_time=30,
use_shapes=False):
"""
Starting from a specific point and time, get complete single source
shortest path spreading dynamics as trips, or "events".
Parameters
----------
start_time_ut: number
Start time of the spreading.
lat: float
latitude of the spreading seed location
lon: float
longitude of the spreading seed location
max_duration_ut: int
maximum duration of the spreading process (in seconds)
min_transfer_time : int
minimum transfer time in seconds
use_shapes : bool
whether to include shapes
Returns
-------
trips: dict
trips['trips'] is a list whose each element (e.g. el = trips['trips'][0])
is a dict with the following properties:
el['lats'] : list of latitudes
el['lons'] : list of longitudes
el['times'] : list of passage_times
el['route_type'] : type of vehicle as specified by GTFS, or -1 if walking
el['name'] : name of the route
"""
from gtfspy.spreading.spreader import Spreader
spreader = Spreader(self, start_time_ut, lat, lon, max_duration_ut, min_transfer_time, use_shapes)
return spreader.spread()
def get_closest_stop(self, lat, lon):
"""
Get closest stop to a given location.
Parameters
----------
lat: float
latitude coordinate of the location
lon: float
longitude coordinate of the location
Returns
-------
stop_I: int
the index of the stop in the database
"""
cur = self.conn.cursor()
min_dist = float("inf")
min_stop_I = None
rows = cur.execute("SELECT stop_I, lat, lon FROM stops")
for stop_I, lat_s, lon_s in rows:
dist_now = wgs84_distance(lat, lon, lat_s, lon_s)
if dist_now < min_dist:
min_dist = dist_now
min_stop_I = stop_I
return min_stop_I
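# Usage sketch (coordinates are illustrative; `gtfs` is an opened GTFS instance):
# stop_I = gtfs.get_closest_stop(60.1699, 24.9384)   # stop nearest to the given WGS84 point
# lat, lon = gtfs.get_stop_coordinates(stop_I)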
def get_stop_coordinates(self, stop_I):
cur = self.conn.cursor()
results = cur.execute("SELECT lat, lon FROM stops WHERE stop_I={stop_I}".format(stop_I=stop_I))
lat, lon = results.fetchone()
return lat, lon
def get_bounding_box_by_stops(self, stop_Is, buffer_ratio=None):
lats = []
lons = []
for stop_I in stop_Is:
lat, lon = self.get_stop_coordinates(stop_I)
lats.append(lat)
lons.append(lon)
min_lat = min(lats)
max_lat = max(lats)
min_lon = min(lons)
max_lon = max(lons)
lon_diff = 0
lat_diff = 0
if buffer_ratio:
distance = buffer_ratio * wgs84_distance(min_lat, min_lon, max_lat, max_lon)
lat_diff = wgs84_height(distance)
lon_diff = wgs84_width(distance, (max_lat - min_lat) / 2 + min_lat)
return {"lat_min": min_lat - lat_diff,
"lat_max": max_lat + lat_diff,
"lon_min": min_lon - lon_diff,
"lon_max": max_lon + lon_diff}
def get_route_name_and_type_of_tripI(self, trip_I):
"""
Get route short name and type
Parameters
----------
trip_I: int
short trip index created when creating the database
Returns
-------
name: str
short name of the route, eg. 195N
type: int
route_type according to the GTFS standard
"""
cur = self.conn.cursor()
results = cur.execute("SELECT name, type FROM routes JOIN trips USING(route_I) WHERE trip_I={trip_I}"
.format(trip_I=trip_I))
name, rtype = results.fetchone()
return u"%s" % str(name), int(rtype)
def get_route_name_and_type(self, route_I):
"""
Get route short name and type
Parameters
----------
route_I: int
route index (database specific)
Returns
-------
name: str
short name of the route, eg. 195N
type: int
route_type according to the GTFS standard
"""
cur = self.conn.cursor()
results = cur.execute("SELECT name, type FROM routes WHERE route_I=(?)", (route_I,))
name, rtype = results.fetchone()
return name, int(rtype)
def get_trip_stop_coordinates(self, trip_I):
"""
Get coordinates for a given trip_I
Parameters
----------
trip_I : int
the integer id of the trip
Returns
-------
stop_coords : pandas.DataFrame
with columns "lats" and "lons"
"""
query = """SELECT lat, lon
FROM stop_times
JOIN stops
USING(stop_I)
WHERE trip_I={trip_I}
ORDER BY stop_times.seq""".format(trip_I=trip_I)
stop_coords = pd.read_sql(query, self.conn)
return stop_coords
def get_trip_stop_time_data(self, trip_I, day_start_ut):
"""
Obtain from the (standard) GTFS database, trip stop data
(departure time in ut, lat, lon, seq, shape_break) as a pandas DataFrame
Some filtering could be applied here, if only e.g. departure times
corresponding within some time interval should be considered.
Parameters
----------
trip_I : int
integer index of the trip
day_start_ut : int
the start time of the day in unix time (seconds)
Returns
-------
df: pandas.DataFrame
df has the following columns
'stop_I, dep_time_ut, lat, lon, seq, shape_break'
"""
to_select = "stop_I, " + str(day_start_ut) + "+dep_time_ds AS dep_time_ut, lat, lon, seq, shape_break"
str_to_run = "SELECT " + to_select + """
FROM stop_times JOIN stops USING(stop_I)
WHERE (trip_I ={trip_I}) ORDER BY seq
"""
str_to_run = str_to_run.format(trip_I=trip_I)
return pd.read_sql_query(str_to_run, self.conn)
def get_events_by_tripI_and_dsut(self, trip_I, day_start_ut,
start_ut=None, end_ut=None):
"""
Get trip data as a list of events (i.e. dicts).
Parameters
----------
trip_I : int
shorthand index of the trip.
day_start_ut : int
the start time of the day in unix time (seconds)
start_ut : int, optional
consider only events that start after this time
If not specified, this filtering is not applied.
end_ut : int, optional
Consider only events that end before this time
If not specified, this filtering is not applied.
Returns
-------
events: list of dicts
each element contains the following data:
from_stop: int (stop_I)
to_stop: int (stop_I)
dep_time_ut: int (in unix time)
arr_time_ut: int (in unix time)
"""
# for checking input:
assert day_start_ut <= start_ut
assert day_start_ut <= end_ut
assert start_ut <= end_ut
events = []
# check that trip takes place on that day:
if not self.tripI_takes_place_on_dsut(trip_I, day_start_ut):
return events
query = """SELECT stop_I, arr_time_ds+?, dep_time_ds+?
FROM stop_times JOIN stops USING(stop_I)
WHERE
(trip_I = ?)
"""
params = [day_start_ut, day_start_ut,
trip_I]
if start_ut:
query += "AND (dep_time_ds > ?-?)"
params += [start_ut, day_start_ut]
if end_ut:
query += "AND (arr_time_ds < ?-?)"
params += [end_ut, day_start_ut]
query += "ORDER BY arr_time_ds"
cur = self.conn.cursor()
rows = cur.execute(query, params)
stop_data = list(rows)
for i in range(len(stop_data) - 1):
event = {
"from_stop": stop_data[i][0],
"to_stop": stop_data[i + 1][0],
"dep_time_ut": stop_data[i][2],
"arr_time_ut": stop_data[i + 1][1]
}
events.append(event)
return events
def tripI_takes_place_on_dsut(self, trip_I, day_start_ut):
"""
Check that a trip takes place during a day
Parameters
----------
trip_I : int
index of the trip in the gtfs data base
day_start_ut : int
the starting time of the day in unix time (seconds)
Returns
-------
takes_place: bool
boolean value describing whether the trip takes place during
the given day or not
"""
query = "SELECT * FROM days WHERE trip_I=? AND day_start_ut=?"
params = (trip_I, day_start_ut)
cur = self.conn.cursor()
rows = list(cur.execute(query, params))
if len(rows) == 0:
return False
else:
assert len(rows) == 1, 'On a day, a trip_I should be present at most once'
return True
# unused and (untested) code:
#
# def get_tripIs_from_stopI_within_time_range(self, stop_I, day_start_ut, start_ut, end_ut):
# """
# Obtain a list of trip_Is that go through some stop during a given time.
#
# Parameters
# ----------
# stop_I : int
# index of the stop to be considered
# day_start_ut : int
# start of the day in unix time (seconds)
# start_ut: int
# the first possible departure time from the stop
# in unix time (seconds)
# end_ut: int
# the last possible departure time from the stop
# in unix time (seconds)
#
# Returns
# -------
# trip_Is: list
# list of integers (trip_Is)
# """
# start_ds = start_ut - day_start_ut
# end_ds = end_ut - day_start_ut
# # is the _distinct_ really required?
# query = "SELECT distinct(trip_I) " \
# "FROM days " \
# "JOIN stop_times " \
# "USING(trip_I) " \
# "WHERE (days.day_start_ut == ?)" \
# "AND (stop_times.stop_I=?) " \
# "AND (stop_times.dep_time_ds >= ?) " \
# "AND (stop_times.dep_time_ds <= ?)"
# params = (day_start_ut, stop_I, start_ds, end_ds)
# cur = self.conn.cursor()
# trip_Is = [el[0] for el in cur.execute(query, params)]
# return trip_Is
def day_start_ut(self, ut):
"""
Convert unixtime to unixtime on GTFS start-of-day.
GTFS defines the start of a day as "noon minus 12 hours" to solve
most DST-related problems. This means that on DST-changing days,
the day start isn't midnight. This function isn't idempotent.
Running it twice on the "move clocks backwards" day will result in
being one day too early.
Parameters
----------
ut: int
Unixtime
Returns
-------
ut: int
Unixtime corresponding to start of day
"""
# set timezone to the one of gtfs
old_tz = self.set_current_process_time_zone()
ut = time.mktime(time.localtime(ut)[:3] + (12, 00, 0, 0, 0, -1)) - 43200
set_process_timezone(old_tz)
return ut
def increment_day_start_ut(self, day_start_ut, n_days=1):
"""Increment the GTFS-definition of "day start".
Parameters
----------
day_start_ut : int
unixtime of the previous start of day. If this time is between
12:00 or greater, there *will* be bugs. To solve this, run the
input through day_start_ut first.
n_days: int
number of days to increment
"""
old_tz = self.set_current_process_time_zone()
day0 = time.localtime(day_start_ut + 43200) # time of noon
dayN = time.mktime(day0[:2] + # YYYY, MM
(day0[2] + n_days,) + # DD
(12, 00, 0, 0, 0, -1)) - 43200 # HHMM, etc. Minus 12 hours.
set_process_timezone(old_tz)
return dayN
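# Sketch of how the two helpers above are meant to be combined (`some_unixtime` is a placeholder):
# dsut = gtfs.day_start_ut(some_unixtime)                      # snap an arbitrary unixtime to the GTFS day start
# next_dsut = gtfs.increment_day_start_ut(dsut, n_days=1)      # DST-safe "one day later"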
def _get_possible_day_starts(self, start_ut, end_ut, max_time_overnight=None):
"""
Get all possible day start times between start_ut and end_ut
Currently this function is used only by get_tripIs_within_range_by_dsut
Parameters
----------
start_ut : list<int>
start time in unix time
end_ut : list<int>
end time in unix time
max_time_overnight : list<int>
the maximum length of time that a trip can take place on
during the next day (i.e. after midnight run times like 25:35)
Returns
-------
day_start_times_ut : list
list of ints (unix times in seconds) for returning all possible day
start times
start_times_ds : list
list of ints (unix times in seconds) stating the valid start time in
day seconds
end_times_ds : list
list of ints (unix times in seconds) stating the valid end times in
day_seconds
"""
if max_time_overnight is None:
# 7 hours:
max_time_overnight = 7 * 60 * 60
# sanity checks for the timezone parameter
# assert timezone < 14
# assert timezone > -14
# tz_seconds = int(timezone*3600)
assert start_ut < end_ut
start_day_ut = self.day_start_ut(start_ut)
# start_day_ds = int(start_ut+tz_seconds) % seconds_in_a_day #??? needed?
start_day_ds = start_ut - start_day_ut
# assert (start_day_ut+tz_seconds) % seconds_in_a_day == 0
end_day_ut = self.day_start_ut(end_ut)
# end_day_ds = int(end_ut+tz_seconds) % seconds_in_a_day #??? needed?
# end_day_ds = end_ut - end_day_ut
# assert (end_day_ut+tz_seconds) % seconds_in_a_day == 0
# If we are early enough in a day that we might have trips from
# the previous day still running, decrement the start day.
if start_day_ds < max_time_overnight:
start_day_ut = self.increment_day_start_ut(start_day_ut, n_days=-1)
# day_start_times_ut = range(start_day_ut, end_day_ut+seconds_in_a_day, seconds_in_a_day)
# Create a list of all possible day start times. This is roughly
# range(day_start_ut, day_end_ut+1day, 1day).
day_start_times_ut = [start_day_ut]
while day_start_times_ut[-1] < end_day_ut:
day_start_times_ut.append(self.increment_day_start_ut(day_start_times_ut[-1]))
start_times_ds = []
end_times_ds = []
# For every possible day start:
for dsut in day_start_times_ut:
# start day_seconds starts at either zero, or time - daystart
day_start_ut = max(0, start_ut - dsut)
start_times_ds.append(day_start_ut)
# end day_seconds is time-day_start
day_end_ut = end_ut - dsut
end_times_ds.append(day_end_ut)
# Return three tuples which can be zip:ped together.
return day_start_times_ut, start_times_ds, end_times_ds
def get_tripIs_within_range_by_dsut(self,
start_time_ut,
end_time_ut):
"""
Obtain a list of trip_Is that take place during a time interval.
The trip needs to be only partially overlapping with the given time interval.
The grouping by dsut (day_start_ut) is required as the same trip_I could
take place on multiple days.
Parameters
----------
start_time_ut : int
start of the time interval in unix time (seconds)
end_time_ut: int
end of the time interval in unix time (seconds)
Returns
-------
trip_I_dict: dict
keys: day_start_times to list of integers (trip_Is)
"""
cur = self.conn.cursor()
assert start_time_ut <= end_time_ut
dst_ut, st_ds, et_ds = \
self._get_possible_day_starts(start_time_ut, end_time_ut, 7)
# noinspection PyTypeChecker
assert len(dst_ut) >= 0
trip_I_dict = {}
for day_start_ut, start_ds, end_ds in \
zip(dst_ut, st_ds, et_ds):
query = """
SELECT distinct(trip_I)
FROM days
JOIN trips
USING(trip_I)
WHERE
(days.day_start_ut == ?)
AND (
(trips.start_time_ds <= ?)
AND
(trips.end_time_ds >= ?)
)
"""
params = (day_start_ut, end_ds, start_ds)
trip_Is = [el[0] for el in cur.execute(query, params)]
if len(trip_Is) > 0:
trip_I_dict[day_start_ut] = trip_Is
return trip_I_dict
def stops(self):
"""
Get all stop data as a pandas DataFrame
Returns
-------
df: pandas.DataFrame
"""
return self.get_table("stops")
def stop(self, stop_I):
"""
Get data for an individual stop as a pandas DataFrame
Parameters
----------
stop_I : int
stop index
Returns
-------
stop: pandas.DataFrame
"""
return pd.read_sql_query("SELECT * FROM stops WHERE stop_I={stop_I}".format(stop_I=stop_I), self.conn)
def add_coordinates_to_df(self, df, join_column='stop_I', lat_name="lat", lon_name="lon"):
assert join_column in df.columns
stops_df = self.stops()
coord_df = stops_df[["stop_I", "lat", "lon"]]
df_merged = pd.merge(coord_df, df, left_on='stop_I', right_on=join_column)
df_merged.drop(["stop_I"], axis=1, inplace=True)
df_merged3 = df_merged.rename(columns={"lat": lat_name, "lon": lon_name})
return df_merged3
def get_n_stops(self):
return pd.read_sql_query("SELECT count(*) from stops;", self.conn).values[0, 0]
def get_modes(self):
modes = list(pd.read_sql_query("SELECT distinct(type) from routes;", self.conn).values.flatten())
return modes
def get_stops_for_route_type(self, route_type):
"""
Parameters
----------
route_type: int
Returns
-------
stops: pandas.DataFrame
"""
if route_type == WALK:
return self.stops()
else:
return pd.read_sql_query("SELECT DISTINCT stops.* "
"FROM stops JOIN stop_times ON stops.stop_I == stop_times.stop_I "
" JOIN trips ON stop_times.trip_I = trips.trip_I"
" JOIN routes ON trips.route_I == routes.route_I "
"WHERE routes.type=(?)", self.conn, params=(route_type,))
def get_stops_connected_to_stop(self):
pass
def generate_routable_transit_events(self, start_time_ut=None, end_time_ut=None, route_type=None):
"""
Generates events that take place during a time interval [start_time_ut, end_time_ut].
Each event only needs to partially overlap the given time interval.
Does not include walking events.
This is just a quick and dirty implementation providing a way to generate
events compatible with the routing algorithm.
Parameters
----------
start_time_ut: int
end_time_ut: int
route_type: ?
Yields
------
event: namedtuple
containing:
dep_time_ut: int
arr_time_ut: int
from_stop_I: int
to_stop_I: int
trip_I : int
route_type : int
seq: int
"""
from gtfspy.networks import temporal_network
df = temporal_network(self, start_time_ut=start_time_ut, end_time_ut=end_time_ut, route_type=route_type)
df.sort_values("dep_time_ut", ascending=False, inplace=True)
for row in df.itertuples():
yield row
def get_transit_events(self, start_time_ut=None, end_time_ut=None, route_type=None):
"""
Obtain a list of events that take place during a time interval.
Each event only needs to partially overlap the given time interval.
Does not include walking events.
Parameters
----------
start_time_ut : int
start of the time interval in unix time (seconds)
end_time_ut: int
end of the time interval in unix time (seconds)
route_type: int
consider only events for this route_type
Returns
-------
events: pandas.DataFrame
with the following columns and types
dep_time_ut: int
arr_time_ut: int
from_stop_I: int
to_stop_I: int
trip_I : int
shape_id : int
route_type : int
See also
--------
get_transit_events_in_time_span : an older version of the same thing
"""
table_name = self._get_day_trips_table_name()
event_query = "SELECT stop_I, seq, trip_I, route_I, routes.route_id AS route_id, routes.type AS route_type, " \
"shape_id, day_start_ut+dep_time_ds AS dep_time_ut, day_start_ut+arr_time_ds AS arr_time_ut " \
"FROM " + table_name + " " \
"JOIN trips USING(trip_I) " \
"JOIN routes USING(route_I) " \
"JOIN stop_times USING(trip_I)"
where_clauses = []
if end_time_ut:
where_clauses.append(table_name + ".start_time_ut< {end_time_ut}".format(end_time_ut=end_time_ut))
where_clauses.append("dep_time_ut <={end_time_ut}".format(end_time_ut=end_time_ut))
if start_time_ut:
where_clauses.append(table_name + ".end_time_ut > {start_time_ut}".format(start_time_ut=start_time_ut))
where_clauses.append("arr_time_ut >={start_time_ut}".format(start_time_ut=start_time_ut))
if route_type is not None:
assert route_type in ALL_ROUTE_TYPES
where_clauses.append("routes.type={route_type}".format(route_type=route_type))
if len(where_clauses) > 0:
event_query += " WHERE "
for i, where_clause in enumerate(where_clauses):
if i != 0:
event_query += " AND "
event_query += where_clause
# ordering is required for later stages
event_query += " ORDER BY trip_I, day_start_ut+dep_time_ds;"
events_result = pd.read_sql_query(event_query, self.conn)
# 'filter' results so that only real "events" are taken into account
from_indices = numpy.nonzero(
(events_result['trip_I'][:-1].values == events_result['trip_I'][1:].values) *
(events_result['seq'][:-1].values < events_result['seq'][1:].values)
)[0]
to_indices = from_indices + 1
# these should have same trip_ids
assert (events_result['trip_I'][from_indices].values == events_result['trip_I'][to_indices].values).all()
trip_Is = events_result['trip_I'][from_indices]
from_stops = events_result['stop_I'][from_indices]
to_stops = events_result['stop_I'][to_indices]
shape_ids = events_result['shape_id'][from_indices]
dep_times = events_result['dep_time_ut'][from_indices]
arr_times = events_result['arr_time_ut'][to_indices]
route_types = events_result['route_type'][from_indices]
route_ids = events_result['route_id'][from_indices]
route_Is = events_result['route_I'][from_indices]
durations = arr_times.values - dep_times.values
assert (durations >= 0).all()
from_seqs = events_result['seq'][from_indices]
to_seqs = events_result['seq'][to_indices]
data_tuples = zip(from_stops, to_stops, dep_times, arr_times,
shape_ids, route_types, route_ids, trip_Is,
durations, from_seqs, to_seqs, route_Is)
columns = ["from_stop_I", "to_stop_I", "dep_time_ut", "arr_time_ut",
"shape_id", "route_type", "route_id", "trip_I",
"duration", "from_seq", "to_seq", "route_I"]
df = pd.DataFrame.from_records(data_tuples, columns=columns)
return df
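# Hedged usage sketch (assumes `g` is a GTFS instance; route_type=3 is the
# GTFS code for buses):
#
#   events = g.get_transit_events(start_time_ut=t0, end_time_ut=t1, route_type=3)
#   events[["from_stop_I", "to_stop_I", "dep_time_ut", "arr_time_ut"]].head()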
def get_route_difference_with_other_db(self, other_gtfs, start_time, end_time, uniqueness_threshold=None,
uniqueness_ratio=None):
"""
Compares the routes (based on their stop sequences) in this schedule with the routes in another db and returns the ones without a match.
A uniqueness threshold or ratio can be used to allow small differences.
:param uniqueness_threshold:
:param uniqueness_ratio:
:return:
"""
from gtfspy.stats import frequencies_by_generated_route
this_df = frequencies_by_generated_route(self, start_time, end_time)
other_df = frequencies_by_generated_route(other_gtfs, start_time, end_time)
this_routes = {x: set(x.split(',')) for x in this_df["route"]}
other_routes = {x: set(x.split(',')) for x in other_df["route"]}
# this_df["route_set"] = this_df.apply(lambda x: set(x.route.split(',')), axis=1)
# other_df["route_set"] = other_df.apply(lambda x: set(x.route.split(',')), axis=1)
this_uniques = list(this_routes.keys())
other_uniques = list(other_routes.keys())
print("initial routes A:", len(this_uniques))
print("initial routes B:", len(other_uniques))
for i_key, i in this_routes.items():
for j_key, j in other_routes.items():
union = i | j
intersection = i & j
symmetric_difference = i ^ j
if uniqueness_ratio:
if len(intersection) / len(union) >= uniqueness_ratio:
try:
this_uniques.remove(i_key)
this_df = this_df[this_df["route"] != i_key]
except ValueError:
pass
try:
other_uniques.remove(j_key)
other_df = other_df[other_df["route"] != j_key]
except ValueError:
pass
print("unique routes A", len(this_df))
print("unique routes B", len(other_df))
return this_df, other_df
def get_section_difference_with_other_db(self, other_conn, start_time, end_time):
query = """SELECT from_stop_I, to_stop_I, sum(n_trips) AS n_trips, count(*) AS n_routes,
group_concat(route_id) AS all_routes FROM
(SELECT route_id, from_stop_I, to_stop_I, count(*) AS n_trips FROM
(SELECT stop_I AS from_stop_I, seq, trip_I FROM stop_times
WHERE dep_time_ds >= %s) t1,
(SELECT stop_I AS to_stop_I, seq, trip_I FROM stop_times
WHERE arr_time_ds <= %s) t2,
trips,
routes
WHERE t1.seq +1 = t2.seq AND t1.trip_I = t2.trip_I
AND t1.trip_I = trips.trip_I AND trips.route_I = routes.route_I
GROUP BY from_stop_I, to_stop_I, routes.route_I
ORDER BY route_id) sq1
GROUP BY from_stop_I, to_stop_I""" % (start_time, end_time)
prev_df = None
result = pd.DataFrame
for conn in [self.conn, other_conn]:
df = conn.execute_custom_query_pandas(query)
df.set_index(["from_stop_I", "to_stop_I"], inplace=True, drop=True)
if prev_df is not None:
result = prev_df.merge(df, how="outer", left_index=True, right_index=True, suffixes=["_old", "_new"])
break
prev_df = df
for suffix in ["_new", "_old"]:
result["all_routes" + suffix] = result["all_routes" + suffix].fillna(value="")
result["all_routes" + suffix] = result["all_routes" + suffix].apply(lambda x: x.split(","))
result.reset_index(inplace=True)
result.fillna(value=0, inplace=True)
for column in ["n_trips", "n_routes"]:
result["diff_" + column] = result[column + "_new"] - result[column + "_old"]
return result
def get_straight_line_transfer_distances(self, stop_I=None):
"""
Get (straight line) distances to stations that can be transferred to.
Parameters
----------
stop_I : int, optional
If not specified return all possible transfer distances
Returns
-------
distances: pandas.DataFrame
each row has the following items
from_stop_I: int
to_stop_I: int
d: float or int #distance in meters
"""
if stop_I is not None:
query = u""" SELECT from_stop_I, to_stop_I, d
FROM stop_distances
WHERE
from_stop_I=?
"""
params = (u"{stop_I}".format(stop_I=stop_I),)
else:
query = """ SELECT from_stop_I, to_stop_I, d
FROM stop_distances
"""
params = None
stop_data_df = pd.read_sql_query(query, self.conn, params=params)
return stop_data_df
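# Hedged usage sketch (assumes `g` is a GTFS instance and 123 is a
# hypothetical stop_I); distances are straight-line distances in meters:
#
#   transfers = g.get_straight_line_transfer_distances(stop_I=123)
#   close_by = transfers[transfers["d"] < 200]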
def update_stats(self, stats):
self.meta.update(stats)
self.meta['stats_calc_at_ut'] = time.time()
def get_approximate_schedule_time_span_in_ut(self):
"""
Return conservative estimates of start_time_ut and end_time_ut.
All trips, events etc. should start after start_time_ut_conservative and end before end_time_ut_conservative.
Returns
-------
start_time_ut_conservative : int
end_time_ut_conservative : int
"""
first_day_start_ut, last_day_start_ut = self.get_day_start_ut_span()
# 28 (instead of 24) comes from the GTFS standard
return first_day_start_ut, last_day_start_ut + 28 * 3600
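# Illustrative note (not from the original code): GTFS allows service past
# midnight, e.g. a departure scheduled at 26:30:00 still belongs to the
# previous service day, which is why 28 hours (rather than 24) are added above.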
def get_day_start_ut_span(self):
"""
Return the first and last day_start_ut
Returns
-------
first_day_start_ut: int
last_day_start_ut: int
"""
cur = self.conn.cursor()
first_day_start_ut, last_day_start_ut = \
cur.execute("SELECT min(day_start_ut), max(day_start_ut) FROM days;").fetchone()
return first_day_start_ut, last_day_start_ut
def get_min_date(self):
cur = self.conn.cursor()
return cur.execute("SELECT min(date) FROM days").fetchone()[0]
def get_max_date(self):
cur = self.conn.cursor()
return cur.execute("SELECT max(date) FROM days").fetchone()[0]
def print_validation_warnings(self):
"""
See TimetableValidator.validate_and_get_warnings for more information.
Returns
-------
warnings_container: validator.TimetableValidationWarningsContainer
"""
from .timetable_validator import TimetableValidator
validator = TimetableValidator(self)
return validator.validate_and_get_warnings()
def execute_custom_query(self, query):
return self.conn.cursor().execute(query)
def execute_custom_query_pandas(self, query):
return pd.read_sql(query, self.conn)
def get_stats(self):
from gtfspy import stats
return stats.get_stats(self)
def _get_day_trips_table_name(self):
cur = self.conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='day_trips2'")
if len(cur.fetchall()) > 0:
table_name = "day_trips2"
else:
table_name = "day_trips"
return table_name
# TODO: The following methods could be moved to a "edit gtfs" -module
def homogenize_stops_table_with_other_db(self, source):
"""
This function takes an external database, looks for common stops and adds the missing stops to both databases.
In addition the stop_pair_I column is added. This id links the stops between these two sources.
:param source: path of the external database
:return:
"""
cur = self.conn.cursor()
self.attach_gtfs_database(source)
query_inner_join = """SELECT t1.*
FROM stops t1
INNER JOIN other.stops t2
ON t1.stop_id=t2.stop_id
AND find_distance(t1.lon, t1.lat, t2.lon, t2.lat) <= 50"""
df_inner_join = self.execute_custom_query_pandas(query_inner_join)
print("number of common stops: ", len(df_inner_join.index))
df_not_in_other = self.execute_custom_query_pandas("SELECT * FROM stops EXCEPT " + query_inner_join)
print("number of stops missing in second feed: ", len(df_not_in_other.index))
df_not_in_self = self.execute_custom_query_pandas("SELECT * FROM other.stops EXCEPT " +
query_inner_join.replace("t1.*", "t2.*"))
print("number of stops missing in first feed: ", len(df_not_in_self.index))
try:
self.execute_custom_query("""ALTER TABLE stops ADD COLUMN stop_pair_I INT """)
self.execute_custom_query("""ALTER TABLE other.stops ADD COLUMN stop_pair_I INT """)
except sqlite3.OperationalError:
pass
stop_id_stub = "added_stop_"
counter = 0
rows_to_update_self = []
rows_to_update_other = []
rows_to_add_to_self = []
rows_to_add_to_other = []
for items in df_inner_join.itertuples(index=False):
rows_to_update_self.append((counter, items[1]))
rows_to_update_other.append((counter, items[1]))
counter += 1
for items in df_not_in_other.itertuples(index=False):
rows_to_update_self.append((counter, items[1]))
rows_to_add_to_other.append((stop_id_stub + str(counter),) + tuple(items[x] for x in [2, 3, 4, 5, 6, 8, 9])
+ (counter,))
counter += 1
for items in df_not_in_self.itertuples(index=False):
rows_to_update_other.append((counter, items[1]))
rows_to_add_to_self.append((stop_id_stub + str(counter),) + tuple(items[x] for x in [2, 3, 4, 5, 6, 8, 9])
+ (counter,))
counter += 1
query_add_row = """INSERT INTO stops(
stop_id,
code,
name,
desc,
lat,
lon,
location_type,
wheelchair_boarding,
stop_pair_I) VALUES (%s) """ % (", ".join(["?" for x in range(9)]))
query_update_row = """UPDATE stops SET stop_pair_I=? WHERE stop_id=?"""
print("adding rows to databases")
cur.executemany(query_add_row, rows_to_add_to_self)
cur.executemany(query_update_row, rows_to_update_self)
cur.executemany(query_add_row.replace("stops", "other.stops"), rows_to_add_to_other)
cur.executemany(query_update_row.replace("stops", "other.stops"), rows_to_update_other)
self.conn.commit()
print("finished")
def replace_stop_i_with_stop_pair_i(self):
cur = self.conn.cursor()
queries = [
"UPDATE stop_times SET stop_I = "
"(SELECT stops.stop_pair_I AS stop_I FROM stops WHERE stops.stop_I = stop_times.stop_I)",
# Replace stop_distances
"ALTER TABLE stop_distances RENAME TO stop_distances_old",
"CREATE TABLE stop_distances (from_stop_I INT, to_stop_I INT, d INT, d_walk INT, min_transfer_time INT, "
"timed_transfer INT, UNIQUE (from_stop_I, to_stop_I))",
"INSERT INTO stop_distances(from_stop_I, to_stop_I, d, d_walk, min_transfer_time, timed_transfer) "
"SELECT f_stop.stop_pair_I AS from_stop_I, t_stop.stop_pair_I AS to_stop_I, d, d_walk, min_transfer_time, "
"timed_transfer "
"FROM "
"(SELECT from_stop_I, to_stop_I, d, d_walk, min_transfer_time, "
"timed_transfer "
"FROM stop_distances_old) sd_o "
"LEFT JOIN "
"(SELECT stop_I, stop_pair_I FROM stops) f_stop "
"ON sd_o.from_stop_I = f_stop.stop_I "
" JOIN "
"(SELECT stop_I, stop_pair_I FROM stops) t_stop "
"ON sd_o.to_stop_I = t_stop.stop_I ;",
"DROP TABLE stop_distances_old",
# Replace stops table with other
"ALTER TABLE stops RENAME TO stops_old",
"CREATE TABLE stops (stop_I INTEGER PRIMARY KEY, stop_id TEXT UNIQUE NOT NULL, code TEXT, name TEXT, "
"desc TEXT, lat REAL, lon REAL, parent_I INT, location_type INT, wheelchair_boarding BOOL, "
"self_or_parent_I INT, old_stop_I INT)",
"INSERT INTO stops(stop_I, stop_id, code, name, desc, lat, lon, parent_I, location_type, "
"wheelchair_boarding, self_or_parent_I, old_stop_I) "
"SELECT stop_pair_I AS stop_I, stop_id, code, name, desc, lat, lon, parent_I, location_type, "
"wheelchair_boarding, self_or_parent_I, stop_I AS old_stop_I "
"FROM stops_old;",
"DROP TABLE stops_old",
"CREATE INDEX idx_stops_sid ON stops (stop_I)"]
for query in queries:
cur.execute(query)
self.conn.commit()
def regenerate_parent_stop_I(self):
raise NotImplementedError
# get max stop_I
cur = self.conn.cursor()
query = "SELECT stop_I FROM stops ORDER BY stop_I DESC LIMIT 1"
max_stop_I = cur.execute(query).fetchall()[0]
query_update_row = """UPDATE stops SET parent_I=? WHERE parent_I=?"""
def add_stops_from_csv(self, csv_dir):
stops_to_add = pd.read_csv(csv_dir, encoding='utf-8')
assert all([x in stops_to_add.columns for x in ["stop_id", "code", "name", "desc", "lat", "lon"]])
for s in stops_to_add.itertuples():
self.add_stop(s.stop_id, s.code, s.name, s.desc, s.lat, s.lon)
def add_stop(self, stop_id, code, name, desc, lat, lon):
cur = self.conn.cursor()
query_add_row = 'INSERT INTO stops( stop_id, code, name, desc, lat, lon) ' \
'VALUES (?, ?, ?, ?, ?, ?)'
cur.executemany(query_add_row, [[stop_id, code, name, desc, lat, lon]])
self.conn.commit()
def recalculate_stop_distances(self, max_distance):
from gtfspy.calc_transfers import calc_transfers
calc_transfers(self.conn, max_distance)
def attach_gtfs_database(self, gtfs_dir):
cur = self.conn.cursor()
cur.execute("ATTACH '%s' AS 'other'" % str(gtfs_dir))
cur.execute("PRAGMA database_list")
print("GTFS database attached:", cur.fetchall())
def update_stop_coordinates(self, stop_updates):
"""
:param stop_updates: DataFrame
:return:
"""
cur = self.conn.cursor()
stop_values = [(values.lat, values.lon, values.stop_id) for values in stop_updates.itertuples()]
cur.executemany("""UPDATE stops SET lat = ?, lon = ? WHERE stop_id = ?""", stop_values)
self.conn.commit()
class GTFSMetadata(object):
"""
This provides a dictionary protocol for reading and updating the GTFS metadata ("metadata" table).
TODO: does not rep ???
"""
def __init__(self, conn):
self._conn = conn
def __getitem__(self, key):
val = self._conn.execute('SELECT value FROM metadata WHERE key=?',
(key,)).fetchone()
if not val:
raise KeyError("This GTFS does not have metadata: %s" % key)
return val[0]
def __setitem__(self, key, value):
"""Get metadata from the DB"""
if isinstance(value, bytes):
value = value.decode('utf-8')
self._conn.execute('INSERT OR REPLACE INTO metadata '
'(key, value) VALUES (?, ?)',
(key, value)).fetchone()
self._conn.commit()
def __delitem__(self, key):
self._conn.execute('DELETE FROM metadata WHERE key=?',
(key,)).fetchone()
self._conn.commit()
def __iter__(self):
cur = self._conn.execute('SELECT key FROM metadata ORDER BY key')
return (x[0] for x in cur)
def __contains__(self, key):
val = self._conn.execute('SELECT value FROM metadata WHERE key=?',
(key,)).fetchone()
return val is not None
def get(self, key, default=None):
val = self._conn.execute('SELECT value FROM metadata WHERE key=?',
(key,)).fetchone()
if not val:
return default
return val[0]
def items(self):
cur = self._conn.execute('SELECT key, value FROM metadata ORDER BY key')
return cur
def keys(self):
cur = self._conn.execute('SELECT key FROM metadata ORDER BY key')
return cur
def values(self):
cur = self._conn.execute('SELECT value FROM metadata ORDER BY key')
return cur
def update(self, dict_):
# Would be more efficient to do it in a new query here, but
# preferring simplicity. metadata updates are probably
# infrequent.
if hasattr(dict_, 'items'):
for key, value in dict_.items():
self[key] = value
else:
for key, value in dict_:
self[key] = value
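# Hedged usage sketch (assumes `g` is a GTFS instance, so `g.meta` is a
# GTFSMetadata object): the metadata table is accessed via the dictionary
# protocol implemented above, e.g.
#
#   g.meta['download_date'] = '2016-04-06'
#   if 'download_date' in g.meta:
#       print(g.meta['download_date'])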
def main(cmd, args):
from gtfspy import filter
# noinspection PyPackageRequirements
if cmd == 'stats':
print(args[0])
G = GTFS(args[0])
stats = G.get_stats()
G.update_stats(stats)
for row in G.meta.items():
print(row)
elif cmd == "validate":
G = GTFS(args[0])
G.print_validation_warnings()
elif cmd == 'metadata-list':
# print args[0] # need to not print to be valid json on stdout
G = GTFS(args[0])
# for row in G.meta.items():
# print row
stats = dict(G.meta.items())
import json
print(json.dumps(stats, sort_keys=True,
indent=4, separators=(',', ': ')))
elif cmd == 'make-daily':
from_db = args[0]
g = GTFS(from_db)
to_db = args[1]
download_date = g.meta['download_date']
d = datetime.datetime.strptime(download_date, '%Y-%m-%d').date()
start_time = d + datetime.timedelta(7 - d.isoweekday() + 1) # inclusive
end_time = d + datetime.timedelta(7 - d.isoweekday() + 1 + 1) # exclusive
filter.filter_extract(g, to_db, start_date=start_time, end_date=end_time)
elif cmd == 'make-weekly':
from_db = args[0]
g = GTFS(from_db)
to_db = args[1]
download_date = g.meta['download_date']
d = datetime.datetime.strptime(download_date, '%Y-%m-%d').date()
start_time = d + datetime.timedelta(7 - d.isoweekday() + 1) # inclusive
end_time = d + datetime.timedelta(7 - d.isoweekday() + 1 + 7) # exclusive
print(start_time, end_time)
filter.filter_extract(g, to_db, start_date=start_time, end_date=end_time)
elif cmd == "spatial-extract":
try:
from_db = args[0]
lat = float(args[1])
lon = float(args[2])
radius_in_km = float(args[3])
to_db = args[4]
except Exception as e:
print("spatial-extract usage: python gtfs.py spatial-extract fromdb.sqlite center_lat center_lon "
"radius_in_km todb.sqlite")
raise e
logging.basicConfig(level=logging.INFO)
logging.info("Loading initial database")
g = GTFS(from_db)
filter.filter_extract(g, to_db, buffer_distance=radius_in_km * 1000, buffer_lat=lat, buffer_lon=lon)
elif cmd == 'interact':
# noinspection PyUnusedLocal
G = GTFS(args[0])
# noinspection PyPackageRequirements
import IPython
IPython.embed()
elif 'export_shapefile' in cmd:
from gtfspy.util import write_shapefile
from_db = args[
0] # '/m/cs/project/networks/jweckstr/transit/scratch/proc_latest/helsinki/2016-04-06/main.day.sqlite'
shapefile_path = args[1] # '/m/cs/project/networks/jweckstr/TESTDATA/helsinki_routes.shp'
g = GTFS(from_db)
if cmd == 'export_shapefile_routes':
data = g.get_all_route_shapes(use_shapes=True)
elif cmd == 'export_shapefile_segment_counts':
date = args[2] # '2016-04-06'
d = datetime.datetime.strptime(date, '%Y-%m-%d').date()
day_start = g.get_day_start_ut(d + datetime.timedelta(7 - d.isoweekday() + 1))
start_time = day_start + 3600 * 7
end_time = day_start + 3600 * 8
data = g.get_segment_count_data(start_time, end_time, use_shapes=True)
write_shapefile(data, shapefile_path)
else:
print("Unrecognized command: %s" % cmd)
exit(1)
if __name__ == "__main__":
main(sys.argv[1], sys.argv[2:])
| 39.859271
| 127
| 0.575445
|
3a4c62880f7d84686898a28d3a02f2a767b78fbe
| 2,735
|
py
|
Python
|
src/gtk/toga_gtk/window.py
|
jrwdunham/toga
|
db343c99d0f174aea86c4656849ea25ffb7bfe7a
|
[
"BSD-3-Clause"
] | null | null | null |
src/gtk/toga_gtk/window.py
|
jrwdunham/toga
|
db343c99d0f174aea86c4656849ea25ffb7bfe7a
|
[
"BSD-3-Clause"
] | null | null | null |
src/gtk/toga_gtk/window.py
|
jrwdunham/toga
|
db343c99d0f174aea86c4656849ea25ffb7bfe7a
|
[
"BSD-3-Clause"
] | null | null | null |
from gi.repository import Gtk
from toga.interface.window import Window as WindowInterface
from .command import SEPARATOR, SPACER, EXPANDING_SPACER
from .container import Container
from .utils import wrapped_handler
from . import dialogs
class Window(WindowInterface):
_IMPL_CLASS = Gtk.Window
_CONTAINER_CLASS = Container
_DIALOG_MODULE = dialogs
def __init__(self, title=None, position=(100, 100), size=(640, 480), toolbar=None, resizeable=True, closeable=True, minimizable=True):
super().__init__(title=title, position=position, size=size, toolbar=toolbar, resizeable=resizeable, closeable=closeable, minimizable=minimizable)
self._create()
def create(self):
self._impl = self._IMPL_CLASS()
self._impl.connect("delete-event", self._on_close)
self._impl.set_default_size(self._size[0], self._size[1])
def _set_title(self, title):
self._impl.set_title(title)
def _set_app(self, app):
app._impl.add_window(self._impl)
def _set_toolbar(self, items):
self._toolbar_impl = Gtk.Toolbar()
self._toolbar_impl.set_style(Gtk.ToolbarStyle.BOTH)
for toolbar_item in items:
if toolbar_item in (SEPARATOR, SPACER, EXPANDING_SPACER):
item_impl = Gtk.SeparatorToolItem()
if toolbar_item == EXPANDING_SPACER:
item_impl.set_expand(True)
item_impl.set_draw(toolbar_item == SEPARATOR)
else:
item_impl = Gtk.ToolButton()
item_impl.set_icon_widget(toolbar_item.icon._impl_32)
item_impl.set_label(toolbar_item.label)
item_impl.set_tooltip_text(toolbar_item.tooltip)
item_impl.connect("clicked", wrapped_handler(toolbar_item, toolbar_item.action))
toolbar_item._widgets.append(item_impl)
self._toolbar_impl.insert(item_impl, -1)
def _set_content(self, widget):
self._window_layout = Gtk.VBox()
if self._toolbar:
self._window_layout.pack_start(self._toolbar_impl, False, False, 0)
self._window_layout.pack_start(self._container._impl, True, True, 0)
self._impl.add(self._window_layout)
self._container._impl.connect('size-allocate', self._on_size_allocate)
def show(self):
self._impl.show_all()
def _on_close(self, widget, data):
self.on_close()
def _on_size_allocate(self, widget, allocation):
# print("ON WINDOW SIZE ALLOCATION", allocation.width, allocation.height)
self.content._update_layout(
width=allocation.width,
height=allocation.height
)
def close(self):
self._impl.close()
| 35.986842
| 153
| 0.670201
|
54d7caf3003cc8c38a940ad9f53736eeae0dfda4
| 2,817
|
py
|
Python
|
Pilot3/P3B5/p3b5_darts.py
|
vgutta/Benchmarks
|
f739c1fb2b02dd8fb310e2182fa8c4baaaea7caf
|
[
"MIT"
] | 51
|
2017-01-24T20:57:27.000Z
|
2022-02-15T00:33:45.000Z
|
Pilot3/P3B5/p3b5_darts.py
|
vgutta/Benchmarks
|
f739c1fb2b02dd8fb310e2182fa8c4baaaea7caf
|
[
"MIT"
] | 59
|
2017-08-21T22:19:44.000Z
|
2021-11-01T16:05:35.000Z
|
Pilot3/P3B5/p3b5_darts.py
|
vgutta/Benchmarks
|
f739c1fb2b02dd8fb310e2182fa8c4baaaea7caf
|
[
"MIT"
] | 90
|
2016-11-22T03:57:07.000Z
|
2022-01-11T04:43:23.000Z
|
import os
import sys
import candle
import p3b5 as bmk
import torch
import torch.nn as nn
from torch import optim
import torch.nn.functional as F
from torch.utils.data import DataLoader
file_path = os.path.dirname(os.path.realpath(__file__))
lib_path = os.path.abspath(os.path.join(file_path, '..'))
sys.path.append(lib_path)
lib_path2 = os.path.abspath(os.path.join(file_path, '..', '..', 'common'))
sys.path.append(lib_path2)
import darts
def train(trainloader, validloader, model, architecture,
criterion, optimizer, lr, args, tasks, device, meter):
valid_iter = iter(trainloader)
for step, (data, target) in enumerate(trainloader):
batch_size = data.size(0)
model.train()
data = data.to(device)
for task, label in target.items():
target[task] = target[task].to(device)
x_search, target_search = next(valid_iter)
x_search = x_search.to(device)
for task, label in target_search.items():
target_search[task] = target_search[task].to(device)
# 1. update alpha
architecture.step(
data,
target,
x_search,
target_search,
lr,
optimizer,
unrolled=args.unrolled
)
logits = model(data)
loss = darts.multitask_loss(target, logits, criterion, reduce='mean')
# 2. update weight
optimizer.zero_grad()
loss.backward()
nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
optimizer.step()
prec1 = darts.multitask_accuracy_topk(logits, target)
meter.update_batch_loss(loss.item(), batch_size)
meter.update_batch_accuracy(prec1, batch_size)
if step % args.log_interval == 0:
print(f'Step: {step} loss: {meter.loss_meter.avg:.4}')
meter.update_epoch()
meter.save(args.save_path)
def infer(validloader, model, criterion, args, tasks, device, meter):
model.eval()
with torch.no_grad():
for step, (data, target) in enumerate(validloader):
data = data.to(device)
for task, label in target.items():
target[task] = target[task].to(device)
batch_size = data.size(0)
logits = model(data)
loss = darts.multitask_loss(target, logits, criterion, reduce='mean')
prec1 = darts.multitask_accuracy_topk(logits, target)
meter.update_batch_loss(loss.item(), batch_size)
meter.update_batch_accuracy(prec1, batch_size)
if step % args.log_interval == 0:
print(f'>> Validation: {step} loss: {meter.loss_meter.avg:.4}')
meter.update_epoch()
meter.save(args.save_path)
return meter.loss_meter.avg
if __name__ == '__main__':
main()
| 26.828571
| 81
| 0.624778
|
c950cc5e703e07a9357ee9957b76d59c2f9f4f94
| 1,436
|
py
|
Python
|
setup.py
|
bmjjr/pyecore
|
d8ef263e259dc0835806a15a21fc68f581ef27bf
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
bmjjr/pyecore
|
d8ef263e259dc0835806a15a21fc68f581ef27bf
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
bmjjr/pyecore
|
d8ef263e259dc0835806a15a21fc68f581ef27bf
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import sys
from setuptools import setup
if sys.version_info < (3, 3):
sys.exit('Sorry, Python < 3.3 is not supported')
packages = ['pyecore',
'pyecore.resources',
'pyecore.type']
setup(
name='pyecore',
version='0.9.0',
description=('A Python(ic) Implementation of the Eclipse Modeling '
'Framework (EMF/Ecore)'),
long_description=open('README.rst').read(),
keywords='model metamodel EMF Ecore MDE',
url='https://github.com/pyecore/pyecore',
author='Vincent Aranega',
author_email='vincent.aranega@gmail.com',
packages=packages,
package_data={'': ['README.rst', 'LICENSE', 'CHANGELOG.rst']},
include_package_data=True,
install_requires=['ordered-set',
'lxml',
'defusedxml',
'restrictedpython>=4.0b6'],
tests_require=['pytest'],
license='BSD 3-Clause',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
]
)
| 31.217391
| 71
| 0.589136
|
369e9b0e28e5fac18e6c9b72b46907ce27baed0e
| 16,639
|
py
|
Python
|
src/unity/python/turicreate/test/test_python_decision_tree.py
|
fossabot/turicreate
|
a500d5e52143ad15ebdf771d9f74198982c7c45c
|
[
"BSD-3-Clause"
] | 1
|
2019-04-16T19:51:18.000Z
|
2019-04-16T19:51:18.000Z
|
src/unity/python/turicreate/test/test_python_decision_tree.py
|
tashby/turicreate
|
7f07ce795833d0c56c72b3a1fb9339bed6d178d1
|
[
"BSD-3-Clause"
] | 3
|
2021-09-08T02:18:00.000Z
|
2022-03-12T00:39:44.000Z
|
src/unity/python/turicreate/test/test_python_decision_tree.py
|
tashby/turicreate
|
7f07ce795833d0c56c72b3a1fb9339bed6d178d1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright © 2017 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
import unittest
import turicreate as tc
from turicreate.toolkits._decision_tree import DecisionTree, Node
from turicreate.toolkits._main import ToolkitError
def _make_tree(sf):
model = tc.decision_tree_classifier.create(sf,
'target', validation_set = None, max_depth=10)
tree = DecisionTree.from_model(model)
return tree
class PythonDecisionTreeCorrectness(unittest.TestCase):
def test_categorical(self):
# Arrange
sf = tc.SFrame({
'cat1': ['1', '1', '2', '2', '2'] * 100,
'cat2': ['1', '3', '3', '1', '1'] * 100,
'target': ['1', '2', '1', '2', '1'] * 100,
})
# Act
tree = _make_tree(sf)
root = tree.root
# Check the root node.
self.assertEquals(len(tree.nodes), 7)
self.assertEquals(root.to_dict(), {'is_leaf': False,
'left_id': 2,
'node_id': 0,
'missing_id': 1,
'node_type': u'indicator',
'parent_id': None,
'right_id': 1,
'split_feature_column': 'cat1',
'split_feature_index': '1',
'value': 1})
# Check prediction paths.
self.assertEquals(tree.get_prediction_path(0), [])
self.assertEquals(tree.get_prediction_path(1), [
{'child_id': 1, 'feature': 'cat1', 'index': '1',
'node_type': 'indicator', 'node_id': 0, 'sign': '!=',
'value': 1, 'is_missing': False}])
self.assertEquals(tree.get_prediction_path(2), [{'child_id': 2,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '=',
'value': 1, 'node_type': 'indicator', 'is_missing': False}])
self.assertEquals(tree.get_prediction_path(3), [{'child_id': 1,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '!=',
'value': 1, 'node_type': 'indicator', 'is_missing': False},
{'child_id': 3, 'feature': 'cat2', 'index': '1', 'node_id': 1,
'sign': '!=', 'value': 1, 'node_type': 'indicator',
'is_missing': False}])
self.assertEquals(tree.get_prediction_path(4), [{'child_id': 1,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '!=',
'value': 1, 'node_type': 'indicator', 'is_missing': False}, {'child_id': 4,
'feature': 'cat2', 'index': '1', 'node_id': 1, 'sign': '=', 'value': 1,
'node_type': 'indicator', 'is_missing': False}])
self.assertEquals(tree.get_prediction_path(5), [{'child_id': 2,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '=',
'value': 1, 'node_type': 'indicator', 'is_missing': False},
{'child_id': 5, 'feature': 'cat2', 'index': '1', 'node_id': 2,
'sign': '!=', 'value': 1,'node_type': 'indicator', 'is_missing': False}])
self.assertEquals(tree.get_prediction_path(6), [{'child_id': 2,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '=',
'value': 1, 'node_type': 'indicator', 'is_missing': False},
{'child_id': 6, 'feature': 'cat2', 'index': '1', 'node_id': 2,
'sign': '=', 'value': 1, 'node_type': 'indicator',
'is_missing': False}])
def test_dict(self):
# Arrange
sf = tc.SFrame({
'cat1': ['1', '1', '2', '2', '2'] * 100,
'cat2': ['1', '3', '3', '1', '1'] * 100,
'target': ['1', '2', '1', '2', '1'] * 100,
})
sf['cat1'] = sf['cat1'].apply(lambda x: {x:1})
sf['cat2'] = sf['cat2'].apply(lambda x: {x:1})
# Act
tree = _make_tree(sf)
root = tree.root
# Check the root node.
self.assertEquals(len(tree.nodes), 7)
self.assertEquals(root.to_dict(), {'is_leaf': False,
'left_id': 1,
'node_id': 0,
'node_type': u'float',
'parent_id': None,
'right_id': 2,
'missing_id': 1,
'split_feature_column': 'cat1',
'split_feature_index': '1',
'value': -1e-5})
# Check prediction paths.
self.assertEquals(tree.get_prediction_path(0), [])
self.assertEquals(tree.get_prediction_path(1), [
{'child_id': 1, 'feature': 'cat1', 'index': '1',
'node_id': 0, 'sign': '<', 'value': -1e-5,'node_type': 'float',
'is_missing': False}])
self.assertEquals(tree.get_prediction_path(2), [{'child_id': 2,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '>=',
'value': -1e-5, 'node_type': 'float', 'is_missing': False}])
self.assertEquals(tree.get_prediction_path(3), [{'child_id': 1,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '<',
'value': -1e-05, 'node_type': 'float', 'is_missing': False}, {'child_id': 3,
'feature': 'cat2', 'index': '1', 'node_id': 1, 'sign': '<',
'value': -1e-05, 'node_type': 'float', 'is_missing': False}])
self.assertEquals(tree.get_prediction_path(4), [{'child_id': 1,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '<',
'value': -1e-05, 'node_type': 'float', 'is_missing': False},
{'child_id': 4, 'feature': 'cat2', 'index': '1', 'node_id': 1,
'sign': '>=', 'value': -1e-05, 'node_type': 'float',
'is_missing': False}])
self.assertEquals(tree.get_prediction_path(5), [{'child_id': 2,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '>=',
'value': -1e-05, 'node_type': 'float', 'is_missing': False},
{'child_id': 5, 'feature': 'cat2', 'index': '1', 'node_id': 2,
'sign': '<', 'value': -1e-05, 'node_type': 'float',
'is_missing': False}])
self.assertEquals(tree.get_prediction_path(6), [{'child_id': 2,
'feature': 'cat1', 'index': '1', 'node_id': 0, 'sign': '>=',
'value': -1e-05, 'node_type': 'float', 'is_missing': False},
{'child_id': 6, 'feature': 'cat2', 'index': '1', 'node_id': 2,
'sign': '>=', 'value': -1e-05, 'node_type': 'float',
'is_missing': False}])
def test_cat_dict(self):
# Arrange
sf = tc.SFrame({
'cat1': [str(i) for i in range(500)],
'dict2': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100,
'target': ['1', '2', '1', '2', '1'] * 100,
})
# Act
tree = _make_tree(sf)
root = tree.root
# Assert.
self.assertEquals(len(tree.nodes), 7)
self.assertEquals(root.to_dict(), {'is_leaf': False, 'left_id': 1,
'node_id': 0, 'node_type': u'float', 'parent_id': None, 'right_id':
2, 'split_feature_column': 'dict2', 'split_feature_index': '1',
'value': 2.05, 'node_type': 'float', 'missing_id': 1})
def test_numeric(self):
sf = tc.SFrame({
'num1' : [1,2,3.5,4,5] * 100,
'num2' : [1,2,3.5,4,5] * 100,
'num3' : [1,2,3.5,4,5] * 100,
'target': ['1', '2', '1', '2', '1'] * 100,
})
# Act
tree = _make_tree(sf)
root = tree.root
# Assert.
self.assertEquals(len(tree.nodes), 9)
self.assertEquals(root.to_dict(), {'is_leaf': False, 'left_id': 1,
'node_id': 0, 'node_type': u'float', 'parent_id': None, 'right_id':
2, 'split_feature_column': 'num1', 'split_feature_index': None,
'value': 4.5, 'node_type': 'float', 'missing_id': 1})
def test_vector(self):
sf = tc.SFrame({
'num1' : [1,2,3.5,4,5] * 100,
'num2' : [1,2,3.5,4,5] * 100,
'vect' : [[1,2,3.5,4,5]] * 500,
'target': ['1', '2', '1', '2', '1'] * 100,
})
# Act
tree = _make_tree(sf)
root = tree.root
# Assert.
self.assertEquals(len(tree.nodes), 9)
self.assertEquals(root.to_dict(), {'is_leaf': False, 'left_id': 1,
'node_id': 0, 'node_type': u'float', 'parent_id': None, 'right_id':
2, 'split_feature_column': 'num1', 'split_feature_index': None,
'value': 4.5, 'node_type': 'float', 'missing_id': 1})
def test_numeric_dict(self):
sf = tc.SFrame({
'num1' : [1,2,3.5,4,5] * 100,
'num2' : [1,2,3.5,4,5] * 100,
'vect' : [[1,2,3.5,4,5]] * 500,
'target': ['1', '2', '1', '2', '1'] * 100,
'dict[2]': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100,
})
# Act
tree = _make_tree(sf)
root = tree.root
# Assert.
self.assertEquals(len(tree.nodes), 7)
self.assertEquals(root.to_dict(), {'is_leaf': False, 'left_id': 1,
'node_id': 0, 'node_type': u'float', 'parent_id': None, 'right_id':
2, 'split_feature_column': 'dict[2]', 'split_feature_index': '1',
'value': 2.05, 'node_type': 'float', 'missing_id': 1})
class PythonDecisionTreeAllModelsTest(unittest.TestCase):
def _run_test(self, sf):
sf['target'] = [i < sf.num_rows()/2 for i in range(sf.num_rows())]
for model in [
tc.regression.boosted_trees_regression,
tc.classifier.boosted_trees_classifier,
tc.regression.random_forest_regression,
tc.classifier.random_forest_classifier,
tc.regression.decision_tree_regression,
tc.classifier.decision_tree_classifier]:
m = model.create(sf, 'target', validation_set = None, max_depth=2)
tree = DecisionTree.from_model(m)
for nid, node in tree.nodes.items():
val = tree.get_prediction_score(nid)
if node.is_leaf:
self.assertTrue(type(val) in {float, int})
else:
self.assertEquals(val, None)
def test_categorical_1(self):
sf = tc.SFrame({
'cat1': ['1', '1', '2', '2', '2'] * 100,
'cat2': ['1', '3', '3', '1', '1'] * 100
})
self._run_test(sf)
def test_categorical_2(self):
sf = tc.SFrame({
'cat[1]': ['1', '1', '2', '2', '2'] * 100,
'cat[2]': ['1', '3', '3', '1', '1'] * 100
})
self._run_test(sf)
def test_dict_1(self):
sf = tc.SFrame({
'dict1': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{'1' : 1, 'b' : 2},
{'1' : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100
})
self._run_test(sf)
def test_dict_2(self):
sf = tc.SFrame({
'dict1': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100
})
self._run_test(sf)
def test_dict_3(self):
sf = tc.SFrame({
'dict': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100,
'dict[2]': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100,
'dict[3]': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100
})
self._run_test(sf)
def test_cat_dict_1(self):
sf = tc.SFrame({
'cat1': [str(i) for i in range(500)],
'dict2': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100
})
self._run_test(sf)
def test_numeric_1(self):
sf = tc.SFrame({
'num1' : [1,2,3.5,4,5] * 100,
'num2' : [1,2,3.5,4,5] * 100,
'num3' : [1,2,3.5,4,5] * 100
})
self._run_test(sf)
def test_numeric_2(self):
sf = tc.SFrame({
'num1' : [1,2,3.5,4,5] * 100,
'num2' : [1,2,3.5,4,5] * 100,
'vect' : [[1,2,3.5,4,5]] * 500
})
self._run_test(sf)
def test_numeric_dict(self):
sf = tc.SFrame({
'num1' : [1,2,3.5,4,5] * 100,
'num2' : [1,2,3.5,4,5] * 100,
'vect' : [[1,2,3.5,4,5]] * 500,
'dict[2]': [{'1' : 1, '2' : 3.2},
{'1' : 3.1,},
{1 : 1, 'b' : 2},
{1 : 1, 'b' : 3},
{'a' : 2, 'b' : 3} ] * 100,
})
self._run_test(sf)
class PythonDecisionTreeTest(unittest.TestCase):
@classmethod
def setUpClass(self):
sf = tc.SFrame({
'cat1': ['1', '1', '2', '2', '2'] * 100,
'cat2': ['1', '3', '3', '1', '1'] * 100,
'target': ['1', '2', '1', '2', '1'] * 100,
})
model = tc.classifier.boosted_trees_classifier.create(sf, 'target',
validation_set = None, max_depth=2)
tree = DecisionTree.from_model(model)
self.tree = tree
def test_repr(self):
# Arrange
tree = self.tree
# Act
out = tree.__repr__()
# Assert
self.assertEquals(type(out), str)
def test_to_json(self):
# Arrange
tree = self.tree
# Act
out = tree.to_json()
# Assert
self.assertEquals(type(out), dict)
with self.assertRaises(TypeError):
score = tree.to_json("foo")
with self.assertRaises(ToolkitError):
score = tree.to_json(-1)
def get_prediction_score(self):
# Arrange
tree = self.tree
# Act
out_1 = tree.get_prediction_score(0)
out_2 = tree.get_prediction_score(5)
# Assert
self.assertEquals(out_1, None)
self.assertEquals(type(out_2), float)
with self.assertRaises(TypeError):
score = tree.get_prediction_score("foo")
with self.assertRaises(ToolkitError):
score = tree.get_prediction_score(-1)
def get_prediction_path(self, node_id):
# Arrange
tree = self.tree
# Act
out_1 = tree.get_prediction_path(0)
out_2 = tree.get_prediction_path(5)
# Assert
self.assertEquals(type(out_1), list)
self.assertEquals(type(out_2), list)
with self.assertRaises(TypeError):
score = tree.get_prediction_path("foo")
with self.assertRaises(ToolkitError):
score = tree.get_prediction_path(-1)
def root(self):
# Arrange
tree = self.tree
# Act
out = tree.root
# Assert
self.assertEquals(type(out), Node)
def test_getitem(self):
# Arrange
tree = self.tree
# Act & Assert
for i in range(tree.num_nodes):
self.assertEquals(type(tree[i]), Node)
def test_iter(self):
# Arrange
tree = self.tree
# Act & Assert
for node in tree:
self.assertEquals(type(node), Node)
| 36.975556
| 88
| 0.446
|
101c8c359c4788edeb74ea0188c24e828c9a9464
| 1,567
|
py
|
Python
|
applications/cli/commands/predict/view.py
|
starcell/deepcell-ncluster
|
885d6b3678c1413ecdd8681c08306402484706e9
|
[
"Apache-2.0"
] | null | null | null |
applications/cli/commands/predict/view.py
|
starcell/deepcell-ncluster
|
885d6b3678c1413ecdd8681c08306402484706e9
|
[
"Apache-2.0"
] | null | null | null |
applications/cli/commands/predict/view.py
|
starcell/deepcell-ncluster
|
885d6b3678c1413ecdd8681c08306402484706e9
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import click
from platform_resources.run import RunKinds
from util.cli_state import common_options
from util.aliascmd import AliasCmd
from cli_text_consts import PredictViewCmdTexts
import commands.experiment.view
@click.command(
help=PredictViewCmdTexts.HELP,
short_help=PredictViewCmdTexts.SHORT_HELP,
cls=AliasCmd,
alias='v',
options_metavar='[options]')
@click.argument("prediction_instance_name")
@click.option('-u', '--username', help=PredictViewCmdTexts.HELP_U)
@click.pass_context
@common_options()
def view(ctx: click.Context, prediction_instance_name: str, username: str):
"""
Displays details of a prediction instance.
"""
commands.experiment.view.Texts = PredictViewCmdTexts # type: ignore
ctx.invoke(commands.experiment.view.view, experiment_name=prediction_instance_name, username=username,
tensorboard=False, accepted_run_kinds=(RunKinds.INFERENCE.value,))
| 35.613636
| 107
| 0.745373
|
4bd5fb9d27a17bf84c555e775c0b57b1514dde4a
| 620
|
py
|
Python
|
setup.py
|
michelepagot/ciaomuloni
|
8f7c2187d223bb3d26aeeafe180d39841094d1ad
|
[
"MIT"
] | null | null | null |
setup.py
|
michelepagot/ciaomuloni
|
8f7c2187d223bb3d26aeeafe180d39841094d1ad
|
[
"MIT"
] | null | null | null |
setup.py
|
michelepagot/ciaomuloni
|
8f7c2187d223bb3d26aeeafe180d39841094d1ad
|
[
"MIT"
] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="ciaomuloni",
version="0.0.1",
author="Michele Pagot",
author_email="michele.pagot@hotmail.it",
description="A small example package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/michelepagot/ciaomuloni",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
| 29.52381
| 53
| 0.670968
|
950a65299e4aa9b7b0acdcfb163917b3dfdbdb2a
| 6,997
|
py
|
Python
|
blitzdb/backends/file/queries.py
|
cwoebker/blitzdb
|
b2e8c1459d322d801a58be5e12ba6f4a7ce9ce17
|
[
"MIT"
] | null | null | null |
blitzdb/backends/file/queries.py
|
cwoebker/blitzdb
|
b2e8c1459d322d801a58be5e12ba6f4a7ce9ce17
|
[
"MIT"
] | null | null | null |
blitzdb/backends/file/queries.py
|
cwoebker/blitzdb
|
b2e8c1459d322d801a58be5e12ba6f4a7ce9ce17
|
[
"MIT"
] | null | null | null |
"""Query operators for the file backend."""
import operator
import re
import six
if six.PY3:
from functools import reduce
def boolean_operator_query(boolean_operator):
"""Generate boolean operator checking function."""
def _boolean_operator_query(expressions):
"""Apply boolean operator to expressions."""
def _apply_boolean_operator(query_function, expressions=expressions):
"""Return if expressions with boolean operator are satisfied."""
compiled_expressions = [compile_query(e) for e in expressions]
return reduce(
boolean_operator,
[e(query_function) for e in compiled_expressions]
)
return _apply_boolean_operator
return _boolean_operator_query
def filter_query(key, expression):
"""Filter documents with a key that satisfies an expression."""
if (isinstance(expression, dict)
and len(expression) == 1
and list(expression.keys())[0].startswith('$')):
compiled_expression = compile_query(expression)
elif callable(expression):
def _filter(index, expression=expression):
result = [store_key
for value, store_keys in index.get_index().items()
if expression(value)
for store_key in store_keys]
return result
compiled_expression = _filter
else:
compiled_expression = expression
def _get(query_function, key=key, expression=compiled_expression):
"""Get document key and check against expression."""
return query_function(key, expression)
return _get
def not_query(expression):
"""Apply logical not operator to expression."""
compiled_expression = compile_query(expression)
def _not(index, expression=compiled_expression):
"""Return store key for documents that satisfy expression."""
all_keys = index.get_all_keys()
returned_keys = expression(index)
return [key for key in all_keys if key not in returned_keys]
return _not
def comparison_operator_query(comparison_operator):
"""Generate comparison operator checking function."""
def _comparison_operator_query(expression):
"""Apply binary operator to expression."""
def _apply_comparison_operator(index, expression=expression):
"""Return store key for documents that satisfy expression."""
ev = expression() if callable(expression) else expression
return [
store_key
for value, store_keys
in index.get_index().items()
if comparison_operator(value, ev)
for store_key in store_keys
]
return _apply_comparison_operator
return _comparison_operator_query
def exists_query(expression):
"""Check that documents have a key that satisfies expression."""
def _exists(index, expression=expression):
"""Return store key for documents that satisfy expression."""
ev = expression() if callable(expression) else expression
if ev:
return [
store_key
for store_keys
in index.get_index().values()
for store_key in store_keys
]
else:
return index.get_undefined_keys()
return _exists
def regex_query(expression):
"""Apply regular expression to result of expression."""
def _regex(index, expression=expression):
"""Return store key for documents that satisfy expression."""
pattern = re.compile(expression)
return [
store_key
for value, store_keys
in index.get_index().items()
if (isinstance(value, six.string_types)
and re.match(pattern, value))
for store_key in store_keys
]
return _regex
def all_query(expression):
"""Match arrays that contain all elements in the query."""
def _all(index, expression=expression):
"""Return store key for documents that satisfy expression."""
ev = expression() if callable(expression) else expression
try:
iter(ev)
except TypeError:
raise AttributeError('$all argument must be an iterable!')
hashed_ev = [index.get_hash_for(v) for v in ev]
store_keys = set([])
if len(hashed_ev) == 0:
return []
store_keys = set(index.get_keys_for(hashed_ev[0]))
for value in hashed_ev[1:]:
store_keys &= set(index.get_keys_for(value))
return list(store_keys)
return _all
def elemMatch_query(expression):
"""Select documents if element in array field matches all conditions."""
def _elemMatch(index, expression=expression):
"""Raise exception since this operator is not implemented yet."""
raise ValueError(
'$elemMatch query is currently not supported by file backend!')
return _elemMatch
def in_query(expression):
"""Match any of the values that exist in an array specified in query."""
def _in(index, expression=expression):
"""Return store key for documents that satisfy expression."""
ev = expression() if callable(expression) else expression
try:
iter(ev)
except TypeError:
raise AttributeError('$in argument must be an iterable!')
hashed_ev = [index.get_hash_for(v) for v in ev]
store_keys = set()
for value in hashed_ev:
store_keys |= set(index.get_keys_for(value))
return list(store_keys)
return _in
def compile_query(query):
"""Compile each expression in query recursively."""
if isinstance(query, dict):
expressions = []
for key, value in query.items():
if key.startswith('$'):
if key not in query_funcs:
raise AttributeError('Invalid operator: {0}'.format(key))
expressions.append(query_funcs[key](value))
else:
expressions.append(filter_query(key, value))
if len(expressions) > 1:
return boolean_operator_query(operator.and_)(expressions)
else:
return (
expressions[0]
if len(expressions)
else lambda query_function: query_function(None, None)
)
else:
return query
query_funcs = {
'$regex': regex_query,
'$exists': exists_query,
'$and': boolean_operator_query(operator.and_),
'$all': all_query,
'$elemMatch': elemMatch_query,
'$or': boolean_operator_query(operator.or_),
'$gte': comparison_operator_query(operator.ge),
'$lte': comparison_operator_query(operator.le),
'$gt': comparison_operator_query(operator.gt),
'$lt': comparison_operator_query(operator.lt),
'$ne': comparison_operator_query(operator.ne),
'$not': not_query,
'$in': in_query,
}
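# Hedged illustration (not part of the module; the index / query_function
# wiring is supplied by the file backend's query machinery): a MongoDB-style
# query dict compiles into a nested function, e.g.
#
#   compiled = compile_query({'age': {'$gte': 18}, 'name': {'$regex': r'^A'}})
#
# `compiled` then takes the backend's query_function, which resolves each
# (key, expression) pair against the corresponding index.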
| 33.966019
| 77
| 0.630556
|
eec0adde50a20fe01d4a077d64f78bd74475dd61
| 1,225
|
py
|
Python
|
ddtrace/contrib/cassandra/__init__.py
|
mykytarudenko/new-project
|
e06a912382239739dd3f93b54d545b9506102372
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
ddtrace/contrib/cassandra/__init__.py
|
mykytarudenko/new-project
|
e06a912382239739dd3f93b54d545b9506102372
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 1
|
2021-01-27T04:53:24.000Z
|
2021-01-27T04:53:24.000Z
|
ddtrace/contrib/cassandra/__init__.py
|
mykytarudenko/new-project
|
e06a912382239739dd3f93b54d545b9506102372
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
"""Instrument Cassandra to report Cassandra queries.
``patch_all`` will automatically patch your Cluster instance to make it work.
::
from ddtrace import Pin, patch
from cassandra.cluster import Cluster
# If not patched yet, you can patch cassandra specifically
patch(cassandra=True)
# This will report spans with the default instrumentation
cluster = Cluster(contact_points=["127.0.0.1"], port=9042)
session = cluster.connect("my_keyspace")
# Example of instrumented query
session.execute("select id from my_table limit 10;")
# Use a pin to specify metadata related to this cluster
cluster = Cluster(contact_points=['10.1.1.3', '10.1.1.4', '10.1.1.5'], port=9042)
Pin.override(cluster, service='cassandra-backend')
session = cluster.connect("my_keyspace")
session.execute("select id from my_table limit 10;")
"""
from ...utils.importlib import require_modules
required_modules = ['cassandra.cluster']
with require_modules(required_modules) as missing_modules:
if not missing_modules:
from .session import get_traced_cassandra
from .session import patch
__all__ = [
'get_traced_cassandra',
'patch',
]
| 33.108108
| 85
| 0.702041
|
179d2ab97f67d755875df51fa69f11d27a53a39c
| 373
|
py
|
Python
|
lang/Python/amb-2.py
|
ethansaxenian/RosettaDecode
|
8ea1a42a5f792280b50193ad47545d14ee371fb7
|
[
"MIT"
] | null | null | null |
lang/Python/amb-2.py
|
ethansaxenian/RosettaDecode
|
8ea1a42a5f792280b50193ad47545d14ee371fb7
|
[
"MIT"
] | null | null | null |
lang/Python/amb-2.py
|
ethansaxenian/RosettaDecode
|
8ea1a42a5f792280b50193ad47545d14ee371fb7
|
[
"MIT"
] | null | null | null |
# joins :: String -> String -> Bool
def joins(a, b):
return a[-1] == b[0]
print(
[
' '.join([w1, w2, w3, w4])
for w1 in ['the', 'that', 'a']
for w2 in ['frog', 'elephant', 'thing']
for w3 in ['walked', 'treaded', 'grows']
for w4 in ['slowly', 'quickly']
if joins(w1, w2) and joins(w2, w3) and joins(w3, w4)
]
)
| 23.3125
| 60
| 0.474531
|
d8405c1c3dee676d0f5738eb10bb599a0c2b1d72
| 4,202
|
py
|
Python
|
zuul.d/octavia/controller/worker/amphora_rate_limit.py
|
yi-cloud/octavia
|
b7f5cfa4c3c454925a90c24984049539228806d7
|
[
"Apache-2.0"
] | null | null | null |
zuul.d/octavia/controller/worker/amphora_rate_limit.py
|
yi-cloud/octavia
|
b7f5cfa4c3c454925a90c24984049539228806d7
|
[
"Apache-2.0"
] | null | null | null |
zuul.d/octavia/controller/worker/amphora_rate_limit.py
|
yi-cloud/octavia
|
b7f5cfa4c3c454925a90c24984049539228806d7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import time
from oslo_config import cfg
from oslo_log import log as logging
from octavia.common import exceptions
from octavia.db import api as db_apis
from octavia.db import repositories as repo
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.import_group('haproxy_amphora', 'octavia.common.config')
class AmphoraBuildRateLimit(object):
def __init__(self):
self.amp_build_slots_repo = repo.AmphoraBuildSlotsRepository()
self.amp_build_req_repo = repo.AmphoraBuildReqRepository()
def add_to_build_request_queue(self, amphora_id, build_priority):
self.amp_build_req_repo.add_to_build_queue(
db_apis.get_session(),
amphora_id=amphora_id,
priority=build_priority)
LOG.debug("Added build request for %s to the queue", amphora_id)
self.wait_for_build_slot(amphora_id)
def has_build_slot(self):
build_rate_limit = CONF.haproxy_amphora.build_rate_limit
session = db_apis.get_session()
with session.begin(subtransactions=True):
used_build_slots = (self.amp_build_slots_repo
.get_used_build_slots_count(session))
available_build_slots = build_rate_limit - used_build_slots
LOG.debug("Available build slots %d", available_build_slots)
return available_build_slots > 0
def has_highest_priority(self, amphora_id):
session = db_apis.get_session()
with session.begin(subtransactions=True):
highest_priority_build_req = (
self.amp_build_req_repo.get_highest_priority_build_req(
session))
LOG.debug("Highest priority req: %s, Current req: %s",
highest_priority_build_req, amphora_id)
return amphora_id == highest_priority_build_req
def update_build_status_and_available_build_slots(self, amphora_id):
session = db_apis.get_session()
with session.begin(subtransactions=True):
self.amp_build_slots_repo.update_count(session, action='increment')
self.amp_build_req_repo.update_req_status(session, amphora_id)
def remove_from_build_req_queue(self, amphora_id):
session = db_apis.get_session()
with session.begin(subtransactions=True):
self.amp_build_req_repo.delete(session, amphora_id=amphora_id)
self.amp_build_slots_repo.update_count(session, action='decrement')
LOG.debug("Removed request for %s from queue"
" and released the build slot", amphora_id)
def remove_all_from_build_req_queue(self):
session = db_apis.get_session()
with session.begin(subtransactions=True):
self.amp_build_req_repo.delete_all(session)
self.amp_build_slots_repo.update_count(session, action='reset')
LOG.debug("Removed all the build requests and "
"released the build slots")
def wait_for_build_slot(self, amphora_id):
LOG.debug("Waiting for a build slot")
for i in range(CONF.haproxy_amphora.build_active_retries):
if (self.has_build_slot() and
self.has_highest_priority(amphora_id)):
self.update_build_status_and_available_build_slots(amphora_id)
return
time.sleep(CONF.haproxy_amphora.build_retry_interval)
self.remove_all_from_build_req_queue()
raise exceptions.ComputeBuildQueueTimeoutException()
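# --- Illustrative sketch (not part of the original module) ------------------
# The class above serialises amphora builds by polling two conditions: a free
# build slot (bounded by CONF.haproxy_amphora.build_rate_limit) and being the
# highest-priority request in the build queue.  The standalone toy below, with
# hypothetical names such as ToyRateLimiter and in-memory stand-ins for the
# database repositories, only mimics that polling loop so the control flow can
# be followed without an Octavia deployment.
import heapq
import time


class ToyRateLimiter(object):
    def __init__(self, rate_limit, retries=10, interval=0.01):
        self.rate_limit = rate_limit
        self.used_slots = 0
        self.queue = []            # (priority, amphora_id); lower number wins here
        self.retries = retries
        self.interval = interval

    def add_request(self, amphora_id, priority):
        heapq.heappush(self.queue, (priority, amphora_id))

    def wait_for_build_slot(self, amphora_id):
        for _ in range(self.retries):
            has_slot = self.used_slots < self.rate_limit
            has_priority = bool(self.queue) and self.queue[0][1] == amphora_id
            if has_slot and has_priority:
                heapq.heappop(self.queue)
                self.used_slots += 1
                return True
            time.sleep(self.interval)
        return False


if __name__ == "__main__":
    limiter = ToyRateLimiter(rate_limit=1)
    limiter.add_request("amp-1", priority=40)
    limiter.add_request("amp-2", priority=20)
    print(limiter.wait_for_build_slot("amp-2"))   # True: slot free, top priority
    print(limiter.wait_for_build_slot("amp-1"))   # False: the only slot is taken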
| 43.319588
| 80
| 0.684912
|
a0c227ff10bd6aeb91199f91d7015088f839ae61
| 9,629
|
py
|
Python
|
lib/googlecloudsdk/command_lib/certificate_manager/resource_args.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 2
|
2019-11-10T09:17:07.000Z
|
2019-12-18T13:44:08.000Z
|
lib/googlecloudsdk/command_lib/certificate_manager/resource_args.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/command_lib/certificate_manager/resource_args.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 1
|
2020-07-25T01:40:19.000Z
|
2020-07-25T01:40:19.000Z
|
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource flags for Certificate Manager commands."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.calliope.concepts import deps
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.command_lib.util.concepts import presentation_specs
def CertificateMapAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='map', help_text='The certificate map for the {resource}.')
def CertificateMapEntryAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='entry', help_text='The certificate map entry for the {resource}.')
def CertificateAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='certificate', help_text='The certificate for the {resource}.')
def LocationAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='location',
help_text='The Cloud location for the {resource}.',
fallthroughs=[
deps.Fallthrough(lambda: 'global', 'location is always global')
])
def OperationAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='operation',
help_text='Certificate Manager operation for the {resource}.')
def GetCertificateMapResourceSpec():
return concepts.ResourceSpec(
'certificatemanager.projects.locations.certificateMaps',
resource_name='certificate map',
certificateMapsId=CertificateMapAttributeConfig(),
locationsId=LocationAttributeConfig(),
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
disable_auto_completers=False)
def GetCertificateMapEntryResourceSpec():
return concepts.ResourceSpec(
'certificatemanager.projects.locations.certificateMaps.certificateMapEntries',
resource_name='certificate map entry',
certificateMapEntriesId=CertificateMapEntryAttributeConfig(),
certificateMapsId=CertificateMapAttributeConfig(),
locationsId=LocationAttributeConfig(),
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
disable_auto_completers=False)
def GetCertificateResourceSpec():
return concepts.ResourceSpec(
'certificatemanager.projects.locations.certificates',
resource_name='certificate',
certificatesId=CertificateAttributeConfig(),
locationsId=LocationAttributeConfig(),
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
disable_auto_completers=False)
def GetLocationResourceSpec():
return concepts.ResourceSpec(
'certificatemanager.projects.locations',
resource_name='location',
locationsId=LocationAttributeConfig(),
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG)
def GetOperationResourceSpec():
return concepts.ResourceSpec(
'certificatemanager.projects.locations.operations',
resource_name='operation',
operationsId=OperationAttributeConfig(),
locationsId=LocationAttributeConfig(),
projectsId=concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG,
disable_auto_completers=False)
def _GetCertificateResourcePresentationSpec(flag,
noun,
verb,
required=True,
plural=False,
group=None):
return presentation_specs.ResourcePresentationSpec(
flag,
GetCertificateResourceSpec(),
'{} {}.'.format(noun, verb),
required=required,
plural=plural,
group=group,
flag_name_overrides={
'location': '' # location is always global so don't create a flag.
})
def _GetCertificateMapEntryResourcePresentationSpec(flag,
noun,
verb,
required=True,
plural=False,
group=None):
return presentation_specs.ResourcePresentationSpec(
flag,
GetCertificateMapEntryResourceSpec(),
'{} {}.'.format(noun, verb),
required=required,
plural=plural,
group=group,
flag_name_overrides={
'location': '' # location is always global so don't create a flag.
})
def AddCertificateMapResourceArg(parser, verb, noun=None, positional=True):
"""Add a resource argument for a Certificate Manager certificate map.
NOTE: Must be used only if it's the only resource arg in the command.
Args:
parser: the parser for the command.
verb: str, the verb to describe the resource, such as 'to update'.
noun: str, the resource; default: 'The certificate map'.
positional: bool, if True, means that the map ID is a positional
arg rather than a flag.
"""
noun = noun or 'The certificate map'
concept_parsers.ConceptParser.ForResource(
'map' if positional else '--map',
GetCertificateMapResourceSpec(),
'{} {}.'.format(noun, verb),
required=True,
flag_name_overrides={
'location': '' # location is always global so don't create a flag.
}).AddToParser(parser)
def AddCertificateMapEntryResourceArg(parser, verb, noun=None, positional=True):
"""Add a resource argument for a Certificate Manager certificate map entry.
NOTE: Must be used only if it's the only resource arg in the command.
Args:
parser: the parser for the command.
verb: str, the verb to describe the resource, such as 'to update'.
      noun: str, the resource; default: 'The certificate map entry'.
positional: bool, if True, means that the map ID is a positional arg rather
than a flag.
"""
noun = noun or 'The certificate map entry'
concept_parsers.ConceptParser([
_GetCertificateMapEntryResourcePresentationSpec(
'entry' if positional else '--entry', noun, verb),
]).AddToParser(parser)
def AddCertificateMapEntryAndCertificatesResourceArgs(parser,
entry_verb,
entry_noun=None,
cert_verb=None,
cert_noun=None,
cert_group=None):
"""Add a resource argument for a Certificate Manager certificate map entry and certificates.
NOTE: Must be used only if these are the only resource args in the command.
Args:
parser: the parser for the command.
entry_verb: str, the verb to describe the entry, such as 'to update'.
entry_noun: str, the entry resource; default: 'The certificate map entry'.
cert_verb: str, the verb to describe the cert, default: 'to be attached to
the entry'.
cert_noun: str, the certificate resources; default: 'The certificates'.
cert_group: args group certificates should belong to.
"""
entry_noun = entry_noun or 'The certificate map entry'
cert_noun = cert_noun or 'The certificates'
cert_verb = cert_verb or 'to be attached to the entry'
concept_parsers.ConceptParser([
_GetCertificateMapEntryResourcePresentationSpec('entry', entry_noun,
entry_verb),
_GetCertificateResourcePresentationSpec(
'--certificates',
cert_noun,
cert_verb,
required=False,
plural=True,
group=cert_group),
]).AddToParser(parser)
def AddCertificateResourceArg(parser, verb, noun=None, positional=True):
"""Add a resource argument for a Certificate Manager certificate.
NOTE: Must be used only if it's the only resource arg in the command.
Args:
parser: the parser for the command.
verb: str, the verb to describe the resource, such as 'to update'.
noun: str, the resource; default: 'The certificate'.
positional: bool, if True, means that the certificate ID is a positional arg
rather than a flag.
"""
noun = noun or 'The certificate'
concept_parsers.ConceptParser([
_GetCertificateResourcePresentationSpec(
'certificate' if positional else '--certificate', noun, verb),
]).AddToParser(parser)
def AddLocationResourceArg(parser, verb=''):
"""Add a resource argument for a cloud location.
NOTE: Must be used only if it's the only resource arg in the command.
Args:
parser: the parser for the command.
verb: str, the verb to describe the resource, such as 'to update'.
"""
concept_parsers.ConceptParser.ForResource(
'--location',
GetLocationResourceSpec(),
'The Cloud location {}.'.format(verb),
required=True,
flag_name_overrides={
'location': '' # location is always global so don't create a flag.
}).AddToParser(parser)
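# --- Illustrative sketch (not part of the original module) ------------------
# The helpers above register the same resource either as a positional argument
# ('map') or as a flag ('--map'), and hide the location flag because it always
# falls through to 'global'.  The snippet below is only a loose analogy built
# on the standard-library argparse module (it does not use the calliope
# concepts framework) to show the positional-vs-flag choice.
import argparse


def _add_map_arg(parser, positional=True):
    name = 'map' if positional else '--map'
    parser.add_argument(name, help='The certificate map to operate on.')


if __name__ == '__main__':
    describe = argparse.ArgumentParser(prog='describe')
    _add_map_arg(describe, positional=True)
    print(describe.parse_args(['my-map']))           # Namespace(map='my-map')

    update = argparse.ArgumentParser(prog='update')
    _add_map_arg(update, positional=False)
    print(update.parse_args(['--map', 'my-map']))    # Namespace(map='my-map')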
| 37.760784
| 94
| 0.672552
|
99ac054760433bf5b848263e3cff5bd6e28611e4
| 131
|
py
|
Python
|
constants.py
|
abkraynak/snake
|
2cbc24a589ff8099546c152ebbc7e7c93709bf93
|
[
"MIT"
] | null | null | null |
constants.py
|
abkraynak/snake
|
2cbc24a589ff8099546c152ebbc7e7c93709bf93
|
[
"MIT"
] | null | null | null |
constants.py
|
abkraynak/snake
|
2cbc24a589ff8099546c152ebbc7e7c93709bf93
|
[
"MIT"
] | null | null | null |
BOXSIZE = 20
CLOCK = 20
WIN_HGT = 400
WIN_WID = 400
BLACK = (0, 0, 0)
BLUE = (0, 0, 255)
RED = (255, 0, 0)
WHITE = (255, 255, 255)
| 14.555556
| 23
| 0.572519
|
23aef1017cf4c7eca2c62975a3dc9e50bf056f46
| 19,090
|
py
|
Python
|
madminer/ml/score.py
|
siyuchen95/madminer
|
dfcbd7ee26c47dd294610c195fafce15f74c10eb
|
[
"MIT"
] | 2
|
2020-09-09T20:58:31.000Z
|
2020-09-10T01:35:35.000Z
|
madminer/ml/score.py
|
siyuchen95/madminer
|
dfcbd7ee26c47dd294610c195fafce15f74c10eb
|
[
"MIT"
] | null | null | null |
madminer/ml/score.py
|
siyuchen95/madminer
|
dfcbd7ee26c47dd294610c195fafce15f74c10eb
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function
import logging
import numpy as np
from collections import OrderedDict
from ..utils.ml.models.score import DenseLocalScoreModel
from ..utils.ml.eval import evaluate_local_score_model
from ..utils.ml.utils import get_optimizer, get_loss
from ..utils.various import load_and_check, shuffle, restrict_samplesize
from ..utils.various import separate_information_blocks
from ..utils.ml.trainer import LocalScoreTrainer
from .base import Estimator, TheresAGoodReasonThisDoesntWork
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
logger = logging.getLogger(__name__)
class ScoreEstimator(Estimator):
""" A neural estimator of the score evaluated at a fixed reference hypothesis as a function of the
observation x.
Parameters
----------
features : list of int or None, optional
Indices of observables (features) that are used as input to the neural networks. If None, all observables
are used. Default value: None.
n_hidden : tuple of int, optional
Units in each hidden layer in the neural networks. If method is 'nde' or 'scandal', this refers to the
setup of each individual MADE layer. Default value: (100,).
activation : {'tanh', 'sigmoid', 'relu'}, optional
Activation function. Default value: 'tanh'.
"""
def __init__(self, features=None, n_hidden=(100,), activation="tanh", dropout_prob=0.0):
super(ScoreEstimator, self).__init__(features, n_hidden, activation, dropout_prob)
self.nuisance_profile_matrix = None
self.nuisance_project_matrix = None
self.nuisance_mode_default = "keep"
def train(
self,
method,
x,
t_xz,
x_val=None,
t_xz_val=None,
optimizer="amsgrad",
n_epochs=50,
batch_size=128,
initial_lr=0.001,
final_lr=0.0001,
nesterov_momentum=None,
validation_split=0.25,
early_stopping=True,
scale_inputs=True,
shuffle_labels=False,
limit_samplesize=None,
memmap=False,
verbose="some",
n_workers=8,
clip_gradient=None,
early_stopping_patience=None,
):
"""
Trains the network.
Parameters
----------
method : str
            The inference method used for training. Currently the allowed values are 'sally' and 'sallino', but at
            the training stage they are identical, so right now it does not matter which one you use.
x : ndarray or str
Path to an unweighted sample of observations, as saved by the `madminer.sampling.SampleAugmenter` functions.
Required for all inference methods.
t_xz : ndarray or str
Joint scores at the reference hypothesis, or filename of a pickled numpy array.
optimizer : {"adam", "amsgrad", "sgd"}, optional
Optimization algorithm. Default value: "amsgrad".
n_epochs : int, optional
Number of epochs. Default value: 50.
batch_size : int, optional
Batch size. Default value: 128.
initial_lr : float, optional
Learning rate during the first epoch, after which it exponentially decays to final_lr. Default value:
0.001.
final_lr : float, optional
Learning rate during the last epoch. Default value: 0.0001.
nesterov_momentum : float or None, optional
If trainer is "sgd", sets the Nesterov momentum. Default value: None.
validation_split : float or None, optional
Fraction of samples used for validation and early stopping (if early_stopping is True). If None, the entire
sample is used for training and early stopping is deactivated. Default value: 0.25.
early_stopping : bool, optional
Activates early stopping based on the validation loss (only if validation_split is not None). Default value:
True.
scale_inputs : bool, optional
Scale the observables to zero mean and unit variance. Default value: True.
shuffle_labels : bool, optional
If True, the labels (`y`, `r_xz`, `t_xz`) are shuffled, while the observations (`x`) remain in their
            normal order. This serves as a closure test, in particular as a cross-check against overfitting: an estimator
            trained with shuffle_labels=True should predict likelihood ratios around 1 and scores around 0.
limit_samplesize : int or None, optional
If not None, only this number of samples (events) is used to train the estimator. Default value: None.
memmap : bool, optional.
If True, training files larger than 1 GB will not be loaded into memory at once. Default value: False.
        verbose : {"all", "many", "some", "few", "none"}, optional
Determines verbosity of training. Default value: "some".
Returns
-------
None
"""
if method not in ["sally", "sallino"]:
logger.warning("Method %s not allowed for score estimators. Using 'sally' instead.", method)
method = "sally"
logger.info("Starting training")
logger.info(" Batch size: %s", batch_size)
logger.info(" Optimizer: %s", optimizer)
logger.info(" Epochs: %s", n_epochs)
logger.info(" Learning rate: %s initially, decaying to %s", initial_lr, final_lr)
if optimizer == "sgd":
logger.info(" Nesterov momentum: %s", nesterov_momentum)
logger.info(" Validation split: %s", validation_split)
logger.info(" Early stopping: %s", early_stopping)
logger.info(" Scale inputs: %s", scale_inputs)
logger.info(" Shuffle labels %s", shuffle_labels)
if limit_samplesize is None:
logger.info(" Samples: all")
else:
logger.info(" Samples: %s", limit_samplesize)
# Load training data
logger.info("Loading training data")
memmap_threshold = 1.0 if memmap else None
x = load_and_check(x, memmap_files_larger_than_gb=memmap_threshold)
t_xz = load_and_check(t_xz, memmap_files_larger_than_gb=memmap_threshold)
# Infer dimensions of problem
n_samples = x.shape[0]
n_observables = x.shape[1]
n_parameters = t_xz.shape[1]
logger.info("Found %s samples with %s parameters and %s observables", n_samples, n_parameters, n_observables)
# Limit sample size
if limit_samplesize is not None and limit_samplesize < n_samples:
logger.info("Only using %s of %s training samples", limit_samplesize, n_samples)
x, t_xz = restrict_samplesize(limit_samplesize, x, t_xz)
# Validation data
external_validation = x_val is not None and t_xz_val is not None
if external_validation:
x_val = load_and_check(x_val, memmap_files_larger_than_gb=memmap_threshold)
t_xz_val = load_and_check(t_xz_val, memmap_files_larger_than_gb=memmap_threshold)
logger.info("Found %s separate validation samples", x_val.shape[0])
assert x_val.shape[1] == n_observables
assert t_xz_val.shape[1] == n_parameters
# Scale features
if scale_inputs:
self.initialize_input_transform(x, overwrite=False)
x = self._transform_inputs(x)
if external_validation:
x_val = self._transform_inputs(x_val)
else:
self.initialize_input_transform(x, False, overwrite=False)
# Shuffle labels
if shuffle_labels:
logger.info("Shuffling labels")
logger.warning("Are you sure you want this?")
t_xz = shuffle(t_xz)
# Features
if self.features is not None:
x = x[:, self.features]
logger.info("Only using %s of %s observables", x.shape[1], n_observables)
n_observables = x.shape[1]
if external_validation:
x_val = x_val[:, self.features]
# Check consistency of input with model
if self.n_observables is None:
self.n_observables = n_observables
if self.n_parameters is None:
self.n_parameters = n_parameters
if n_parameters != self.n_parameters:
raise RuntimeError(
"Number of parameters does not match model: {} vs {}".format(n_parameters, self.n_parameters)
)
if n_observables != self.n_observables:
raise RuntimeError(
"Number of observables does not match model: {} vs {}".format(n_observables, self.n_observables)
)
# Data
data = self._package_training_data(x, t_xz)
if external_validation:
data_val = self._package_training_data(x_val, t_xz_val)
else:
data_val = None
# Create model
if self.model is None:
logger.info("Creating model")
self._create_model()
# Losses
loss_functions, loss_labels, loss_weights = get_loss(method, None)
# Optimizer
opt, opt_kwargs = get_optimizer(optimizer, nesterov_momentum)
# Train model
logger.info("Training model")
trainer = LocalScoreTrainer(self.model, n_workers=n_workers)
result = trainer.train(
data=data,
data_val=data_val,
loss_functions=loss_functions,
loss_weights=loss_weights,
loss_labels=loss_labels,
epochs=n_epochs,
batch_size=batch_size,
optimizer=opt,
optimizer_kwargs=opt_kwargs,
initial_lr=initial_lr,
final_lr=final_lr,
validation_split=validation_split,
early_stopping=early_stopping,
verbose=verbose,
clip_gradient=clip_gradient,
early_stopping_patience=early_stopping_patience,
)
return result
def set_nuisance(self, fisher_information, parameters_of_interest):
"""
Prepares the calculation of profiled scores, see https://arxiv.org/pdf/1903.01473.pdf.
Parameters
----------
fisher_information : ndarray
            Fisher information with shape `(n_parameters, n_parameters)`.
parameters_of_interest : list of int
            List of int, with 0 <= parameters_of_interest[i] < n_parameters. Denotes which parameters are kept in the
profiling, and their new order.
Returns
-------
None
"""
if fisher_information.shape != (self.n_parameters, self.n_parameters):
raise ValueError(
"Fisher information has wrong shape {}, expected {}".format(
fisher_information.shape, (self.n_parameters, self.n_parameters)
)
)
n_parameters_of_interest = len(parameters_of_interest)
# Separate Fisher information parts
nuisance_parameters, information_phys, information_mix, information_nuisance = separate_information_blocks(
fisher_information, parameters_of_interest
)
# Calculate projection matrix
self.nuisance_project_matrix = np.zeros((n_parameters_of_interest, self.n_parameters)) # (n_phys, n_all)
for theta_new, theta_old in enumerate(parameters_of_interest):
self.nuisance_project_matrix[theta_new, theta_old] = 1.0
logger.debug("Nuisance projection matrix:/n%s", self.nuisance_project_matrix)
# Calculate profiling matrix
inverse_information_nuisance = np.linalg.inv(information_nuisance) # (n_nuisance, n_nuisance)
profiling_matrix = -information_mix.T.dot(inverse_information_nuisance) # (n_phys, n_nuisance)
self.nuisance_profile_matrix = np.copy(self.nuisance_project_matrix) # (n_phys, n_all)
for theta_new, theta_old in enumerate(parameters_of_interest):
for nuis_new, nuis_old in enumerate(nuisance_parameters):
self.nuisance_profile_matrix[theta_new, nuis_old] += profiling_matrix[theta_new, nuis_new]
logger.debug("Nuisance profiling matrix:/n%s", self.nuisance_project_matrix)
def evaluate_score(self, x, theta=None, nuisance_mode="auto"):
"""
Evaluates the score.
Parameters
----------
x : str or ndarray
Observations, or filename of a pickled numpy array.
theta: None or ndarray, optional
Has no effect for ScoreEstimator. Introduced just for conformity with other Estimators.
nuisance_mode : {"auto", "keep", "profile", "project"}
Decides how nuisance parameters are treated. If nuisance_mode is "auto", the returned score is the (n+k)-
dimensional score in the space of n parameters of interest and k nuisance parameters if `set_profiling`
has not been called, and the n-dimensional profiled score in the space of the parameters of interest
if it has been called. For "keep", the returned score is always (n+k)-dimensional. For "profile", it is
the n-dimensional profiled score. For "project", it is the n-dimensional projected score, i.e. ignoring
the nuisance parameters.
Returns
-------
score : ndarray
Estimated score with shape `(n_observations, n_parameters)`.
"""
if self.model is None:
raise ValueError("No model -- train or load model before evaluating it!")
if nuisance_mode == "auto":
logger.debug("Using nuisance mode %s", self.nuisance_mode_default)
nuisance_mode = self.nuisance_mode_default
# Load training data
if isinstance(x, str):
logger.debug("Loading evaluation data")
x = load_and_check(x)
# Scale observables
x = self._transform_inputs(x)
        # Restrict features
if self.features is not None:
x = x[:, self.features]
# Evaluation
logger.debug("Starting score evaluation")
t_hat = evaluate_local_score_model(model=self.model, xs=x)
        # Treatment of nuisance parameters
if nuisance_mode == "keep":
logger.debug("Keeping nuisance parameter in score")
elif nuisance_mode == "project":
if self.nuisance_project_matrix is None:
raise ValueError(
"evaluate_score() was called with nuisance_mode = project, but nuisance parameters "
"have not been set up yet. Please call set_nuisance() first!"
)
logger.debug("Projecting nuisance parameter from score")
t_hat = np.einsum("ij,xj->xi", self.nuisance_project_matrix, t_hat)
elif nuisance_mode == "profile":
if self.nuisance_profile_matrix is None:
raise ValueError(
"evaluate_score() was called with nuisance_mode = profile, but nuisance parameters "
"have not been set up yet. Please call set_nuisance() first!"
)
logger.debug("Profiling nuisance parameter from score")
t_hat = np.einsum("ij,xj->xi", self.nuisance_profile_matrix, t_hat)
else:
raise ValueError("Unknown nuisance_mode {}".format(nuisance_mode))
return t_hat
def evaluate_log_likelihood(self, *args, **kwargs):
raise TheresAGoodReasonThisDoesntWork("This estimator can only estimate the score, not the likelihood!")
def evaluate_log_likelihood_ratio(self, *args, **kwargs):
raise TheresAGoodReasonThisDoesntWork("This estimator can only estimate the score, not the likelihood ratio!")
def evaluate(self, *args, **kwargs):
return self.evaluate_score(*args, **kwargs)
def calculate_fisher_information(self, x, theta=None, weights=None, n_events=1, sum_events=True):
fisher_information = super(ScoreEstimator, self).calculate_fisher_information(
x, theta, weights, n_events, sum_events
)
return fisher_information
def save(self, filename, save_model=False):
super(ScoreEstimator, self).save(filename, save_model)
# Also save Fisher information information for profiling / projections
if self.nuisance_profile_matrix is not None and self.nuisance_project_matrix is not None:
logger.debug(
"Saving nuisance profiling / projection information to %s_nuisance_profile_matrix.npy and "
"%s_nuisance_project_matrix.npy",
filename,
filename,
)
np.save(filename + "_nuisance_profile_matrix.npy", self.nuisance_profile_matrix)
np.save(filename + "_nuisance_project_matrix.npy", self.nuisance_project_matrix)
def load(self, filename):
super(ScoreEstimator, self).load(filename)
# Load scaling
try:
self.nuisance_profile_matrix = np.load(filename + "_nuisance_profile_matrix.npy")
self.nuisance_project_matrix = np.load(filename + "_nuisance_project_matrix.npy")
logger.debug(
" Found nuisance profiling / projection matrices:\nProfiling:\n%s\nProjection:\n%s",
self.nuisance_profile_matrix,
self.nuisance_project_matrix,
)
except:
logger.debug("Did not find nuisance profiling / projection setup in %s", filename)
self.nuisance_profile_matrix = None
self.nuisance_project_matrix = None
def _create_model(self):
self.model = DenseLocalScoreModel(
n_observables=self.n_observables,
n_parameters=self.n_parameters,
n_hidden=self.n_hidden,
activation=self.activation,
dropout_prob=self.dropout_prob,
)
@staticmethod
def _package_training_data(x, t_xz):
data = OrderedDict()
data["x"] = x
data["t_xz"] = t_xz
return data
def _wrap_settings(self):
settings = super(ScoreEstimator, self)._wrap_settings()
settings["estimator_type"] = "score"
settings["nuisance_mode_default"] = self.nuisance_mode_default
return settings
def _unwrap_settings(self, settings):
super(ScoreEstimator, self)._unwrap_settings(settings)
estimator_type = str(settings["estimator_type"])
if estimator_type != "score":
raise RuntimeError("Saved model is an incompatible estimator type {}.".format(estimator_type))
try:
self.nuisance_mode_default = str(settings["nuisance_mode_default"])
except KeyError:
self.nuisance_mode_default = "keep"
logger.warning("Did not find entry nuisance_mode_default in saved model, using default 'keep'.")
| 40.359408
| 120
| 0.637926
|
4879b6d25e9bc9924a5d5a0f10250f1d909f1224
| 12,337
|
py
|
Python
|
symphony/bdk/gen/pod_model/stream_list.py
|
symphony-mariacristina/symphony-bdk-python
|
ef65762739890e826ccfaf38f7a41d61b95e7f22
|
[
"Apache-2.0"
] | null | null | null |
symphony/bdk/gen/pod_model/stream_list.py
|
symphony-mariacristina/symphony-bdk-python
|
ef65762739890e826ccfaf38f7a41d61b95e7f22
|
[
"Apache-2.0"
] | null | null | null |
symphony/bdk/gen/pod_model/stream_list.py
|
symphony-mariacristina/symphony-bdk-python
|
ef65762739890e826ccfaf38f7a41d61b95e7f22
|
[
"Apache-2.0"
] | null | null | null |
"""
Pod API
    This document refers to Symphony API calls that do not need encryption or decryption of content. - sessionToken can be obtained by calling the authenticationAPI on the symphony back end and the key manager respectively. Refer to the methods described in authenticatorAPI.yaml. - Actions are defined to be atomic, ie will succeed in their entirety or fail and have changed nothing. - If it returns a 40X status then it will have made no change to the system even if some subset of the request would have succeeded. - If this contract cannot be met for any reason then this is an error and the response code will be 50X.  # noqa: E501
The version of the OpenAPI document: 20.14.0-SNAPSHOT
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from typing import List, Union
from symphony.bdk.gen.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from symphony.bdk.gen.exceptions import ApiAttributeError
from symphony.bdk.gen.pod_model.stream_attributes import StreamAttributes
globals()['StreamAttributes'] = StreamAttributes
class StreamList(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a pod_model may have properties that are
        of type self; this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'value': ([StreamAttributes],),
}
@cached_property
def discriminator():
return None
attribute_map = {}
read_only_vars = set()
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""StreamList - a pod_model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] ([StreamAttributes]): A list of streams of which the requesting user is a member.. # noqa: E501
Keyword Args:
value ([StreamAttributes]): A list of streams of which the requesting user is a member.. # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the pod_model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value: List[StreamAttributes] = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
"""StreamList - a pod_model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] ([StreamAttributes]): A list of streams of which the requesting user is a member.. # noqa: E501
Keyword Args:
value ([StreamAttributes]): A list of streams of which the requesting user is a member.. # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the pod_model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
self = super(OpenApiModel, cls).__new__(cls)
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
return self
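# --- Illustrative sketch (not part of the generated module) -----------------
# Both constructors above accept the wrapped list of StreamAttributes either
# as the first positional argument or as the 'value' keyword.  The tiny
# standalone helper below reproduces only that extraction rule (simplified:
# the generated code additionally rejects leftover positional and keyword
# arguments) so the precedence is easy to see.
def _extract_value(*args, **kwargs):
    if 'value' in kwargs:
        return kwargs.pop('value')
    if args:
        return args[0]
    raise TypeError("value is required, but not passed in args or kwargs")


if __name__ == "__main__":
    print(_extract_value(['stream-a', 'stream-b']))         # positional form
    print(_extract_value(value=['stream-a', 'stream-b']))   # keyword form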
| 43.748227
| 637
| 0.577693
|
c30eb0501746ed89bf30970af677f25ccb7b5a99
| 3,982
|
py
|
Python
|
benchmark/startCirq1402.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startCirq1402.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startCirq1402.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=54
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=3
c.append(cirq.H.on(input_qubit[1])) # number=4
c.append(cirq.H.on(input_qubit[2])) # number=5
c.append(cirq.H.on(input_qubit[3])) # number=6
c.append(cirq.H.on(input_qubit[0])) # number=41
c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=42
c.append(cirq.H.on(input_qubit[0])) # number=43
c.append(cirq.Z.on(input_qubit[1])) # number=37
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=38
c.append(cirq.H.on(input_qubit[4])) # number=21
c.append(cirq.X.on(input_qubit[2])) # number=39
for i in range(2):
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=7
c.append(cirq.H.on(input_qubit[3])) # number=8
c.append(cirq.H.on(input_qubit[0])) # number=51
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=52
c.append(cirq.H.on(input_qubit[0])) # number=53
c.append(cirq.H.on(input_qubit[0])) # number=48
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=49
c.append(cirq.H.on(input_qubit[0])) # number=50
c.append(cirq.Z.on(input_qubit[3])) # number=46
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=47
c.append(cirq.X.on(input_qubit[4])) # number=40
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=35
c.append(cirq.H.on(input_qubit[0])) # number=17
c.append(cirq.H.on(input_qubit[1])) # number=18
c.append(cirq.H.on(input_qubit[2])) # number=19
c.append(cirq.H.on(input_qubit[3])) # number=20
c.append(cirq.X.on(input_qubit[0])) # number=9
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=29
c.append(cirq.X.on(input_qubit[1])) # number=30
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=31
c.append(cirq.X.on(input_qubit[2])) # number=11
c.append(cirq.X.on(input_qubit[1])) # number=44
c.append(cirq.X.on(input_qubit[3])) # number=12
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=24
c.append(cirq.X.on(input_qubit[0])) # number=25
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=26
c.append(cirq.X.on(input_qubit[1])) # number=14
c.append(cirq.X.on(input_qubit[2])) # number=15
c.append(cirq.X.on(input_qubit[3])) # number=16
c.append(cirq.X.on(input_qubit[1])) # number=22
c.append(cirq.Y.on(input_qubit[1])) # number=32
c.append(cirq.X.on(input_qubit[1])) # number=23
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 5
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq1402.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
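# --- Illustrative sketch (not part of the original benchmark) ---------------
# result.histogram(key='result', fold_func=bitstring) turns every repetition's
# five measured bits into a string key and counts occurrences.  The snippet
# below mimics that folding on hand-made measurement rows, so the role of
# bitstring() is visible without running the simulator.
if __name__ == '__main__':
    from collections import Counter

    fake_measurements = np.array([[0, 1, 0, 1, 1],
                                  [0, 1, 0, 1, 1],
                                  [1, 0, 0, 0, 1]], dtype=bool)
    counts = Counter(bitstring(row) for row in fake_measurements)
    print(counts)   # Counter({'01011': 2, '10001': 1})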
| 38.288462
| 77
| 0.651683
|
19bc73f518f38f06aa98ba63decb19e4dadcc244
| 459
|
py
|
Python
|
setup.py
|
philipwfowler/bashthebug
|
8b229078405b9e1f46d3152fcfe7cd9d77c54b30
|
[
"MIT"
] | null | null | null |
setup.py
|
philipwfowler/bashthebug
|
8b229078405b9e1f46d3152fcfe7cd9d77c54b30
|
[
"MIT"
] | null | null | null |
setup.py
|
philipwfowler/bashthebug
|
8b229078405b9e1f46d3152fcfe7cd9d77c54b30
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
install_requires=[
"pandas >= 0.21.0",
"tqdm >= 4.19",
"ujson >= 1.35",
"matplotlib >= 2.1.1"
],
name='bashthebug',
version='0.1.0',
url='https://github.com/philipwfowler/bashthebug',
author='Philip W Fowler',
packages=['bashthebug'],
license='MIT',
scripts=['bin/bashthebug-classifications-analyse.py'],
long_description=open('README.md').read(),
)
| 24.157895
| 58
| 0.590414
|
053a93c59bc0242ef5de5a4c872ede47baea251f
| 11,804
|
py
|
Python
|
ksiga/main.py
|
yumyai/ksiga
|
a5bd73669d11842696c21af83a17296e13d6c2f5
|
[
"Apache-2.0"
] | null | null | null |
ksiga/main.py
|
yumyai/ksiga
|
a5bd73669d11842696c21af83a17296e13d6c2f5
|
[
"Apache-2.0"
] | 5
|
2017-01-31T20:02:13.000Z
|
2017-07-21T18:01:36.000Z
|
ksiga/main.py
|
yumyai/ksiga
|
a5bd73669d11842696c21af83a17296e13d6c2f5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
""" Main entry of the script.
"""
import argparse
import sys
import os
import gzip
import datetime
import numpy as np
from sklearn.preprocessing import normalize
from ksiga import ksignature
from ksiga import logutil
from ksiga import fsig
USAGE = """
"""
def openner(filename, **kwargs):
"""Try to return a sensible filehandle
Args:
filename (string): name of a file. Absolute/Relative path should work.
Returns: TODO
"""
if filename.endswith(".gz"):
return gzip.open(filename, **kwargs)
else:
return open(filename, **kwargs)
def main():
commands = {"index": index,
"relent": relative_entropy,
"cre_kmer": cre_kmer,
"acf": average_common_feature,
"acf_kmer": acf_kmer,
"ofc": observe_feature_frequency,
"ofc_kmer": ofc_kmer,
"gen_dmatrix": generate_distance_matrix
}
parser = argparse.ArgumentParser(description="Signature for virus",
usage="""ksiga <command> [<args>]
Commands can be:
index <filenames> Compute k-mer.
cre_kmer <filename.sig> Compute optimal k-mer from CRE.
acf_kmer <filename.sig> Compute optimal k-mer from ACF.
ofc_kmer <filename.sig> Compute optimal k-mer from OFC.
   acf <filenames.sig>          Compute average number of common feature between signatures.
   ofc <filenames.sig>          Compute observed feature frequencies.
   relent <filename.sig>        Compute relative entropy.
   gen_dmatrix <filenames.sig>  Compute distance matrix.
""")
parser.add_argument('command')
args = parser.parse_args(sys.argv[1:2])
if args.command not in commands:
parser.print_help()
sys.exit(1)
cmd = commands.get(args.command)
cmd(sys.argv[2:])
def index(args):
""" Create index for input sequences
Args:
args (TODO): TODO
Returns: TODO
"""
parser = argparse.ArgumentParser(usage="usage:'%(prog)s index [options]'")
parser.add_argument("filenames", nargs="+", help="file(s) of sequences")
parser.add_argument("-k", "--ksize", required=True, type=int)
parser.add_argument("-o", "--output")
parser.add_argument("-f", "--force", action="store_true")
parser.add_argument("-r", "--canon", action="store_true", default=False, help="Use cannonical k-mer representation")
args = parser.parse_args(args)
filenames = args.filenames
ksize = args.ksize
od = args.output
force = args.force
    for filename in filenames:
if not os.path.exists(filename):
# TODO: Warn or exit here.
pass
    # Change this, since using multiple filenames does not make sense.
#for filename in filenames:
filename = filenames[0]
outputName = "{fn}".format(fn=od)
fInputH = openner(filename, mode="rt")
ksignature.build_signature(fInputH, ksize, outputName, force)
def relative_entropy(args):
""" Calculate relative entropy of genome.
Args:
args (TODO): TODO
Returns: TODO
"""
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--file", required=True, help="")
parser.add_argument("-k", "--ksize", required=True, type=int)
parser.add_argument("-o", "--output")
args = parser.parse_args(args)
if args.output is None:
foh = sys.stdout
else:
foh = open(args.output, "w")
relEntropy = fsig.calculate_relative_entropy(args.file, args.ksize)
# Print metadata
print("# input file: {}".format(args.file))
print("# Run on {}".format(str(datetime.datetime.now())))
print(relEntropy, file=foh)
def average_common_feature(args):
""" Calculate an average number of common feature pairwise
between one genome against others
Args:
args (TODO): TODO
Returns: TODO
"""
desc = "Calculate average number of common feature"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("filenames", nargs="+", help="file(s) of signature")
parser.add_argument("-k", "--ksize", required=True, type=int)
parser.add_argument("-o", "--output")
parser.add_argument("--lowmem", action="store_true")
args = parser.parse_args(args)
filenames = args.filenames
outF = args.output
if outF is None:
outHandle = sys.stdout
else:
outHandle = open(outF, "w")
    # Choose to use low mem but slow, or fast but eats memory like a whale.
if args.lowmem:
acf_func = fsig.lowmem_calculate_average_common_feature
else:
acf_func = fsig.calculate_average_common_feature
acf = acf_func(args.filenames, args.ksize)
acf = np.round(acf, 2)
baseFilenames = (os.path.basename(filename) for filename in filenames)
for filename, val in zip(baseFilenames, acf):
print("{}\t{}".format(filename, val), file=outHandle)
def observe_feature_frequency(args):
""" Calculate an observe feature frequency
Args:
args (TODO): TODO
Returns: TODO
"""
parser = argparse.ArgumentParser()
parser.add_argument("filenames", nargs="+", help="file(s) of signature")
parser.add_argument("-k", "--ksize", required=True, type=int)
parser.add_argument("-w", "--wd", default=os.getcwd())
parser.add_argument("-o", "--output")
parser.add_argument("--lowmem", action="store_true")
args = parser.parse_args(args)
ksize = args.ksize
output = args.output
outputFH = open(output, "w") if output else sys.stdout
if args.lowmem:
ofc_func = fsig.lowmem_calculate_ofc_shannon
else:
ofc_func = fsig.calculate_ofc_shannon
shannon_size = ofc_func(args.filenames, ksize)
outputLine = "{}\t{}".format(ksize, shannon_size)
print(outputLine, file=outputFH)
def cre_kmer(args):
""" Calculate optimal k-mer through CRE value.
Args:
args (TODO): TODO
Returns: TODO
"""
desc = "Calculate k-mer from cumulative relative entropy of all genomes"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("filenames", nargs="+", help="file(s) of signature")
parser.add_argument("-ks", "--kfrom", required=True, type=int, help="Calculate from k-mer")
parser.add_argument("-ke", "--kend", required=True, type=int, help="last k-mer")
parser.add_argument("-o", "--output")
parser.add_argument("-r", "--report", default="cre.txt")
args = parser.parse_args(args)
filenames = args.filenames
kmerStart = args.kfrom
kmerEnd = args.kend
cres = []
kmers = []
for filename in filenames:
logutil.debug("Working on {}".format(filename))
cre, kmer = fsig.calculate_cre_kmer(filename, kmerStart, kmerEnd)
cres.append(cre)
kmers.append(kmer)
cres = np.vstack(cres)
# Write report.
suggestKmer = int(round(np.mean(kmers)))
print("Suggest k-mer based on CRE value is {}".format(suggestKmer))
def acf_kmer(args):
""" Calculate an average number of common feature pairwise
between one genome against others
Args:
args (TODO): TODO
Returns: TODO
"""
desc = "Calculate optimal k-mer from average number of common feature"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("filenames", nargs="+", help="file(s) of signature")
parser.add_argument("-ks", "--kfrom", required=True, type=int, help="Calculate from k-mer")
parser.add_argument("-ke", "--kend", required=True, type=int, help="last k-mer")
parser.add_argument("-r", "--report", default="acf.txt")
parser.add_argument("-o", "--output")
args = parser.parse_args(args)
filenames = args.filenames
outF = args.output
kmerStart = args.kfrom
kmerEnd = args.kend
if outF is None:
outHandle = sys.stdout.buffer
else:
outHandle = open(outF, "wb") # wb for numpy write
acf, kmers = fsig.calculate_acf_kmer(filenames, kmerStart, kmerEnd)
acf = np.hstack(acf)
suggestKmer = int(round(np.mean(kmers)))
print("Suggest k-mer based on ACF value is {}".format(suggestKmer))
def ofc_kmer(args):
""" Calculate an observe feature frequency
Args:
args (TODO): TODO
Returns: TODO
"""
desc = "Calculate average number of common feature"
parser = argparse.ArgumentParser(description=desc)
parser.add_argument("filenames", nargs="+", help="file(s) of signature")
parser.add_argument("-ks", "--kfrom", required=True, type=int, help="Calculate from k-mer")
parser.add_argument("-ke", "--kend", required=True, type=int, help="last k-mer")
parser.add_argument("-r", "--report", default="ofc.txt")
parser.add_argument("-o", "--output")
args = parser.parse_args(args)
filenames = args.filenames
outF = args.output
kmerStart = args.kfrom
kmerEnd = args.kend
percentage, suggestKmer = fsig.calculate_ofc_kmer(filenames, kmerStart, kmerEnd)
print("Suggest k-mer based on OCF value is {}".format(suggestKmer))
outF = args.output
if outF is None:
outHandle = sys.stdout.buffer
else:
outHandle = open(outF, "wb") # wb for numpy write
def generate_distance_matrix(args):
"""Generate distance matrix base on k-mer
The output will
Args:
args (TODO): TODO
Returns: TODO
"""
import ksiga.fsig as fsig
from ksiga import distance
parser = argparse.ArgumentParser()
parser.add_argument("filenames", nargs="+", help="file(s) of signature")
parser.add_argument("-k", "--ksize", required=True, type=int)
parser.add_argument("-o", "--output")
parser.add_argument("-t", "--n_thread", type=int, default=1)
parser.add_argument("-d", "--distance", default="euclid")
args = parser.parse_args(args)
fn = distance.get_distance_function(args.distance)
# Delegate to function in distance.
filenames = args.filenames
ksize = args.ksize
outF = args.output
if outF is None:
outHandle = sys.stdout.buffer
else:
outHandle = open(outF, "wb") # wb for numpy write
# Check for existence of file.
for filename in args.filenames:
if not os.path.exists(filename):
# TODO: Do something about this
pass
csr_matrix = fsig.rebuild_sparse_matrix(filenames, ksize)
rowNum = csr_matrix.shape[0]
# Normalize data before calculate distance
csr_matrix_norm = normalize(csr_matrix, norm='l1', axis=1)
result = fn(csr_matrix_norm)
np.savetxt(outHandle, result)
# Output for file
flistH = open("{}.inputlist".format(outF), 'w')
for f in filenames:
flistH.write(f)
flistH.write("\n")
# Logging
logutil.notify("Result is written to {}".format(outF))
logutil.notify("Filelist is written to {}".format(outF))
sys.exit(0)
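# --- Illustrative sketch (not part of the original module) ------------------
# Hypothetical demo (not wired into the CLI): generate_distance_matrix() L1-
# normalises every signature row and then hands the matrix to a distance
# function.  The toy below mirrors those two steps on a small dense matrix,
# using scipy's pdist in place of ksiga.distance, whose implementation is not
# shown here.  Call it manually to see the output.
def _demo_distance_matrix():
    from scipy.spatial.distance import pdist, squareform
    counts = np.array([[10.0, 0.0, 5.0],
                       [2.0, 8.0, 0.0],
                       [9.0, 1.0, 4.0]])          # rows = genomes, cols = k-mers
    freqs = normalize(counts, norm='l1', axis=1)  # every row now sums to 1
    dmat = squareform(pdist(freqs, metric='euclidean'))
    print(np.round(dmat, 3))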
def lowmem_generate_distance_matrix(args):
"""Generate distance matrix base on k-mer. Unlike the normal version counterpart, it relied heavily on looping.
Args:
args (TODO): TODO
Returns: TODO
"""
import ksiga.fsig as fsig
from ksiga import distance
parser = argparse.ArgumentParser()
parser.add_argument("filenames", nargs="+", help="file(s) of signature")
parser.add_argument("-k", "--ksize", required=True, type=int)
parser.add_argument("-o", "--output")
parser.add_argument("-t", "--n_thread", type=int, default=1)
parser.add_argument("-d", "--distance", default="euclid")
args = parser.parse_args(args)
fn = distance.get_distance_function(args.distance)
# Temporary array.
| 29.658291
| 120
| 0.644358
|
646c9ecc0970eb7e5e967f8f6418696246b4f9de
| 3,188
|
py
|
Python
|
scout/constants/so_terms.py
|
Clinical-Genomics/scout
|
1ec4daa76093c2ffa4655612b63d325970253f58
|
[
"BSD-3-Clause"
] | 111
|
2015-01-15T11:53:20.000Z
|
2022-03-26T19:55:24.000Z
|
scout/constants/so_terms.py
|
Clinical-Genomics/scout
|
1ec4daa76093c2ffa4655612b63d325970253f58
|
[
"BSD-3-Clause"
] | 2,995
|
2015-01-15T16:14:20.000Z
|
2022-03-31T13:36:32.000Z
|
scout/constants/so_terms.py
|
Clinical-Genomics/scout
|
1ec4daa76093c2ffa4655612b63d325970253f58
|
[
"BSD-3-Clause"
] | 55
|
2015-05-31T19:09:49.000Z
|
2021-11-01T10:50:31.000Z
|
# These are the valid SO terms with corresponding severity rank
SO_TERMS = {
"transcript_ablation": {"rank": 1, "region": "exonic"},
"splice_donor_variant": {"rank": 2, "region": "splicing"},
"splice_acceptor_variant": {"rank": 3, "region": "splicing"},
"stop_gained": {"rank": 4, "region": "exonic"},
"frameshift_variant": {"rank": 5, "region": "exonic"},
"stop_lost": {"rank": 6, "region": "exonic"},
"start_lost": {"rank": 7, "region": "exonic"},
"initiator_codon_variant": {"rank": 8, "region": "exonic"},
"inframe_insertion": {"rank": 9, "region": "exonic"},
"inframe_deletion": {"rank": 10, "region": "exonic"},
"missense_variant": {"rank": 11, "region": "exonic"},
"protein_altering_variant": {"rank": 12, "region": "exonic"},
"transcript_amplification": {"rank": 13, "region": "exonic"},
"regulatory_region_ablation": {"rank": 14, "region": "regulatory_region"},
"splice_region_variant": {"rank": 15, "region": "splicing"},
"incomplete_terminal_codon_variant": {"rank": 16, "region": "exonic"},
"synonymous_variant": {"rank": 17, "region": "exonic"},
"start_retained_variant": {"rank": 18, "region": "exonic"},
"stop_retained_variant": {"rank": 19, "region": "exonic"},
"coding_sequence_variant": {"rank": 20, "region": "exonic"},
"mature_miRNA_variant": {"rank": 21, "region": "ncRNA_exonic"},
"5_prime_UTR_variant": {"rank": 22, "region": "5UTR"},
"3_prime_UTR_variant": {"rank": 23, "region": "3UTR"},
"non_coding_transcript_exon_variant": {"rank": 24, "region": "ncRNA_exonic"},
"non_coding_exon_variant": {"rank": 25, "region": "ncRNA_exonic"},
"non_coding_transcript_variant": {"rank": 26, "region": "ncRNA_exonic"},
"nc_transcript_variant": {"rank": 27, "region": "ncRNA_exonic"},
"intron_variant": {"rank": 28, "region": "intronic"},
"NMD_transcript_variant": {"rank": 29, "region": "ncRNA"},
"upstream_gene_variant": {"rank": 30, "region": "upstream"},
"downstream_gene_variant": {"rank": 31, "region": "downstream"},
"TFBS_ablation": {"rank": 32, "region": "TFBS"},
"TFBS_amplification": {"rank": 33, "region": "TFBS"},
"TF_binding_site_variant": {"rank": 34, "region": "TFBS"},
"regulatory_region_amplification": {"rank": 35, "region": "regulatory_region"},
"regulatory_region_variant": {"rank": 36, "region": "regulatory_region"},
"feature_elongation": {"rank": 37, "region": "genomic_feature"},
"feature_truncation": {"rank": 38, "region": "genomic_feature"},
"intergenic_variant": {"rank": 39, "region": "intergenic_variant"},
}
SO_TERM_KEYS = list(SO_TERMS.keys())
SEVERE_SO_TERMS = (
"transcript_ablation",
"splice_donor_variant",
"splice_acceptor_variant",
"stop_gained",
"frameshift_variant",
"stop_lost",
"start_lost",
"initiator_codon_variant",
"inframe_insertion",
"inframe_deletion",
"missense_variant",
"protein_altering_variant",
"transcript_amplification",
"splice_region_variant",
"incomplete_terminal_codon_variant",
"synonymous_variant",
"start_retained_variant",
"stop_retained_variant",
"coding_sequence_variant",
)
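# --- Illustrative sketch (not part of the original module) ------------------
# SO_TERMS maps every Sequence Ontology consequence to a severity rank (lower
# rank = more severe) plus a genomic region.  One natural use, sketched here
# purely for illustration, is picking the most severe consequence among a
# variant's annotations:
if __name__ == "__main__":
    annotations = ["intron_variant", "missense_variant", "synonymous_variant"]
    most_severe = min(annotations, key=lambda term: SO_TERMS[term]["rank"])
    print(most_severe)                       # missense_variant (rank 11)
    print(SO_TERMS[most_severe]["region"])   # exonic
    print(most_severe in SEVERE_SO_TERMS)    # True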
| 47.58209
| 83
| 0.647114
|
c82cd5ccf335fac2f031db4c508782a0f208bf28
| 8,934
|
py
|
Python
|
ctrl4ai/helper.py
|
vkreat-tech/ctrl4ai
|
dfcea2735a024216557ad01fc72a22fb7117f687
|
[
"MIT"
] | 12
|
2020-06-29T20:09:29.000Z
|
2022-03-23T10:56:20.000Z
|
ctrl4ai/helper.py
|
vkreat-tech/ctrl4ai
|
dfcea2735a024216557ad01fc72a22fb7117f687
|
[
"MIT"
] | 1
|
2020-12-08T15:47:56.000Z
|
2020-12-08T15:47:56.000Z
|
build/lib/ctrl4ai/helper.py
|
vkreat-tech/ctrl4ai
|
dfcea2735a024216557ad01fc72a22fb7117f687
|
[
"MIT"
] | 5
|
2020-06-27T12:24:37.000Z
|
2022-03-03T05:28:41.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 19 15:27:31 2020
@author: Shaji,Charu,Selva
"""
import numpy as np
import pandas as pd
from math import radians, cos, sin, asin, sqrt
from scipy import stats
import json
from . import _ordinal_dictionary
pd.set_option('mode.chained_assignment', None)
def isNaN(num):
"""
Usage: [arg1]:[numeric value]
Description: Checks if the value is null (numpy.NaN)
Returns: Boolean [True/False]
"""
return num != num
def added_constant_log(dataset,
col,
min_value=None):
"""
Usage: [arg1]:[dataset], [arg2]:[column in which log transform should be done]
Description: Log transforms the specified column
Returns: DataFrame
"""
if min_value is None:
min_value = dataset[col].min()
if min_value <= 0:
dataset[col] = dataset[col].apply(lambda x: np.log(x + np.abs(min_value) + 1))
else:
dataset[col] = dataset[col].apply(lambda x: np.log(x))
return dataset
def yeojohnsonlog(x):
"""
Usage: [arg1]:[real/float value]
  Description: Log transforms the specified value based on the Yeo-Johnson power transform
Returns: Log value (numeric)
"""
if x < 0:
y = -np.log(-x + 1)
else:
y = np.log(x + 1)
return y
def distance_calculator(start_latitude,
start_longitude,
end_latitude,
end_longitude):
"""
Usage: [arg1]:[numeric-start_latitude],[arg2]:[numeric-start_longitude],[arg3]:[numeric-end_latitude],[arg4]:[numeric-end_longitude]
Returns: Numeric [Distance in kilometers]
"""
if isNaN(start_latitude) or isNaN(start_longitude) or isNaN(end_latitude) or isNaN(end_longitude):
return np.NaN
else:
lat1 = radians(start_latitude)
lon1 = radians(start_longitude)
lat2 = radians(end_latitude)
lon2 = radians(end_longitude)
# Haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
c = 2 * asin(sqrt(a))
# Radius of earth in kilometers.
r = 6371
return (c * r)
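# --- Illustrative sketch (not part of the original module) ------------------
# Hypothetical demo of distance_calculator (not called anywhere in the
# library; run it manually): the great-circle distance between London
# (51.5074, -0.1278) and Paris (48.8566, 2.3522) should come out on the order
# of ~340 km, and missing coordinates propagate as NaN instead of raising.
def _demo_distance_calculator():
    print(round(distance_calculator(51.5074, -0.1278, 48.8566, 2.3522), 1), "km")
    print(distance_calculator(np.NaN, -0.1278, 48.8566, 2.3522))   # nan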
def test_numeric(test_string):
"""
Usage: [arg1]:[String/Number]
Description: Checks if the value is numeric
Returns: Boolean [True/False]
"""
try:
float(test_string)
res = True
except:
res = False
return res
def check_numeric_col(col_series):
"""
Usage: [arg1]:[Pandas Series / Single selected column of a dataframe]
Description: Checks if all the values in the series are numerical
Returns: Boolean [True/False]
"""
if all(col_series.apply(lambda x: test_numeric(x))):
return True
else:
return False
def check_ordinal_col(col_series):
"""
Usage: [arg1]:[Pandas Series / Single selected column of a dataframe]
Description: Checks if all the column contains ordinal values checking against the Ctrl4AI's data dictionary
Returns: Boolean [True/False], dict [ordinal to numeric mapper]
"""
result = False
result_dict = dict()
if not check_numeric_col(col_series):
mode_val = col_series.mode()[0]
col_series = col_series.fillna(mode_val).astype('str')
distinct_elements = list(col_series.unique())
unique_elements = [str.lower(val.replace(' ', '')) for val in col_series.unique()]
unique_elements = list(set(unique_elements))
count = len(unique_elements)
possible_scales = _ordinal_dictionary._get_possible_scales(count)
for scale in possible_scales:
unique_keys = [str.lower(val.replace(' ', '')) for val in scale.keys()]
if set(unique_keys) == set(unique_elements):
result = True
transformed_mapper = dict()
for key in scale.keys():
new_key = str.lower(key.replace(' ', ''))
transformed_mapper[new_key] = scale[key]
for val in distinct_elements:
result_dict[val] = transformed_mapper[str.lower(val.replace(' ', ''))]
return result, result_dict
def check_categorical_col(col_series,
categorical_threshold=0.3):
"""
Usage: [arg1]:[Pandas Series / Single selected column of a dataframe],[categorical_threshold(default=0.3)]:[Threshold for determining categorical column based on the percentage of unique values(optional)]
Description: Breaks the values to chunks and checks if the proportion of unique values is less than the threshold
Returns: Boolean [True/False]
"""
col_array = np.array(col_series.apply(lambda x: str(x).strip()).astype(str).fillna(str(0)))
if col_array.size >= 1000:
n = 10
k = 100
elif col_array.size > 100:
n = np.floor(col_array.size / 100)
k = 100
else:
n = 1
k = col_array.size
if n % 2 == 0:
n -= 1
t = 0
f = 0
for i in range(int(n)):
sample = np.random.choice(col_array, size=k, replace=False)
if np.unique(sample).size / sample.size <= categorical_threshold:
t += 1
else:
f += 1
if t > f:
return True
else:
return False
def single_valued_col(col_series):
"""
Usage: [arg1]:[Pandas Series / Single selected column of a dataframe]
Description: Checks if the column has only one value
Returns: Boolean [True/False]
"""
if col_series.dropna().unique().shape[0] == 1:
return True
else:
return False
def one_hot_encoding(dataset,
categorical_cols_list,
drop_first=True):
"""
Usage: [arg1]:[pandas dataframe],[arg2]:[list of columns to be encoded]
Description: Transformation for categorical features by getting dummies
Returns: Dataframe [with separate column for each categorical values]
"""
dataset = pd.merge(dataset, pd.get_dummies(dataset[categorical_cols_list], columns=categorical_cols_list,
drop_first=drop_first),
left_index=True, right_index=True)
dataset = dataset.drop(categorical_cols_list, axis=1)
return dataset
def freedman_diaconis(data, returnas="width"):
"""
Usage: [arg1]:[Pandas Series],[arg2]:[returnas: {"width", "bins"}]
Description: Use Freedman Diaconis rule to compute optimal histogram bin width. ``returnas`` can be one of "width" or "bins", indicating whether the bin width or number of bins should be returned respectively.
Returns: Numeric [Width/No.of bins - whatever is opted]
"""
data = np.asarray(data, dtype=np.float_)
IQR = stats.iqr(data, rng=(25, 75), scale=1.0, nan_policy="omit")
N = data.size
bw = (2 * IQR) / np.power(N, 1 / 3)
if returnas == "width":
result = bw
else:
datmin, datmax = data.min(), data.max()
datrng = datmax - datmin
result = int((datrng // bw) + 1)
return result
def bool_to_int(dataset):
"""
Usage: [arg1]:[pandas dataframe]
Description: Transformation for boolean features to integers
Returns: Dataframe [with booleans converted to integers]
"""
for col in dataset:
if check_categorical_col(dataset[col]):
mode_val = dataset[col].mode()[0]
dataset[col] = dataset[col].fillna(mode_val)
if dataset[col].dtype == 'bool':
dataset[col] = dataset[col].astype('int')
return dataset
def get_columns_subsets(cols, min_cols=1):
"""
Usage: [arg1]:[list of columns], [min_cols (default=1):[values in the range of 1 to length of columns]
Description: Gets all subsets of the column list
Returns: [column list]
"""
lists = []
col_list = list(cols)
for i in range(len(col_list) + 1):
for j in range(i):
subset = col_list[j: i]
subset.sort()
if len(subset) >= min_cols:
lists.append(subset)
return lists
def get_absolute(num):
"""
Usage: [arg1]:[numeric value]
Description: Converts to a positive number
Returns: [positive numeric value]
"""
if num >= 0:
return num
else:
return -num
def correlation_threshold(rows):
return 2 / np.sqrt(rows)
def collinearity_threshold(rows):
if rows <= 100:
return 0.99
else:
return 2 / np.log10(rows)
def intersection(seq1, seq2):
seq3 = [value for value in seq1 if value in seq2]
return seq3
def difference(seq1, seq2):
return [x for x in seq1 if x not in seq2]
def load_artifact(artifact_file):
artifact_json = open(artifact_file).readline()
artifact = json.loads(artifact_json)
return artifact
def categoric_threshold(rows):
return np.sqrt(rows)/rows
| 30.701031
| 213
| 0.61585
|
fc04b61a0cfd5f520f562b43e38b337046ab41b3
| 4,376
|
py
|
Python
|
test/conftest.py
|
selik/xport
|
8ef991022a7bfb6a25c2b038d112019051f6fc6f
|
[
"MIT"
] | 36
|
2016-09-12T19:51:41.000Z
|
2022-01-20T04:00:14.000Z
|
test/conftest.py
|
gcp-lopo/xport-1
|
077ce12bf28486e8550890a729183ba4d96a633f
|
[
"MIT"
] | 73
|
2016-07-05T13:21:17.000Z
|
2022-03-27T00:19:57.000Z
|
test/conftest.py
|
gcp-lopo/xport-1
|
077ce12bf28486e8550890a729183ba4d96a633f
|
[
"MIT"
] | 21
|
2015-09-12T19:59:13.000Z
|
2022-02-16T08:43:32.000Z
|
"""
Shared test fixtures.
"""
# Standard Library
from datetime import datetime
# Community Packages
import pytest
# Xport Modules
import xport
@pytest.fixture(scope='session') # Take care not to mutate!
def library():
"""
Create a 4-column, 6-row dataset with numbers and text.
"""
ds = xport.Dataset(
data={
'VIT_STAT': ['ALIVE'] * 3 + ['DEAD'] * 3,
'ECON': ['POOR', 'NOT', 'UNK'] * 2,
'COUNT': [1216, 1761, 2517, 254, 60, 137],
'TEMP': [98.6, 95.4, 86.7, 93.4, 103.5, 56.7],
},
name='ECON',
dataset_label='Blank-padded dataset label',
dataset_type='',
)
ds.created = ds.modified = datetime(2015, 11, 13, 10, 35, 8)
ds.sas_os = 'W32_7PRO'
ds.sas_version = '9.3'
ds['VIT_STAT'].label = 'Vital status'
ds['VIT_STAT'].format = '$5.'
ds['VIT_STAT'].informat = xport.Informat()
ds['VIT_STAT'].width = 8
ds['ECON'].label = 'Economic status'
ds['ECON'].format = xport.Format('$CHAR', 4, 0, xport.FormatAlignment.RIGHT)
ds['ECON'].informat = xport.Informat()
ds['ECON'].width = 8
ds['COUNT'].label = 'Count'
ds['COUNT'].format = 'comma8.0'
ds['COUNT'].informat = xport.Informat()
ds['COUNT'].width = 8
ds['TEMP'].label = 'Temperature'
ds['TEMP'].format = '8.1'
ds['TEMP'].informat = xport.Informat()
ds['TEMP'].width = 8
return xport.Library(
members=[ds],
created=ds.created,
modified=ds.modified,
sas_os=ds.sas_os,
sas_version=ds.sas_version,
)
@pytest.fixture(scope='session')
def library_bytestring():
"""
Create the same dataset in SAS V5 Transport format.
"""
return b'''\
HEADER RECORD*******LIBRARY HEADER RECORD!!!!!!!000000000000000000000000000000 \
SAS SAS SASLIB 9.3 W32_7PRO 13NOV15:10:35:08\
13NOV15:10:35:08 \
HEADER RECORD*******MEMBER HEADER RECORD!!!!!!!000000000000000001600000000140 \
HEADER RECORD*******DSCRPTR HEADER RECORD!!!!!!!000000000000000000000000000000 \
SAS ECON SASDATA 9.3 W32_7PRO 13NOV15:10:35:08\
13NOV15:10:35:08 Blank-padded dataset label \
HEADER RECORD*******NAMESTR HEADER RECORD!!!!!!!000000000400000000000000000000 \
\x00\x02\x00\x00\x00\x08\x00\x01VIT_STATVital status \
$ \x00\x05\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x02\x00\x00\x00\x08\x00\x02ECON Economic status \
$CHAR \x00\x04\x00\x00\x00\x01\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x08\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x01\x00\x00\x00\x08\x00\x03COUNT Count \
COMMA \x00\x08\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x10\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x01\x00\x00\x00\x08\x00\x04TEMP Temperature \
\x00\x08\x00\x01\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x18\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
HEADER RECORD*******OBS HEADER RECORD!!!!!!!000000000000000000000000000000 \
ALIVE POOR CL\x00\x00\x00\x00\x00\x00Bb\x99\x99\x99\x99\x99\x98\
ALIVE NOT Cn\x10\x00\x00\x00\x00\x00B_fffffh\
ALIVE UNK C\x9dP\x00\x00\x00\x00\x00BV\xb333334\
DEAD POOR B\xfe\x00\x00\x00\x00\x00\x00B]fffffh\
DEAD NOT B<\x00\x00\x00\x00\x00\x00Bg\x80\x00\x00\x00\x00\x00\
DEAD UNK B\x89\x00\x00\x00\x00\x00\x00B8\xb333334\
\
'''
| 46.553191
| 209
| 0.603062
|
5dd36625e409b80ee6b5dc559957df413c043a79
| 2,986
|
py
|
Python
|
distseq/training/ops/pytorch/util.py
|
Shenggan/distseq
|
5c9a84d32b953b9cf230bb587b4ad5170164ed0f
|
[
"Apache-2.0"
] | null | null | null |
distseq/training/ops/pytorch/util.py
|
Shenggan/distseq
|
5c9a84d32b953b9cf230bb587b4ad5170164ed0f
|
[
"Apache-2.0"
] | null | null | null |
distseq/training/ops/pytorch/util.py
|
Shenggan/distseq
|
5c9a84d32b953b9cf230bb587b4ad5170164ed0f
|
[
"Apache-2.0"
] | null | null | null |
import math
import numpy as np
import torch
def copy_para(x):
return torch.nn.Parameter(torch.empty_like(x).copy_(x))
def state_dict(module, destination=None, prefix="", keep_vars=False):
destination = torch.nn.Module.state_dict(
module, destination=destination, prefix=prefix, keep_vars=keep_vars
)
for key in destination.keys():
if "para_16" in key:
destination.pop(key)
return destination
def check_config(config):
if config.hidden_size % config.nhead != 0:
raise Exception(f"hidden_size % nhead != 0")
factor = 8*4 if config.fp16 else 4*4
upbound = factor * 1024
if config.hidden_size > upbound:
# as required by ln backward kernel currently
raise Exception(f"hidden_size > {upbound}")
head_dim = config.hidden_size // config.nhead
if head_dim % factor != 0:
# as required by reshape kernel
raise Exception(f"head_dim({head_dim}) % {factor} != 0")
def calc_offset(sizes):
offsets = [0]
tmp = 0
for x in sizes:
tmp += x
offsets.append(tmp)
return offsets
def get_pos_embedding(max_length, embedding_dim):
half_dim = embedding_dim // 2
emb = math.log(10000) / (half_dim - 1)
emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
emb = torch.arange(max_length, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0)
emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(max_length, -1)
if embedding_dim % 2 == 1:
emb = torch.cat([emb, torch.zeros(max_length, 1)], dim=1)
return emb
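# --- Illustrative usage sketch (editor's addition, not part of the original module). ---
# Relies only on ``get_pos_embedding`` above and the ``torch`` import at the top of the file.
def _demo_get_pos_embedding():
    """Sketch: sinusoidal position table for 50 positions and a 16-dim embedding."""
    emb = get_pos_embedding(50, 16)
    return emb.shape  # torch.Size([50, 16])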
def base_architecture(args):
args.setdefault("hidden_size", 512)
args.setdefault("intermediate_size", 2048)
args.setdefault("nhead", 8)
args.setdefault("attn_prob_dropout_ratio", 0.0)
args.setdefault("activation_dropout_ratio", 0.0)
args.setdefault("hidden_dropout_ratio", 0.1)
args.setdefault("pre_layer_norm", True)
args.setdefault("activation_fn", "relu")
def transformer_base(args):
base_architecture(args)
def transformer_big(args):
args.setdefault("hidden_size", 1024)
args.setdefault("intermediate_size", 4096)
args.setdefault("nhead", 16)
args.setdefault("attn_prob_dropout_ratio", 0.1)
args.setdefault("activation_dropout_ratio", 0.1)
base_architecture(args)
def bert_base(args):
args.setdefault("hidden_size", 768)
args.setdefault("intermediate_size", 3072)
args.setdefault("nhead", 12)
args.setdefault("attn_prob_dropout_ratio", 0.1)
args.setdefault("activation_dropout_ratio", 0.1)
args.setdefault("pre_layer_norm", False)
args.setdefault("activation_fn", "gelu")
base_architecture(args)
def bert_big(args):
args.setdefault("pre_layer_norm", False)
args.setdefault("activation_fn", "gelu")
transformer_big(args)
MODEL_ARCH = {
"transformer-base": transformer_base,
"transformer-big": transformer_big,
"bert-base": bert_base,
"bert-big": bert_big,
}
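# --- Illustrative usage sketch (editor's addition, not part of the original module). ---
# ``args`` only needs to support ``setdefault``, so a plain dict is enough for demonstration.
def _demo_model_arch():
    """Sketch: fill in the default hyper-parameters for the 'bert-base' preset."""
    args = {}
    MODEL_ARCH["bert-base"](args)
    return args["hidden_size"], args["nhead"]  # (768, 12)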
| 28.438095
| 85
| 0.683188
|
8c5b5c48aed19e0db02bb04919c5f423f180832c
| 1,225
|
py
|
Python
|
corehq/apps/app_manager/views/media_utils.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2020-05-05T13:10:01.000Z
|
2020-05-05T13:10:01.000Z
|
corehq/apps/app_manager/views/media_utils.py
|
kkrampa/commcare-hq
|
d64d7cad98b240325ad669ccc7effb07721b4d44
|
[
"BSD-3-Clause"
] | 1
|
2019-12-09T14:00:14.000Z
|
2019-12-09T14:00:14.000Z
|
corehq/apps/app_manager/views/media_utils.py
|
MaciejChoromanski/commcare-hq
|
fd7f65362d56d73b75a2c20d2afeabbc70876867
|
[
"BSD-3-Clause"
] | 5
|
2015-11-30T13:12:45.000Z
|
2019-07-01T19:27:07.000Z
|
from __future__ import unicode_literals
def process_media_attribute(attribute, resp, val):
if val:
val = interpolate_media_path(val)
resp['corrections'][attribute] = val
else:
val = None
return val
def interpolate_media_path(val):
if not val:
return val
if val.startswith('jr://'):
pass
elif val.startswith('/file/'):
val = 'jr:/' + val
elif val.startswith('file/'):
val = 'jr://' + val
elif val.startswith('/'):
val = 'jr://file' + val
else:
val = 'jr://file/' + val
return val
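# --- Illustrative usage sketch (editor's addition, not part of the original module). ---
# The file path below is made up purely for demonstration.
def _demo_interpolate_media_path():
    """Sketch: every accepted input shape is normalised to a jr://file/ URI."""
    return [
        interpolate_media_path('jr://file/commcare/logo.png'),  # already normalised, unchanged
        interpolate_media_path('/file/commcare/logo.png'),      # -> 'jr://file/commcare/logo.png'
        interpolate_media_path('commcare/logo.png'),            # -> 'jr://file/commcare/logo.png'
    ]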
def handle_media_edits(request, item, should_edit, resp, lang, prefix=''):
if 'corrections' not in resp:
resp['corrections'] = {}
for attribute in ('media_image', 'media_audio'):
param = prefix + attribute
if should_edit(param):
media_path = process_media_attribute(param, resp, request.POST.get(param))
item._set_media(attribute, lang, media_path)
for attribute in ('use_default_image_for_all', 'use_default_audio_for_all'):
param = prefix + attribute
if should_edit(param):
setattr(item, attribute, request.POST.get(param) == 'true')
| 27.222222
| 86
| 0.609796
|
5e1f0f7cc7ed3f4fc4f98bb8d221fdcd896433a0
| 384
|
py
|
Python
|
crawler/job_info/excute_collect.py
|
lupingchn/python-demo
|
97129ca38d4cbe87796b6180503ae2e2a1c7a592
|
[
"Unlicense"
] | null | null | null |
crawler/job_info/excute_collect.py
|
lupingchn/python-demo
|
97129ca38d4cbe87796b6180503ae2e2a1c7a592
|
[
"Unlicense"
] | null | null | null |
crawler/job_info/excute_collect.py
|
lupingchn/python-demo
|
97129ca38d4cbe87796b6180503ae2e2a1c7a592
|
[
"Unlicense"
] | null | null | null |
# coding=utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import data_collect.collecter_51job
import data_collect.collecter_liepin
import data_collect.collecter_zhilian
job = 'java'  # Take crawling the Java engineer job category as an example
data_collect.collecter_51job.get_job_in_51job(job)
data_collect.collecter_zhilian.get_job_url_in_zhilian(job)
data_collect.collecter_liepin.get_job_url_in_zhilian(job)
| 22.588235
| 58
| 0.856771
|
9a3130bcf97961fe6a3a5e9fc6e35d9e8e682ee9
| 267
|
py
|
Python
|
app/database.py
|
sreenathmmenon/malware-checker
|
84cca46bdef51536a8278c306776afc864f61640
|
[
"Apache-2.0"
] | null | null | null |
app/database.py
|
sreenathmmenon/malware-checker
|
84cca46bdef51536a8278c306776afc864f61640
|
[
"Apache-2.0"
] | null | null | null |
app/database.py
|
sreenathmmenon/malware-checker
|
84cca46bdef51536a8278c306776afc864f61640
|
[
"Apache-2.0"
] | 1
|
2021-05-17T15:00:58.000Z
|
2021-05-17T15:00:58.000Z
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine('mysql://mlwrUser:mlwr768@127.0.0.1:3306/malware_checker?charset=utf8', convert_unicode=True)
| 44.5
| 116
| 0.846442
|
b78318b8c6c21b9d697bf56aafa0b1045dd28fc4
| 811
|
py
|
Python
|
src/pandas_profiling/report/presentation/flavours/html/alerts.py
|
abhicantdraw/pandas-profiling
|
a12ebb7a94b9371df94bf611237a389d99f8bc00
|
[
"MIT"
] | 8,107
|
2018-01-07T23:27:39.000Z
|
2022-02-22T12:57:11.000Z
|
src/pandas_profiling/report/presentation/flavours/html/alerts.py
|
abhicantdraw/pandas-profiling
|
a12ebb7a94b9371df94bf611237a389d99f8bc00
|
[
"MIT"
] | 771
|
2018-01-06T11:33:08.000Z
|
2022-02-21T11:16:02.000Z
|
src/pandas_profiling/report/presentation/flavours/html/alerts.py
|
abhicantdraw/pandas-profiling
|
a12ebb7a94b9371df94bf611237a389d99f8bc00
|
[
"MIT"
] | 1,308
|
2018-01-08T21:22:08.000Z
|
2022-02-21T04:10:21.000Z
|
from pandas_profiling.report.presentation.core.alerts import Alerts
from pandas_profiling.report.presentation.flavours.html import templates
class HTMLAlerts(Alerts):
def render(self) -> str:
styles = {
"constant": "warning",
"unsupported": "warning",
"type_date": "warning",
"constant_length": "primary",
"high_cardinality": "primary",
"unique": "primary",
"uniform": "primary",
"infinite": "info",
"zeros": "info",
"truncated": "info",
"missing": "info",
"skewed": "info",
"high_correlation": "default",
"duplicates": "default",
}
return templates.template("alerts.html").render(**self.content, styles=styles)
| 32.44
| 86
| 0.549938
|
cb85049595ee55f43aeeeed933bec3962300b666
| 6,461
|
py
|
Python
|
chemdataextractor/reader/uspto.py
|
edbeard/chemdataextractor-uvvis2018
|
a5750d5313a250468e29d244cd4aeafdfc3250da
|
[
"MIT"
] | 6
|
2019-12-05T17:10:19.000Z
|
2021-08-10T15:15:10.000Z
|
chemdataextractor/reader/uspto.py
|
edbeard/chemdataextractor-uvvis2018
|
a5750d5313a250468e29d244cd4aeafdfc3250da
|
[
"MIT"
] | null | null | null |
chemdataextractor/reader/uspto.py
|
edbeard/chemdataextractor-uvvis2018
|
a5750d5313a250468e29d244cd4aeafdfc3250da
|
[
"MIT"
] | 2
|
2020-06-29T06:58:53.000Z
|
2021-03-21T08:39:36.000Z
|
# -*- coding: utf-8 -*-
"""
chemdataextractor.reader.uspto
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Readers for USPTO patents.
:copyright: Copyright 2016 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from ..scrape.clean import clean
from ..doc.table import Table, Cell
from ..doc.text import Caption, Footnote
from .markup import XmlReader
# TODO: The below has only been tested with us-patent-grant-v42
class UsptoXmlReader(XmlReader):
"""Reader for USPTO XML documents."""
cleaners = [clean] # tidy_nlm_references, space_labels
root_css = 'us-patent-grant' # TODO: Other roots
title_css = 'invention-title'
heading_css = 'heading, p[id^="h-"]'
table_css = 'table'
table_body_row_css = 'table row'
table_cell_css = 'entry'
# figure_css = 'img'
reference_css = 'claim-ref'
# citation_css = 'ref-list ref'
ignore_css = 'us-bibliographic-data-grant *:not(invention-title)'
inline_elements = {
'b', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'cite', 'code', 'dfn', 'em', 'kbd', 'strong', 'samp', 'var',
'a', 'bdo', 'br', 'img', 'map', 'object', 'q', 'script', 'span', 'sub', 'sup', 'button', 'input', 'label',
'select', 'textarea', 'blink', 'font', 'marquee', 'nobr', 's', 'strike', 'u', 'wbr',
'xref', 'underline', 'italic', 'bold', 'inline-formula', 'alternatives', 'tex-math',
'{http://www.w3.org/1998/math/mathml}math', '{http://www.w3.org/1998/math/mathml}msubsup',
'{http://www.w3.org/1998/math/mathml}mrow', '{http://www.w3.org/1998/math/mathml}mo',
'{http://www.w3.org/1998/math/mathml}mi', '{http://www.w3.org/1998/math/mathml}mn',
'claim-ref', 'figref'
}
def detect(self, fstring, fname=None):
""""""
if fname and not fname.lower().endswith('.xml'):
return False
if b'us-patent-grant' in fstring:
return True
# TODO: Other DTDs
return False
def _parse_table(self, el, refs, specials):
hdict = {}
for row, tr in enumerate(self._css(self.table_body_row_css, el)):
colnum = 0
for td in self._css(self.table_cell_css, tr):
cell = self._parse_text(td, refs=refs, specials=specials, element_cls=Cell)
colspan = int(td.get('colspan', '1'))
rowspan = int(td.get('rowspan', '1'))
for i in range(colspan):
for j in range(rowspan):
rownum = row + j
if not rownum in hdict:
hdict[rownum] = {}
while colnum in hdict[rownum]:
colnum += 1
hdict[rownum][colnum] = cell[0] if len(cell) > 0 else Cell('')
colnum += 1
potential_rows = []
most_cols = 0
for row in sorted(hdict):
potential_rows.append([])
most_cols = max(most_cols, len(hdict[row]))
for col in sorted(hdict[row]):
potential_rows[-1].append(hdict[row][col])
hrows = []
rows = []
label = None
caption = None
footnotes = []
for i, r in enumerate(potential_rows):
# Skip empty rows
if all(cell.text.strip() == '' for cell in r):
continue
# Top row label?
if len(rows) == 0 and len(r) == 1 and r[0].text.lower().startswith('table ') and not label:
label = r[0].text
continue
# Top row caption?
if len(rows) == 0 and len(r) == 1 and r[0].text.strip() and not caption:
caption = Caption(r[0].text)
continue
# Top row heading?
if len(rows) == 0:
# If any blank rows between here and 10th row of table, this is a heading
max_heading_row = min(10, int(len(potential_rows) / 2))
if i < max_heading_row:
hasblank = False
for nextrow in potential_rows[i+1:max_heading_row]:
if all(cell.text.strip() == '' for cell in nextrow):
hasblank = True
if hasblank:
hrows.append(r)
continue
# Footnotes in final rows? (all remaining rows only have 1 cell)
if all(len(frow) == 1 for frow in potential_rows[i:]):
footnotes.append(Footnote(r[0].text))
continue
rows.append(r)
for r in hrows:
r.extend([Cell('')] * (len(max(hrows, key=len)) - len(r)))
for r in rows:
r.extend([Cell('')] * (len(max(rows, key=len)) - len(r)))
rows = [r for r in rows if any(r)]
tab = Table(label=label, caption=caption or Caption(''), headings=hrows, rows=rows, footnotes=footnotes, id=el.get('id', None))
return [tab]
def _parse_table_rows(self, els, refs, specials):
hdict = {}
for row, tr in enumerate(els):
colnum = 0
for td in self._css(self.table_cell_css, tr):
cell = self._parse_text(td, refs=refs, specials=specials, element_cls=Cell)
colspan = int(td.get('colspan', '1'))
rowspan = int(td.get('rowspan', '1'))
for i in range(colspan):
for j in range(rowspan):
rownum = row + j
if not rownum in hdict:
hdict[rownum] = {}
while colnum in hdict[rownum]:
colnum += 1
hdict[rownum][colnum] = cell[0] if len(cell) > 0 else Cell('')
colnum += 1
rows = []
for row in sorted(hdict):
rows.append([])
for col in sorted(hdict[row]):
rows[-1].append(hdict[row][col])
for r in rows:
r.extend([Cell('')] * (len(max(rows, key=len)) - len(r)))
rows = [r for r in rows if any(r)]
return rows
def _parse_table_footnotes(self, fns, refs, specials):
return [self._parse_text(fn, refs=refs, specials=specials, element_cls=Footnote)[0] for fn in fns]
| 40.892405
| 135
| 0.524222
|
9c457c6e1cfc6796fa9ca617098435e78153d1a9
| 30,898
|
py
|
Python
|
salt/modules/boto_route53.py
|
amaclean199/salt
|
8aaac011b4616e3c9e74a1daafb4a2146a5a430f
|
[
"Apache-2.0"
] | null | null | null |
salt/modules/boto_route53.py
|
amaclean199/salt
|
8aaac011b4616e3c9e74a1daafb4a2146a5a430f
|
[
"Apache-2.0"
] | null | null | null |
salt/modules/boto_route53.py
|
amaclean199/salt
|
8aaac011b4616e3c9e74a1daafb4a2146a5a430f
|
[
"Apache-2.0"
] | 1
|
2019-06-10T17:42:31.000Z
|
2019-06-10T17:42:31.000Z
|
# -*- coding: utf-8 -*-
'''
Connection module for Amazon Route53
.. versionadded:: 2014.7.0
:configuration: This module accepts explicit route53 credentials but can also
utilize IAM roles assigned to the instance through Instance Profiles.
Dynamic credentials are then automatically obtained from AWS API and no
further configuration is necessary. More Information available at:
    .. code-block:: text
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
If IAM roles are not used you need to specify them either in a pillar or
in the minion's config file:
.. code-block:: yaml
route53.keyid: GKTADJGHEIQSXMKKRBJ08H
route53.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
A region may also be specified in the configuration:
.. code-block:: yaml
route53.region: us-east-1
If a region is not specified, the default is 'universal', which is what the boto_route53
library expects, rather than None.
It's also possible to specify key, keyid and region via a profile, either
as a passed in dict, or as a string to pull from pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
:depends: boto
'''
# keep lint from choking on _get_conn and _cache_id
#pylint: disable=E0602
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import logging
import time
# Import salt libs
import salt.utils.compat
import salt.utils.versions
import salt.utils.odict as odict
import salt.utils.versions
from salt.exceptions import SaltInvocationError
log = logging.getLogger(__name__)
# Import third party libs
try:
#pylint: disable=unused-import
import boto
import boto.route53
import boto.route53.healthcheck
from boto.route53.exception import DNSServerError
#pylint: enable=unused-import
logging.getLogger('boto').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def __virtual__():
'''
Only load if boto libraries exist.
'''
# create_zone params were changed in boto 2.35+
return salt.utils.versions.check_boto_reqs(
boto_ver='2.35.0',
check_boto3=False
)
def __init__(opts):
salt.utils.compat.pack_dunder(__name__)
if HAS_BOTO:
__utils__['boto.assign_funcs'](__name__, 'route53', pack=__salt__)
def _get_split_zone(zone, _conn, private_zone):
'''
With boto route53, zones can only be matched by name
or iterated over in a list. Since the name will be the
same for public and private zones in a split DNS situation,
iterate over the list and match the zone name and public/private
status.
'''
for _zone in _conn.get_zones():
if _zone.name == zone:
_private_zone = True if _zone.config['PrivateZone'].lower() == 'true' else False
if _private_zone == private_zone:
return _zone
return False
def describe_hosted_zones(zone_id=None, domain_name=None, region=None,
key=None, keyid=None, profile=None):
'''
Return detailed info about one, or all, zones in the bound account.
If neither zone_id nor domain_name is provided, return all zones.
Note that the return format is slightly different between the 'all'
and 'single' description types.
zone_id
The unique identifier for the Hosted Zone
domain_name
The FQDN of the Hosted Zone (including final period)
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
CLI Example:
.. code-block:: bash
salt myminion boto_route53.describe_hosted_zones domain_name=foo.bar.com. \
profile='{"region": "us-east-1", "keyid": "A12345678AB", "key": "xblahblahblah"}'
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
if zone_id and domain_name:
raise SaltInvocationError('At most one of zone_id or domain_name may '
'be provided')
retries = 10
while retries:
try:
if zone_id:
zone_id = zone_id.replace('/hostedzone/',
'') if zone_id.startswith('/hostedzone/') else zone_id
ret = getattr(conn.get_hosted_zone(zone_id),
'GetHostedZoneResponse', None)
elif domain_name:
ret = getattr(conn.get_hosted_zone_by_name(domain_name),
'GetHostedZoneResponse', None)
else:
marker = None
ret = None
                while marker != '':
r = conn.get_all_hosted_zones(start_marker=marker,
zone_list=ret)
ret = r['ListHostedZonesResponse']['HostedZones']
marker = r['ListHostedZonesResponse'].get('NextMarker', '')
return ret if ret else []
except DNSServerError as e:
if retries:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
retries -= 1
continue
log.error('Could not list zones: %s', e.message)
return []
def list_all_zones_by_name(region=None, key=None, keyid=None, profile=None):
'''
List, by their FQDNs, all hosted zones in the bound account.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
CLI Example:
.. code-block:: bash
salt myminion boto_route53.list_all_zones_by_name
'''
ret = describe_hosted_zones(region=region, key=key, keyid=keyid,
profile=profile)
return [r['Name'] for r in ret]
def list_all_zones_by_id(region=None, key=None, keyid=None, profile=None):
'''
List, by their IDs, all hosted zones in the bound account.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
CLI Example:
.. code-block:: bash
salt myminion boto_route53.list_all_zones_by_id
'''
ret = describe_hosted_zones(region=region, key=key, keyid=keyid,
profile=profile)
return [r['Id'].replace('/hostedzone/', '') for r in ret]
def zone_exists(zone, region=None, key=None, keyid=None, profile=None,
retry_on_rate_limit=None, rate_limit_retries=None,
retry_on_errors=True, error_retries=5):
'''
Check for the existence of a Route53 hosted zone.
.. versionadded:: 2015.8.0
CLI Example::
salt myminion boto_route53.zone_exists example.org
'''
if region is None:
region = 'universal'
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
if retry_on_rate_limit or rate_limit_retries is not None:
salt.utils.versions.warn_until(
'Neon',
'The \'retry_on_rate_limit\' and \'rate_limit_retries\' arguments '
'have been deprecated in favor of \'retry_on_errors\' and '
'\'error_retries\' respectively. Their functionality will be '
'removed, as such, their usage is no longer required.'
)
if retry_on_rate_limit is not None:
retry_on_errors = retry_on_rate_limit
if rate_limit_retries is not None:
error_retries = rate_limit_retries
while error_retries > 0:
try:
return bool(conn.get_zone(zone))
except DNSServerError as e:
if retry_on_errors:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
error_retries -= 1
continue
raise e
def create_zone(zone, private=False, vpc_id=None, vpc_region=None, region=None,
key=None, keyid=None, profile=None):
'''
Create a Route53 hosted zone.
.. versionadded:: 2015.8.0
zone
DNS zone to create
private
True/False if the zone will be a private zone
vpc_id
VPC ID to associate the zone to (required if private is True)
vpc_region
VPC Region (required if private is True)
region
region endpoint to connect to
key
AWS key
keyid
AWS keyid
profile
AWS pillar profile
CLI Example::
salt myminion boto_route53.create_zone example.org
'''
if region is None:
region = 'universal'
if private:
if not vpc_id or not vpc_region:
msg = 'vpc_id and vpc_region must be specified for a private zone'
raise SaltInvocationError(msg)
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
_zone = conn.get_zone(zone)
if _zone:
return False
conn.create_zone(zone, private_zone=private, vpc_id=vpc_id,
vpc_region=vpc_region)
return True
def create_healthcheck(ip_addr=None, fqdn=None, region=None, key=None, keyid=None, profile=None,
port=53, hc_type='TCP', resource_path='', string_match=None, request_interval=30,
failure_threshold=3, retry_on_errors=True, error_retries=5):
'''
Create a Route53 healthcheck
.. versionadded:: Oxygen
ip_addr
IP address to check. ip_addr or fqdn is required.
fqdn
Domain name of the endpoint to check. ip_addr or fqdn is required
port
Port to check
hc_type
Healthcheck type. HTTP | HTTPS | HTTP_STR_MATCH | HTTPS_STR_MATCH | TCP
resource_path
Path to check
string_match
If hc_type is HTTP_STR_MATCH or HTTPS_STR_MATCH, the string to search for in the
response body from the specified resource
request_interval
The number of seconds between the time that Amazon Route 53 gets a response from
your endpoint and the time that it sends the next health-check request.
failure_threshold
The number of consecutive health checks that an endpoint must pass or fail for
Amazon Route 53 to change the current status of the endpoint from unhealthy to
healthy or vice versa.
region
Region endpoint to connect to
key
AWS key
keyid
AWS keyid
profile
AWS pillar profile
CLI Example::
salt myminion boto_route53.create_healthcheck 192.168.0.1
salt myminion boto_route53.create_healthcheck 192.168.0.1 port=443 hc_type=HTTPS \
resource_path=/ fqdn=blog.saltstack.furniture
'''
if fqdn is None and ip_addr is None:
msg = 'One of the following must be specified: fqdn or ip_addr'
log.error(msg)
return {'error': msg}
hc_ = boto.route53.healthcheck.HealthCheck(ip_addr,
port,
hc_type,
resource_path,
fqdn=fqdn,
string_match=string_match,
request_interval=request_interval,
failure_threshold=failure_threshold)
if region is None:
region = 'universal'
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
while error_retries > 0:
try:
return {'result': conn.create_health_check(hc_)}
except DNSServerError as exc:
log.debug(exc)
if retry_on_errors:
if 'Throttling' == exc.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == exc.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
error_retries -= 1
continue
return {'error': __utils__['boto.get_error'](exc)}
return False
def delete_zone(zone, region=None, key=None, keyid=None, profile=None):
'''
Delete a Route53 hosted zone.
.. versionadded:: 2015.8.0
CLI Example::
salt myminion boto_route53.delete_zone example.org
'''
if region is None:
region = 'universal'
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
_zone = conn.get_zone(zone)
if _zone:
conn.delete_hosted_zone(_zone.id)
return True
return False
def _encode_name(name):
return name.replace('*', r'\052')
def _decode_name(name):
return name.replace(r'\052', '*')
def get_record(name, zone, record_type, fetch_all=False, region=None, key=None,
keyid=None, profile=None, split_dns=False, private_zone=False,
identifier=None, retry_on_rate_limit=None,
rate_limit_retries=None, retry_on_errors=True, error_retries=5):
'''
Get a record from a zone.
CLI example::
salt myminion boto_route53.get_record test.example.org example.org A
'''
if region is None:
region = 'universal'
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
if retry_on_rate_limit or rate_limit_retries is not None:
salt.utils.versions.warn_until(
'Neon',
'The \'retry_on_rate_limit\' and \'rate_limit_retries\' arguments '
'have been deprecated in favor of \'retry_on_errors\' and '
'\'error_retries\' respectively. Their functionality will be '
'removed, as such, their usage is no longer required.'
)
if retry_on_rate_limit is not None:
retry_on_errors = retry_on_rate_limit
if rate_limit_retries is not None:
error_retries = rate_limit_retries
while error_retries > 0:
try:
if split_dns:
_zone = _get_split_zone(zone, conn, private_zone)
else:
_zone = conn.get_zone(zone)
if not _zone:
msg = 'Failed to retrieve zone {0}'.format(zone)
log.error(msg)
return None
_type = record_type.upper()
ret = odict.OrderedDict()
name = _encode_name(name)
_record = _zone.find_records(name, _type, all=fetch_all, identifier=identifier)
break # the while True
except DNSServerError as e:
if retry_on_errors:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
error_retries -= 1
continue
raise e
if _record:
ret['name'] = _decode_name(_record.name)
ret['value'] = _record.resource_records[0]
ret['record_type'] = _record.type
ret['ttl'] = _record.ttl
if _record.identifier:
ret['identifier'] = []
ret['identifier'].append(_record.identifier)
ret['identifier'].append(_record.weight)
return ret
def _munge_value(value, _type):
split_types = ['A', 'MX', 'AAAA', 'TXT', 'SRV', 'SPF', 'NS']
if _type in split_types:
return value.split(',')
return value
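# --- Illustrative sketch (editor's addition, not part of the original module). ---
# Multi-valued record types are handed to boto as lists; everything else stays a string.
def _demo_munge_value():
    '''Sketch: comma-separated A record values are split, CNAME values are not.'''
    return (_munge_value('1.1.1.1,2.2.2.2', 'A'),       # ['1.1.1.1', '2.2.2.2']
            _munge_value('web.example.org.', 'CNAME'))  # 'web.example.org.'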
def add_record(name, value, zone, record_type, identifier=None, ttl=None,
region=None, key=None, keyid=None, profile=None,
wait_for_sync=True, split_dns=False, private_zone=False,
retry_on_rate_limit=None, rate_limit_retries=None,
retry_on_errors=True, error_retries=5):
'''
Add a record to a zone.
CLI example::
salt myminion boto_route53.add_record test.example.org 1.1.1.1 example.org A
'''
if region is None:
region = 'universal'
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
if retry_on_rate_limit or rate_limit_retries is not None:
salt.utils.versions.warn_until(
'Neon',
'The \'retry_on_rate_limit\' and \'rate_limit_retries\' arguments '
'have been deprecated in favor of \'retry_on_errors\' and '
'\'error_retries\' respectively. Their functionality will be '
'removed, as such, their usage is no longer required.'
)
if retry_on_rate_limit is not None:
retry_on_errors = retry_on_rate_limit
if rate_limit_retries is not None:
error_retries = rate_limit_retries
while error_retries > 0:
try:
if split_dns:
_zone = _get_split_zone(zone, conn, private_zone)
else:
_zone = conn.get_zone(zone)
if not _zone:
msg = 'Failed to retrieve zone {0}'.format(zone)
log.error(msg)
return False
_type = record_type.upper()
break
except DNSServerError as e:
if retry_on_errors:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
error_retries -= 1
continue
raise e
_value = _munge_value(value, _type)
while error_retries > 0:
try:
# add_record requires a ttl value, annoyingly.
if ttl is None:
ttl = 60
status = _zone.add_record(_type, name, _value, ttl, identifier)
return _wait_for_sync(status.id, conn, wait_for_sync)
except DNSServerError as e:
if retry_on_errors:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
error_retries -= 1
continue
raise e
def update_record(name, value, zone, record_type, identifier=None, ttl=None,
region=None, key=None, keyid=None, profile=None,
wait_for_sync=True, split_dns=False, private_zone=False,
retry_on_rate_limit=None, rate_limit_retries=None,
retry_on_errors=True, error_retries=5):
'''
Modify a record in a zone.
CLI example::
        salt myminion boto_route53.update_record test.example.org 1.1.1.1 example.org A
'''
if region is None:
region = 'universal'
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
if split_dns:
_zone = _get_split_zone(zone, conn, private_zone)
else:
_zone = conn.get_zone(zone)
if not _zone:
msg = 'Failed to retrieve zone {0}'.format(zone)
log.error(msg)
return False
_type = record_type.upper()
if retry_on_rate_limit or rate_limit_retries is not None:
salt.utils.versions.warn_until(
'Neon',
'The \'retry_on_rate_limit\' and \'rate_limit_retries\' arguments '
'have been deprecated in favor of \'retry_on_errors\' and '
'\'error_retries\' respectively. Their functionality will be '
'removed, as such, their usage is no longer required.'
)
if retry_on_rate_limit is not None:
retry_on_errors = retry_on_rate_limit
if rate_limit_retries is not None:
error_retries = rate_limit_retries
_value = _munge_value(value, _type)
while error_retries > 0:
try:
old_record = _zone.find_records(name, _type, identifier=identifier)
if not old_record:
return False
status = _zone.update_record(old_record, _value, ttl, identifier)
return _wait_for_sync(status.id, conn, wait_for_sync)
except DNSServerError as e:
if retry_on_errors:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
error_retries -= 1
continue
raise e
def delete_record(name, zone, record_type, identifier=None, all_records=False,
region=None, key=None, keyid=None, profile=None,
wait_for_sync=True, split_dns=False, private_zone=False,
retry_on_rate_limit=None, rate_limit_retries=None,
retry_on_errors=True, error_retries=5):
'''
    Delete a record from a zone.
CLI example::
salt myminion boto_route53.delete_record test.example.org example.org A
'''
if region is None:
region = 'universal'
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
if split_dns:
_zone = _get_split_zone(zone, conn, private_zone)
else:
_zone = conn.get_zone(zone)
if not _zone:
msg = 'Failed to retrieve zone {0}'.format(zone)
log.error(msg)
return False
_type = record_type.upper()
if retry_on_rate_limit or rate_limit_retries is not None:
salt.utils.versions.warn_until(
'Neon',
'The \'retry_on_rate_limit\' and \'rate_limit_retries\' arguments '
'have been deprecated in favor of \'retry_on_errors\' and '
'\'error_retries\' respectively. Their functionality will be '
'removed, as such, their usage is no longer required.'
)
if retry_on_rate_limit is not None:
retry_on_errors = retry_on_rate_limit
if rate_limit_retries is not None:
error_retries = rate_limit_retries
while error_retries > 0:
try:
old_record = _zone.find_records(name, _type, all=all_records, identifier=identifier)
if not old_record:
return False
status = _zone.delete_record(old_record)
return _wait_for_sync(status.id, conn, wait_for_sync)
except DNSServerError as e:
if retry_on_errors:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
error_retries -= 1
continue
raise e
def _try_func(conn, func, **args):
tries = 30
while True:
try:
return getattr(conn, func)(**args)
except AttributeError as e:
# Don't include **args in log messages - security concern.
log.error('Function `%s()` not found for AWS connection object %s',
func, conn)
return None
except DNSServerError as e:
if tries and e.code == 'Throttling':
log.debug('Throttled by AWS API. Will retry in 5 seconds')
time.sleep(5)
tries -= 1
continue
log.error('Failed calling %s(): %s', func, e)
return None
def _wait_for_sync(status, conn, wait=True):
### Wait should be a bool or an integer
if wait is True:
wait = 600
if not wait:
return True
orig_wait = wait
log.info('Waiting up to %s seconds for Route53 changes to synchronize', orig_wait)
while wait > 0:
change = conn.get_change(status)
current = change.GetChangeResponse.ChangeInfo.Status
if current == 'INSYNC':
return True
sleep = wait if wait % 60 == wait else 60
log.info(
'Sleeping %s seconds waiting for changes to synch (current status %s)',
sleep, current
)
time.sleep(sleep)
wait -= sleep
continue
log.error('Route53 changes not synced after %s seconds.', orig_wait)
return False
def create_hosted_zone(domain_name, caller_ref=None, comment='', private_zone=False, vpc_id=None,
vpc_name=None, vpc_region=None, region=None, key=None, keyid=None,
profile=None):
'''
Create a new Route53 Hosted Zone. Returns a Python data structure with information about the
newly created Hosted Zone.
domain_name
The name of the domain. This must be fully-qualified, terminating with a period. This is
the name you have registered with your domain registrar. It is also the name you will
delegate from your registrar to the Amazon Route 53 delegation servers returned in response
to this request.
caller_ref
A unique string that identifies the request and that allows create_hosted_zone() calls to
be retried without the risk of executing the operation twice. It can take several minutes
for the change to replicate globally, and change from PENDING to INSYNC status. Thus it's
best to provide some value for this where possible, since duplicate calls while the first
is in PENDING status will be accepted and can lead to multiple copies of the zone being
created. On the other hand, if a zone is created with a given caller_ref, then deleted,
a second attempt to create a zone with the same caller_ref will fail until that caller_ref
is flushed from the Route53 system, which can take upwards of 24 hours.
comment
Any comments you want to include about the hosted zone.
private_zone
Set True if creating a private hosted zone.
vpc_id
When creating a private hosted zone, either the VPC ID or VPC Name to associate with is
required. Exclusive with vpe_name. Ignored when creating a non-private zone.
vpc_name
When creating a private hosted zone, either the VPC ID or VPC Name to associate with is
required. Exclusive with vpe_id. Ignored when creating a non-private zone.
vpc_region
When creating a private hosted zone, the region of the associated VPC is required. If not
provided, an effort will be made to determine it from vpc_id or vpc_name, where possible.
If this fails, you'll need to provide an explicit value for this option. Ignored when
creating a non-private zone.
region
Region endpoint to connect to.
key
AWS key to bind with.
keyid
AWS keyid to bind with.
profile
Dict, or pillar key pointing to a dict, containing AWS region/key/keyid.
CLI Example::
salt myminion boto_route53.create_hosted_zone example.org
'''
if region is None:
region = 'universal'
if not domain_name.endswith('.'):
raise SaltInvocationError('Domain MUST be fully-qualified, complete '
'with ending period.')
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
deets = conn.get_hosted_zone_by_name(domain_name)
if deets:
log.info('Route53 hosted zone %s already exists', domain_name)
return None
args = {'domain_name': domain_name,
'caller_ref': caller_ref,
'comment': comment,
'private_zone': private_zone}
if private_zone:
if not _exactly_one((vpc_name, vpc_id)):
raise SaltInvocationError('Either vpc_name or vpc_id is required '
'when creating a private zone.')
vpcs = __salt__['boto_vpc.describe_vpcs'](
vpc_id=vpc_id, name=vpc_name, region=region, key=key,
keyid=keyid, profile=profile).get('vpcs', [])
if vpc_region and vpcs:
vpcs = [v for v in vpcs if v['region'] == vpc_region]
if not vpcs:
log.error('Private zone requested but a VPC matching given criteria'
' not found.')
return None
if len(vpcs) > 1:
log.error('Private zone requested but multiple VPCs matching given '
'criteria found: %s.', [v['id'] for v in vpcs])
return None
vpc = vpcs[0]
if vpc_name:
vpc_id = vpc['id']
if not vpc_region:
vpc_region = vpc['region']
args.update({'vpc_id': vpc_id, 'vpc_region': vpc_region})
else:
if any((vpc_id, vpc_name, vpc_region)):
log.info('Options vpc_id, vpc_name, and vpc_region are ignored '
'when creating non-private zones.')
r = _try_func(conn, 'create_hosted_zone', **args)
if r is None:
log.error('Failed to create hosted zone %s', domain_name)
return None
r = r.get('CreateHostedZoneResponse', {})
# Pop it since it'll be irrelevant by the time we return
status = r.pop('ChangeInfo', {}).get('Id', '').replace('/change/', '')
synced = _wait_for_sync(status, conn, wait=600)
if not synced:
log.error('Hosted zone %s not synced after 600 seconds.', domain_name)
return None
return r
| 33.731441
| 103
| 0.602725
|
08c86cf42f5706b4ad158032639e81dfaef76dce
| 7,136
|
py
|
Python
|
medium/tictactoe/minimax.py
|
nightmarebadger/tutorials-python-basic
|
a4c49e01bf9c9c5006239c013c81d85603dd96fd
|
[
"MIT"
] | null | null | null |
medium/tictactoe/minimax.py
|
nightmarebadger/tutorials-python-basic
|
a4c49e01bf9c9c5006239c013c81d85603dd96fd
|
[
"MIT"
] | null | null | null |
medium/tictactoe/minimax.py
|
nightmarebadger/tutorials-python-basic
|
a4c49e01bf9c9c5006239c013c81d85603dd96fd
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
A basic implementation of the TicTacToe game which can be played by humans or
computer players (human vs human, human vs ai, ai vs ai).
"""
import random
import time
board_print_style = """
+---+---+---+
| {6} | {7} | {8} |
+---+---+---+
| {3} | {4} | {5} |
+---+---+---+
| {0} | {1} | {2} |
+---+---+---+
"""
def check_same(li):
"""Check if all elements in the list are the same and not equal ' '.
    Returns False or the value in the list."""
same = all([i == li[0] for i in li])
if same and li[0] != ' ':
return li[0]
return False
class TicTacToe(object):
"""A class for the TicTacToe game."""
def __init__(self):
self.ply1 = 'X'
self.ply2 = 'O'
self.ply = self.ply1
self.board = [' ' for i in range(9)]
self.computer = None
def change_players(self):
"""Change between the players."""
if self.ply == self.ply1:
self.ply = self.ply2
else:
self.ply = self.ply1
def __str__(self):
"""Print a nice presentation of the board."""
return board_print_style.format(*self.board)
def make_move(self, n):
"""Make a move on the location ``n``."""
self.board[n] = self.ply
def get_possible_moves(self):
"""Return a list of all possible moves."""
return [i+1 for i in range(9) if self.board[i] == ' ']
def get_move(self):
"""Ask the player to input a move. Must input a possible move or it
will ask again.
:returns: The move to be made
:rtype: ``int``
"""
moves = self.get_possible_moves()
print("Possible moves: {0}".format(moves))
try:
x = int(input("Next move: "))
except (ValueError, NameError):
x = None
while x not in moves:
try:
x = int(input("Next move: "))
except (ValueError, NameError):
pass
return x
def _check_win(self):
"""Check if someone won. Returns None or the winning value."""
possible_win = [
[0, 1, 2], [3, 4, 5], [6, 7, 8],
[0, 3, 6], [1, 4, 7], [2, 5, 8],
[0, 4, 8], [6, 4, 2]
]
for pos in possible_win:
res = check_same([
self.board[pos[0]],
self.board[pos[1]],
self.board[pos[2]]
])
if res:
return res
def check_win(self):
res = self._check_win()
if res:
print(self)
print("{0} won!".format(res))
return res
def play_human(self):
"""Play the game against a human opponent."""
self.computer = False
while not self.check_win() and self.get_possible_moves():
print(self)
x = self.get_move()
self.make_move(x - 1)
self.change_players()
if not self.check_win():
print(self)
print("Nobody won!")
def computer_turn(self):
return self.ply == self.ply2
def judge(self):
win = self._check_win()
if win == self.ply1:
return 1
elif win == self.ply2:
return -1
if not self.get_possible_moves():
return 0
def computer_move(self):
assessments = []
for move in self.get_possible_moves():
try:
self.make_move(move - 1)
self.change_players()
# assessments.append((move, self.assess_move(move)))
assessments.append((move, self.assess_move_optimised(move)))
finally:
self.revert_move(move - 1)
self.change_players()
# Shuffle so we don't always start the same
random.shuffle(assessments)
# Sort so we choose the best option
assessments.sort(key=lambda x: x[1])
print('Computer assessment (lower is better): {}'.format(assessments))
if self.ply == self.ply2:
return assessments[0][0]
else:
return assessments[-1][0]
def assess_move(self, current_move):
score = self.judge()
if score is not None:
return score
assessments = []
for move in self.get_possible_moves():
try:
self.make_move(move - 1)
self.change_players()
assessments.append(self.assess_move(move))
finally:
self.revert_move(move - 1)
self.change_players()
if self.ply == self.ply1:
return max(assessments)
else:
return min(assessments)
def assess_move_optimised(self, current_move):
score = self.judge()
if score is not None:
return score
assessments = []
for move in self.get_possible_moves():
try:
self.make_move(move - 1)
self.change_players()
assessment = self.assess_move_optimised(move)
# This looks 'wrong' because we're still on the 'changed'
# player, so the scores need to be 'turned around'
if self.ply == self.ply2 and assessment == 1:
return 1
elif self.ply == self.ply1 and assessment == -1:
return -1
assessments.append(assessment)
finally:
self.revert_move(move - 1)
self.change_players()
if self.ply == self.ply1:
return max(assessments)
else:
return min(assessments)
def revert_move(self, n):
"""Make a move on the location ``n``."""
self.board[n] = ' '
def play_vs_computer(self):
"""Play the game against a computer opponent."""
self.computer = True
while not self.check_win() and self.get_possible_moves():
if not self.computer_turn():
print(self)
x = self.get_move()
self.make_move(x - 1)
self.change_players()
else:
t = time.time()
self.make_move(self.computer_move() - 1)
print('Thinking time: {}s'.format(time.time() - t))
self.change_players()
if not self.check_win():
print(self)
print("Nobody won!")
def play_computer_vs_computer(self):
"""Play the game computer vs computer."""
self.computer = True
while not self.check_win() and self.get_possible_moves():
print(self)
t = time.time()
self.make_move(self.computer_move() - 1)
tt = time.time() - t
print('Thinking time: {}s'.format(tt))
self.change_players()
if tt < 1:
time.sleep(1 - tt)
if not self.check_win():
print(self)
print("Nobody won!")
if __name__ == "__main__":
ttt = TicTacToe()
ttt.play_vs_computer()
| 26.626866
| 78
| 0.507287
|
78ca29748c230db93a13aa7d58d1582eca051db0
| 19,181
|
py
|
Python
|
test/integration/ggrc/models/test_revision.py
|
sfarbotka/ggrc-core
|
ef7aae6bc09ad2f53a2414f643572e07d689784a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/integration/ggrc/models/test_revision.py
|
sfarbotka/ggrc-core
|
ef7aae6bc09ad2f53a2414f643572e07d689784a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/integration/ggrc/models/test_revision.py
|
sfarbotka/ggrc-core
|
ef7aae6bc09ad2f53a2414f643572e07d689784a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2019 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
""" Tests for ggrc.models.Revision """
from datetime import datetime
from freezegun import freeze_time
import ddt
import mock
import ggrc.models
from ggrc.models import all_models
import integration.ggrc.generator
from integration.ggrc import TestCase
from integration.ggrc.models import factories
from integration.ggrc import api_helper
from integration.ggrc import query_helper
from integration.ggrc.review import build_reviewer_acl
def _get_revisions(obj, field="resource"):
return ggrc.models.Revision.query.filter_by(**{
field + "_type": obj.__class__.__name__,
field + "_id": obj.id
}).all()
def _project_content(content):
return {
"source": {
"type": content["source_type"],
"id": content["source_id"],
},
"destination": {
"type": content["destination_type"],
"id": content["destination_id"],
},
}
@ddt.ddt
class TestRevisions(query_helper.WithQueryApi, TestCase):
""" Tests for ggrc.models.Revision """
def setUp(self):
super(TestRevisions, self).setUp()
self.gen = integration.ggrc.generator.ObjectGenerator()
self.api_helper = api_helper.Api()
self.api_helper.client.get("/login")
def test_revisions(self):
""" Test revision creation for POST and PUT """
cls = ggrc.models.DataAsset
name = cls._inflector.table_singular # pylint: disable=protected-access
_, obj = self.gen.generate(cls, name, {name: {
"title": "revisioned v1",
"context": None,
}})
revisions = _get_revisions(obj)
self.assertEqual(len(revisions), 1)
_, obj = self.gen.modify(obj, name, {name: {
"slug": obj.slug,
"title": "revisioned v2",
"context": None,
}})
revisions = _get_revisions(obj)
expected = {("created", "revisioned v1"), ("modified", "revisioned v2")}
actual = {(r.action, r.content["title"]) for r in revisions}
self.assertEqual(actual, expected)
def test_relevant_revisions(self):
""" Test revision creation for mapping to an object """
cls = ggrc.models.DataAsset
name = cls._inflector.table_singular # pylint: disable=protected-access
_, obj1 = self.gen.generate(cls, name, {name: {
"title": "connected 1",
"context": None,
}})
_, obj2 = self.gen.generate(cls, name, {name: {
"title": "connected 2",
"context": None,
}})
rel_data = {
"source": {"id": obj1.id, "type": cls.__name__},
"destination": {"id": obj2.id, "type": cls.__name__},
"context": None,
}
_, rel = self.gen.generate(ggrc.models.Relationship, "relationship", {
"relationship": rel_data})
revisions_source = _get_revisions(obj1, "source")
revisions_destination = _get_revisions(obj2, "destination")
self.assertEqual(revisions_source, revisions_destination)
self.assertEqual(len(revisions_source), 1)
self.gen.api.delete(rel)
revisions_source = _get_revisions(obj1, "source")
revisions_destination = _get_revisions(obj2, "destination")
self.assertEqual(revisions_source, revisions_destination)
self.assertEqual(len(revisions_source), 2)
expected_data = {
"source": {
"type": cls.__name__,
"id": obj1.id,
},
"destination": {
"type": cls.__name__,
"id": obj2.id,
},
}
expected = [(u"created", expected_data), ("deleted", expected_data)]
actual = [(r.action, _project_content(r.content))
for r in revisions_source]
self.assertEqual(sorted(actual), sorted(expected))
def test_content_length(self):
"""Test revision content length restrictions."""
process = factories.ProcessFactory(
title="a" * 200,
description="b" * 65000,
notes="c" * 65000,
)
revision = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == process.id,
ggrc.models.Revision.resource_type == process.type,
).first()
self.assertIsNotNone(revision)
self.assertEqual(revision.content["title"], process.title)
self.assertEqual(revision.content["description"], process.description)
@ddt.data(True, False)
def test_revision_after_del_cad(self, is_add_cav):
"""Test creating new revision after deleting CAD.
In case of deleting CAD, new revision must be created for object,
which had this CAD.
"""
with factories.single_commit():
control = factories.ControlFactory()
cad = factories.CustomAttributeDefinitionFactory(
title="test_name",
definition_type="control",
)
cad_id = cad.id
if is_add_cav:
factories.CustomAttributeValueFactory(
custom_attribute=cad,
attributable=control,
attribute_value="text",
)
revision_id = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == control.id,
ggrc.models.Revision.resource_type == control.type,
).order_by(ggrc.models.Revision.id.desc()).first().id
with self.api_helper.as_external():
self.api_helper.delete(cad, cad_id)
control = ggrc.models.Control.query.first()
last_revision_id = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == control.id,
ggrc.models.Revision.resource_type == control.type,
).order_by(ggrc.models.Revision.id.desc()).first().id
self.assertGreater(last_revision_id, revision_id)
@ddt.data(True, False)
def test_change_modified_by(self, is_add_cav):
"""Test checked correct changing of modified_by_id field.
User 1 create control, user 2 delete CAD. After the deleting CAD
test checking that modified_by field contains user 2.
"""
with factories.single_commit():
control = factories.ControlFactory()
cad = factories.CustomAttributeDefinitionFactory(
title="test_cad",
definition_type="control",
attribute_type="Text",
)
control_id = control.id
if is_add_cav:
factories.CustomAttributeValueFactory(
custom_attribute=cad,
attributable=control,
attribute_value="test")
user = self.gen.generate_person(
data={"name": "test_admin", "email": "external_app@example.com"},
user_role="Administrator")[1]
self.api_helper.set_user(user)
self.client.get("/login")
control_revisions = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == control_id,
ggrc.models.Revision.resource_type == "Control",
).order_by(ggrc.models.Revision.id.desc()).all()
ids_before_del = set(revision.id for revision in control_revisions)
cad = ggrc.models.CustomAttributeDefinition.query.filter_by(
title="test_cad").first()
resp_delete = self.api_helper.delete(cad)
self.assert200(resp_delete)
cad = ggrc.models.CustomAttributeDefinition.query.filter_by(
title="test_cad").first()
self.assertIsNone(cad)
control_revisions_after = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == control_id,
ggrc.models.Revision.resource_type == "Control",
).order_by(ggrc.models.Revision.id.desc()).all()
ids_after_del = set(revision.id for revision
in control_revisions_after)
difference_revision_id = ids_after_del.difference(ids_before_del)
last_revision = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == control_id,
ggrc.models.Revision.resource_type == "Control",
).order_by(ggrc.models.Revision.id.desc()).first()
self.assertSetEqual(difference_revision_id, {last_revision.id})
expected_id = ggrc.models.Person.query.filter_by(
name="test_admin").first().id
self.assertEquals(last_revision.content["modified_by_id"], expected_id)
self.assertEquals(last_revision.content["modified_by"]["id"], expected_id)
def _test_revision_with_empty_cads(self,
attribute_type,
attribute_value,
is_global):
"""Population cavs and cads depend on is_global flag and send params."""
asmnt = factories.AssessmentFactory()
asmnt_id = asmnt.id
cad_params = {
"title": "test_cad",
"definition_type": "assessment",
"attribute_type": attribute_type
}
if not is_global:
cad_params["definition_id"] = asmnt_id
with factories.single_commit():
cad = factories.CustomAttributeDefinitionFactory(**cad_params)
cad_id = cad.id
revisions = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == asmnt_id,
ggrc.models.Revision.resource_type == "Assessment",
).order_by(ggrc.models.Revision.id.desc()).all()
self.assertEqual(1, len(revisions))
revision = revisions[0]
# pylint: disable=protected-access
self.assertIn("custom_attribute_values", revision._content)
self.assertIn("custom_attribute_definitions", revision._content)
self.assertEqual([], revision._content["custom_attribute_values"])
self.assertEqual([], revision._content["custom_attribute_definitions"])
self.assertIn("custom_attribute_values", revision.content)
self.assertEqual(
[{
'attributable_id': asmnt_id,
'attributable_type': 'Assessment',
'attribute_object': None,
'attribute_value': attribute_value,
'context_id': None,
'custom_attribute_id': cad_id,
'display_name': '',
'type': 'CustomAttributeValue',
}],
revision.content["custom_attribute_values"])
self.assertIn("custom_attribute_definitions", revision.content)
cad = all_models.CustomAttributeDefinition.query.get(cad_id)
self.assertEqual([cad.log_json()],
revision.content["custom_attribute_definitions"])
@ddt.data(
("Text", ""),
("Rich Text", ""),
("Dropdown", ""),
("Checkbox", "0"),
("Date", ""),
)
@ddt.unpack
def test_revisions_with_empty_gcads(self, attribute_type, attribute_value):
"""Population cavs and global cads for type {0}."""
self._test_revision_with_empty_cads(attribute_type, attribute_value, True)
@ddt.data(
("Text", ""),
("Rich Text", ""),
("Dropdown", ""),
("Checkbox", "0"),
("Date", ""),
)
@ddt.unpack
def test_revisions_with_empty_lcads(self, attribute_type, attribute_value):
"""Population cavs and local cads for type {0}."""
self._test_revision_with_empty_cads(attribute_type, attribute_value, False)
@ddt.data("", u"0", u"", None, "0")
@mock.patch(
"ggrc.models.custom_attribute_value.CustomAttributeValue"
"._validate_checkbox", return_value=True
)
def test_revisions_invalid_cavs(self, value, _):
"""Test filtering of Checkbox CAVs."""
with factories.single_commit():
asmnt = factories.AssessmentFactory()
ca_def = factories.CustomAttributeDefinitionFactory(
definition_id=asmnt.id,
definition_type="assessment",
title="CA",
attribute_type="Checkbox",
)
self.gen.api.modify_object(
asmnt, {
"custom_attribute_values": [{
"attributable_id": asmnt.id,
"attributable_type": "assessment",
"attribute_value": value,
"custom_attribute_id": ca_def.id,
}, ],
},
)
revisions = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == asmnt.id,
ggrc.models.Revision.resource_type == "Assessment",
).order_by(ggrc.models.Revision.id.desc()).all()
content = revisions[0].content
self.assertEqual(
content["custom_attribute_values"][0]["attribute_value"], "0")
def test_revisions_cavs_wo_cad(self):
"""Test filtering CAVs without definitions."""
with factories.single_commit():
asmnt = factories.AssessmentFactory()
ca_def = factories.CustomAttributeDefinitionFactory(
definition_id=asmnt.id,
definition_type="assessment",
title="CA",
attribute_type="Text",
)
ca_def_id = ca_def.id
self.gen.api.modify_object(
asmnt, {
"custom_attribute_values": [{
"attributable_id": asmnt.id,
"attributable_type": "assessment",
"attribute_value": "abc",
"custom_attribute_id": ca_def.id,
}, ],
},
)
ggrc.models.CustomAttributeDefinition.query.filter_by(
id=ca_def_id
).delete()
revisions = ggrc.models.Revision.query.filter(
ggrc.models.Revision.resource_id == asmnt.id,
ggrc.models.Revision.resource_type == "Assessment",
).order_by(ggrc.models.Revision.id.desc()).all()
content = revisions[0].content
self.assertEqual(len(content["custom_attribute_values"]), 1)
cav = content["custom_attribute_values"][0]
self.assertEqual(cav["custom_attribute_id"], ca_def.id)
self.assertEqual(cav["attributable_id"], asmnt.id)
def test_revision_review_stub(self):
""" Test proper review stub population in revision content """
program = factories.ProgramFactory()
revisions = _get_revisions(program)
self.assertEqual(len(revisions), 1)
self.assertEqual(revisions[0].action, "created")
resp = self.api_helper.post(
all_models.Review,
{
"review": {
"reviewable": {
"type": program.type,
"id": program.id,
},
"context": None,
"notification_type": "email",
"status": all_models.Review.STATES.REVIEWED,
"access_control_list": build_reviewer_acl()
},
},
)
self.assertEqual(201, resp.status_code)
self.assertIn("review", resp.json)
resp_review = resp.json["review"]
self.assertEqual(all_models.Review.STATES.REVIEWED,
resp_review["status"])
revisions = _get_revisions(program)
self.assertEqual(len(revisions), 2)
self.assertEqual(revisions[0].action, "created")
self.assertEqual(revisions[1].action, "modified")
rev_content = revisions[1].content
self.assertIsNotNone(rev_content)
self.assertIn("review", rev_content)
review = rev_content["review"]
self.assertIsNotNone(review)
expected = {
"context_id": None,
"href": "/api/reviews/{}".format(resp_review["id"]),
"id": resp_review["id"],
"type": resp_review["type"],
}
self.assertEqual(review, expected)
def test_revision_ordering(self):
"""Test revision ordering by created_at with query api"""
with factories.single_commit():
# pylint: disable=expression-not-assigned
[factories.ControlFactory() for i in range(10)]
# pylint: enable=expression-not-assigned
query = self._make_query_dict(
"Revision", expression=("resource_type", "=", "Control"),
order_by=[{"name": "created_at", "desc": True}]
)
self.client.get("/login")
result = self._get_first_result_set(query, "Revision")
count, values = result["count"], result["values"]
datetime_format = "%Y-%m-%dT%H:%M:%S"
for value in values:
value["created_at"] = datetime.strptime(value["created_at"],
datetime_format)
self.assertTrue(
all(values[i]["created_at"] >= values[i + 1]["created_at"]
for i in range(count - 1))
)
def test_created_at_filtering(self):
"""Test revision could be filtered by created_at."""
with freeze_time("2019-01-08 12:00:00"):
control = factories.ControlFactory()
control_id = control.id
factories.RevisionFactory(obj=control)
expected_ids = set()
with freeze_time("2019-01-08 23:59:59"):
rev = factories.RevisionFactory(obj=control)
expected_ids.add(rev.id)
self.client.get("/login")
resp = self._get_first_result_set(
{
"object_name": "Revision",
"type": "ids",
"filters": {
"expression": {
"op": {"name": "AND"},
"left": {
"op": {"name": "AND"},
"left": {
"op": {"name": "="},
"left": "resource_type",
"right": "Control"
},
"right": {
"op": {"name": "="},
"left": "resource_id",
"right": control_id
}
},
"right": {
"op": {"name": ">"},
"left": "created_at",
"right": "2019-01-08 12:00:00"
}
}
}
},
"Revision",
"ids"
)
self.assertItemsEqual(resp, expected_ids)
def test_populated_automapping(self):
"""Test automapping content in revision"""
with factories.single_commit():
program_a = factories.ProgramFactory()
program_b = factories.ProgramFactory()
factories.RelationshipFactory(source=program_b,
destination=program_a)
regulation_a = factories.RegulationFactory()
program_a_id = program_a.id
program_b_id = program_b.id
regulation_a_id = regulation_a.id
self.gen.generate_relationship(regulation_a, program_a)
rel_1 = all_models.Relationship.query.filter_by(
source_type="Regulation",
source_id=regulation_a_id,
destination_type="Program",
destination_id=program_b_id
).first()
rel_2 = all_models.Relationship.query.filter_by(
source_type="Program",
source_id=program_b_id,
destination_type="Regulation",
destination_id=regulation_a_id
).first()
relationship = rel_1 or rel_2
revision = all_models.Revision.query.filter_by(
resource_type="Relationship",
resource_id=relationship.id
).first()
automapping = revision.content["automapping"]
nodes = {automapping["source_type"]: automapping["source_id"],
automapping["destination_type"]: automapping["destination_id"]}
self.assertTrue(program_a_id == nodes["Program"])
self.assertTrue(regulation_a_id == nodes["Regulation"])
def test_empty_revision(self):
"""Test revision is marked as empty if no changes present."""
with factories.single_commit():
audit = factories.AuditFactory()
response = self.api_helper.put(audit, {})
self.assert200(response)
self.refresh_object(audit)
revisions = _get_revisions(audit)
self.assertEqual(len(revisions), 2)
self.assertFalse(revisions[0].is_empty)
self.assertTrue(revisions[1].is_empty)
| 34.811252
| 79
| 0.62734
|
c8762f6bd68e5de1617f5211445a0dbc1b059e1d
| 29,017
|
py
|
Python
|
msp430/asm/ld.py
|
pvrs12/python-msp430-tools
|
bd9b1d55b43f884368eaef9dc537330882058fd9
|
[
"BSD-3-Clause"
] | 15
|
2017-10-18T01:56:40.000Z
|
2022-02-28T04:33:01.000Z
|
msp430/asm/ld.py
|
pvrs12/python-msp430-tools
|
bd9b1d55b43f884368eaef9dc537330882058fd9
|
[
"BSD-3-Clause"
] | 3
|
2017-07-24T13:41:04.000Z
|
2019-11-08T19:13:54.000Z
|
msp430/asm/ld.py
|
pvrs12/python-msp430-tools
|
bd9b1d55b43f884368eaef9dc537330882058fd9
|
[
"BSD-3-Clause"
] | 8
|
2017-10-11T14:05:29.000Z
|
2022-03-22T02:13:01.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of https://github.com/zsquareplusc/python-msp430-tools
# (C) 2001-2010 Chris Liechti <cliechti@gmx.net>
#
# SPDX-License-Identifier: BSD-3-Clause
"""\
Linker for TI MSP430.
Inputs are '.o4' files from 'as.py'
"""
import sys
import codecs
from msp430.asm import mcu_definition_parser
from msp430.asm import rpn, peripherals
from msp430.asm.cpp import hexlify
class LinkError(rpn.RPNError):
"""\
Exception class for errors that can occur during linking. The exception can
be annotated with the location in the source file.
"""
# a segment has a valid range of addresses that is set by
# start_address to end_address (excluding!)
# if the data is complete, a call to shrink_to_fit() adjusts the
# start and end addresses for a final positioning of the data
class Segment(object):
"""\
Store data bytes along with information about a segment. A segment can
also contain subsegments.
"""
def __init__(self, name, start_address=None, end_address=None, align=True, programmable=False, little_endian=True, parent=None, mirror_of=None):
self.name = name
self.start_address = start_address
self.end_address = end_address
self.align = align # place data on even addresses
self.data = []
self.programmable = programmable
self.little_endian = little_endian
self.parent = parent
self.mirror_of = mirror_of
self.subsegments = []
self.read_only = False
self.order = 0
def __getitem__(self, segment_name):
"""Easy access to subsegment by name."""
for segment in self.subsegments:
if segment.name == segment_name:
return segment
raise KeyError('no subsegment with name {} found'.format(segment_name))
def sort_subsegments(self, by_address=False):
"""\
Sort list of subsegments either by order of definition or by order of
start address.
"""
if by_address:
self.subsegments.sort()
else:
self.subsegments.sort(key=lambda x: x.order)
for segment in self.subsegments:
segment.sort_subsegments(by_address)
def clear(self):
"""Clear data. Recursively with all subsegments."""
del self.data[:]
for segment in self.subsegments:
segment.clear()
def __len__(self):
"""Get the number of bytes contained in the segment."""
return len(self.data)
def __lt__(self, other):
"""Compare function that allows to sort segments by their start_address."""
if self.start_address is None: return False
if other.start_address is None: return True
return self.start_address < other.start_address
#~ def __cmp__(self, other):
#~ """Compare function that allows to sort segments by their start_address."""
#~ return cmp(self.start_address, other.start_address)
def __repr__(self):
return 'Segment[{}, {}, {}{}{}]'.format(
self.name,
self.start_address is not None and '0x{:04x}'.format(self.start_address) or None,
self.end_address is not None and '0x{:04x}'.format(self.end_address) or None,
self.programmable and ', programmable=True' or '',
self.little_endian and ', little_endian=True' or '')
def print_tree(self, output, indent='', hide_empty=False):
"""Output segment and subsegments."""
if None not in (self.end_address, self.start_address):
size = self.end_address - self.start_address
if size:
start = '0x{:04x}'.format(self.start_address)
end = '0x{:04x}'.format(self.end_address - 1)
else:
start = end = 'n/a'
size_str = '{} B'.format(size)
else:
start = end = ''
size = 0
size_str = ''
if not hide_empty or size:
output.write('{}{:<24}{}{:>8}-{:<8} {:>8} {}{}{}{}\n'.format(
indent,
self.name,
' ' * (8 - len(indent)),
start,
end,
size_str,
self.little_endian and 'LE' or 'BE',
self.programmable and ', downloaded' or '',
self.mirror_of and (', mirror of "{}"'.format(self.mirror_of)) or '',
self.read_only and ', read_only' or '',
))
for segment in self.subsegments:
segment.print_tree(output, indent=indent + ' ', hide_empty=hide_empty)
def shrink_to_fit(self, address=None):
"""modify start- and end_address of segment to fit the data that it contains"""
#~ if self.read_only: return
if address is None:
address = self.start_address
else:
self.start_address = address
# pad own data
if self.align and len(self.data) & 1:
self.data.append(0xff) # pad to align data on even addresses
# reserve space for own data
if address is not None:
address += len(self.data)
# assign areas for each subsegment
for segment in self.subsegments:
segment.shrink_to_fit(address)
if address is not None:
address += len(segment.data)
# save true end address, but not before checking if data fits in segment
if None not in (address, self.end_address) and address > self.end_address:
raise LinkError('Segment {} contains too much data (total {} bytes, {} bytes in excess)'.format(
self.name, len(self.data), address - self.end_address))
if address is not None:
self.end_address = address
def write_8bit(self, value):
"""Write one byte"""
self.data.append(value & 0xff)
def write_16bit(self, value):
"""Write two bytes. Order in memory depends on endianness of segment"""
if self.little_endian:
self.data.append(value & 0xff)
self.data.append((value >> 8) & 0xff)
else:
self.data.append((value >> 8) & 0xff)
self.data.append(value & 0xff)
def write_32bit(self, value):
"""Write four bytes. Order in memory depends on endianness of segment"""
if self.little_endian:
self.data.append(value & 0xff)
self.data.append((value >> 8) & 0xff)
self.data.append((value >> 16) & 0xff)
self.data.append((value >> 24) & 0xff)
else:
self.data.append((value >> 24) & 0xff)
self.data.append((value >> 16) & 0xff)
self.data.append((value >> 8) & 0xff)
self.data.append(value & 0xff)
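# Illustrative (hypothetical) use of the Segment API above; the names and
# addresses are made up, only methods defined in this class are used:
#   text = Segment('.text', start_address=0xf800, end_address=0x10000)
#   text.write_16bit(0x4031)      # appends 0x31, 0x40 (little endian by default)
#   text.shrink_to_fit()          # pulls end_address in to 0xf802
#   assert len(text) == 2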
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class Linker(rpn.RPN):
"""\
The linker processes a set of instructions and builds a memory image.
The current address is maintained (PC) and can be used in expressions.
It supports labels which can be set to an arbitrary value or the current
address. The data handling instructions can calculate with the labels'
values (and PC).
The file format that the linker reads has Forth like syntax. The rpn module
is used to read and process it. Linker specific instructions are
implemented in this class.
"""
def __init__(self, instructions=[]):
rpn.RPN.__init__(self)
# separate name space for symbols from the data
self.labels = {}
# weak aliases are checked if a label is undefined.
self.weak_alias = {}
# the linking will require multiple passes, a flag controls
# if errors are fatal or ignored
self.errors_are_fatal = True
# to check labels for duplicate definition
self.check_labels = None
# The link instructions
self.instructions = instructions
# information about the input
self.source_filename = '<unknown>'
self.source_line = None
self.source_column = None
# internal states
self.current_segment = None
self.address = 0
self.segments = {}
def linker_error(self, message):
"""\
Raise a LinkError. This function generates an exception annotated with
information about the source (filename, lineno, etc.).
"""
raise LinkError(message, self.source_filename, self.source_line, self.source_column)
@rpn.word('RESET')
def word_reset(self, rpn):
"""\
Reset state. This can be used between files, so that every file starts
with the same preconditions (such as no segment selected).
"""
self.current_segment = None
self.source_filename = '<unknown>'
self.source_line = None
self.source_column = None
@rpn.word('SEGMENT')
def word_SEGMENT(self, rpn):
"""\
Select a different segment to put data into. The segment name must be
known. The location counter is set to append to any existing data in
the segment. Example::
SEGMENT .vectors
"""
name = rpn.next_word()
try:
segment = self.segments[name]
except KeyError:
self.linker_error('There is no segment named {}'.format(name))
self.current_segment = segment
if segment.start_address is not None:
address = segment.start_address
else:
# this happens in the first pass
address = 0
self.address = address + len(segment.data)
@rpn.word('FILENAME')
def word_FILENAME(self, rpn):
"""\
Store source filename for error messages. This also clears all local
symbols. Example::
FILENAME source.S
"""
self.source_filename = self.next_word()
@rpn.word('LINE')
def word_LINE(self, rpn):
"""\
Store the source line number for error messages.
Example::
5 LINE
"""
self.source_line = self.pop()
@rpn.word('COLUMN')
def word_COLUMN(self, rpn):
"""\
Store the source column for error messages.
Example::
10 COLUMN
"""
self.source_column = self.pop()
@rpn.word('8BIT')
def word_8BIT(self, rpn):
"""\
Store a byte (8 bits) from the stack in the current segment. The value
is masked to 8 bits. Example::
0x12 8BIT
"""
if self.current_segment is None:
self.linker_error('No segment selected (use .text, .section etc.)')
self.current_segment.write_8bit(int(self.pop()))
self.address += 1
@rpn.word('16BIT')
def word_16BIT(self, rpn):
"""\
Store a word (16 bits) from the stack in the current segment. The value
is masked to 16 bits. Example::
0x1234 16BIT
"""
if self.current_segment is None:
self.linker_error('No segment selected (use .text, .section etc.)')
self.current_segment.write_16bit(int(self.pop()))
self.address += 2
@rpn.word('32BIT')
def word_32BIT(self, rpn):
"""\
Store an integer (32 bits) from the stack in the current segment. The value
is masked to 32 bits. Example::
0x12345678 32BIT
"""
if self.current_segment is None:
self.linker_error('No segment selected (use .text, .section etc.)')
self.current_segment.write_32bit(int(self.pop()))
self.address += 4
@rpn.word('RESERVE')
def word_RESERVE(self, rpn):
"""\
Reserve space in the current segment. Length in bytes is taken from
the stack.
"""
if self.current_segment is None:
self.linker_error('No segment selected (use .text, .section etc.)')
count = self.pop()
for i in range(count):
self.current_segment.data.append(None)
self.address += count
@rpn.word('ALIGN')
def word_ALIGN(self, rpn):
"""Make location counter (PC) even."""
if self.current_segment is None:
self.linker_error('No segment selected (use .text, .section etc.)')
exponent = self.pop()
if exponent > 0:
mask = (1 << exponent) - 1
while self.address & mask:
self.current_segment.data.append(None)
self.address += 1
@rpn.word('PC')
def word_PC(self, rpn):
"""Put the value of the location counter on the stack."""
self.push(self.address)
@rpn.word('CONSTANT-SYMBOL')
def _constant_symbol(self, rpn):
"""Create symbol and assign to it the value from the stack. Example: ``1 CONSTANT-SYMBOL somelabel``"""
name = self.name_symbol(self.next_word())
value = self.pop()
if self.check_labels is not None:
if name in self.check_labels and self.check_labels[name] != value:
self.linker_error('redefinition of symbol {!r} with different value (previous: {!r}, new: {!r})'.format(
name,
self.labels[name],
value))
self.check_labels[name] = value
self.labels[name] = value
@rpn.word('WEAK-ALIAS')
def _weak_alias(self, rpn):
"""\
Assign an alias for another symbol. The alias is used when the symbol is not defined.
Example: ``WEAK-ALIAS __vector_0 _unused_vector`` here, if
``__vector_0`` is not defined, it will point to ``_unused_vector``.
"""
name = self.name_symbol(self.next_word())
alias = self.name_symbol(self.next_word())
if name in self.weak_alias and self.weak_alias[name] != alias:
self.linker_error('Weak alias {!r} redefined (old value: {!r})'.format(name, self.weak_alias[name]))
self.weak_alias[name] = alias
@rpn.word('CREATE-SYMBOL')
def _create_symbol(self, rpn):
"""Mark current location with symbol. Example: ``CREATE-SYMBOL somelabel``"""
name = self.name_symbol(self.next_word())
#~ # this simple check does not work as we're doing multiple passes
if self.check_labels is not None:
if name in self.check_labels:
self.linker_error('Label {!r} redefined (old value: {!r})'.format(name, self.labels[name]))
self.check_labels[name] = self.address
self.labels[name] = self.address
@rpn.word('GET-SYMBOL')
def _get_symbol(self, rpn):
"""Get a symbol and put its value on the stack. Example: ``GET-SYMBOL somelabel``"""
name = self.name_symbol(self.next_word())
# check if there is an alias as long as its not already found in labels
if name in self.weak_alias and name not in self.labels:
name = self.weak_alias[name]
try:
value = self.labels[name]
except KeyError:
# otherwise it is undefined
if self.errors_are_fatal:
self.linker_error('Label {!r} is not defined'.format(name))
else:
value = 0
self.push(value)
# XXX this should be separate as it is machine dependent (while the rest of
# the linker is not). The calculation is not the problem, the error
# messages are - there are currently no instructions for that
@rpn.word('JMP')
def word_JMP(self, rpn):
"""\
MSP430 jump instruction (insns dist). Takes the offset and instruction
from the stack, calculates the final opcode and stores it.
Example::
0x2000 GET-SYMBOL somelabel PC - 2 - JMP
"""
distance = self.pop()
instruction = self.pop()
if distance & 1:
if self.errors_are_fatal:
self.linker_error('Jump distance must be of even length (distance {})'.format(distance))
if distance < -512 * 2 or distance > 511 * 2:
if self.errors_are_fatal:
self.linker_error('Jump out of range (distance {})'.format(distance))
else:
instruction |= 0x3ff & (distance // 2)
self.current_segment.write_16bit(instruction)
self.address += 2
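# Worked example of the encoding above, assuming 0x3c00 as the base opcode of an
# unconditional MSP430 JMP (values are illustrative):
#   distance = +4  ->  0x3ff & (4 // 2) == 0x002  ->  emitted word 0x3c02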
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def segments_from_definition(self, segment_definitions):
"""\
Initialize flat list of segments and the top level segment given a
dictionary with segment descriptions (input from
mcu_definition_parser).
"""
self.top_segment = Segment('<default segment>')
symbols = []
# step 1: create a flat list of segments
for name, definition in segment_definitions.items():
# skip special entries
if name.startswith('__'):
continue
if definition['__type__'] == 'segment':
# create a segment
start, end = definition.get('start'), definition.get('end')
if end is not None:
end += 1
self.segments[name] = Segment(
name,
start,
end,
programmable='programmable' in definition['flags'],
parent=definition.get('in'),
mirror_of=definition.get('mirror'),
)
self.segments[name].order = definition.get('order')
self.segments[name].read_only = 'read-only' in definition['flags']
elif definition['__type__'] == 'symbol':
symbols.append(definition)
else:
self.linker_error('unknown record type in memory map: {!r}'.format(definition['__type__']))
# step 2: create a hierarchical tree of segments
for segment in self.segments.values():
if segment.parent is not None:
self.segments[segment.parent].subsegments.append(segment)
else:
self.top_segment.subsegments.append(segment)
self.top_segment.sort_subsegments()
self.segments['default'] = self.top_segment
# create calculated symbols
for definition in symbols:
name = definition['__name__']
if 'address' in definition:
self.labels[name] = definition['address']
else:
segment = self.segments[definition['in']]
location = definition.get('location', 'start')
if location == 'start':
self.labels[name] = segment.start_address
elif location == 'end':
self.labels[name] = segment.end_address
else:
self.linker_error('invalid location {!r} for symbol {!r}'.format(location, name))
def update_mirrored_segments(self):
"""In all mirrored segments, update the copied data."""
for segment in self.segments.values():
if segment.mirror_of is not None:
segment.data = list(self.segments[segment.mirror_of].data)
def name_symbol(self, name):
"""Name mangling for local symbols, otherwise return original name."""
if name[0] == '.':
name = '.{}{}'.format(hexlify(self.source_filename), name[1:])
return name
def clear_local_symbols(self):
"""Forget about local symbols (the ones starting with a dot)"""
for name in list(self.labels): # iterate over a copy
if name[0] == '.':
del self.labels[name]
# helper functions for 3 pass linking
def pass_one(self):
"""\
Shortcut to run the 1st pass of 3 stage linking.
Segment sizes and positioning are determined.
"""
self.errors_are_fatal = False # 1st two runs are used to find out data positioning only
self.top_segment.clear()
self.interpret_sequence(self.instructions)
# update segment start and end_addresses, handle alignment
self.update_mirrored_segments()
self.top_segment.shrink_to_fit()
def pass_two(self):
"""\
Shortcut to run the 2nd pass of 3 stage linking.
This run is used to find all labels at their final locations.
"""
self.top_segment.clear()
self.check_labels = {}
self.interpret_sequence(self.instructions)
self.check_labels = None
# create automatic labels for all segments (start/end)
for segment in self.segments.values():
name = segment.name.replace('.', '') # remove dots in names
# create labels if addresses are defined
if segment.start_address is not None:
self.labels['_{}_start'.format(name)] = segment.start_address
if segment.end_address is not None:
self.labels['_{}_end'.format(name)] = segment.end_address
def pass_three(self):
"""\
Shortcut to run the 3rd pass of 3 stage linking.
This run uses all the labels and creates the final contents.
"""
self.errors_are_fatal = True
self.top_segment.clear()
self.interpret_sequence(self.instructions)
self.update_mirrored_segments()
self.clear_local_symbols()
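# The expected calling sequence for the three passes (see main() below) is roughly:
#   linker = Linker(instructions)
#   linker.segments_from_definition(segment_definitions)
#   linker.pass_one(); linker.pass_two(); linker.pass_three()
#   ti_text = to_TI_Text(linker.segments)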
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def substitute_none(data):
"""Ensure that stream does not contain None"""
for value in data:
if value is None:
yield 0
else:
yield value
def to_addressed_byte_stream(segments):
"""\
Create a stream of (address, byte) tuples from the list of segments. The
output is sorted by ascending address.
"""
for segment in sorted(segments.values()):
if segment.data and segment.programmable:
for n, byte in enumerate(substitute_none(segment.data)):
yield (segment.start_address + n, byte)
def to_TI_Text(segments):
"""\
Return a string containing TI-Text, given a dictionary with segments.
"""
out = []
row_count = 0
last_address = None
for address, byte in to_addressed_byte_stream(segments):
# need to start a new block if address jumping
if address - 1 != last_address or address == 0x10000:
if out and row_count != 0: # except for the 1st one
out.append('\n')
out.append('@{:04x}\n'.format(address))
row_count = 0
last_address = address
# output byte
out.append('{:02x}'.format(byte))
row_count += 1
# after 16 bytes (a row) insert a newline
if row_count == 16:
out.append('\n')
row_count = 0
else:
out.append(' ')
if row_count != 0:
out.append('\n')
out.append('q\n')
return ''.join(out)
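# The TI-Text produced above looks roughly like this (address and bytes are
# invented for illustration): an '@' address line, rows of up to 16 hex bytes,
# terminated by a single 'q':
#   @f800
#   31 40 00 02 b2 40 80 5a 20 01
#   q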
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def main():
import logging
import argparse
logging.basicConfig()
parser = argparse.ArgumentParser(description="""\
If no input files are specified, data is read from stdin.
Output is in "TI-Text" format.""")
group = parser.add_argument_group('Input')
group.add_argument(
'INPUT',
type=argparse.FileType('r'),
nargs='+',
default=['-'])
group.add_argument(
'-T', '--segmentfile',
help='linker definition file',
metavar='FILE',
default=None)
group.add_argument(
'-m', '--mcu',
help='name of the MCU (used to load memory map)',
metavar='MCU',
default='MSP430F1121')
group = parser.add_argument_group('Output')
group.add_argument(
'-o', '--outfile',
type=argparse.FileType('w'),
help='name of the destination file',
default='-',
metavar='FILE')
group.add_argument(
'--mapfile',
type=argparse.FileType('w'),
help='write map file',
metavar='FILE')
parser.add_argument(
'-v', '--verbose',
action='count',
dest='verbose',
default=0,
help='print status messages, can be given multiple times to increase messages')
parser.add_argument(
'--debug',
action='store_true',
default=False,
help='print debug messages')
parser.add_argument(
'--symbols',
help='read register names for given architecture (e.g. F1xx)',
metavar='NAME')
args = parser.parse_args()
#~ print(args)
if args.debug:
logging.getLogger().setLevel(logging.DEBUG)
elif args.verbose:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARN)
if sys.version_info < (3, 0):
# XXX make stderr unicode capable
sys.stderr = codecs.getwriter("utf-8")(sys.stderr)
instructions = []
for fileobj in args.INPUT:
if args.verbose > 2:
sys.stderr.write(u'reading file "{}"...\n'.format(fileobj.name))
instructions.append('reset')
instructions.extend(['filename', fileobj.name])
try:
instructions.extend(rpn.words_in_file(fileobj.name, fileobj=fileobj))
except IOError as e:
sys.stderr.write('ld: {}: File not found\n'.format(fileobj.name))
sys.exit(1)
linker = Linker(instructions)
# load symbols
if args.symbols is not None:
all_peripherals = peripherals.load_internal(args.symbols)
for peripheral in all_peripherals.peripherals.values():
for reg_name, register in peripheral.items():
if reg_name.startswith('__'):
continue
if '__address__' in register:
linker.labels[register['__name__']] = register['__address__']
for value, name in register['__bits__'].items():
linker.labels[name] = value
for value, name in register['__values__'].items():
linker.labels[name] = value
if '__values__' in peripheral:
for value, name in peripheral['__values__'].items():
linker.labels[name] = value
# ========= load MCU definition =========
if args.verbose > 1:
sys.stderr.write("Step 1: load segment descriptions.\n")
# load the file and get the desired MCU description
try:
if args.segmentfile:
mem_maps = mcu_definition_parser.load_from_file(args.segmentfile)
else:
mem_maps = mcu_definition_parser.load_internal()
args.mcu = args.mcu.upper() # XXX hack
segment_definitions = mcu_definition_parser.expand_definition(mem_maps, args.mcu)
except Exception as msg:
sys.stderr.write('ERROR loading segment descriptions: {}\n'.format(msg))
raise
sys.exit(1)
linker.segments_from_definition(segment_definitions)
if args.verbose > 2:
sys.stderr.write('Segments available:\n')
linker.top_segment.print_tree(sys.stderr)
# ========= Do the actual linking =========
try:
if args.verbose > 1:
sys.stderr.write("Step 2: generate machine code\n")
sys.stderr.write(" Pass 1: determinate segment sizes.\n")
linker.pass_one()
if args.verbose > 1:
sys.stderr.write(" Pass 2: calculate labels.\n")
linker.pass_two()
if args.verbose > 1:
sys.stderr.write(" Pass 3: final output.\n")
linker.pass_three()
except LinkError as e:
#~ if e.lineno is not None else '?'
sys.stderr.write(u'{e.filename}:{e.lineno}: {e}\n'.format(e=e))
sys.exit(1)
except rpn.RPNError as e:
sys.stderr.write(u'{e.filename}:{e.lineno}: {e}\n'.format(e=e))
if args.debug and e.text:
sys.stderr.write(u'{e.filename}:{e.lineno}: input line: {e.text!r}\n'.format(e=e))
if args.debug:
raise
sys.exit(1)
# ========= Output final result =========
if args.verbose > 1:
sys.stderr.write('Step 3: write machine code to file.\n')
args.outfile.write(to_TI_Text(linker.segments))
if args.verbose > 1:
sys.stderr.write('Labels:\n')
labels = sorted(linker.labels.keys())
for i in labels:
sys.stderr.write(u' {:<24} = 0x{:08x}\n'.format(i, linker.labels[i]))
if args.mapfile:
labels = [(v, k) for k, v in linker.labels.items()]
labels.sort()
for address, label in labels:
args.mapfile.write(u'0x{:04x} {}\n'.format(address, label))
args.mapfile.close()
if args.verbose:
sys.stderr.write('Segments used:\n')
linker.top_segment.sort_subsegments(by_address=True)
linker.top_segment.print_tree(sys.stderr, hide_empty=True)
if __name__ == '__main__':
main()
| 36.090796
| 148
| 0.582279
|
7a954ea682febfa3e53bcb29981f0bf5ddf6cff8
| 3,884
|
py
|
Python
|
tools/sty2html.py
|
AnjaneyuluBatta505/epydoc
|
e074483d519912218b1fb1d4eacb492076a1ed73
|
[
"MIT"
] | 1
|
2022-01-29T08:19:34.000Z
|
2022-01-29T08:19:34.000Z
|
tools/sty2html.py
|
AnjaneyuluBatta505/epydoc
|
e074483d519912218b1fb1d4eacb492076a1ed73
|
[
"MIT"
] | null | null | null |
tools/sty2html.py
|
AnjaneyuluBatta505/epydoc
|
e074483d519912218b1fb1d4eacb492076a1ed73
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Convert epydoc's LaTeX sty files to HTML
from __future__ import absolute_import
from __future__ import print_function
from epydoc.docwriter.latex_sty import STYLESHEETS
import re, sys, os.path
TEMPLATE = """\
<?xml version="1.0" encoding="ascii" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=ascii" />
<title>%(title)s</title>
<link rel="stylesheet" href="epydoc.css" type="text/css" />
</head>
<body>
<div class="body">
<h1 class="title">%(title)s</h1>
<pre>%(body)s</pre>
</div>
<table width="100%%" class="navbox" cellpadding="1" cellspacing="0">
<tr>
<a class="nav" href="index.html">
<td align="center" width="20%%" class="nav">
<a class="nav" href="index.html">
Home</a></td></a>
<a class="nav" href="installing.html">
<td align="center" width="20%%" class="nav">
<a class="nav" href="installing.html">
Installing Epydoc</a></td></a>
<a class="nav" href="using.html">
<td align="center" width="20%%" class="nav">
<a class="nav" href="using.html">
Using Epydoc</a></td></a>
<a class="nav" href="epytext.html">
<td align="center" width="20%%" class="nav">
<a class="nav" href="epytext.html">
Epytext</a></td></a>
<td align="center" width="20%%" class="nav">
<A href="http://sourceforge.net/projects/epydoc">
<IMG src="sflogo.png"
width="88" height="26" border="0" alt="SourceForge"
align="top"/></A></td>
</tr>
</table>
</body>
</html>
"""
COLOR = {'def': '#705000',
'defname': '#000080',
'comment': '#005080',
'command': '#705000',
'escape': '#ffffff',
}
COLORIZE_RE = re.compile('|'.join(['(%s)' % s for s in [
r'(?P<def>(re)?new(command|environment)){(?P<defname>[^}]+)}',
r'(?P<command>\\\w+)',
r'(?P<escape>\\.)',
r'(?P<comment>%.*)',
]]))
def subfunc(m):
if m.group('def') is not None:
return ('<code class="%s">%s</code>{<code class="%s">%s</code>}' %
('keyword', m.group('def'), 'function', m.group('defname')))
if m.group('command') is not None:
return '<code class="%s">%s</code>' % ('keyword', m.group('command'))
if m.group('escape') is not None:
return '<code class="%s">%s</code>' % ('escape', m.group('escape'))
if m.group('comment') is not None:
return '<code class="%s">%s</code>' % ('comment', m.group('comment'))
assert False, 'expected to match some group'
def colorize(s, title):
s = s.replace('&', '&')
s = s.replace('<', '<')
s = s.replace('>', '>')
body = COLORIZE_RE.sub(subfunc, s)
return TEMPLATE % dict(title=title, body=body)
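# Illustrative behaviour of the colorizer above (input string is made up):
#   COLORIZE_RE.sub(subfunc, r'\textbf % note')
#   -> '<code class="keyword">\textbf</code> <code class="comment">% note</code>'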
def main():
if len(sys.argv) != 2:
print('Usage: %s <output-dir>' % sys.argv[0])
sys.exit(-1)
# hackish to hardcode this; oh well.
sty_list = open('doc/epydoc-style-list.txt', 'w')
sty_list.write('.. This file is automatically generated by %s\n\n' %
sys.argv[0])
output_dir = sys.argv[1]
for (name, sheet) in sorted(STYLESHEETS.items()):
if name == 'default': pass
filename = 'epydoc-sty-%s.html' % name
title = 'LaTeX Style File: epydoc-%s.sty' % name
out = open(os.path.join(output_dir, filename), 'wb')
out.write(colorize(sheet, title))
out.close()
sty_list.write('- `%s <%s>`__\n' % (title, filename))
sty_list.close()
# hackish to hardcode this; oh well.
demo = open('doc/epydoc-latex-demo.tex').read()
out = open(os.path.join(output_dir, 'epydoc-latex-demo.html'), 'wb')
out.write(colorize(demo, 'Epydoc LaTeX Style Reference'))
out.close()
if __name__ == '__main__':
main()
| 32.638655
| 77
| 0.583419
|
d660973d14033824e9c34cf9c05e06fe7f388dbc
| 960
|
py
|
Python
|
scripts/file_generator.py
|
qian-long/TileDB-multinode
|
ba2a38b2cc6169935c73b76af8c53e8544c11300
|
[
"MIT"
] | null | null | null |
scripts/file_generator.py
|
qian-long/TileDB-multinode
|
ba2a38b2cc6169935c73b76af8c53e8544c11300
|
[
"MIT"
] | null | null | null |
scripts/file_generator.py
|
qian-long/TileDB-multinode
|
ba2a38b2cc6169935c73b76af8c53e8544c11300
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python
import random
import sys
random.seed(0)
BOTTOM = 0
TOP = 1000000
COEF = 1000.0/27.5
def size_to_rows(size): #size in megabytes
size = size * 1000 # size in kilobytes
return int(size * COEF)
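# Worked example: COEF assumes a CSV row of four integers averages about 27.5
# bytes, so size_to_rows(1) == int(1 * 1000 * 1000.0 / 27.5) == 36363 rows per MB.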
def gen_row():
return (random.randint(BOTTOM, TOP), random.randint(BOTTOM, TOP), random.randint(BOTTOM, TOP), random.randint(BOTTOM, TOP))
def row_to_string(row):
return str(row[0]) + "," +str(row[1]) + "," +str(row[2]) + "," +str(row[3]) + "\n"
def gen_file(myrank, num_node, output_size):
filename = str(output_size) + "MB.csv"
num_rows = size_to_rows(output_size)
with open("../data"+"/"+filename, 'w') as myfile:
for i in xrange(num_rows*num_node):
row = gen_row()
if (i % num_node) == myrank-1:
myfile.write(row_to_string(row))
print "Done making file", filename
if __name__ == "__main__":
gen_file(int(sys.argv[1]), int(sys.argv[2]), int(sys.argv[3]))
| 27.428571
| 127
| 0.626042
|
6b5916b84517c7089f225c3ff7d538a650388e34
| 783
|
py
|
Python
|
backend/authentication/views.py
|
r-kaminski/innovativeproject-inventory-of-supplies
|
b3352cb6432ec2e7c5b83b648231808eabf56734
|
[
"MIT"
] | 1
|
2019-05-05T17:50:37.000Z
|
2019-05-05T17:50:37.000Z
|
backend/authentication/views.py
|
r-kaminski/innovativeproject-inventory-of-supplies
|
b3352cb6432ec2e7c5b83b648231808eabf56734
|
[
"MIT"
] | 117
|
2019-03-05T20:34:55.000Z
|
2022-03-11T23:45:54.000Z
|
backend/authentication/views.py
|
r-kaminski/innovativeproject-inventory-of-supplies
|
b3352cb6432ec2e7c5b83b648231808eabf56734
|
[
"MIT"
] | 5
|
2019-08-04T15:36:43.000Z
|
2021-06-16T20:49:49.000Z
|
from .serializers import NameRegisterSerializer
from rest_auth.registration.views import RegisterView
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.views import APIView
class NameRegisterView(RegisterView):
serializer_class = NameRegisterSerializer
class LogoutView(APIView):
"""
Used to log out when using JWT auth.
Removes the JWT_TOKEN cookie.
"""
authentication_classes = (JSONWebTokenAuthentication,)
permission_classes = (permissions.IsAuthenticated,)
def post(self, request, format=None):
response = Response("Logged out successfully")
response.delete_cookie('JWT_TOKEN')
return response
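# A hypothetical urls.py entry for the views above (route names are made up):
#   path('api/register/', NameRegisterView.as_view(), name='register')
#   path('api/logout/', LogoutView.as_view(), name='logout')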
| 30.115385
| 72
| 0.785441
|
86e481fc35e56fe313328088f48a704998ba2e66
| 207
|
py
|
Python
|
Utest_PublishStatus.py
|
nerusm/srv_mod
|
94f55b34f7210e87951099709adfa0a1aa569430
|
[
"Apache-2.0"
] | null | null | null |
Utest_PublishStatus.py
|
nerusm/srv_mod
|
94f55b34f7210e87951099709adfa0a1aa569430
|
[
"Apache-2.0"
] | null | null | null |
Utest_PublishStatus.py
|
nerusm/srv_mod
|
94f55b34f7210e87951099709adfa0a1aa569430
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'suren'
from config import Config
from publish_status import SendStatus
config = Config()
send_status = SendStatus(config=config, status=True, user_name='Suren')
send_status.publish_message()
| 23
| 71
| 0.801932
|
141eb952c7f732cdeb3a4489bad0f3f92e6c77ca
| 25,117
|
py
|
Python
|
tests/http_validator.py
|
webberian/webperf_core
|
718ae7332f7e4f363ac74958f9b778d1c87290b3
|
[
"MIT"
] | null | null | null |
tests/http_validator.py
|
webberian/webperf_core
|
718ae7332f7e4f363ac74958f9b778d1c87290b3
|
[
"MIT"
] | null | null | null |
tests/http_validator.py
|
webberian/webperf_core
|
718ae7332f7e4f363ac74958f9b778d1c87290b3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import http3
import h2
import h11
import dns.resolver
import urllib.parse
import textwrap
import ipaddress
import hashlib
import datetime
import binascii
import base64
import sys
import socket
import ssl
import json
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.util import ssl_
# https://docs.python.org/3/library/urllib.parse.html
import urllib
from urllib.parse import urlparse
import uuid
import re
from bs4 import BeautifulSoup
import config
from tests.utils import httpRequestGetContent, has_redirect
import gettext
_ = gettext.gettext
# DEFAULTS
request_timeout = config.http_request_timeout
useragent = config.useragent
def run_test(langCode, url):
"""
Only works on a domain level. Returns a tuple with a decimal grade and a review string.
"""
points = 0.0
review = ''
result_dict = {}
language = gettext.translation(
'http_validator', localedir='locales', languages=[langCode])
language.install()
_ = language.gettext
print(_('TEXT_RUNNING_TEST'))
nof_checks = 0
check_url = True
while check_url and nof_checks < 10:
review += _('TEXT_REVIEW_RESULT_FOR').format(url)
url_result = validate_url(url, _)
points += url_result[0]
review += url_result[1]
redirect_result = has_redirect(url)
check_url = redirect_result[0]
url = redirect_result[1]
nof_checks += 1
if nof_checks > 1:
review += _('TEXT_REVIEW_SCORE_IS_DIVIDED').format(
nof_checks)
points = points / nof_checks
if len(review) == 0:
review = _('TEXT_REVIEW_NO_REMARKS')
if points < 1.0:
points = 1.0
return (points, review, result_dict)
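# A typical (hypothetical) invocation of the test above:
#   points, review, result_dict = run_test('en', 'https://example.com')
#   # points is a float clamped to >= 1.0, review is a human readable summary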
def validate_url(url, _):
points = 0.0
review = ''
o = urllib.parse.urlparse(url)
hostname = o.hostname
result = http_to_https_score(url, _)
points += result[0]
review += result[1]
result = tls_version_score(url, _)
points += result[0]
review += _('TEXT_REVIEW_TLS_VERSION')
review += result[1]
result = ip_version_score(hostname, _)
points += result[0]
review += _('TEXT_REVIEW_IP_VERSION')
review += result[1]
result = http_version_score(hostname, url, _)
points += result[0]
review += _('TEXT_REVIEW_HTTP_VERSION')
review += result[1]
return (points, review)
def http_to_https_score(url, _):
http_url = ''
o = urllib.parse.urlparse(url)
if (o.scheme == 'https'):
http_url = url.replace('https://', 'http://')
else:
http_url = url
redirect_result = has_redirect(http_url)
result_url = ''
if (redirect_result[0]):
result_url = redirect_result[1]
else:
result_url = http_url
if result_url is None:
return (0.0, _('TEXT_REVIEW_HTTP_TO_HTTP_REDIRECT_UNABLE_TO_VERIFY'))
result_url_o = urllib.parse.urlparse(result_url)
if (result_url_o.scheme == 'http'):
return (0.0, _('TEXT_REVIEW_HTTP_TO_HTTP_REDIRECT_NO_REDIRECT'))
else:
return (1.0, _('TEXT_REVIEW_HTTP_TO_HTTP_REDIRECT_REDIRECTED'))
def dns_score(hostname, _):
result = dns_lookup('_esni.' + hostname, "TXT")
if result[0]:
return (1.0, _('TEXT_REVIEW_DNS_HAS_ESNI'))
return (0.0, _('TEXT_REVIEW_DNS_NO_ESNI'))
def ip_version_score(hostname, _):
ip4_result = dns_lookup(hostname, "A")
ip6_result = dns_lookup(hostname, "AAAA")
if ip4_result[0] and ip6_result[0]:
return (1.0, _('TEXT_REVIEW_IP_VERSION_BOTH_IPV4_AND_IPV6'))
if ip6_result[0]:
return (0.5, _('TEXT_REVIEW_IP_VERSION_IPV6'))
if ip4_result[0]:
return (0.5, _('TEXT_REVIEW_IP_VERSION_IPV4'))
return (0.0, _('TEXT_REVIEW_IP_VERSION_UNABLE_TO_VERIFY'))
def protocol_version_score(url, protocol_version, _):
points = 0.0
review = ''
result_not_validated = (False, '')
result_validated = (False, '')
protocol_rule = False
protocol_name = ''
protocol_translate_name = ''
protocol_is_secure = False
try:
if protocol_version == ssl.PROTOCOL_TLS:
protocol_name = 'TLSv1.3'
protocol_translate_name = 'TLS1_3'
assert ssl.HAS_TLSv1_3
protocol_rule = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_TLSv1_2
protocol_is_secure = True
elif protocol_version == ssl.PROTOCOL_TLSv1_2:
protocol_name = 'TLSv1.2'
protocol_translate_name = 'TLS1_2'
assert ssl.HAS_TLSv1_2
protocol_rule = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_TLSv1_3
protocol_is_secure = True
elif protocol_version == ssl.PROTOCOL_TLSv1_1:
protocol_name = 'TLSv1.1'
protocol_translate_name = 'TLS1_1'
assert ssl.HAS_TLSv1_1
protocol_rule = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_2 | ssl.OP_NO_TLSv1_3
protocol_is_secure = False
elif protocol_version == ssl.PROTOCOL_TLSv1:
protocol_name = 'TLSv1.0'
protocol_translate_name = 'TLS1_0'
assert ssl.HAS_TLSv1
protocol_rule = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_TLSv1_2 | ssl.OP_NO_TLSv1_3
protocol_is_secure = False
elif protocol_version == ssl.PROTOCOL_SSLv3:
protocol_name = 'SSLv3'
protocol_translate_name = 'SSL3_0'
assert ssl.HAS_SSLv3
protocol_rule = ssl.OP_NO_SSLv2 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_TLSv1_2 | ssl.OP_NO_TLSv1_3
protocol_is_secure = False
elif protocol_version == ssl.PROTOCOL_SSLv2:
protocol_name = 'SSLv2'
protocol_translate_name = 'SSL2_0'
protocol_rule = ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_TLSv1_2 | ssl.OP_NO_TLSv1_3
assert ssl.HAS_SSLv2
protocol_is_secure = False
result_not_validated = has_protocol_version(
url, False, protocol_rule)
result_validated = has_protocol_version(
url, True, protocol_rule)
has_full_support = result_not_validated[0] and result_validated[0]
has_wrong_cert = result_not_validated[0]
if has_full_support:
if protocol_is_secure:
points += 0.5
review += _('TEXT_REVIEW_' +
protocol_translate_name + '_SUPPORT')
elif has_wrong_cert:
review += _('TEXT_REVIEW_' +
protocol_translate_name + '_SUPPORT_WRONG_CERT')
else:
review += _('TEXT_REVIEW_' +
protocol_translate_name + '_NO_SUPPORT')
if not protocol_is_secure:
points += 0.3
result_insecure_cipher = (False, 'unset')
try:
result_insecure_cipher = has_insecure_cipher(
url, protocol_rule)
except ssl.SSLError as sslex:
print('error insecure_cipher', sslex)
pass
if result_insecure_cipher[0]:
review += _('TEXT_REVIEW_' +
protocol_translate_name + '_INSECURE_CIPHERS')
result_weak_cipher = (False, 'unset')
try:
result_weak_cipher = has_weak_cipher(
url, protocol_rule)
except ssl.SSLError as sslex:
print('error weak_cipher', sslex)
pass
if result_weak_cipher[0]:
review += _('TEXT_REVIEW_' +
protocol_translate_name + '_WEAK_CIPHERS')
except ssl.SSLError as sslex:
print('error 0.0s', sslex)
pass
except AssertionError:
print('### No {0} support on your machine, unable to test ###'.format(
protocol_name))
pass
except:
print('error protocol_version_score: {0}'.format(sys.exc_info()[0]))
pass
return (points, review)
def tls_version_score(orginal_url, _):
points = 0.0
review = ''
url = orginal_url.replace('http://', 'https://')
# TODO: check cipher security
# TODO: re-add support for identifying wrong certificates
try:
result = protocol_version_score(url, ssl.PROTOCOL_TLS, _)
points += result[0]
review += result[1]
except:
pass
try:
result = protocol_version_score(url, ssl.PROTOCOL_TLSv1_2, _)
points += result[0]
review += result[1]
except:
pass
try:
result = protocol_version_score(url, ssl.PROTOCOL_TLSv1_1, _)
points += result[0]
review += result[1]
except:
pass
try:
result = protocol_version_score(url, ssl.PROTOCOL_TLSv1, _)
points += result[0]
review += result[1]
except:
pass
try:
# HOW TO ENABLE SSLv3, https://askubuntu.com/questions/893155/simple-way-of-enabling-sslv2-and-sslv3-in-openssl
result = protocol_version_score(url, ssl.PROTOCOL_SSLv3, _)
points += result[0]
review += result[1]
except:
pass
try:
# HOW TO ENABLE SSLv2, https://askubuntu.com/questions/893155/simple-way-of-enabling-sslv2-and-sslv3-in-openssl
result = protocol_version_score(url, ssl.PROTOCOL_SSLv2, _)
points += result[0]
review += result[1]
except:
pass
if points > 2.0:
points = 2.0
return (points, review)
def dns_lookup(hostname, record_type):
try:
dns_record = dns.resolver.query(hostname, record_type)
except dns.resolver.NXDOMAIN:
return (False, "No record found")
except (dns.resolver.NoAnswer, dns.resolver.NoNameservers) as error:
return (False, error)
record = '' + str(dns_record[0])
return (True, record)
def http_version_score(hostname, url, _):
points = 0.0
review = ''
result = check_http11(hostname)
if result[0]:
points += 0.5
review += _('TEXT_REVIEW_HTTP_VERSION_HTTP_1_1')
result = check_http2(hostname)
if result[0]:
points += 0.5
review += _('TEXT_REVIEW_HTTP_VERSION_HTTP_2')
# If we still have 0.0 points something must have gone wrong, try fallback
if points == 0.0:
result = check_http_fallback(url)
if result[0]:
points += 0.5
review += _('TEXT_REVIEW_HTTP_VERSION_HTTP_1_1')
if result[1]:
points += 0.5
review += _('TEXT_REVIEW_HTTP_VERSION_HTTP_2')
result = check_http3(hostname)
if result[0]:
points += 0.2
review += _('TEXT_REVIEW_HTTP_VERSION_HTTP_3')
if result[1]:
points += 0.2
review += _('TEXT_REVIEW_HTTP_VERSION_QUIC')
return (points, review)
def check_http11(hostname):
try:
socket.setdefaulttimeout(10)
conn = ssl.create_default_context()
conn.set_alpn_protocols(['http/1.1'])
try:
conn.set_npn_protocols(["http/1.1"])
except NotImplementedError:
pass
ssock = conn.wrap_socket(
socket.socket(socket.AF_INET, socket.SOCK_STREAM), server_hostname=hostname)
ssock.connect((hostname, 443))
negotiated_protocol = ssock.selected_alpn_protocol()
if negotiated_protocol is None:
negotiated_protocol = ssock.selected_npn_protocol()
if negotiated_protocol == "http/1.1":
return (True, "http/1.1")
else:
return (False, "http/1.1")
except Exception:
return (False, "http/1.1")
def check_http2(hostname):
try:
socket.setdefaulttimeout(10)
conn = ssl.create_default_context()
conn.set_alpn_protocols(['h2'])
try:
conn.set_npn_protocols(["h2"])
except NotImplementedError:
pass
ssock = conn.wrap_socket(
socket.socket(socket.AF_INET, socket.SOCK_STREAM), server_hostname=hostname)
ssock.connect((hostname, 443))
negotiated_protocol = ssock.selected_alpn_protocol()
if negotiated_protocol is None:
negotiated_protocol = ssock.selected_npn_protocol()
if negotiated_protocol == "h2":
return (True, "http2")
else:
return (False, "http2")
except Exception:
return (False, "http2")
def check_http3(host):
try:
url = 'https://http3check.net/?host={0}'.format(host)
headers = {'user-agent': useragent}
request = requests.get(url, allow_redirects=True,
headers=headers, timeout=request_timeout)
# We use a variable so we only validate it once
requestText = ''
hasRequestText = False
has_quic_support = False
has_http3_support = False
if request.text:
requestText = request.text
hasRequestText = True
if hasRequestText:
try:
soup = BeautifulSoup(requestText, 'lxml')
elements_success = soup.find_all(
class_="uk-text-success")
for result in elements_success:
supportText = result.text.lower()
has_quic_support = has_quic_support or 'quic' in supportText
has_http3_support = has_http3_support or 'http/3' in supportText
except:
print(
'Error getting HTTP/3 or QUIC support!\nMessage:\n{0}'.format(sys.exc_info()[0]))
return (has_http3_support, has_quic_support)
except Exception:
return (False, False)
def check_http_fallback(url):
has_http2 = False
has_http11 = False
try:
r = http3.get(url, allow_redirects=True)
has_http2 = r.protocol == "HTTP/2"
has_http11 = r.protocol == "HTTP1.1"
except ssl.CertificateError as error:
print('ERR1', error)
pass
except Exception as e:
print('ERR2', e)
pass
try:
if not has_http11:
# This call only supports HTTP/1.1
content = httpRequestGetContent(url, True)
if '</html>' in content:
has_http11 = True
except Exception as e:
# Probably a CERT validation error, ignore
print('ERR3', e)
pass
return (has_http11, has_http2)
# Read post at: https://hussainaliakbar.github.io/restricting-tls-version-and-cipher-suites-in-python-requests-and-testing-wireshark/
WEAK_CIPHERS = (
'ECDHE+AES128+CBC+SHA:'
'ECDHE+AES256+CBC+SHA:'
'ECDHE+RSA+3DES+EDE+CBC+SHA:'
'ECDHE+RSA+AES256+GCM+SHA384:'
'RSA+AES128+CBC+SHA:'
'RSA+AES256+CBC+SHA:'
'RSA+AES128+GCM+SHA256:'
'RSA+AES256+GCM+SHA:'
'RSA+AES256+GCM+SHA384:'
'RSA+CAMELLIA128+CBC+SHA:'
'RSA+CAMELLIA256+CBC+SHA:'
'RSA+IDEA+CBC+SHA:'
'RSA+AES256+GCM+SHA:'
'RSA+3DES+EDE+CBC+SHA:'
'RSA+SEED+CBC+SHA:'
'DHE+RSA+3DES+EDE+CBC+SHA:'
'DHE+RSA+AES128+CBC+SHA:'
'DHE+RSA+AES256+CBC+SHA:'
'DHE+RSA+CAMELLIA128+CBC+SHA:'
'DHE+RSA+CAMELLIA256+CBC+SHA:'
'DHE+RSA+SEED+CBC+SHA:'
)
class TlsAdapterWeakCiphers(HTTPAdapter):
def __init__(self, ssl_options=0, **kwargs):
self.ssl_options = ssl_options
super(TlsAdapterWeakCiphers, self).__init__(**kwargs)
def init_poolmanager(self, *pool_args, **pool_kwargs):
ctx = ssl_.create_urllib3_context(
ciphers=WEAK_CIPHERS,
cert_reqs=ssl.CERT_REQUIRED, options=self.ssl_options)
self.poolmanager = PoolManager(*pool_args,
ssl_context=ctx,
**pool_kwargs)
def proxy_manager_for(self, *args, **kwargs):
context = ssl_.create_urllib3_context(ciphers=WEAK_CIPHERS)
kwargs['ssl_context'] = context
return super(TlsAdapterWeakCiphers, self).proxy_manager_for(*args, **kwargs)
def has_weak_cipher(url, protocol_version):
session = False
try:
#print('ssl._DEFAULT_CIPHERS', ssl._DEFAULT_CIPHERS)
session = requests.session()
adapter = TlsAdapterWeakCiphers(protocol_version)
session.mount(url, adapter)
except ssl.SSLError as sslex:
# print('### No weak cipher support on your machine, unable to test: {0} ###'.format(
# WEAK_CIPHERS))
return (False, 'weak_cipher SSLError {0}'.format(sslex))
try:
allow_redirects = False
headers = {'user-agent': useragent}
a = session.get(url, verify=False, allow_redirects=allow_redirects,
headers=headers, timeout=request_timeout)
if a.status_code == 200 or a.status_code == 301 or a.status_code == 302 or a.status_code == 404:
#print('is ok')
return (True, 'is ok')
resulted_in_html = '<html' in a.text
# if resulted_in_html:
# print('has html')
# else:
# print('no html')
return (resulted_in_html, 'has <html tag in result')
except ssl.SSLCertVerificationError as sslcertex:
#print('weak_cipher SSLCertVerificationError', sslcertex)
return (True, 'weak_cipher SSLCertVerificationError: {0}'.format(sslcertex))
except ssl.SSLError as sslex:
#print('error has_weak_cipher SSLError1', sslex)
return (False, 'weak_cipher SSLError {0}'.format(sslex))
except ConnectionResetError as resetex:
#print('error ConnectionResetError', resetex)
return (False, 'weak_cipher ConnectionResetError {0}'.format(resetex))
except requests.exceptions.SSLError as sslerror:
#print('error weak_cipher SSLError2', sslerror)
        return (False, 'Unable to verify: SSL error occurred')
    except requests.exceptions.ConnectionError as conex:
        #print('error weak_cipher ConnectionError', conex)
        return (False, 'Unable to verify: connection error occurred')
except Exception as exception:
#print('weak_cipher test', exception)
return (False, 'weak_cipher Exception {0}'.format(exception))
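# --- Hedged usage sketch (not part of the original source) ---
# has_weak_cipher() forwards protocol_version to TlsAdapterWeakCiphers as an
# ssl options bitmask, so a caller can pin the handshake to one TLS version
# while offering only the WEAK_CIPHERS list. The OP_NO_* flags are standard
# ssl constants; the URL is a placeholder.
def _example_weak_cipher_probe(url='https://example.com'):
    force_tls12 = (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 |
                   ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
    accepted, reason = has_weak_cipher(url, force_tls12)
    return accepted, reason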
# Read post at: https://hussainaliakbar.github.io/restricting-tls-version-and-cipher-suites-in-python-requests-and-testing-wireshark/
INSECURE_CIPHERS = (
'RSA+RC4+MD5:'
'RSA+RC4128+MD5:'
'RSA+RC4+SHA:'
'RSA+RC4128+SHA:'
'ECDHE+RSA+RC4+SHA:'
'ECDHE+RSA+RC4+SHA:'
'ECDHE+RSA+RC4128+MD5:'
'ECDHE+RSA+RC4128+MD5:'
)
class TlsAdapterInsecureCiphers(HTTPAdapter):
def __init__(self, ssl_options=0, **kwargs):
self.ssl_options = ssl_options
super(TlsAdapterInsecureCiphers, self).__init__(**kwargs)
def init_poolmanager(self, *pool_args, **pool_kwargs):
ctx = ssl_.create_urllib3_context(
ciphers=INSECURE_CIPHERS,
cert_reqs=ssl.CERT_REQUIRED, options=self.ssl_options)
self.poolmanager = PoolManager(*pool_args,
ssl_context=ctx,
**pool_kwargs)
def proxy_manager_for(self, *args, **kwargs):
context = ssl_.create_urllib3_context(ciphers=INSECURE_CIPHERS)
kwargs['ssl_context'] = context
return super(TlsAdapterInsecureCiphers, self).proxy_manager_for(*args, **kwargs)
def has_insecure_cipher(url, protocol_version):
session = False
try:
#print('ssl._DEFAULT_CIPHERS', ssl._DEFAULT_CIPHERS)
session = requests.session()
adapter = TlsAdapterInsecureCiphers(protocol_version)
session.mount(url, adapter)
except ssl.SSLError as sslex:
# print('### No weak cipher support on your machine, unable to test: {0} ###'.format(
# WEAK_CIPHERS))
return (False, 'insecure_cipher SSLError {0}'.format(sslex))
try:
allow_redirects = False
headers = {'user-agent': useragent}
a = session.get(url, verify=False, allow_redirects=allow_redirects,
headers=headers, timeout=request_timeout)
if a.status_code == 200 or a.status_code == 301 or a.status_code == 302 or a.status_code == 404:
#print('is ok')
return (True, 'is ok')
resulted_in_html = '<html' in a.text
# if resulted_in_html:
# print('has html')
# else:
# print('no html')
return (resulted_in_html, 'has <html tag in result')
except ssl.SSLCertVerificationError as sslcertex:
#print('weak_cipher SSLCertVerificationError', sslcertex)
return (True, 'insecure_cipher SSLCertVerificationError: {0}'.format(sslcertex))
except ssl.SSLError as sslex:
#print('error has_weak_cipher SSLError1', sslex)
return (False, 'insecure_cipher SSLError {0}'.format(sslex))
except ConnectionResetError as resetex:
#print('error ConnectionResetError', resetex)
return (False, 'insecure_cipher ConnectionResetError {0}'.format(resetex))
except requests.exceptions.SSLError as sslerror:
#print('error weak_cipher SSLError2', sslerror)
        return (False, 'Unable to verify: SSL error occurred')
    except requests.exceptions.ConnectionError as conex:
        #print('error weak_cipher ConnectionError', conex)
        return (False, 'Unable to verify: connection error occurred')
except Exception as exception:
#print('weak_cipher test', exception)
return (False, 'insecure_cipher Exception {0}'.format(exception))
class TlsAdapterCertRequired(HTTPAdapter):
def __init__(self, ssl_options=0, **kwargs):
self.ssl_options = ssl_options
super(TlsAdapterCertRequired, self).__init__(**kwargs)
def init_poolmanager(self, *pool_args, **pool_kwargs):
ctx = ssl_.create_urllib3_context(
cert_reqs=ssl.CERT_REQUIRED, options=self.ssl_options)
self.poolmanager = PoolManager(*pool_args,
ssl_context=ctx,
**pool_kwargs)
class TlsAdapterNoCert(HTTPAdapter):
def __init__(self, ssl_options=0, **kwargs):
self.ssl_options = ssl_options
super(TlsAdapterNoCert, self).__init__(**kwargs)
def init_poolmanager(self, *pool_args, **pool_kwargs):
ctx = ssl_.create_urllib3_context(
cert_reqs=ssl.CERT_NONE,
options=self.ssl_options)
self.poolmanager = PoolManager(*pool_args,
ssl_context=ctx,
**pool_kwargs)
def has_protocol_version(url, validate_hostname, protocol_version):
session = requests.session()
if validate_hostname:
adapter = TlsAdapterCertRequired(protocol_version)
else:
adapter = TlsAdapterNoCert(protocol_version)
session.mount("https://", adapter)
try:
allow_redirects = False
headers = {'user-agent': useragent}
a = session.get(url, verify=validate_hostname, allow_redirects=allow_redirects,
headers=headers, timeout=request_timeout)
if a.status_code == 200 or a.status_code == 301 or a.status_code == 302:
return (True, 'is ok')
if not validate_hostname and a.status_code == 404:
return (True, 'is ok')
resulted_in_html = '<html' in a.text
return (resulted_in_html, 'has <html tag in result')
except ssl.SSLCertVerificationError as sslcertex:
#print('protocol version SSLCertVerificationError', sslcertex)
if validate_hostname:
return (True, 'protocol version SSLCertVerificationError: {0}'.format(sslcertex))
else:
return (False, 'protocol version SSLCertVerificationError: {0}'.format(sslcertex))
except ssl.SSLError as sslex:
#print('protocol version SSLError', sslex)
return (False, 'protocol version SSLError: {0}'.format(sslex))
except ConnectionResetError as resetex:
#print('error protocol version ConnectionResetError', resetex)
return (False, 'protocol version ConnectionResetError {0}'.format(resetex))
except requests.exceptions.SSLError as sslerror:
#print('error protocol version SSLError', sslerror)
        return (False, 'Unable to verify: SSL error occurred')
    except requests.exceptions.ConnectionError as conex:
        #print('error protocol version ConnectionError', conex)
        return (False, 'Unable to verify: connection error occurred')
except Exception as exception:
#print('protocol version test', exception)
return (False, 'protocol version Exception {0}'.format(exception))
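# --- Hedged usage sketch (not part of the original source) ---
# has_protocol_version() likewise treats protocol_version as an ssl options
# bitmask that disables the versions you do NOT want, so probing for
# "TLS 1.2 support" means switching everything else off. The flags are
# standard ssl constants (OP_NO_TLSv1_3 needs Python 3.7+); the URL is a
# placeholder.
def _example_tls12_probe(url='https://example.com'):
    only_tls12 = (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 |
                  ssl.OP_NO_TLSv1_1 | ssl.OP_NO_TLSv1_3)
    return has_protocol_version(url, True, only_tls12)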
| 32.704427
| 133
| 0.632639
|
490e1cd5a5a05265de35aee780f8c682dd07c499
| 1,522
|
py
|
Python
|
nova_dveri_ru/main.py
|
Aleksey-Voko/nova_dveri_ru
|
7657c81e12f4486100385b9f181895f7688a8106
|
[
"MIT"
] | null | null | null |
nova_dveri_ru/main.py
|
Aleksey-Voko/nova_dveri_ru
|
7657c81e12f4486100385b9f181895f7688a8106
|
[
"MIT"
] | null | null | null |
nova_dveri_ru/main.py
|
Aleksey-Voko/nova_dveri_ru
|
7657c81e12f4486100385b9f181895f7688a8106
|
[
"MIT"
] | null | null | null |
from pathlib import Path
from urllib.parse import urljoin
from data import BASE_URL, HTML_SITE_MAP
from parsing.product_card import get_product_cards
from utils import add_in_yaml, get_from_yaml
from sitemap.html_site_map import (get_html,
get_sub_links_to_html_sitemap,
get_page_links_to_html_sitemap,
get_product_links)
def main():
product_links_file = 'output/product_links.yaml'
product_cards_file = 'output/product_cards.yaml'
if not Path(product_links_file).exists():
html_site_map = get_html(urljoin(BASE_URL, HTML_SITE_MAP))
sub_links = get_sub_links_to_html_sitemap(html_site_map)
page_links = get_page_links_to_html_sitemap(sub_links)
for link in get_product_links(page_links):
print(link)
add_in_yaml(link, product_links_file, flow_style=False)
product_links = get_from_yaml(product_links_file)
if Path(product_cards_file).exists():
product_cards = get_from_yaml(product_cards_file)
completed_links = [x['link'] for x in product_cards]
else:
completed_links = []
product_links = list(set(product_links) - set(completed_links))
for product_card in get_product_cards(product_links):
add_in_yaml(product_card, product_cards_file, flow_style=False)
completed_links.append(product_card['link'])
print('=' * 100)
print()
if __name__ == '__main__':
main()
| 35.395349
| 71
| 0.689225
|
1556ee43c6a706ba96a0daa96fd119f84e8d7b33
| 166
|
py
|
Python
|
src/yellowdog_client/model/node_worker_target_type.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/node_worker_target_type.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
src/yellowdog_client/model/node_worker_target_type.py
|
yellowdog/yellowdog-sdk-python-public
|
da69a7d6e45c92933e34fefcaef8b5d98dcd6036
|
[
"Apache-2.0"
] | null | null | null |
from enum import Enum
class NodeWorkerTargetType(Enum):
PER_NODE = "PER_NODE"
PER_VCPU = "PER_VCPU"
def __str__(self) -> str:
return self.name
| 16.6
| 33
| 0.662651
|
4c63fe6032200e045fb52ba0358fd74aecf5e72c
| 2,620
|
py
|
Python
|
core/context.py
|
Shihira/policy-control
|
f0fb7b8f1b9700ccb41ef989e0985434db39f843
|
[
"MIT"
] | null | null | null |
core/context.py
|
Shihira/policy-control
|
f0fb7b8f1b9700ccb41ef989e0985434db39f843
|
[
"MIT"
] | null | null | null |
core/context.py
|
Shihira/policy-control
|
f0fb7b8f1b9700ccb41ef989e0985434db39f843
|
[
"MIT"
] | null | null | null |
# Copyright(c) 2015, Shihira Fung <fengzhiping@hotmail.com>
class context(object):
"""
    Context stores and manages runtime information like local variables, the
    pointer to the currently running command, etc. The concrete content of
    a context depends on command implementations.
    context is picklable; you can store it in a persistent store with pickle.
    Attributes starting with '_' are not going to be dumped.
    Each context has a unique id `context.id` after it is started.
    A context currently contains these fields (attributes):
* id: id is for you to identify transactions
* vars: variables to store in this transaction
* ip: instructor pointer register :-)
* _yield: yield record (not stored)
* _preload: modules loaded with `load` command (not stored)
"""
def __getattr__(self, attr):
"Available expression: value = context[attr]"
return self.__dict__[attr]
def __setattr__(self, attr, value):
"Available expression: context[attr] = value"
self.__dict__[attr] = value
return value
def __getstate__(self):
"""
Available expression: s = pickle.dumps(context)
        NOTE AGAIN: variables starting with '_' are not going to be dumped
"""
dump = self.__dict__.copy()
return dict(filter(lambda p: p[0][0] != "_", dump.items()))
def __setstate__(self, d):
"Available expression: context = pickle.loads(s)"
self.__dict__.update(d)
def __init__(self, **kwargs):
"Available expression: context(attr1 = value1, attr2 = value2)"
self.__dict__.update(kwargs)
def __len__(self):
"Available expression: bool(context)"
return len(self.__dict__)
def __delattr__(self, attr):
del self.__dict__[attr]
def ensure(self, attr, init = None):
"""
If an attribute exists return it directly, otherwise
initialize it with `init`
"""
if not hasattr(self, attr):
self.__setattr__(attr, init)
return self
def clear(self):
self.__dict__.clear()
@staticmethod
def start_new():
"start a new context/session, with default context value initialized"
import string, random
return context(
id = "".join([random.choice(
                string.ascii_uppercase + string.ascii_lowercase + string.digits)
for i in range(32)]),
vars = { },
ip = 0,
_yield = [ ],
_preload = [ ],
_await_tags = [ ],
)
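# --- Hedged usage sketch (not part of the original module) ---
# Starts a session, stores a variable and shows that __getstate__ filters out
# the '_'-prefixed attributes, so only the persistent fields get dumped.
def _example_context_usage():
    ctx = context.start_new()
    ctx.vars['answer'] = 42
    state = ctx.__getstate__()
    assert 'vars' in state and 'id' in state
    assert '_yield' not in state
    return state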
| 31.566265
| 79
| 0.607634
|
cf4b2690781dccb6c70284efa3cd99eb181dd40e
| 11,282
|
py
|
Python
|
advanced-analytics/recommendation-engine/recommendation_engine.py
|
m0ps/gcp-fsi-design-pattern-ipre
|
391fc03c70bd7e2fb38605980d25e32ede6c7d27
|
[
"Apache-2.0"
] | 7
|
2021-06-23T08:09:42.000Z
|
2022-01-16T05:17:07.000Z
|
advanced-analytics/recommendation-engine/recommendation_engine.py
|
skylinepro/gcp-fsi-design-pattern-ipre
|
7a933e742b29a4b1b9acde2c2a03ff29bf61e600
|
[
"Apache-2.0"
] | null | null | null |
advanced-analytics/recommendation-engine/recommendation_engine.py
|
skylinepro/gcp-fsi-design-pattern-ipre
|
7a933e742b29a4b1b9acde2c2a03ff29bf61e600
|
[
"Apache-2.0"
] | 3
|
2021-06-29T12:53:09.000Z
|
2021-12-14T01:34:50.000Z
|
""" The recommendation service module provides functions for loading remote data,
portfolio optimization, investment analytics.
`recommendation_engine.py` requires env variables:
- QUOTES_BUCKET -- GCS bucket name with capital markets quotes data
- QUOTES_BLOB -- name of capital markets quotes file, e.g. capital-markets-quotes.csv
- PREDICTED_IRP_BUCKET -- GCS bucket name with _predicted_ investor risk preferences
- PREDICTED_IRP_BLOB -- name of predicted IRP file, e.g. predicted-irp.csv
- PREDICTED_RETURNS_BUCKET -- GCS bucket name with _predicted_ expected returns data
- PREDICTED_RETURNS_BLOB -- name of predicted expected returns file, e.g. predicted-expected-returns.csv
"""
import json
import logging
import os
import sys
import numpy as np
import pandas as pd
import pypfopt
from google.cloud import storage
# Set logging
logger = logging.getLogger("recommendation-engine")
logging.basicConfig(
level=logging.DEBUG,
stream=sys.stdout,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
encoding="utf-8"
)
__valid_methods__ = [
"make_recommendation"
]
class PortfolioOptimizer:
""" Class for computing optimal asset weights in the portfolio.
Public methods:
fit() -- compute optimal asset weights for given risk aversion.
get_portfolio_metrics() -- compute investment performance metrics.
Attributes:
uuid -- unique user ID for making personalized recommendation
"""
def __init__(self, uuid):
self.quotesBucket: str = os.environ["QUOTES_BUCKET"]
self.quotesBlob: str = os.environ["QUOTES_BLOB"]
self.quotes: pd.DataFrame = None
self.riskAversionBucket: str = os.environ["PREDICTED_IRP_BUCKET"]
self.riskAversionBlob: str = os.environ["PREDICTED_IRP_BLOB"]
self.riskAversion: float = None
self.expectedReturnsBucket: str = os.environ["PREDICTED_RETURNS_BUCKET"]
self.expectedReturnsBlob: str = os.environ["PREDICTED_RETURNS_BLOB"]
self.expectedReturns: pd.Series = None
self.tickers = json.load(open("settings.json", "r"))["tickers"]
self.uuid: str = uuid
self.periodsPerYear: int = 12
self.periodicReturns: pd.DataFrame = None
self.expectedVolatility: pd.Series = None
self.riskModel: pd.DataFrame = None
self.optimizer = None
self.assetWeights: dict = None
self.portfolioMetrics: dict = None
def get_quotes(self) -> pd.DataFrame:
""" Load historical quotes data from Cloud storage csv.
Returns:
pd.DataFrame: quotes dataframe.
"""
logger.debug(
f"Getting quotes from {self.quotesBucket}/{self.quotesBlob}.")
dataPath = "".join(["gs://", os.path.join(self.quotesBucket, self.quotesBlob)])
quotesAll = pd.read_csv(dataPath, index_col=0)
self.quotes = quotesAll.loc[:, self.tickers]
return self.quotes
def get_periodic_returns(self, periods: int = 20) -> pd.DataFrame:
""" Calculate periodic returns from quotes.
Args:
periods (int): Rolling window of periodic returns. Defaults to 20.
Args:
periods (int): rolling window of MA. Defaults to 20.
Returns:
pd.DataFrame: periodic returns data frame.
"""
if not isinstance(self.quotes, pd.DataFrame):
self.get_quotes()
logger.debug(f"Estimating periodic returns, periods = {periods}.")
self.periodicReturns = self.quotes.pct_change(periods=periods).dropna(how='all')
return self.periodicReturns
def get_expected_returns(self) -> pd.Series:
""" Get annualized expected returns vector.
Returns:
pd.Series: vector of annualized expected returns.
"""
logger.debug(f"Estimating expected annualized returns, periodsPerYear={self.periodsPerYear}.")
try:
logger.debug(f"Getting expected returns vector from {self.expectedReturnsBucket}/{self.expectedReturnsBlob}.")
dataPath = "".join(["gs://", os.path.join(self.expectedReturnsBucket, self.expectedReturnsBlob)])
remoteReturns = pd.read_csv(dataPath, index_col=0)
self.expectedReturns = remoteReturns.loc[self.tickers, 'forecast_value'] * self.periodsPerYear
except FileNotFoundError:
logger.warning("Failed to load expected returns from GCS. Estimating returns from quotes.")
if not isinstance(self.periodicReturns, pd.DataFrame):
self.get_periodic_returns()
# annualize expected returns
nYears = self.periodicReturns.shape[0] / self.periodsPerYear
self.expectedReturns = np.power(np.prod(1 + self.periodicReturns), (1 / nYears)) - 1
return self.expectedReturns
def get_expected_volatility(self) -> pd.Series:
""" Calculate annualized expected volatility vector.
Returns:
pd.Series: vector of annualized expected volatilities.
"""
if not isinstance(self.periodicReturns, pd.DataFrame):
self.get_periodic_returns()
logger.debug("Estimating expected annualized volatilities for tickers.")
self.expectedVolatility = self.periodicReturns.std() * np.sqrt(self.periodsPerYear)
return self.expectedVolatility
def get_risk_model(self) -> pd.DataFrame:
""" Compute risk model with Ledoit-Wolf shrinkage method.
Returns:
pd.DataFrame: Annualized risk model VCM.
"""
if not isinstance(self.periodicReturns, pd.DataFrame):
self.get_periodic_returns()
logger.debug("Estimating risk model.")
vcm = pypfopt.risk_models.CovarianceShrinkage(
prices=self.periodicReturns,
returns_data=True,
frequency=self.periodsPerYear
)
self.riskModel = vcm.ledoit_wolf()
return self.riskModel
def get_risk_aversion(self, label: str = "predicted_risk", min_max: tuple[int] = (5, 15)) -> float:
""" Get risk aversion value for investor UUID.
Args:
label (str, optional): Name of predicted risk aversion column. Defaults to "predicted_risk".
min_max (tuple, optional): Target min, max risk aversion range. Defaults to (5, 15).
Returns:
float: Risk aversion value.
"""
try:
logger.debug(f"Getting risk aversion from {self.riskAversionBucket}/{self.riskAversionBlob}.")
dataPath = "".join(["gs://", os.path.join(self.riskAversionBucket, self.riskAversionBlob)])
riskAversion_df = pd.read_csv(dataPath, sep=';')
riskAversion_series = riskAversion_df.loc[riskAversion_df.clientID == self.uuid, 'predicted_risk']
# scale risk aversion
self.riskAversion = self.scale_value(riskAversion_series.iloc[-1], min_max)
except FileNotFoundError:
logger.warning("Failed to load risk aversion from GCS. Setting to default riskAversion=10.0")
self.riskAversion = 10.0
return self.riskAversion
@staticmethod
def scale_value(value: float, min_max: tuple[int] = (5, 15)):
_min, _max = min_max
return float(value * (_max - _min) + _min)
@staticmethod
def unscale_value(value: float, min_max: tuple[int] = (5, 15)):
_min, _max = min_max
return float((value - _min) / (_max - _min))
def set_optimizer(self):
""" Initialize convex optimizer.
Returns:
obj: pyfopt.efficient_frontier object.
"""
if not isinstance(self.expectedReturns, pd.Series):
self.get_expected_returns()
if self.riskModel is None:
self.get_risk_model()
logger.debug("Setting convex optimizer.")
self.optimizer = pypfopt.efficient_frontier.EfficientFrontier(
expected_returns=self.expectedReturns,
cov_matrix=self.riskModel,
weight_bounds=(0, 1)
)
return self.optimizer
@staticmethod
def structure_results(weights: dict, returns: pd.Series, volatility: pd.Series) -> dict:
""" Helper function for structuring results in dictionary.
Args:
weights (dict): dictionary with optimal asset weights in portfolio.
returns (pd.Series): annualized expected returns vector.
volatility (pd.Series): annualized expected volatility vector.
Returns:
dict: dictionary with ticker attributes.
"""
for key, value in weights.items():
weights[key] = {
"weight": value,
"expectedReturn": returns.loc[key],
"expectedVolatility": volatility.loc[key]
}
return weights
def fit(self, riskAversion: float) -> dict:
""" Compute optimal asset weights in the portfolio.
Args:
riskAversion (float, optional): Risk aversion factor. Defaults to None.
Returns:
dict: dictionary of asset weights in the portfolio.
"""
if self.optimizer is None:
self.set_optimizer()
logger.debug(f"Computing optimal weights for riskAversion = {riskAversion}.")
self.assetWeights = self.optimizer.max_quadratic_utility(
risk_aversion=riskAversion ** 2,
market_neutral=False
)
self.assetWeights = self.structure_results(
weights=dict(self.assetWeights),
returns=self.expectedReturns,
volatility=self.get_expected_volatility()
)
return self.assetWeights
def get_portfolio_metrics(self, rf: float = 0.025) -> dict:
""" Compute portfolio metrics given risk-free rate.
Args:
rf (float, optional): Risk-free rate. Defaults to 0.025.
Returns:
dict: E[r], E[std], Sharpe-Ratio.
"""
logger.debug(f"Computing portfolio performance metrics for rf={rf}.")
portfolio_metrics = self.optimizer.portfolio_performance(rf)
self.portfolioMetrics = {
"expectedReturn": portfolio_metrics[0] * 100,
"annualVolatility": portfolio_metrics[1] * 100,
"sharpeRatio": portfolio_metrics[2]
}
return self.portfolioMetrics
def make_recommendation(uuid: str, riskAversion: float = None):
""" Workflow for making personalized recommendation, computing investment analytics.
Args:
uuid (str): unique user ID.
riskAversion (float, optional): Select risk aversion factor in range [0, 1]. Defaults to None.
Returns:
dict: personalized recommendation on investment products, investment performance metrics.
"""
mypy = PortfolioOptimizer(uuid)
if not isinstance(riskAversion, float):
        # get_risk_aversion() looks up the value for self.uuid internally
        riskAversion = mypy.get_risk_aversion()
else:
riskAversion = mypy.scale_value(riskAversion)
weights = mypy.fit(riskAversion)
metrics = mypy.get_portfolio_metrics(rf=0.025)
recommendation = {
"portfolioComposition": weights,
"portfolioMetrics": metrics,
"riskAversion": mypy.unscale_value(riskAversion),
}
return recommendation
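# --- Hedged usage sketch (not part of the original module) ---
# make_recommendation() assumes the GCS environment variables listed in the
# module docstring are set before PortfolioOptimizer is constructed. The
# bucket/blob names and UUID below are placeholders; real objects in GCS are
# needed for the call to actually succeed.
def _example_recommendation():
    os.environ.setdefault("QUOTES_BUCKET", "my-quotes-bucket")
    os.environ.setdefault("QUOTES_BLOB", "capital-markets-quotes.csv")
    os.environ.setdefault("PREDICTED_IRP_BUCKET", "my-irp-bucket")
    os.environ.setdefault("PREDICTED_IRP_BLOB", "predicted-irp.csv")
    os.environ.setdefault("PREDICTED_RETURNS_BUCKET", "my-returns-bucket")
    os.environ.setdefault("PREDICTED_RETURNS_BLOB", "predicted-expected-returns.csv")
    # riskAversion may be passed explicitly in [0, 1]; if omitted it is looked
    # up per UUID from the predicted IRP file.
    return make_recommendation(uuid="client-0001", riskAversion=0.5)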
| 39.310105
| 122
| 0.650948
|
752aab94b740a814eb7bca568d1b19d3b4baaf29
| 3,786
|
py
|
Python
|
ask-smapi-model/ask_smapi_model/v1/skill/asr/evaluations/audio_asset.py
|
alexa-labs/alexa-apis-for-python
|
52838be4f57ee1a2479402ea78b1247b56017942
|
[
"Apache-2.0"
] | 90
|
2018-09-19T21:56:42.000Z
|
2022-03-30T11:25:21.000Z
|
ask-smapi-model/ask_smapi_model/v1/skill/asr/evaluations/audio_asset.py
|
ishitaojha/alexa-apis-for-python
|
a68f94b7a0e41f819595d6fe56e800403e8a4194
|
[
"Apache-2.0"
] | 11
|
2018-09-23T12:16:48.000Z
|
2021-06-10T19:49:45.000Z
|
ask-smapi-model/ask_smapi_model/v1/skill/asr/evaluations/audio_asset.py
|
ishitaojha/alexa-apis-for-python
|
a68f94b7a0e41f819595d6fe56e800403e8a4194
|
[
"Apache-2.0"
] | 28
|
2018-09-19T22:30:38.000Z
|
2022-02-22T22:57:07.000Z
|
# coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Union, Any
from datetime import datetime
class AudioAsset(object):
"""
Object containing information about downloading audio file
:param download_url: S3 presigned download url for downloading the audio file
:type download_url: (optional) str
:param expiry_time: timestamp when the audio download url expire in ISO 8601 format
:type expiry_time: (optional) str
"""
deserialized_types = {
'download_url': 'str',
'expiry_time': 'str'
} # type: Dict
attribute_map = {
'download_url': 'downloadUrl',
'expiry_time': 'expiryTime'
} # type: Dict
supports_multiple_types = False
def __init__(self, download_url=None, expiry_time=None):
# type: (Optional[str], Optional[str]) -> None
"""Object containing information about downloading audio file
:param download_url: S3 presigned download url for downloading the audio file
:type download_url: (optional) str
:param expiry_time: timestamp when the audio download url expire in ISO 8601 format
:type expiry_time: (optional) str
"""
self.__discriminator_value = None # type: str
self.download_url = download_url
self.expiry_time = expiry_time
def to_dict(self):
# type: () -> Dict[str, object]
"""Returns the model properties as a dict"""
result = {} # type: Dict
for attr, _ in six.iteritems(self.deserialized_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else
x.value if isinstance(x, Enum) else x,
value
))
elif isinstance(value, Enum):
result[attr] = value.value
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else
(item[0], item[1].value)
if isinstance(item[1], Enum) else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
# type: () -> str
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
# type: () -> str
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
# type: (object) -> bool
"""Returns true if both objects are equal"""
if not isinstance(other, AudioAsset):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (object) -> bool
"""Returns true if both objects are not equal"""
return not self == other
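# --- Hedged usage sketch (not part of the generated model) ---
# Constructs an AudioAsset and serializes it; note that to_dict() keys use the
# python attribute names (download_url / expiry_time), not the wire names from
# attribute_map. The URL and timestamp are placeholders.
def _example_audio_asset():
    asset = AudioAsset(download_url='https://example.com/audio.wav',
                       expiry_time='2019-07-04T12:00:00Z')
    return asset.to_dict()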
| 32.637931
| 96
| 0.602483
|
eb35ed497a07efe22b8ee3e27f235f85ab43bd0b
| 2,949
|
py
|
Python
|
network2json.py
|
pablocarb/rpviz
|
9309a8853be9cd1a69ad3d8b2accdcfc811ff7fc
|
[
"MIT"
] | null | null | null |
network2json.py
|
pablocarb/rpviz
|
9309a8853be9cd1a69ad3d8b2accdcfc811ff7fc
|
[
"MIT"
] | 1
|
2019-05-31T09:23:41.000Z
|
2019-05-31T09:29:38.000Z
|
network2json.py
|
pablocarb/rpviz
|
9309a8853be9cd1a69ad3d8b2accdcfc811ff7fc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu May 30 16:23:33 2019
@author: anael
"""
import networkx as nx
import random
def network2(G,LR,Lreact,Lprod,name,sp_smiles,reac_smiles,image,\
image2,spname,sp_links,roots,dic_types,\
image2big, data_tab, dfG_prime_o,dfG_prime_m, dfG_uncert,\
flux_value, rule_id, rule_score, fba_obj_name, revers):
"""From lists and dictionaries creates nodes, attributes and edges in cytoscape format
Input : Lists and dictionaries
Output : Network dictionary in cytoscape format"""
###Create the network with networkx
col="#"+''.join([random.choice('0123456789ABCDEF') for j in range(6)])
for i in LR:
G.add_node(i,pathway=name,colour=col) #add reactions nodes
#LR=['<Reaction RP1>_rp_3_1', '<Reaction targetSink>_rp_3_1']
#Lreact=[['MNXM2__64__MNXC3_rp_3_1', 'MNXM497__64__MNXC3_rp_3_1'], ['TARGET_0000000001__64__MNXC3']]
#Lprod=[['TARGET_0000000001__64__MNXC3', 'MNXM26__64__MNXC3'], []]
for i in range(len(LR)):
for j in range(len(Lreact[i])):
if Lreact[i][j] not in G.nodes():
G.add_node(Lreact[i][j],pathway=name)
G.add_edge(Lreact[i][j],LR[i],pathway=name,colour=col) #add reactants nodes
for k in range(len(Lprod[i])):
if Lprod[i][k] not in G.nodes():
G.add_node(Lprod[i][k],pathway=name)
G.add_edge(LR[i],Lprod[i][k],pathway=name,colour=col) #add products nodes
#Attribute name
nx.set_node_attributes(G,name='name', values=spname)
#Attribute category
nx.set_node_attributes(G,name='category',values=dic_types)
#Attribute smile
nx.set_node_attributes(G, name='smiles', values=sp_smiles)
#Attribute smile
nx.set_node_attributes(G, name='Rsmiles', values=reac_smiles)
#Attribute image
nx.set_node_attributes(G,name='image', values=image)
#Attribute reaction image
nx.set_node_attributes(G,name='image2',values=image2)
nx.set_node_attributes(G,name='image2big',values=image2big)
#Attribute link
nx.set_node_attributes(G,name="link",values=sp_links)
#Attribute Root
nx.set_node_attributes(G,name="root", values=roots)
#Attribute reversibility
nx.set_node_attributes(G,name="reversibility", values=revers)
#Attribute Data tab
nx.set_node_attributes(G,name="data_tab", values=data_tab)
nx.set_node_attributes(G,name="dfG_prime_o", values=dfG_prime_o)
nx.set_node_attributes(G,name="dfG_prime_m", values=dfG_prime_m)
nx.set_node_attributes(G,name="dfG_uncert", values=dfG_uncert)
nx.set_node_attributes(G,name="flux_value", values=flux_value)
nx.set_node_attributes(G,name="rule_id", values=rule_id)
nx.set_node_attributes(G,name="rule_score", values=rule_score)
nx.set_node_attributes(G,name="fba_obj_name", values=fba_obj_name)
return(G)
| 33.511364
| 100
| 0.68057
|
78fc8f1c5a366c3a01163e414ff9e05b27b233f9
| 369
|
py
|
Python
|
boardproject/urls.py
|
kazumaro756/django_sns
|
6298d385e4cabb95f69597e71fc789b558fedf93
|
[
"MIT"
] | null | null | null |
boardproject/urls.py
|
kazumaro756/django_sns
|
6298d385e4cabb95f69597e71fc789b558fedf93
|
[
"MIT"
] | null | null | null |
boardproject/urls.py
|
kazumaro756/django_sns
|
6298d385e4cabb95f69597e71fc789b558fedf93
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.urls import path,include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('',include('boardapp.urls'))
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)+ static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| 33.545455
| 129
| 0.783198
|
626ee88da259add9f47748406d2f23e3252ac7dd
| 34,870
|
py
|
Python
|
bluebottle/funding_stripe/tests/test_api.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 10
|
2015-05-28T18:26:40.000Z
|
2021-09-06T10:07:03.000Z
|
bluebottle/funding_stripe/tests/test_api.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 762
|
2015-01-15T10:00:59.000Z
|
2022-03-31T15:35:14.000Z
|
bluebottle/funding_stripe/tests/test_api.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 9
|
2015-02-20T13:19:30.000Z
|
2022-03-08T14:09:17.000Z
|
from builtins import str
import json
import mock
import munch
from django.db import connection
from django.urls import reverse
from rest_framework import status
import stripe
from bluebottle.funding.tests.factories import FundingFactory, DonorFactory
from bluebottle.funding_stripe.models import StripePaymentProvider
from bluebottle.funding_stripe.tests.factories import (
StripePayoutAccountFactory,
ExternalAccountFactory,
StripePaymentProviderFactory
)
from bluebottle.initiatives.tests.factories import InitiativeFactory
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient
class StripePaymentIntentTestCase(BluebottleTestCase):
def setUp(self):
super(StripePaymentIntentTestCase, self).setUp()
StripePaymentProvider.objects.all().delete()
StripePaymentProviderFactory.create()
self.client = JSONAPITestClient()
self.user = BlueBottleUserFactory()
self.initiative = InitiativeFactory.create()
self.initiative.states.submit()
self.initiative.states.approve(save=True)
self.bank_account = ExternalAccountFactory.create()
self.funding = FundingFactory.create(initiative=self.initiative, bank_account=self.bank_account)
self.donation = DonorFactory.create(activity=self.funding, user=None)
self.intent_url = reverse('stripe-payment-intent-list')
self.data = {
'data': {
'type': 'payments/stripe-payment-intents',
'relationships': {
'donation': {
'data': {
'type': 'contributors/donations',
'id': self.donation.pk,
}
}
}
}
}
def test_create_intent(self):
self.donation.user = self.user
self.donation.save()
payment_intent = stripe.PaymentIntent('some intent id')
payment_intent.update({
'client_secret': 'some client secret',
})
with mock.patch('stripe.PaymentIntent.create', return_value=payment_intent) as create_intent:
response = self.client.post(self.intent_url, data=json.dumps(self.data), user=self.user)
create_intent.assert_called_with(
amount=int(self.donation.amount.amount * 100),
currency=self.donation.amount.currency,
metadata={
'tenant_name': u'test',
'activity_id': self.donation.activity.pk,
'activity_title': self.donation.activity.title,
'tenant_domain': u'testserver'
},
statement_descriptor=u'Test',
statement_descriptor_suffix=u'Test',
transfer_data={
'destination': self.bank_account.connect_account.account_id
}
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = json.loads(response.content)
self.assertEqual(data['data']['attributes']['intent-id'], payment_intent.id)
self.assertEqual(data['data']['attributes']['client-secret'], payment_intent.client_secret)
self.assertEqual(data['included'][0]['attributes']['status'], 'new')
def test_create_intent_us(self):
self.bank_account.connect_account.account.country = 'US'
self.bank_account.connect_account.country = 'US'
self.bank_account.connect_account.save()
self.donation.user = self.user
self.donation.save()
payment_intent = stripe.PaymentIntent('some intent id')
payment_intent.update({
'client_secret': 'some client secret',
})
with mock.patch('stripe.PaymentIntent.create', return_value=payment_intent) as create_intent:
response = self.client.post(self.intent_url, data=json.dumps(self.data), user=self.user)
create_intent.assert_called_with(
amount=int(self.donation.amount.amount * 100),
currency=self.donation.amount.currency,
metadata={
'tenant_name': u'test',
'activity_id': self.donation.activity.pk,
'activity_title': self.donation.activity.title,
'tenant_domain': u'testserver'
},
on_behalf_of=self.bank_account.connect_account.account_id,
statement_descriptor=u'Test',
statement_descriptor_suffix=u'Test',
transfer_data={
'destination': self.bank_account.connect_account.account_id
}
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = json.loads(response.content)
self.assertEqual(data['data']['attributes']['intent-id'], payment_intent.id)
self.assertEqual(data['data']['attributes']['client-secret'], payment_intent.client_secret)
self.assertEqual(data['included'][0]['attributes']['status'], 'new')
def test_create_intent_anonymous(self):
payment_intent = stripe.PaymentIntent('some intent id')
payment_intent.update({
'client_secret': 'some client secret',
})
with mock.patch('stripe.PaymentIntent.create', return_value=payment_intent):
response = self.client.post(
self.intent_url,
data=json.dumps(self.data),
HTTP_AUTHORIZATION='Donation {}'.format(self.donation.client_secret)
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = json.loads(response.content)
self.assertEqual(data['data']['attributes']['intent-id'], payment_intent.id)
self.assertEqual(data['data']['attributes']['client-secret'], payment_intent.client_secret)
self.assertEqual(data['included'][0]['attributes']['status'], 'new')
def test_create_intent_wrong_token(self):
payment_intent = stripe.PaymentIntent('some intent id')
payment_intent.update({
'client_secret': 'some client secret',
})
with mock.patch('stripe.PaymentIntent.create', return_value=payment_intent):
response = self.client.post(
self.intent_url,
data=json.dumps(self.data),
HTTP_AUTHORIZATION='Donation wrong-token'
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_create_intent_other_user(self):
self.donation.user = self.user
self.donation.save()
response = self.client.post(
self.intent_url,
data=json.dumps(self.data),
user=BlueBottleUserFactory.create()
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_create_intent_no_user(self):
response = self.client.post(
self.intent_url,
data=json.dumps(self.data),
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class StripeSourcePaymentTestCase(BluebottleTestCase):
def setUp(self):
super(StripeSourcePaymentTestCase, self).setUp()
StripePaymentProviderFactory.create()
self.client = JSONAPITestClient()
self.user = BlueBottleUserFactory()
self.initiative = InitiativeFactory.create()
self.initiative.states.submit()
self.initiative.states.approve(save=True)
self.bank_account = ExternalAccountFactory.create()
self.funding = FundingFactory.create(initiative=self.initiative, bank_account=self.bank_account)
self.donation = DonorFactory.create(activity=self.funding, user=None)
self.payment_url = reverse('stripe-source-payment-list')
self.data = {
'data': {
'type': 'payments/stripe-source-payments',
'attributes': {
'source-token': 'test-token',
},
'relationships': {
'donation': {
'data': {
'type': 'contributors/donations',
'id': self.donation.pk,
}
}
}
}
}
def test_create_payment(self):
self.donation.user = self.user
self.donation.save()
with mock.patch(
'stripe.Source.modify'
):
response = self.client.post(self.payment_url, data=json.dumps(self.data), user=self.user)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = json.loads(response.content)
self.assertEqual(data['data']['attributes']['source-token'], 'test-token')
self.assertEqual(data['included'][0]['attributes']['status'], 'new')
def test_create_payment_anonymous(self):
with mock.patch(
'stripe.Source.modify'
):
response = self.client.post(
self.payment_url,
data=json.dumps(self.data),
HTTP_AUTHORIZATION='Donation {}'.format(self.donation.client_secret)
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = json.loads(response.content)
self.assertEqual(data['data']['attributes']['source-token'], 'test-token')
self.assertEqual(data['included'][0]['attributes']['status'], 'new')
def test_create_intent_other_user(self):
self.donation.user = self.user
self.donation.save()
response = self.client.post(
self.payment_url,
data=json.dumps(self.data),
user=BlueBottleUserFactory.create()
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_create_intent_no_user(self):
response = self.client.post(
self.payment_url,
data=json.dumps(self.data),
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ConnectAccountDetailsTestCase(BluebottleTestCase):
def setUp(self):
super(ConnectAccountDetailsTestCase, self).setUp()
self.client = JSONAPITestClient()
self.user = BlueBottleUserFactory()
country = 'NL'
self.stripe_connect_account = stripe.Account('some-connect-id')
self.stripe_connect_account.update({
'country': country,
'individual': munch.munchify({
'first_name': 'Jhon',
'last_name': 'Example',
'email': 'jhon@example.com',
'verification': munch.munchify({
'status': 'pending',
}),
'requirements': munch.munchify({
'eventually_due': ['external_accounts', 'individual.dob.month'],
'currently_due': [],
'past_due': [],
})
}),
'requirements': munch.munchify({
'eventually_due': ['external_accounts', 'individual.dob.month'],
'disabled': False
}),
'external_accounts': munch.munchify({
'total_count': 0,
'data': []
})
})
with mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
):
self.connect_account = StripePayoutAccountFactory(
owner=self.user,
country=country,
account_id='some-account-id'
)
self.account_list_url = reverse('connect-account-list')
self.account_url = reverse('connect-account-details', args=(self.connect_account.id,))
self.country_spec = stripe.CountrySpec(country)
self.country_spec.update({
'verification_fields': munch.munchify({
'individual': munch.munchify({
'additional': ['external_accounts'],
'minimum': ['individual.first_name'],
})
})
})
self.data = {
'data': {
'type': 'payout-accounts/stripes',
'id': self.connect_account.pk,
'attributes': {
'token': 'some-account-token',
'country': self.connect_account.country,
}
}
}
def test_create(self):
self.connect_account.delete()
tenant = connection.tenant
tenant.name = 'tst'
tenant.save()
connect_account = stripe.Account('some-connect-id')
connect_account.update({
'country': self.data['data']['attributes']['country'],
'individual': munch.munchify({
'first_name': 'Jhon',
'last_name': 'Example',
'email': 'jhon@example.com',
'verification': munch.munchify({
'status': 'pending',
}),
'requirements': munch.munchify({
'eventually_due': ['external_accounts', 'individual.dob.month'],
'currently_due': [],
'past_due': [],
})
}),
'requirements': munch.munchify({
'eventually_due': ['external_accounts', 'individual.dob.month'],
'disabled': False
}),
'external_accounts': munch.munchify({
'total_count': 0,
'data': []
})
})
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.create', return_value=connect_account
) as create_account, mock.patch(
'stripe.Account.modify', return_value=connect_account
) as modify_account, mock.patch(
'stripe.Account.retrieve', return_value=connect_account
):
response = self.client.post(
self.account_list_url, data=json.dumps(self.data), user=self.user
)
create_account.assert_called_with(
business_profile={'url': 'https://testserver', 'mcc': '8398'},
business_type='individual',
country=self.data['data']['attributes']['country'],
metadata={'tenant_name': 'test', 'tenant_domain': 'testserver', 'member_id': self.user.pk},
requested_capabilities=['transfers'],
settings={
'card_payments': {
'statement_descriptor_prefix': u'tst--'
},
'payments': {
'statement_descriptor': u'tst--'
},
'payouts': {
'statement_descriptor': u'tst--',
'schedule': {'interval': 'manual'}
}
},
# business_type='individual',
type='custom'
)
modify_account.assert_called_with(
'some-connect-id',
account_token='some-account-token'
)
data = json.loads(response.content)
self.assertEqual(
data['data']['attributes']['country'],
self.data['data']['attributes']['country']
)
self.assertEqual(
data['data']['attributes']['disabled'], False
)
self.assertEqual(
data['data']['attributes']['verified'], False
)
self.assertEqual(
data['data']['meta']['required-fields'],
[
u'country',
u'external_accounts',
u'individual.verification.additional_document',
u'document_type',
u'individual.verification.document.front',
u'individual.dob'
]
)
self.assertEqual(
data['data']['attributes']['account']['individual']['first_name'],
'Jhon',
)
self.assertEqual(
data['data']['relationships']['owner']['data']['id'],
str(self.user.pk)
)
def test_create_us(self):
self.connect_account.delete()
tenant = connection.tenant
tenant.name = 'tst'
tenant.save()
connect_account = stripe.Account('some-connect-id')
connect_account.update({
'country': self.data['data']['attributes']['country'],
'individual': munch.munchify({
'first_name': 'Jhon',
'last_name': 'Example',
'email': 'jhon@example.com',
'verification': munch.munchify({
'status': 'pending',
}),
'requirements': munch.munchify({
'eventually_due': ['external_accounts', 'individual.dob.month'],
'currently_due': [],
'past_due': [],
})
}),
'requirements': munch.munchify({
'eventually_due': ['external_accounts', 'individual.dob.month'],
'disabled': False
}),
'external_accounts': munch.munchify({
'total_count': 0,
'data': []
})
})
self.data['data']['attributes']['country'] = 'US'
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.create', return_value=connect_account
) as create_account, mock.patch(
'stripe.Account.modify', return_value=connect_account
) as modify_account, mock.patch(
'stripe.Account.retrieve', return_value=connect_account
):
self.client.post(
self.account_list_url, data=json.dumps(self.data), user=self.user
)
create_account.assert_called_with(
business_profile={'url': 'https://testserver', 'mcc': '8398'},
business_type='individual',
country=self.data['data']['attributes']['country'],
metadata={'tenant_name': 'test', 'tenant_domain': 'testserver', 'member_id': self.user.pk},
requested_capabilities=['transfers', 'card_payments'],
settings={
'card_payments': {
'statement_descriptor_prefix': u'tst--'
},
'payments': {
'statement_descriptor': u'tst--'
},
'payouts': {
'statement_descriptor': u'tst--',
'schedule': {'interval': 'manual'}
}
},
# business_type='individual',
type='custom'
)
modify_account.assert_called_with(
'some-connect-id',
account_token='some-account-token'
)
def test_create_no_user(self):
self.connect_account.delete()
response = self.client.post(
self.account_url,
data=json.dumps(self.data)
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get(self):
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
) as retrieve:
response = self.client.get(
self.account_url, user=self.user
)
retrieve.assert_called_with(self.connect_account.account_id)
data = json.loads(response.content)
self.assertEqual(
data['data']['attributes']['country'],
self.connect_account.country
)
self.assertEqual(
data['data']['attributes']['disabled'], False
)
self.assertEqual(
data['data']['attributes']['verified'], False
)
self.assertEqual(
data['data']['meta']['required-fields'],
[
u'country',
u'external_accounts',
u'individual.verification.additional_document',
u'document_type',
u'individual.verification.document.front',
u'individual.dob'
]
)
self.assertEqual(
data['data']['attributes']['account']['individual']['first_name'],
'Jhon',
)
self.assertEqual(
data['data']['relationships']['owner']['data']['id'],
str(self.user.pk)
)
def test_get_verification_error(self):
error = {
"reason": (
"The date of birth (DOB) on the document does not match "
"the DOB on the account. Please upload a document with a "
"DOB that matches the DOB on the account. You can also "
"update the DOB on the account."
),
"code": "verification_document_dob_mismatch",
"requirement": "individual.verification.document"
}
self.stripe_connect_account.update({
'requirements': munch.munchify({
'eventually_due': ['external_accounts', 'individual.dob.month'],
'errors': [error],
'disabled': False
}),
})
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
) as retrieve:
response = self.client.get(
self.account_url, user=self.user
)
retrieve.assert_called_with(self.connect_account.account_id)
data = json.loads(response.content)
self.assertEqual(
data['data']['meta']['errors'][0]['source']['pointer'],
'/data/attributes/individual/verification/document/front'
)
self.assertEqual(
data['data']['meta']['errors'][0]['title'],
error['reason']
)
self.assertEqual(
data['data']['meta']['errors'][0]['code'],
error['code']
)
def test_get_no_user(self):
response = self.client.get(
self.account_url,
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get_wrong_user(self):
response = self.client.get(
self.account_url,
user=BlueBottleUserFactory.create()
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_patch(self):
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.modify', return_value=self.stripe_connect_account
) as modify_account:
response = self.client.patch(
self.account_url,
data=json.dumps(self.data),
user=self.user
)
modify_account.assert_called_with('some-account-id', account_token='some-account-token')
data = json.loads(response.content)
self.assertEqual(
data['data']['attributes']['country'],
self.connect_account.country
)
self.assertEqual(
data['data']['attributes']['disabled'], False
)
def test_patch_wrong_user(self):
response = self.client.patch(
self.account_url,
data=json.dumps(self.data),
user=BlueBottleUserFactory.create()
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_patch_no_user(self):
response = self.client.patch(
self.account_url,
data=json.dumps(self.data),
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get_bank_accounts_no_user(self):
response = self.client.get(
self.account_list_url
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get_bank_accounts_other_user(self):
response = self.client.get(
self.account_list_url,
user=BlueBottleUserFactory.create()
)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class ExternalAccountsTestCase(BluebottleTestCase):
def setUp(self):
super(ExternalAccountsTestCase, self).setUp()
self.client = JSONAPITestClient()
self.user = BlueBottleUserFactory()
account_id = 'some-account-id'
country = 'NU'
self.connect_external_account = stripe.BankAccount('some-bank-token')
self.connect_external_account.update(munch.munchify({
'object': 'bank_account',
'account_holder_name': 'Jane Austen',
'account_holder_type': 'individual',
'bank_name': 'STRIPE TEST BANK',
'country': 'US',
'currency': 'usd',
'fingerprint': '1JWtPxqbdX5Gamtc',
'last4': '6789',
'metadata': {
'order_id': '6735'
},
'routing_number': '110000000',
'status': 'new',
'account': 'acct_1032D82eZvKYlo2C'
}))
external_accounts = stripe.ListObject()
external_accounts.data = [self.connect_external_account]
external_accounts.update({
'total_count': 1,
})
self.stripe_connect_account = stripe.Account(account_id)
self.stripe_connect_account.update({
'country': country,
'external_accounts': external_accounts,
'requirements': munch.munchify({
'eventually_due': ['document_type']
})
})
self.country_spec = stripe.CountrySpec(country)
self.country_spec.update({
'verification_fields': munch.munchify({
'individual': munch.munchify({
'additional': ['individual.verification.document'],
'minimum': ['individual.first_name'],
})
})
})
with mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
):
self.connect_account = StripePayoutAccountFactory.create(owner=self.user, account_id=account_id)
self.external_account = ExternalAccountFactory.create(
connect_account=self.connect_account,
account_id='some-external-account-id'
)
self.url = reverse('connect-account-details', args=(self.connect_account.id, ))
self.external_account_url = reverse('stripe-external-account-list')
self.external_account_detail_url = reverse(
'stripe-external-account-details',
args=(self.external_account.pk, )
)
def test_get_accounts_no_user(self):
response = self.client.get(
self.external_account_url
)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_get_accounts_other_user(self):
response = self.client.get(
self.external_account_url,
user=BlueBottleUserFactory.create()
)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_get(self):
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
) as retrieve, mock.patch(
'stripe.ListObject.retrieve', return_value=self.connect_external_account
) as retrieve:
response = self.client.get(
self.url, user=self.user
)
retrieve.assert_called_with(self.external_account.account_id)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
external_account = data['included'][1]['attributes']
self.assertEqual(
external_account['currency'], self.connect_external_account.currency
)
self.assertEqual(
external_account['country'], self.connect_external_account.country
)
self.assertEqual(
external_account['routing-number'], self.connect_external_account.routing_number
)
self.assertEqual(
external_account['account-holder-name'], self.connect_external_account.account_holder_name
)
self.assertEqual(
external_account['last4'], self.connect_external_account.last4
)
def test_create(self):
data = {
'data': {
'type': 'payout-accounts/stripe-external-accounts',
'attributes': {
'token': self.connect_external_account.id
},
'relationships': {
'connect_account': {
'data': {
'type': 'payout-accounts/stripes',
'id': self.connect_account.pk
},
}
}
}
}
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
), mock.patch(
'stripe.Account.create_external_account', return_value=self.connect_external_account
):
response = self.client.post(
self.external_account_url, data=json.dumps(data), user=self.user
)
self.assertEqual(response.status_code, 201)
data = json.loads(response.content)
external_account = data['data']['attributes']
self.assertEqual(
external_account['currency'], self.connect_external_account.currency
)
self.assertEqual(
external_account['country'], self.connect_external_account.country
)
self.assertEqual(
external_account['routing-number'], self.connect_external_account.routing_number
)
self.assertEqual(
external_account['account-holder-name'], self.connect_external_account.account_holder_name
)
self.assertEqual(
external_account['last4'], self.connect_external_account.last4
)
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
), mock.patch(
'stripe.ListObject.retrieve', return_value=self.connect_external_account
):
response = self.client.get(
self.url, user=self.user
)
data = json.loads(response.content)
external_account = data['included'][1]['attributes']
self.assertEqual(
external_account['currency'], self.connect_external_account.currency
)
self.assertEqual(
external_account['country'], self.connect_external_account.country
)
self.assertEqual(
external_account['routing-number'], self.connect_external_account.routing_number
)
self.assertEqual(
external_account['account-holder-name'], self.connect_external_account.account_holder_name
)
self.assertEqual(
external_account['last4'], self.connect_external_account.last4
)
def test_get_external_account_detail(self):
response = self.client.get(
self.external_account_detail_url,
user=self.external_account.owner
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
response.json()['data']['attributes']['account-id'],
'some-external-account-id'
)
def test_get_external_account_anonymous(self):
response = self.client.get(
self.external_account_detail_url
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get_external_account_other_user(self):
response = self.client.get(
self.external_account_detail_url,
user=BlueBottleUserFactory.create()
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_create_new_extenal(self):
data = {
'data': {
"attributes": {
"account-holder-name": "Tes Ting",
"token": "btok_1234"
},
"type": "payout-accounts/stripe-external-accounts",
"relationships": {
"connect-account": {
"data": {
"type": "payout-accounts/stripes",
"id": self.connect_account.id
}
}
}
}
}
connect_external_account = stripe.BankAccount('some-bank-token')
connect_external_account.update(munch.munchify({
'object': 'bank_account',
'account_holder_name': 'Jane Austen',
'account_holder_type': 'individual',
'bank_name': 'STRIPE TEST BANK',
'country': 'US',
'currency': 'usd',
'fingerprint': '1JWtPxqbdX5Gamtc',
'last4': '6789',
'metadata': {
'order_id': '6735'
},
'routing_number': '110000000',
'status': 'new',
'account': 'acct_1032D82eZvKYlo2C'
}))
with mock.patch(
'stripe.CountrySpec.retrieve', return_value=self.country_spec
), mock.patch(
'stripe.Account.retrieve', return_value=self.stripe_connect_account
), mock.patch(
'stripe.Account.create_external_account', return_value=connect_external_account
):
response = self.client.post(
self.external_account_url, data=json.dumps(data), user=self.user
)
self.assertEqual(response.status_code, 201)
data = json.loads(response.content)
external_account = data['data']['attributes']
self.assertEqual(external_account['status'], 'unverified')
| 36.860465
| 108
| 0.5655
|
6afc0aaf95ee12dd18f650e06517938c675f6a04
| 4,585
|
py
|
Python
|
Chimera/python3_scripts/zynq_tcp_server.py
|
zzpwahaha/Chimera-Control-Trim
|
df1bbf6bea0b87b8c7c9a99dce213fdc249118f2
|
[
"MIT"
] | null | null | null |
Chimera/python3_scripts/zynq_tcp_server.py
|
zzpwahaha/Chimera-Control-Trim
|
df1bbf6bea0b87b8c7c9a99dce213fdc249118f2
|
[
"MIT"
] | null | null | null |
Chimera/python3_scripts/zynq_tcp_server.py
|
zzpwahaha/Chimera-Control-Trim
|
df1bbf6bea0b87b8c7c9a99dce213fdc249118f2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#from __future__ import print_function
import socket
import errno
import sys
import binascii
import time
import sequencer
from axis_fifo import AXIS_FIFO
from devices import fifo_devices
from devices import gpio_devices
from axi_gpio import AXI_GPIO
import dds_lock_pll
from reset_all import reset
from soft_trigger import trigger
from dac81416 import DAC81416
class zynq_tcp_server:
def __init__(self):
self.seq = sequencer.sequencer()
self.dioByteLen = 28
self.dacByteLen = 44
self.ddsByteLen = 46
self.chimeraInterface()
#Function that reads commands from Chimera and passes them to the axis-fifo interface of the Zynq FPGA
def chimeraInterface(self):
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Bind the socket to the port
server_address = ("10.10.0.2", 8080)
print('server starting up on %s port %s' % server_address)
sock.bind(server_address)
# Listen for incoming connections
sock.listen(1)
while True:
# Wait for a connection
print('waiting for a connection')
connection, client_address = sock.accept()
try:
print('connection from', client_address)
# Receive the data in small chunks
while True:
data = connection.recv(64).decode('utf-8')
print('------------------------------------------------------------')
print('received "%s"' % data)
# print(len(data))
if data:
self.writeDevice(connection, data)
# connection.sendall(data)
else:
# print 'no more data from', client_address
break
except socket.error as error:
print(error)
break
connection.close()
def writeDevice(self, conn, data):
data = data.strip('\0')
data_split = data.split('_')
dev = data_split[0]
print('dev = ', dev)
if (dev == 'DIOseq'):
self.writeDIOseq(conn, data_split)
self.seq.mod_enable()
elif (dev == 'DACseq'):
self.writeDACseq(conn, data_split)
self.seq.mod_enable()
elif (dev == 'DDSseq'):
self.writeDDSseq(conn, data_split)
self.seq.mod_enable()
elif (dev == 'initExp'):
self.seq.initExp()
elif (dev == 'resetSeq'):
self.seq.reset_enable_mod()
elif (dev == 'disableMod'):
self.seq.mod_disable()
elif (dev == 'enableMod'):
self.seq.mod_enable()
elif (dev == 'DAC'):
self.seq.mod_disable()
self.seq.set_DAC(int(data_split[1]), float(data_split[2]))
time.sleep(0.01)
self.seq.mod_enable()
elif (dev == 'DDS'):
# self.seq.mod_disable()
if len(data_split)==3:
self.seq.set_DDS(int(data_split[1]), float(data_split[2]))
elif len(data_split)==4:
self.seq.set_DDS(int(data_split[1]), float(data_split[2]),float(data_split[3]))
# time.sleep(0.05)
# self.seq.mod_enable()
elif (dev == 'trigger'):
self.seq.soft_trigger()
else:
print('no device selected')
def writeDIOseq(self, conn, data_split):
num_snapshots = int(data_split[1])
print('num_bytes = ', self.dioByteLen*num_snapshots)
byte_buf = self.socket_read(conn, self.dioByteLen*num_snapshots)#.decode('utf-8') #each byte buffer snapshot consists of 3 sets of 4 bytes
# print hex(ord(byte_buf[0]))
for ii in range(num_snapshots):
print('\n', 'snapshot', ii)
print(byte_buf[ii*self.dioByteLen: ii*self.dioByteLen + self.dioByteLen])
self.seq.dio_seq_write_points(self.dioByteLen, byte_buf, num_snapshots)
def writeDACseq(self, conn, data_split):
num_snapshots = int(data_split[1])
print('num_snapshots = ', num_snapshots)
byte_buf = self.socket_read(conn, self.dacByteLen*num_snapshots)
for ii in range(num_snapshots):
print('\n', 'snapshot', ii)
print(byte_buf[ii*self.dacByteLen: ii*self.dacByteLen + self.dacByteLen])
self.seq.dac_seq_write_points(self.dacByteLen, byte_buf, num_snapshots)
def writeDDSseq(self, conn, data_split):
num_snapshots = int(data_split[1])
print('num_snapshots = ', num_snapshots)
byte_buf = self.socket_read(conn, self.ddsByteLen*num_snapshots)
for ii in range(num_snapshots):
print('\n', 'snapshot', ii)
print(byte_buf[ii*self.ddsByteLen: ii*self.ddsByteLen + self.ddsByteLen])
print("self.ddsByteLen = ", self.ddsByteLen)
print("byte_buf = ", byte_buf)
self.seq.dds_seq_write_points(self.ddsByteLen, byte_buf, num_snapshots)
def socket_read(self, conn, expected):
"""Read expected number of bytes from sock
Will repeatedly call recv until all expected data is received
"""
buffer = b''
while len(buffer) < expected:
buffer += conn.recv(expected - len(buffer))
return buffer
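# ---------------------------------------------------------------------------
# Hedged usage sketch (editor's addition, not part of the original driver):
# writeDevice() above parses underscore-separated commands such as
# "DAC_<channel>_<voltage>", "DDS_<channel>_<freq>[_<amp>]" or "trigger".
# A minimal Chimera-side client, assuming the host/port hard-coded in
# chimeraInterface(), could look like the helper below; the command strings
# and values are illustrative only.
def send_zynq_command(command, host="10.10.0.2", port=8080):
    """Send one command string to the Zynq TCP server and close the socket."""
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        client.connect((host, port))
        # writeDevice() strips trailing NULs, so pad the command up to the
        # 64-byte chunk size used by connection.recv(64) on the server side.
        client.sendall(command.encode("utf-8").ljust(64, b"\0"))
    finally:
        client.close()
# Example (illustrative channel/voltage): send_zynq_command("DAC_0_1.5")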
if __name__ == "__main__":
server = zynq_tcp_server()
| 29.96732
| 140
| 0.697928
|
01db2191b9bb4106500511a0cb830d84e6543204
| 5,946
|
py
|
Python
|
inb4404.py
|
Nachtalb/4chan-downloader
|
a3311b8880444300e59c0208b446fd2287c693b5
|
[
"MIT"
] | null | null | null |
inb4404.py
|
Nachtalb/4chan-downloader
|
a3311b8880444300e59c0208b446fd2287c693b5
|
[
"MIT"
] | null | null | null |
inb4404.py
|
Nachtalb/4chan-downloader
|
a3311b8880444300e59c0208b446fd2287c693b5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import urllib.request, urllib.error, urllib.parse, argparse, logging
import os, re, time
import http.client
import fileinput
from multiprocessing import Process
log = logging.getLogger('inb4404')
workpath = os.path.dirname(os.path.realpath(__file__))
args = None
def main():
global args
parser = argparse.ArgumentParser(description='inb4404')
parser.add_argument('thread', nargs=1, help='url of the thread (or filename; one url per line)')
parser.add_argument('-c', '--with-counter', action='store_true', help='show a counter next to the image that has been downloaded')
parser.add_argument('-d', '--date', action='store_true', help='show date as well')
parser.add_argument('-l', '--less', action='store_true', help='show less information (suppresses checking messages)')
parser.add_argument('-n', '--use-names', action='store_true', help='use thread names instead of the thread ids (...4chan.org/board/thread/thread-id/thread-name)')
parser.add_argument('-r', '--reload', action='store_true', help='reload the queue file every 5 minutes')
args = parser.parse_args()
if args.date:
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(message)s', datefmt='%Y-%m-%d %I:%M:%S %p')
else:
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(message)s', datefmt='%I:%M:%S %p')
thread = args.thread[0].strip()
if thread[:4].lower() == 'http':
download_thread(thread)
else:
download_from_file(thread)
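# ---------------------------------------------------------------------------
# Hedged usage sketch (editor's addition): the positional argument is either
# a single thread URL or the name of a plain-text queue file with one URL per
# line.  The invocations and URLs below are illustrative only.
#
#   python3 inb4404.py https://boards.4chan.org/wg/thread/1234567
#   python3 inb4404.py threads.txt -c -r
#
# where threads.txt might contain:
#   https://boards.4chan.org/wg/thread/1234567
#   https://boards.4chan.org/g/thread/7654321/some-thread-name
# ---------------------------------------------------------------------------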
def load(url):
req = urllib.request.Request(url, headers={'User-Agent': '4chan Browser'})
return urllib.request.urlopen(req).read()
def download_thread(thread_link):
board = thread_link.split('/')[3]
thread = thread_link.split('/')[5].split('#')[0]
if len(thread_link.split('/')) > 6:
thread_tmp = thread_link.split('/')[6].split('#')[0]
if args.use_names or os.path.exists(os.path.join(workpath, 'downloads', board, thread_tmp)):
thread = thread_tmp
directory = os.path.join(workpath, 'downloads', board, thread)
if not os.path.exists(directory):
os.makedirs(directory)
while True:
try:
regex = r'(\/\/i(?:s|)\d*\.(?:4cdn|4chan)\.org\/\w+\/(\d+\.(?:jpg|png|gif|webm)))'
regex_result = list(set(re.findall(regex, load(thread_link).decode('utf-8'))))
regex_result = sorted(regex_result, key=lambda tup: tup[1])
regex_result_len = len(regex_result)
regex_result_cnt = 1
for link, img in regex_result:
img_path = os.path.join(directory, img)
if not os.path.exists(img_path):
data = load('https:' + link)
output_text = board + '/' + thread + '/' + img
if args.with_counter:
output_text = '[' + str(regex_result_cnt).rjust(len(str(regex_result_len))) + '/' + str(regex_result_len) + '] ' + output_text
log.info(output_text)
with open(img_path, 'wb') as f:
f.write(data)
##################################################################################
# saves new images to a separate directory
# if you delete them there, they are not downloaded again
# if you delete an image in the 'downloads' directory, it will be downloaded again
copy_directory = os.path.join(workpath, 'new', board, thread)
if not os.path.exists(copy_directory):
os.makedirs(copy_directory)
copy_path = os.path.join(copy_directory, img)
with open(copy_path, 'wb') as f:
f.write(data)
##################################################################################
regex_result_cnt += 1
except urllib.error.HTTPError as err:
time.sleep(10)
try:
load(thread_link)
except urllib.error.HTTPError as err:
log.info('%s 404\'d', thread_link)
break
continue
except (urllib.error.URLError, http.client.BadStatusLine, http.client.IncompleteRead):
if not args.less:
log.warning('Something went wrong')
if not args.less:
log.info('Checking ' + board + '/' + thread)
time.sleep(20)
def download_from_file(filename):
running_links = []
while True:
processes = []
for link in [_f for _f in [line.strip() for line in open(filename) if line[:4] == 'http'] if _f]:
if link not in running_links:
running_links.append(link)
log.info('Added ' + link)
process = Process(target=download_thread, args=(link, ))
process.start()
processes.append([process, link])
if len(processes) == 0:
log.warning(filename + ' empty')
if args.reload:
time.sleep(60 * 5) # 5 minutes
links_to_remove = []
for process, link in processes:
if not process.is_alive():
links_to_remove.append(link)
else:
process.terminate()
for link in links_to_remove:
for line in fileinput.input(filename, inplace=True):
print(line.replace(link, '-' + link), end='')
running_links.remove(link)
log.info('Removed ' + link)
if not args.less:
log.info('Reloading ' + args.thread[0]) # thread = filename here; reloading on next loop
else:
break
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
| 41.291667
| 166
| 0.550454
|
d8e7133f2cb70ff9a3ce0e48caa02332b9c292b4
| 3,523
|
py
|
Python
|
Eval/noiseGen_LBPH.py
|
SchemingWeasels/EEEE1027_face-recognition_template-matching
|
22a9ebf174d21bbbfbe637d9c7dadba34d468614
|
[
"MIT"
] | null | null | null |
Eval/noiseGen_LBPH.py
|
SchemingWeasels/EEEE1027_face-recognition_template-matching
|
22a9ebf174d21bbbfbe637d9c7dadba34d468614
|
[
"MIT"
] | null | null | null |
Eval/noiseGen_LBPH.py
|
SchemingWeasels/EEEE1027_face-recognition_template-matching
|
22a9ebf174d21bbbfbe637d9c7dadba34d468614
|
[
"MIT"
] | null | null | null |
import cv2
import os
import numpy as np
import cv2.cuda
import time
import csv
CSV_MODE = 'a'
noise_sd = [0, 0.5, 1.0, 1.5, 2.0, 2.5, 3]
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_DIR = os.path.join(BASE_DIR, 'noise')
ENCODING_DIR = os.path.join(BASE_DIR, '..', 'Task 1')
recognizer = cv2.face.LBPHFaceRecognizer_create()
recognizer.read(os.path.join(ENCODING_DIR,'face-trainner_nohair_nosize.yml'))
def tryFaces(frame, sd, scale, file_name):
framCpy = np.copy(frame)
face_cascade = cv2.CascadeClassifier(os.path.join(ENCODING_DIR,'cascades/haarcascade_frontalface_alt2.xml'))
frame_gray = cv2.cvtColor(framCpy, cv2.COLOR_BGR2GRAY)
# frame_gray = cv2.equalizeHist(frame_gray)
# -- Detect faces
faces = face_cascade.detectMultiScale(frame_gray, scaleFactor=scale)
best_dist = 1000
best_match = (-1,0,0,0)
count = 0
for (x, y, w, h) in faces:
center = (x + w // 2, y + h // 2)
framCpy = cv2.ellipse(framCpy, center, (w // 2, h // 2), 0, 0, 360, (255, 0, 255), 2)
roi = frame_gray[y:y + h, x:x + w]
_id, clvl = recognizer.predict(roi)
if clvl > 0:
count += 1
if clvl<best_dist and clvl>1:
best_dist = clvl
best_match = (x,y,w,h)
#if clvl < 100:
cv2.putText(framCpy, f"{clvl:.2f}", (x, y+10), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2,
cv2.LINE_AA)
#-- In each face, detect eyes
#cv2.imshow("testroi", roi)
writer.writerow([file_name, sd, len(faces), count])
if not best_match == (-1,0,0,0):
x, y, w, h = best_match
framCpy = cv2.rectangle(framCpy, (x,y), (x+w,y+h), (255,255,0), thickness=10)
cv2.putText(framCpy, "Jon", (x, y + 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2,
cv2.LINE_AA)
if(framCpy.shape[0] > 1000 or framCpy.shape[1] > 1000):
scale_percent = 30 # percent of original size
width = int(framCpy.shape[1] * scale_percent / 100)
height = int(framCpy.shape[0] * scale_percent / 100)
dim = (width, height)
framCpy = cv2.resize(framCpy, dim, interpolation=cv2.INTER_AREA)
return framCpy
def gen_noise(img, sd):
# Generate the noise matrix
# Generate Gaussian noise
gauss = np.random.normal(0, sd, img.size)
gauss = gauss.reshape(img.shape[0], img.shape[1], img.shape[2]).astype('uint8')
# Add the Gaussian noise to the image
img_gauss = cv2.add(img, gauss)
return img_gauss
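# ---------------------------------------------------------------------------
# Hedged example (editor's addition): applying gen_noise() to one image read
# with OpenCV; the file name and standard deviation are illustrative only.
#
#   img = cv2.imread("sample.jpg")
#   noisy = gen_noise(img, sd=1.5)
#   cv2.imwrite("sample_noisy.jpg", noisy)
# ---------------------------------------------------------------------------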
save_file = open(os.path.join(BASE_DIR, "noise_LBPH.csv"), CSV_MODE, newline='')
writer = csv.writer(save_file)
output_dir = os.path.join(BASE_DIR, "noise LBPH")
if not os.path.isdir(output_dir):
os.mkdir(output_dir)
if CSV_MODE == 'w':
writer.writerow(["file", "sd", "detections", "matches"])
for root, dirs, files in os.walk(DATA_DIR):
if "unused" in root:
continue
for file in files:
file = file.lower()
if file.endswith("png") or file.endswith("jpg") or file.endswith("jpeg"):
file_name = os.path.splitext(file)[0]
data_output_dir = os.path.join(output_dir, file_name)
if not os.path.isdir(data_output_dir):
os.mkdir(data_output_dir)
img = cv2.imread(os.path.join(root, file))
for sd in noise_sd:
cv2.imwrite(os.path.join(data_output_dir, str(sd) + ".jpg"), tryFaces(gen_noise(img, sd), sd, 1.09, file_name))
print("Done")
cv2.destroyAllWindows()
| 34.881188
| 127
| 0.61652
|
3fab256836e3c2695afaebffbd3e344034438d33
| 4,413
|
py
|
Python
|
emissionsapi/db.py
|
brennerm/emissions-api
|
577fa209ffd27476ff1ad0904ecc081564cf8f53
|
[
"MIT"
] | null | null | null |
emissionsapi/db.py
|
brennerm/emissions-api
|
577fa209ffd27476ff1ad0904ecc081564cf8f53
|
[
"MIT"
] | null | null | null |
emissionsapi/db.py
|
brennerm/emissions-api
|
577fa209ffd27476ff1ad0904ecc081564cf8f53
|
[
"MIT"
] | null | null | null |
"""Database Layer for the Emmission API.
"""
from functools import wraps
from sqlalchemy import create_engine, Column, DateTime, Integer, Float, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import geoalchemy2
from emissionsapi.config import config
import emissionsapi.logger
# Logger
logger = emissionsapi.logger.getLogger('emission-api.db')
# Database uri as described in
# https://docs.sqlalchemy.org/en/13/core/engines.html#database-urls
# Retrieved as environment variable.
database = config('database') or 'postgresql://user:user@localhost/db'
# Global session variable. Set on initialization.
__session__ = None
# Base Class of all ORM objects.
Base = declarative_base()
class File(Base):
"""ORM Object for the nc files.
"""
# Tablename
__tablename__ = 'file'
filename = Column(String, primary_key=True)
class Carbonmonoxide(Base):
"""ORM Object for Carbonmonoxide Point
"""
# Tablename
__tablename__ = 'carbonmonoxide'
# Primary Key
id = Column(Integer, primary_key=True)
# Carbonmonoxide Value
value = Column(Float)
# Longitude
longitude = Column(Float)
# Latitude
latitude = Column(Float)
# timestamp
timestamp = Column(DateTime)
# PostGis type
geom = Column(geoalchemy2.Geometry(geometry_type="POINT"))
def __init__(self, value, longitude, latitude, timestamp):
self.value = value
self.longitude = longitude
self.latitude = latitude
self.timestamp = timestamp
self.geom = geoalchemy2.elements.WKTElement(
f"POINT({longitude} {latitude})")
def with_session(f):
"""Wrapper for f to make a SQLAlchemy session present within the function
:param f: function to call
:type f: function
:raises e: Possible Exception of f
:return: result of f
"""
@wraps(f)
def decorated(*args, **kwargs):
# Get new session
session = get_session()
try:
# Call f with the session and all the other arguments
result = f(session, *args, **kwargs)
except Exception as e:
# Roll back the session, something bad happened.
session.rollback()
session.close()
raise e
# Close session and return the result of f
session.close()
return result
return decorated
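# ---------------------------------------------------------------------------
# Hedged usage sketch (editor's addition): a function decorated with
# @with_session receives the freshly created session as its first argument,
# while the wrapper handles rollback and closing.  The helper below is purely
# illustrative and not part of the Emissions API itself.
# ---------------------------------------------------------------------------
@with_session
def _count_carbonmonoxide_points(session):
    """Return the total number of stored carbon monoxide points."""
    return session.query(Carbonmonoxide).count()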
def get_session():
"""Get a new session.
Lazy load the database connection and create the tables.
Returns:
sqlalchemy.orm.session.Session -- SQLAlchemy Session Object
"""
global __session__
# Create Database Connection, Tables and Sessionmaker if necessary.
if not __session__:
Engine = create_engine(database)
__session__ = sessionmaker(bind=Engine)
Base.metadata.create_all(Engine)
# Return new session object
return __session__()
def get_points_in_polygon(session, polygon):
"""Get all points from within the specified polygon.
:param session: SQL Alchemy Session
:type session: sqlalchemy.orm.session.Session
:param polygon: Polygon where to search for points
:type polygon: geoalchemy2.WKTElement
:return: SQLAlchemy Query Object with the points from within the polygon.
:rtype: sqlalchemy.orm.query.Query
"""
return session.query(Carbonmonoxide).filter(
geoalchemy2.func.ST_WITHIN(Carbonmonoxide.geom, polygon))
def get_points_in_rectangle(session, upper_left, lower_right):
"""Get all points from within a rectangle.
:param session: SQL Alchemy Session
:type session: sqlalchemy.orm.session.Session
:param upper_left: Upper left point of the rectangle
:type upper_left: tuple
:param lower_right: Lower right point of the rectangle
:type lower_right: tuple
:return: SQLAlchemy Query Object with the points from within the polygon.
:rtype: sqlalchemy.orm.query.Query
"""
# Defining the rectangle
rectangle = geoalchemy2.elements.WKTElement(
f'POLYGON(({upper_left[0]} {upper_left[1]},'
f' {lower_right[0]} {upper_left[1]},'
f' {lower_right[0]} {lower_right[1]},'
f' {upper_left[0]} {lower_right[1]},'
f' {upper_left[0]} {upper_left[1]}))')
return get_points_in_polygon(session, rectangle)
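# ---------------------------------------------------------------------------
# Hedged usage sketch (editor's addition): querying all points inside a
# bounding box given as (longitude, latitude) tuples; the coordinates below
# are illustrative only.
#
#   session = get_session()
#   points = get_points_in_rectangle(session, (8.0, 53.0), (12.0, 50.0))
#   for point in points:
#       print(point.timestamp, point.longitude, point.latitude, point.value)
#   session.close()
# ---------------------------------------------------------------------------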
| 30.226027
| 78
| 0.686155
|
c8357f7944bf2609dd98e46a4eb4f94840915d8d
| 6,668
|
py
|
Python
|
python/gstgva/region_of_interest.py
|
LikHait/gst-video-analytics
|
492561548051ee7be0ad5f1045108fb56bd1a3ac
|
[
"MIT"
] | null | null | null |
python/gstgva/region_of_interest.py
|
LikHait/gst-video-analytics
|
492561548051ee7be0ad5f1045108fb56bd1a3ac
|
[
"MIT"
] | null | null | null |
python/gstgva/region_of_interest.py
|
LikHait/gst-video-analytics
|
492561548051ee7be0ad5f1045108fb56bd1a3ac
|
[
"MIT"
] | 1
|
2020-05-14T15:30:03.000Z
|
2020-05-14T15:30:03.000Z
|
# ==============================================================================
# Copyright (C) 2018-2020 Intel Corporation
#
# SPDX-License-Identifier: MIT
# ==============================================================================
## @file region_of_interest.py
# @brief This file contains gstgva.region_of_interest.RegionOfInterest class to control region of interest for particular gstgva.video_frame.VideoFrame with gstgva.tensor.Tensor instances attached
import ctypes
import numpy
from typing import List
from collections import namedtuple
from .tensor import Tensor
from .util import VideoRegionOfInterestMeta
from .util import libgst, libgobject, libgstvideo, GLIST_POINTER
import gi
gi.require_version('GstVideo', '1.0')
gi.require_version('GLib', '2.0')
gi.require_version('Gst', '1.0')
from gi.repository import GstVideo, GLib, GObject, Gst
Rect = namedtuple("Rect", "x y w h")
## @brief This class represents region of interest - object describing detection result (bounding box) and containing
# multiple Tensor objects (inference results) attached by multiple models. For example, it can be a region of interest with a detected face and the recognized age
# and sex of a person. It can be produced by a pipeline with gvadetect running a detection model and two gvaclassify
# elements with two classification models. Such RegionOfInterest will have bounding box coordinates filled and will have 3 Tensor objects
# attached - 1 Tensor object with detection result and 2 Tensor objects with classification results coming from 2 classifications
class RegionOfInterest(object):
## @brief Get bounding box of the RegionOfInterest as pixel coordinates in original image
# @return Bounding box coordinates of the RegionOfInterest
def rect(self):
return Rect(x = self.__roi_meta.x,
y = self.__roi_meta.y,
w = self.__roi_meta.w,
h = self.__roi_meta.h)
## @brief Get bounding box of the RegionOfInterest as normalized coordinates in the range [0, 1]
# @return Bounding box coordinates of the RegionOfInterest
def normalized_rect(self):
detection = self.detection()
return Rect(x = detection['x_min'],
y = detection['y_min'],
w = detection['x_max'] - detection['x_min'],
h = detection['y_max'] - detection['y_min'])
## @brief Get class label of this RegionOfInterest
# @return Class label of this RegionOfInterest
def label(self) -> str:
return GLib.quark_to_string(self.__roi_meta.roi_type)
## @brief Get confidence from detection Tensor, last added to this RegionOfInterest
# @return last added detection Tensor confidence if exists, otherwise None
def confidence(self) -> float:
detection = self.detection()
return detection.confidence() if detection else None
## @brief Get all Tensor instances added to this RegionOfInterest
# @return list of Tensor instances added to this RegionOfInterest
def tensors(self):
param = self.meta()._params
while param:
tensor_structure = param.contents.data
# "object_id" is used to store ROI id for tracking
if not libgst.gst_structure_has_name(tensor_structure, "object_id".encode('utf-8')):
yield Tensor(tensor_structure)
param = param.contents.next
## @brief Returns detection Tensor, last added to this RegionOfInterest. As any other Tensor, returned detection
# Tensor can contain arbitrary information. If you use RegionOfInterest based on VideoRegionOfInterestMeta
# attached by gvadetect by default, then this Tensor will contain "label_id", "confidence", "x_min", "x_max",
# "y_min", "y_max" fields.
# If RegionOfInterest doesn't have detection Tensor, it will be created in-place
# @return detection Tensor, empty if there were no detection Tensor objects added to this RegionOfInterest when
# this method was called
def detection(self) -> Tensor:
for tensor in self.tensors():
if tensor.is_detection():
return tensor
return self.add_tensor('detection')
## @brief Get label_id from detection Tensor, last added to this RegionOfInterest
# @return last added detection Tensor label_id if exists, otherwise None
def label_id(self) -> int:
detection = self.detection()
return detection.label_id() if detection else None
## @brief Add new Tensor (inference result) to the RegionOfInterest.
# @param name Name for the tensor.
# This function does not take ownership of tensor passed, but only copies its contents
# @return just created Tensor object, which can be filled with tensor information further
def add_tensor(self, name: str = "") -> Tensor:
tensor = libgst.gst_structure_new_empty(name.encode('utf-8'))
libgstvideo.gst_video_region_of_interest_meta_add_param(self.meta(), tensor)
return Tensor(tensor)
## @brief Get VideoRegionOfInterestMeta containing bounding box information and tensors (inference results).
# Tensors are represented as GstStructures added to GstVideoRegionOfInterestMeta.params
# @return VideoRegionOfInterestMeta containing bounding box and tensors (inference results)
def meta(self) -> VideoRegionOfInterestMeta:
return self.__roi_meta
## @brief Iterate by VideoRegionOfInterestMeta instances attached to buffer
# @param buffer buffer with GstVideoRegionOfInterestMeta instances attached
# @return generator for VideoRegionOfInterestMeta instances attached to buffer
@classmethod
def _iterate(self, buffer: Gst.Buffer):
try:
meta_api = hash(GObject.GType.from_name("GstVideoRegionOfInterestMetaAPI"))
except:
return
gpointer = ctypes.c_void_p()
while True:
try:
value = libgst.gst_buffer_iterate_meta_filtered(hash(buffer), ctypes.byref(gpointer), meta_api)
except:
value = None
if not value:
return
roi_meta = ctypes.cast(value, ctypes.POINTER(VideoRegionOfInterestMeta)).contents
yield RegionOfInterest(roi_meta)
## @brief Construct RegionOfInterest instance from VideoRegionOfInterestMeta. After this, RegionOfInterest will
# obtain all tensors (detection & inference results) from VideoRegionOfInterestMeta
# @param roi_meta VideoRegionOfInterestMeta containing bounding box information and tensors
def __init__(self, roi_meta: VideoRegionOfInterestMeta):
self.__roi_meta = roi_meta
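# ---------------------------------------------------------------------------
# Hedged usage sketch (editor's addition): iterating the regions of interest
# attached to a Gst.Buffer, e.g. inside a pad probe downstream of gvadetect.
# Only methods defined above are used; "buffer" is assumed to carry
# GstVideoRegionOfInterestMeta produced upstream.
#
#   for roi in RegionOfInterest._iterate(buffer):
#       box = roi.rect()
#       print(roi.label(), roi.confidence(), box.x, box.y, box.w, box.h)
# ---------------------------------------------------------------------------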
| 50.135338
| 197
| 0.697361
|
832016ab8cc6843449f450efde09286aa48a9428
| 7,096
|
py
|
Python
|
kmeans_ae_removal.py
|
anonxyz123/fgk62af5
|
6dd028b0c5e90c3f296eb32dfae033c5612fcae1
|
[
"MIT"
] | null | null | null |
kmeans_ae_removal.py
|
anonxyz123/fgk62af5
|
6dd028b0c5e90c3f296eb32dfae033c5612fcae1
|
[
"MIT"
] | null | null | null |
kmeans_ae_removal.py
|
anonxyz123/fgk62af5
|
6dd028b0c5e90c3f296eb32dfae033c5612fcae1
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import random
import numpy as np
from sklearn.cluster import KMeans
from sklearn.preprocessing import StandardScaler
import torch.optim as optim
import torchvision.utils as vutils
from tqdm import tqdm
import sys
import os
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
torch.backends.cudnn.benchmark = True
transform = transforms.Compose(
[transforms.ToTensor()])
trainset = torchvision.datasets.CIFAR100(root='/home/lucky/datasets/cifar.python/', train=True,
download=True, transform=transform)
valset = torchvision.datasets.CIFAR100(root='/home/lucky/datasets/cifar.python/', train=False,
download=True, transform=transform)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=1,
shuffle=False)
valloader = torch.utils.data.DataLoader(valset, batch_size=128,
shuffle=False)
class Autoencoder(nn.Module):
def __init__(self):
super(Autoencoder, self).__init__()
self.encoder = nn.Sequential(
nn.Conv2d(3, 16, 4, stride=2, padding=1), # [batch, 16, 16, 16]
nn.BatchNorm2d(16),
nn.ReLU(),
nn.Conv2d(16, 32, 4, stride=2, padding=1), # [batch, 32, 8, 8]
nn.BatchNorm2d(32),
nn.ReLU(),
nn.Conv2d(32, 64, 4, stride=2, padding=1), # [batch, 64, 4, 4]
nn.BatchNorm2d(64),
nn.ReLU(),
nn.Conv2d(64, 64, 4, stride=2, padding=1), # [batch, 64, 2, 2]
)
self.decoder = nn.Sequential(
nn.ConvTranspose2d(64, 64, 4, stride=2, padding=1), # [batch, 64, 4, 4]
nn.BatchNorm2d(64),
nn.ReLU(),
nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1), # [batch, 32, 8, 8]
nn.BatchNorm2d(32),
nn.ReLU(),
nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1), # [batch, 16, 16, 16]
nn.BatchNorm2d(16),
nn.ReLU(),
nn.ConvTranspose2d(16, 3, 4, stride=2, padding=1), # [batch, 3, 32, 32]
nn.Sigmoid(),
)
def forward(self, x):
encoded = self.encoder(x)
decoded = self.decoder(encoded)
return encoded, decoded
def autoencoder_training(epochs, train_set, val_set):
train_loader = torch.utils.data.DataLoader(train_set, batch_size=64,
shuffle=True)
val_loader = torch.utils.data.DataLoader(val_set, batch_size=128,
shuffle=False)
os.makedirs('./outputAE/', exist_ok=True)
model = Autoencoder()
criterion = nn.BCELoss()
model = model.to(device)
optimizer = optim.Adam(model.parameters())
train_losses = []
val_losses = []
for epoch in range(epochs):
running_loss_train = 0.0
running_loss_val = 0.0
pbar = tqdm(train_loader, leave=False,
file=sys.stdout, ascii=True)
model.train()
for i, data in enumerate(pbar):
inputs, labels = data
inputs = inputs.to(device)
optimizer.zero_grad()
output_encoded, output_decoded = model(inputs)
loss = criterion(output_decoded, inputs)
loss.backward()
optimizer.step()
running_loss_train += loss.item()
print('[%d] train-loss: %.3f' % (epoch + 1, running_loss_train / len(train_loader)))
train_losses.append(running_loss_train / len(train_loader))
model.eval()
pbar = tqdm(val_loader, leave=False,
file=sys.stdout, ascii=True)
for i, data in enumerate(pbar):
inputs, labels = data
inputs = inputs.to(device)
output_encoded, output_decoded = model(inputs)
loss = criterion(output_decoded, inputs)
running_loss_val += loss.item()
print('[%d] val-loss: %.3f' % (epoch + 1, running_loss_val / len(val_loader)))
val_losses.append(running_loss_val / len(val_loader))
torch.save(model.cpu(), "./outputAE/ae_%d" % (epoch + 1))
model.to(device)
vutils.save_image(output_decoded[:64], './outputAE/fake_%d.pdf' % (epoch + 1))
if epoch == 0:
vutils.save_image(inputs[:64], './outputAE/real.pdf')
import pickle
with open('./outputAE/train_losses.pkl', 'wb') as f:
pickle.dump(train_losses, f)
with open('./outputAE/val_losses.pkl', 'wb') as f:
pickle.dump(val_losses, f)
import matplotlib.pyplot as plt
plt.xlabel("Epochs")
plt.ylabel("Binary Cross-Entropy Loss")
plt.plot(train_losses, label='Train Loss')
plt.plot(val_losses, label='Val Loss')
plt.legend(loc='lower left')
plt.savefig("./outputAE/loss_curves.pdf")
plt.show()
def k_means_removal_ae(epoch, ds, fraction, n_clusters, n_init=100, max_iter=300):
images = []
print("Konverting to numpy arrays...")
model = torch.load("./outputAE/ae_%d" % (epoch))
model.eval()
for i, data in enumerate(ds):
output_encoded, output_decoded = model(data[0])
images.append(output_encoded.detach().numpy().reshape((-1)))
images = np.array(images)
print(images.shape)
print("Starting scaling...")
scaler = StandardScaler()
scaled_features = scaler.fit_transform(images)
print("Starting KMeans...")
kmeans = KMeans(init="random",
n_clusters=n_clusters,
n_init=n_init,
max_iter=max_iter)
kmeans.fit(scaled_features)
print("Finished K-Means...")
print("Calculating differences...")
diffs = []
for i in range(len(scaled_features)):
diffs.append(np.linalg.norm(kmeans.cluster_centers_[kmeans.labels_[i]] - scaled_features[i]))
diffs = np.array(diffs)
result = diffs.argsort()[:int(len(diffs) * fraction)].tolist()
import pickle
with open('/home/lucky/datasets/cifar.python/kmeans_ae_' + str(n_clusters) + '_removal_' + str(fraction) + '.pkl', 'wb') as f:
pickle.dump(result, f)
return result
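# ---------------------------------------------------------------------------
# Hedged usage sketch (editor's addition): the returned list holds indices
# (positions in the unshuffled training set) of the samples whose encodings
# lie closest to their cluster centres; a pruned dataset could be built from
# it with torch.utils.data.Subset.  Fraction/cluster values are illustrative.
#
#   kept = k_means_removal_ae(50, trainloader, 0.5, 100)
#   pruned_trainset = torch.utils.data.Subset(trainset, kept)
# ---------------------------------------------------------------------------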
if __name__ == '__main__':
autoencoder_training(50, trainset, valset)
k_means_removal_ae(50, trainloader, 0.75, 50)
k_means_removal_ae(50, trainloader, 0.50, 50)
k_means_removal_ae(50, trainloader, 0.25, 50)
k_means_removal_ae(50, trainloader, 0.75, 100)
k_means_removal_ae(50, trainloader, 0.50, 100)
k_means_removal_ae(50, trainloader, 0.25, 100)
k_means_removal_ae(50, trainloader, 0.75, 150)
k_means_removal_ae(50, trainloader, 0.50, 150)
k_means_removal_ae(50, trainloader, 0.25, 150)
k_means_removal_ae(50, trainloader, 0.75, 200)
k_means_removal_ae(50, trainloader, 0.50, 200)
k_means_removal_ae(50, trainloader, 0.25, 200)
| 36.389744
| 130
| 0.61133
|
083c23c6dd23922c92ca7fc4e23e3804b3f05275
| 59,148
|
py
|
Python
|
web/addons/survey/survey.py
|
diogocs1/comps
|
63df07f6cf21c41e4527c06e2d0499f23f4322e7
|
[
"Apache-2.0"
] | null | null | null |
web/addons/survey/survey.py
|
diogocs1/comps
|
63df07f6cf21c41e4527c06e2d0499f23f4322e7
|
[
"Apache-2.0"
] | null | null | null |
web/addons/survey/survey.py
|
diogocs1/comps
|
63df07f6cf21c41e4527c06e2d0499f23f4322e7
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and / or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DF
from openerp.addons.website.models.website import slug
from urlparse import urljoin
from itertools import product
from collections import Counter
from collections import OrderedDict
import datetime
import logging
import re
import uuid
_logger = logging.getLogger(__name__)
class survey_stage(osv.Model):
"""Stages for Kanban view of surveys"""
_name = 'survey.stage'
_description = 'Survey Stage'
_order = 'sequence,id'
_columns = {
'name': fields.char(string="Name", required=True, translate=True),
'sequence': fields.integer(string="Sequence"),
'closed': fields.boolean(string="Closed", help="If closed, people won't be able to answer to surveys in this column."),
'fold': fields.boolean(string="Folded in kanban view")
}
_defaults = {
'sequence': 1,
'closed': False
}
_sql_constraints = [
('positive_sequence', 'CHECK(sequence >= 0)', 'Sequence number MUST be a natural')
]
class survey_survey(osv.Model):
'''Settings for a multi-page/multi-question survey.
Each survey can have one or more attached pages, and each page can display
one or more questions.
'''
_name = 'survey.survey'
_description = 'Survey'
_rec_name = 'title'
_inherit = ['mail.thread', 'ir.needaction_mixin']
# Protected methods #
def _has_questions(self, cr, uid, ids, context=None):
""" Ensure that this survey has at least one page with at least one
question. """
for survey in self.browse(cr, uid, ids, context=context):
if not survey.page_ids or not [page.question_ids
for page in survey.page_ids if page.question_ids]:
return False
return True
## Function fields ##
def _is_designed(self, cr, uid, ids, name, arg, context=None):
res = dict()
for survey in self.browse(cr, uid, ids, context=context):
if not survey.page_ids or not [page.question_ids
for page in survey.page_ids if page.question_ids]:
res[survey.id] = False
else:
res[survey.id] = True
return res
def _get_tot_sent_survey(self, cr, uid, ids, name, arg, context=None):
""" Returns the number of invitations sent for this survey, be they
(partially) completed or not """
res = dict((id, 0) for id in ids)
sur_res_obj = self.pool.get('survey.user_input')
for id in ids:
res[id] = sur_res_obj.search(cr, uid, # SUPERUSER_ID,
[('survey_id', '=', id), ('type', '=', 'link')],
context=context, count=True)
return res
def _get_tot_start_survey(self, cr, uid, ids, name, arg, context=None):
""" Returns the number of started instances of this survey, be they
completed or not """
res = dict((id, 0) for id in ids)
sur_res_obj = self.pool.get('survey.user_input')
for id in ids:
res[id] = sur_res_obj.search(cr, uid, # SUPERUSER_ID,
['&', ('survey_id', '=', id), '|', ('state', '=', 'skip'), ('state', '=', 'done')],
context=context, count=True)
return res
def _get_tot_comp_survey(self, cr, uid, ids, name, arg, context=None):
""" Returns the number of completed instances of this survey """
res = dict((id, 0) for id in ids)
sur_res_obj = self.pool.get('survey.user_input')
for id in ids:
res[id] = sur_res_obj.search(cr, uid, # SUPERUSER_ID,
[('survey_id', '=', id), ('state', '=', 'done')],
context=context, count=True)
return res
def _get_public_url(self, cr, uid, ids, name, arg, context=None):
""" Computes a public URL for the survey """
if context and context.get('relative_url'):
base_url = '/'
else:
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
res = {}
for survey in self.browse(cr, uid, ids, context=context):
res[survey.id] = urljoin(base_url, "survey/start/%s" % slug(survey))
return res
def _get_public_url_html(self, cr, uid, ids, name, arg, context=None):
""" Computes a public URL for the survey (html-embeddable version)"""
urls = self._get_public_url(cr, uid, ids, name, arg, context=context)
for id, url in urls.iteritems():
urls[id] = '<a href="%s">%s</a>' % (url, _("Click here to start survey"))
return urls
def _get_print_url(self, cr, uid, ids, name, arg, context=None):
""" Computes a printing URL for the survey """
if context and context.get('relative_url'):
base_url = '/'
else:
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
res = {}
for survey in self.browse(cr, uid, ids, context=context):
res[survey.id] = urljoin(base_url, "survey/print/%s" % slug(survey))
return res
def _get_result_url(self, cr, uid, ids, name, arg, context=None):
""" Computes an URL for the survey results """
if context and context.get('relative_url'):
base_url = '/'
else:
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
res = {}
for survey in self.browse(cr, uid, ids, context=context):
res[survey.id] = urljoin(base_url, "survey/results/%s" % slug(survey))
return res
# Model fields #
_columns = {
'title': fields.char('Title', required=1, translate=True),
'res_model': fields.char('Category'),
'page_ids': fields.one2many('survey.page', 'survey_id', 'Pages', copy=True),
'stage_id': fields.many2one('survey.stage', string="Stage", ondelete="set null", copy=False),
'auth_required': fields.boolean('Login required',
help="Users with a public link will be requested to login before taking part to the survey",
oldname="authenticate"),
'users_can_go_back': fields.boolean('Users can go back',
help="If checked, users can go back to previous pages."),
'tot_sent_survey': fields.function(_get_tot_sent_survey,
string="Number of sent surveys", type="integer"),
'tot_start_survey': fields.function(_get_tot_start_survey,
string="Number of started surveys", type="integer"),
'tot_comp_survey': fields.function(_get_tot_comp_survey,
string="Number of completed surveys", type="integer"),
'description': fields.html('Description', translate=True,
oldname="description", help="A long description of the purpose of the survey"),
'color': fields.integer('Color Index'),
'user_input_ids': fields.one2many('survey.user_input', 'survey_id',
'User responses', readonly=1),
'designed': fields.function(_is_designed, string="Is designed?",
type="boolean"),
'public_url': fields.function(_get_public_url,
string="Public link", type="char"),
'public_url_html': fields.function(_get_public_url_html,
string="Public link (html version)", type="char"),
'print_url': fields.function(_get_print_url,
string="Print link", type="char"),
'result_url': fields.function(_get_result_url,
string="Results link", type="char"),
'email_template_id': fields.many2one('email.template',
'Email Template', ondelete='set null'),
'thank_you_message': fields.html('Thank you message', translate=True,
help="This message will be displayed when survey is completed"),
'quizz_mode': fields.boolean(string='Quizz mode')
}
def _default_stage(self, cr, uid, context=None):
ids = self.pool['survey.stage'].search(cr, uid, [], limit=1, context=context)
if ids:
return ids[0]
return False
_defaults = {
'color': 0,
'stage_id': lambda self, *a, **kw: self._default_stage(*a, **kw)
}
def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
""" Read group customization in order to display all the stages in the
kanban view, even if they are empty """
stage_obj = self.pool.get('survey.stage')
order = stage_obj._order
access_rights_uid = access_rights_uid or uid
if read_group_order == 'stage_id desc':
order = '%s desc' % order
stage_ids = stage_obj._search(cr, uid, [], order=order, access_rights_uid=access_rights_uid, context=context)
result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context)
# restore order of the search
result.sort(lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
fold = {}
for stage in stage_obj.browse(cr, access_rights_uid, stage_ids, context=context):
fold[stage.id] = stage.fold or False
return result, fold
_group_by_full = {
'stage_id': _read_group_stage_ids
}
# Public methods #
def copy_data(self, cr, uid, id, default=None, context=None):
current_rec = self.read(cr, uid, id, fields=['title'], context=context)
title = _("%s (copy)") % (current_rec.get('title'))
default = dict(default or {}, title=title)
return super(survey_survey, self).copy_data(cr, uid, id, default,
context=context)
def next_page(self, cr, uid, user_input, page_id, go_back=False, context=None):
'''The next page to display to the user, knowing that page_id is the id
of the last displayed page.
If page_id == 0, it will always return the first page of the survey.
If all the pages have been displayed and go_back == False, it will
return None
If go_back == True, it will return the *previous* page instead of the
next page.
.. note::
It is assumed here that a careful user will not try to set go_back
to True if she knows that the page to display is the first one!
(doing this will probably cause a giant worm to eat her house)'''
survey = user_input.survey_id
pages = list(enumerate(survey.page_ids))
# First page
if page_id == 0:
return (pages[0][1], 0, len(pages) == 1)
current_page_index = pages.index((filter(lambda p: p[1].id == page_id, pages))[0])
# All the pages have been displayed
if current_page_index == len(pages) - 1 and not go_back:
return (None, -1, False)
# Let's get back, baby!
elif go_back and survey.users_can_go_back:
return (pages[current_page_index - 1][1], current_page_index - 1, False)
else:
# This will show the last page
if current_page_index == len(pages) - 2:
return (pages[current_page_index + 1][1], current_page_index + 1, True)
# This will show a regular page
else:
return (pages[current_page_index + 1][1], current_page_index + 1, False)
def filter_input_ids(self, cr, uid, survey, filters, finished=False, context=None):
'''If the user applies any filters, this function returns the list of
filtered user_input_ids and the label strings used to display data on the web.
:param filters: list of dictionaries (having: row_id, answer_id)
:param finished: True for completely filled surveys, False otherwise.
:returns list of filtered user_input_ids.
'''
context = context if context else {}
if filters:
input_line_obj = self.pool.get('survey.user_input_line')
domain_filter, choice, filter_display_data = [], [], []
for filter in filters:
row_id, answer_id = filter['row_id'], filter['answer_id']
if row_id == 0:
choice.append(answer_id)
else:
domain_filter.extend(['|', ('value_suggested_row.id', '=', row_id), ('value_suggested.id', '=', answer_id)])
if choice:
domain_filter.insert(0, ('value_suggested.id', 'in', choice))
else:
domain_filter = domain_filter[1:]
line_ids = input_line_obj.search(cr, uid, domain_filter, context=context)
filtered_input_ids = [input.user_input_id.id for input in input_line_obj.browse(cr, uid, line_ids, context=context)]
else:
filtered_input_ids, filter_display_data = [], []
if finished:
user_input = self.pool.get('survey.user_input')
if not filtered_input_ids:
current_filters = user_input.search(cr, uid, [('survey_id', '=', survey.id)], context=context)
user_input_objs = user_input.browse(cr, uid, current_filters, context=context)
else:
user_input_objs = user_input.browse(cr, uid, filtered_input_ids, context=context)
return [input.id for input in user_input_objs if input.state == 'done']
return filtered_input_ids
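# Hedged example (editor's addition): a "filters" list as expected by
# filter_input_ids() and get_filter_display_data(), where row_id == 0 means a
# plain answer filter and a non-zero row_id addresses a matrix row (the ids
# below are illustrative only):
#   filters = [{'row_id': 0, 'answer_id': 12},
#              {'row_id': 7, 'answer_id': 15}]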
def get_filter_display_data(self, cr, uid, filters, context):
'''Returns the data needed to display the currently applied filters.
:param filters: list of dictionaries (having: row_id, answer_id)
:returns list of dicts holding the data used to display the filters.
'''
filter_display_data = []
if filters:
question_obj = self.pool.get('survey.question')
label_obj = self.pool.get('survey.label')
for filter in filters:
row_id, answer_id = filter['row_id'], filter['answer_id']
question_id = label_obj.browse(cr, uid, answer_id, context=context).question_id.id
question = question_obj.browse(cr, uid, question_id, context=context)
if row_id == 0:
labels = label_obj.browse(cr, uid, [answer_id], context=context)
else:
labels = label_obj.browse(cr, uid, [row_id, answer_id], context=context)
filter_display_data.append({'question_text': question.question, 'labels': [label.value for label in labels]})
return filter_display_data
def prepare_result(self, cr, uid, question, current_filters=None, context=None):
''' Compute statistical data for a question by counting the number of votes per choice on the basis of the filter '''
current_filters = current_filters if current_filters else []
context = context if context else {}
result_summary = {}
#Calculate and return statistics for choice
if question.type in ['simple_choice', 'multiple_choice']:
answers = {}
comments = []
[answers.update({label.id: {'text': label.value, 'count': 0, 'answer_id': label.id}}) for label in question.labels_ids]
for input_line in question.user_input_line_ids:
if input_line.answer_type == 'suggestion' and answers.get(input_line.value_suggested.id) and (not(current_filters) or input_line.user_input_id.id in current_filters):
answers[input_line.value_suggested.id]['count'] += 1
if input_line.answer_type == 'text' and (not(current_filters) or input_line.user_input_id.id in current_filters):
comments.append(input_line)
result_summary = {'answers': answers.values(), 'comments': comments}
#Calculate and return statistics for matrix
if question.type == 'matrix':
rows = OrderedDict()
answers = OrderedDict()
res = dict()
comments = []
[rows.update({label.id: label.value}) for label in question.labels_ids_2]
[answers.update({label.id: label.value}) for label in question.labels_ids]
for cell in product(rows.keys(), answers.keys()):
res[cell] = 0
for input_line in question.user_input_line_ids:
if input_line.answer_type == 'suggestion' and (not(current_filters) or input_line.user_input_id.id in current_filters):
res[(input_line.value_suggested_row.id, input_line.value_suggested.id)] += 1
if input_line.answer_type == 'text' and (not(current_filters) or input_line.user_input_id.id in current_filters):
comments.append(input_line)
result_summary = {'answers': answers, 'rows': rows, 'result': res, 'comments': comments}
#Calculate and return statistics for free_text, textbox, datetime
if question.type in ['free_text', 'textbox', 'datetime']:
result_summary = []
for input_line in question.user_input_line_ids:
if not(current_filters) or input_line.user_input_id.id in current_filters:
result_summary.append(input_line)
#Calculate and return statistics for numerical_box
if question.type == 'numerical_box':
result_summary = {'input_lines': []}
all_inputs = []
for input_line in question.user_input_line_ids:
if not(current_filters) or input_line.user_input_id.id in current_filters:
all_inputs.append(input_line.value_number)
result_summary['input_lines'].append(input_line)
if all_inputs:
result_summary.update({'average': round(sum(all_inputs) / len(all_inputs), 2),
'max': round(max(all_inputs), 2),
'min': round(min(all_inputs), 2),
'sum': sum(all_inputs),
'most_comman': Counter(all_inputs).most_common(5)})
return result_summary
def get_input_summary(self, cr, uid, question, current_filters=None, context=None):
''' Returns an overall summary of the question (answered, skipped, total_inputs) on the basis of the filter '''
current_filters = current_filters if current_filters else []
context = context if context else {}
result = {}
if question.survey_id.user_input_ids:
total_input_ids = current_filters or [input_id.id for input_id in question.survey_id.user_input_ids if input_id.state != 'new']
result['total_inputs'] = len(total_input_ids)
question_input_ids = []
for user_input in question.user_input_line_ids:
if not user_input.skipped:
question_input_ids.append(user_input.user_input_id.id)
result['answered'] = len(set(question_input_ids) & set(total_input_ids))
result['skipped'] = result['total_inputs'] - result['answered']
return result
# Actions
def action_start_survey(self, cr, uid, ids, context=None):
''' Open the website page with the survey form '''
trail = ""
context = dict(context or {}, relative_url=True)
if 'survey_token' in context:
trail = "/" + context['survey_token']
return {
'type': 'ir.actions.act_url',
'name': "Start Survey",
'target': 'self',
'url': self.read(cr, uid, ids, ['public_url'], context=context)[0]['public_url'] + trail
}
def action_send_survey(self, cr, uid, ids, context=None):
''' Open a window to compose an email, pre-filled with the survey
message '''
if not self._has_questions(cr, uid, ids, context=None):
raise osv.except_osv(_('Error!'), _('You cannot send an invitation for a survey that has no questions.'))
survey_browse = self.pool.get('survey.survey').browse(cr, uid, ids,
context=context)[0]
if survey_browse.stage_id.closed:
raise osv.except_osv(_('Warning!'),
_("You cannot send invitations for closed surveys."))
assert len(ids) == 1, 'This option should only be used for a single \
survey at a time.'
ir_model_data = self.pool.get('ir.model.data')
templates = ir_model_data.get_object_reference(cr, uid,
'survey', 'email_template_survey')
template_id = templates[1] if len(templates) > 0 else False
ctx = dict(context)
ctx.update({'default_model': 'survey.survey',
'default_res_id': ids[0],
'default_survey_id': ids[0],
'default_use_template': bool(template_id),
'default_template_id': template_id,
'default_composition_mode': 'comment'}
)
return {
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'survey.mail.compose.message',
'target': 'new',
'context': ctx,
}
def action_print_survey(self, cr, uid, ids, context=None):
''' Open the website page with the survey printable view '''
trail = ""
context = dict(context or {}, relative_url=True)
if 'survey_token' in context:
trail = "/" + context['survey_token']
return {
'type': 'ir.actions.act_url',
'name': "Print Survey",
'target': 'self',
'url': self.read(cr, uid, ids, ['print_url'], context=context)[0]['print_url'] + trail
}
def action_result_survey(self, cr, uid, ids, context=None):
''' Open the website page with the survey results view '''
context = dict(context or {}, relative_url=True)
return {
'type': 'ir.actions.act_url',
'name': "Results of the Survey",
'target': 'self',
'url': self.read(cr, uid, ids, ['result_url'], context=context)[0]['result_url']
}
def action_test_survey(self, cr, uid, ids, context=None):
''' Open the website page with the survey form into test mode'''
context = dict(context or {}, relative_url=True)
return {
'type': 'ir.actions.act_url',
'name': "Results of the Survey",
'target': 'self',
'url': self.read(cr, uid, ids, ['public_url'], context=context)[0]['public_url'] + "/phantom"
}
class survey_page(osv.Model):
'''A page for a survey.
Pages are essentially containers, allowing to group questions by ordered
screens.
.. note::
A page should be deleted if the survey it belongs to is deleted. '''
_name = 'survey.page'
_description = 'Survey Page'
_rec_name = 'title'
_order = 'sequence,id'
# Model Fields #
_columns = {
'title': fields.char('Page Title', required=1,
translate=True),
'survey_id': fields.many2one('survey.survey', 'Survey',
ondelete='cascade', required=True),
'question_ids': fields.one2many('survey.question', 'page_id',
'Questions', copy=True),
'sequence': fields.integer('Page number'),
'description': fields.html('Description',
help="An introductory text to your page", translate=True,
oldname="note"),
}
_defaults = {
'sequence': 10
}
# Public methods #
def copy_data(self, cr, uid, ids, default=None, context=None):
current_rec = self.read(cr, uid, ids, fields=['title'], context=context)
title = _("%s (copy)") % (current_rec.get('title'))
default = dict(default or {}, title=title)
return super(survey_page, self).copy_data(cr, uid, ids, default,
context=context)
class survey_question(osv.Model):
''' Questions that will be asked in a survey.
Each question can have one of more suggested answers (eg. in case of
dropdown choices, multi-answer checkboxes, radio buttons...).'''
_name = 'survey.question'
_description = 'Survey Question'
_rec_name = 'question'
_order = 'sequence,id'
# Model fields #
_columns = {
# Question metadata
'page_id': fields.many2one('survey.page', 'Survey page',
ondelete='cascade', required=1),
'survey_id': fields.related('page_id', 'survey_id', type='many2one',
relation='survey.survey', string='Survey'),
'sequence': fields.integer(string='Sequence'),
# Question
'question': fields.char('Question Name', required=1, translate=True),
'description': fields.html('Description', help="Use this field to add \
additional explanations about your question", translate=True,
oldname='descriptive_text'),
# Answer
'type': fields.selection([('free_text', 'Long Text Zone'),
('textbox', 'Text Input'),
('numerical_box', 'Numerical Value'),
('datetime', 'Date and Time'),
('simple_choice', 'Multiple choice: only one answer'),
('multiple_choice', 'Multiple choice: multiple answers allowed'),
('matrix', 'Matrix')], 'Type of Question', size=15, required=1),
'matrix_subtype': fields.selection([('simple', 'One choice per row'),
('multiple', 'Multiple choices per row')], 'Matrix Type'),
'labels_ids': fields.one2many('survey.label',
'question_id', 'Types of answers', oldname='answer_choice_ids', copy=True),
'labels_ids_2': fields.one2many('survey.label',
'question_id_2', 'Rows of the Matrix', copy=True),
# labels are used for proposed choices
# if question.type == simple choice | multiple choice
# -> only labels_ids is used
# if question.type == matrix
# -> labels_ids are the columns of the matrix
# -> labels_ids_2 are the rows of the matrix
# Display options
'column_nb': fields.selection([('12', '1'),
('6', '2'),
('4', '3'),
('3', '4'),
('2', '6')],
'Number of columns'),
# These options refer to col-xx-[12|6|4|3|2] classes in Bootstrap
'display_mode': fields.selection([('columns', 'Radio Buttons/Checkboxes'),
('dropdown', 'Selection Box')],
'Display mode'),
# Comments
'comments_allowed': fields.boolean('Show Comments Field',
oldname="allow_comment"),
'comments_message': fields.char('Comment Message', translate=True),
'comment_count_as_answer': fields.boolean('Comment Field is an Answer Choice',
oldname='make_comment_field'),
# Validation
'validation_required': fields.boolean('Validate entry',
oldname='is_validation_require'),
'validation_email': fields.boolean('Input must be an email'),
'validation_length_min': fields.integer('Minimum Text Length'),
'validation_length_max': fields.integer('Maximum Text Length'),
'validation_min_float_value': fields.float('Minimum value'),
'validation_max_float_value': fields.float('Maximum value'),
'validation_min_date': fields.datetime('Minimum Date'),
'validation_max_date': fields.datetime('Maximum Date'),
'validation_error_msg': fields.char('Error message',
oldname='validation_valid_err_msg',
translate=True),
# Constraints on number of answers (matrices)
'constr_mandatory': fields.boolean('Mandatory Answer',
oldname="is_require_answer"),
'constr_error_msg': fields.char("Error message",
oldname='req_error_msg', translate=True),
'user_input_line_ids': fields.one2many('survey.user_input_line',
'question_id', 'Answers',
domain=[('skipped', '=', False)]),
}
_defaults = {
'page_id': lambda self, cr, uid, context: context.get('page_id'),
'sequence': 10,
'type': 'free_text',
'matrix_subtype': 'simple',
'column_nb': '12',
'display_mode': 'columns',
'constr_error_msg': lambda s, cr, uid, c: _('This question requires an answer.'),
'validation_error_msg': lambda s, cr, uid, c: _('The answer you entered has an invalid format.'),
'validation_required': False,
'comments_message': lambda s, cr, uid, c: _('If other, precise:'),
}
_sql_constraints = [
('positive_len_min', 'CHECK (validation_length_min >= 0)', 'A length must be positive!'),
('positive_len_max', 'CHECK (validation_length_max >= 0)', 'A length must be positive!'),
('validation_length', 'CHECK (validation_length_min <= validation_length_max)', 'Max length cannot be smaller than min length!'),
('validation_float', 'CHECK (validation_min_float_value <= validation_max_float_value)', 'Max value cannot be smaller than min value!'),
('validation_date', 'CHECK (validation_min_date <= validation_max_date)', 'Max date cannot be smaller than min date!')
]
def copy_data(self, cr, uid, ids, default=None, context=None):
current_rec = self.read(cr, uid, ids, context=context)
question = _("%s (copy)") % (current_rec.get('question'))
default = dict(default or {}, question=question)
return super(survey_question, self).copy_data(cr, uid, ids, default,
context=context)
# Validation methods
def validate_question(self, cr, uid, question, post, answer_tag, context=None):
''' Validate question, depending on question type and parameters '''
try:
checker = getattr(self, 'validate_' + question.type)
except AttributeError:
_logger.warning(question.type + ": This type of question has no validation method")
return {}
else:
return checker(cr, uid, question, post, answer_tag, context=context)
def validate_free_text(self, cr, uid, question, post, answer_tag, context=None):
errors = {}
answer = post[answer_tag].strip()
# Empty answer to mandatory question
if question.constr_mandatory and not answer:
errors.update({answer_tag: question.constr_error_msg})
return errors
def validate_textbox(self, cr, uid, question, post, answer_tag, context=None):
errors = {}
answer = post[answer_tag].strip()
# Empty answer to mandatory question
if question.constr_mandatory and not answer:
errors.update({answer_tag: question.constr_error_msg})
# Email format validation
# Note: this validation is very basic:
# all the strings of the form
# <something>@<anything>.<extension>
# will be accepted
if answer and question.validation_email:
if not re.match(r"[^@]+@[^@]+\.[^@]+", answer):
errors.update({answer_tag: _('This answer must be an email address')})
# Answer validation (if properly defined)
# Length of the answer must be in a range
if answer and question.validation_required:
if not (question.validation_length_min <= len(answer) <= question.validation_length_max):
errors.update({answer_tag: question.validation_error_msg})
return errors
def validate_numerical_box(self, cr, uid, question, post, answer_tag, context=None):
errors = {}
answer = post[answer_tag].strip()
# Empty answer to mandatory question
if question.constr_mandatory and not answer:
errors.update({answer_tag: question.constr_error_msg})
# Checks if user input is a number
if answer:
try:
floatanswer = float(answer)
except ValueError:
errors.update({answer_tag: _('This is not a number')})
# Answer validation (if properly defined)
if answer and question.validation_required:
# Answer is not in the right range
try:
                floatanswer = float(answer)  # the check that the answer is a float has been done above
if not (question.validation_min_float_value <= floatanswer <= question.validation_max_float_value):
errors.update({answer_tag: question.validation_error_msg})
except ValueError:
pass
return errors
def validate_datetime(self, cr, uid, question, post, answer_tag, context=None):
errors = {}
answer = post[answer_tag].strip()
# Empty answer to mandatory question
if question.constr_mandatory and not answer:
errors.update({answer_tag: question.constr_error_msg})
# Checks if user input is a datetime
if answer:
try:
dateanswer = datetime.datetime.strptime(answer, DF)
except ValueError:
errors.update({answer_tag: _('This is not a date/time')})
return errors
# Answer validation (if properly defined)
if answer and question.validation_required:
# Answer is not in the right range
try:
dateanswer = datetime.datetime.strptime(answer, DF)
if not (datetime.datetime.strptime(question.validation_min_date, DF) <= dateanswer <= datetime.datetime.strptime(question.validation_max_date, DF)):
errors.update({answer_tag: question.validation_error_msg})
            except ValueError:  # the check that the answer is a datetime has been done above
pass
return errors
def validate_simple_choice(self, cr, uid, question, post, answer_tag, context=None):
errors = {}
if question.comments_allowed:
comment_tag = "%s_%s" % (answer_tag, 'comment')
# Empty answer to mandatory question
if question.constr_mandatory and not answer_tag in post:
errors.update({answer_tag: question.constr_error_msg})
if question.constr_mandatory and answer_tag in post and post[answer_tag].strip() == '':
errors.update({answer_tag: question.constr_error_msg})
# Answer is a comment and is empty
if question.constr_mandatory and answer_tag in post and post[answer_tag] == "-1" and question.comment_count_as_answer and comment_tag in post and not post[comment_tag].strip():
errors.update({answer_tag: question.constr_error_msg})
return errors
def validate_multiple_choice(self, cr, uid, question, post, answer_tag, context=None):
errors = {}
if question.constr_mandatory:
answer_candidates = dict_keys_startswith(post, answer_tag)
comment_flag = answer_candidates.pop(("%s_%s" % (answer_tag, -1)), None)
if question.comments_allowed:
comment_answer = answer_candidates.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
# There is no answer neither comments (if comments count as answer)
if not answer_candidates and question.comment_count_as_answer and (not comment_flag or not comment_answer):
errors.update({answer_tag: question.constr_error_msg})
# There is no answer at all
if not answer_candidates and not question.comment_count_as_answer:
errors.update({answer_tag: question.constr_error_msg})
return errors
def validate_matrix(self, cr, uid, question, post, answer_tag, context=None):
errors = {}
if question.constr_mandatory:
lines_number = len(question.labels_ids_2)
answer_candidates = dict_keys_startswith(post, answer_tag)
comment_answer = answer_candidates.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
# Number of lines that have been answered
if question.matrix_subtype == 'simple':
answer_number = len(answer_candidates)
elif question.matrix_subtype == 'multiple':
answer_number = len(set([sk.rsplit('_', 1)[0] for sk in answer_candidates.keys()]))
else:
raise RuntimeError("Invalid matrix subtype")
# Validate that each line has been answered
if answer_number != lines_number:
errors.update({answer_tag: question.constr_error_msg})
return errors
class survey_label(osv.Model):
''' A suggested answer for a question '''
_name = 'survey.label'
_rec_name = 'value'
_order = 'sequence,id'
_description = 'Survey Label'
def _check_question_not_empty(self, cr, uid, ids, context=None):
'''Ensure that field question_id XOR field question_id_2 is not null'''
for label in self.browse(cr, uid, ids, context=context):
# 'bool()' is required in order to make '!=' act as XOR with objects
return bool(label.question_id) != bool(label.question_id_2)
_columns = {
'question_id': fields.many2one('survey.question', 'Question',
ondelete='cascade'),
'question_id_2': fields.many2one('survey.question', 'Question',
ondelete='cascade'),
'sequence': fields.integer('Label Sequence order'),
'value': fields.char("Suggested value", translate=True,
required=True),
'quizz_mark': fields.float('Score for this answer', help="A positive score indicates a correct answer; a negative or null score indicates a wrong answer"),
}
_defaults = {
'sequence': 10,
}
_constraints = [
(_check_question_not_empty, "A label must be attached to one and only one question", ['question_id', 'question_id_2'])
]
class survey_user_input(osv.Model):
''' Metadata for a set of one user's answers to a particular survey '''
_name = "survey.user_input"
_rec_name = 'date_create'
_description = 'Survey User Input'
def _quizz_get_score(self, cr, uid, ids, name, args, context=None):
ret = dict()
for user_input in self.browse(cr, uid, ids, context=context):
ret[user_input.id] = sum([uil.quizz_mark for uil in user_input.user_input_line_ids] or [0.0])
return ret
_columns = {
'survey_id': fields.many2one('survey.survey', 'Survey', required=True,
readonly=1, ondelete='restrict'),
'date_create': fields.datetime('Creation Date', required=True,
readonly=1),
'deadline': fields.datetime("Deadline",
help="Date by which the person can open the survey and submit answers",
oldname="date_deadline"),
'type': fields.selection([('manually', 'Manually'), ('link', 'Link')],
'Answer Type', required=1, readonly=1,
oldname="response_type"),
'state': fields.selection([('new', 'Not started yet'),
('skip', 'Partially completed'),
('done', 'Completed')],
'Status',
readonly=True),
'test_entry': fields.boolean('Test entry', readonly=1),
'token': fields.char("Identification token", readonly=1, required=1),
# Optional Identification data
'partner_id': fields.many2one('res.partner', 'Partner', readonly=1),
'email': fields.char("E-mail", readonly=1),
# Displaying data
'last_displayed_page_id': fields.many2one('survey.page',
'Last displayed page'),
# The answers !
'user_input_line_ids': fields.one2many('survey.user_input_line',
'user_input_id', 'Answers'),
# URLs used to display the answers
'result_url': fields.related('survey_id', 'result_url', type='char',
string="Public link to the survey results"),
'print_url': fields.related('survey_id', 'print_url', type='char',
string="Public link to the empty survey"),
'quizz_score': fields.function(_quizz_get_score, type="float", string="Score for the quiz")
}
_defaults = {
'date_create': fields.datetime.now,
'type': 'manually',
'state': 'new',
'token': lambda s, cr, uid, c: uuid.uuid4().__str__(),
'quizz_score': 0.0,
}
_sql_constraints = [
('unique_token', 'UNIQUE (token)', 'A token must be unique!'),
('deadline_in_the_past', 'CHECK (deadline >= date_create)', 'The deadline cannot be in the past')
]
def copy_data(self, cr, uid, id, default=None, context=None):
        raise osv.except_osv(_('Warning!'), _('You cannot duplicate this element!'))
def do_clean_emptys(self, cr, uid, automatic=False, context=None):
''' Remove empty user inputs that have been created manually
(used as a cronjob declared in data/survey_cron.xml) '''
empty_user_input_ids = self.search(cr, uid, [('type', '=', 'manually'),
('state', '=', 'new'),
('date_create', '<', (datetime.datetime.now() - datetime.timedelta(hours=1)).strftime(DF))],
context=context)
if empty_user_input_ids:
self.unlink(cr, uid, empty_user_input_ids, context=context)
def action_survey_resent(self, cr, uid, ids, context=None):
''' Sent again the invitation '''
record = self.browse(cr, uid, ids[0], context=context)
context = dict(context or {})
context.update({
'survey_resent_token': True,
'default_partner_ids': record.partner_id and [record.partner_id.id] or [],
'default_multi_email': record.email or "",
'default_public': 'email_private',
})
return self.pool.get('survey.survey').action_send_survey(cr, uid,
[record.survey_id.id], context=context)
def action_view_answers(self, cr, uid, ids, context=None):
''' Open the website page with the survey form '''
user_input = self.read(cr, uid, ids, ['print_url', 'token'], context=context)[0]
return {
'type': 'ir.actions.act_url',
'name': "View Answers",
'target': 'self',
'url': '%s/%s' % (user_input['print_url'], user_input['token'])
}
def action_survey_results(self, cr, uid, ids, context=None):
''' Open the website page with the survey results '''
return {
'type': 'ir.actions.act_url',
'name': "Survey Results",
'target': 'self',
'url': self.read(cr, uid, ids, ['result_url'], context=context)[0]['result_url']
}
class survey_user_input_line(osv.Model):
_name = 'survey.user_input_line'
_description = 'Survey User Input Line'
_rec_name = 'date_create'
def _answered_or_skipped(self, cr, uid, ids, context=None):
for uil in self.browse(cr, uid, ids, context=context):
# 'bool()' is required in order to make '!=' act as XOR with objects
return uil.skipped != bool(uil.answer_type)
def _check_answer_type(self, cr, uid, ids, context=None):
for uil in self.browse(cr, uid, ids, context=None):
if uil.answer_type:
if uil.answer_type == 'text':
# 'bool()' is required in order to make '!=' act as XOR with objects
return bool(uil.value_text)
elif uil.answer_type == 'number':
return (uil.value_number == 0) or (uil.value_number != False)
elif uil.answer_type == 'date':
return bool(uil.value_date)
elif uil.answer_type == 'free_text':
return bool(uil.value_free_text)
elif uil.answer_type == 'suggestion':
return bool(uil.value_suggested)
return True
_columns = {
'user_input_id': fields.many2one('survey.user_input', 'User Input',
ondelete='cascade', required=1),
'question_id': fields.many2one('survey.question', 'Question',
ondelete='restrict', required=1),
'page_id': fields.related('question_id', 'page_id', type='many2one',
relation='survey.page', string="Page"),
'survey_id': fields.related('user_input_id', 'survey_id',
type="many2one", relation="survey.survey",
string='Survey', store=True),
'date_create': fields.datetime('Create Date', required=1),
'skipped': fields.boolean('Skipped'),
'answer_type': fields.selection([('text', 'Text'),
('number', 'Number'),
('date', 'Date'),
('free_text', 'Free Text'),
('suggestion', 'Suggestion')],
'Answer Type'),
'value_text': fields.char("Text answer"),
'value_number': fields.float("Numerical answer"),
'value_date': fields.datetime("Date answer"),
'value_free_text': fields.text("Free Text answer"),
'value_suggested': fields.many2one('survey.label', "Suggested answer"),
'value_suggested_row': fields.many2one('survey.label', "Row answer"),
'quizz_mark': fields.float("Score given for this answer")
}
_defaults = {
'skipped': False,
        'date_create': fields.datetime.now,
}
_constraints = [
(_answered_or_skipped, "A question cannot be unanswered and skipped", ['skipped', 'answer_type']),
(_check_answer_type, "The answer must be in the right type", ['answer_type', 'text', 'number', 'date', 'free_text', 'suggestion'])
]
def __get_mark(self, cr, uid, value_suggested, context=None):
try:
mark = self.pool.get('survey.label').browse(cr, uid, int(value_suggested), context=context).quizz_mark
except AttributeError:
mark = 0.0
except KeyError:
mark = 0.0
except ValueError:
mark = 0.0
return mark
def create(self, cr, uid, vals, context=None):
value_suggested = vals.get('value_suggested')
if value_suggested:
vals.update({'quizz_mark': self.__get_mark(cr, uid, value_suggested)})
return super(survey_user_input_line, self).create(cr, uid, vals, context=context)
def write(self, cr, uid, ids, vals, context=None):
value_suggested = vals.get('value_suggested')
if value_suggested:
vals.update({'quizz_mark': self.__get_mark(cr, uid, value_suggested)})
return super(survey_user_input_line, self).write(cr, uid, ids, vals, context=context)
def copy_data(self, cr, uid, id, default=None, context=None):
        raise osv.except_osv(_('Warning!'), _('You cannot duplicate this element!'))
def save_lines(self, cr, uid, user_input_id, question, post, answer_tag,
context=None):
''' Save answers to questions, depending on question type
If an answer already exists for question and user_input_id, it will be
overwritten (in order to maintain data consistency). '''
try:
saver = getattr(self, 'save_line_' + question.type)
except AttributeError:
_logger.error(question.type + ": This type of question has no saving function")
return False
else:
saver(cr, uid, user_input_id, question, post, answer_tag, context=context)
def save_line_free_text(self, cr, uid, user_input_id, question, post, answer_tag, context=None):
vals = {
'user_input_id': user_input_id,
'question_id': question.id,
'page_id': question.page_id.id,
'survey_id': question.survey_id.id,
'skipped': False,
}
if answer_tag in post and post[answer_tag].strip() != '':
vals.update({'answer_type': 'free_text', 'value_free_text': post[answer_tag]})
else:
vals.update({'answer_type': None, 'skipped': True})
old_uil = self.search(cr, uid, [('user_input_id', '=', user_input_id),
('survey_id', '=', question.survey_id.id),
('question_id', '=', question.id)],
context=context)
if old_uil:
self.write(cr, uid, old_uil[0], vals, context=context)
else:
self.create(cr, uid, vals, context=context)
return True
def save_line_textbox(self, cr, uid, user_input_id, question, post, answer_tag, context=None):
vals = {
'user_input_id': user_input_id,
'question_id': question.id,
'page_id': question.page_id.id,
'survey_id': question.survey_id.id,
'skipped': False
}
if answer_tag in post and post[answer_tag].strip() != '':
vals.update({'answer_type': 'text', 'value_text': post[answer_tag]})
else:
vals.update({'answer_type': None, 'skipped': True})
old_uil = self.search(cr, uid, [('user_input_id', '=', user_input_id),
('survey_id', '=', question.survey_id.id),
('question_id', '=', question.id)],
context=context)
if old_uil:
self.write(cr, uid, old_uil[0], vals, context=context)
else:
self.create(cr, uid, vals, context=context)
return True
def save_line_numerical_box(self, cr, uid, user_input_id, question, post, answer_tag, context=None):
vals = {
'user_input_id': user_input_id,
'question_id': question.id,
'page_id': question.page_id.id,
'survey_id': question.survey_id.id,
'skipped': False
}
if answer_tag in post and post[answer_tag].strip() != '':
vals.update({'answer_type': 'number', 'value_number': float(post[answer_tag])})
else:
vals.update({'answer_type': None, 'skipped': True})
old_uil = self.search(cr, uid, [('user_input_id', '=', user_input_id),
('survey_id', '=', question.survey_id.id),
('question_id', '=', question.id)],
context=context)
if old_uil:
self.write(cr, uid, old_uil[0], vals, context=context)
else:
self.create(cr, uid, vals, context=context)
return True
def save_line_datetime(self, cr, uid, user_input_id, question, post, answer_tag, context=None):
vals = {
'user_input_id': user_input_id,
'question_id': question.id,
'page_id': question.page_id.id,
'survey_id': question.survey_id.id,
'skipped': False
}
if answer_tag in post and post[answer_tag].strip() != '':
vals.update({'answer_type': 'date', 'value_date': post[answer_tag]})
else:
vals.update({'answer_type': None, 'skipped': True})
old_uil = self.search(cr, uid, [('user_input_id', '=', user_input_id),
('survey_id', '=', question.survey_id.id),
('question_id', '=', question.id)],
context=context)
if old_uil:
self.write(cr, uid, old_uil[0], vals, context=context)
else:
self.create(cr, uid, vals, context=context)
return True
def save_line_simple_choice(self, cr, uid, user_input_id, question, post, answer_tag, context=None):
vals = {
'user_input_id': user_input_id,
'question_id': question.id,
'page_id': question.page_id.id,
'survey_id': question.survey_id.id,
'skipped': False
}
old_uil = self.search(cr, uid, [('user_input_id', '=', user_input_id),
('survey_id', '=', question.survey_id.id),
('question_id', '=', question.id)],
context=context)
if old_uil:
self.unlink(cr, uid, old_uil, context=context)
if answer_tag in post and post[answer_tag].strip() != '':
vals.update({'answer_type': 'suggestion', 'value_suggested': post[answer_tag]})
else:
vals.update({'answer_type': None, 'skipped': True})
# '-1' indicates 'comment count as an answer' so do not need to record it
if post.get(answer_tag) and post.get(answer_tag) != '-1':
self.create(cr, uid, vals, context=context)
comment_answer = post.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
if comment_answer:
vals.update({'answer_type': 'text', 'value_text': comment_answer, 'skipped': False, 'value_suggested': False})
self.create(cr, uid, vals, context=context)
return True
def save_line_multiple_choice(self, cr, uid, user_input_id, question, post, answer_tag, context=None):
vals = {
'user_input_id': user_input_id,
'question_id': question.id,
'page_id': question.page_id.id,
'survey_id': question.survey_id.id,
'skipped': False
}
old_uil = self.search(cr, uid, [('user_input_id', '=', user_input_id),
('survey_id', '=', question.survey_id.id),
('question_id', '=', question.id)],
context=context)
if old_uil:
self.unlink(cr, uid, old_uil, context=context)
ca = dict_keys_startswith(post, answer_tag)
comment_answer = ca.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
if len(ca) > 0:
for a in ca:
# '-1' indicates 'comment count as an answer' so do not need to record it
if a != ('%s_%s' % (answer_tag, '-1')):
vals.update({'answer_type': 'suggestion', 'value_suggested': ca[a]})
self.create(cr, uid, vals, context=context)
if comment_answer:
vals.update({'answer_type': 'text', 'value_text': comment_answer, 'value_suggested': False})
self.create(cr, uid, vals, context=context)
if not ca and not comment_answer:
vals.update({'answer_type': None, 'skipped': True})
self.create(cr, uid, vals, context=context)
return True
def save_line_matrix(self, cr, uid, user_input_id, question, post, answer_tag, context=None):
vals = {
'user_input_id': user_input_id,
'question_id': question.id,
'page_id': question.page_id.id,
'survey_id': question.survey_id.id,
'skipped': False
}
old_uil = self.search(cr, uid, [('user_input_id', '=', user_input_id),
('survey_id', '=', question.survey_id.id),
('question_id', '=', question.id)],
context=context)
if old_uil:
self.unlink(cr, uid, old_uil, context=context)
no_answers = True
ca = dict_keys_startswith(post, answer_tag)
comment_answer = ca.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
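        # Expected POST keys: "<answer_tag>_<row_id>" for the 'simple' subtype and
        # "<answer_tag>_<row_id>_<col_id>" for the 'multiple' subtype (see below)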
if comment_answer:
vals.update({'answer_type': 'text', 'value_text': comment_answer})
self.create(cr, uid, vals, context=context)
no_answers = False
if question.matrix_subtype == 'simple':
for row in question.labels_ids_2:
a_tag = "%s_%s" % (answer_tag, row.id)
if a_tag in ca:
no_answers = False
vals.update({'answer_type': 'suggestion', 'value_suggested': ca[a_tag], 'value_suggested_row': row.id})
self.create(cr, uid, vals, context=context)
elif question.matrix_subtype == 'multiple':
for col in question.labels_ids:
for row in question.labels_ids_2:
a_tag = "%s_%s_%s" % (answer_tag, row.id, col.id)
if a_tag in ca:
no_answers = False
vals.update({'answer_type': 'suggestion', 'value_suggested': col.id, 'value_suggested_row': row.id})
self.create(cr, uid, vals, context=context)
if no_answers:
vals.update({'answer_type': None, 'skipped': True})
self.create(cr, uid, vals, context=context)
return True
def dict_keys_startswith(dictionary, string):
'''Returns a dictionary containing the elements of <dict> whose keys start
with <string>.
.. note::
This function uses dictionary comprehensions (Python >= 2.7)'''
return {k: dictionary[k] for k in filter(lambda key: key.startswith(string), dictionary.keys())}
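# Illustrative sketch (not part of the original module): how dict_keys_startswith()
# is used by the validation and saving code above to collect the POST values that
# belong to a single question. The answer tag and the values below are hypothetical.
#
#   post = {'1_2_7': '3', '1_2_7_comment': 'other', '1_4_9': 'x'}
#   dict_keys_startswith(post, '1_2_7')
#   # -> {'1_2_7': '3', '1_2_7_comment': 'other'}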
| 47.3184
| 184
| 0.585785
|
b39d05e420ce90896622ccca139d5d784fdbe259
| 6,436
|
py
|
Python
|
Main.py
|
david-c-stein/Python-PowerSwitchRaspPi
|
db261109f81d322da11f49e3306d76aaaa3072ff
|
[
"MIT"
] | null | null | null |
Main.py
|
david-c-stein/Python-PowerSwitchRaspPi
|
db261109f81d322da11f49e3306d76aaaa3072ff
|
[
"MIT"
] | null | null | null |
Main.py
|
david-c-stein/Python-PowerSwitchRaspPi
|
db261109f81d322da11f49e3306d76aaaa3072ff
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import datetime
import getopt
import inspect
import json
import os
import platform
import sys
import time
import threading
from Global import __MULTIPROCESSING__
__version__ = '0.1'
if __MULTIPROCESSING__:
import multiprocessing
from multiprocessing import Queue
else:
if sys.version_info[0] < 3:
from Queue import Queue
else:
from queue import Queue
import Logger
starttime = datetime.datetime.now().strftime("%b %d %Y %H:%M:%S")
#-----------------------
class myApp(object):
logger = None
logconfig = None
pHW = None # Hardware thread/process
pWS = None # WebServices thread/process
def main(self, argv):
self.logger = Logger.logging.getLogger(__name__)
self.logconfig = Logger.logconfig
self.logger.info("Start time: " + starttime)
self.configFile = None
# parse command line arguments
try:
opts, args = getopt.getopt(argv, "hd:", ["help", "desc="])
except getopt.GetoptError as e:
self.logger.exception(str(e))
self.usage()
return
for opt, arg in opts:
if opt in ("-h", "--help"):
self.usage()
return
elif opt in ("-d"):
self.configFile = arg
else:
self.usage()
return
if(self.configFile == None):
self.usage()
return
        # initialize and run
        self.initialize()
self.start()
self.stop()
#-----------------------
    def initialize(self):
try:
# identify platform
self.logger.info("------------------------------")
self.logger.info(" machine: " + platform.machine())
self.logger.info(" version: " + platform.version())
self.logger.info(" platform: " + platform.platform())
self.logger.info(" system: " + platform.system())
self.logger.info("processor: " + platform.processor())
if __MULTIPROCESSING__:
self.logger.info(" cores: " + str(multiprocessing.cpu_count()))
self.logger.info(" nodes: " + platform.node())
self.logger.info("PythonImp: " + platform.python_implementation())
self.logger.info("PythonVer: " + platform.python_version())
self.logger.info("starttime: " + starttime)
self.logger.info("scriptver: " + __version__)
self.logger.info("------------------------------")
# include paths
dirs = ['pythonLibs', 'Hardware', 'WebServices']
self.initPaths(dirs)
# initialize queues
if __MULTIPROCESSING__:
self.queHdw = multiprocessing.Queue()
self.queWeb = multiprocessing.Queue()
else:
self.queHdw = Queue()
self.queWeb = Queue()
# hardware configuration
self.configHW = {
"HTTPPORT" : 8888,
"SOCKETIOPORT" : 8888,
}
            # include configuration from file
data = self.readFile(self.configFile)
self.configHW.update(data)
#print(self.configHW)
# initialize hardware process
try:
import Hardware
self.pHW = Hardware.Hardware(self.logger, self.logconfig, self.queHdw, self.queWeb, self.configHW)
except Exception as e:
self.logger.exception(e)
print( "Hardware Initialization Error: " + str(e) )
# initialize web services process
try:
import WebServices
self.pWS = WebServices.WebServices(self.logger, self.logconfig, self.queHdw, self.queWeb, self.configHW)
except Exception as e:
self.logger.exception(e)
print( "Web Initialization Error: " + str(e) )
except Exception as e:
self.logger.exception(e)
print( "Initialization Error: " + str(e) )
exit(1)
return
#-----------------------
# json file methods
def readFile(self, path):
with open(path, 'r') as datafile:
return(json.load(datafile))
def writeFile(self, path, data):
with open(path, 'w') as datafile:
json.dump(data, datafile)
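    # Example (hypothetical) <config>.cfg content parsed by readFile(); any keys
    # present in the file override the defaults placed in self.configHW above:
    #   {"HTTPPORT": 8888, "SOCKETIOPORT": 8888}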
#-----------------------
def start(self):
try:
# start hardware process
self.pHW.start()
# start webservices process
self.pWS.start()
            RUNNING = True
while RUNNING:
try:
# TODO : include a curses command line gui here
time.sleep(0.200)
except (KeyboardInterrupt, SystemExit):
self.logger.info("Interrupted")
self.stop()
exit()
except Exception as e:
self.logger.exception(str(e))
except Exception as e:
self.logger.exception(str(e))
#-----------------------
def stop(self):
# stop processes
if(self.pHW != None):
self.pHW.stop()
if(self.pWS != None):
self.pWS.stop()
#-----------------------
def usage(self):
print("\n\n python " + __file__ + " -d <config>.cfg \n")
exit()
#-----------------------
def initPaths(self, dirs):
try:
            # include <local> paths NOTE: realpath() works with symlinks
cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
self.logger.info("Path Added : " + cmd_folder)
# include dirs passed
for dir in dirs:
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0], dir)))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
self.logger.info("Path Added : " + cmd_subfolder)
except Exception as e:
self.logger.exception(str(e))
raise
if __name__== '__main__':
myApp().main(sys.argv[1:])
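# Example invocation (the configuration file name is hypothetical):
#   python Main.py -d powerswitch.cfg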
| 29.796296
| 143
| 0.516004
|
ad6c305dcc5a14e6b4baae0eb52b27574845cc23
| 37,412
|
py
|
Python
|
tests/functional/demos/test_demos_linux.py
|
artanokhov/docker_ci
|
97259f66baaffe72cd5ed93880c8f38cc170471d
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/demos/test_demos_linux.py
|
artanokhov/docker_ci
|
97259f66baaffe72cd5ed93880c8f38cc170471d
|
[
"Apache-2.0"
] | null | null | null |
tests/functional/demos/test_demos_linux.py
|
artanokhov/docker_ci
|
97259f66baaffe72cd5ed93880c8f38cc170471d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (C) 2019-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import pytest
@pytest.mark.usefixtures('_is_image_os', '_is_distribution')
@pytest.mark.parametrize('_is_image_os', [('ubuntu18', 'ubuntu20')], indirect=True)
@pytest.mark.parametrize('_is_distribution', [('data_dev', 'proprietary', 'custom-full')], indirect=True)
class TestDemosLinuxDataDev:
@pytest.mark.parametrize('omz_python_demo_path', ['action_recognition'], indirect=True)
def test_action_recognition_python_cpu(self, tester, image, omz_python_demo_path, bash):
tester.test_docker_image(
image,
['curl -LJo /root/action_recognition.mp4 https://github.com/intel-iot-devkit/sample-videos/blob/'
'master/head-pose-face-detection-female.mp4?raw=true',
bash('omz_downloader --name action-recognition-0001-encoder,action-recognition-0001-decoder '
'--precision FP16'),
bash(f'python3 {omz_python_demo_path} -at en-de '
'-m_en /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-encoder/FP16/action-recognition-0001-encoder.xml '
'-m_de /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-decoder/FP16/action-recognition-0001-decoder.xml '
'-i /root/action_recognition.mp4 -d CPU --no_show'),
],
self.test_action_recognition_python_cpu.__name__,
)
@pytest.mark.gpu
@pytest.mark.parametrize('omz_python_demo_path', ['action_recognition'], indirect=True)
def test_action_recognition_python_gpu(self, tester, image, omz_python_demo_path, bash):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
['curl -LJo /root/action_recognition.mp4 https://github.com/intel-iot-devkit/sample-videos/blob/master/'
'head-pose-face-detection-female.mp4?raw=true',
bash('omz_downloader --name action-recognition-0001-encoder,action-recognition-0001-decoder '
'--precision FP16'),
bash(f'python3 {omz_python_demo_path} -at en-de '
'-m_en /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-encoder/FP16/action-recognition-0001-encoder.xml '
'-m_de /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-decoder/FP16/action-recognition-0001-decoder.xml '
'-i /root/action_recognition.mp4 -d GPU --no_show'),
],
self.test_action_recognition_python_gpu.__name__, **kwargs,
)
@pytest.mark.vpu
@pytest.mark.parametrize('omz_python_demo_path', ['action_recognition'], indirect=True)
@pytest.mark.xfail_log(pattern='Can not init Myriad device: NC_ERROR',
reason='Sporadic error on MYRIAD device')
def test_action_recognition_python_vpu(self, tester, image, omz_python_demo_path, bash):
kwargs = {'device_cgroup_rules': ['c 189:* rmw'],
'volumes': ['/dev/bus/usb:/dev/bus/usb'], 'mem_limit': '3g'} # nosec # noqa: S108
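        # the MYRIAD stick is reached over USB, hence the /dev/bus/usb volume and
        # the cgroup rule for char-device major 189 (USB device nodes)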
tester.test_docker_image(
image,
['curl -LJo /root/action_recognition.mp4 https://github.com/intel-iot-devkit/sample-videos/blob/master/'
'head-pose-face-detection-female.mp4?raw=true',
bash('omz_downloader --name action-recognition-0001-encoder,action-recognition-0001-decoder '
'--precision FP16'),
bash(f'python3 {omz_python_demo_path} -at en-de '
'-m_en /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-encoder/FP16/action-recognition-0001-encoder.xml '
'-m_de /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-decoder/FP16/action-recognition-0001-decoder.xml '
'-i /root/action_recognition.mp4 -d MYRIAD --no_show'),
],
self.test_action_recognition_python_vpu.__name__, **kwargs,
)
@pytest.mark.hddl
@pytest.mark.parametrize('omz_python_demo_path', ['action_recognition'], indirect=True)
def test_action_recognition_python_hddl(self, tester, image, omz_python_demo_path, bash):
kwargs = {'devices': ['/dev/ion:/dev/ion'],
'volumes': ['/var/tmp:/var/tmp', '/dev/shm:/dev/shm'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
['curl -LJo /root/action_recognition.mp4 https://github.com/intel-iot-devkit/sample-videos/blob/master/'
'head-pose-face-detection-female.mp4?raw=true',
bash('omz_downloader --name action-recognition-0001-encoder,action-recognition-0001-decoder '
'--precision FP16'),
bash(f'umask 0000 && python3 {omz_python_demo_path} -at en-de '
'-m_en /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-encoder/FP16/action-recognition-0001-encoder.xml '
'-m_de /opt/intel/openvino/intel/action-recognition-0001/'
'action-recognition-0001-decoder/FP16/action-recognition-0001-decoder.xml '
'-i /root/action_recognition.mp4 -d HDDL --no_show && rm -f /dev/shm/hddl_*'),
],
self.test_action_recognition_python_hddl.__name__, **kwargs,
)
@pytest.mark.usefixtures('_is_image_os', '_is_distribution')
@pytest.mark.parametrize('_is_image_os', [('ubuntu18', 'ubuntu20', 'rhel8')], indirect=True)
@pytest.mark.parametrize('_is_distribution', [('dev', 'proprietary', 'custom-full')], indirect=True)
class TestDemosLinux:
def test_crossroad_cpp_cpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'mem_limit': '3g'}
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name person-vehicle-bike-detection-crossroad-0078 '
'--precisions FP16 '
'-o /root/omz_demos_build/intel64/Release/'), download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/crossroad_camera_demo '
'-m /root/omz_demos_build/intel64/Release/intel/person-vehicle-bike-detection-crossroad-0078/'
'FP16/person-vehicle-bike-detection-crossroad-0078.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d CPU -no_show'),
],
self.test_crossroad_cpp_cpu.__name__, **kwargs,
)
@pytest.mark.gpu
def test_crossroad_cpp_gpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name person-vehicle-bike-detection-crossroad-0078 '
'--precisions FP16 -o /root/omz_demos_build/intel64/Release/'), download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/crossroad_camera_demo '
'-m /root/omz_demos_build/intel64/Release/intel/person-vehicle-bike-detection-crossroad-0078/FP16/'
'person-vehicle-bike-detection-crossroad-0078.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d GPU -no_show'),
],
self.test_crossroad_cpp_gpu.__name__, **kwargs,
)
@pytest.mark.vpu
@pytest.mark.xfail_log(pattern='Can not init Myriad device: NC_ERROR',
reason='Sporadic error on MYRIAD device')
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_crossroad_cpp_vpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'device_cgroup_rules': ['c 189:* rmw'],
'volumes': ['/dev/bus/usb:/dev/bus/usb'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name person-vehicle-bike-detection-crossroad-0078 '
'--precisions FP16 -o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/crossroad_camera_demo '
'-m /root/omz_demos_build/intel64/Release/intel/person-vehicle-bike-detection-crossroad-0078/FP16/'
'person-vehicle-bike-detection-crossroad-0078.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d MYRIAD -no_show'),
],
self.test_crossroad_cpp_vpu.__name__, **kwargs,
)
@pytest.mark.hddl
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_crossroad_cpp_hddl(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'devices': ['/dev/ion:/dev/ion'],
'volumes': ['/var/tmp:/var/tmp', '/dev/shm:/dev/shm'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name person-vehicle-bike-detection-crossroad-0078 '
'--precisions FP16 -o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('umask 0000 && /root/omz_demos_build/intel64/Release/crossroad_camera_demo '
'-m /root/omz_demos_build/intel64/Release/intel/person-vehicle-bike-detection-crossroad-0078/FP16/'
'person-vehicle-bike-detection-crossroad-0078.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d HDDL -no_show && rm -f /dev/shm/hddl_*'),
],
self.test_crossroad_cpp_hddl.__name__, **kwargs,
)
def test_security_cpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name vehicle-license-plate-detection-barrier-0106 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name license-plate-recognition-barrier-0001 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name vehicle-attributes-recognition-barrier-0039 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/security_barrier_camera_demo '
'-i /opt/intel/openvino/samples/car_1.bmp '
'-m /root/omz_demos_build/intel64/Release/intel/vehicle-license-plate-detection-barrier-0106/'
'FP16/vehicle-license-plate-detection-barrier-0106.xml '
'-m_lpr /root/omz_demos_build/intel64/Release/intel/license-plate-recognition-barrier-0001/'
'FP16/license-plate-recognition-barrier-0001.xml '
'-m_va /root/omz_demos_build/intel64/Release/intel/vehicle-attributes-recognition-barrier-0039/'
'FP16/vehicle-attributes-recognition-barrier-0039.xml -no_show -d CPU -d_va CPU -d_lpr CPU'),
], self.test_security_cpu.__name__,
)
@pytest.mark.gpu
def test_security_gpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name vehicle-license-plate-detection-barrier-0106 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name license-plate-recognition-barrier-0001 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name vehicle-attributes-recognition-barrier-0039 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/security_barrier_camera_demo '
'-i /opt/intel/openvino/samples/car_1.bmp '
'-m /root/omz_demos_build/intel64/Release/intel/vehicle-license-plate-detection-barrier-0106/'
'FP16/vehicle-license-plate-detection-barrier-0106.xml '
'-m_lpr /root/omz_demos_build/intel64/Release/intel/license-plate-recognition-barrier-0001/'
'FP16/license-plate-recognition-barrier-0001.xml '
'-m_va /root/omz_demos_build/intel64/Release/intel/vehicle-attributes-recognition-barrier-0039/'
'FP16/vehicle-attributes-recognition-barrier-0039.xml -no_show -d GPU -d_va GPU -d_lpr GPU'),
], self.test_security_gpu.__name__, **kwargs,
)
@pytest.mark.vpu
@pytest.mark.xfail_log(pattern='Can not init Myriad device: NC_ERROR',
reason='Sporadic error on MYRIAD device')
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_security_vpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'device_cgroup_rules': ['c 189:* rmw'],
'volumes': ['/dev/bus/usb:/dev/bus/usb'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name vehicle-license-plate-detection-barrier-0106 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name license-plate-recognition-barrier-0001 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name vehicle-attributes-recognition-barrier-0039 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/security_barrier_camera_demo '
'-i /opt/intel/openvino/samples/car_1.bmp '
'-m /root/omz_demos_build/intel64/Release/intel/vehicle-license-plate-detection-barrier-0106/'
'FP16/vehicle-license-plate-detection-barrier-0106.xml '
'-m_lpr /root/omz_demos_build/intel64/Release/intel/license-plate-recognition-barrier-0001/'
'FP16/license-plate-recognition-barrier-0001.xml '
'-m_va /root/omz_demos_build/intel64/Release/intel/vehicle-attributes-recognition-barrier-0039/'
'FP16/vehicle-attributes-recognition-barrier-0039.xml -no_show -d MYRIAD -d_va MYRIAD -d_lpr MYRIAD'),
], self.test_security_vpu.__name__, **kwargs,
)
@pytest.mark.hddl
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_security_hddl(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'devices': ['/dev/ion:/dev/ion'],
'volumes': ['/var/tmp:/var/tmp', '/dev/shm:/dev/shm'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name vehicle-license-plate-detection-barrier-0106 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name license-plate-recognition-barrier-0001 '
'-o /root/omz_demos_build/intel64/Release/'),
bash('omz_downloader --name vehicle-attributes-recognition-barrier-0039 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('umask 0000 && /root/omz_demos_build/intel64/Release/security_barrier_camera_demo '
'-i /opt/intel/openvino/samples/car_1.bmp '
'-m /root/omz_demos_build/intel64/Release/intel/vehicle-license-plate-detection-barrier-0106/'
'FP16/vehicle-license-plate-detection-barrier-0106.xml '
'-m_lpr /root/omz_demos_build/intel64/Release/intel/license-plate-recognition-barrier-0001/'
'FP16/license-plate-recognition-barrier-0001.xml '
'-m_va /root/omz_demos_build/intel64/Release/intel/vehicle-attributes-recognition-barrier-0039/'
'FP16/vehicle-attributes-recognition-barrier-0039.xml -no_show -d HDDL -d_va HDDL -d_lpr HDDL '
                 '&& rm -f /dev/shm/hddl_*'),
], self.test_security_hddl.__name__, **kwargs,
)
def test_text_cpp_cpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'mem_limit': '3g'}
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name text-detection-0004 --precision FP16 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/text_detection_demo '
'-m_td /root/omz_demos_build/intel64/Release/intel/text-detection-0004/FP16/text-detection-0004.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d_td CPU -no_show'),
],
self.test_text_cpp_cpu.__name__, **kwargs,
)
@pytest.mark.gpu
def test_text_cpp_gpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name text-detection-0004 --precision FP16 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/text_detection_demo '
'-m_td /root/omz_demos_build/intel64/Release/intel/text-detection-0004/FP16/text-detection-0004.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d_td GPU -no_show'),
],
self.test_text_cpp_gpu.__name__, **kwargs,
)
@pytest.mark.vpu
@pytest.mark.xfail_log(pattern='Can not init Myriad device: NC_ERROR',
reason='Sporadic error on MYRIAD device')
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_text_cpp_vpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'device_cgroup_rules': ['c 189:* rmw'],
'volumes': ['/dev/bus/usb:/dev/bus/usb'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name text-detection-0004 --precision FP16 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/text_detection_demo '
'-m_td /root/omz_demos_build/intel64/Release/intel/text-detection-0004/FP16/text-detection-0004.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d_td MYRIAD -no_show'),
],
self.test_text_cpp_vpu.__name__, **kwargs,
)
@pytest.mark.hddl
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_text_cpp_hddl(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'devices': ['/dev/ion:/dev/ion'],
'volumes': ['/var/tmp:/var/tmp', '/dev/shm:/dev/shm'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name text-detection-0004 --precision FP16 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('umask 0000 && /root/omz_demos_build/intel64/Release/text_detection_demo '
'-m_td /root/omz_demos_build/intel64/Release/intel/text-detection-0004/FP16/text-detection-0004.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d_td HDDL -no_show && '
'rm -f /dev/shm/hddl_*'),
],
self.test_text_cpp_hddl.__name__, **kwargs,
)
@pytest.mark.usefixtures('_python_ngraph_required')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
def test_detection_ssd_python_cpu(self, tester, image, omz_python_demo_path, bash,
install_openvino_dependencies, download_picture):
tester.test_docker_image(
image,
[bash('omz_downloader --name vehicle-detection-adas-0002 --precision FP16'), install_openvino_dependencies,
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/intel/vehicle-detection-adas-0002/FP16/vehicle-detection-adas-0002.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d CPU --no_show -r'),
],
self.test_detection_ssd_python_cpu.__name__,
)
@pytest.mark.gpu
@pytest.mark.usefixtures('_python_ngraph_required')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
def test_detection_ssd_python_gpu(self, tester, image, omz_python_demo_path, bash,
install_openvino_dependencies, download_picture):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
[bash('omz_downloader --name vehicle-detection-adas-0002 --precision FP16'), install_openvino_dependencies,
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/intel/vehicle-detection-adas-0002/FP16/vehicle-detection-adas-0002.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d GPU --no_show -r'),
],
self.test_detection_ssd_python_gpu.__name__, **kwargs,
)
@pytest.mark.vpu
@pytest.mark.usefixtures('_python_ngraph_required', '_is_not_image_os')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
@pytest.mark.xfail_log(pattern='Can not init Myriad device: NC_ERROR',
reason='Sporadic error on MYRIAD device')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_detection_ssd_python_vpu(self, tester, image, omz_python_demo_path, bash, download_picture,
install_openvino_dependencies):
kwargs = {'device_cgroup_rules': ['c 189:* rmw'],
'volumes': ['/dev/bus/usb:/dev/bus/usb'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('omz_downloader --name vehicle-detection-adas-0002 --precision FP16'),
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/intel/vehicle-detection-adas-0002/FP16/vehicle-detection-adas-0002.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d MYRIAD --no_show -r'),
],
self.test_detection_ssd_python_vpu.__name__, **kwargs,
)
@pytest.mark.hddl
@pytest.mark.usefixtures('_python_ngraph_required', '_is_not_image_os')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_detection_ssd_python_hddl(self, tester, image, omz_python_demo_path, bash, download_picture,
install_openvino_dependencies):
kwargs = {'devices': ['/dev/ion:/dev/ion'],
'volumes': ['/var/tmp:/var/tmp', '/dev/shm:/dev/shm'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('omz_downloader --name vehicle-detection-adas-0002 --precision FP16'),
download_picture('car_1.bmp'),
bash(f'umask 0000 && python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/intel/vehicle-detection-adas-0002/FP16/vehicle-detection-adas-0002.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d HDDL --no_show -r && '
'rm -f /dev/shm/hddl_*'),
],
self.test_detection_ssd_python_hddl.__name__, **kwargs,
)
def test_segmentation_cpp_cpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'mem_limit': '3g'}
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name semantic-segmentation-adas-0001 --precision FP16 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/segmentation_demo '
'-m /root/omz_demos_build/intel64/Release/intel/semantic-segmentation-adas-0001/FP16/'
'semantic-segmentation-adas-0001.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d CPU -no_show'),
],
self.test_segmentation_cpp_cpu.__name__, **kwargs,
)
@pytest.mark.gpu
def test_segmentation_cpp_gpu(self, tester, image, install_openvino_dependencies, bash, download_picture):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('/opt/intel/openvino/demos/build_demos.sh'),
bash('omz_downloader --name semantic-segmentation-adas-0001 --precision FP16 '
'-o /root/omz_demos_build/intel64/Release/'),
download_picture('car_1.bmp'),
bash('/root/omz_demos_build/intel64/Release/segmentation_demo '
'-m /root/omz_demos_build/intel64/Release/intel/semantic-segmentation-adas-0001/FP16/'
'semantic-segmentation-adas-0001.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d GPU -no_show'),
],
self.test_segmentation_cpp_gpu.__name__, **kwargs,
)
@pytest.mark.parametrize('omz_python_demo_path', ['segmentation'], indirect=True)
def test_segmentation_python_cpu(self, tester, image, omz_python_demo_path, bash,
install_openvino_dependencies, download_picture):
tester.test_docker_image(
image,
[bash('omz_downloader --name semantic-segmentation-adas-0001 --precision FP16'),
install_openvino_dependencies,
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/intel/semantic-segmentation-adas-0001/FP16/'
'semantic-segmentation-adas-0001.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d CPU -at segmentation --no_show'),
],
self.test_segmentation_python_cpu.__name__,
)
@pytest.mark.gpu
@pytest.mark.parametrize('omz_python_demo_path', ['segmentation'], indirect=True)
def test_segmentation_python_gpu(self, tester, image, omz_python_demo_path, bash,
install_openvino_dependencies, download_picture):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
[bash('omz_downloader --name semantic-segmentation-adas-0001 --precision FP16'),
install_openvino_dependencies,
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/intel/semantic-segmentation-adas-0001/FP16/'
'semantic-segmentation-adas-0001.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d GPU -at segmentation --no_show'),
],
self.test_segmentation_python_gpu.__name__, **kwargs,
)
@pytest.mark.vpu
@pytest.mark.parametrize('omz_python_demo_path', ['segmentation'], indirect=True)
@pytest.mark.xfail_log(pattern='Can not init Myriad device: NC_ERROR', reason='Sporadic error on MYRIAD device')
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_segmentation_python_vpu(self, tester, image, omz_python_demo_path, bash, download_picture,
install_openvino_dependencies):
kwargs = {'device_cgroup_rules': ['c 189:* rmw'],
'volumes': ['/dev/bus/usb:/dev/bus/usb'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('omz_downloader --name semantic-segmentation-adas-0001 --precision FP16'),
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/intel/semantic-segmentation-adas-0001/FP16/'
'semantic-segmentation-adas-0001.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d MYRIAD -at segmentation --no_show'),
],
self.test_segmentation_python_vpu.__name__, **kwargs,
)
@pytest.mark.hddl
@pytest.mark.parametrize('omz_python_demo_path', ['segmentation'], indirect=True)
@pytest.mark.usefixtures('_is_not_image_os')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_segmentation_python_hddl(self, tester, image, omz_python_demo_path, bash, download_picture,
install_openvino_dependencies):
kwargs = {'devices': ['/dev/ion:/dev/ion'],
'volumes': ['/var/tmp:/var/tmp', '/dev/shm:/dev/shm'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('omz_downloader --name semantic-segmentation-adas-0001 --precision FP16'),
download_picture('car_1.bmp'),
bash(f'umask 0000 && python3 {omz_python_demo_path} -at segmentation --no_show '
'-m /opt/intel/openvino/intel/semantic-segmentation-adas-0001/FP16/'
'semantic-segmentation-adas-0001.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d HDDL && rm -f /dev/shm/hddl_*'),
],
self.test_segmentation_python_hddl.__name__, **kwargs,
)
@pytest.mark.usefixtures('_python_ngraph_required')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
def test_object_detection_centernet_python_cpu(self, tester, image, omz_python_demo_path, bash,
install_openvino_dependencies, download_picture):
tester.test_docker_image(
image,
[bash('omz_downloader --name ctdet_coco_dlav0_384 --precision FP16'),
bash('omz_converter --name ctdet_coco_dlav0_384 --precision FP16'), install_openvino_dependencies,
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/public/ctdet_coco_dlav0_384/FP16/ctdet_coco_dlav0_384.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d CPU --no_show'),
],
self.test_object_detection_centernet_python_cpu.__name__,
)
@pytest.mark.gpu
@pytest.mark.usefixtures('_python_ngraph_required')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
def test_object_detection_centernet_python_gpu(self, tester, image, omz_python_demo_path, bash,
install_openvino_dependencies, download_picture):
kwargs = {'devices': ['/dev/dri:/dev/dri'], 'mem_limit': '3g'}
tester.test_docker_image(
image,
[bash('omz_downloader --name ctdet_coco_dlav0_384 --precision FP16'),
bash('omz_converter --name ctdet_coco_dlav0_384 --precision FP16'), install_openvino_dependencies,
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/public/ctdet_coco_dlav0_384/FP16/ctdet_coco_dlav0_384.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d GPU --no_show'),
],
self.test_object_detection_centernet_python_gpu.__name__, **kwargs,
)
@pytest.mark.vpu
@pytest.mark.usefixtures('_python_ngraph_required', '_is_not_image_os')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
@pytest.mark.xfail_log(pattern='Can not init Myriad device: NC_ERROR',
reason='Sporadic error on MYRIAD device')
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_object_detection_centernet_python_vpu(self, tester, image, omz_python_demo_path, bash, download_picture,
install_openvino_dependencies):
kwargs = {'device_cgroup_rules': ['c 189:* rmw'],
'volumes': ['/dev/bus/usb:/dev/bus/usb'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('omz_downloader --name ctdet_coco_dlav0_384 --precision FP16'),
bash('omz_converter --name ctdet_coco_dlav0_384 --precision FP16'),
download_picture('car_1.bmp'),
bash(f'python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/public/ctdet_coco_dlav0_384/FP16/ctdet_coco_dlav0_384.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d MYRIAD --no_show -r'),
],
self.test_object_detection_centernet_python_vpu.__name__, **kwargs,
)
@pytest.mark.hddl
@pytest.mark.usefixtures('_python_ngraph_required', '_is_not_image_os')
@pytest.mark.parametrize('omz_python_demo_path', ['object_detection'], indirect=True)
@pytest.mark.parametrize('_is_not_image_os', [('rhel8')], indirect=True)
def test_object_detection_centernet_python_hddl(self, tester, image, omz_python_demo_path, bash, download_picture,
install_openvino_dependencies):
kwargs = {'devices': ['/dev/ion:/dev/ion'],
'volumes': ['/var/tmp:/var/tmp', '/dev/shm:/dev/shm'], 'mem_limit': '3g'} # nosec # noqa: S108
tester.test_docker_image(
image,
[install_openvino_dependencies,
bash('omz_downloader --name ctdet_coco_dlav0_384 --precision FP16'),
bash('omz_converter --name ctdet_coco_dlav0_384 --precision FP16'),
download_picture('car_1.bmp'),
bash(f'umask 0000 && python3 {omz_python_demo_path} '
'-m /opt/intel/openvino/public/ctdet_coco_dlav0_384/FP16/ctdet_coco_dlav0_384.xml '
'-i /opt/intel/openvino/samples/car_1.bmp -d HDDL --no_show -r && '
'rm -f /dev/shm/hddl_*'),
],
self.test_object_detection_centernet_python_hddl.__name__, **kwargs,
)
| 59.955128
| 120
| 0.628889
|
1261f438e673657e8109fe0df44a287045639be1
| 17,559
|
py
|
Python
|
yt_dlp/extractor/instagram.py
|
jhwgh1968/yt-dlp
|
b6a35ad83bbe2cbb9cda17052fd68e2dff16e99b
|
[
"Unlicense"
] | 5
|
2021-08-24T17:08:12.000Z
|
2022-03-03T13:06:09.000Z
|
yt_dlp/extractor/instagram.py
|
jhwgh1968/yt-dlp
|
b6a35ad83bbe2cbb9cda17052fd68e2dff16e99b
|
[
"Unlicense"
] | null | null | null |
yt_dlp/extractor/instagram.py
|
jhwgh1968/yt-dlp
|
b6a35ad83bbe2cbb9cda17052fd68e2dff16e99b
|
[
"Unlicense"
] | null | null | null |
from __future__ import unicode_literals
import itertools
import hashlib
import json
import re
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_HTTPError,
)
from ..utils import (
ExtractorError,
float_or_none,
get_element_by_attribute,
int_or_none,
lowercase_escape,
std_headers,
try_get,
url_or_none,
variadic,
)
class InstagramIE(InfoExtractor):
_VALID_URL = r'(?P<url>https?://(?:www\.)?instagram\.com/(?:p|tv|reel)/(?P<id>[^/?#&]+))'
_TESTS = [{
'url': 'https://instagram.com/p/aye83DjauH/?foo=bar#abc',
'md5': '0d2da106a9d2631273e192b372806516',
'info_dict': {
'id': 'aye83DjauH',
'ext': 'mp4',
'title': 'Video by naomipq',
'description': 'md5:1f17f0ab29bd6fe2bfad705f58de3cb8',
'thumbnail': r're:^https?://.*\.jpg',
'duration': 0,
'timestamp': 1371748545,
'upload_date': '20130620',
'uploader_id': 'naomipq',
'uploader': 'B E A U T Y F O R A S H E S',
'like_count': int,
'comment_count': int,
'comments': list,
},
}, {
# missing description
'url': 'https://www.instagram.com/p/BA-pQFBG8HZ/?taken-by=britneyspears',
'info_dict': {
'id': 'BA-pQFBG8HZ',
'ext': 'mp4',
'title': 'Video by britneyspears',
'thumbnail': r're:^https?://.*\.jpg',
'duration': 0,
'timestamp': 1453760977,
'upload_date': '20160125',
'uploader_id': 'britneyspears',
'uploader': 'Britney Spears',
'like_count': int,
'comment_count': int,
'comments': list,
},
'params': {
'skip_download': True,
},
}, {
# multi video post
'url': 'https://www.instagram.com/p/BQ0eAlwhDrw/',
'playlist': [{
'info_dict': {
'id': 'BQ0dSaohpPW',
'ext': 'mp4',
'title': 'Video 1',
},
}, {
'info_dict': {
'id': 'BQ0dTpOhuHT',
'ext': 'mp4',
'title': 'Video 2',
},
}, {
'info_dict': {
'id': 'BQ0dT7RBFeF',
'ext': 'mp4',
'title': 'Video 3',
},
}],
'info_dict': {
'id': 'BQ0eAlwhDrw',
'title': 'Post by instagram',
'description': 'md5:0f9203fc6a2ce4d228da5754bcf54957',
},
}, {
# IGTV
'url': 'https://www.instagram.com/tv/BkfuX9UB-eK/',
'info_dict': {
'id': 'BkfuX9UB-eK',
'ext': 'mp4',
'title': 'Fingerboarding Tricks with @cass.fb',
'thumbnail': r're:^https?://.*\.jpg',
'duration': 53.83,
'timestamp': 1530032919,
'upload_date': '20180626',
'uploader_id': 'instagram',
'uploader': 'Instagram',
'like_count': int,
'comment_count': int,
'comments': list,
'description': 'Meet Cass Hirst (@cass.fb), a fingerboarding pro who can perform tiny ollies and kickflips while blindfolded.',
}
}, {
'url': 'https://instagram.com/p/-Cmh1cukG2/',
'only_matching': True,
}, {
'url': 'http://instagram.com/p/9o6LshA7zy/embed/',
'only_matching': True,
}, {
'url': 'https://www.instagram.com/tv/aye83DjauH/',
'only_matching': True,
}, {
'url': 'https://www.instagram.com/reel/CDUMkliABpa/',
'only_matching': True,
}]
@staticmethod
def _extract_embed_url(webpage):
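        # Prefer the URL of an embedded Instagram iframe; otherwise fall back to the
        # link inside an "instagram-media" blockquote.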
mobj = re.search(
r'<iframe[^>]+src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?instagram\.com/p/[^/]+/embed.*?)\1',
webpage)
if mobj:
return mobj.group('url')
blockquote_el = get_element_by_attribute(
'class', 'instagram-media', webpage)
if blockquote_el is None:
return
mobj = re.search(
r'<a[^>]+href=([\'"])(?P<link>[^\'"]+)\1', blockquote_el)
if mobj:
return mobj.group('link')
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
url = mobj.group('url')
webpage = self._download_webpage(url, video_id)
        (media, video_url, description, thumbnail, timestamp, uploader,
         uploader_id, like_count, comment_count, comments, height, width,
         title, duration) = [None] * 14
shared_data = self._parse_json(
self._search_regex(
r'window\._sharedData\s*=\s*({.+?});',
webpage, 'shared data', default='{}'),
video_id, fatal=False)
if shared_data:
media = try_get(
shared_data,
(lambda x: x['entry_data']['PostPage'][0]['graphql']['shortcode_media'],
lambda x: x['entry_data']['PostPage'][0]['media']),
dict)
# _sharedData.entry_data.PostPage is empty when authenticated (see
# https://github.com/ytdl-org/youtube-dl/pull/22880)
if not media:
additional_data = self._parse_json(
self._search_regex(
r'window\.__additionalDataLoaded\s*\(\s*[^,]+,\s*({.+?})\s*\)\s*;',
webpage, 'additional data', default='{}'),
video_id, fatal=False)
if additional_data:
media = try_get(
additional_data, lambda x: x['graphql']['shortcode_media'],
dict)
if media:
video_url = media.get('video_url')
height = int_or_none(media.get('dimensions', {}).get('height'))
width = int_or_none(media.get('dimensions', {}).get('width'))
description = try_get(
media, lambda x: x['edge_media_to_caption']['edges'][0]['node']['text'],
compat_str) or media.get('caption')
title = media.get('title')
thumbnail = media.get('display_src') or media.get('display_url')
duration = float_or_none(media.get('video_duration'))
timestamp = int_or_none(media.get('taken_at_timestamp') or media.get('date'))
uploader = media.get('owner', {}).get('full_name')
uploader_id = media.get('owner', {}).get('username')
def get_count(keys, kind):
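                # Try each 'edge_media_<key>' count in turn, falling back to the
                # legacy '<kind>s' count field.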
for key in variadic(keys):
count = int_or_none(try_get(
media, (lambda x: x['edge_media_%s' % key]['count'],
lambda x: x['%ss' % kind]['count'])))
if count is not None:
return count
like_count = get_count('preview_like', 'like')
comment_count = get_count(
('preview_comment', 'to_comment', 'to_parent_comment'), 'comment')
comments = [{
'author': comment.get('user', {}).get('username'),
'author_id': comment.get('user', {}).get('id'),
'id': comment.get('id'),
'text': comment.get('text'),
'timestamp': int_or_none(comment.get('created_at')),
} for comment in media.get(
'comments', {}).get('nodes', []) if comment.get('text')]
if not video_url:
edges = try_get(
media, lambda x: x['edge_sidecar_to_children']['edges'],
list) or []
if edges:
entries = []
for edge_num, edge in enumerate(edges, start=1):
node = try_get(edge, lambda x: x['node'], dict)
if not node:
continue
node_video_url = url_or_none(node.get('video_url'))
if not node_video_url:
continue
entries.append({
'id': node.get('shortcode') or node['id'],
'title': node.get('title') or 'Video %d' % edge_num,
'url': node_video_url,
'thumbnail': node.get('display_url'),
'duration': float_or_none(node.get('video_duration')),
'width': int_or_none(try_get(node, lambda x: x['dimensions']['width'])),
'height': int_or_none(try_get(node, lambda x: x['dimensions']['height'])),
'view_count': int_or_none(node.get('video_view_count')),
})
return self.playlist_result(
entries, video_id,
'Post by %s' % uploader_id if uploader_id else None,
description)
if not video_url:
video_url = self._og_search_video_url(webpage, secure=False)
formats = [{
'url': video_url,
'width': width,
'height': height,
}]
if not uploader_id:
uploader_id = self._search_regex(
r'"owner"\s*:\s*{\s*"username"\s*:\s*"(.+?)"',
webpage, 'uploader id', fatal=False)
if not description:
description = self._search_regex(
r'"caption"\s*:\s*"(.+?)"', webpage, 'description', default=None)
if description is not None:
description = lowercase_escape(description)
if not thumbnail:
thumbnail = self._og_search_thumbnail(webpage)
return {
'id': video_id,
'formats': formats,
'ext': 'mp4',
'title': title or 'Video by %s' % uploader_id,
'description': description,
'duration': duration,
'thumbnail': thumbnail,
'timestamp': timestamp,
'uploader_id': uploader_id,
'uploader': uploader,
'like_count': like_count,
'comment_count': comment_count,
'comments': comments,
}
class InstagramPlaylistIE(InfoExtractor):
# A superclass for handling any kind of query based on GraphQL which
# results in a playlist.
_gis_tmpl = None # used to cache GIS request type
def _parse_graphql(self, webpage, item_id):
# Reads a webpage and returns its GraphQL data.
return self._parse_json(
self._search_regex(
r'sharedData\s*=\s*({.+?})\s*;\s*[<\n]', webpage, 'data'),
item_id)
def _extract_graphql(self, data, url):
# Parses GraphQL queries containing videos and generates a playlist.
def get_count(suffix):
return int_or_none(try_get(
node, lambda x: x['edge_media_' + suffix]['count']))
uploader_id = self._match_id(url)
csrf_token = data['config']['csrf_token']
rhx_gis = data.get('rhx_gis') or '3c7ca9dcefcf966d11dacf1f151335e8'
cursor = ''
for page_num in itertools.count(1):
variables = {
'first': 12,
'after': cursor,
}
variables.update(self._query_vars_for(data))
variables = json.dumps(variables)
if self._gis_tmpl:
gis_tmpls = [self._gis_tmpl]
else:
gis_tmpls = [
'%s' % rhx_gis,
'',
'%s:%s' % (rhx_gis, csrf_token),
'%s:%s:%s' % (rhx_gis, csrf_token, std_headers['User-Agent']),
]
# try all of the ways to generate a GIS query, and not only use the
# first one that works, but cache it for future requests
for gis_tmpl in gis_tmpls:
try:
json_data = self._download_json(
'https://www.instagram.com/graphql/query/', uploader_id,
'Downloading JSON page %d' % page_num, headers={
'X-Requested-With': 'XMLHttpRequest',
'X-Instagram-GIS': hashlib.md5(
('%s:%s' % (gis_tmpl, variables)).encode('utf-8')).hexdigest(),
}, query={
'query_hash': self._QUERY_HASH,
'variables': variables,
})
media = self._parse_timeline_from(json_data)
self._gis_tmpl = gis_tmpl
break
except ExtractorError as e:
# if it's an error caused by a bad query, and there are
# more GIS templates to try, ignore it and keep trying
if isinstance(e.cause, compat_HTTPError) and e.cause.code == 403:
if gis_tmpl != gis_tmpls[-1]:
continue
raise
edges = media.get('edges')
if not edges or not isinstance(edges, list):
break
for edge in edges:
node = edge.get('node')
if not node or not isinstance(node, dict):
continue
if node.get('__typename') != 'GraphVideo' and node.get('is_video') is not True:
continue
video_id = node.get('shortcode')
if not video_id:
continue
info = self.url_result(
'https://instagram.com/p/%s/' % video_id,
ie=InstagramIE.ie_key(), video_id=video_id)
description = try_get(
node, lambda x: x['edge_media_to_caption']['edges'][0]['node']['text'],
compat_str)
thumbnail = node.get('thumbnail_src') or node.get('display_src')
timestamp = int_or_none(node.get('taken_at_timestamp'))
comment_count = get_count('to_comment')
like_count = get_count('preview_like')
view_count = int_or_none(node.get('video_view_count'))
info.update({
'description': description,
'thumbnail': thumbnail,
'timestamp': timestamp,
'comment_count': comment_count,
'like_count': like_count,
'view_count': view_count,
})
yield info
page_info = media.get('page_info')
if not page_info or not isinstance(page_info, dict):
break
has_next_page = page_info.get('has_next_page')
if not has_next_page:
break
cursor = page_info.get('end_cursor')
if not cursor or not isinstance(cursor, compat_str):
break
def _real_extract(self, url):
user_or_tag = self._match_id(url)
webpage = self._download_webpage(url, user_or_tag)
data = self._parse_graphql(webpage, user_or_tag)
self._set_cookie('instagram.com', 'ig_pr', '1')
return self.playlist_result(
self._extract_graphql(data, url), user_or_tag, user_or_tag)
class InstagramUserIE(InstagramPlaylistIE):
_VALID_URL = r'https?://(?:www\.)?instagram\.com/(?P<id>[^/]{2,})/?(?:$|[?#])'
IE_DESC = 'Instagram user profile'
IE_NAME = 'instagram:user'
_TEST = {
'url': 'https://instagram.com/porsche',
'info_dict': {
'id': 'porsche',
'title': 'porsche',
},
'playlist_count': 5,
'params': {
'extract_flat': True,
'skip_download': True,
'playlistend': 5,
}
}
_QUERY_HASH = '42323d64886122307be10013ad2dcc44',
@staticmethod
def _parse_timeline_from(data):
# extracts the media timeline data from a GraphQL result
return data['data']['user']['edge_owner_to_timeline_media']
@staticmethod
def _query_vars_for(data):
# returns a dictionary of variables to add to the timeline query based
# on the GraphQL of the original page
return {
'id': data['entry_data']['ProfilePage'][0]['graphql']['user']['id']
}
class InstagramTagIE(InstagramPlaylistIE):
_VALID_URL = r'https?://(?:www\.)?instagram\.com/explore/tags/(?P<id>[^/]+)'
IE_DESC = 'Instagram hashtag search'
IE_NAME = 'instagram:tag'
_TEST = {
'url': 'https://instagram.com/explore/tags/lolcats',
'info_dict': {
'id': 'lolcats',
'title': 'lolcats',
},
'playlist_count': 50,
'params': {
'extract_flat': True,
'skip_download': True,
'playlistend': 50,
}
}
_QUERY_HASH = 'f92f56d47dc7a55b606908374b43a314',
@staticmethod
def _parse_timeline_from(data):
# extracts the media timeline data from a GraphQL result
return data['data']['hashtag']['edge_hashtag_to_media']
@staticmethod
def _query_vars_for(data):
# returns a dictionary of variables to add to the timeline query based
# on the GraphQL of the original page
return {
'tag_name':
data['entry_data']['TagPage'][0]['graphql']['hashtag']['name']
}
| 37.044304
| 139
| 0.50168
|
bb8b1a844ead551395492e5193b4c848f257ae5b
| 1,504
|
py
|
Python
|
.ls/views.py
|
millerthegorilla/django_users_app
|
f63c5bdcdd9700366099bc1189c3f21bf5e3fc2c
|
[
"MIT"
] | null | null | null |
.ls/views.py
|
millerthegorilla/django_users_app
|
f63c5bdcdd9700366099bc1189c3f21bf5e3fc2c
|
[
"MIT"
] | null | null | null |
.ls/views.py
|
millerthegorilla/django_users_app
|
f63c5bdcdd9700366099bc1189c3f21bf5e3fc2c
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect
from django.views.generic.base import TemplateView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic.edit import CreateView, UpdateView
from django.contrib.auth.views import LoginView
from django.contrib.auth.models import User
from django.urls import reverse_lazy
from django_email_verification import sendConfirm
import uuid # used as custom salt
# custom form imports
from .forms import CustomUserCreationForm, UserForm, ProfileForm
class UserLoginView(LoginView):
http_method_names = ['get', 'post']
redirect_authenticated_user = True
success_url = 'users/dashboard.html'
class DashboardView(LoginRequiredMixin, TemplateView):
http_method_names = ['get', 'post']
template_name = 'users/dashboard.html'
extra_context = {'user_form': UserForm, 'profile_form': ProfileForm}
class RegisterView(CreateView):
http_method_names = ['get', 'post']
template_name = 'users/register.html'
form_class = CustomUserCreationForm
success_url = reverse_lazy("users:login")
model = User
def form_valid(self, form):
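        # Save the new user and send the e-mail verification message with a
        # random per-user salt before redirecting.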
user = form.save()
returnVal = super(RegisterView, self).form_valid(form)
sendConfirm(user, custom_salt=uuid.uuid4())
return returnVal
class ProfileView(LoginRequiredMixin, UpdateView):
http_method_names = ['get', 'post']
template_name = 'users/profile.html'
extra_context = {'user_form': UserForm, 'profile_form': ProfileForm}
| 35.809524
| 72
| 0.757979
|
ee49e0d759991d41f8afe6bc968c792da2769afa
| 1,320
|
py
|
Python
|
f5/bigip/shared/test/functional/test_licensing.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 272
|
2016-02-23T06:05:44.000Z
|
2022-02-20T02:09:32.000Z
|
f5/bigip/shared/test/functional/test_licensing.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 1,103
|
2016-02-11T17:48:03.000Z
|
2022-02-15T17:13:37.000Z
|
f5/bigip/shared/test/functional/test_licensing.py
|
nghia-tran/f5-common-python
|
acb23a6e5830a119b460c19a578654113419f5c3
|
[
"Apache-2.0"
] | 167
|
2016-02-11T17:48:21.000Z
|
2022-01-17T20:13:05.000Z
|
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from f5.sdk_exception import UnsupportedMethod
class TestActivation(object):
def test_load(self, request, mgmt_root):
a = mgmt_root.tm.shared.licensing.activation.load()
assert hasattr(a, 'generation')
def test_update(self, request, mgmt_root):
with pytest.raises(UnsupportedMethod):
mgmt_root.tm.shared.licensing.activation.update()
class TestRegistration(object):
def test_load(self, request, mgmt_root):
reg = mgmt_root.tm.shared.licensing.registration.load()
assert hasattr(reg, 'generation')
def test_update(self, request, mgmt_root):
with pytest.raises(UnsupportedMethod):
mgmt_root.tm.shared.licensing.registration.update()
| 34.736842
| 74
| 0.733333
|
edcc4a1dec3d68b6456b7e380655a2290cbbfdd8
| 681
|
py
|
Python
|
py/tidy.py
|
ashlinrichardson/flatfile_tools
|
749071129cab7a598bd4c2edf050dce59324a97f
|
[
"Apache-2.0"
] | 2
|
2019-03-06T04:30:12.000Z
|
2019-03-26T16:23:56.000Z
|
py/tidy.py
|
ashlinrichardson/flatfile_tools
|
749071129cab7a598bd4c2edf050dce59324a97f
|
[
"Apache-2.0"
] | 9
|
2020-01-18T05:02:52.000Z
|
2022-03-14T18:09:53.000Z
|
py/tidy.py
|
bcgov/flatfile-tools
|
749071129cab7a598bd4c2edf050dce59324a97f
|
[
"Apache-2.0"
] | null | null | null |
# 20190711 : tidy up the folders by executing ws and dent
import os
import sys
from misc import *
if not (os.path.exists('R') and os.path.exists('c') and os.path.exists('cpp') and os.path.exists('py')):
err("must execute from within bin folder")
py = os.popen("find ./ -name '*.py'").readlines()
py = [py[i].strip() for i in range(0, len(py))]
for p in py:
run("ws " + p)
c = os.popen("find ./ -name '*.cpp'").readlines()
c = [c[i].strip() for i in range(0, len(c))]
for cc in c:
run("dent " + cc)
run("ws " + cc)
c = os.popen("find ./ -name '*.c'").readlines()
c = [c[i].strip() for i in range(0, len(c))]
for cc in c:
run("dent " + cc)
run("ws " + cc)
| 28.375
| 104
| 0.581498
|
5edad76896388bd601ccdff72130c4c1e3786d37
| 4,527
|
py
|
Python
|
tools/utils.py
|
pma-pj/Sollumz
|
bd43c23b8c114b5470b558c38a35423d2fb0d2bd
|
[
"MIT"
] | 4
|
2021-02-14T15:09:35.000Z
|
2022-02-01T20:07:01.000Z
|
tools/utils.py
|
pma-pj/Sollumz
|
bd43c23b8c114b5470b558c38a35423d2fb0d2bd
|
[
"MIT"
] | null | null | null |
tools/utils.py
|
pma-pj/Sollumz
|
bd43c23b8c114b5470b558c38a35423d2fb0d2bd
|
[
"MIT"
] | 1
|
2021-08-21T19:25:57.000Z
|
2021-08-21T19:25:57.000Z
|
import numpy
from math import inf, sqrt
from mathutils import Vector, Quaternion, Matrix
def get_list_item(list, index):
"""Get item of list without the risk of an error being thrown"""
if 0 <= index < len(list):
return list[index]
else:
return None
def flag_list_to_int(flag_list):
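    # Pack a list of booleans into an integer bitmask: bit i is set when
    # flag_list[i] is True, e.g. [True, False, True] -> 0b101 == 5.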
flags = 0
for i, enabled in enumerate(flag_list):
if enabled == True:
flags += (1 << i)
return flags
def int_to_bool_list(num, size=None):
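    # Inverse of flag_list_to_int: expand an integer bitmask into a list of
    # booleans, least significant bit first.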
return [bool(num & (1 << n)) for n in range(size or 32)]
def flag_prop_to_list(prop_type, data_block, size=None):
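    # Collect the state of each annotated flag property of prop_type from
    # data_block into a boolean list, in declaration order.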
size = (size or 32) + 1
flags = [False] * size
i = 0
for flag_name in prop_type.__annotations__:
if i < size:
if flag_name in data_block:
flags[i] = data_block[flag_name] != 0
i += 1
return flags
def divide_list(list, d):
result = []
for item in list:
answer = item / d
result.append(answer)
return result
def float32_list(list):
result = []
for item in list:
result.append(numpy.float32(item))
return result
def abs_vector(v):
return Vector((abs(v.x), abs(v.y), abs(v.z)))
def divide_vector_inv(v):
r = Vector((0, 0, 0))
r.x = 1 / v.x
r.y = 1 / v.y
r.z = 1 / v.z
return r
def subtract_from_vector(v, f):
r = Vector((0, 0, 0))
r.x = v.x - f
r.y = v.y - f
r.z = v.z - f
return r
def add_to_vector(v, f):
r = Vector((0, 0, 0))
r.x = v.x + f
r.y = v.y + f
r.z = v.z + f
return r
def get_min_vector(v, c):
r = Vector((0, 0, 0))
r.x = min(v.x, c.x)
r.y = min(v.y, c.y)
r.z = min(v.z, c.z)
return r
def get_max_vector(v, c):
r = Vector((0, 0, 0))
r.x = max(v.x, c.x)
r.y = max(v.y, c.y)
r.z = max(v.z, c.z)
return r
def get_min_vector_list(vecs):
x = []
y = []
z = []
for v in vecs:
x.append(v[0])
y.append(v[1])
z.append(v[2])
return Vector((min(x), min(y), min(z)))
def get_max_vector_list(vecs):
x = []
y = []
z = []
for v in vecs:
x.append(v[0])
y.append(v[1])
z.append(v[2])
return Vector((max(x), max(y), max(z)))
def add_vector_list(list1, list2):
x = list1[0] + list2[0]
y = list1[1] + list2[1]
z = list1[2] + list2[2]
return [x, y, z]
def subtract_vector_list(list1, list2):
x = list1[0] - list2[0]
y = list1[1] - list2[1]
z = list1[2] - list2[2]
return [x, y, z]
def multiple_vector_list(list, num):
x = list[0] * num
y = list[1] * num
z = list[2] * num
return [x, y, z]
def get_vector_list_length(list):
sx = list[0] ** 2
sy = list[1] ** 2
sz = list[2] ** 2
length = (sx + sy + sz) ** 0.5
return length
def divide_vectors(a, b):
return Vector((a.x/b.x, a.y/b.y, a.z/b.z))
def get_closest_axis_point(axis, center, points):
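    # Return the point whose normalized direction from center is closest to the
    # given axis direction.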
closest = None
closestDist = inf
for p in points:
rel = (p - center).normalized()
dist = (rel - axis).length
if dist < closestDist:
closest = p
closestDist = dist
return closest
def get_distance_of_vectors(a, b):
locx = b.x - a.x
locy = b.y - a.y
locz = b.z - a.z
distance = sqrt((locx) ** 2 + (locy) ** 2 + (locz) ** 2)
return distance
def get_direction_of_vectors(a, b):
direction = (a - b).normalized()
axis_align = Vector((0.0, 0.0, 1.0))
angle = axis_align.angle(direction)
axis = axis_align.cross(direction)
q = Quaternion(axis, angle)
return q.to_euler("XYZ")
def lookatlh(eye, target, up):
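    # Build a left-handed look-at matrix: mz is the inverse line of sight and
    # mx/my complete the orthonormal basis; rows and translation are arranged
    # for the target coordinate convention.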
mz = Vector((eye[0]-target[0], eye[1]-target[1], eye[2] -
target[2])).normalized() # inverse line of sight
mx = Vector(up.cross(mz)).normalized()
my = Vector(mz.cross(mx)).normalized()
tx = mx.dot(eye)
ty = my.dot(eye)
tz = mz.dot(eye) * -1
mat = Matrix()
mat[0] = mx[0], mz[0], my[0], 0
mat[1] = mx[2], mz[2], my[2], 0
mat[2] = mx[1], mz[1], my[1], 0
mat[3] = tx, tz, ty, 1
return mat
def multiW(m, v):
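    # Transform v as a homogeneous point by the 4x4 matrix m (row-vector
    # convention) and divide by |w| to project back to 3D.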
x = (((m[0][0] * v.x) + (m[1][0] * v.y)) + (m[2][0] * v.z)) + m[3][0]
y = (((m[0][1] * v.x) + (m[1][1] * v.y)) + (m[2][1] * v.z)) + m[3][1]
z = (((m[0][2] * v.x) + (m[1][2] * v.y)) + (m[2][2] * v.z)) + m[3][2]
w = (((m[0][3] * v.x) + (m[1][3] * v.y)) + (m[2][3] * v.z)) + m[3][3]
iw = 1.0 / abs(w)
return Vector((x * iw, y * iw, z * iw))
| 21.055814
| 73
| 0.520654
|
dcd36c1f54c0999517c1bdb0fcfd939b33cedeb1
| 164,986
|
py
|
Python
|
modin/pandas/test/test_series.py
|
maksimbo1/modin
|
e15657e6f6239f848d30fcd3dc997528ea900415
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
modin/pandas/test/test_series.py
|
maksimbo1/modin
|
e15657e6f6239f848d30fcd3dc997528ea900415
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
modin/pandas/test/test_series.py
|
maksimbo1/modin
|
e15657e6f6239f848d30fcd3dc997528ea900415
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Licensed to Modin Development Team under one or more contributor license agreements.
# See the NOTICE file distributed with this work for additional information regarding
# copyright ownership. The Modin Development Team licenses this file to you under the
# Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
import pytest
import numpy as np
import json
import pandas
import matplotlib
import modin.pandas as pd
from numpy.testing import assert_array_equal
from pandas.core.base import SpecificationError
from modin.utils import get_current_execution
import sys
from modin.utils import to_pandas
from .utils import (
random_state,
RAND_LOW,
RAND_HIGH,
df_equals,
arg_keys,
name_contains,
test_data,
test_data_values,
test_data_keys,
test_data_with_duplicates_values,
test_data_with_duplicates_keys,
test_string_data_values,
test_string_data_keys,
test_string_list_data_values,
test_string_list_data_keys,
string_sep_values,
string_sep_keys,
string_na_rep_values,
string_na_rep_keys,
numeric_dfs,
no_numeric_dfs,
agg_func_keys,
agg_func_values,
agg_func_except_keys,
agg_func_except_values,
numeric_agg_funcs,
quantiles_keys,
quantiles_values,
axis_keys,
axis_values,
bool_arg_keys,
bool_arg_values,
int_arg_keys,
int_arg_values,
encoding_types,
categories_equals,
eval_general,
test_data_small_values,
test_data_small_keys,
test_data_categorical_values,
test_data_categorical_keys,
generate_multiindex,
test_data_diff_dtype,
df_equals_with_non_stable_indices,
)
from modin.config import NPartitions
NPartitions.put(4)
# Force matplotlib to not use any Xwindows backend.
matplotlib.use("Agg")
def get_rop(op):
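    # Map a dunder operator name to its reflected variant, e.g. "__add__" ->
    # "__radd__"; return None for non-dunder names.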
if op.startswith("__") and op.endswith("__"):
return "__r" + op[2:]
else:
return None
def inter_df_math_helper(modin_series, pandas_series, op):
inter_df_math_helper_one_side(modin_series, pandas_series, op)
rop = get_rop(op)
if rop:
inter_df_math_helper_one_side(modin_series, pandas_series, rop)
def inter_df_math_helper_one_side(modin_series, pandas_series, op):
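    # Apply the named operator on both the Modin and pandas Series against scalar,
    # list, Series and MultiIndex-level operands, and check that the results (or the
    # raised exception types) match.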
try:
pandas_attr = getattr(pandas_series, op)
except Exception as e:
with pytest.raises(type(e)):
_ = getattr(modin_series, op)
return
modin_attr = getattr(modin_series, op)
try:
pandas_result = pandas_attr(4)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_attr(4)) # repr to force materialization
else:
modin_result = modin_attr(4)
df_equals(modin_result, pandas_result)
try:
pandas_result = pandas_attr(4.0)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_attr(4.0)) # repr to force materialization
else:
modin_result = modin_attr(4.0)
df_equals(modin_result, pandas_result)
    # These operations don't support non-scalar `other` or behave strangely in
    # the testing environment
if op in [
"__divmod__",
"divmod",
"rdivmod",
"floordiv",
"__floordiv__",
"rfloordiv",
"__rfloordiv__",
"mod",
"__mod__",
"rmod",
"__rmod__",
]:
return
try:
pandas_result = pandas_attr(pandas_series)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_attr(modin_series)) # repr to force materialization
else:
modin_result = modin_attr(modin_series)
df_equals(modin_result, pandas_result)
list_test = random_state.randint(RAND_LOW, RAND_HIGH, size=(modin_series.shape[0]))
try:
pandas_result = pandas_attr(list_test)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_attr(list_test)) # repr to force materialization
else:
modin_result = modin_attr(list_test)
df_equals(modin_result, pandas_result)
series_test_modin = pd.Series(list_test, index=modin_series.index)
series_test_pandas = pandas.Series(list_test, index=pandas_series.index)
try:
pandas_result = pandas_attr(series_test_pandas)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_attr(series_test_modin)) # repr to force materialization
else:
modin_result = modin_attr(series_test_modin)
df_equals(modin_result, pandas_result)
# Level test
new_idx = pandas.MultiIndex.from_tuples(
[(i // 4, i // 2, i) for i in modin_series.index]
)
modin_df_multi_level = modin_series.copy()
modin_df_multi_level.index = new_idx
try:
# Defaults to pandas
with pytest.warns(UserWarning):
# Operation against self for sanity check
getattr(modin_df_multi_level, op)(modin_df_multi_level, level=1)
except TypeError:
# Some operations don't support multilevel `level` parameter
pass
def create_test_series(vals, sort=False, **kwargs):
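    # Build a matching pair of Modin and pandas Series from the same data
    # (dict input uses only the values of its first key), optionally sorted.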
if isinstance(vals, dict):
modin_series = pd.Series(vals[next(iter(vals.keys()))], **kwargs)
pandas_series = pandas.Series(vals[next(iter(vals.keys()))], **kwargs)
else:
modin_series = pd.Series(vals, **kwargs)
pandas_series = pandas.Series(vals, **kwargs)
if sort:
modin_series = modin_series.sort_values().reset_index(drop=True)
pandas_series = pandas_series.sort_values().reset_index(drop=True)
return modin_series, pandas_series
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_to_frame(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.to_frame(name="miao"), pandas_series.to_frame(name="miao"))
def test_accessing_index_element_as_property():
s = pd.Series([10, 20, 30], index=["a", "b", "c"])
assert s.b == 20
with pytest.raises(Exception):
_ = s.d
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_callable_key_in_getitem(data):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series[lambda s: s.index % 2 == 0],
pandas_series[lambda s: s.index % 2 == 0],
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_T(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.T, pandas_series.T)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___abs__(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.__abs__(), pandas_series.__abs__())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___add__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__add__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___and__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__and__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___array__(data):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.__array__()
assert_array_equal(modin_result, pandas_series.__array__())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___bool__(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.__bool__()
except Exception as e:
with pytest.raises(type(e)):
modin_series.__bool__()
else:
modin_result = modin_series.__bool__()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___contains__(request, data):
modin_series, pandas_series = create_test_series(data)
result = False
key = "Not Exist"
assert result == modin_series.__contains__(key)
assert result == (key in modin_series)
if "empty_data" not in request.node.name:
result = True
key = pandas_series.keys()[0]
assert result == modin_series.__contains__(key)
assert result == (key in modin_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___copy__(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.copy(), modin_series)
df_equals(modin_series.copy(), pandas_series.copy())
df_equals(modin_series.copy(), pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___deepcopy__(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.__deepcopy__(), modin_series)
df_equals(modin_series.__deepcopy__(), pandas_series.__deepcopy__())
df_equals(modin_series.__deepcopy__(), pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___delitem__(data):
modin_series, pandas_series = create_test_series(data)
del modin_series[modin_series.index[0]]
del pandas_series[pandas_series.index[0]]
df_equals(modin_series, pandas_series)
del modin_series[modin_series.index[-1]]
del pandas_series[pandas_series.index[-1]]
df_equals(modin_series, pandas_series)
del modin_series[modin_series.index[0]]
del pandas_series[pandas_series.index[0]]
df_equals(modin_series, pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_divmod(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "divmod")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rdivmod(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rdivmod")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___eq__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__eq__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___floordiv__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__floordiv__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___ge__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__ge__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___getitem__(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series[0], pandas_series[0])
df_equals(
modin_series[modin_series.index[-1]], pandas_series[pandas_series.index[-1]]
)
modin_series = pd.Series(list(range(1000)))
pandas_series = pandas.Series(list(range(1000)))
df_equals(modin_series[:30], pandas_series[:30])
df_equals(modin_series[modin_series > 500], pandas_series[pandas_series > 500])
# Test empty series
df_equals(pd.Series([])[:30], pandas.Series([])[:30])
def test___getitem__1383():
# see #1383 for more details
data = ["", "a", "b", "c", "a"]
modin_series = pd.Series(data)
pandas_series = pandas.Series(data)
df_equals(modin_series[3:7], pandas_series[3:7])
@pytest.mark.parametrize("start", [-7, -5, -3, 0, None, 3, 5, 7])
@pytest.mark.parametrize("stop", [-7, -5, -3, 0, None, 3, 5, 7])
def test___getitem_edge_cases(start, stop):
data = ["", "a", "b", "c", "a"]
modin_series = pd.Series(data)
pandas_series = pandas.Series(data)
df_equals(modin_series[start:stop], pandas_series[start:stop])
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___gt__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__gt__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___int__(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = int(pandas_series[0])
except Exception as e:
with pytest.raises(type(e)):
int(modin_series[0])
else:
assert int(modin_series[0]) == pandas_result
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___invert__(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.__invert__()
except Exception as e:
with pytest.raises(type(e)):
repr(modin_series.__invert__())
else:
df_equals(modin_series.__invert__(), pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___iter__(data):
modin_series, pandas_series = create_test_series(data)
for m, p in zip(modin_series.__iter__(), pandas_series.__iter__()):
np.testing.assert_equal(m, p)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___le__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__le__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___len__(data):
modin_series, pandas_series = create_test_series(data)
assert len(modin_series) == len(pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___long__(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series[0].__long__()
except Exception as e:
with pytest.raises(type(e)):
modin_series[0].__long__()
else:
assert modin_series[0].__long__() == pandas_result
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___lt__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__lt__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___mod__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__mod__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___mul__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__mul__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___ne__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__ne__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___neg__(request, data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.__neg__()
except Exception as e:
with pytest.raises(type(e)):
repr(modin_series.__neg__())
else:
df_equals(modin_series.__neg__(), pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___or__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__or__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___pow__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__pow__")
@pytest.mark.parametrize("name", ["Dates", None])
@pytest.mark.parametrize(
"dt_index", [True, False], ids=["dt_index_true", "dt_index_false"]
)
@pytest.mark.parametrize(
"data",
[*test_data_values, "empty"],
ids=[*test_data_keys, "empty"],
)
def test___repr__(name, dt_index, data):
if data == "empty":
modin_series, pandas_series = pd.Series(), pandas.Series()
else:
modin_series, pandas_series = create_test_series(data)
pandas_series.name = modin_series.name = name
if dt_index:
index = pandas.date_range(
"1/1/2000", periods=len(pandas_series.index), freq="T"
)
pandas_series.index = modin_series.index = index
if get_current_execution() == "BaseOnPython" and data == "empty":
# TODO: Remove this when default `dtype` of empty Series will be `object` in pandas (see #3142).
assert modin_series.dtype == np.object
assert pandas_series.dtype == np.float64
df_equals(modin_series.index, pandas_series.index)
else:
assert repr(modin_series) == repr(pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___round__(data):
modin_series, pandas_series = create_test_series(data)
df_equals(round(modin_series), round(pandas_series))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___setitem__(data):
modin_series, pandas_series = create_test_series(data)
for key in modin_series.keys():
modin_series[key] = 0
pandas_series[key] = 0
df_equals(modin_series, pandas_series)
@pytest.mark.parametrize(
"key",
[
pytest.param(lambda idx: slice(1, 3), id="location_based_slice"),
pytest.param(lambda idx: slice(idx[1], idx[-1]), id="index_based_slice"),
pytest.param(lambda idx: [idx[0], idx[2], idx[-1]], id="list_of_labels"),
pytest.param(
lambda idx: [True if i % 2 else False for i in range(len(idx))],
id="boolean_mask",
),
],
)
@pytest.mark.parametrize(
"index",
[
pytest.param(
lambda idx_len: [chr(x) for x in range(ord("a"), ord("a") + idx_len)],
id="str_index",
),
pytest.param(lambda idx_len: list(range(1, idx_len + 1)), id="int_index"),
],
)
def test___setitem___non_hashable(key, index):
data = np.arange(5)
index = index(len(data))
key = key(index)
md_sr, pd_sr = create_test_series(data, index=index)
md_sr[key] = 10
pd_sr[key] = 10
df_equals(md_sr, pd_sr)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___sizeof__(data):
modin_series, pandas_series = create_test_series(data)
with pytest.warns(UserWarning):
modin_series.__sizeof__()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___str__(data):
modin_series, pandas_series = create_test_series(data)
assert str(modin_series) == str(pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___sub__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__sub__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___truediv__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__truediv__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test___xor__(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "__xor__")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_abs(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.abs(), pandas_series.abs())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_add(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "add")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_add_prefix(data):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.add_prefix("PREFIX_ADD_"), pandas_series.add_prefix("PREFIX_ADD_")
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_add_suffix(data):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.add_suffix("SUFFIX_ADD_"), pandas_series.add_suffix("SUFFIX_ADD_")
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_values, ids=agg_func_keys)
def test_agg(data, func):
eval_general(
*create_test_series(data),
lambda df: df.agg(func),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_except_values, ids=agg_func_except_keys)
def test_agg_except(data, func):
    # SpecificationError is raised because we treat a Series as a DataFrame.
# See details in pandas issue 36036.
with pytest.raises(SpecificationError):
eval_general(
*create_test_series(data),
lambda df: df.agg(func),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_values, ids=agg_func_keys)
def test_agg_numeric(request, data, func):
if name_contains(request.node.name, numeric_agg_funcs) and name_contains(
request.node.name, numeric_dfs
):
axis = 0
eval_general(
*create_test_series(data),
lambda df: df.agg(func, axis),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_except_values, ids=agg_func_except_keys)
def test_agg_numeric_except(request, data, func):
if name_contains(request.node.name, numeric_agg_funcs) and name_contains(
request.node.name, numeric_dfs
):
axis = 0
        # SpecificationError is raised because we treat a Series as a DataFrame.
# See details in pandas issue 36036.
with pytest.raises(SpecificationError):
eval_general(
*create_test_series(data),
lambda df: df.agg(func, axis),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_values, ids=agg_func_keys)
def test_aggregate(data, func):
axis = 0
eval_general(
*create_test_series(data),
lambda df: df.aggregate(func, axis),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_except_values, ids=agg_func_except_keys)
def test_aggregate_except(data, func):
axis = 0
    # SpecificationError is raised because we treat a Series as a DataFrame.
    # See details in pandas issue 36036.
with pytest.raises(SpecificationError):
eval_general(
*create_test_series(data),
lambda df: df.aggregate(func, axis),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_values, ids=agg_func_keys)
def test_aggregate_numeric(request, data, func):
if name_contains(request.node.name, numeric_agg_funcs) and name_contains(
request.node.name, numeric_dfs
):
axis = 0
eval_general(
*create_test_series(data),
lambda df: df.agg(func, axis),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_except_values, ids=agg_func_except_keys)
def test_aggregate_numeric_except(request, data, func):
if name_contains(request.node.name, numeric_agg_funcs) and name_contains(
request.node.name, numeric_dfs
):
axis = 0
        # SpecificationError is raised because we treat a Series as a DataFrame.
        # See details in pandas issue 36036.
with pytest.raises(SpecificationError):
eval_general(
*create_test_series(data),
lambda df: df.agg(func, axis),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_aggregate_error_checking(data):
modin_series, pandas_series = create_test_series(data)
assert pandas_series.aggregate("ndim") == 1
assert modin_series.aggregate("ndim") == 1
def user_warning_checker(series, fn):
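        # Expect a UserWarning from the Modin Series (e.g. a default-to-pandas
        # fallback); call the function directly for plain pandas.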
if isinstance(series, pd.Series):
with pytest.warns(UserWarning):
return fn(series)
return fn(series)
eval_general(
modin_series,
pandas_series,
lambda series: user_warning_checker(
series, fn=lambda series: series.aggregate("cumproduct")
),
)
eval_general(
modin_series, pandas_series, lambda series: series.aggregate("NOT_EXISTS")
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_align(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.align(modin_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_all(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.all(skipna=skipna), pandas_series.all(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_any(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.any(skipna=skipna), pandas_series.any(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_append(data):
modin_series, pandas_series = create_test_series(data)
data_to_append = {"append_a": 2, "append_b": 1000}
ignore_idx_values = [True, False]
for ignore in ignore_idx_values:
try:
pandas_result = pandas_series.append(data_to_append, ignore_index=ignore)
except Exception as e:
with pytest.raises(type(e)):
modin_series.append(data_to_append, ignore_index=ignore)
else:
modin_result = modin_series.append(data_to_append, ignore_index=ignore)
df_equals(modin_result, pandas_result)
try:
pandas_result = pandas_series.append(pandas_series.iloc[-1])
except Exception as e:
with pytest.raises(type(e)):
modin_series.append(modin_series.iloc[-1])
else:
modin_result = modin_series.append(modin_series.iloc[-1])
df_equals(modin_result, pandas_result)
try:
pandas_result = pandas_series.append([pandas_series.iloc[-1]])
except Exception as e:
with pytest.raises(type(e)):
modin_series.append([modin_series.iloc[-1]])
else:
modin_result = modin_series.append([modin_series.iloc[-1]])
df_equals(modin_result, pandas_result)
verify_integrity_values = [True, False]
for verify_integrity in verify_integrity_values:
try:
pandas_result = pandas_series.append(
[pandas_series, pandas_series], verify_integrity=verify_integrity
)
except Exception as e:
with pytest.raises(type(e)):
modin_series.append(
[modin_series, modin_series], verify_integrity=verify_integrity
)
else:
modin_result = modin_series.append(
[modin_series, modin_series], verify_integrity=verify_integrity
)
df_equals(modin_result, pandas_result)
try:
pandas_result = pandas_series.append(
pandas_series, verify_integrity=verify_integrity
)
except Exception as e:
with pytest.raises(type(e)):
modin_series.append(modin_series, verify_integrity=verify_integrity)
else:
modin_result = modin_series.append(
modin_series, verify_integrity=verify_integrity
)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_values, ids=agg_func_keys)
def test_apply(data, func):
eval_general(
*create_test_series(data),
lambda df: df.apply(func),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_except_values, ids=agg_func_except_keys)
def test_apply_except(data, func):
    # SpecificationError is raised because we treat a Series as a DataFrame.
    # See details in pandas issue 36036.
with pytest.raises(SpecificationError):
eval_general(
*create_test_series(data),
lambda df: df.apply(func),
)
def test_apply_external_lib():
json_string = """
{
"researcher": {
"name": "Ford Prefect",
"species": "Betelgeusian",
"relatives": [
{
"name": "Zaphod Beeblebrox",
"species": "Betelgeusian"
}
]
}
}
"""
modin_result = pd.DataFrame.from_dict({"a": [json_string]}).a.apply(json.loads)
pandas_result = pandas.DataFrame.from_dict({"a": [json_string]}).a.apply(json.loads)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_values, ids=agg_func_keys)
def test_apply_numeric(request, data, func):
if name_contains(request.node.name, numeric_dfs):
eval_general(
*create_test_series(data),
lambda df: df.apply(func),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_except_values, ids=agg_func_except_keys)
def test_apply_numeric_except(request, data, func):
if name_contains(request.node.name, numeric_dfs):
        # SpecificationError is raised because we treat a Series as a DataFrame.
        # See details in pandas issue 36036.
with pytest.raises(SpecificationError):
eval_general(
*create_test_series(data),
lambda df: df.apply(func),
)
@pytest.mark.parametrize("axis", [None, 0, 1])
@pytest.mark.parametrize("level", [None, -1, 0, 1])
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", ["count", "all", "kurt", "array", "searchsorted"])
def test_apply_text_func(level, data, func, axis):
func_kwargs = {}
if level:
func_kwargs.update({"level": level})
if axis:
func_kwargs.update({"axis": axis})
rows_number = len(next(iter(data.values()))) # length of the first data column
level_0 = np.random.choice([0, 1, 2], rows_number)
level_1 = np.random.choice([3, 4, 5], rows_number)
index = pd.MultiIndex.from_arrays([level_0, level_1])
modin_series, pandas_series = create_test_series(data)
modin_series.index = index
pandas_series.index = index
eval_general(modin_series, pandas_series, lambda df: df.apply(func), **func_kwargs)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("skipna", [True, False])
def test_argmax(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.argmax(skipna=skipna), pandas_series.argmax(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("skipna", [True, False])
def test_argmin(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.argmin(skipna=skipna), pandas_series.argmin(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_argsort(data):
modin_series, pandas_series = create_test_series(data)
with pytest.warns(UserWarning):
modin_result = modin_series.argsort()
df_equals(modin_result, pandas_series.argsort())
def test_asfreq():
index = pd.date_range("1/1/2000", periods=4, freq="T")
series = pd.Series([0.0, None, 2.0, 3.0], index=index)
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
series.asfreq(freq="30S")
@pytest.mark.parametrize(
"where",
[
20,
30,
[10, 40],
[20, 30],
[20],
25,
[25, 45],
[25, 30],
pandas.Index([20, 30]),
pandas.Index([10]),
],
)
def test_asof(where):
# With NaN:
values = [1, 2, np.nan, 4]
index = [10, 20, 30, 40]
modin_series, pandas_series = pd.Series(values, index=index), pandas.Series(
values, index=index
)
df_equals(modin_series.asof(where), pandas_series.asof(where))
# No NaN:
values = [1, 2, 7, 4]
modin_series, pandas_series = pd.Series(values, index=index), pandas.Series(
values, index=index
)
df_equals(modin_series.asof(where), pandas_series.asof(where))
@pytest.mark.parametrize(
"where",
[
20,
30,
[10.5, 40.5],
[10],
pandas.Index([20, 30]),
pandas.Index([10.5]),
],
)
def test_asof_large(where):
values = test_data["float_nan_data"]["col1"]
index = list(range(len(values)))
modin_series, pandas_series = pd.Series(values, index=index), pandas.Series(
values, index=index
)
df_equals(modin_series.asof(where), pandas_series.asof(where))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_astype(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.astype(str)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_series.astype(str)) # repr to force materialization
else:
df_equals(modin_series.astype(str), pandas_result)
try:
pandas_result = pandas_series.astype(np.int64)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_series.astype(np.int64)) # repr to force materialization
else:
df_equals(modin_series.astype(np.int64), pandas_result)
try:
pandas_result = pandas_series.astype(np.float64)
except Exception as e:
with pytest.raises(type(e)):
repr(modin_series.astype(np.float64)) # repr to force materialization
else:
df_equals(modin_series.astype(np.float64), pandas_result)
def test_astype_categorical():
modin_df = pd.Series(["A", "A", "B", "B", "A"])
pandas_df = pandas.Series(["A", "A", "B", "B", "A"])
modin_result = modin_df.astype("category")
pandas_result = pandas_df.astype("category")
df_equals(modin_result, pandas_result)
assert modin_result.dtype == pandas_result.dtype
    modin_df = pd.Series([1, 1, 2, 1, 2, 2, 3, 1, 2, 1, 2])
    pandas_df = pandas.Series([1, 1, 2, 1, 2, 2, 3, 1, 2, 1, 2])
    modin_result = modin_df.astype("category")
    pandas_result = pandas_df.astype("category")
    df_equals(modin_result, pandas_result)
    assert modin_result.dtype == pandas_result.dtype
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_at(data):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.at[modin_series.index[0]], pandas_series.at[pandas_series.index[0]]
)
df_equals(
modin_series.at[modin_series.index[-1]], pandas_series[pandas_series.index[-1]]
)
def test_at_time():
i = pd.date_range("2008-01-01", periods=1000, freq="12H")
modin_series = pd.Series(list(range(1000)), index=i)
pandas_series = pandas.Series(list(range(1000)), index=i)
df_equals(modin_series.at_time("12:00"), pandas_series.at_time("12:00"))
df_equals(modin_series.at_time("3:00"), pandas_series.at_time("3:00"))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("lag", [1, 2, 3])
def test_autocorr(data, lag):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.autocorr(lag=lag)
pandas_result = pandas_series.autocorr(lag=lag)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_axes(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.axes[0].equals(pandas_series.axes[0])
assert len(modin_series.axes) == len(pandas_series.axes)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_attrs(data):
modin_series, pandas_series = create_test_series(data)
eval_general(modin_series, pandas_series, lambda df: df.attrs)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_array(data):
modin_series, pandas_series = create_test_series(data)
eval_general(modin_series, pandas_series, lambda df: df.array)
@pytest.mark.xfail(reason="Using pandas Series.")
def test_between():
modin_series = create_test_series()
with pytest.raises(NotImplementedError):
modin_series.between(None, None)
def test_between_time():
i = pd.date_range("2008-01-01", periods=1000, freq="12H")
modin_series = pd.Series(list(range(1000)), index=i)
pandas_series = pandas.Series(list(range(1000)), index=i)
df_equals(
modin_series.between_time("12:00", "17:00"),
pandas_series.between_time("12:00", "17:00"),
)
df_equals(
modin_series.between_time("3:00", "8:00"),
pandas_series.between_time("3:00", "8:00"),
)
df_equals(
modin_series.between_time("3:00", "8:00", False),
pandas_series.between_time("3:00", "8:00", False),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_bfill(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.bfill(), pandas_series.bfill())
# inplace
modin_series_cp = modin_series.copy()
pandas_series_cp = pandas_series.copy()
modin_series_cp.bfill(inplace=True)
pandas_series_cp.bfill(inplace=True)
df_equals(modin_series_cp, pandas_series_cp)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_bool(data):
modin_series, pandas_series = create_test_series(data)
with pytest.raises(ValueError):
modin_series.bool()
with pytest.raises(ValueError):
modin_series.__bool__()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_clip(request, data):
modin_series, pandas_series = create_test_series(data)
if name_contains(request.node.name, numeric_dfs):
# set bounds
lower, upper = np.sort(random_state.random_integers(RAND_LOW, RAND_HIGH, 2))
# test only upper scalar bound
modin_result = modin_series.clip(None, upper)
pandas_result = pandas_series.clip(None, upper)
df_equals(modin_result, pandas_result)
# test lower and upper scalar bound
modin_result = modin_series.clip(lower, upper)
pandas_result = pandas_series.clip(lower, upper)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_combine(data):
modin_series, _ = create_test_series(data) # noqa: F841
modin_series2 = modin_series % (max(modin_series) // 2)
modin_series.combine(modin_series2, lambda s1, s2: s1 if s1 < s2 else s2)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_combine_first(data):
modin_series, pandas_series = create_test_series(data)
modin_series2 = modin_series % (max(modin_series) // 2)
pandas_series2 = pandas_series % (max(pandas_series) // 2)
modin_result = modin_series.combine_first(modin_series2)
pandas_result = pandas_series.combine_first(pandas_series2)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_compress(data):
modin_series, pandas_series = create_test_series(data) # noqa: F841
try:
pandas_series.compress(pandas_series > 30)
except Exception as e:
with pytest.raises(type(e)):
modin_series.compress(modin_series > 30)
else:
modin_series.compress(modin_series > 30)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_constructor(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series, pandas_series)
df_equals(pd.Series(modin_series), pandas.Series(pandas_series))
def test_constructor_columns_and_index():
modin_series = pd.Series([1, 1, 10], index=[1, 2, 3], name="health")
pandas_series = pandas.Series([1, 1, 10], index=[1, 2, 3], name="health")
df_equals(modin_series, pandas_series)
df_equals(pd.Series(modin_series), pandas.Series(pandas_series))
df_equals(
pd.Series(modin_series, name="max_speed"),
pandas.Series(pandas_series, name="max_speed"),
)
df_equals(
pd.Series(modin_series, index=[1, 2]),
pandas.Series(pandas_series, index=[1, 2]),
)
with pytest.raises(NotImplementedError):
pd.Series(modin_series, index=[1, 2, 99999])
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_copy(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series, modin_series.copy())
df_equals(modin_series.copy(), pandas_series)
df_equals(modin_series.copy(), pandas_series.copy())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_corr(data):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.corr(modin_series)
pandas_result = pandas_series.corr(pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_count(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.count(), pandas_series.count())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_cov(data):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.cov(modin_series)
pandas_result = pandas_series.cov(pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_cummax(data, skipna):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.cummax(skipna=skipna)
except Exception as e:
with pytest.raises(type(e)):
modin_series.cummax(skipna=skipna)
else:
df_equals(modin_series.cummax(skipna=skipna), pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_cummin(data, skipna):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.cummin(skipna=skipna)
except Exception as e:
with pytest.raises(type(e)):
modin_series.cummin(skipna=skipna)
else:
df_equals(modin_series.cummin(skipna=skipna), pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_cumprod(data, skipna):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.cumprod(skipna=skipna)
except Exception as e:
with pytest.raises(type(e)):
modin_series.cumprod(skipna=skipna)
else:
df_equals(modin_series.cumprod(skipna=skipna), pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_cumsum(data, skipna):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.cumsum(skipna=skipna)
except Exception as e:
with pytest.raises(type(e)):
modin_series.cumsum(skipna=skipna)
else:
df_equals(modin_series.cumsum(skipna=skipna), pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_describe(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.describe(), pandas_series.describe())
percentiles = [0.10, 0.11, 0.44, 0.78, 0.99]
df_equals(
modin_series.describe(percentiles=percentiles),
pandas_series.describe(percentiles=percentiles),
)
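    # pandas may ignore or reject include/exclude for a Series, so mirror its
    # behavior: raise the same error when it raises, otherwise compare results.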
try:
pandas_result = pandas_series.describe(exclude=[np.float64])
except Exception as e:
with pytest.raises(type(e)):
modin_series.describe(exclude=[np.float64])
else:
modin_result = modin_series.describe(exclude=[np.float64])
df_equals(modin_result, pandas_result)
try:
pandas_result = pandas_series.describe(exclude=np.float64)
except Exception as e:
with pytest.raises(type(e)):
modin_series.describe(exclude=np.float64)
else:
modin_result = modin_series.describe(exclude=np.float64)
df_equals(modin_result, pandas_result)
try:
pandas_result = pandas_series.describe(
include=[np.timedelta64, np.datetime64, np.object, np.bool]
)
except Exception as e:
with pytest.raises(type(e)):
modin_series.describe(
include=[np.timedelta64, np.datetime64, np.object, np.bool]
)
else:
modin_result = modin_series.describe(
include=[np.timedelta64, np.datetime64, np.object, np.bool]
)
df_equals(modin_result, pandas_result)
modin_result = modin_series.describe(include=str(modin_series.dtypes))
pandas_result = pandas_series.describe(include=str(pandas_series.dtypes))
df_equals(modin_result, pandas_result)
modin_result = modin_series.describe(include=[np.number])
pandas_result = pandas_series.describe(include=[np.number])
df_equals(modin_result, pandas_result)
df_equals(
modin_series.describe(include="all"), pandas_series.describe(include="all")
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"periods", int_arg_values, ids=arg_keys("periods", int_arg_keys)
)
def test_diff(data, periods):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.diff(periods=periods)
except Exception as e:
with pytest.raises(type(e)):
modin_series.diff(periods=periods)
else:
modin_result = modin_series.diff(periods=periods)
df_equals(modin_result, pandas_result)
try:
pandas_result = pandas_series.T.diff(periods=periods)
except Exception as e:
with pytest.raises(type(e)):
modin_series.T.diff(periods=periods)
else:
modin_result = modin_series.T.diff(periods=periods)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_div(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "div")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_divide(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "divide")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_dot(data):
modin_series, pandas_series = create_test_series(data)
ind_len = len(modin_series)
# Test 1D array input
arr = np.arange(ind_len)
modin_result = modin_series.dot(arr)
pandas_result = pandas_series.dot(arr)
df_equals(modin_result, pandas_result)
# Test 2D array input
arr = np.arange(ind_len * 2).reshape(ind_len, 2)
modin_result = modin_series.dot(arr)
pandas_result = pandas_series.dot(arr)
assert_array_equal(modin_result, pandas_result)
# Test bad dimensions
with pytest.raises(ValueError):
modin_result = modin_series.dot(np.arange(ind_len + 10))
# Test dataframe input
modin_df = pd.DataFrame(data)
pandas_df = pandas.DataFrame(data)
modin_result = modin_series.dot(modin_df)
pandas_result = pandas_series.dot(pandas_df)
df_equals(modin_result, pandas_result)
# Test series input
modin_series_2 = pd.Series(np.arange(ind_len), index=modin_series.index)
pandas_series_2 = pandas.Series(np.arange(ind_len), index=pandas_series.index)
modin_result = modin_series.dot(modin_series_2)
pandas_result = pandas_series.dot(pandas_series_2)
df_equals(modin_result, pandas_result)
# Test when input series index doesn't line up with columns
with pytest.raises(ValueError):
modin_result = modin_series.dot(
pd.Series(
np.arange(ind_len), index=["a" for _ in range(len(modin_series.index))]
)
)
# Test case when left series has size (1 x 1)
# and right dataframe has size (1 x n)
modin_result = pd.Series([1]).dot(pd.DataFrame(modin_series).T)
pandas_result = pandas.Series([1]).dot(pandas.DataFrame(pandas_series).T)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_matmul(data):
modin_series, pandas_series = create_test_series(data) # noqa: F841
ind_len = len(modin_series)
# Test 1D array input
arr = np.arange(ind_len)
modin_result = modin_series @ arr
pandas_result = pandas_series @ arr
df_equals(modin_result, pandas_result)
# Test 2D array input
arr = np.arange(ind_len * 2).reshape(ind_len, 2)
modin_result = modin_series @ arr
pandas_result = pandas_series @ arr
assert_array_equal(modin_result, pandas_result)
# Test bad dimensions
with pytest.raises(ValueError):
modin_result = modin_series @ np.arange(ind_len + 10)
# Test dataframe input
modin_df = pd.DataFrame(data)
pandas_df = pandas.DataFrame(data)
modin_result = modin_series @ modin_df
pandas_result = pandas_series @ pandas_df
df_equals(modin_result, pandas_result)
# Test series input
modin_series_2 = pd.Series(np.arange(ind_len), index=modin_series.index)
pandas_series_2 = pandas.Series(np.arange(ind_len), index=pandas_series.index)
modin_result = modin_series @ modin_series_2
pandas_result = pandas_series @ pandas_series_2
df_equals(modin_result, pandas_result)
# Test when input series index doesn't line up with columns
with pytest.raises(ValueError):
modin_result = modin_series @ pd.Series(
np.arange(ind_len), index=["a" for _ in range(len(modin_series.index))]
)
@pytest.mark.xfail(reason="Using pandas Series.")
def test_drop():
modin_series = create_test_series()
with pytest.raises(NotImplementedError):
modin_series.drop(None, None, None, None)
@pytest.mark.parametrize(
"data", test_data_with_duplicates_values, ids=test_data_with_duplicates_keys
)
@pytest.mark.parametrize(
"keep", ["last", "first", False], ids=["last", "first", "False"]
)
@pytest.mark.parametrize("inplace", [True, False], ids=["True", "False"])
def test_drop_duplicates(data, keep, inplace):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.drop_duplicates(keep=keep, inplace=inplace),
pandas_series.drop_duplicates(keep=keep, inplace=inplace),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("how", ["any", "all"], ids=["any", "all"])
def test_dropna(data, how):
modin_series, pandas_series = create_test_series(data)
with pytest.raises(TypeError):
modin_series.dropna(how=None, thresh=None)
modin_result = modin_series.dropna(how=how)
pandas_result = pandas_series.dropna(how=how)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_dropna_inplace(data):
modin_series, pandas_series = create_test_series(data)
pandas_result = pandas_series.dropna()
modin_series.dropna(inplace=True)
df_equals(modin_series, pandas_result)
modin_series, pandas_series = create_test_series(data)
with pytest.raises(TypeError):
modin_series.dropna(thresh=2, inplace=True)
modin_series, pandas_series = create_test_series(data)
pandas_series.dropna(how="any", inplace=True)
modin_series.dropna(how="any", inplace=True)
df_equals(modin_series, pandas_series)
def test_dtype_empty():
modin_series, pandas_series = pd.Series(), pandas.Series()
if get_current_execution() == "BaseOnPython":
        # TODO: Remove this when the default `dtype` of an empty Series is `object` in pandas (see #3142).
assert modin_series.dtype == np.object
assert pandas_series.dtype == np.float64
else:
assert modin_series.dtype == pandas_series.dtype
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_dtype(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.dtype, modin_series.dtypes)
df_equals(modin_series.dtype, pandas_series.dtype)
df_equals(modin_series.dtype, pandas_series.dtypes)
def test_dt():
data = pd.date_range("2016-12-31", periods=128, freq="D", tz="Europe/Berlin")
modin_series = pd.Series(data)
pandas_series = pandas.Series(data)
df_equals(modin_series.dt.date, pandas_series.dt.date)
df_equals(modin_series.dt.time, pandas_series.dt.time)
df_equals(modin_series.dt.timetz, pandas_series.dt.timetz)
df_equals(modin_series.dt.year, pandas_series.dt.year)
df_equals(modin_series.dt.month, pandas_series.dt.month)
df_equals(modin_series.dt.day, pandas_series.dt.day)
df_equals(modin_series.dt.hour, pandas_series.dt.hour)
df_equals(modin_series.dt.minute, pandas_series.dt.minute)
df_equals(modin_series.dt.second, pandas_series.dt.second)
df_equals(modin_series.dt.microsecond, pandas_series.dt.microsecond)
df_equals(modin_series.dt.nanosecond, pandas_series.dt.nanosecond)
df_equals(modin_series.dt.week, pandas_series.dt.week)
df_equals(modin_series.dt.weekofyear, pandas_series.dt.weekofyear)
df_equals(modin_series.dt.dayofweek, pandas_series.dt.dayofweek)
df_equals(modin_series.dt.weekday, pandas_series.dt.weekday)
df_equals(modin_series.dt.dayofyear, pandas_series.dt.dayofyear)
df_equals(modin_series.dt.quarter, pandas_series.dt.quarter)
df_equals(modin_series.dt.is_month_start, pandas_series.dt.is_month_start)
df_equals(modin_series.dt.is_month_end, pandas_series.dt.is_month_end)
df_equals(modin_series.dt.is_quarter_start, pandas_series.dt.is_quarter_start)
df_equals(modin_series.dt.is_quarter_end, pandas_series.dt.is_quarter_end)
df_equals(modin_series.dt.is_year_start, pandas_series.dt.is_year_start)
df_equals(modin_series.dt.is_year_end, pandas_series.dt.is_year_end)
df_equals(modin_series.dt.is_leap_year, pandas_series.dt.is_leap_year)
df_equals(modin_series.dt.daysinmonth, pandas_series.dt.daysinmonth)
df_equals(modin_series.dt.days_in_month, pandas_series.dt.days_in_month)
assert modin_series.dt.tz == pandas_series.dt.tz
assert modin_series.dt.freq == pandas_series.dt.freq
df_equals(modin_series.dt.to_period("W"), pandas_series.dt.to_period("W"))
assert_array_equal(
modin_series.dt.to_pydatetime(), pandas_series.dt.to_pydatetime()
)
df_equals(
modin_series.dt.tz_localize(None),
pandas_series.dt.tz_localize(None),
)
df_equals(
modin_series.dt.tz_convert(tz="Europe/Berlin"),
pandas_series.dt.tz_convert(tz="Europe/Berlin"),
)
df_equals(modin_series.dt.normalize(), pandas_series.dt.normalize())
df_equals(
modin_series.dt.strftime("%B %d, %Y, %r"),
pandas_series.dt.strftime("%B %d, %Y, %r"),
)
df_equals(modin_series.dt.round("H"), pandas_series.dt.round("H"))
df_equals(modin_series.dt.floor("H"), pandas_series.dt.floor("H"))
df_equals(modin_series.dt.ceil("H"), pandas_series.dt.ceil("H"))
df_equals(modin_series.dt.month_name(), pandas_series.dt.month_name())
df_equals(modin_series.dt.day_name(), pandas_series.dt.day_name())
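    # Timedelta-specific .dt accessors.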
modin_series = pd.Series(pd.to_timedelta(np.arange(128), unit="d"))
pandas_series = pandas.Series(pandas.to_timedelta(np.arange(128), unit="d"))
assert_array_equal(
modin_series.dt.to_pytimedelta(), pandas_series.dt.to_pytimedelta()
)
df_equals(modin_series.dt.total_seconds(), pandas_series.dt.total_seconds())
df_equals(modin_series.dt.days, pandas_series.dt.days)
df_equals(modin_series.dt.seconds, pandas_series.dt.seconds)
df_equals(modin_series.dt.microseconds, pandas_series.dt.microseconds)
df_equals(modin_series.dt.nanoseconds, pandas_series.dt.nanoseconds)
df_equals(modin_series.dt.components, pandas_series.dt.components)
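    # Period-specific .dt accessors.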
data_per = pd.date_range("1/1/2012", periods=128, freq="M")
pandas_series = pandas.Series(data_per, index=data_per).dt.to_period()
modin_series = pd.Series(data_per, index=data_per).dt.to_period()
df_equals(modin_series.dt.qyear, pandas_series.dt.qyear)
df_equals(modin_series.dt.start_time, pandas_series.dt.start_time)
df_equals(modin_series.dt.end_time, pandas_series.dt.end_time)
df_equals(modin_series.dt.to_timestamp(), pandas_series.dt.to_timestamp())
@pytest.mark.parametrize(
"data", test_data_with_duplicates_values, ids=test_data_with_duplicates_keys
)
@pytest.mark.parametrize(
"keep", ["last", "first", False], ids=["last", "first", "False"]
)
def test_duplicated(data, keep):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.duplicated(keep=keep)
df_equals(modin_result, pandas_series.duplicated(keep=keep))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_empty(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.empty == pandas_series.empty
def test_empty_series():
modin_series = pd.Series()
assert modin_series.empty
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_eq(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "eq")
def test_equals():
series_data = [2.9, 3, 3, 3]
modin_df1 = pd.Series(series_data)
modin_df2 = pd.Series(series_data)
assert modin_df1.equals(modin_df2)
assert modin_df1.equals(pd.Series(modin_df1))
df_equals(modin_df1, modin_df2)
df_equals(modin_df1, pd.Series(modin_df1))
series_data = [2, 3, 5, 1]
modin_df3 = pd.Series(series_data, index=list("abcd"))
assert not modin_df1.equals(modin_df3)
with pytest.raises(AssertionError):
df_equals(modin_df3, modin_df1)
with pytest.raises(AssertionError):
df_equals(modin_df3, modin_df2)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_ewm(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.ewm(halflife=6)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_expanding(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.expanding()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_factorize(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.factorize()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_ffill(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.ffill(), pandas_series.ffill())
# inplace
modin_series_cp = modin_series.copy()
pandas_series_cp = pandas_series.copy()
modin_series_cp.ffill(inplace=True)
pandas_series_cp.ffill(inplace=True)
df_equals(modin_series_cp, pandas_series_cp)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("reindex", [None, 2, -2])
@pytest.mark.parametrize("limit", [None, 1, 2, 0.5, -1, -2, 1.5])
def test_fillna(data, reindex, limit):
modin_series, pandas_series = create_test_series(data)
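    # Build a shuffled Series of index labels (plus its dict form) to use as
    # per-label replacement values in the fillna calls below.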
index = pandas_series.index
pandas_replace_series = index.to_series().sample(frac=1)
modin_replace_series = pd.Series(pandas_replace_series)
replace_dict = pandas_replace_series.to_dict()
if reindex is not None:
if reindex > 0:
pandas_series = pandas_series[:reindex].reindex(index)
modin_series = pd.Series(pandas_series)
else:
pandas_series = pandas_series[reindex:].reindex(index)
            # Because of bug #3178 the Modin Series has to be created from the
            # pandas Series instead of performing the same slice and reindex operations.
modin_series = pd.Series(pandas_series)
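    # Normalize the parametrized limit: a float means a fraction of the series
    # length and a negative value counts back from the end.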
if isinstance(limit, float):
limit = int(len(modin_series) * limit)
if limit is not None and limit < 0:
limit = len(modin_series) + limit
df_equals(modin_series.fillna(0, limit=limit), pandas_series.fillna(0, limit=limit))
df_equals(
modin_series.fillna(method="bfill", limit=limit),
pandas_series.fillna(method="bfill", limit=limit),
)
df_equals(
modin_series.fillna(method="ffill", limit=limit),
pandas_series.fillna(method="ffill", limit=limit),
)
df_equals(
modin_series.fillna(modin_replace_series, limit=limit),
pandas_series.fillna(pandas_replace_series, limit=limit),
)
df_equals(
modin_series.fillna(replace_dict, limit=limit),
pandas_series.fillna(replace_dict, limit=limit),
)
@pytest.mark.xfail(reason="Using pandas Series.")
def test_filter():
modin_series = create_test_series()
with pytest.raises(NotImplementedError):
modin_series.filter(None, None, None)
def test_first():
i = pd.date_range("2010-04-09", periods=400, freq="2D")
modin_series = pd.Series(list(range(400)), index=i)
pandas_series = pandas.Series(list(range(400)), index=i)
df_equals(modin_series.first("3D"), pandas_series.first("3D"))
df_equals(modin_series.first("20D"), pandas_series.first("20D"))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_first_valid_index(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.first_valid_index(), pandas_series.first_valid_index())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_floordiv(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "floordiv")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_ge(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "ge")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_get(data):
modin_series, pandas_series = create_test_series(data)
for key in modin_series.keys():
df_equals(modin_series.get(key), pandas_series.get(key))
df_equals(
modin_series.get("NO_EXIST", "DEFAULT"),
pandas_series.get("NO_EXIST", "DEFAULT"),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_gt(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "gt")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_hasnans(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.hasnans == pandas_series.hasnans
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("n", int_arg_values, ids=arg_keys("n", int_arg_keys))
def test_head(data, n):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.head(n), pandas_series.head(n))
df_equals(
modin_series.head(len(modin_series)), pandas_series.head(len(pandas_series))
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_hist(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.hist(None)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_iat(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.iat[0], pandas_series.iat[0])
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_idxmax(data, skipna):
modin_series, pandas_series = create_test_series(data)
pandas_result = pandas_series.idxmax(skipna=skipna)
modin_result = modin_series.idxmax(skipna=skipna)
df_equals(modin_result, pandas_result)
pandas_result = pandas_series.T.idxmax(skipna=skipna)
modin_result = modin_series.T.idxmax(skipna=skipna)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_idxmin(data, skipna):
modin_series, pandas_series = create_test_series(data)
pandas_result = pandas_series.idxmin(skipna=skipna)
modin_result = modin_series.idxmin(skipna=skipna)
df_equals(modin_result, pandas_result)
pandas_result = pandas_series.T.idxmin(skipna=skipna)
modin_result = modin_series.T.idxmin(skipna=skipna)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_iloc(request, data):
modin_series, pandas_series = create_test_series(data)
if not name_contains(request.node.name, ["empty_data"]):
# Scalar
np.testing.assert_equal(modin_series.iloc[0], pandas_series.iloc[0])
# Series
df_equals(modin_series.iloc[1:], pandas_series.iloc[1:])
df_equals(modin_series.iloc[1:2], pandas_series.iloc[1:2])
df_equals(modin_series.iloc[[1, 2]], pandas_series.iloc[[1, 2]])
# Write Item
modin_series.iloc[[1, 2]] = 42
pandas_series.iloc[[1, 2]] = 42
df_equals(modin_series, pandas_series)
with pytest.raises(IndexError):
modin_series.iloc[1:, 1]
else:
with pytest.raises(IndexError):
modin_series.iloc[0]
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_index(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.index, pandas_series.index)
with pytest.raises(ValueError):
modin_series.index = list(modin_series.index) + [999]
modin_series.index = modin_series.index.map(str)
pandas_series.index = pandas_series.index.map(str)
df_equals(modin_series.index, pandas_series.index)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_interpolate(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.interpolate()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_is_monotonic(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.is_monotonic == pandas_series.is_monotonic
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_is_monotonic_decreasing(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.is_monotonic_decreasing == pandas_series.is_monotonic_decreasing
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_is_monotonic_increasing(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.is_monotonic_increasing == pandas_series.is_monotonic_increasing
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_is_unique(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.is_unique == pandas_series.is_unique
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_isin(data):
modin_series, pandas_series = create_test_series(data)
val = [1, 2, 3, 4]
pandas_result = pandas_series.isin(val)
modin_result = modin_series.isin(val)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_isnull(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.isnull(), pandas_series.isnull())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_items(data):
modin_series, pandas_series = create_test_series(data)
modin_items = modin_series.items()
pandas_items = pandas_series.items()
for modin_item, pandas_item in zip(modin_items, pandas_items):
modin_index, modin_scalar = modin_item
pandas_index, pandas_scalar = pandas_item
df_equals(modin_scalar, pandas_scalar)
assert pandas_index == modin_index
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_iteritems(data):
modin_series, pandas_series = create_test_series(data)
modin_items = modin_series.iteritems()
pandas_items = pandas_series.iteritems()
for modin_item, pandas_item in zip(modin_items, pandas_items):
modin_index, modin_scalar = modin_item
pandas_index, pandas_scalar = pandas_item
df_equals(modin_scalar, pandas_scalar)
assert pandas_index == modin_index
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_keys(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.keys(), pandas_series.keys())
def test_kurtosis_alias():
    # This is an optimization check. If it fails, Series.kurt should be tested
    # explicitly in `test_kurt_kurtosis` and `test_kurt_kurtosis_level`.
assert pd.Series.kurt == pd.Series.kurtosis
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize("skipna", bool_arg_values, ids=bool_arg_keys)
def test_kurtosis(axis, skipna):
eval_general(
*create_test_series(test_data["float_nan_data"]),
lambda df: df.kurtosis(axis=axis, skipna=skipna),
)
@pytest.mark.parametrize("axis", ["rows", "columns"])
@pytest.mark.parametrize("numeric_only", [True, False, None])
def test_kurtosis_numeric_only(axis, numeric_only):
eval_general(
*create_test_series(test_data_diff_dtype),
lambda df: df.kurtosis(axis=axis, numeric_only=numeric_only),
)
@pytest.mark.parametrize("level", [-1, 0, 1])
def test_kurtosis_level(level):
data = test_data["int_data"]
modin_s, pandas_s = create_test_series(data)
index = generate_multiindex(len(data.keys()))
modin_s.columns = index
pandas_s.columns = index
eval_general(
modin_s,
pandas_s,
lambda s: s.kurtosis(axis=1, level=level),
)
def test_last():
modin_index = pd.date_range("2010-04-09", periods=400, freq="2D")
pandas_index = pandas.date_range("2010-04-09", periods=400, freq="2D")
modin_series = pd.Series(list(range(400)), index=modin_index)
pandas_series = pandas.Series(list(range(400)), index=pandas_index)
df_equals(modin_series.last("3D"), pandas_series.last("3D"))
df_equals(modin_series.last("20D"), pandas_series.last("20D"))
@pytest.mark.parametrize("func", ["all", "any", "mad", "count"])
def test_index_order(func):
# see #1708 and #1869 for details
s_modin, s_pandas = create_test_series(test_data["float_nan_data"])
rows_number = len(s_modin.index)
level_0 = np.random.choice([x for x in range(10)], rows_number)
level_1 = np.random.choice([x for x in range(10)], rows_number)
index = pandas.MultiIndex.from_arrays([level_0, level_1])
s_modin.index = index
s_pandas.index = index
df_equals(
getattr(s_modin, func)(level=0).index,
getattr(s_pandas, func)(level=0).index,
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_last_valid_index(data):
modin_series, pandas_series = create_test_series(data)
    assert modin_series.last_valid_index() == pandas_series.last_valid_index()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_le(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "le")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_loc(data):
modin_series, pandas_series = create_test_series(data)
for v in modin_series.index:
df_equals(modin_series.loc[v], pandas_series.loc[v])
df_equals(modin_series.loc[v:], pandas_series.loc[v:])
    indices = [i % 3 == 0 for i in range(len(modin_series.index))]
modin_result = modin_series.loc[indices]
pandas_result = pandas_series.loc[indices]
df_equals(modin_result, pandas_result)
# From issue #1988
index = pd.MultiIndex.from_product([np.arange(10), np.arange(10)], names=["f", "s"])
data = np.arange(100)
modin_series = pd.Series(data, index=index).sort_index()
pandas_series = pandas.Series(data, index=index).sort_index()
modin_result = modin_series.loc[
(slice(None), 1),
]
pandas_result = pandas_series.loc[
(slice(None), 1),
]
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_lt(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "lt")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("axis", [None, 0])
@pytest.mark.parametrize("skipna", [None, True, False])
@pytest.mark.parametrize("level", [0, -1, None])
def test_mad(level, data, axis, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.mad(axis=axis, skipna=skipna, level=level),
pandas_series.mad(axis=axis, skipna=skipna, level=level),
)
@pytest.mark.parametrize("na_values", ["ignore", None], ids=["na_ignore", "na_none"])
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_map(data, na_values):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.map(str, na_action=na_values),
pandas_series.map(str, na_action=na_values),
)
mapper = {i: str(i) for i in range(100)}
df_equals(
modin_series.map(mapper, na_action=na_values),
pandas_series.map(mapper, na_action=na_values),
)
# Return list objects
modin_series_lists = modin_series.map(lambda s: [s, s, s])
pandas_series_lists = pandas_series.map(lambda s: [s, s, s])
df_equals(modin_series_lists, pandas_series_lists)
# Index into list objects
df_equals(
modin_series_lists.map(lambda l: l[0]), pandas_series_lists.map(lambda l: l[0])
)
def test_mask():
modin_series = pd.Series(np.arange(10))
m = modin_series % 3 == 0
with pytest.warns(UserWarning):
try:
modin_series.mask(~m, -modin_series)
except ValueError:
pass
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_max(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.max(skipna=skipna), pandas_series.max(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_mean(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.mean(skipna=skipna), pandas_series.mean(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_median(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.median(skipna=skipna), pandas_series.median(skipna=skipna))
@pytest.mark.parametrize(
"method", ["median", "skew", "std", "sum", "var", "prod", "sem"]
)
def test_median_skew_std_sum_var_prod_sem_1953(method):
# See #1953 for details
data = [3, 3, 3, 3, 3, 3, 3, 3, 3]
arrays = [
["1", "1", "1", "2", "2", "2", "3", "3", "3"],
["1", "2", "3", "4", "5", "6", "7", "8", "9"],
]
modin_s = pd.Series(data, index=arrays)
pandas_s = pandas.Series(data, index=arrays)
eval_general(modin_s, pandas_s, lambda s: getattr(s, method)(level=0))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("index", [True, False], ids=["True", "False"])
def test_memory_usage(data, index):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.memory_usage(index=index), pandas_series.memory_usage(index=index)
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_min(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.min(skipna=skipna), pandas_series.min(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_mod(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "mod")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_mode(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.mode(), pandas_series.mode())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_mul(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "mul")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_multiply(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "multiply")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_name(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.name == pandas_series.name
modin_series.name = pandas_series.name = "New_name"
assert modin_series.name == pandas_series.name
assert modin_series._query_compiler.columns == ["New_name"]
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_nbytes(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.nbytes == pandas_series.nbytes
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_ndim(data):
modin_series, _ = create_test_series(data) # noqa: F841
assert modin_series.ndim == 1
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_ne(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "ne")
@pytest.mark.xfail(reason="Using pandas Series.")
def test_nlargest():
modin_series = create_test_series()
with pytest.raises(NotImplementedError):
modin_series.nlargest(None)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_notnull(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.notnull(), pandas_series.notnull())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_nsmallest(data):
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.nsmallest(n=5, keep="first"),
pandas_series.nsmallest(n=5, keep="first"),
)
df_equals(
modin_series.nsmallest(n=10, keep="first"),
pandas_series.nsmallest(n=10, keep="first"),
)
df_equals(
modin_series.nsmallest(n=10, keep="last"),
pandas_series.nsmallest(n=10, keep="last"),
)
df_equals(modin_series.nsmallest(keep="all"), pandas_series.nsmallest(keep="all"))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("dropna", [True, False], ids=["True", "False"])
def test_nunique(data, dropna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.nunique(dropna=dropna), pandas_series.nunique(dropna=dropna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_pct_change(data):
modin_series, pandas_series = create_test_series(data)
with pytest.warns(UserWarning):
modin_series.pct_change()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_pipe(data):
modin_series, pandas_series = create_test_series(data)
n = len(modin_series.index)
a, b, c = 2 % n, 0, 3 % n
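    # Helpers chained through ``pipe``: drop NaNs, double the series ``arg1``
    # times via append, then drop the rows at positions ``arg2`` and ``arg3``.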
def h(x):
return x.dropna()
def g(x, arg1=0):
for _ in range(arg1):
x = x.append(x)
return x
def f(x, arg2=0, arg3=0):
return x.drop(x.index[[arg2, arg3]])
df_equals(
f(g(h(modin_series), arg1=a), arg2=b, arg3=c),
(modin_series.pipe(h).pipe(g, arg1=a).pipe(f, arg2=b, arg3=c)),
)
df_equals(
(modin_series.pipe(h).pipe(g, arg1=a).pipe(f, arg2=b, arg3=c)),
(pandas_series.pipe(h).pipe(g, arg1=a).pipe(f, arg2=b, arg3=c)),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_plot(request, data):
modin_series, pandas_series = create_test_series(data)
if name_contains(request.node.name, numeric_dfs):
# We have to test this way because equality in plots means same object.
zipped_plot_lines = zip(modin_series.plot().lines, pandas_series.plot().lines)
for left, right in zipped_plot_lines:
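            # Matplotlib may return masked arrays; compare their underlying
            # data in that case, otherwise compare plain ndarrays.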
if isinstance(left.get_xdata(), np.ma.core.MaskedArray) and isinstance(
right.get_xdata(), np.ma.core.MaskedArray
):
assert all((left.get_xdata() == right.get_xdata()).data)
else:
assert np.array_equal(left.get_xdata(), right.get_xdata())
if isinstance(left.get_ydata(), np.ma.core.MaskedArray) and isinstance(
right.get_ydata(), np.ma.core.MaskedArray
):
assert all((left.get_ydata() == right.get_ydata()).data)
else:
                assert np.array_equal(left.get_ydata(), right.get_ydata())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_pop(data):
modin_series, pandas_series = create_test_series(data)
for key in modin_series.keys():
df_equals(modin_series.pop(key), pandas_series.pop(key))
df_equals(modin_series, pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_pow(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "pow")
def test_product_alias():
assert pd.Series.prod == pd.Series.product
@pytest.mark.parametrize("axis", [0, 1])
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_prod(axis, skipna):
eval_general(
*create_test_series(test_data["float_nan_data"]),
lambda s: s.prod(axis=axis, skipna=skipna),
)
@pytest.mark.parametrize(
"numeric_only", bool_arg_values, ids=arg_keys("numeric_only", bool_arg_keys)
)
@pytest.mark.parametrize(
"min_count", int_arg_values, ids=arg_keys("min_count", int_arg_keys)
)
def test_prod_specific(min_count, numeric_only):
eval_general(
*create_test_series(test_data_diff_dtype),
lambda df: df.prod(min_count=min_count, numeric_only=numeric_only),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("q", quantiles_values, ids=quantiles_keys)
def test_quantile(request, data, q):
modin_series, pandas_series = create_test_series(data)
if not name_contains(request.node.name, no_numeric_dfs):
df_equals(modin_series.quantile(q), pandas_series.quantile(q))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_radd(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "radd")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"na_option", ["keep", "top", "bottom"], ids=["keep", "top", "bottom"]
)
def test_rank(data, na_option):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.rank(na_option=na_option)
except Exception as e:
with pytest.raises(type(e)):
modin_series.rank(na_option=na_option)
else:
modin_result = modin_series.rank(na_option=na_option)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("order", [None, "C", "F", "A", "K"])
def test_ravel(data, order):
modin_series, pandas_series = create_test_series(data)
np.testing.assert_equal(
modin_series.ravel(order=order), pandas_series.ravel(order=order)
)
@pytest.mark.parametrize(
"data",
[
pandas.Categorical(np.arange(1000), ordered=True),
pandas.Categorical(np.arange(1000), ordered=False),
pandas.Categorical(np.arange(1000), categories=np.arange(500), ordered=True),
pandas.Categorical(np.arange(1000), categories=np.arange(500), ordered=False),
],
)
@pytest.mark.parametrize("order", [None, "C", "F", "A", "K"])
def test_ravel_category(data, order):
modin_series, pandas_series = create_test_series(data)
categories_equals(modin_series.ravel(order=order), pandas_series.ravel(order=order))
@pytest.mark.parametrize(
"data",
[
pandas.Categorical(np.arange(10), ordered=True),
pandas.Categorical(np.arange(10), ordered=False),
pandas.Categorical(np.arange(10), categories=np.arange(5), ordered=True),
pandas.Categorical(np.arange(10), categories=np.arange(5), ordered=False),
],
)
@pytest.mark.parametrize("order", [None, "C", "F", "A", "K"])
def test_ravel_simple_category(data, order):
modin_series, pandas_series = create_test_series(data)
categories_equals(modin_series.ravel(order=order), pandas_series.ravel(order=order))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rdiv(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rdiv")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_reindex(data):
modin_series, pandas_series = create_test_series(data)
pandas_result = pandas_series.reindex(
list(pandas_series.index) + ["_A_NEW_ROW"], fill_value=0
)
modin_result = modin_series.reindex(
list(modin_series.index) + ["_A_NEW_ROW"], fill_value=0
)
df_equals(pandas_result, modin_result)
frame_data = {
"col1": [0, 1, 2, 3],
"col2": [4, 5, 6, 7],
"col3": [8, 9, 10, 11],
"col4": [12, 13, 14, 15],
"col5": [0, 0, 0, 0],
}
pandas_df = pandas.DataFrame(frame_data)
modin_df = pd.DataFrame(frame_data)
for col in pandas_df.columns:
modin_series = modin_df[col]
pandas_series = pandas_df[col]
df_equals(
modin_series.reindex([0, 3, 2, 1]), pandas_series.reindex([0, 3, 2, 1])
)
df_equals(modin_series.reindex([0, 6, 2]), pandas_series.reindex([0, 6, 2]))
df_equals(
modin_series.reindex(index=[0, 1, 5]),
pandas_series.reindex(index=[0, 1, 5]),
)
# MultiIndex
modin_series, pandas_series = create_test_series(data)
modin_series.index, pandas_series.index = [
generate_multiindex(len(pandas_series))
] * 2
pandas_result = pandas_series.reindex(list(reversed(pandas_series.index)))
modin_result = modin_series.reindex(list(reversed(modin_series.index)))
df_equals(pandas_result, modin_result)
def test_reindex_like():
df1 = pd.DataFrame(
[
[24.3, 75.7, "high"],
[31, 87.8, "high"],
[22, 71.6, "medium"],
[35, 95, "medium"],
],
columns=["temp_celsius", "temp_fahrenheit", "windspeed"],
index=pd.date_range(start="2014-02-12", end="2014-02-15", freq="D"),
)
df2 = pd.DataFrame(
[[28, "low"], [30, "low"], [35.1, "medium"]],
columns=["temp_celsius", "windspeed"],
index=pd.DatetimeIndex(["2014-02-12", "2014-02-13", "2014-02-15"]),
)
series1 = df1["windspeed"]
series2 = df2["windspeed"]
with pytest.warns(UserWarning):
series2.reindex_like(series1)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rename(data):
modin_series, pandas_series = create_test_series(data)
new_name = "NEW_NAME"
df_equals(modin_series.rename(new_name), pandas_series.rename(new_name))
modin_series_cp = modin_series.copy()
pandas_series_cp = pandas_series.copy()
modin_series_cp.rename(new_name, inplace=True)
pandas_series_cp.rename(new_name, inplace=True)
df_equals(modin_series_cp, pandas_series_cp)
modin_result = modin_series.rename("{}__".format)
pandas_result = pandas_series.rename("{}__".format)
df_equals(modin_result, pandas_result)
def test_reorder_levels():
data = np.random.randint(1, 100, 12)
modin_series = pd.Series(
data,
index=pd.MultiIndex.from_tuples(
[
(num, letter, color)
for num in range(1, 3)
for letter in ["a", "b", "c"]
for color in ["Red", "Green"]
],
names=["Number", "Letter", "Color"],
),
)
pandas_series = pandas.Series(
data,
index=pandas.MultiIndex.from_tuples(
[
(num, letter, color)
for num in range(1, 3)
for letter in ["a", "b", "c"]
for color in ["Red", "Green"]
],
names=["Number", "Letter", "Color"],
),
)
modin_result = modin_series.reorder_levels(["Letter", "Color", "Number"])
pandas_result = pandas_series.reorder_levels(["Letter", "Color", "Number"])
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"repeats", [0, 2, 3, 4], ids=["repeats_{}".format(i) for i in [0, 2, 3, 4]]
)
def test_repeat(data, repeats):
eval_general(pd.Series(data), pandas.Series(data), lambda df: df.repeat(repeats))
@pytest.mark.parametrize("data", [np.arange(256)])
@pytest.mark.parametrize(
"repeats",
[
[0],
[2],
[3],
[4],
np.arange(256),
[0] * 64 + [2] * 64 + [3] * 32 + [4] * 32 + [5] * 64,
[2] * 257,
[2] * 128,
],
)
def test_repeat_lists(data, repeats):
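    # Some of the repeats lists deliberately do not match the data length so
    # that eval_general can verify Modin raises the same error as pandas.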
eval_general(
pd.Series(data),
pandas.Series(data),
lambda df: df.repeat(repeats),
)
def test_replace():
modin_series = pd.Series([0, 1, 2, 3, 4])
pandas_series = pandas.Series([0, 1, 2, 3, 4])
modin_result = modin_series.replace(0, 5)
pandas_result = pandas_series.replace(0, 5)
df_equals(modin_result, pandas_result)
modin_result = modin_series.replace([1, 2], method="bfill")
pandas_result = pandas_series.replace([1, 2], method="bfill")
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("closed", ["left", "right"])
@pytest.mark.parametrize("label", ["right", "left"])
@pytest.mark.parametrize("level", [None, 1])
def test_resample(closed, label, level):
rule = "5T"
freq = "H"
base = 2
index = pandas.date_range("1/1/2000", periods=12, freq=freq)
pandas_series = pandas.Series(range(12), index=index)
modin_series = pd.Series(range(12), index=index)
if level is not None:
index = pandas.MultiIndex.from_product(
[["a", "b", "c"], pandas.date_range("31/12/2000", periods=4, freq=freq)]
)
pandas_series.index = index
modin_series.index = index
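    # Build identically parametrized resamplers and compare every supported
    # reduction / transformation below.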
pandas_resampler = pandas_series.resample(
rule, closed=closed, label=label, base=base, level=level
)
modin_resampler = modin_series.resample(
rule, closed=closed, label=label, base=base, level=level
)
df_equals(modin_resampler.count(), pandas_resampler.count())
df_equals(modin_resampler.var(0), pandas_resampler.var(0))
df_equals(modin_resampler.sum(), pandas_resampler.sum())
df_equals(modin_resampler.std(), pandas_resampler.std())
df_equals(modin_resampler.sem(), pandas_resampler.sem())
df_equals(modin_resampler.size(), pandas_resampler.size())
df_equals(modin_resampler.prod(), pandas_resampler.prod())
df_equals(modin_resampler.ohlc(), pandas_resampler.ohlc())
df_equals(modin_resampler.min(), pandas_resampler.min())
df_equals(modin_resampler.median(), pandas_resampler.median())
df_equals(modin_resampler.mean(), pandas_resampler.mean())
df_equals(modin_resampler.max(), pandas_resampler.max())
df_equals(modin_resampler.last(), pandas_resampler.last())
df_equals(modin_resampler.first(), pandas_resampler.first())
df_equals(modin_resampler.nunique(), pandas_resampler.nunique())
df_equals(
modin_resampler.pipe(lambda x: x.max() - x.min()),
pandas_resampler.pipe(lambda x: x.max() - x.min()),
)
df_equals(
modin_resampler.transform(lambda x: (x - x.mean()) / x.std()),
pandas_resampler.transform(lambda x: (x - x.mean()) / x.std()),
)
df_equals(
modin_resampler.aggregate("max"),
pandas_resampler.aggregate("max"),
)
df_equals(
modin_resampler.apply("sum"),
pandas_resampler.apply("sum"),
)
df_equals(
modin_resampler.get_group(name=list(modin_resampler.groups)[0]),
pandas_resampler.get_group(name=list(pandas_resampler.groups)[0]),
)
assert pandas_resampler.indices == modin_resampler.indices
assert pandas_resampler.groups == modin_resampler.groups
df_equals(modin_resampler.quantile(), pandas_resampler.quantile())
# Upsampling from level= or on= selection is not supported
if level is None:
df_equals(
modin_resampler.interpolate(),
pandas_resampler.interpolate(),
)
df_equals(modin_resampler.asfreq(), pandas_resampler.asfreq())
df_equals(
modin_resampler.fillna(method="nearest"),
pandas_resampler.fillna(method="nearest"),
)
df_equals(modin_resampler.pad(), pandas_resampler.pad())
df_equals(modin_resampler.nearest(), pandas_resampler.nearest())
df_equals(modin_resampler.bfill(), pandas_resampler.bfill())
df_equals(modin_resampler.backfill(), pandas_resampler.backfill())
df_equals(modin_resampler.ffill(), pandas_resampler.ffill())
df_equals(
modin_resampler.apply(["sum", "mean", "max"]),
pandas_resampler.apply(["sum", "mean", "max"]),
)
df_equals(
modin_resampler.aggregate(["sum", "mean", "max"]),
pandas_resampler.aggregate(["sum", "mean", "max"]),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("drop", [True, False], ids=["True", "False"])
@pytest.mark.parametrize("name", [None, "Custom name"])
@pytest.mark.parametrize("inplace", [True, False])
def test_reset_index(data, drop, name, inplace):
eval_general(
*create_test_series(data),
lambda df, *args, **kwargs: df.reset_index(*args, **kwargs),
drop=drop,
name=name,
inplace=inplace,
__inplace__=inplace,
)
@pytest.mark.xfail(reason="Using pandas Series.")
def test_reshape():
modin_series = create_test_series()
with pytest.raises(NotImplementedError):
modin_series.reshape(None)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rfloordiv(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rfloordiv")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rmod(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rmod")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rmul(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rmul")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_round(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.round(), pandas_series.round())
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rpow(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rpow")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rsub(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rsub")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_rtruediv(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "rtruediv")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_sample(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.sample(frac=0.5, random_state=21019)
except Exception as e:
with pytest.raises(type(e)):
modin_series.sample(frac=0.5, random_state=21019)
else:
modin_result = modin_series.sample(frac=0.5, random_state=21019)
df_equals(pandas_result, modin_result)
try:
pandas_result = pandas_series.sample(n=12, random_state=21019)
except Exception as e:
with pytest.raises(type(e)):
modin_series.sample(n=12, random_state=21019)
else:
modin_result = modin_series.sample(n=12, random_state=21019)
df_equals(pandas_result, modin_result)
with pytest.warns(UserWarning):
df_equals(
modin_series.sample(n=0, random_state=21019),
pandas_series.sample(n=0, random_state=21019),
)
with pytest.raises(ValueError):
modin_series.sample(n=-3)
@pytest.mark.parametrize("single_value_data", [True, False])
@pytest.mark.parametrize("use_multiindex", [True, False])
@pytest.mark.parametrize("sorter", [True, None])
@pytest.mark.parametrize("values_number", [1, 2, 5])
@pytest.mark.parametrize("side", ["left", "right"])
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_searchsorted(
data, side, values_number, sorter, use_multiindex, single_value_data
):
data = data if not single_value_data else data[next(iter(data.keys()))][0]
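    # Without an explicit sorter the series itself must be pre-sorted;
    # otherwise keep it unsorted and pass an argsort-based sorter instead.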
if not sorter:
modin_series, pandas_series = create_test_series(vals=data, sort=True)
else:
modin_series, pandas_series = create_test_series(vals=data)
sorter = np.argsort(list(modin_series))
if use_multiindex:
rows_number = len(modin_series.index)
level_0_series = random_state.choice([0, 1], rows_number)
level_1_series = random_state.choice([2, 3], rows_number)
index_series = pd.MultiIndex.from_arrays(
[level_0_series, level_1_series], names=["first", "second"]
)
modin_series.index = index_series
pandas_series.index = index_series
min_sample = modin_series.min(skipna=True)
max_sample = modin_series.max(skipna=True)
if single_value_data:
values = [data]
else:
values = []
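        # Multi-value probes: a sample of the data plus uniform draws inside,
        # above, and below the observed value range.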
values.append(pandas_series.sample(n=values_number, random_state=random_state))
values.append(
random_state.uniform(low=min_sample, high=max_sample, size=values_number)
)
values.append(
random_state.uniform(
low=max_sample, high=2 * max_sample, size=values_number
)
)
values.append(
random_state.uniform(
low=min_sample - max_sample, high=min_sample, size=values_number
)
)
pure_float = random_state.uniform(float(min_sample), float(max_sample))
pure_int = int(pure_float)
values.append(pure_float)
values.append(pure_int)
test_cases = [
modin_series.searchsorted(value=value, side=side, sorter=sorter)
== pandas_series.searchsorted(value=value, side=side, sorter=sorter)
for value in values
]
test_cases = [
case.all() if not isinstance(case, bool) else case for case in test_cases
]
for case in test_cases:
assert case
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
@pytest.mark.parametrize("ddof", int_arg_values, ids=arg_keys("ddof", int_arg_keys))
def test_sem_float_nan_only(skipna, ddof):
eval_general(
*create_test_series(test_data["float_nan_data"]),
lambda df: df.sem(skipna=skipna, ddof=ddof),
)
@pytest.mark.parametrize("ddof", int_arg_values, ids=arg_keys("ddof", int_arg_keys))
def test_sem_int_only(ddof):
eval_general(
*create_test_series(test_data["int_data"]),
lambda df: df.sem(ddof=ddof),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_set_axis(data):
modin_series, _ = create_test_series(data) # noqa: F841
modin_series.set_axis(labels=["{}_{}".format(i, i + 1) for i in modin_series.index])
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_shape(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.shape == pandas_series.shape
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_size(data):
modin_series, pandas_series = create_test_series(data)
assert modin_series.size == pandas_series.size
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
def test_skew(data, skipna):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.skew(skipna=skipna), pandas_series.skew(skipna=skipna))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("index", ["default", "ndarray", "has_duplicates"])
@pytest.mark.parametrize("periods", [0, 1, -1, 10, -10, 1000000000, -1000000000])
def test_shift_slice_shift(data, index, periods):
modin_series, pandas_series = create_test_series(data)
if index == "ndarray":
data_column_length = len(data[next(iter(data))])
modin_series.index = pandas_series.index = np.arange(2, data_column_length + 2)
elif index == "has_duplicates":
modin_series.index = pandas_series.index = list(modin_series.index[:-3]) + [
0,
1,
2,
]
df_equals(
modin_series.shift(periods=periods),
pandas_series.shift(periods=periods),
)
df_equals(
modin_series.shift(periods=periods, fill_value=777),
pandas_series.shift(periods=periods, fill_value=777),
)
eval_general(modin_series, pandas_series, lambda df: df.shift(axis=1))
df_equals(
modin_series.slice_shift(periods=periods),
pandas_series.slice_shift(periods=periods),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"ascending", bool_arg_values, ids=arg_keys("ascending", bool_arg_keys)
)
@pytest.mark.parametrize(
"sort_remaining", bool_arg_values, ids=arg_keys("sort_remaining", bool_arg_keys)
)
@pytest.mark.parametrize("na_position", ["first", "last"], ids=["first", "last"])
def test_sort_index(data, ascending, sort_remaining, na_position):
modin_series, pandas_series = create_test_series(data)
eval_general(
modin_series,
pandas_series,
lambda df: df.sort_index(
ascending=ascending,
sort_remaining=sort_remaining,
na_position=na_position,
),
)
eval_general(
modin_series.copy(),
pandas_series.copy(),
lambda df: df.sort_index(
ascending=ascending,
sort_remaining=sort_remaining,
na_position=na_position,
inplace=True,
),
__inplace__=True,
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("ascending", [True, False], ids=["True", "False"])
@pytest.mark.parametrize("na_position", ["first", "last"], ids=["first", "last"])
def test_sort_values(data, ascending, na_position):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.sort_values(
ascending=ascending, na_position=na_position
)
pandas_result = pandas_series.sort_values(
ascending=ascending, na_position=na_position
)
    # Note: the following applies to `ascending=False` only.
    # For some reason, the underlying sorting algorithm indexes Series and DataFrames
    # differently: the order of values is the same, but the index values are shuffled.
    # Since we use `DataFrame.sort_values` even for Series, the resulting index can
    # differ from the one `pandas.Series.sort_values` produces. For this reason, we
    # only check that the values are identical, not the index.
if ascending:
df_equals(modin_result, pandas_result)
else:
np.testing.assert_equal(modin_result.values, pandas_result.values)
modin_series_cp = modin_series.copy()
pandas_series_cp = pandas_series.copy()
modin_series_cp.sort_values(
ascending=ascending, na_position=na_position, inplace=True
)
pandas_series_cp.sort_values(
ascending=ascending, na_position=na_position, inplace=True
)
# See above about `ascending=False`
if ascending:
df_equals(modin_series_cp, pandas_series_cp)
else:
np.testing.assert_equal(modin_series_cp.values, pandas_series_cp.values)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_squeeze(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.squeeze(None), pandas_series.squeeze(None))
df_equals(modin_series.squeeze(0), pandas_series.squeeze(0))
with pytest.raises(ValueError):
modin_series.squeeze(1)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
@pytest.mark.parametrize("ddof", int_arg_values, ids=arg_keys("ddof", int_arg_keys))
def test_std(request, data, skipna, ddof):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.std(skipna=skipna, ddof=ddof)
except Exception as e:
with pytest.raises(type(e)):
modin_series.std(skipna=skipna, ddof=ddof)
else:
modin_result = modin_series.std(skipna=skipna, ddof=ddof)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_sub(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "sub")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_subtract(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "subtract")
@pytest.mark.parametrize(
"data",
test_data_values + test_data_small_values,
ids=test_data_keys + test_data_small_keys,
)
@pytest.mark.parametrize("axis", axis_values, ids=axis_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
@pytest.mark.parametrize(
"numeric_only", bool_arg_values, ids=arg_keys("numeric_only", bool_arg_keys)
)
@pytest.mark.parametrize(
"min_count", int_arg_values, ids=arg_keys("min_count", int_arg_keys)
)
def test_sum(data, axis, skipna, numeric_only, min_count):
eval_general(
*create_test_series(data),
lambda df, *args, **kwargs: df.sum(*args, **kwargs),
axis=axis,
skipna=skipna,
numeric_only=numeric_only,
min_count=min_count,
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("axis1", [0, 1, "columns", "index"])
@pytest.mark.parametrize("axis2", [0, 1, "columns", "index"])
def test_swapaxes(data, axis1, axis2):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.swapaxes(axis1, axis2)
except Exception as e:
with pytest.raises(type(e)):
modin_series.swapaxes(axis1, axis2)
else:
modin_result = modin_series.swapaxes(axis1, axis2)
df_equals(modin_result, pandas_result)
def test_swaplevel():
data = np.random.randint(1, 100, 12)
modin_s = pd.Series(
data,
index=pd.MultiIndex.from_tuples(
[
(num, letter, color)
for num in range(1, 3)
for letter in ["a", "b", "c"]
for color in ["Red", "Green"]
],
names=["Number", "Letter", "Color"],
),
)
pandas_s = pandas.Series(
data,
index=pandas.MultiIndex.from_tuples(
[
(num, letter, color)
for num in range(1, 3)
for letter in ["a", "b", "c"]
for color in ["Red", "Green"]
],
names=["Number", "Letter", "Color"],
),
)
df_equals(
modin_s.swaplevel("Number", "Color"), pandas_s.swaplevel("Number", "Color")
)
df_equals(modin_s.swaplevel(), pandas_s.swaplevel())
df_equals(modin_s.swaplevel(1, 0), pandas_s.swaplevel(1, 0))
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("n", int_arg_values, ids=arg_keys("n", int_arg_keys))
def test_tail(data, n):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.tail(n), pandas_series.tail(n))
df_equals(
modin_series.tail(len(modin_series)), pandas_series.tail(len(pandas_series))
)
def test_take():
modin_s = pd.Series(["falcon", "parrot", "lion", "cat"], index=[0, 2, 3, 1])
pandas_s = pandas.Series(["falcon", "parrot", "lion", "cat"], index=[0, 2, 3, 1])
a = modin_s.take([0, 3])
df_equals(a, pandas_s.take([0, 3]))
try:
pandas_s.take([2], axis=1)
except Exception as e:
with pytest.raises(type(e)):
modin_s.take([2], axis=1)
@pytest.mark.parametrize(
"ignore_index", bool_arg_values, ids=arg_keys("ignore_index", bool_arg_keys)
)
def test_explode(ignore_index):
# Some items in this test data are lists that explode() should expand.
data = [[1, 2, 3], "foo", [], [3, 4]]
modin_series, pandas_series = create_test_series(data)
df_equals(
modin_series.explode(ignore_index=ignore_index),
pandas_series.explode(ignore_index=ignore_index),
)
def test_to_period():
idx = pd.date_range("1/1/2012", periods=5, freq="M")
series = pd.Series(np.random.randint(0, 100, size=(len(idx))), index=idx)
with pytest.warns(UserWarning):
series.to_period()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_to_numpy(data):
modin_series, pandas_series = create_test_series(data)
assert_array_equal(modin_series.values, pandas_series.values)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_to_string(request, data):
eval_general(
*create_test_series(data),
lambda df: df.to_string(),
)
def test_to_timestamp():
idx = pd.date_range("1/1/2012", periods=5, freq="M")
series = pd.Series(np.random.randint(0, 100, size=(len(idx))), index=idx)
with pytest.warns(UserWarning):
series.to_period().to_timestamp()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_to_xarray(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.to_xarray()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_tolist(data):
modin_series, _ = create_test_series(data) # noqa: F841
with pytest.warns(UserWarning):
modin_series.tolist()
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_values, ids=agg_func_keys)
def test_transform(data, func):
eval_general(
*create_test_series(data),
lambda df: df.transform(func),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize("func", agg_func_except_values, ids=agg_func_except_keys)
def test_transform_except(data, func):
eval_general(
*create_test_series(data),
lambda df: df.transform(func),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_transpose(data):
modin_series, pandas_series = create_test_series(data)
df_equals(modin_series.transpose(), modin_series)
df_equals(modin_series.transpose(), pandas_series.transpose())
df_equals(modin_series.transpose(), pandas_series)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_truediv(data):
modin_series, pandas_series = create_test_series(data)
inter_df_math_helper(modin_series, pandas_series, "truediv")
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_truncate(data):
modin_series, pandas_series = create_test_series(data)
before = 1
    after = len(modin_series) - 3
df_equals(
modin_series.truncate(before, after), pandas_series.truncate(before, after)
)
before = 1
after = 3
df_equals(
modin_series.truncate(before, after), pandas_series.truncate(before, after)
)
before = None
after = None
df_equals(
modin_series.truncate(before, after), pandas_series.truncate(before, after)
)
def test_tshift():
idx = pd.date_range("1/1/2012", periods=5, freq="M")
data = np.random.randint(0, 100, size=len(idx))
modin_series = pd.Series(data, index=idx)
pandas_series = pandas.Series(data, index=idx)
df_equals(modin_series.tshift(4), pandas_series.tshift(4))
def test_tz_convert():
modin_idx = pd.date_range(
"1/1/2012", periods=400, freq="2D", tz="America/Los_Angeles"
)
pandas_idx = pandas.date_range(
"1/1/2012", periods=400, freq="2D", tz="America/Los_Angeles"
)
data = np.random.randint(0, 100, size=len(modin_idx))
modin_series = pd.Series(data, index=modin_idx)
pandas_series = pandas.Series(data, index=pandas_idx)
modin_result = modin_series.tz_convert("UTC", axis=0)
pandas_result = pandas_series.tz_convert("UTC", axis=0)
df_equals(modin_result, pandas_result)
modin_multi = pd.MultiIndex.from_arrays([modin_idx, range(len(modin_idx))])
pandas_multi = pandas.MultiIndex.from_arrays([pandas_idx, range(len(modin_idx))])
modin_series = pd.Series(data, index=modin_multi)
pandas_series = pandas.Series(data, index=pandas_multi)
df_equals(
modin_series.tz_convert("UTC", axis=0, level=0),
pandas_series.tz_convert("UTC", axis=0, level=0),
)
def test_tz_localize():
idx = pd.date_range("1/1/2012", periods=400, freq="2D")
data = np.random.randint(0, 100, size=len(idx))
modin_series = pd.Series(data, index=idx)
pandas_series = pandas.Series(data, index=idx)
df_equals(
modin_series.tz_localize("America/Los_Angeles"),
pandas_series.tz_localize("America/Los_Angeles"),
)
df_equals(
modin_series.tz_localize("UTC"),
pandas_series.tz_localize("UTC"),
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_unique(data):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.unique()
pandas_result = pandas_series.unique()
assert_array_equal(modin_result, pandas_result)
assert modin_result.shape == pandas_result.shape
modin_result = pd.Series([2, 1, 3, 3], name="A").unique()
pandas_result = pandas.Series([2, 1, 3, 3], name="A").unique()
assert_array_equal(modin_result, pandas_result)
assert modin_result.shape == pandas_result.shape
modin_result = pd.Series([pd.Timestamp("2016-01-01") for _ in range(3)]).unique()
pandas_result = pandas.Series(
[pd.Timestamp("2016-01-01") for _ in range(3)]
).unique()
assert_array_equal(modin_result, pandas_result)
assert modin_result.shape == pandas_result.shape
modin_result = pd.Series(
[pd.Timestamp("2016-01-01", tz="US/Eastern") for _ in range(3)]
).unique()
pandas_result = pandas.Series(
[pd.Timestamp("2016-01-01", tz="US/Eastern") for _ in range(3)]
).unique()
assert_array_equal(modin_result, pandas_result)
assert modin_result.shape == pandas_result.shape
    modin_result = pd.Series(pd.Categorical(list("baabc"))).unique()
    pandas_result = pandas.Series(pd.Categorical(list("baabc"))).unique()
assert_array_equal(modin_result, pandas_result)
assert modin_result.shape == pandas_result.shape
modin_result = pd.Series(
pd.Categorical(list("baabc"), categories=list("abc"), ordered=True)
).unique()
pandas_result = pandas.Series(
pd.Categorical(list("baabc"), categories=list("abc"), ordered=True)
).unique()
assert_array_equal(modin_result, pandas_result)
assert modin_result.shape == pandas_result.shape
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_unstack(data):
modin_series, pandas_series = create_test_series(data)
index = generate_multiindex(len(pandas_series), nlevels=4, is_tree_like=True)
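    # A 4-level tree-like MultiIndex lets unstack() be checked for a single level,
    # a pair of levels, and three levels at once.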
modin_series = pd.Series(data[next(iter(data.keys()))], index=index)
pandas_series = pandas.Series(data[next(iter(data.keys()))], index=index)
df_equals(modin_series.unstack(), pandas_series.unstack())
df_equals(modin_series.unstack(level=0), pandas_series.unstack(level=0))
df_equals(modin_series.unstack(level=[0, 1]), pandas_series.unstack(level=[0, 1]))
df_equals(
modin_series.unstack(level=[0, 1, 2]), pandas_series.unstack(level=[0, 1, 2])
)
@pytest.mark.parametrize(
"data, other_data",
[([1, 2, 3], [4, 5, 6]), ([1, 2, 3], [4, 5, 6, 7, 8]), ([1, 2, 3], [4, np.nan, 6])],
)
def test_update(data, other_data):
modin_series, pandas_series = pd.Series(data), pandas.Series(data)
modin_series.update(pd.Series(other_data))
pandas_series.update(pandas.Series(other_data))
df_equals(modin_series, pandas_series)
@pytest.mark.parametrize("sort", bool_arg_values, ids=bool_arg_keys)
@pytest.mark.parametrize("normalize", bool_arg_values, ids=bool_arg_keys)
@pytest.mark.parametrize("bins", [3, None])
@pytest.mark.parametrize("dropna", bool_arg_values, ids=bool_arg_keys)
@pytest.mark.parametrize("ascending", bool_arg_values, ids=bool_arg_keys)
def test_value_counts(sort, normalize, bins, dropna, ascending):
def sort_sensitive_comparator(df1, df2):
# We sort indices for Modin and pandas result because of issue #1650
return (
df_equals_with_non_stable_indices(df1, df2)
if sort
else df_equals(df1.sort_index(), df2.sort_index())
)
eval_general(
*create_test_series(test_data_values[0]),
lambda df: df.value_counts(
sort=sort,
bins=bins,
normalize=normalize,
dropna=dropna,
ascending=ascending,
),
comparator=sort_sensitive_comparator,
# Modin's `sort_values` does not validate `ascending` type and so
        # does not raise an exception when it isn't a bool, whereas pandas does;
        # visit modin-issue#3388 for more info.
check_exception_type=None if sort and ascending is None else True,
)
# from issue #2365
arr = np.random.rand(2 ** 6)
arr[::10] = np.nan
eval_general(
*create_test_series(arr),
lambda df: df.value_counts(
sort=sort,
bins=bins,
normalize=normalize,
dropna=dropna,
ascending=ascending,
),
comparator=sort_sensitive_comparator,
# Modin's `sort_values` does not validate `ascending` type and so
        # does not raise an exception when it isn't a bool, whereas pandas does;
        # visit modin-issue#3388 for more info.
check_exception_type=None if sort and ascending is None else True,
)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
def test_values(data):
modin_series, pandas_series = create_test_series(data)
np.testing.assert_equal(modin_series.values, pandas_series.values)
@pytest.mark.parametrize("data", test_data_values, ids=test_data_keys)
@pytest.mark.parametrize(
"skipna", bool_arg_values, ids=arg_keys("skipna", bool_arg_keys)
)
@pytest.mark.parametrize("ddof", int_arg_values, ids=arg_keys("ddof", int_arg_keys))
def test_var(data, skipna, ddof):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.var(skipna=skipna, ddof=ddof)
except Exception:
with pytest.raises(TypeError):
modin_series.var(skipna=skipna, ddof=ddof)
else:
modin_result = modin_series.var(skipna=skipna, ddof=ddof)
df_equals(modin_result, pandas_result)
def test_view():
modin_series = pd.Series([-2, -1, 0, 1, 2], dtype="int8")
pandas_series = pandas.Series([-2, -1, 0, 1, 2], dtype="int8")
modin_result = modin_series.view(dtype="uint8")
pandas_result = pandas_series.view(dtype="uint8")
df_equals(modin_result, pandas_result)
modin_series = pd.Series([-20, -10, 0, 10, 20], dtype="int32")
pandas_series = pandas.Series([-20, -10, 0, 10, 20], dtype="int32")
modin_result = modin_series.view(dtype="float32")
pandas_result = pandas_series.view(dtype="float32")
df_equals(modin_result, pandas_result)
modin_series = pd.Series([-200, -100, 0, 100, 200], dtype="int64")
pandas_series = pandas.Series([-200, -100, 0, 100, 200], dtype="int64")
modin_result = modin_series.view(dtype="float64")
pandas_result = pandas_series.view(dtype="float64")
df_equals(modin_result, pandas_result)
def test_where():
frame_data = random_state.randn(100)
pandas_series = pandas.Series(frame_data)
modin_series = pd.Series(frame_data)
pandas_cond_series = pandas_series % 5 < 2
modin_cond_series = modin_series % 5 < 2
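    # Series.where keeps values where the condition is True and replaces the rest
    # with the second argument; compare Modin with pandas for each calling pattern.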
pandas_result = pandas_series.where(pandas_cond_series, -pandas_series)
modin_result = modin_series.where(modin_cond_series, -modin_series)
assert all((to_pandas(modin_result) == pandas_result))
other = pandas.Series(random_state.randn(100))
pandas_result = pandas_series.where(pandas_cond_series, other, axis=0)
modin_result = modin_series.where(modin_cond_series, other, axis=0)
assert all(to_pandas(modin_result) == pandas_result)
pandas_result = pandas_series.where(pandas_series < 2, True)
modin_result = modin_series.where(modin_series < 2, True)
assert all(to_pandas(modin_result) == pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize(
"key",
    [0, slice(0, len(test_string_data_values) // 2)],
ids=["single_key", "slice_key"],
)
def test_str___getitem__(data, key):
modin_series, pandas_series = create_test_series(data)
modin_result = modin_series.str[key]
pandas_result = pandas_series.str[key]
df_equals(modin_result, pandas_result)
# Test str operations
def test_str_cat():
data = ["abC|DeF,Hik", "gSaf,qWer|Gre", "asd3,4sad|", np.NaN]
modin_series, pandas_series = create_test_series(data)
others = data
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
modin_series.str.cat(others)
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
modin_series.str.cat(None)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("n", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("expand", bool_arg_values, ids=bool_arg_keys)
def test_str_split(data, pat, n, expand):
# Empty pattern not supported on Python 3.7+
if sys.version_info[0] == 3 and sys.version_info[1] >= 7 and pat == "":
return
modin_series, pandas_series = create_test_series(data)
if n >= -1:
if expand and pat:
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
modin_series.str.split(pat, n=n, expand=expand)
elif not expand:
try:
pandas_result = pandas_series.str.split(pat, n=n, expand=expand)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.split(pat, n=n, expand=expand)
else:
modin_result = modin_series.str.split(pat, n=n, expand=expand)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("n", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("expand", bool_arg_values, ids=bool_arg_keys)
def test_str_rsplit(data, pat, n, expand):
modin_series, pandas_series = create_test_series(data)
if n >= -1:
if expand and pat:
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
modin_series.str.rsplit(pat, n=n, expand=expand)
elif not expand:
try:
pandas_result = pandas_series.str.rsplit(pat, n=n, expand=expand)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.rsplit(pat, n=n, expand=expand)
else:
modin_result = modin_series.str.rsplit(pat, n=n, expand=expand)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("i", int_arg_values, ids=int_arg_keys)
def test_str_get(data, i):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.get(i)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.get(i)
else:
modin_result = modin_series.str.get(i)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_string_list_data_values, ids=test_string_list_data_keys
)
@pytest.mark.parametrize("sep", string_sep_values, ids=string_sep_keys)
def test_str_join(data, sep):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.join(sep)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.join(sep)
else:
modin_result = modin_series.str.join(sep)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_string_list_data_values, ids=test_string_list_data_keys
)
@pytest.mark.parametrize("sep", string_sep_values, ids=string_sep_keys)
def test_str_get_dummies(data, sep):
modin_series, pandas_series = create_test_series(data)
if sep:
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
modin_series.str.get_dummies(sep)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("case", bool_arg_values, ids=bool_arg_keys)
@pytest.mark.parametrize("na", string_na_rep_values, ids=string_na_rep_keys)
def test_str_contains(data, pat, case, na):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.contains(pat, case=case, na=na, regex=False)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.contains(pat, case=case, na=na, regex=False)
else:
modin_result = modin_series.str.contains(pat, case=case, na=na, regex=False)
df_equals(modin_result, pandas_result)
# Test regex
pat = ",|b"
try:
pandas_result = pandas_series.str.contains(pat, case=case, na=na, regex=True)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.contains(pat, case=case, na=na, regex=True)
else:
modin_result = modin_series.str.contains(pat, case=case, na=na, regex=True)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("repl", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("n", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("case", bool_arg_values, ids=bool_arg_keys)
def test_str_replace(data, pat, repl, n, case):
eval_general(
*create_test_series(data),
lambda series: series.str.replace(pat, repl, n=n, case=case, regex=False),
)
# Test regex
eval_general(
*create_test_series(data),
lambda series: series.str.replace(
pat=",|b", repl=repl, n=n, case=case, regex=True
),
)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("repeats", int_arg_values, ids=int_arg_keys)
def test_str_repeat(data, repeats):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.repeat(repeats)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.repeat(repeats)
else:
modin_result = modin_series.str.repeat(repeats)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("width", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize(
"side", ["left", "right", "both"], ids=["left", "right", "both"]
)
@pytest.mark.parametrize("fillchar", string_sep_values, ids=string_sep_keys)
def test_str_pad(data, width, side, fillchar):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.pad(width, side=side, fillchar=fillchar)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.pad(width, side=side, fillchar=fillchar)
else:
modin_result = modin_series.str.pad(width, side=side, fillchar=fillchar)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("width", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("fillchar", string_sep_values, ids=string_sep_keys)
def test_str_center(data, width, fillchar):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.center(width, fillchar=fillchar)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.center(width, fillchar=fillchar)
else:
modin_result = modin_series.str.center(width, fillchar=fillchar)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("width", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("fillchar", string_sep_values, ids=string_sep_keys)
def test_str_ljust(data, width, fillchar):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.ljust(width, fillchar=fillchar)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.ljust(width, fillchar=fillchar)
else:
modin_result = modin_series.str.ljust(width, fillchar=fillchar)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("width", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("fillchar", string_sep_values, ids=string_sep_keys)
def test_str_rjust(data, width, fillchar):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.rjust(width, fillchar=fillchar)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.rjust(width, fillchar=fillchar)
else:
modin_result = modin_series.str.rjust(width, fillchar=fillchar)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("width", int_arg_values, ids=int_arg_keys)
def test_str_zfill(data, width):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.zfill(width)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.zfill(width)
else:
modin_result = modin_series.str.zfill(width)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("width", int_arg_values, ids=int_arg_keys)
def test_str_wrap(data, width):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.wrap(width)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.wrap(width)
else:
modin_result = modin_series.str.wrap(width)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("start", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("stop", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("step", int_arg_values, ids=int_arg_keys)
def test_str_slice(data, start, stop, step):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.slice(start=start, stop=stop, step=step)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.slice(start=start, stop=stop, step=step)
else:
modin_result = modin_series.str.slice(start=start, stop=stop, step=step)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("start", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("stop", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("repl", string_sep_values, ids=string_sep_keys)
def test_str_slice_replace(data, start, stop, repl):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.slice_replace(
start=start, stop=stop, repl=repl
)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.slice_replace(start=start, stop=stop, repl=repl)
else:
modin_result = modin_series.str.slice_replace(start=start, stop=stop, repl=repl)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
def test_str_count(data, pat):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.count(pat)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.count(pat)
else:
modin_result = modin_series.str.count(pat)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("na", string_na_rep_values, ids=string_na_rep_keys)
def test_str_startswith(data, pat, na):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.startswith(pat, na=na)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.startswith(pat, na=na)
else:
modin_result = modin_series.str.startswith(pat, na=na)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("na", string_na_rep_values, ids=string_na_rep_keys)
def test_str_endswith(data, pat, na):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.endswith(pat, na=na)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.endswith(pat, na=na)
else:
modin_result = modin_series.str.endswith(pat, na=na)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
def test_str_findall(data, pat):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.findall(pat)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.findall(pat)
else:
modin_result = modin_series.str.findall(pat)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("case", bool_arg_values, ids=bool_arg_keys)
@pytest.mark.parametrize("na", string_na_rep_values, ids=string_na_rep_keys)
def test_str_match(data, pat, case, na):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.match(pat, case=case, na=na)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.match(pat, case=case, na=na)
else:
modin_result = modin_series.str.match(pat, case=case, na=na)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("expand", bool_arg_values, ids=bool_arg_keys)
def test_str_extract(data, expand):
modin_series, pandas_series = create_test_series(data)
if expand is not None:
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
modin_series.str.extract(r"([ab])(\d)", expand=expand)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_extractall(data):
modin_series, pandas_series = create_test_series(data)
with pytest.warns(UserWarning):
# We are only testing that this defaults to pandas, so we will just check for
# the warning
modin_series.str.extractall(r"([ab])(\d)")
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_len(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.len()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.len()
else:
modin_result = modin_series.str.len()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("to_strip", string_sep_values, ids=string_sep_keys)
def test_str_strip(data, to_strip):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.strip(to_strip=to_strip)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.strip(to_strip=to_strip)
else:
modin_result = modin_series.str.strip(to_strip=to_strip)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("to_strip", string_sep_values, ids=string_sep_keys)
def test_str_rstrip(data, to_strip):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.rstrip(to_strip=to_strip)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.rstrip(to_strip=to_strip)
else:
modin_result = modin_series.str.rstrip(to_strip=to_strip)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("to_strip", string_sep_values, ids=string_sep_keys)
def test_str_lstrip(data, to_strip):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.lstrip(to_strip=to_strip)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.lstrip(to_strip=to_strip)
else:
modin_result = modin_series.str.lstrip(to_strip=to_strip)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("sep", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("expand", bool_arg_values, ids=bool_arg_keys)
def test_str_partition(data, sep, expand):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.partition(sep, expand=expand)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.partition(sep, expand=expand)
else:
modin_result = modin_series.str.partition(sep, expand=expand)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("sep", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("expand", bool_arg_values, ids=bool_arg_keys)
def test_str_rpartition(data, sep, expand):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.rpartition(sep, expand=expand)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.rpartition(sep, expand=expand)
else:
modin_result = modin_series.str.rpartition(sep, expand=expand)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_lower(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.lower()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.lower()
else:
modin_result = modin_series.str.lower()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_upper(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.upper()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.upper()
else:
modin_result = modin_series.str.upper()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_title(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.title()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.title()
else:
modin_result = modin_series.str.title()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("sub", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("start", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("end", int_arg_values, ids=int_arg_keys)
def test_str_find(data, sub, start, end):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.find(sub, start=start, end=end)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.find(sub, start=start, end=end)
else:
modin_result = modin_series.str.find(sub, start=start, end=end)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("sub", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("start", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("end", int_arg_values, ids=int_arg_keys)
def test_str_rfind(data, sub, start, end):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.rfind(sub, start=start, end=end)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.rfind(sub, start=start, end=end)
else:
modin_result = modin_series.str.rfind(sub, start=start, end=end)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("sub", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("start", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("end", int_arg_values, ids=int_arg_keys)
def test_str_index(data, sub, start, end):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.index(sub, start=start, end=end)
except ValueError:
# pytest does not get the RayGetErrors
assert True
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.index(sub, start=start, end=end)
else:
modin_result = modin_series.str.index(sub, start=start, end=end)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("sub", string_sep_values, ids=string_sep_keys)
@pytest.mark.parametrize("start", int_arg_values, ids=int_arg_keys)
@pytest.mark.parametrize("end", int_arg_values, ids=int_arg_keys)
def test_str_rindex(data, sub, start, end):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.rindex(sub, start=start, end=end)
except ValueError:
# pytest does not get the RayGetErrors
assert True
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.rindex(sub, start=start, end=end)
else:
modin_result = modin_series.str.rindex(sub, start=start, end=end)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_capitalize(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.capitalize()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.capitalize()
else:
modin_result = modin_series.str.capitalize()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_swapcase(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.swapcase()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.swapcase()
else:
modin_result = modin_series.str.swapcase()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize(
"form", ["NFC", "NFKC", "NFD", "NFKD"], ids=["NFC", "NFKC", "NFD", "NFKD"]
)
def test_str_normalize(data, form):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.normalize(form)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.normalize(form)
else:
modin_result = modin_series.str.normalize(form)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
@pytest.mark.parametrize("pat", string_sep_values, ids=string_sep_keys)
def test_str_translate(data, pat):
modin_series, pandas_series = create_test_series(data)
# Test none table
try:
pandas_result = pandas_series.str.translate(None)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.translate(None)
else:
modin_result = modin_series.str.translate(None)
df_equals(modin_result, pandas_result)
# Translation dictionary
table = {pat: "DDD"}
try:
pandas_result = pandas_series.str.translate(table)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.translate(table)
else:
modin_result = modin_series.str.translate(table)
df_equals(modin_result, pandas_result)
# Translation table with maketrans (python3 only)
if pat is not None:
table = str.maketrans(pat, "d" * len(pat))
try:
pandas_result = pandas_series.str.translate(table)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.translate(table)
else:
modin_result = modin_series.str.translate(table)
df_equals(modin_result, pandas_result)
# Test delete chars
deletechars = "|"
try:
pandas_result = pandas_series.str.translate(table, deletechars)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.translate(table, deletechars)
else:
modin_result = modin_series.str.translate(table, deletechars)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_isalnum(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.isalnum()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.isalnum()
else:
modin_result = modin_series.str.isalnum()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_isalpha(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.isalpha()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.isalpha()
else:
modin_result = modin_series.str.isalpha()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_isdigit(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.isdigit()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.isdigit()
else:
modin_result = modin_series.str.isdigit()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_isspace(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.isspace()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.isspace()
else:
modin_result = modin_series.str.isspace()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_islower(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.islower()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.islower()
else:
modin_result = modin_series.str.islower()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_isupper(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.isupper()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.isupper()
else:
modin_result = modin_series.str.isupper()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_istitle(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.istitle()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.istitle()
else:
modin_result = modin_series.str.istitle()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_isnumeric(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.isnumeric()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.isnumeric()
else:
modin_result = modin_series.str.isnumeric()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_str_isdecimal(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.isdecimal()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.isdecimal()
else:
modin_result = modin_series.str.isdecimal()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_casefold(data):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.casefold()
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.casefold()
else:
modin_result = modin_series.str.casefold()
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize("encoding_type", encoding_types)
@pytest.mark.parametrize("data", test_string_data_values, ids=test_string_data_keys)
def test_encode(data, encoding_type):
modin_series, pandas_series = create_test_series(data)
try:
pandas_result = pandas_series.str.encode(encoding=encoding_type)
except Exception as e:
with pytest.raises(type(e)):
modin_series.str.encode(encoding=encoding_type)
else:
modin_result = modin_series.str.encode(encoding=encoding_type)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"is_sparse_data", [True, False], ids=["is_sparse", "is_not_sparse"]
)
def test_hasattr_sparse(is_sparse_data):
modin_df, pandas_df = (
create_test_series(
pandas.arrays.SparseArray(test_data["float_nan_data"].values())
)
if is_sparse_data
else create_test_series(test_data["float_nan_data"])
)
eval_general(modin_df, pandas_df, lambda df: hasattr(df, "sparse"))
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
def test_cat_categories(data):
modin_series, pandas_series = create_test_series(data.copy())
df_equals(modin_series.cat.categories, pandas_series.cat.categories)
pandas_series.cat.categories = list("qwert")
modin_series.cat.categories = list("qwert")
df_equals(modin_series, pandas_series)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
def test_cat_ordered(data):
modin_series, pandas_series = create_test_series(data.copy())
assert modin_series.cat.ordered == pandas_series.cat.ordered
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
def test_cat_codes(data):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.codes
modin_result = modin_series.cat.codes
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_rename_categories(data, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.rename_categories(list("qwert"), inplace=inplace)
modin_result = modin_series.cat.rename_categories(list("qwert"), inplace=inplace)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("ordered", bool_arg_values, ids=bool_arg_keys)
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_reorder_categories(data, ordered, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.reorder_categories(
list("tades"), ordered=ordered, inplace=inplace
)
modin_result = modin_series.cat.reorder_categories(
list("tades"), ordered=ordered, inplace=inplace
)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_add_categories(data, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.add_categories(list("qw"), inplace=inplace)
modin_result = modin_series.cat.add_categories(list("qw"), inplace=inplace)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_remove_categories(data, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.remove_categories(list("at"), inplace=inplace)
modin_result = modin_series.cat.remove_categories(list("at"), inplace=inplace)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_remove_unused_categories(data, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_series[1] = np.nan
pandas_result = pandas_series.cat.remove_unused_categories(inplace=inplace)
modin_series[1] = np.nan
modin_result = modin_series.cat.remove_unused_categories(inplace=inplace)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("ordered", bool_arg_values, ids=bool_arg_keys)
@pytest.mark.parametrize("rename", [True, False])
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_set_categories(data, ordered, rename, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.set_categories(
list("qwert"), ordered=ordered, rename=rename, inplace=inplace
)
modin_result = modin_series.cat.set_categories(
list("qwert"), ordered=ordered, rename=rename, inplace=inplace
)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_as_ordered(data, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.as_ordered(inplace=inplace)
modin_result = modin_series.cat.as_ordered(inplace=inplace)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
@pytest.mark.parametrize(
"data", test_data_categorical_values, ids=test_data_categorical_keys
)
@pytest.mark.parametrize("inplace", [True, False])
def test_cat_as_unordered(data, inplace):
modin_series, pandas_series = create_test_series(data.copy())
pandas_result = pandas_series.cat.as_unordered(inplace=inplace)
modin_result = modin_series.cat.as_unordered(inplace=inplace)
df_equals(modin_series, pandas_series)
df_equals(modin_result, pandas_result)
def test_peculiar_callback():
def func(val):
if not isinstance(val, tuple):
raise BaseException("Urgh...")
return val
pandas_df = pandas.DataFrame({"col": [(0, 1)]})
pandas_series = pandas_df["col"].apply(func)
modin_df = pd.DataFrame({"col": [(0, 1)]})
modin_series = modin_df["col"].apply(func)
df_equals(modin_series, pandas_series)
avg_line_length: 36.197016 | max_line_length: 104 | alphanum_fraction: 0.707478

hexsha: b55b0a9153fbe48ab46176c41073cb41d3986821 | size: 28483 | ext: py | lang: Python
max_stars/max_issues/max_forks repo_path: planning/GamesTests/games/spaceinvaders/Space_Invaders.py
max_stars/max_issues/max_forks repo_name: ChristsY12Robots/Robots
max_stars/max_issues/max_forks repo_head_hexsha: 306f000f85b41731a9e1249847434858319e5415
max_stars/max_issues/max_forks repo_licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null (all stars/issues/forks event datetimes: null)
#!python3
# Space Invaders
# Created by Lee Robinson
#IMPORTANT
#use ctrl-f to search for 'CHANGE' and uncomment the line 'save(save)'
#was disabled for testing
from pygame import *
import sys
from random import shuffle, randrange, choice
import os
import getpass
import profile
#import mysql.connector
init()
# R G B
WHITE = (255, 255, 255)
GREEN = (78, 255, 87)
YELLOW = (241, 255, 0)
BLUE = (80, 255, 239)
PURPLE = (203, 0, 255)
RED = (237, 28, 36)
#added 1/2/18
scorebackground = image.load("scoreboardsi.jpg")
largefont = font.SysFont('Verdana', 80)
width,height = 800,600
#added Miles 1/2/18
def save(score):
username = getpass.getuser()
user_profile = profile.User_Profile(username)
user_profile.update_score(score)
user_profile.add_game_record('Space_Invaders')
SCREEN = display.set_mode((800,600))
FONT = "fonts/space_invaders.ttf"
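# Load every sprite surface once up front; IMAGES maps each name in IMG_NAMES to its Surface.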
IMG_NAMES = ["ship", "ship", "mystery", "enemy1_1", "enemy1_2", "enemy2_1", "enemy2_2",
"enemy3_1", "enemy3_2", "explosionblue", "explosiongreen", "explosionpurple", "laser", "enemylaser"]
IMAGES = {name: image.load("images/{}.png".format(name)).convert_alpha()
for name in IMG_NAMES}
class Ship(sprite.Sprite):
def __init__(self):
sprite.Sprite.__init__(self)
self.image = IMAGES["ship"]
self.rect = self.image.get_rect(topleft=(375, 540))
self.speed = 5
def update(self, keys, *args):
if keys[K_LEFT] and self.rect.x > 10:
self.rect.x -= self.speed
if keys[K_RIGHT] and self.rect.x < 740:
self.rect.x += self.speed
game.screen.blit(self.image, self.rect)
class Bullet(sprite.Sprite):
def __init__(self, xpos, ypos, direction, speed, filename, side):
sprite.Sprite.__init__(self)
self.image = IMAGES[filename]
self.rect = self.image.get_rect(topleft=(xpos, ypos))
self.speed = speed
self.direction = direction
self.side = side
self.filename = filename
def update(self, keys, *args):
game.screen.blit(self.image, self.rect)
self.rect.y += self.speed * self.direction
if self.rect.y < 15 or self.rect.y > 600:
self.kill()
class Enemy(sprite.Sprite):
def __init__(self, row, column):
sprite.Sprite.__init__(self)
self.row = row
self.column = column
self.images = []
self.load_images()
self.index = 0
self.image = self.images[self.index]
self.rect = self.image.get_rect()
self.direction = 1
self.rightMoves = 15
self.leftMoves = 30
self.moveNumber = 0
self.moveTime = 600
self.firstTime = True
self.movedY = False
self.columns = [False] * 10
self.aliveColumns = [True] * 10
self.addRightMoves = False
self.addLeftMoves = False
self.numOfRightMoves = 0
self.numOfLeftMoves = 0
self.timer = time.get_ticks()
def update(self, keys, currentTime, killedRow, killedColumn, killedArray):
self.check_column_deletion(killedRow, killedColumn, killedArray)
if currentTime - self.timer > self.moveTime:
self.movedY = False
if self.moveNumber >= self.rightMoves and self.direction == 1:
self.direction *= -1
self.moveNumber = 0
self.rect.y += 35
self.movedY = True
if self.addRightMoves:
self.rightMoves += self.numOfRightMoves
if self.firstTime:
self.rightMoves = self.leftMoves
self.firstTime = False
self.addRightMovesAfterDrop = False
if self.moveNumber >= self.leftMoves and self.direction == -1:
self.direction *= -1
self.moveNumber = 0
self.rect.y += 35
self.movedY = True
if self.addLeftMoves:
self.leftMoves += self.numOfLeftMoves
self.addLeftMovesAfterDrop = False
if self.moveNumber < self.rightMoves and self.direction == 1 and not self.movedY:
self.rect.x += 10
self.moveNumber += 1
if self.moveNumber < self.leftMoves and self.direction == -1 and not self.movedY:
self.rect.x -= 10
self.moveNumber += 1
self.index += 1
if self.index >= len(self.images):
self.index = 0
self.image = self.images[self.index]
self.timer += self.moveTime
game.screen.blit(self.image, self.rect)
def check_column_deletion(self, killedRow, killedColumn, killedArray):
if killedRow != -1 and killedColumn != -1:
killedArray[killedRow][killedColumn] = 1
for column in range(10):
if all([killedArray[row][column] == 1 for row in range(5)]):
self.columns[column] = True
for i in range(5):
if all([self.columns[x] for x in range(i + 1)]) and self.aliveColumns[i]:
self.leftMoves += 5
self.aliveColumns[i] = False
if self.direction == -1:
self.rightMoves += 5
else:
self.addRightMoves = True
self.numOfRightMoves += 5
for i in range(5):
if all([self.columns[x] for x in range(9, 8 - i, -1)]) and self.aliveColumns[9 - i]:
self.aliveColumns[9 - i] = False
self.rightMoves += 5
if self.direction == 1:
self.leftMoves += 5
else:
self.addLeftMoves = True
self.numOfLeftMoves += 5
def load_images(self):
images = {0: ["1_2", "1_1"],
1: ["2_2", "2_1"],
2: ["2_2", "2_1"],
3: ["3_1", "3_2"],
4: ["3_1", "3_2"],
}
img1, img2 = (IMAGES["enemy{}".format(img_num)] for img_num in images[self.row])
self.images.append(transform.scale(img1, (40, 35)))
self.images.append(transform.scale(img2, (40, 35)))
class Blocker(sprite.Sprite):
def __init__(self, size, color, row, column):
sprite.Sprite.__init__(self)
self.height = size
self.width = size
self.color = color
self.image = Surface((self.width, self.height))
self.image.fill(self.color)
self.rect = self.image.get_rect()
self.row = row
self.column = column
def update(self, keys, *args):
game.screen.blit(self.image, self.rect)
class Mystery(sprite.Sprite):
def __init__(self):
sprite.Sprite.__init__(self)
self.image = IMAGES["mystery"]
self.image = transform.scale(self.image, (75, 35))
self.rect = self.image.get_rect(topleft=(-80, 45))
self.row = 5
self.moveTime = 25000
self.direction = 1
self.timer = time.get_ticks()
self.mysteryEntered = mixer.Sound('sounds/mysteryentered.wav')
self.mysteryEntered.set_volume(0.3)
self.playSound = True
def update(self, keys, currentTime, *args):
resetTimer = False
if (currentTime - self.timer > self.moveTime) and (self.rect.x < 0 or self.rect.x > 800) and self.playSound:
self.mysteryEntered.play()
self.playSound = False
if (currentTime - self.timer > self.moveTime) and self.rect.x < 840 and self.direction == 1:
self.mysteryEntered.fadeout(4000)
self.rect.x += 2
game.screen.blit(self.image, self.rect)
if (currentTime - self.timer > self.moveTime) and self.rect.x > -100 and self.direction == -1:
self.mysteryEntered.fadeout(4000)
self.rect.x -= 2
game.screen.blit(self.image, self.rect)
if (self.rect.x > 830):
self.playSound = True
self.direction = -1
resetTimer = True
if (self.rect.x < -90):
self.playSound = True
self.direction = 1
resetTimer = True
if (currentTime - self.timer > self.moveTime) and resetTimer:
self.timer = currentTime
class Explosion(sprite.Sprite):
def __init__(self, xpos, ypos, row, ship, mystery, score):
sprite.Sprite.__init__(self)
self.isMystery = mystery
self.isShip = ship
if mystery:
self.text = Text(FONT, 20, str(score), WHITE, xpos+20, ypos+6)
elif ship:
self.image = IMAGES["ship"]
self.rect = self.image.get_rect(topleft=(xpos, ypos))
else:
self.row = row
self.load_image()
self.image = transform.scale(self.image, (40, 35))
self.rect = self.image.get_rect(topleft=(xpos, ypos))
game.screen.blit(self.image, self.rect)
self.timer = time.get_ticks()
def update(self, keys, currentTime):
if self.isMystery:
if currentTime - self.timer <= 200:
self.text.draw(game.screen)
if currentTime - self.timer > 400 and currentTime - self.timer <= 600:
self.text.draw(game.screen)
if currentTime - self.timer > 600:
self.kill()
elif self.isShip:
if currentTime - self.timer > 300 and currentTime - self.timer <= 600:
game.screen.blit(self.image, self.rect)
if currentTime - self.timer > 900:
self.kill()
else:
if currentTime - self.timer <= 100:
game.screen.blit(self.image, self.rect)
if currentTime - self.timer > 100 and currentTime - self.timer <= 200:
self.image = transform.scale(self.image, (50, 45))
game.screen.blit(self.image, (self.rect.x-6, self.rect.y-6))
if currentTime - self.timer > 400:
self.kill()
def load_image(self):
imgColors = ["purple", "blue", "blue", "green", "green"]
self.image = IMAGES["explosion{}".format(imgColors[self.row])]
class Life(sprite.Sprite):
def __init__(self, xpos, ypos):
sprite.Sprite.__init__(self)
self.image = IMAGES["ship"]
self.image = transform.scale(self.image, (23, 23))
self.rect = self.image.get_rect(topleft=(xpos, ypos))
def update(self, keys, *args):
game.screen.blit(self.image, self.rect)
class Text(object):
def __init__(self, textFont, size, message, color, xpos, ypos):
self.font = font.Font(textFont, size)
self.surface = self.font.render(message, True, color)
self.rect = self.surface.get_rect(topleft=(xpos, ypos))
def draw(self, surface):
surface.blit(self.surface, self.rect)
class SpaceInvaders(object):
def __init__(self):
mixer.pre_init(44100, -16, 1, 512)
init()
self.caption = display.set_caption('Space Invaders')
self.screen = SCREEN
self.background = image.load('images/background.jpg').convert()
self.startGame = False
self.mainScreen = True
self.gameOver = False
# Initial value for a new game
self.enemyPositionDefault = 65
# Counter for enemy starting position (increased each new round)
self.enemyPositionStart = self.enemyPositionDefault
# Current enemy starting position
self.enemyPosition = self.enemyPositionStart
def reset(self, score, lives, newGame=False):
self.player = Ship()
self.playerGroup = sprite.Group(self.player)
self.explosionsGroup = sprite.Group()
self.bullets = sprite.Group()
self.mysteryShip = Mystery()
self.mysteryGroup = sprite.Group(self.mysteryShip)
self.enemyBullets = sprite.Group()
self.reset_lives(lives)
self.enemyPosition = self.enemyPositionStart
self.make_enemies()
# Only create blockers on a new game, not a new round
if newGame:
self.allBlockers = sprite.Group(self.make_blockers(0), self.make_blockers(1), self.make_blockers(2), self.make_blockers(3))
self.keys = key.get_pressed()
self.clock = time.Clock()
self.timer = time.get_ticks()
self.noteTimer = time.get_ticks()
self.shipTimer = time.get_ticks()
self.score = score
self.lives = lives
self.create_audio()
self.create_text()
self.killedRow = -1
self.killedColumn = -1
self.makeNewShip = False
self.shipAlive = True
self.killedArray = [[0] * 10 for x in range(5)]
def make_blockers(self, number):
blockerGroup = sprite.Group()
for row in range(4):
for column in range(9):
blocker = Blocker(10, GREEN, row, column)
blocker.rect.x = 50 + (200 * number) + (column * blocker.width)
blocker.rect.y = 450 + (row * blocker.height)
blockerGroup.add(blocker)
return blockerGroup
def reset_lives_sprites(self):
self.life1 = Life(715, 3)
self.life2 = Life(742, 3)
self.life3 = Life(769, 3)
if self.lives == 3:
self.livesGroup = sprite.Group(self.life1, self.life2, self.life3)
elif self.lives == 2:
self.livesGroup = sprite.Group(self.life1, self.life2)
elif self.lives == 1:
self.livesGroup = sprite.Group(self.life1)
def reset_lives(self, lives):
self.lives = lives
self.reset_lives_sprites()
def create_audio(self):
self.sounds = {}
for sound_name in ["shoot", "shoot2", "invaderkilled", "mysterykilled", "shipexplosion"]:
self.sounds[sound_name] = mixer.Sound("sounds/{}.wav".format(sound_name))
self.sounds[sound_name].set_volume(0.2)
self.musicNotes = [mixer.Sound("sounds/{}.wav".format(i)) for i in range(4)]
for sound in self.musicNotes:
sound.set_volume(0.5)
self.noteIndex = 0
def play_main_music(self, currentTime):
moveTime = self.enemies.sprites()[0].moveTime
if currentTime - self.noteTimer > moveTime:
self.note = self.musicNotes[self.noteIndex]
if self.noteIndex < 3:
self.noteIndex += 1
else:
self.noteIndex = 0
self.note.play()
self.noteTimer += moveTime
def create_text(self):
self.titleText = Text(FONT, 50, "Space Invaders", WHITE, 164, 155)
self.titleText2 = Text(FONT, 25, "Press any key to continue", WHITE, 201, 225)
self.gameOverText = Text(FONT, 50, "Game Over", WHITE, 250, 270)
self.nextRoundText = Text(FONT, 50, "Next Round", WHITE, 240, 270)
self.enemy1Text = Text(FONT, 25, " = 10 pts", GREEN, 368, 270)
self.enemy2Text = Text(FONT, 25, " = 20 pts", BLUE, 368, 320)
self.enemy3Text = Text(FONT, 25, " = 30 pts", PURPLE, 368, 370)
self.enemy4Text = Text(FONT, 25, " = ?????", RED, 368, 420)
self.scoreText = Text(FONT, 20, "Score", WHITE, 5, 5)
self.livesText = Text(FONT, 20, "Lives ", WHITE, 640, 5)
def check_input(self):
self.keys = key.get_pressed()
for e in event.get():
if e.type == QUIT:
sys.exit()
if e.type == KEYDOWN:
#added 1/2/18
if e.key == K_ESCAPE:
quit()
sys.exit()
if e.key == K_SPACE:
if len(self.bullets) == 0 and self.shipAlive:
if self.score < 1000:
bullet = Bullet(self.player.rect.x+23, self.player.rect.y+5, -1, 15, "laser", "center")
self.bullets.add(bullet)
self.allSprites.add(self.bullets)
self.sounds["shoot"].play()
else:
leftbullet = Bullet(self.player.rect.x+8, self.player.rect.y+5, -1, 15, "laser", "left")
rightbullet = Bullet(self.player.rect.x+38, self.player.rect.y+5, -1, 15, "laser", "right")
self.bullets.add(leftbullet)
self.bullets.add(rightbullet)
self.allSprites.add(self.bullets)
self.sounds["shoot2"].play()
def make_enemies(self):
enemies = sprite.Group()
for row in range(5):
for column in range(10):
enemy = Enemy(row, column)
enemy.rect.x = 157 + (column * 50)
enemy.rect.y = self.enemyPosition + (row * 45)
enemies.add(enemy)
self.enemies = enemies
self.allSprites = sprite.Group(self.player, self.enemies, self.livesGroup, self.mysteryShip)
def make_enemies_shoot(self):
columnList = []
for enemy in self.enemies:
columnList.append(enemy.column)
columnSet = set(columnList)
columnList = list(columnSet)
shuffle(columnList)
column = columnList[0]
enemyList = []
rowList = []
for enemy in self.enemies:
if enemy.column == column:
rowList.append(enemy.row)
row = max(rowList)
for enemy in self.enemies:
if enemy.column == column and enemy.row == row:
if (time.get_ticks() - self.timer) > 700:
self.enemyBullets.add(Bullet(enemy.rect.x + 14, enemy.rect.y + 20, 1, 5, "enemylaser", "center"))
self.allSprites.add(self.enemyBullets)
self.timer = time.get_ticks()
def calculate_score(self, row):
scores = {0: 30,
1: 20,
2: 20,
3: 10,
4: 10,
5: choice([50, 100, 150, 300])
}
score = scores[row]
self.score += score
return score
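# Note on the mapping above: row 0 is the topmost enemy row and is worth 30 points,
# rows 1-2 are worth 20, rows 3-4 are worth 10, and "row" 5 is the mystery ship,
# which awards a random bonus of 50/100/150/300 points, matching the point values
# shown on the main menu.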
def create_main_menu(self):
self.enemy1 = IMAGES["enemy3_1"]
self.enemy1 = transform.scale(self.enemy1 , (40, 40))
self.enemy2 = IMAGES["enemy2_2"]
self.enemy2 = transform.scale(self.enemy2 , (40, 40))
self.enemy3 = IMAGES["enemy1_2"]
self.enemy3 = transform.scale(self.enemy3 , (40, 40))
self.enemy4 = IMAGES["mystery"]
self.enemy4 = transform.scale(self.enemy4 , (80, 40))
self.screen.blit(self.enemy1, (318, 270))
self.screen.blit(self.enemy2, (318, 320))
self.screen.blit(self.enemy3, (318, 370))
self.screen.blit(self.enemy4, (299, 420))
for e in event.get():
if e.type == QUIT:
sys.exit()
if e.type == KEYUP:
self.startGame = True
self.mainScreen = False
def update_enemy_speed(self):
if len(self.enemies) <= 10:
for enemy in self.enemies:
enemy.moveTime = 400
if len(self.enemies) == 1:
for enemy in self.enemies:
enemy.moveTime = 200
def check_collisions(self):
collidedict = sprite.groupcollide(self.bullets, self.enemyBullets, True, False)
if collidedict:
for value in collidedict.values():
for currentSprite in value:
self.enemyBullets.remove(currentSprite)
self.allSprites.remove(currentSprite)
enemiesdict = sprite.groupcollide(self.bullets, self.enemies, True, False)
if enemiesdict:
for value in enemiesdict.values():
for currentSprite in value:
self.sounds["invaderkilled"].play()
self.killedRow = currentSprite.row
self.killedColumn = currentSprite.column
score = self.calculate_score(currentSprite.row)
explosion = Explosion(currentSprite.rect.x, currentSprite.rect.y, currentSprite.row, False, False, score)
self.explosionsGroup.add(explosion)
self.allSprites.remove(currentSprite)
self.enemies.remove(currentSprite)
self.gameTimer = time.get_ticks()
break
mysterydict = sprite.groupcollide(self.bullets, self.mysteryGroup, True, True)
if mysterydict:
for value in mysterydict.values():
for currentSprite in value:
currentSprite.mysteryEntered.stop()
self.sounds["mysterykilled"].play()
score = self.calculate_score(currentSprite.row)
explosion = Explosion(currentSprite.rect.x, currentSprite.rect.y, currentSprite.row, False, True, score)
self.explosionsGroup.add(explosion)
self.allSprites.remove(currentSprite)
self.mysteryGroup.remove(currentSprite)
newShip = Mystery()
self.allSprites.add(newShip)
self.mysteryGroup.add(newShip)
break
bulletsdict = sprite.groupcollide(self.enemyBullets, self.playerGroup, True, False)
if bulletsdict:
for value in bulletsdict.values():
for playerShip in value:
if self.lives == 3:
self.lives -= 1
self.livesGroup.remove(self.life3)
self.allSprites.remove(self.life3)
elif self.lives == 2:
self.lives -= 1
self.livesGroup.remove(self.life2)
self.allSprites.remove(self.life2)
elif self.lives == 1:
self.lives -= 1
self.livesGroup.remove(self.life1)
self.allSprites.remove(self.life1)
elif self.lives == 0:
self.gameOver = True
self.startGame = False
self.sounds["shipexplosion"].play()
explosion = Explosion(playerShip.rect.x, playerShip.rect.y, 0, True, False, 0)
self.explosionsGroup.add(explosion)
self.allSprites.remove(playerShip)
self.playerGroup.remove(playerShip)
self.makeNewShip = True
self.shipTimer = time.get_ticks()
self.shipAlive = False
if sprite.groupcollide(self.enemies, self.playerGroup, True, True):
self.gameOver = True
self.startGame = False
sprite.groupcollide(self.bullets, self.allBlockers, True, True)
sprite.groupcollide(self.enemyBullets, self.allBlockers, True, True)
sprite.groupcollide(self.enemies, self.allBlockers, False, True)
def create_new_ship(self, createShip, currentTime):
if createShip and (currentTime - self.shipTimer > 900):
self.player = Ship()
self.allSprites.add(self.player)
self.playerGroup.add(self.player)
self.makeNewShip = False
self.shipAlive = True
#added 1/2/18
def end_screen_display(self,score):
self.score = score
score_display = score
#score_display = score_display + (player.lives * 500)
scoretext = largefont.render(str(score_display), True, WHITE)
x_shift = len(str(score_display)) * 25
SCREEN.blit(scorebackground, [0,0])
SCREEN.blit(scoretext, [width/2-x_shift, height/2 - 50])
display.update()
time.wait(2000)
def create_game_over(self, currentTime):
self.screen.blit(self.background, (0,0))
if currentTime - self.timer < 750:
self.gameOverText.draw(self.screen)
if currentTime - self.timer > 750 and currentTime - self.timer < 1500:
self.screen.blit(self.background, (0,0))
if currentTime - self.timer > 1500 and currentTime - self.timer < 2250:
self.gameOverText.draw(self.screen)
if currentTime - self.timer > 2250 and currentTime - self.timer < 2750:
self.screen.blit(self.background, (0,0))
if currentTime - self.timer > 3000:
self.mainScreen = True
#added 1/2/18 CHANGE
score=self.score
self.end_screen_display(score)
save(score)
def main(self):
while True:
if self.mainScreen:
self.reset(0, 3, True)
self.screen.blit(self.background, (0,0))
self.titleText.draw(self.screen)
self.titleText2.draw(self.screen)
self.enemy1Text.draw(self.screen)
self.enemy2Text.draw(self.screen)
self.enemy3Text.draw(self.screen)
self.enemy4Text.draw(self.screen)
self.create_main_menu()
elif self.startGame:
if len(self.enemies) == 0:
currentTime = time.get_ticks()
if currentTime - self.gameTimer < 3000:
self.screen.blit(self.background, (0,0))
self.scoreText2 = Text(FONT, 20, str(self.score), GREEN, 85, 5)
self.scoreText.draw(self.screen)
self.scoreText2.draw(self.screen)
self.nextRoundText.draw(self.screen)
self.livesText.draw(self.screen)
self.livesGroup.update(self.keys)
self.check_input()
if currentTime - self.gameTimer > 3000:
# Move enemies closer to bottom
self.enemyPositionStart += 35
self.reset(self.score, self.lives)
self.make_enemies()
self.gameTimer += 3000
else:
currentTime = time.get_ticks()
self.play_main_music(currentTime)
self.screen.blit(self.background, (0,0))
self.allBlockers.update(self.screen)
self.scoreText2 = Text(FONT, 20, str(self.score), GREEN, 85, 5)
self.scoreText.draw(self.screen)
self.scoreText2.draw(self.screen)
self.livesText.draw(self.screen)
self.check_input()
self.allSprites.update(self.keys, currentTime, self.killedRow, self.killedColumn, self.killedArray)
self.explosionsGroup.update(self.keys, currentTime)
self.check_collisions()
self.create_new_ship(self.makeNewShip, currentTime)
self.update_enemy_speed()
if len(self.enemies) > 0:
self.make_enemies_shoot()
elif self.gameOver:
currentTime = time.get_ticks()
# Reset enemy starting position
self.enemyPositionStart = self.enemyPositionDefault
self.create_game_over(currentTime)
display.update()
self.clock.tick(60)
if __name__ == '__main__':
game = SpaceInvaders()
game.main()
| 41.10101
| 136
| 0.535337
|
dc8c4aa877311bcb0da2f24033978d798fa204bd
| 414
|
py
|
Python
|
backend/MALTrendsWeb/wsgi.py
|
danielzhaotongliu/MALTrendsWeb
|
59e9aed1d2451739a53460aaf044bf348ee3c36a
|
[
"MIT"
] | 1
|
2019-11-14T04:48:22.000Z
|
2019-11-14T04:48:22.000Z
|
backend/MALTrendsWeb/wsgi.py
|
danielzhaotongliu/MALTrendsWeb
|
59e9aed1d2451739a53460aaf044bf348ee3c36a
|
[
"MIT"
] | 13
|
2019-12-09T04:56:32.000Z
|
2022-02-26T20:12:03.000Z
|
backend/MALTrendsWeb/wsgi.py
|
danielzhaotongliu/MALTrendsWeb
|
59e9aed1d2451739a53460aaf044bf348ee3c36a
|
[
"MIT"
] | null | null | null |
"""
WSGI config for MALTrendsWeb project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "MALTrendsWeb.settings.production")
application = get_wsgi_application()
| 23
| 83
| 0.792271
|
ac63cfea7344a15a77510a48526e6a66f20db854
| 9,243
|
py
|
Python
|
tests/test_abstract_quotes_fetcher.py
|
romepeng/omega
|
f42a9dd4299914bde589235909f6c56c2116df81
|
[
"MIT"
] | 1
|
2021-03-03T12:52:53.000Z
|
2021-03-03T12:52:53.000Z
|
tests/test_abstract_quotes_fetcher.py
|
romepeng/omega
|
f42a9dd4299914bde589235909f6c56c2116df81
|
[
"MIT"
] | null | null | null |
tests/test_abstract_quotes_fetcher.py
|
romepeng/omega
|
f42a9dd4299914bde589235909f6c56c2116df81
|
[
"MIT"
] | null | null | null |
import datetime
import logging
import os
import unittest
import arrow
import cfg4py
import numpy as np
import omicron
from omicron import cache
from omicron.core.timeframe import tf
from omicron.core.types import FrameType
from omega.fetcher.abstract_quotes_fetcher import AbstractQuotesFetcher as aq
from tests import init_test_env
logger = logging.getLogger(__name__)
cfg = cfg4py.get_instance()
class TestAbstractQuotesFetcher(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self) -> None:
init_test_env()
await self.create_quotes_fetcher()
await omicron.init(aq)
async def asyncTearDown(self) -> None:
await omicron.shutdown()
def get_config_path(self):
src_dir = os.path.dirname(__file__)
return os.path.join(src_dir, "../omega/config")
async def create_quotes_fetcher(self):
fetcher_info = cfg.quotes_fetchers[0]
impl = fetcher_info["impl"]
params = fetcher_info["workers"][0]
await aq.create_instance(impl, **params)
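# The fetcher config read above is expected to look roughly like the YAML below
# (the impl name and worker keys are illustrative assumptions, not the actual defaults):
# quotes_fetchers:
#   - impl: jqadaptor
#     workers:
#       - account: "<account>"
#         password: "<password>"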
async def clear_cache(self, sec: str, frame_type: FrameType):
await cache.security.delete(f"{sec}:{frame_type.value}")
async def test_get_security_list(self):
secs = await aq.get_security_list()
self.assertEqual("000001.XSHE", secs[0][0])
async def test_get_bars_010(self):
"""日线级别, 无停牌"""
sec = "000001.XSHE"
frame_type = FrameType.DAY
# 2020-4-3 Friday
end = arrow.get("2020-04-03").date()
# without cache
await self.clear_cache(sec, frame_type)
bars = await aq.get_bars(sec, end, 10, frame_type)
self.assertEqual(bars[0]["frame"], arrow.get("2020-03-23").date())
self.assertEqual(bars[-1]["frame"], arrow.get("2020-04-03").date())
self.assertAlmostEqual(12.0, bars[0]["open"], places=2)
self.assertAlmostEqual(12.82, bars[-1]["open"], places=2)
# Check the cache
cache_len = await cache.security.hlen(f"{sec}:{frame_type.value}")
self.assertEqual(12, cache_len)
# Daily bars: during the suspension period the data should be set to np.nan
sec = "000029.XSHE"
end = arrow.get("2020-8-18").date()
frame_type = FrameType.DAY
bars = await aq.get_bars(sec, end, 10, frame_type)
self.assertEqual(10, len(bars))
self.assertEqual(end, bars[-1]["frame"])
self.assertEqual(arrow.get("2020-08-05").date(), bars[0]["frame"])
self.assertTrue(np.all(np.isnan(bars["close"])))
async def test_get_bars_011(self):
"""分钟级别,中间有停牌,end指定时间未对齐的情况"""
# 600721, ST百花, 2020-4-29停牌一天
sec = "600721.XSHG"
frame_type = FrameType.MIN60
end = arrow.get("2020-04-30 10:32", tzinfo="Asia/Shanghai").datetime
await self.clear_cache(sec, frame_type)
bars = await aq.get_bars(sec, end, 7, frame_type)
print(bars)
self.assertEqual(7, len(bars))
self.assertEqual(
arrow.get("2020-04-28 15:00", tzinfo="Asia/Shanghai"), bars["frame"][0]
)
self.assertEqual(
arrow.get("2020-04-30 10:30", tzinfo="Asia/Shanghai"), bars["frame"][-2]
)
self.assertEqual(
arrow.get("2020-4-30 10:32", tzinfo="Asia/Shanghai"), bars["frame"][-1]
)
self.assertAlmostEqual(5.37, bars["open"][0], places=2)
self.assertAlmostEqual(5.26, bars["open"][-2], places=2)
self.assertAlmostEqual(5.33, bars["open"][-1], places=2)
# Check the cache: the 10:32 bar should not have been stored
cache_len = await cache.security.hlen(f"{sec}:{frame_type.value}")
self.assertEqual(8, cache_len)
bars_2 = await cache.get_bars(sec, tf.floor(end, frame_type), 6, frame_type)
np.array_equal(bars[:-1], bars_2)
async def test_get_bars_012(self):
"""分钟级别,中间有一天停牌,end指定时间正在交易"""
# 600721, ST百花, 2020-4-29停牌一天
sec = "600721.XSHG"
frame_type = FrameType.MIN60
end = arrow.get("2020-04-30 10:30", tzinfo=cfg.tz).datetime
bars = await aq.get_bars(sec, end, 6, frame_type)
print(bars)
self.assertEqual(6, len(bars))
self.assertEqual(
arrow.get("2020-04-28 15:00", tzinfo="Asia/Shanghai"), bars["frame"][0]
)
self.assertEqual(
arrow.get("2020-04-30 10:30", tzinfo="Asia/Shanghai"), bars["frame"][-1]
)
self.assertAlmostEqual(5.37, bars["open"][0], places=2)
self.assertAlmostEqual(5.26, bars["open"][-1], places=2)
self.assertTrue(np.isnan(bars["open"][1]))
# The frame containing the end time has not closed yet
end = arrow.get("2020-04-30 10:32:00.13", tzinfo=cfg.tz).datetime
frame_type = FrameType.MIN30
bars = await aq.get_bars(sec, end, 6, frame_type)
print(bars)
self.assertAlmostEqual(5.33, bars[-1]["close"], places=2)
self.assertEqual(end.replace(second=0, microsecond=0), bars[-1]["frame"])
async def test_get_bars_013(self):
"""分钟级别,end指定时间正处在停牌中"""
# 600721, ST百花, 2020-4-29停牌一天
sec = "600721.XSHG"
frame_type = FrameType.MIN60
end = arrow.get("2020-04-29 10:30", tzinfo="Asia/Chongqing").datetime
await self.clear_cache(sec, frame_type)
bars = await aq.get_bars(sec, end, 6, frame_type)
print(bars)
self.assertEqual(6, len(bars))
self.assertEqual(
arrow.get("2020-04-27 15:00", tzinfo="Asia/Shanghai"), bars["frame"][0]
)
self.assertEqual(
arrow.get("2020-04-29 10:30", tzinfo="Asia/Shanghai"), bars["frame"][-1]
)
self.assertAlmostEqual(5.47, bars["open"][0], places=2)
self.assertAlmostEqual(5.37, bars["open"][-2], places=2)
self.assertTrue(np.isnan(bars["open"][-1]))
# Check the cache: the 10:30 bar has been stored
cache_len = await cache.security.hlen(f"{sec}:{frame_type.value}")
self.assertEqual(8, cache_len)
bars_2 = await cache.get_bars(sec, tf.floor(end, frame_type), 6, frame_type)
np.array_equal(bars, bars_2)
async def test_get_bars_014(self):
"""测试周线级别未结束的frame能否对齐"""
sec = "600721.XSHG"
frame_type = FrameType.WEEK
"""
[(datetime.date(2020, 4, 17), 6.02, 6.69, 5.84, 6.58, 22407., 1.407e+08, 1.455)
(datetime.date(2020, 4, 24), 6.51, 6.57, 5.68, 5.72, 25911., 1.92e+08, 1.455)
(datetime.date(2020, 4, 29), nan, nan, nan, nan, nan, nan, nan)]
[(datetime.date(2020, 4, 17), 6.02, 6.69, 5.84, 6.58, 2241., 1.4e+08, 1.455)
(datetime.date(2020, 4, 24), 6.51, 6.57, 5.68, 5.72, 2511., 1.55e+08, 1.455)
(datetime.date(2020, 4, 30), nan, nan, nan, nan, nan, nan, nan)]
"""
end = arrow.get("2020-4-29 15:00").datetime # 周三,当周周四结束
bars = await aq.get_bars(sec, end, 3, FrameType.WEEK)
print(bars)
self.assertEqual(arrow.get("2020-4-17").date(), bars[0]["frame"])
self.assertEqual(arrow.get("2020-4-24").date(), bars[1]["frame"])
self.assertEqual(arrow.get("2020-4-29").date(), bars[-1]["frame"])
self.assertAlmostEqual(6.02, bars[0]["open"], places=2)
self.assertAlmostEqual(6.51, bars[1]["open"], places=2)
self.assertTrue(np.isnan(bars[-1]["open"]))
end = arrow.get("2020-04-30 15:00").datetime
bars = await aq.get_bars(sec, end, 3, frame_type)
print(bars)
self.assertEqual(arrow.get("2020-4-17").date(), bars[0]["frame"])
self.assertEqual(arrow.get("2020-4-24").date(), bars[1]["frame"])
self.assertEqual(arrow.get("2020-4-30").date(), bars[-1]["frame"])
self.assertAlmostEqual(6.02, bars[0]["open"], places=2)
self.assertAlmostEqual(6.51, bars[1]["open"], places=2)
self.assertAlmostEqual(5.7, bars[-1]["open"], places=2)
async def test_get_bars_015(self):
sec = "300677.XSHE"
frame_type = FrameType.DAY
end = arrow.now().datetime
# without cache
# await self.clear_cache(sec, frame_type)
bars = await aq.get_bars(sec, end, 10, frame_type)
print(bars)
async def test_get_valuation(self):
secs = ["000001.XSHE", "600000.XSHG"]
date = arrow.get("2020-10-26").date()
# return two records, one for each
vals = await aq.get_valuation(secs, date)
self.assertSetEqual(set(secs), set(vals["code"].tolist()))
self.assertEqual(len(secs), len(vals))
# return two records, only two fields
vals = await aq.get_valuation(secs, date, fields=["frame", "code"])
self.assertEqual(set(secs), set(vals["code"].tolist()))
self.assertSequenceEqual(vals.dtype.names, ["frame", "code"])
async def test_get_bars_batch(self):
secs = ["000001.XSHE", "000001.XSHG"]
end_dt = arrow.get("2020-11-01").date()
frame_type = FrameType.DAY
bars = await aq.get_bars_batch(secs, end_dt, 5, frame_type)
self.assertSetEqual(set(secs), set(bars.keys()))
self.assertEqual(5, len(bars["000001.XSHE"]))
self.assertAlmostEqual(18.2, bars["000001.XSHE"]["open"][0], places=2)
async def test_get_all_trade_days(self):
days = await aq.get_all_trade_days()
self.assertIn(datetime.date(2020, 12, 31), days)
| 38.352697
| 87
| 0.608569
|
40a5b65387967e742b49613982f016c2e7ba7fc4
| 7,211
|
py
|
Python
|
calculate_snv_distances.py
|
ngarud/microbiome_evolution
|
4015ec03e2cbdbfb7d062415fb6a8c869f37a32e
|
[
"BSD-2-Clause"
] | 2
|
2020-08-09T06:19:11.000Z
|
2021-08-18T17:12:23.000Z
|
calculate_snv_distances.py
|
benjaminhgood/microbiome_evolution
|
4015ec03e2cbdbfb7d062415fb6a8c869f37a32e
|
[
"BSD-2-Clause"
] | null | null | null |
calculate_snv_distances.py
|
benjaminhgood/microbiome_evolution
|
4015ec03e2cbdbfb7d062415fb6a8c869f37a32e
|
[
"BSD-2-Clause"
] | 8
|
2019-02-20T22:21:55.000Z
|
2021-02-13T00:55:40.000Z
|
import config
import parse_midas_data
import parse_HMP_data
import os.path
import pylab
import sys
import numpy
import diversity_utils
import gene_diversity_utils
import stats_utils
from math import log10,ceil
from numpy.random import randint
import core_gene_utils
import gzip
import calculate_substitution_rates
import clade_utils
private_snv_directory = '%ssnv_distances/' % (parse_midas_data.data_directory)
intermediate_filename_template = '%s%s.txt.gz'
min_coverage = config.min_median_coverage
alpha = 0.5 # Confidence interval range for rate estimates
low_pi_threshold = 1e-03
low_divergence_threshold = 1e-03
min_change = 0.8
min_sample_size = 10
allowed_variant_types = set(['1D','2D','3D','4D'])
def load_snv_distance_map(species_name):
# This function is called whenever a downstream script needs the output of this analysis.
intermediate_filename = intermediate_filename_template % (private_snv_directory, species_name)
snv_distance_map = {}
file = gzip.open(intermediate_filename,"r")
file.readline() # header
for line in file:
items = line.split(",")
contig = items[0].strip()
location = long(items[1])
variant_type = items[2].strip()
derived_allele_count = long(items[3])
ancestral_allele_count = long(items[4])
min_between_d = float(items[5])
max_within_d1 = float(items[6])
max_within_d2 = float(items[7])
snv_distance_map[(contig, location)] = (variant_type, derived_allele_count, ancestral_allele_count, min_between_d, max_within_d1, max_within_d2)
return snv_distance_map
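# Usage sketch (the species name is only an example; the intermediate file for that
# species must already exist):
#
#   snv_distance_map = load_snv_distance_map("Bacteroides_vulgatus_57955")
#   (variant_type, derived_allele_count, ancestral_allele_count,
#    min_between_d, max_within_d1, max_within_d2) = snv_distance_map[(contig, location)]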
if __name__=='__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--debug", help="Loads only a subset of SNPs for speed", action="store_true")
parser.add_argument("--chunk-size", type=int, help="max number of records to load", default=1000000000)
parser.add_argument("--species", help="Name of specific species to run code on", default="all")
args = parser.parse_args()
debug = args.debug
chunk_size = args.chunk_size
species=args.species
# Load subject and sample metadata
sys.stderr.write("Loading sample metadata...\n")
subject_sample_map = parse_HMP_data.parse_subject_sample_map()
sys.stderr.write("Done!\n")
# Get the list of species to run this script on.
good_species_list = parse_midas_data.parse_good_species_list()
if debug:
good_species_list = good_species_list[:3]
elif species !='all':
good_species_list = [species]
os.system('mkdir -p %s' % private_snv_directory)
for species_name in good_species_list:
# Only consider samples above a certain depth threshold that are "haploids"
snp_samples = diversity_utils.calculate_haploid_samples(species_name, debug=debug)
# Only consider one sample per person
snp_samples = snp_samples[parse_midas_data.calculate_unique_samples(subject_sample_map, sample_list=snp_samples)]
sys.stderr.write("Proceeding with %d haploid samples!\n" % len(snp_samples))
if len(snp_samples) < min_sample_size:
sys.stderr.write("Not enough haploid samples!\n")
continue
sys.stderr.write("Proceeding with %d haploid samples!\n" % len(snp_samples))
sys.stderr.write("Loading core genes...\n")
core_genes = core_gene_utils.parse_core_genes(species_name)
non_shared_genes = core_gene_utils.parse_non_shared_reference_genes(species_name)
shared_pangenome_genes = core_gene_utils.parse_shared_genes(species_name)
sys.stderr.write("Done! Core genome consists of %d genes\n" % len(core_genes))
sys.stderr.write("%d shared genes and %d non-shared genes\n" % (len(shared_pangenome_genes), len(non_shared_genes)))
sys.stderr.write("Loading pre-computed substitution rates for %s...\n" % species_name)
substitution_rate_map = calculate_substitution_rates.load_substitution_rate_map(species_name)
sys.stderr.write("Calculating matrix...\n")
dummy_samples, snp_difference_matrix, snp_opportunity_matrix = calculate_substitution_rates.calculate_matrices_from_substitution_rate_map(substitution_rate_map, 'core', allowed_samples=snp_samples)
snp_samples = dummy_samples
sys.stderr.write("Done!\n")
snp_substitution_rate = snp_difference_matrix*1.0/(snp_opportunity_matrix+(snp_opportunity_matrix==0))
# Analyze SNPs, looping over chunk sizes.
# Clunky, but necessary to limit memory usage on cluster
# Load SNP information for species_name
sys.stderr.write("Loading SNPs for %s...\n" % species_name)
sys.stderr.write("(core genes only...)\n")
snp_data = []
final_line_number = 0
while final_line_number >= 0:
sys.stderr.write("Loading chunk starting @ %d...\n" % final_line_number)
dummy_samples, allele_counts_map, passed_sites_map, final_line_number = parse_midas_data.parse_snps(species_name, debug=debug, allowed_samples=snp_samples, chunk_size=chunk_size,initial_line_number=final_line_number, allowed_genes=core_genes)
sys.stderr.write("Done! Loaded %d genes\n" % len(allele_counts_map.keys()))
if not (dummy_samples==snp_samples).all():
sys.stderr.write("Ordering problem!\n")
# Calculate fixation matrix
sys.stderr.write("Calculating snp distances...\n")
chunk_snp_data = clade_utils.calculate_snp_distances(allele_counts_map, passed_sites_map, snp_substitution_rate)
sys.stderr.write("Done!\n")
snp_data.extend(chunk_snp_data)
sys.stderr.write("Done!\n")
if len(snp_data)>0:
intermediate_filename = intermediate_filename_template % (private_snv_directory, species_name)
# Now add records
output_file = gzip.open(intermediate_filename,"w")
# Header
output_file.write("contig, location, var_type, derived_allele_count, ancestral_allele_count, min_between_d, max_within_derived_d, max_within_ancestral_d\n")
for location_tuple, variant_type, derived_allele_count, ancestral_allele_count, min_between_d, max_within_d1, avg_within_d1, max_within_d2, avg_within_d2 in snp_data:
contig, location = location_tuple
record_str_items = [contig, str(location), variant_type, str(derived_allele_count), str(ancestral_allele_count), str(min_between_d), str(max_within_d1), str(max_within_d2)]
record_str = ", ".join(record_str_items)
output_file.write(record_str)
output_file.write("\n")
output_file.close()
sys.stderr.write("Done with %s!\n" % species_name)
sys.stderr.write("Done looping over species!\n")
sys.stderr.write("Testing loading...\n")
snv_distance_map = load_snv_distance_map(good_species_list[0])
sys.stderr.write("Done!\n")
| 40.971591
| 254
| 0.69283
|
043f27c3e3d123ab77fa6a57d6493aa11951e0c5
| 5,107
|
py
|
Python
|
datasets/web_of_science/web_of_science.py
|
dkajtoch/datasets
|
12ef7f0d541a5aca5b29ebc2dddf5e1214f0e3e9
|
[
"Apache-2.0"
] | 9
|
2021-04-26T14:43:52.000Z
|
2021-11-08T09:47:24.000Z
|
datasets/web_of_science/web_of_science.py
|
jramapuram/huggingface_datasets
|
62c7ac0783a00bdc1192b6a75439a65d522b6cbc
|
[
"Apache-2.0"
] | null | null | null |
datasets/web_of_science/web_of_science.py
|
jramapuram/huggingface_datasets
|
62c7ac0783a00bdc1192b6a75439a65d522b6cbc
|
[
"Apache-2.0"
] | 3
|
2021-01-03T22:08:20.000Z
|
2021-08-12T20:09:39.000Z
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Web of science"""
from __future__ import absolute_import, division, print_function
import os
import datasets
_CITATION = """\
@inproceedings{kowsari2017HDLTex,
title={HDLTex: Hierarchical Deep Learning for Text Classification},
author={Kowsari, Kamran and Brown, Donald E and Heidarysafa, Mojtaba and Jafari Meimandi, Kiana and and Gerber, Matthew S and Barnes, Laura E},
booktitle={Machine Learning and Applications (ICMLA), 2017 16th IEEE International Conference on},
year={2017},
organization={IEEE}
}
"""
_DESCRIPTION = """\
The Web Of Science (WOS) dataset is a collection of data on published papers
available from the Web of Science. WOS has been released in three versions: WOS-46985, WOS-11967 and WOS-5736. WOS-46985 is the
full dataset. WOS-11967 and WOS-5736 are two subsets of WOS-46985.
"""
_DATA_URL = (
"https://data.mendeley.com/datasets/9rw3vkcfy4/6/files/c9ea673d-5542-44c0-ab7b-f1311f7d61df/WebOfScience.zip?dl=1"
)
class WebOfScienceConfig(datasets.BuilderConfig):
"""BuilderConfig for WebOfScience."""
def __init__(self, **kwargs):
"""BuilderConfig for WebOfScience.
Args:
**kwargs: keyword arguments forwarded to super.
"""
super(WebOfScienceConfig, self).__init__(version=datasets.Version("6.0.0", ""), **kwargs)
class WebOfScience(datasets.GeneratorBasedBuilder):
"""Web of Science"""
BUILDER_CONFIGS = [
WebOfScienceConfig(
name="WOS5736",
description="""Web of Science Dataset WOS-5736: This dataset contains 5,736 documents with 11 categories which include 3 parents categories.""",
),
WebOfScienceConfig(
name="WOS11967",
description="""Web of Science Dataset WOS-11967: This dataset contains 11,967 documents with 35 categories which include 7 parents categories.""",
),
WebOfScienceConfig(
name="WOS46985",
description="""Web of Science Dataset WOS-46985: This dataset contains 46,985 documents with 134 categories which include 7 parents categories.""",
),
]
def _info(self):
return datasets.DatasetInfo(
description=_DESCRIPTION + self.config.description,
features=datasets.Features(
{
"input_data": datasets.Value("string"),
"label": datasets.Value("int32"),
"label_level_1": datasets.Value("int32"),
"label_level_2": datasets.Value("int32"),
}
),
# No default supervised_keys (as we have to pass both premise
# and hypothesis as input).
supervised_keys=None,
homepage="https://data.mendeley.com/datasets/9rw3vkcfy4/6",
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
# dl_manager is a datasets.download.DownloadManager that can be used to
dl_path = dl_manager.download_and_extract(_DATA_URL)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
# These kwargs will be passed to _generate_examples
gen_kwargs={
"input_file": os.path.join(dl_path, self.config.name, "X.txt"),
"label_file": os.path.join(dl_path, self.config.name, "Y.txt"),
"label_level_1_file": os.path.join(dl_path, self.config.name, "YL1.txt"),
"label_level_2_file": os.path.join(dl_path, self.config.name, "YL2.txt"),
},
)
]
def _generate_examples(self, input_file, label_file, label_level_1_file, label_level_2_file):
"""Yields examples."""
with open(input_file, encoding="utf-8") as f:
input_data = f.readlines()
with open(label_file, encoding="utf-8") as f:
label_data = f.readlines()
with open(label_level_1_file, encoding="utf-8") as f:
label_level_1_data = f.readlines()
with open(label_level_2_file, encoding="utf-8") as f:
label_level_2_data = f.readlines()
for i in range(len(input_data)):
yield i, {
"input_data": input_data[i],
"label": label_data[i],
"label_level_1": label_level_1_data[i],
"label_level_2": label_level_2_data[i],
}
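# Usage sketch (assuming this script is registered with the `datasets` library
# under the name "web_of_science"):
#
#   from datasets import load_dataset
#   ds = load_dataset("web_of_science", "WOS5736", split="train")
#   print(ds[0]["input_data"], ds[0]["label"], ds[0]["label_level_1"], ds[0]["label_level_2"])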
| 38.689394
| 159
| 0.644018
|
f5c764fa6f4858f8dfc45851b16918bd83cb2a8c
| 1,134
|
py
|
Python
|
Midterm Exam Program 1.py
|
DirkTayab/OOP-1-1
|
75d4259c7e7bd02b68bd722c8d3622d44d11eab2
|
[
"Apache-2.0"
] | null | null | null |
Midterm Exam Program 1.py
|
DirkTayab/OOP-1-1
|
75d4259c7e7bd02b68bd722c8d3622d44d11eab2
|
[
"Apache-2.0"
] | null | null | null |
Midterm Exam Program 1.py
|
DirkTayab/OOP-1-1
|
75d4259c7e7bd02b68bd722c8d3622d44d11eab2
|
[
"Apache-2.0"
] | null | null | null |
def main():
class TemperatureConversion:
def __init__(self, temp=1):
self._temp = temp
class CelsiusToFahrenheit(TemperatureConversion):
def conversion(self):
return (self._temp * 9) / 5 + 32
class CelsiusToKelvin(TemperatureConversion):
def conversion(self):
return self._temp + 273.15
class FahrenheitToCelsius(TemperatureConversion):
def conversion(self):
return ((self._temp - 32)*5)/9
class KelvinToCelsius(TemperatureConversion):
def conversion(self):
return self._temp - 273.15
tempInCelsius = float(input("Enter the temperature in Celsius: "))
convert = CelsiusToKelvin(tempInCelsius)
print(str(convert.conversion()) + " Kelvin")
convert = CelsiusToFahrenheit(tempInCelsius)
print(str(convert.conversion()) + " Fahrenheit")
tempInFahrenheit = float(input("Enter the Temperature in Fahrenheit: "))
convert = FahrenheitToCelsius(tempInFahrenheit)
print(str(convert.conversion()) + " Celsius")
tempInKelvin = float(input("Enter the Temperature in Kelvin: "))
convert = KelvinToCelsius(tempInKelvin)
print(str(convert.conversion()) + " Celsius")
main()
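# Example run (a sketch of the arithmetic): entering 100 for Celsius prints
# "373.15 Kelvin" and "212.0 Fahrenheit"; entering 212 for Fahrenheit prints
# "100.0 Celsius"; entering 273.15 for Kelvin prints "0.0 Celsius".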
| 34.363636
| 74
| 0.727513
|
9fa0a4c14f076bb07101b08e857e53627e5b790b
| 3,182
|
py
|
Python
|
CalibTracker/SiStripESProducers/test/python/siStripDelayDummyPrinter_cfg.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 6
|
2017-09-08T14:12:56.000Z
|
2022-03-09T23:57:01.000Z
|
CalibTracker/SiStripESProducers/test/python/siStripDelayDummyPrinter_cfg.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 545
|
2017-09-19T17:10:19.000Z
|
2022-03-07T16:55:27.000Z
|
CalibTracker/SiStripESProducers/test/python/siStripDelayDummyPrinter_cfg.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 14
|
2017-10-04T09:47:21.000Z
|
2019-10-23T18:04:45.000Z
|
# -*- coding: utf-8 -*-
# The following comments couldn't be translated into the new config version:
# upload to database
#string timetype = "timestamp"
import FWCore.ParameterSet.Config as cms
process = cms.Process("Reader")
# Use this to also get debug info (WARNING: the resulting file is > 200 MB).
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring("*"),
DelayReaderSummary = cms.untracked.PSet(
threshold = cms.untracked.string('INFO')
),
DelayReaderDebug = cms.untracked.PSet(
threshold = cms.untracked.string('DEBUG')
),
destinations = cms.untracked.vstring('DelayReaderSummary', 'DelayReaderDebug')
)
# How to use the EmptyIOVSource:
# the EmptyIOVSource will generate N events with a given interval.
# The N events must be specified in maxEvents as usual, while the
# first value, last value, timetype (runnumber, timestamp or lumiid) and interval
# must be specified in the EmptyIOVSource configuration block. It will then
# generate events with the given interval.
# To generate one event per run in a given range of runs you should use:
# - first - last value as the run range
# - interval == 1 (meaning: move one run unit at a time)
# - maxEvents = lastValue - firstValue, so that there is exactly one event per run;
#   otherwise it will stop before completing the range or go beyond it (to infinity).
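# Example with the settings below (a sketch of the arithmetic): firstValue = 97,
# lastValue = 102 and interval = 1, so choosing maxEvents.input = 102 - 97 = 5
# produces exactly one event per run across the range.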
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(5)
)
process.source = cms.Source("EmptyIOVSource",
timetype = cms.string('runnumber'),
firstValue = cms.uint64(97),
lastValue = cms.uint64(102),
interval = cms.uint64(1)
)
process.load("CalibTracker.SiStripESProducers.SiStripDelayESProducer_cfi")
# Need to specify the Record for each BaseDelay.
# Optionally the Label associated to the tag can also be specified (default = "").
process.siStripDelayESProducer.ListOfRecordToMerge = cms.VPSet(
cms.PSet(
Record = cms.string('SiStripBaseDelayRcd'),
Label = cms.string('baseDelay1'),
SumSign = cms.int32(1)
),
cms.PSet(
Record = cms.string('SiStripBaseDelayRcd'),
Label = cms.string('baseDelay2'),
SumSign = cms.int32(-1)
)
)
process.poolDBESSource = cms.ESSource(
"PoolDBESSource",
BlobStreamerName = cms.untracked.string('TBufferBlobStreamingService'),
DBParameters = cms.PSet(
messageLevel = cms.untracked.int32(2),
authenticationPath = cms.untracked.string('/afs/cern.ch/cms/DB/conddb')
),
timetype = cms.untracked.string('runnumber'),
connect = cms.string('sqlite_file:dbfile.db'),
toGet = cms.VPSet(
cms.PSet(
record = cms.string('SiStripBaseDelayRcd'),
tag = cms.string('SiStripBaseDelay_Ideal_31X_4'),
label = cms.untracked.string('baseDelay1')
),
cms.PSet(
record = cms.string('SiStripBaseDelayRcd'),
tag = cms.string('SiStripBaseDelay_Ideal_31X_3'),
label = cms.untracked.string('baseDelay2')
)
)
)
process.reader = cms.EDAnalyzer("SiStripDelayDummyPrinter")
process.p1 = cms.Path(process.reader)
| 34.967033
| 88
| 0.686047
|
dd47bfd4c7f418aa619274341721987ee83378cb
| 1,646
|
py
|
Python
|
env/Lib/site-packages/jupyter_server/services/api/handlers.py
|
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
|
8a4707301d19c3826c31026c4077930bcd6a8182
|
[
"MIT"
] | 1
|
2022-03-17T12:56:14.000Z
|
2022-03-17T12:56:14.000Z
|
env/Lib/site-packages/jupyter_server/services/api/handlers.py
|
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
|
8a4707301d19c3826c31026c4077930bcd6a8182
|
[
"MIT"
] | null | null | null |
env/Lib/site-packages/jupyter_server/services/api/handlers.py
|
andresgreen-byte/Laboratorio-1--Inversion-de-Capital
|
8a4707301d19c3826c31026c4077930bcd6a8182
|
[
"MIT"
] | 1
|
2022-03-28T09:19:34.000Z
|
2022-03-28T09:19:34.000Z
|
"""Tornado handlers for api specifications."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
import os
from tornado import web
from ...base.handlers import APIHandler
from ...base.handlers import JupyterHandler
from jupyter_server._tz import isoformat
from jupyter_server._tz import utcfromtimestamp
from jupyter_server.utils import ensure_async
class APISpecHandler(web.StaticFileHandler, JupyterHandler):
def initialize(self):
web.StaticFileHandler.initialize(self, path=os.path.dirname(__file__))
@web.authenticated
def get(self):
self.log.warning("Serving api spec (experimental, incomplete)")
return web.StaticFileHandler.get(self, "api.yaml")
def get_content_type(self):
return "text/x-yaml"
class APIStatusHandler(APIHandler):
_track_activity = False
@web.authenticated
async def get(self):
# if started was missing, use unix epoch
started = self.settings.get("started", utcfromtimestamp(0))
started = isoformat(started)
kernels = await ensure_async(self.kernel_manager.list_kernels())
total_connections = sum(k["connections"] for k in kernels)
last_activity = isoformat(self.application.last_activity())
model = {
"started": started,
"last_activity": last_activity,
"kernels": len(kernels),
"connections": total_connections,
}
self.finish(json.dumps(model, sort_keys=True))
default_handlers = [
(r"/api/spec.yaml", APISpecHandler),
(r"/api/status", APIStatusHandler),
]
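# Response sketch for GET /api/status (values are illustrative; the keys match the
# model built above and are sorted alphabetically by json.dumps):
#   {"connections": 1, "kernels": 1,
#    "last_activity": "2022-01-01T00:00:00.000000Z",
#    "started": "2022-01-01T00:00:00.000000Z"}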
| 29.927273
| 78
| 0.698056
|
85dd7cf75c4b259c3565716f6f3cf7aaa1f4bde8
| 587
|
py
|
Python
|
src/core/migrations/0004_auto_20190527_0926.py
|
pradipta/back-end
|
05895b051afc4c8e0cb17db708063d80102e9de5
|
[
"MIT"
] | 17
|
2019-05-11T22:15:34.000Z
|
2022-03-26T22:45:33.000Z
|
src/core/migrations/0004_auto_20190527_0926.py
|
pradipta/back-end
|
05895b051afc4c8e0cb17db708063d80102e9de5
|
[
"MIT"
] | 390
|
2019-05-23T10:48:57.000Z
|
2021-12-17T21:01:43.000Z
|
src/core/migrations/0004_auto_20190527_0926.py
|
pradipta/back-end
|
05895b051afc4c8e0cb17db708063d80102e9de5
|
[
"MIT"
] | 40
|
2019-05-21T14:41:57.000Z
|
2021-01-30T13:39:38.000Z
|
# Generated by Django 2.2.1 on 2019-05-27 14:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("core", "0003_auto_20190526_2129")]
operations = [
migrations.AddField(
model_name="profile",
name="disciplines",
field=models.CharField(blank=True, max_length=256, null=True),
),
migrations.AddField(
model_name="profile",
name="programming_languages",
field=models.CharField(blank=True, max_length=256, null=True),
),
]
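# Apply with the usual Django command (a sketch, assuming the app label is "core"):
#   python manage.py migrate core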
| 26.681818
| 74
| 0.613288
|
6f0bf8944a7f0301260c1b7d0d26bf2e5f31ae7b
| 3,247
|
py
|
Python
|
CADRE/orbit_dymos/orbit_eom.py
|
johnjasa/CADRE
|
a4ffd61582b8474953fc309aa540838a14f29dcf
|
[
"Apache-2.0"
] | null | null | null |
CADRE/orbit_dymos/orbit_eom.py
|
johnjasa/CADRE
|
a4ffd61582b8474953fc309aa540838a14f29dcf
|
[
"Apache-2.0"
] | null | null | null |
CADRE/orbit_dymos/orbit_eom.py
|
johnjasa/CADRE
|
a4ffd61582b8474953fc309aa540838a14f29dcf
|
[
"Apache-2.0"
] | null | null | null |
"""
Orbit discipline for CADRE
"""
from math import sqrt
from six.moves import range
import numpy as np
from openmdao.api import ExplicitComponent
from CADRE import rk4
# Constants
mu = 398600.44
Re = 6378.137
J2 = 1.08264e-3
J3 = -2.51e-6
J4 = -1.60e-6
C1 = -mu
C2 = -1.5*mu*J2*Re**2
C3 = -2.5*mu*J3*Re**3
C4 = 1.875*mu*J4*Re**4
class OrbitEOMComp(ExplicitComponent):
"""
Computes the Earth to body position vector in Earth-centered intertial frame.
"""
def initialize(self):
self.options.declare('num_nodes', types=(int,))
self.options.declare('GM', types=(float,), default=mu) # GM of earth (km**3/s**2)
def setup(self):
nn = self.options['num_nodes']
self.add_input('r_e2b_I', 1000.0*np.ones((nn, 3)), units='km',
desc='Position vectors from earth to satellite '
'in Earth-centered inertial frame over time')
self.add_input('v_e2b_I', 1000.0*np.ones((nn, 3)), units='km/s',
desc='Velocity vectors from earth to satellite '
'in Earth-centered inertial frame over time')
self.add_input('rmag_e2b_I', 1000.0*np.ones((nn,)), units='km',
desc='Position and velocity vectors from earth to satellite '
'in Earth-centered inertial frame over time')
self.add_input('a_pert_I', np.zeros((nn, 3)), units='km/s**2',
desc='Perturbing accelerations in the Earth-centered inertial '
'frame over time')
self.add_output('dXdt:r_e2b_I', 1000.0*np.ones((nn, 3)), units='km/s',
desc='Velocity vectors from earth to satellite '
'in Earth-centered inertial frame over time')
self.add_output('dXdt:v_e2b_I', 1000.0*np.ones((nn, 3)), units='km/s**2',
desc='Acceleration vectors from earth to satellite '
'in Earth-centered inertial frame over time')
ar = np.arange(3 * nn, dtype=int)
self.declare_partials(of='dXdt:r_e2b_I', wrt='v_e2b_I', rows=ar, cols=ar, val=1.0)
self.declare_partials(of='dXdt:v_e2b_I', wrt='r_e2b_I', rows=ar, cols=ar, val=1.0)
self.declare_partials(of='dXdt:v_e2b_I', wrt='a_pert_I', rows=ar, cols=ar, val=1.0)
rs = np.arange(nn * 3, dtype=int)
cs = np.repeat(np.arange(nn, dtype=int), 3)
self.declare_partials(of='dXdt:v_e2b_I', wrt='rmag_e2b_I', rows=rs, cols=cs, val=1.0)
def compute(self, inputs, outputs):
r = inputs['r_e2b_I']
rmag = inputs['rmag_e2b_I']
v = inputs['v_e2b_I']
GM = self.options['GM']
a_pert_I = inputs['a_pert_I']
outputs['dXdt:r_e2b_I'] = v
outputs['dXdt:v_e2b_I'] = (-GM * r) / rmag[:, np.newaxis]**3 + a_pert_I
def compute_partials(self, inputs, partials):
r = inputs['r_e2b_I']
rmag = inputs['rmag_e2b_I']
GM = self.options['GM']
partials['dXdt:v_e2b_I', 'rmag_e2b_I'] = (3 * GM * r / rmag[:, np.newaxis]**4).ravel()
partials['dXdt:v_e2b_I', 'r_e2b_I'] = -GM / np.repeat(rmag, 3)**3
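# Sanity-check sketch: for a satellite at rmag = 7000 km the two-body term has
# magnitude GM / rmag**2 = 398600.44 / 7000**2 ≈ 8.13e-3 km/s**2, which is what
# the -GM * r / rmag**3 expression in compute() evaluates to before a_pert_I is added.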
if __name__ == '__main__':
print('foo')
| 33.822917
| 94
| 0.577764
|
20e0ef206f6d200e2ed56e98609686b0a3bce45a
| 152,930
|
py
|
Python
|
pysnmp-with-texts/H3C-ACL-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/H3C-ACL-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/H3C-ACL-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module H3C-ACL-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/H3C-ACL-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:21:21 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
h3cCommon, = mibBuilder.importSymbols("HUAWEI-3COM-OID-MIB", "h3cCommon")
InetAddressType, InetAddressPrefixLength, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddressPrefixLength", "InetAddress")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter64, iso, Bits, NotificationType, Unsigned32, Counter32, ObjectIdentity, TimeTicks, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, ModuleIdentity, Integer32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "iso", "Bits", "NotificationType", "Unsigned32", "Counter32", "ObjectIdentity", "TimeTicks", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "ModuleIdentity", "Integer32", "IpAddress")
RowStatus, TruthValue, DisplayString, TextualConvention, MacAddress = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TruthValue", "DisplayString", "TextualConvention", "MacAddress")
h3cAcl = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8))
if mibBuilder.loadTexts: h3cAcl.setLastUpdated('200409211936Z')
if mibBuilder.loadTexts: h3cAcl.setOrganization('Hangzhou H3C Tech. Co., Ltd.')
if mibBuilder.loadTexts: h3cAcl.setContactInfo('Platform Team Hangzhou H3C Tech. Co., Ltd. Hai-Dian District Beijing P.R. China http://www.h3c.com Zip:100085')
if mibBuilder.loadTexts: h3cAcl.setDescription('ACL management information base for managing devices that support access control list and packet filtering. ')
class RuleAction(TextualConvention, Integer32):
description = "The value of rule's action. permit: The packet matching the rule will be permitted to forward. deny: The packet matching the rule will be denied. "
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("invalid", 1), ("permit", 2), ("deny", 3))
class CounterClear(TextualConvention, Integer32):
description = "cleared: Reset the value of the rule's counter. nouse: 'nouse' will be returned when getting. "
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("cleared", 1), ("nouse", 2))
class PortOp(TextualConvention, Integer32):
description = "The operation type of TCP and UDP. lt : Less than given port number. eq : Equal to given port number. gt : Greater than given port number. neq : Not equal to given port number. range : Between two port numbers. Default value is 'invalid'. "
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))
namedValues = NamedValues(("invalid", 0), ("lt", 1), ("eq", 2), ("gt", 3), ("neq", 4), ("range", 5))
class DSCPValue(TextualConvention, Integer32):
description = 'The value of DSCP. <0-63> Value of DSCP af11 Specify Assured Forwarding 11 service(10) af12 Specify Assured Forwarding 12 service(12) af13 Specify Assured Forwarding 13 service(14) af21 Specify Assured Forwarding 21 service(18) af22 Specify Assured Forwarding 22 service(20) af23 Specify Assured Forwarding 23 service(22) af31 Specify Assured Forwarding 31 service(26) af32 Specify Assured Forwarding 32 service(28) af33 Specify Assured Forwarding 33 service(30) af41 Specify Assured Forwarding 41 service(34) af42 Specify Assured Forwarding 42 service(36) af43 Specify Assured Forwarding 43 service(38) be Specify Best Effort service(0) cs1 Specify Class Selector 1 service(8) cs2 Specify Class Selector 2 service(16) cs3 Specify Class Selector 3 service(24) cs4 Specify Class Selector 4 service(32) cs5 Specify Class Selector 5 service(40) cs6 Specify Class Selector 6 service(48) cs7 Specify Class Selector 7 service(56) ef Specify Expedited Forwarding service(46) '
status = 'current'
displayHint = 'd'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(ValueRangeConstraint(0, 63), ValueRangeConstraint(255, 255), )
class TCPFlag(TextualConvention, Integer32):
description = "Type of TCP. invalid(0) tcpack(1) TCP protocol ACK Packet tcpfin(2) TCP protocol PIN Packet tcppsh(3) TCP protocol PUSH Packet tcprst(4) TCP protocol RST Packet tcpsyn(5) TCP protocol SYN Packet tcpurg(6) TCP protocol URG Packet Default value is 'invalid'. "
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))
namedValues = NamedValues(("invalid", 0), ("tcpack", 1), ("tcpfin", 2), ("tcppsh", 3), ("tcprst", 4), ("tcpsyn", 5), ("tcpurg", 6))
class FragmentFlag(TextualConvention, Integer32):
description = "Type of fragment. invalid(0) fragment(1) Frag-Type Fragment fragmentSubseq(2) Frag-Type Fragment-subsequent nonFragment(3) Frag-Type non-Fragment nonSubseq(4) Frag-Type non-subsequent Default value is 'invalid'. "
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))
namedValues = NamedValues(("invalid", 0), ("fragment", 1), ("fragmentSubseq", 2), ("nonFragment", 3), ("nonSubseq", 4))
class AddressFlag(TextualConvention, Integer32):
description = "Address flag to select IPv6 Address. Default value is 'invalid'. t64SrcAddrPre64DestAddrPre(1): The mean of the enumeration 't64SrcAddrPre64DestAddrPre' is that system gets the 64 bits prefix of source address and the 64 bits prefix of destination address. t64SrcAddrPre64DestAddrSuf(2): The mean of the enumeration 't64SrcAddrPre64DestAddrSuf' is that system gets the 64 bits prefix of source address and the 64 bits suffix of destination address. t64SrcAddrSuf64DestAddrPre(3): The mean of the enumeration 't64SrcAddrSuf64DestAddrPre' is that system gets the 64 bits suffix of source address and the 64 bits prefix of destination address. t64SrcAddrSuf64DestAddrSuf(4): The mean of the enumeration 't64SrcAddrSuf64DestAddrSuf' is that system gets the 64 bits suffix of source address and the 64 bits suffix of destination address. t128SourceAddress(5): The mean of the enumeration 't128SourceAddress' is that system gets the 128 bits of source address. t128DestinationAddress(6): The mean of the enumeration 't128SourceAddress' is that system gets the 128 bits of destination address. "
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))
namedValues = NamedValues(("invalid", 0), ("t64SrcAddrPre64DestAddrPre", 1), ("t64SrcAddrPre64DestAddrSuf", 2), ("t64SrcAddrSuf64DestAddrPre", 3), ("t64SrcAddrSuf64DestAddrSuf", 4), ("t128SourceAddress", 5), ("t128DestinationAddress", 6))
class DirectionType(TextualConvention, Integer32):
description = 'The direction: inbound or outbound.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("inbound", 1), ("outbound", 2))
h3cAclMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1))
h3cAclMode = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("linkBased", 1), ("ipBased", 2))).clone('ipBased')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclMode.setStatus('current')
if mibBuilder.loadTexts: h3cAclMode.setDescription('Access-list mode.')
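# Illustrative sketch (assumption, not part of the generated MIB): reading the
# h3cAclMode scalar from a device with pysnmp's high-level API. The agent
# address, the community string and the availability of this compiled MIB on
# the pysnmp MIB search path are assumptions made only for the example.
def _exampleGetAclMode(agent='192.0.2.1', community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
        SnmpEngine(), CommunityData(community, mpModel=1),
        UdpTransportTarget((agent, 161)), ContextData(),
        ObjectType(ObjectIdentity('H3C-ACL-MIB', 'h3cAclMode', 0))))
    # varBinds[0] is the (OID, value) pair; value 1 means linkBased, 2 ipBased
    return None if (errorIndication or errorStatus) else varBinds[0][1]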
h3cAclNumGroupTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2), )
if mibBuilder.loadTexts: h3cAclNumGroupTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupTable.setDescription('Configure the match-order of number-acl group.')
h3cAclNumGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclNumGroupAclNum"))
if mibBuilder.loadTexts: h3cAclNumGroupEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupEntry.setDescription('Define the index of h3cAclNumGroupTable.')
h3cAclNumGroupAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1000, 5999)))
if mibBuilder.loadTexts: h3cAclNumGroupAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupAclNum.setDescription('The index of number-acl group. Interface type:1000..1999 Basic type:2000..2999 Advanced type:3000..3999 Link type:4000..4999 User type:5000..5999')
h3cAclNumGroupMatchOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("config", 1), ("auto", 2))).clone('config')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumGroupMatchOrder.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupMatchOrder.setDescription('The match-order of number-acl group.')
h3cAclNumGroupSubitemNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclNumGroupSubitemNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupSubitemNum.setDescription("The number of number-acl group's node.")
h3cAclNumGroupDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclNumGroupDescription.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupDescription.setDescription('The description of this acl group.')
h3cAclNumGroupCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("cleared", 1), ("nouse", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumGroupCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupCountClear.setDescription("Reset the value of rules' counter, which belong to this group.")
h3cAclNumGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 2, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumGroupRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumGroupRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
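# Illustrative sketch (assumption, not part of the generated MIB): a numbered
# ACL group is created through this table by writing createAndGo(4) to the
# RowStatus column of the chosen ACL number; the match order can be set in the
# same request. Agent address and community string are placeholders.
def _exampleCreateNumGroup(aclNum=3001, agent='192.0.2.1', community='private'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, setCmd)
    return next(setCmd(
        SnmpEngine(), CommunityData(community, mpModel=1),
        UdpTransportTarget((agent, 161)), ContextData(),
        ObjectType(ObjectIdentity('H3C-ACL-MIB', 'h3cAclNumGroupRowStatus', aclNum), 4),
        ObjectType(ObjectIdentity('H3C-ACL-MIB', 'h3cAclNumGroupMatchOrder', aclNum), 1)))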
h3cAclNameGroupTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3), )
if mibBuilder.loadTexts: h3cAclNameGroupTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupTable.setDescription('Create acl-group that identified by name.')
h3cAclNameGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclNameGroupIndex"))
if mibBuilder.loadTexts: h3cAclNameGroupEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupEntry.setDescription('Define the index of h3cAclNameGroupTable.')
h3cAclNameGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10000, 12999)))
if mibBuilder.loadTexts: h3cAclNameGroupIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupIndex.setDescription('The index of name-acl group.')
h3cAclNameGroupCreateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNameGroupCreateName.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupCreateName.setDescription('The name of name-acl group.')
h3cAclNameGroupTypes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("basic", 1), ("advanced", 2), ("ifBased", 3), ("link", 4), ("user", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNameGroupTypes.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupTypes.setDescription('The type of name-acl group.')
h3cAclNameGroupMatchOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("config", 1), ("auto", 2))).clone('config')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNameGroupMatchOrder.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupMatchOrder.setDescription('The match-order of name-acl group.')
h3cAclNameGroupSubitemNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclNameGroupSubitemNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupSubitemNum.setDescription("The number of name-acl group's node.")
h3cAclNameGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 3, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNameGroupRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclNameGroupRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
h3cAclBasicRuleTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4), )
if mibBuilder.loadTexts: h3cAclBasicRuleTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicRuleTable.setDescription('Configure the rule for basic acl group.')
h3cAclBasicRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclBasicAclNum"), (0, "H3C-ACL-MIB", "h3cAclBasicSubitem"))
if mibBuilder.loadTexts: h3cAclBasicRuleEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicRuleEntry.setDescription('Define the index of h3cAclBasicRuleTable.')
h3cAclBasicAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(2000, 2999), ValueRangeConstraint(10000, 12999), )))
if mibBuilder.loadTexts: h3cAclBasicAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicAclNum.setDescription('The index of basic acl group.')
h3cAclBasicSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: h3cAclBasicSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicSubitem.setDescription('The subindex of basic acl group.')
h3cAclBasicAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicAct.setDescription('The action of basic acl rule.')
h3cAclBasicSrcIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicSrcIp.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicSrcIp.setDescription('Source IP-address of basic acl rule.')
h3cAclBasicSrcWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 5), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicSrcWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicSrcWild.setDescription('Source IP-address wild of basic acl rule.')
h3cAclBasicTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicTimeRangeName.setDescription('The Time-range of basic acl rule.')
h3cAclBasicFragments = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 7), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicFragments.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicFragments.setDescription('The flag of matching fragmented packet.')
h3cAclBasicLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 8), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicLog.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicLog.setDescription('The flag of log.')
h3cAclBasicEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 9), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclBasicEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclBasicCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclBasicCount.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicCount.setDescription('The count of packets matched by the basic rule.')
h3cAclBasicCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("cleared", 1), ("nouse", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicCountClear.setDescription('Reset the value of counter.')
h3cAclBasicRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 4, 1, 12), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclBasicRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclBasicRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
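# Illustrative sketch (assumption, not part of the generated MIB): walking the
# source-address column of the basic rule table to list configured rules.
# Agent address and community string are placeholders for the example.
def _exampleWalkBasicRules(agent='192.0.2.1', community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    rules = []
    for errorIndication, errorStatus, errorIndex, varBinds in nextCmd(
            SnmpEngine(), CommunityData(community, mpModel=1),
            UdpTransportTarget((agent, 161)), ContextData(),
            ObjectType(ObjectIdentity('H3C-ACL-MIB', 'h3cAclBasicSrcIp')),
            lexicographicMode=False):
        if errorIndication or errorStatus:
            break
        rules.extend(varBinds)    # each varBind is (column OID + row index, value)
    return rules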
h3cAclAdvancedRuleTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5), )
if mibBuilder.loadTexts: h3cAclAdvancedRuleTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedRuleTable.setDescription('Configure the rule for advanced acl group.')
h3cAclAdvancedRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclAdvancedAclNum"), (0, "H3C-ACL-MIB", "h3cAclAdvancedSubitem"))
if mibBuilder.loadTexts: h3cAclAdvancedRuleEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedRuleEntry.setDescription('Define the index of h3cAclAdvancedRuleTable.')
h3cAclAdvancedAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3000, 3999), ValueRangeConstraint(10000, 12999), )))
if mibBuilder.loadTexts: h3cAclAdvancedAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedAclNum.setDescription('The index of advanced acl group.')
h3cAclAdvancedSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: h3cAclAdvancedSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedSubitem.setDescription('The subindex of advanced acl group.')
h3cAclAdvancedAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedAct.setDescription('The action of Advance acl rule.')
h3cAclAdvancedProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedProtocol.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedProtocol.setDescription('The protocol-type of advanced acl group. <1-255> Protocol number gre GRE tunneling(47) icmp Internet Control Message Protocol(1) igmp Internet Group Management Protocol(2) ip Any IP protocol ipinip IP in IP tunneling(4) ospf OSPF routing protocol(89) tcp Transmission Control Protocol (6) udp User Datagram Protocol (17)')
h3cAclAdvancedSrcIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 5), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedSrcIp.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedSrcIp.setDescription('Source IP-address of advanced acl group.')
h3cAclAdvancedSrcWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 6), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedSrcWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedSrcWild.setDescription('Source IP-address wild of advanced acl group.')
h3cAclAdvancedSrcOp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 0), ("lt", 1), ("eq", 2), ("gt", 3), ("neq", 4), ("range", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedSrcOp.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedSrcOp.setDescription("The source IP-address's operator of advanced acl group.")
h3cAclAdvancedSrcPort1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedSrcPort1.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedSrcPort1.setDescription('The fourth layer source port1.')
h3cAclAdvancedSrcPort2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedSrcPort2.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedSrcPort2.setDescription('The fourth layer source port2.')
h3cAclAdvancedDestIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 10), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedDestIp.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedDestIp.setDescription('Destination IP-address of advanced acl group.')
h3cAclAdvancedDestWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 11), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedDestWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedDestWild.setDescription('Destination IP-address wild of advanced acl group.')
h3cAclAdvancedDestOp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 0), ("lt", 1), ("eq", 2), ("gt", 3), ("neq", 4), ("range", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedDestOp.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedDestOp.setDescription("The destination IP-address's operator of advanced acl group.")
h3cAclAdvancedDestPort1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedDestPort1.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedDestPort1.setDescription('The fourth layer destination port1.')
h3cAclAdvancedDestPort2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedDestPort2.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedDestPort2.setDescription('The fourth layer destination port2.')
h3cAclAdvancedPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 7), ValueRangeConstraint(255, 255), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedPrecedence.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedPrecedence.setDescription("The value of IP-packet's precedence. <0-7> Value of precedence routine Specify routine precedence(0) priority Specify priority precedence(1) immediate Specify immediate precedence(2) flash Specify flash precedence(3) flash-override Specify flash-override precedence(4) critical Specify critical precedence(5) internet Specify internetwork control precedence(6) network Specify network control precedence(7) ")
h3cAclAdvancedTos = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 15), ValueRangeConstraint(255, 255), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedTos.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedTos.setDescription("The value of IP-packet's TOS. <0-15> Value of TOS(type of service) max-reliability Match packets with max reliable TOS(2) max-throughput Match packets with max throughput TOS(4) min-delay Match packets with min delay TOS(8) min-monetary-cost Match packets with min monetary cost TOS(1) normal Match packets with normal TOS(0) ")
h3cAclAdvancedDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 63), ValueRangeConstraint(255, 255), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedDscp.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedDscp.setDescription('The value of DSCP. <0-63> Value of DSCP af11 Specify Assured Forwarding 11 service(10) af12 Specify Assured Forwarding 12 service(12) af13 Specify Assured Forwarding 13 service(14) af21 Specify Assured Forwarding 21 service(18) af22 Specify Assured Forwarding 22 service(20) af23 Specify Assured Forwarding 23 service(22) af31 Specify Assured Forwarding 31 service(26) af32 Specify Assured Forwarding 32 service(28) af33 Specify Assured Forwarding 33 service(30) af41 Specify Assured Forwarding 41 service(34) af42 Specify Assured Forwarding 42 service(36) af43 Specify Assured Forwarding 43 service(38) be Specify Best Effort service(0) cs1 Specify Class Selector 1 service(8) cs2 Specify Class Selector 2 service(16) cs3 Specify Class Selector 3 service(24) cs4 Specify Class Selector 4 service(32) cs5 Specify Class Selector 5 service(40) cs6 Specify Class Selector 6 service(48) cs7 Specify Class Selector 7 service(56) ef Specify Expedited Forwarding service(46)')
h3cAclAdvancedEstablish = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 18), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedEstablish.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedEstablish.setDescription('Establish flag.')
h3cAclAdvancedTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedTimeRangeName.setDescription('The Time-range of advanced acl rule.')
h3cAclAdvancedIcmpType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedIcmpType.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedIcmpType.setDescription('The type of ICMP packet. Integer32 ICMP type echo Type=8, Code=0 echo-reply Type=0, Code=0 fragmentneed-DFset Type=3, Code=4 host-redirect Type=5, Code=1 host-tos-redirect Type=5, Code=3 host-unreachable Type=3, Code=1 information-reply Type=16, Code=0 information-request Type=15, Code=0 net-redirect Type=5, Code=0 net-tos-redirect Type=5, Code=2 net-unreachable Type=3, Code=0 parameter-problem Type=12, Code=0 port-unreachable Type=3, Code=3 protocol-unreachable Type=3, Code=2 reassembly-timeout Type=11, Code=1 source-quench Type=4, Code=0 source-route-failed Type=3, Code=5 timestamp-reply Type=14, Code=0 timestamp-request Type=13, Code=0 ttl-exceeded Type=11, Code=0 ')
h3cAclAdvancedIcmpCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedIcmpCode.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedIcmpCode.setDescription('The code of ICMP packet.')
h3cAclAdvancedFragments = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 22), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedFragments.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedFragments.setDescription('The flag of matching fragmented packet.')
h3cAclAdvancedLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 23), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedLog.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedLog.setDescription('The flag of log.')
h3cAclAdvancedEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 24), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclAdvancedEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclAdvancedCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclAdvancedCount.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedCount.setDescription('The count of packets matched by the advanced rule.')
h3cAclAdvancedCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("cleared", 1), ("nouse", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedCountClear.setDescription('Reset the value of counter.')
h3cAclAdvancedRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 5, 1, 27), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclAdvancedRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclAdvancedRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
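# Illustrative note (assumption, not part of the generated MIB): rows of
# h3cAclAdvancedRuleTable are indexed by (h3cAclAdvancedAclNum,
# h3cAclAdvancedSubitem), so an instance OID is the column OID followed by both
# index values. For example, the action of rule 1 in ACL 3001 would be read at:
#   h3cAclAdvancedAct.3001.1  ->  1.3.6.1.4.1.2011.10.2.8.1.5.1.3.3001.1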
h3cAclIfRuleTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6), )
if mibBuilder.loadTexts: h3cAclIfRuleTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfRuleTable.setDescription('Configure the rule for interface-based acl group.')
h3cAclIfRuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclIfAclNum"), (0, "H3C-ACL-MIB", "h3cAclIfSubitem"))
if mibBuilder.loadTexts: h3cAclIfRuleEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfRuleEntry.setDescription('Define the index of h3cAclIfRuleTable.')
h3cAclIfAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1000, 1999), ValueRangeConstraint(10000, 12999), )))
if mibBuilder.loadTexts: h3cAclIfAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfAclNum.setDescription('The index of interface-based acl group.')
h3cAclIfSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: h3cAclIfSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfSubitem.setDescription('The subindex of interface-based acl group.')
h3cAclIfAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIfAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfAct.setDescription('The action of interface-based acl group.')
h3cAclIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIfIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfIndex.setDescription('The index of interface.')
h3cAclIfAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 5), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIfAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfAny.setDescription('The flag of matching any interface.')
h3cAclIfTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIfTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfTimeRangeName.setDescription('The Time-range of interface-based acl rule.')
h3cAclIfLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 7), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIfLog.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfLog.setDescription('The flag of log.')
h3cAclIfEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 8), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclIfEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclIfCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclIfCount.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfCount.setDescription('The count of packets matched by the interface-based rule.')
h3cAclIfCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("cleared", 1), ("nouse", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIfCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfCountClear.setDescription("Reset the value of the rule's counter.")
h3cAclIfRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 6, 1, 11), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIfRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclIfRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
h3cAclLinkTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7), )
if mibBuilder.loadTexts: h3cAclLinkTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkTable.setDescription('Create link acl.')
h3cAclLinkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclLinkAclNum"), (0, "H3C-ACL-MIB", "h3cAclLinkSubitem"))
if mibBuilder.loadTexts: h3cAclLinkEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkEntry.setDescription('The entry of the link acl table.')
h3cAclLinkAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(4000, 4999), ValueRangeConstraint(10000, 12999), )))
if mibBuilder.loadTexts: h3cAclLinkAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkAclNum.setDescription('The index of link-based acl group.')
h3cAclLinkSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: h3cAclLinkSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkSubitem.setDescription('The subindex of link-based acl group.')
h3cAclLinkAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkAct.setDescription('The action of link-based acl group.')
h3cAclLinkProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2048, 2054, 32821, 34915, 34916, 34887))).clone(namedValues=NamedValues(("invalid", 0), ("ip", 2048), ("arp", 2054), ("rarp", 32821), ("pppoeControl", 34915), ("pppoeData", 34916), ("mpls", 34887))).clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkProtocol.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkProtocol.setDescription('The layer 2 protocol-type of link acl rule.')
h3cAclLinkFormatType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("invalid", 0), ("ethernetII", 1), ("snap", 2), ("ieee802Dot3And2", 3), ("ieee802Dot3", 4)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkFormatType.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkFormatType.setDescription('Format type of link acl rule.')
h3cAclLinkVlanTag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("invalid", 0), ("tagged", 1), ("untagged", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkVlanTag.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkVlanTag.setDescription('The flag of vlan tag of link acl rule.')
h3cAclLinkVlanPri = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 7), ValueRangeConstraint(255, 255), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkVlanPri.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkVlanPri.setDescription('Vlan priority of link acl rule.')
h3cAclLinkSrcVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkSrcVlanId.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkSrcVlanId.setDescription('Source vlan ID of link acl rule.')
h3cAclLinkSrcMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 9), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkSrcMac.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkSrcMac.setDescription('Source mac of link acl rule.')
h3cAclLinkSrcMacWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 10), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkSrcMacWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkSrcMacWild.setDescription('Source mac wildcard of link acl rule.')
h3cAclLinkSrcIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkSrcIfIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkSrcIfIndex.setDescription('Source IfIndex of link acl rule.')
h3cAclLinkSrcAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 12), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkSrcAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkSrcAny.setDescription('The flag of matching any source.')
h3cAclLinkDestVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkDestVlanId.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkDestVlanId.setDescription('Destination vlan ID of link acl rule.')
h3cAclLinkDestMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 14), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkDestMac.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkDestMac.setDescription('Destination mac of link acl rule.')
h3cAclLinkDestMacWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 15), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkDestMacWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkDestMacWild.setDescription('Destination mac wildcard of link acl rule.')
h3cAclLinkDestIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkDestIfIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkDestIfIndex.setDescription('Destination IfIndex of link acl rule.')
h3cAclLinkDestAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 17), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkDestAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkDestAny.setDescription('The flag of matching any destination.')
h3cAclLinkTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkTimeRangeName.setDescription('The Time-range of link-based acl rule.')
h3cAclLinkEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 19), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclLinkEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclLinkRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 20), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
h3cAclLinkTypeCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkTypeCode.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkTypeCode.setDescription('The type of layer 2 protocol. 0x0000...0xffff.')
h3cAclLinkTypeMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 22), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkTypeMask.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkTypeMask.setDescription('The mask of layer 2 protocol. 0x0000...0xffff.')
h3cAclLinkLsapCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 23), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkLsapCode.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkLsapCode.setDescription('The type of LSAP. 0x0000...0xffff.')
h3cAclLinkLsapMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 24), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkLsapMask.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkLsapMask.setDescription('The mask of LSAP. 0x0000...0xffff.')
h3cAclLinkL2LabelRangeOp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("invalid", 0), ("lt", 1), ("eq", 2), ("gt", 3), ("neq", 4), ("range", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkL2LabelRangeOp.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkL2LabelRangeOp.setDescription('Operation symbol of the MPLS label. If the symbol is range(5), the objects h3cAclLinkL2LabelRangeBegin and h3cAclLinkL2LabelRangeEnd should have different values indicating a range. Otherwise, only h3cAclLinkL2LabelRangeBegin counts; object h3cAclLinkL2LabelRangeEnd is ignored. invalid(0) -- unavailable lt(1) -- less than eq(2) -- equal gt(3) -- greater than neq(4) -- not equal range(5) -- a range with two ends included ')
h3cAclLinkL2LabelRangeBegin = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 26), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkL2LabelRangeBegin.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkL2LabelRangeBegin.setDescription('The beginning of VPLS VC label.')
h3cAclLinkL2LabelRangeEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 27), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkL2LabelRangeEnd.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkL2LabelRangeEnd.setDescription('The end of VPLS VC label.')
h3cAclLinkMplsExp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 7, 1, 28), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclLinkMplsExp.setStatus('current')
if mibBuilder.loadTexts: h3cAclLinkMplsExp.setDescription("The value of MPLS-packet's Exp.")
h3cAclUserTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8), )
if mibBuilder.loadTexts: h3cAclUserTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserTable.setDescription('Create user acl.')
h3cAclUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclUserAclNum"), (0, "H3C-ACL-MIB", "h3cAclUserSubitem"))
if mibBuilder.loadTexts: h3cAclUserEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserEntry.setDescription('The entry of user acl table.')
h3cAclUserAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(5000, 5999), ValueRangeConstraint(10000, 12999), )))
if mibBuilder.loadTexts: h3cAclUserAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserAclNum.setDescription('The number of the user acl.')
h3cAclUserSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)))
if mibBuilder.loadTexts: h3cAclUserSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserSubitem.setDescription('The subitem of the user acl.')
h3cAclUserAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclUserAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserAct.setDescription('The action of the user acl.')
h3cAclUserFormatType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("invalid", 0), ("ethernetII", 1), ("snap", 2), ("ieee802Dot2And3", 3), ("ieee802Dot4", 4))).clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclUserFormatType.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserFormatType.setDescription('Format type.')
h3cAclUserVlanTag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 0))).clone(namedValues=NamedValues(("tagged", 1), ("untagged", 2), ("invalid", 0))).clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclUserVlanTag.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserVlanTag.setDescription('Vlan tag exists or not.')
h3cAclUserRuleStr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclUserRuleStr.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserRuleStr.setDescription('Rule string.')
h3cAclUserRuleMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 80))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclUserRuleMask.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserRuleMask.setDescription('Rule mask.')
h3cAclUserTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclUserTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserTimeRangeName.setDescription('The Time-range of the user defined acl.')
h3cAclUserEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 9), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclUserEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclUserRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 8, 1, 10), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclUserRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclUserRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
h3cAclActiveTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9), )
if mibBuilder.loadTexts: h3cAclActiveTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveTable.setDescription('Active acl.')
h3cAclActiveEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclActiveAclIndex"), (0, "H3C-ACL-MIB", "h3cAclActiveIfIndex"), (0, "H3C-ACL-MIB", "h3cAclActiveVlanID"), (0, "H3C-ACL-MIB", "h3cAclActiveDirection"))
if mibBuilder.loadTexts: h3cAclActiveEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveEntry.setDescription('The entry of active acl table.')
h3cAclActiveAclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 5999), ValueRangeConstraint(10000, 12999), )))
if mibBuilder.loadTexts: h3cAclActiveAclIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveAclIndex.setDescription('Acl index.')
h3cAclActiveIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: h3cAclActiveIfIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveIfIndex.setDescription('IfIndex.')
h3cAclActiveVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 3), Integer32())
if mibBuilder.loadTexts: h3cAclActiveVlanID.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveVlanID.setDescription('The lower 16 bits are the VLAN ID; the higher 16 bits, if not zero, describe the slot ID of the L3plus board. ')
h3cAclActiveDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 0))).clone(namedValues=NamedValues(("input", 1), ("output", 2), ("both", 3), ("invalid", 0))))
if mibBuilder.loadTexts: h3cAclActiveDirection.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveDirection.setDescription('Direction.')
h3cAclActiveUserAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(5000, 5999), ValueRangeConstraint(10000, 12999), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclActiveUserAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveUserAclNum.setDescription('The number of the user acl.')
h3cAclActiveUserAclSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclActiveUserAclSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveUserAclSubitem.setDescription('The subitem of the user acl.')
h3cAclActiveIpAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(2000, 3999), ValueRangeConstraint(10000, 12999), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclActiveIpAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveIpAclNum.setDescription('The number of the IP acl.')
h3cAclActiveIpAclSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclActiveIpAclSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveIpAclSubitem.setDescription('The subitem of the IP acl.')
h3cAclActiveLinkAclNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(4000, 4999), ValueRangeConstraint(10000, 12999), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclActiveLinkAclNum.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveLinkAclNum.setDescription('The number of the link acl.')
h3cAclActiveLinkAclSubitem = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclActiveLinkAclSubitem.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveLinkAclSubitem.setDescription('The subitem of the link acl.')
h3cAclActiveRuntime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclActiveRuntime.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveRuntime.setDescription('Whether the ACL is running or not.')
h3cAclActiveRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 9, 1, 12), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclActiveRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclActiveRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, Destroy.')
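# Illustrative note (assumption, not part of the generated MIB): entries of
# h3cAclActiveTable are indexed by (h3cAclActiveAclIndex, h3cAclActiveIfIndex,
# h3cAclActiveVlanID, h3cAclActiveDirection). Applying advanced ACL 3001
# inbound on ifIndex 5 would therefore address instance .3001.5.0.1 of the
# relevant columns, typically by writing h3cAclActiveIpAclNum together with
# createAndGo(4) on h3cAclActiveRowStatus; exact behaviour is device-dependent.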
h3cAclIDSTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10), )
if mibBuilder.loadTexts: h3cAclIDSTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSTable.setDescription('Configure the rule for IDS.')
h3cAclIDSEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1), ).setIndexNames((1, "H3C-ACL-MIB", "h3cAclIDSName"))
if mibBuilder.loadTexts: h3cAclIDSEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSEntry.setDescription('The entry of acl ids table.')
h3cAclIDSName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32)))
if mibBuilder.loadTexts: h3cAclIDSName.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSName.setDescription('The name index of the IDS table.')
h3cAclIDSSrcMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 2), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSSrcMac.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSSrcMac.setDescription('Source mac of IDS acl rule.')
h3cAclIDSDestMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 3), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSDestMac.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSDestMac.setDescription('Destination mac of IDS acl rule.')
h3cAclIDSSrcIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSSrcIp.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSSrcIp.setDescription('Source IP-address of IDS acl rule.')
h3cAclIDSSrcWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 5), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSSrcWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSSrcWild.setDescription('Source IP-address wild of IDS acl rule.')
h3cAclIDSDestIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 6), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSDestIp.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSDestIp.setDescription('Destination IP-address of IDS acl rule.')
h3cAclIDSDestWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 7), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSDestWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSDestWild.setDescription('Destination IP-address wild of IDS acl rule.')
h3cAclIDSSrcPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSSrcPort.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSSrcPort.setDescription('The fourth layer source port.')
h3cAclIDSDestPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSDestPort.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSDestPort.setDescription('The fourth layer destination port.')
h3cAclIDSProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSProtocol.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSProtocol.setDescription('The protocol-type of advanced acl group. <1-255> Protocol number gre GRE tunneling(47) icmp Internet Control Message Protocol(1) igmp Internet Group Management Protocol(2) ip Any IP protocol ipinip IP in IP tunneling(4) ospf OSPF routing protocol(89) tcp Transmission Control Protocol (6) udp User Datagram Protocol (17) ')
h3cAclIDSDenyTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 11), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSDenyTime.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSDenyTime.setDescription('The maximum number of seconds for which this acl rule denies packets.')
h3cAclIDSAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSAct.setDescription('The action of IDS acl rule.')
h3cAclIDSRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 1, 10, 1, 13), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIDSRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclIDSRowStatus.setDescription('RowStatus, now supports three states: CreateAndGo, Active, and Destroy.')
h3cAclMib2Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2))
h3cAclMib2GlobalGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1))
h3cAclMib2NodesGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 1))
h3cAclMib2Mode = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("linkBased", 1), ("ipBased", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclMib2Mode.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2Mode.setDescription('The applying mode of ACL.')
h3cAclMib2Version = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclMib2Version.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2Version.setDescription("The version of this file. The output value has the format of 'xx' or 'xxx'. For example: 10 means 1.0; 125 means 12.5. ")
h3cAclMib2ObjectsCapabilities = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 1, 3), Bits().clone(namedValues=NamedValues(("h3cAclMib2Mode", 0), ("h3cAclVersion", 1), ("h3cAclMib2ObjectsCapabilities", 2), ("h3cAclMib2CapabilityTable", 3), ("h3cAclNumberGroupTable", 4), ("h3cAclIPAclBasicTable", 5), ("h3cAclIPAclAdvancedTable", 6), ("h3cAclMACTable", 7), ("h3cAclEnUserTable", 8), ("h3cAclMib2ProcessingStatus", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclMib2ObjectsCapabilities.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2ObjectsCapabilities.setDescription('The objects of h3cAclMib2Objects.')
h3cAclMib2ProcessingStatus = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("processing", 1), ("done", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclMib2ProcessingStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2ProcessingStatus.setDescription('The processing status of ACL operation.')
h3cAclMib2CapabilityTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2), )
if mibBuilder.loadTexts: h3cAclMib2CapabilityTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2CapabilityTable.setDescription('The capability of mib2.')
h3cAclMib2CapabilityEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclMib2EntityType"), (0, "H3C-ACL-MIB", "h3cAclMib2EntityIndex"), (0, "H3C-ACL-MIB", "h3cAclMib2ModuleIndex"), (0, "H3C-ACL-MIB", "h3cAclMib2CharacteristicsIndex"))
if mibBuilder.loadTexts: h3cAclMib2CapabilityEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2CapabilityEntry.setDescription('The information of Capability of mib2.')
h3cAclMib2EntityType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("system", 1), ("interface", 2))))
if mibBuilder.loadTexts: h3cAclMib2EntityType.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2EntityType.setDescription('The type of entity . system: The entity is systemic level. interface: The entity is interface level. ')
h3cAclMib2EntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2, 1, 2), Integer32())
if mibBuilder.loadTexts: h3cAclMib2EntityIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2EntityIndex.setDescription("The index of entity. If h3cAclMib2EntityType is system, the value of this object is 0. If h3cAclMib2EntityType is interface, the value of this object is equal to 'ifIndex'. ")
h3cAclMib2ModuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("layer3", 1), ("layer2", 2), ("userDefined", 3))))
if mibBuilder.loadTexts: h3cAclMib2ModuleIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2ModuleIndex.setDescription('The module index of ACL.')
h3cAclMib2CharacteristicsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2, 1, 4), Integer32())
if mibBuilder.loadTexts: h3cAclMib2CharacteristicsIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2CharacteristicsIndex.setDescription('The characteristics index of mib2. See DESCRIPTION of h3cAclMib2CharacteristicsValue to get detail information about the value of this object. ')
h3cAclMib2CharacteristicsDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclMib2CharacteristicsDesc.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2CharacteristicsDesc.setDescription('The description of characteristics.')
h3cAclMib2CharacteristicsValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 2, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclMib2CharacteristicsValue.setStatus('current')
if mibBuilder.loadTexts: h3cAclMib2CharacteristicsValue.setDescription("The value of capability of this object. TypeOfRuleStringValue : notSupport(0) and the length of RuleString. TypeOfCodeValue : OnlyOneNotSupport(0), MoreThanOneNotSupport(1) If h3cAclMib2CharacteristicsValue is 'moreThanOneNotSupport', h3cAclMib2CharacteristicsDesc must be used to depict which protocols are not supported. The output value of h3cAclMib2CharacteristicsDesc has the format of 'a,b'. For example, 'ip,rarp'. layer3 Module: Index Characteristics value 1 SourceIPAddress notSupport(0) 2 DestinationIPAddress notSupport(0) 3 SourcePort notSupport(0) 4 DestinationPort notSupport(0) 5 IPPrecedence notSupport(0) 6 TOS notSupport(0) 7 DSCP notSupport(0) 8 TCPFlag notSupport(0) 9 FragmentFlag notSupport(0) 10 Log notSupport(0) 11 RuleMatchCounter notSupport(0) 12 ResetRuleMatchCounter notSupport(0) 13 VPN notSupport(0) 15 protocol notSupport(0) 16 AddressFlag notSupport(0) layer2 Module: Index Characteristics value 1 ProtocolType TypeOfCodeValue 2 SourceMAC notSupport(0) 3 DestinationMAC notSupport(0) 4 LSAPType TypeOfCodeValue 5 CoS notSupport(0) UserDefined Module: Index Characteristics value 1 UserDefaultOffset TypeOfRuleStringValue 2 UserL2RuleOffset TypeOfRuleStringValue 3 UserMplsOffset TypeOfRuleStringValue 4 UserIPv4Offset TypeOfRuleStringValue 5 UserIPv6Offset TypeOfRuleStringValue 6 UserL4Offset TypeOfRuleStringValue 7 UserL5Offset TypeOfRuleStringValue ")
h3cAclNumberGroupTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3), )
if mibBuilder.loadTexts: h3cAclNumberGroupTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupTable.setDescription('A table of the number acl group information.')
h3cAclNumberGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclNumberGroupType"), (0, "H3C-ACL-MIB", "h3cAclNumberGroupIndex"))
if mibBuilder.loadTexts: h3cAclNumberGroupEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupEntry.setDescription('Number acl group information entry.')
h3cAclNumberGroupType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))).clone('ipv4'))
if mibBuilder.loadTexts: h3cAclNumberGroupType.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupType.setDescription('The type of number group. Basic ACL and Advanced ACL support ipv4 and ipv6. The range of Basic ACL is from 2000 to 2999. The range of Advanced ACL is from 3000 to 3999. Simple ACL supports ipv6 only. The range of Simple ACL is from 10000 to 42767. MAC ACL and User ACL support ipv4 only. The range of MAC ACL is from 4000 to 4999. The range of User ACL is from 5000 to 5999. ')
h3cAclNumberGroupIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(2000, 5999), ValueRangeConstraint(10000, 42767), )))
if mibBuilder.loadTexts: h3cAclNumberGroupIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupIndex.setDescription('The group index of number acl. Basic type:2000..2999 Advanced type:3000..3999 MAC type:4000..4999 User type:5000..5999 Simple type:10000..42767 ')
h3cAclNumberGroupRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumberGroupRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupRowStatus.setDescription('RowStatus.')
h3cAclNumberGroupMatchOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("config", 1), ("auto", 2))).clone('config')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumberGroupMatchOrder.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupMatchOrder.setDescription('The match-order of number acl group.')
h3cAclNumberGroupStep = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20)).clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumberGroupStep.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupStep.setDescription('The step of rule index.')
h3cAclNumberGroupDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumberGroupDescription.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupDescription.setDescription('Description of this acl group.')
h3cAclNumberGroupCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 7), CounterClear().clone('nouse')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclNumberGroupCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupCountClear.setDescription('Reset the value of counters of this group.')
h3cAclNumberGroupRuleCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclNumberGroupRuleCounter.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupRuleCounter.setDescription('The rule count of number acl group.')
h3cAclNumberGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclNumberGroupName.setStatus('current')
if mibBuilder.loadTexts: h3cAclNumberGroupName.setDescription('Name of this acl group.')
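# --- Illustrative example (not part of the compiled MIB definitions) --------
# A minimal sketch of how a manager could create a numbered ACL group through
# h3cAclNumberGroupTable, assuming pysnmp's synchronous high-level API and a
# reachable agent; the address and community string below are placeholders.
# The instance index is (h3cAclNumberGroupType, h3cAclNumberGroupIndex), so an
# IPv4 advanced group 3000 uses the index suffix (1, 3000).
def _exampleCreateAclGroup(target='192.0.2.1', community='private'):
    """Create IPv4 advanced ACL group 3000 via RowStatus createAndGo(4)."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, setCmd)
    from pysnmp.proto.rfc1902 import Integer32 as Int32
    # h3cAclNumberGroupRowStatus instance: <column OID>.<type=ipv4(1)>.<index=3000>
    rowStatusInstance = (1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 1, 3, 1, 3, 1, 3000)
    errorIndication, errorStatus, _, _ = next(setCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((target, 161)), ContextData(),
        ObjectType(ObjectIdentity(rowStatusInstance), Int32(4))))  # createAndGo(4)
    return errorIndication or errorStatus
# -----------------------------------------------------------------------------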
h3cAclIPAclGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2))
h3cAclIPAclBasicTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2), )
if mibBuilder.loadTexts: h3cAclIPAclBasicTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicTable.setDescription("A table of basic rule group. If some objects of this table are not supported by some products, these objects can't be created, changed and applied. Default value of these objects will be returned when they are read. ")
h3cAclIPAclBasicEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclNumberGroupType"), (0, "H3C-ACL-MIB", "h3cAclNumberGroupIndex"), (0, "H3C-ACL-MIB", "h3cAclIPAclBasicRuleIndex"))
if mibBuilder.loadTexts: h3cAclIPAclBasicEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicEntry.setDescription('Basic rule group information.')
h3cAclIPAclBasicRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65534)))
if mibBuilder.loadTexts: h3cAclIPAclBasicRuleIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicRuleIndex.setDescription('The rule index of basic acl group.')
h3cAclIPAclBasicRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicRowStatus.setDescription('RowStatus.')
h3cAclIPAclBasicAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 3), RuleAction()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicAct.setDescription('The action of basic acl rule.')
h3cAclIPAclBasicSrcAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 4), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcAddrType.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcAddrType.setDescription('The IP addresses type of IP pool.')
h3cAclIPAclBasicSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 5), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcAddr.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcAddr.setDescription('The value of a local IP address available for this association. The type of this address is determined by the value of h3cAclIPAclBasicSrcAddrType. ')
h3cAclIPAclBasicSrcPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 6), InetAddressPrefixLength()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcPrefix.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcPrefix.setDescription('Denotes the length of a generic Internet network address prefix. A value of n corresponds to an IP address mask which has n contiguous 1-bits from the most significant bit (MSB) and all other bits set to 0. ')
h3cAclIPAclBasicSrcAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 7), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcAny.setDescription('The flag of matching any IP address.')
h3cAclIPAclBasicSrcWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 8), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicSrcWild.setDescription("Source IPv4 address wild. Only IPv4 Basic Rule supports this object. Default value is '0.0.0.0'. ")
h3cAclIPAclBasicTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicTimeRangeName.setDescription('The Time-range of basic acl rule. Default value is null. ')
h3cAclIPAclBasicFragmentFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 10), FragmentFlag()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicFragmentFlag.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicFragmentFlag.setDescription('The flag of matching fragmented packets.')
h3cAclIPAclBasicLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 11), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicLog.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicLog.setDescription('The packet will be logged when it matches the rule.')
h3cAclIPAclBasicCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclIPAclBasicCount.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicCount.setDescription('The count of packets matched by the rule.')
h3cAclIPAclBasicCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 13), CounterClear()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclIPAclBasicCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicCountClear.setDescription('Reset the value of counter.')
h3cAclIPAclBasicEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 14), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclIPAclBasicEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclIPAclBasicVpnInstanceName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 15), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicVpnInstanceName.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicVpnInstanceName.setDescription('The VPN name to which the rule will be applied. Default value is null. ')
h3cAclIPAclBasicComment = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 16), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicComment.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicComment.setDescription('The description of ACL rule. Default value is Zero-length String. ')
h3cAclIPAclBasicCounting = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 17), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicCounting.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicCounting.setDescription('The packet will be counted when it matches the rule. It is disabled by default. ')
h3cAclIPAclBasicRouteTypeAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 18), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicRouteTypeAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicRouteTypeAny.setDescription('The flag of matching any type of routing header of IPv6 packet. ')
h3cAclIPAclBasicRouteTypeValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), )).clone(65535)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclBasicRouteTypeValue.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclBasicRouteTypeValue.setDescription('Match specify type of routing header of IPv6 packet.')
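# --- Illustrative example (not part of the compiled MIB definitions) --------
# A sketch of how the per-rule hit counters defined above
# (h3cAclIPAclBasicCount, column 12 of h3cAclIPAclBasicTable) could be walked
# with pysnmp's nextCmd; the agent address and community are placeholders.
def _exampleWalkBasicRuleCounters(target='192.0.2.1', community='public'):
    """Yield (instance OID as text, hit count) for every basic ACL rule."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    countColumn = (1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 2, 1, 12)
    for errInd, errStat, _, varBinds in nextCmd(
            SnmpEngine(), CommunityData(community),
            UdpTransportTarget((target, 161)), ContextData(),
            ObjectType(ObjectIdentity(countColumn)),
            lexicographicMode=False):          # stay inside this column
        if errInd or errStat:
            break
        for name, value in varBinds:
            yield name.prettyPrint(), int(value)
# -----------------------------------------------------------------------------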
h3cAclIPAclAdvancedTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3), )
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTable.setDescription("A table of advanced and simple acl group. If some objects of this table are not supported by some products, these objects can't be created, changed and applied. Default value of these objects will be returned when they are read. ")
h3cAclIPAclAdvancedEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclNumberGroupType"), (0, "H3C-ACL-MIB", "h3cAclNumberGroupIndex"), (0, "H3C-ACL-MIB", "h3cAclIPAclAdvancedRuleIndex"))
if mibBuilder.loadTexts: h3cAclIPAclAdvancedEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedEntry.setDescription('Advanced acl group information.')
h3cAclIPAclAdvancedRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65534)))
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRuleIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRuleIndex.setDescription('The rule index of advanced acl group. As a Simple ACL group, the value of this object must be 0. As an Advanced ACL group, the value of this object is ranging from 0 to 65534. ')
h3cAclIPAclAdvancedRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRowStatus.setDescription('RowStatus.')
h3cAclIPAclAdvancedAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 3), RuleAction()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedAct.setDescription('The action of advanced acl rule.')
h3cAclIPAclAdvancedProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedProtocol.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedProtocol.setDescription('The protocol-type of advanced acl group. <1-255> Protocol number gre GRE tunneling(47) icmp Internet Control Message Protocol(1) icmpv6 Internet Control Message Protocol6(58) igmp Internet Group Management Protocol(2) ip Any IPv4 protocol ipv6 Any IPv6 protocol ipinip IP in IP tunneling(4) ospf OSPF routing protocol(89) tcp Transmission Control Protocol (6) udp User Datagram Protocol (17) ipv6-ah IPv6 Authentication Header(51) ipv6-esp IPv6 Encapsulating Security Payload(50) ')
h3cAclIPAclAdvancedAddrFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 5), AddressFlag().clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedAddrFlag.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedAddrFlag.setDescription('Address flag to select address.')
h3cAclIPAclAdvancedSrcAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 6), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcAddrType.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcAddrType.setDescription('The IP addresses type of IP pool.')
h3cAclIPAclAdvancedSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 7), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcAddr.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcAddr.setDescription('The value of a local IP address available for this association. The type of this address is determined by the value of h3cAclIPAclAdvancedSrcAddrType. ')
h3cAclIPAclAdvancedSrcPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 8), InetAddressPrefixLength()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcPrefix.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcPrefix.setDescription('Denotes the length of a generic Internet network address prefix. A value of n corresponds to an IP address mask which has n contiguous 1-bits from the most significant bit (MSB) and all other bits set to 0. ')
h3cAclIPAclAdvancedSrcAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 9), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcAny.setDescription('The flag of matching any IP address.')
h3cAclIPAclAdvancedSrcWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 10), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcWild.setDescription("Source IPv4 address wild. Only IPv4 Advanced Rule supports this object. Default value is '0.0.0.0'. ")
h3cAclIPAclAdvancedSrcOp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 11), PortOp()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcOp.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcOp.setDescription('Source port operation symbol of advanced acl group.')
h3cAclIPAclAdvancedSrcPort1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcPort1.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcPort1.setDescription('The fourth layer source port1.')
h3cAclIPAclAdvancedSrcPort2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(65535)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcPort2.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedSrcPort2.setDescription('The fourth layer source port2.')
h3cAclIPAclAdvancedDestAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 14), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestAddrType.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestAddrType.setDescription('The IP addresses type of IP pool.')
h3cAclIPAclAdvancedDestAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 15), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestAddr.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestAddr.setDescription('The value of a local IP address available for this association. The type of this address is determined by the value of h3cAclIPAclAdvancedDestAddrType. ')
h3cAclIPAclAdvancedDestPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 16), InetAddressPrefixLength()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestPrefix.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestPrefix.setDescription('Denotes the length of a generic Internet network address prefix. A value of n corresponds to an IP address mask which has n contiguous 1-bits from the most significant bit (MSB) and all other bits set to 0. ')
h3cAclIPAclAdvancedDestAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 17), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestAny.setDescription('The flag of matching any IP address.')
h3cAclIPAclAdvancedDestWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 18), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestWild.setDescription("Destination IPv4 address wild. Only IPv4 Advanced Rule supports this object. Default value is '0.0.0.0'. ")
h3cAclIPAclAdvancedDestOp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 19), PortOp()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestOp.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestOp.setDescription('Destination port operation symbol of advanced acl group.')
h3cAclIPAclAdvancedDestPort1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestPort1.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestPort1.setDescription('The fourth layer destination port1.')
h3cAclIPAclAdvancedDestPort2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(65535)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestPort2.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDestPort2.setDescription('The fourth layer destination port2.')
h3cAclIPAclAdvancedIcmpType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), )).clone(65535)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedIcmpType.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedIcmpType.setDescription('The type of ICMP packet.')
h3cAclIPAclAdvancedIcmpCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), )).clone(65535)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedIcmpCode.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedIcmpCode.setDescription('The code of ICMP packet.')
h3cAclIPAclAdvancedPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 7), ValueRangeConstraint(255, 255), )).clone(255)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedPrecedence.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedPrecedence.setDescription("The value of IP-packet's precedence. <0-7> Value of precedence routine Specify routine precedence(0) priority Specify priority precedence(1) immediate Specify immediate precedence(2) flash Specify flash precedence(3) flash-override Specify flash-override precedence(4) critical Specify critical precedence(5) internet Specify internetwork control precedence(6) network Specify network control precedence(7) ")
h3cAclIPAclAdvancedTos = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 15), ValueRangeConstraint(255, 255), )).clone(255)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTos.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTos.setDescription("The value of IP-packet's TOS. <0-15> Value of TOS(type of service) max-reliability Match packets with max reliable TOS(2) max-throughput Match packets with max throughput TOS(4) min-delay Match packets with min delay TOS(8) min-monetary-cost Match packets with min monetary cost TOS(1) normal Match packets with normal TOS(0) ")
h3cAclIPAclAdvancedDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 26), DSCPValue().clone(255)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDscp.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedDscp.setDescription('The value of DSCP of IP packet.')
h3cAclIPAclAdvancedTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 27), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTimeRangeName.setDescription('The Time-range of advanced acl rule. Default value is null. ')
h3cAclIPAclAdvancedTCPFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 28), TCPFlag().clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTCPFlag.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTCPFlag.setDescription('The packet type of TCP protocol.')
h3cAclIPAclAdvancedFragmentFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 29), FragmentFlag().clone('invalid')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedFragmentFlag.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedFragmentFlag.setDescription('The flag of matching fragmented packets. Now it supports two values: 0 or 2.')
h3cAclIPAclAdvancedLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 30), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedLog.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedLog.setDescription('Log matched packets.')
h3cAclIPAclAdvancedCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 31), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedCount.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedCount.setDescription('The count of packets matched by the rule.')
h3cAclIPAclAdvancedCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 32), CounterClear().clone('nouse')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedCountClear.setDescription('Reset the value of counter.')
h3cAclIPAclAdvancedEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 33), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclIPAclAdvancedVpnInstanceName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 34), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedVpnInstanceName.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedVpnInstanceName.setDescription('The VPN name to which the rule will be applied. Default value is null. ')
h3cAclIPAclAdvancedComment = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 35), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedComment.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedComment.setDescription('The description of ACL rule. Default value is Zero-length String. ')
h3cAclIPAclAdvancedReflective = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 36), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedReflective.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedReflective.setDescription('The flag of reflective.')
h3cAclIPAclAdvancedCounting = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 37), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedCounting.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedCounting.setDescription('The packet will be counted when it matches the rule. It is disabled by default. ')
h3cAclIPAclAdvancedTCPFlagMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 38), Bits().clone(namedValues=NamedValues(("tcpack", 0), ("tcpfin", 1), ("tcppsh", 2), ("tcprst", 3), ("tcpsyn", 4), ("tcpurg", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTCPFlagMask.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTCPFlagMask.setDescription('The TCP Flag Mask. This is a bit-map of possible conditions. The various bit positions are: |0 |tcpack | |1 |tcpfin | |2 |tcppsh | |3 |tcprst | |4 |tcpsyn | |5 |tcpurg | ')
h3cAclIPAclAdvancedTCPFlagValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 39), Bits().clone(namedValues=NamedValues(("tcpack", 0), ("tcpfin", 1), ("tcppsh", 2), ("tcprst", 3), ("tcpsyn", 4), ("tcpurg", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTCPFlagValue.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedTCPFlagValue.setDescription('The TCP Flag Value. This is a bit-map of possible conditions. The various bit positions are: |0 |tcpack | |1 |tcpfin | |2 |tcppsh | |3 |tcprst | |4 |tcpsyn | |5 |tcpurg | ')
h3cAclIPAclAdvancedRouteTypeAny = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 40), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRouteTypeAny.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRouteTypeAny.setDescription('The flag of matching any type of routing header of IPv6 packet. ')
h3cAclIPAclAdvancedRouteTypeValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 41), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), )).clone(65535)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRouteTypeValue.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedRouteTypeValue.setDescription('The type of routing header of IPv6 packet.')
h3cAclIPAclAdvancedFlowLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 2, 3, 1, 42), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 1048575), ValueRangeConstraint(4294967295, 4294967295), )).clone(4294967295)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclIPAclAdvancedFlowLabel.setStatus('current')
if mibBuilder.loadTexts: h3cAclIPAclAdvancedFlowLabel.setDescription('The value of flow label of IPv6 packet header.')
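# --- Illustrative example (not part of the compiled MIB definitions) --------
# h3cAclIPAclAdvancedTCPFlagMask/Value above use the SNMP BITS construct, in
# which named bit 0 corresponds to the most significant bit of the first
# octet.  A small helper, given here purely as an illustration, that builds
# the octet value for a set of the named TCP flags:
_exampleTcpFlagBits = {'tcpack': 0, 'tcpfin': 1, 'tcppsh': 2,
                       'tcprst': 3, 'tcpsyn': 4, 'tcpurg': 5}

def _exampleEncodeTcpFlags(*flags):
    """Return the one-octet BITS encoding for the given flag names."""
    value = 0
    for flag in flags:
        value |= 0x80 >> _exampleTcpFlagBits[flag]   # bit 0 is the MSB of octet 0
    return bytes([value])
# e.g. _exampleEncodeTcpFlags('tcpsyn', 'tcpack') == b'\x88'
# -----------------------------------------------------------------------------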
h3cAclMACAclGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3))
h3cAclMACTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1), )
if mibBuilder.loadTexts: h3cAclMACTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACTable.setDescription("A table of MAC acl group. If some objects of this table are not supported by some products, these objects can't be created, changed and applied. Default value of these objects will be returned when they are read. ")
h3cAclMACEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclNumberGroupType"), (0, "H3C-ACL-MIB", "h3cAclNumberGroupIndex"), (0, "H3C-ACL-MIB", "h3cAclMACRuleIndex"))
if mibBuilder.loadTexts: h3cAclMACEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACEntry.setDescription('MAC acl group information.')
h3cAclMACRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65534)))
if mibBuilder.loadTexts: h3cAclMACRuleIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACRuleIndex.setDescription('The rule index of MAC-based acl group.')
h3cAclMACRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACRowStatus.setDescription('RowStatus.')
h3cAclMACAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 3), RuleAction()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACAct.setDescription('The action of MAC acl rule.')
h3cAclMACTypeCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACTypeCode.setReference('rfc894, rfc1010.')
if mibBuilder.loadTexts: h3cAclMACTypeCode.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACTypeCode.setDescription('The type of protocol.')
h3cAclMACTypeMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACTypeMask.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACTypeMask.setDescription('The mask of protocol.')
h3cAclMACSrcMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 6), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACSrcMac.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACSrcMac.setDescription("Source MAC of MAC acl rule. Default value is '00:00:00:00:00:00'. ")
h3cAclMACSrcMacWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 7), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACSrcMacWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACSrcMacWild.setDescription("Source MAC wildcard of MAC acl rule. Default value is '00:00:00:00:00:00'. ")
h3cAclMACDestMac = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 8), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACDestMac.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACDestMac.setDescription("Destination MAC of MAC acl rule. Default value is '00:00:00:00:00:00'. ")
h3cAclMACDestMacWild = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 9), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACDestMacWild.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACDestMacWild.setDescription("Destination MAC wildcard of MAC acl rule. Default value is '00:00:00:00:00:00'. ")
h3cAclMACLsapCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACLsapCode.setReference('ANSI/IEEE Std 802.3')
if mibBuilder.loadTexts: h3cAclMACLsapCode.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACLsapCode.setDescription('The type of LSAP.')
h3cAclMACLsapMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACLsapMask.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACLsapMask.setDescription('The mask of LSAP.')
h3cAclMACCos = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 12), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACCos.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACCos.setDescription('Vlan priority of MAC acl rule.')
h3cAclMACTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACTimeRangeName.setDescription('The Time-range of MAC acl rule. Default value is null. ')
h3cAclMACCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 14), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclMACCount.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACCount.setDescription('The count of frames matched by the rule.')
h3cAclMACCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 15), CounterClear()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclMACCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACCountClear.setDescription('Reset the value of counter.')
h3cAclMACEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 16), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclMACEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclMACComment = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACComment.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACComment.setDescription('The description of ACL rule. Default value is Zero-length String. ')
h3cAclMACLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 18), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACLog.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACLog.setDescription('The packet will be logged when it matches the rule. It is disabled by default. ')
h3cAclMACCounting = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 3, 1, 1, 19), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclMACCounting.setStatus('current')
if mibBuilder.loadTexts: h3cAclMACCounting.setDescription('The packet will be counted when it matches the rule. It is disabled by default. ')
h3cAclEnUserAclGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4))
h3cAclEnUserTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3), )
if mibBuilder.loadTexts: h3cAclEnUserTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserTable.setDescription("A table of user acl group information. If some objects of this table are not supported by some products, these objects can't be created, changed and applied. Default value of these objects will be returned when they are read. ")
h3cAclEnUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclNumberGroupType"), (0, "H3C-ACL-MIB", "h3cAclNumberGroupIndex"), (0, "H3C-ACL-MIB", "h3cAclEnUserRuleIndex"))
if mibBuilder.loadTexts: h3cAclEnUserEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserEntry.setDescription('User defined acl group entry.')
h3cAclEnUserRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65534)))
if mibBuilder.loadTexts: h3cAclEnUserRuleIndex.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserRuleIndex.setDescription('The subitem of the user acl.')
h3cAclEnUserRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserRowStatus.setDescription('RowStatus.')
h3cAclEnUserAct = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 3), RuleAction()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserAct.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserAct.setDescription('The action of user defined acl rule.')
h3cAclEnUserStartString = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserStartString.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserStartString.setDescription("The rule, matching packets, input like this: 'RuleOffset','RuleString','RuleMask'. RuleOffset: The value of this object is defined by product and it indicates the offset of the rule mask in the packet(unit: byte). RuleString: The length of RuleString is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. RuleMask: The length of RuleMask is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. For example: 10,10af,ffff. Default value is null. ")
h3cAclEnUserL2String = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserL2String.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserL2String.setDescription("The rule, matching layer 2 packets, input like this: 'RuleOffset','RuleString','RuleMask'. RuleOffset: The value is defined by product and it indicates offset of the rule mask in the packet(unit: byte). RuleString: The length of RuleString is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. RuleMask: The length of RuleMask is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. For example: '10','10af','ffff'. Default value is null. ")
h3cAclEnUserMplsString = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserMplsString.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserMplsString.setDescription("The rule, matching mpls packets, input like this: 'RuleOffset','RuleString','RuleMask'. RuleOffset: The value is defined by product and it indicates offset of the rule mask in the packet(unit: byte). RuleString: The length of RuleString is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. RuleMask: The length of RuleMask is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. For example: '10','10af','ffff'. Default value is null. ")
h3cAclEnUserIPv4String = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserIPv4String.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserIPv4String.setDescription("The rule, matching IPv4 packets, input like this: 'RuleOffset','RuleString','RuleMask'. RuleOffset: The value is defined by product and it indicates offset of the rule mask in the packet(unit: byte). RuleString: The length of RuleString is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. RuleMask: The length of RuleMask is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. For example: '10','10af','ffff'. Default value is null. ")
h3cAclEnUserIPv6String = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserIPv6String.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserIPv6String.setDescription("The rule, matching IPv6 packets, input like this: 'RuleOffset','RuleString','RuleMask'. RuleOffset: The value is defined by product and it indicates offset of the rule mask in the packet(unit: byte). RuleString: The length of RuleString is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. RuleMask: The length of RuleMask is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. For example: '10','10af','ffff'. Default value is null. ")
h3cAclEnUserL4String = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserL4String.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserL4String.setDescription("The rule, matching layer 4 packets, input like this: 'RuleOffset','RuleString','RuleMask'. RuleOffset: The value is defined by product and it indicates offset of the rule mask in the packet(unit: byte). RuleString: The length of RuleString is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. RuleMask: The length of RuleMask is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. For example: '10','10af','ffff'. Default value is null. ")
h3cAclEnUserL5String = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserL5String.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserL5String.setDescription("The rule, matching layer 5 packets, input like this: 'RuleOffset','RuleString','RuleMask'. RuleOffset: The value is defined by product and it indicates offset of the rule mask in the packet(unit: byte). RuleString: The length of RuleString is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. RuleMask: The length of RuleMask is defined by product. The string must be hexadecimal. The length of string must be multiple of 2. For example: '10','10af','ffff'. Default value is null. ")
h3cAclEnUserTimeRangeName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserTimeRangeName.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserTimeRangeName.setDescription('The Time-range of user acl rule. Default value is null.')
h3cAclEnUserCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclEnUserCount.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserCount.setDescription('The count of packets matched by the rule.')
h3cAclEnUserCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 13), CounterClear()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cAclEnUserCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserCountClear.setDescription('Reset the value of counter.')
h3cAclEnUserEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 14), TruthValue().clone('false')).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclEnUserEnable.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserEnable.setDescription('The rule is active or not. true : active false : inactive ')
h3cAclEnUserComment = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 15), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserComment.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserComment.setDescription('The description of ACL rule. Default value is Zero-length String. ')
h3cAclEnUserLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 16), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserLog.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserLog.setDescription('The packet will be logged when it matches the rule. It is disabled by default. ')
h3cAclEnUserCounting = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 4, 3, 1, 17), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cAclEnUserCounting.setStatus('current')
if mibBuilder.loadTexts: h3cAclEnUserCounting.setDescription('The packet will be counted when it matches the rule. It is disabled by default. ')
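# --- Illustrative example (not part of the compiled MIB definitions) --------
# Every h3cAclEnUser*String column above expects the textual form
# 'RuleOffset,RuleString,RuleMask' where RuleString and RuleMask are
# hexadecimal strings of even length.  A purely illustrative helper that
# assembles and sanity-checks such a value (offset limits are product
# specific and are not validated here):
def _exampleUserRuleString(offset, ruleHex, maskHex):
    """Return e.g. '10,10af,ffff' for offset=10, ruleHex='10af', maskHex='ffff'."""
    for part in (ruleHex, maskHex):
        int(part, 16)                          # raises ValueError if not hexadecimal
        if len(part) % 2:
            raise ValueError('RuleString/RuleMask must have an even length')
    return '%d,%s,%s' % (offset, ruleHex, maskHex)
# -----------------------------------------------------------------------------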
h3cAclResourceGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5))
h3cAclResourceUsageTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1), )
if mibBuilder.loadTexts: h3cAclResourceUsageTable.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceUsageTable.setDescription('The table shows ACL resource usage information. Support for resource types that are denoted by h3cAclResourceType object varies with products. If a type is not supported, the corresponding row for the type will not be instantiated in this table. ')
h3cAclResourceUsageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cAclResourceChassis"), (0, "H3C-ACL-MIB", "h3cAclResourceSlot"), (0, "H3C-ACL-MIB", "h3cAclResourceChip"), (0, "H3C-ACL-MIB", "h3cAclResourceType"))
if mibBuilder.loadTexts: h3cAclResourceUsageEntry.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceUsageEntry.setDescription('Each row contains a brief description of the resource type, a port range associated with the chip, total, reserved, and configured amount of resource of this type, the percent of resource that has been allocated, and so on. ')
h3cAclResourceChassis = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 1), Unsigned32())
if mibBuilder.loadTexts: h3cAclResourceChassis.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceChassis.setDescription('The chassis number. On a centralized or distributed device, the value for this node is always zero. ')
h3cAclResourceSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 2), Unsigned32())
if mibBuilder.loadTexts: h3cAclResourceSlot.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceSlot.setDescription('The slot number. On a centralized device, the value for this node is always zero.')
h3cAclResourceChip = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 3), Unsigned32())
if mibBuilder.loadTexts: h3cAclResourceChip.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceChip.setDescription('The chip number. On a single chip device, the value for this node is always zero.')
h3cAclResourceType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)))
if mibBuilder.loadTexts: h3cAclResourceType.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceType.setDescription('The resource type.')
h3cAclPortRange = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 5), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclPortRange.setStatus('current')
if mibBuilder.loadTexts: h3cAclPortRange.setDescription('The port range associated with the chip. Commas are used to separate multiple port ranges, for example, Ethernet1/2 to Ethernet1/12, Ethernet1/31 to Ethernet1/48. ')
h3cAclResourceTotal = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclResourceTotal.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceTotal.setDescription('Total TCAM entries of the resource type.')
h3cAclResourceReserved = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclResourceReserved.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceReserved.setDescription('The amount of reserved TCAM entries of the resource type.')
h3cAclResourceConfigured = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclResourceConfigured.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceConfigured.setDescription('The amount of configured TCAM entries of the resource type.')
h3cAclResourceUsagePercent = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclResourceUsagePercent.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceUsagePercent.setDescription('The percent of TCAM entries that have been used for this resource type. ')
h3cAclResourceTypeDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cAclResourceTypeDescription.setStatus('current')
if mibBuilder.loadTexts: h3cAclResourceTypeDescription.setDescription('The description of this resource type.')
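# --- Illustrative example (not part of the compiled MIB definitions) --------
# A sketch of how h3cAclResourceUsageTable could be polled to watch TCAM
# consumption, walking the usage-percent (column 9) and description
# (column 10) columns in parallel; the agent address and community string
# are placeholders.
def _exampleReportResourceUsage(target='192.0.2.1', community='public'):
    """Yield (resource type description, usage percent) pairs."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    entry = (1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 2, 5, 1, 1)
    for errInd, errStat, _, varBinds in nextCmd(
            SnmpEngine(), CommunityData(community),
            UdpTransportTarget((target, 161)), ContextData(),
            ObjectType(ObjectIdentity(entry + (9,))),    # h3cAclResourceUsagePercent
            ObjectType(ObjectIdentity(entry + (10,))),   # h3cAclResourceTypeDescription
            lexicographicMode=False):
        if errInd or errStat:
            break
        (_, percent), (_, description) = varBinds
        yield description.prettyPrint(), int(percent)
# -----------------------------------------------------------------------------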
h3cAclPacketFilterObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3))
h3cPfilterScalarGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 1))
h3cPfilterDefaultAction = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("permit", 1), ("deny", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cPfilterDefaultAction.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterDefaultAction.setDescription('The default action of packet filter. By default, the packet filter permits packets that do not match any ACL rule to pass. ')
h3cPfilterProcessingStatus = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("processing", 1), ("done", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterProcessingStatus.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterProcessingStatus.setDescription('This object shows the status of the system when applying packet filter. It is forbidden to set or read in h3cAclPacketFilterObjects MIB module when the value is processing. ')
h3cPfilterApplyTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2), )
if mibBuilder.loadTexts: h3cPfilterApplyTable.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyTable.setDescription("A table of packet filter application. It's not supported to set default action on an entity, but supported to enable hardware count of default action on an entity. ")
h3cPfilterApplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cPfilterApplyObjType"), (0, "H3C-ACL-MIB", "h3cPfilterApplyObjIndex"), (0, "H3C-ACL-MIB", "h3cPfilterApplyDirection"), (0, "H3C-ACL-MIB", "h3cPfilterApplyAclType"), (0, "H3C-ACL-MIB", "h3cPfilterApplyAclIndex"))
if mibBuilder.loadTexts: h3cPfilterApplyEntry.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyEntry.setDescription('Packet filter application information entry.')
h3cPfilterApplyObjType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("interface", 1), ("vlan", 2), ("global", 3))))
if mibBuilder.loadTexts: h3cPfilterApplyObjType.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyObjType.setDescription('The object type of packet filter application. interface: Apply an ACL to the interface to filter packets. vlan: Apply an ACL to the VLAN to filter packets. global: Apply an ACL globally to filter packets. ')
h3cPfilterApplyObjIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)))
if mibBuilder.loadTexts: h3cPfilterApplyObjIndex.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyObjIndex.setDescription('The object ID of packet filter application. Interface: interface index, equal to ifIndex VLAN: VLAN ID, 1..4094 Global: 0 ')
h3cPfilterApplyDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 3), DirectionType())
if mibBuilder.loadTexts: h3cPfilterApplyDirection.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyDirection.setDescription('The direction of packet filter application.')
h3cPfilterApplyAclType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2), ("default", 3))))
if mibBuilder.loadTexts: h3cPfilterApplyAclType.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyAclType.setDescription('ACL Type: IPv4, IPv6, default action. Take default action as a special ACL group. ')
h3cPfilterApplyAclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(2000, 5999), )))
if mibBuilder.loadTexts: h3cPfilterApplyAclIndex.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyAclIndex.setDescription('The ACL group index. Basic type: 2000..2999 Advanced type: 3000..3999 MAC type: 4000..4999 User type: 5000..5999 Default action type: 0 ')
h3cPfilterApplyHardCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 6), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPfilterApplyHardCount.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyHardCount.setDescription('Hardware count flag. true: enable hardware count false: disable hardware count ')
h3cPfilterApplySequence = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterApplySequence.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplySequence.setDescription('The configure sequence of packet filter application.')
h3cPfilterApplyCountClear = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 8), CounterClear()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cPfilterApplyCountClear.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyCountClear.setDescription('Clear the value of counters.')
h3cPfilterApplyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 2, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: h3cPfilterApplyRowStatus.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterApplyRowStatus.setDescription('RowStatus.')
h3cPfilterAclGroupRunInfoTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3), )
if mibBuilder.loadTexts: h3cPfilterAclGroupRunInfoTable.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupRunInfoTable.setDescription('A table of group running information of ACLs for packet filtering. If hardware count function is not supported or not enabled to the packet filter application, the statistics entry will be zero. ')
h3cPfilterAclGroupRunInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cPfilterApplyObjType"), (0, "H3C-ACL-MIB", "h3cPfilterApplyObjIndex"), (0, "H3C-ACL-MIB", "h3cPfilterApplyDirection"), (0, "H3C-ACL-MIB", "h3cPfilterApplyAclType"), (0, "H3C-ACL-MIB", "h3cPfilterApplyAclIndex"))
if mibBuilder.loadTexts: h3cPfilterAclGroupRunInfoEntry.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupRunInfoEntry.setDescription('ACL group running information entry for packet filtering.')
h3cPfilterAclGroupStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("success", 1), ("failed", 2), ("partialSuccess", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclGroupStatus.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupStatus.setDescription('The status of ACL group applied. success: ACL applied successfully on all slots failed: failed to apply ACL on all slots partialSuccess: failed to apply ACL on some slots ')
h3cPfilterAclGroupCountStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("success", 1), ("failed", 2), ("partialSuccess", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclGroupCountStatus.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupCountStatus.setDescription('The status of enabling hardware count. If hardware count is not enabled, it returns success. success: enable hardware count successfully on all slots failed: failed to enable hardware count on all slots partialSuccess: failed to enable hardware count on some slots ')
h3cPfilterAclGroupPermitPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclGroupPermitPkts.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupPermitPkts.setDescription('The number of packets permitted.')
h3cPfilterAclGroupPermitBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclGroupPermitBytes.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupPermitBytes.setDescription('The number of bytes permitted.')
h3cPfilterAclGroupDenyPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclGroupDenyPkts.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupDenyPkts.setDescription('The number of packets denied.')
h3cPfilterAclGroupDenyBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 3, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclGroupDenyBytes.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclGroupDenyBytes.setDescription('The number of bytes denied.')
h3cPfilterAclRuleRunInfoTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 4), )
if mibBuilder.loadTexts: h3cPfilterAclRuleRunInfoTable.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclRuleRunInfoTable.setDescription("A table of rule's running information of ACLs for packet filtering. If hardware count function is not supported or not enabled to the packet filter application, the h3cPfilterAclRuleMatchPackets and h3cPfilterAclRuleMatchBytes will be zero. ")
h3cPfilterAclRuleRunInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 4, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cPfilterApplyObjType"), (0, "H3C-ACL-MIB", "h3cPfilterApplyObjIndex"), (0, "H3C-ACL-MIB", "h3cPfilterApplyDirection"), (0, "H3C-ACL-MIB", "h3cPfilterApplyAclType"), (0, "H3C-ACL-MIB", "h3cPfilterApplyAclIndex"), (0, "H3C-ACL-MIB", "h3cPfilterAclRuleIndex"))
if mibBuilder.loadTexts: h3cPfilterAclRuleRunInfoEntry.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclRuleRunInfoEntry.setDescription("ACL rule's running information entry.")
h3cPfilterAclRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65534)))
if mibBuilder.loadTexts: h3cPfilterAclRuleIndex.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclRuleIndex.setDescription('The ACL rule index.')
h3cPfilterAclRuleStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("success", 1), ("failed", 2), ("partialSuccess", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclRuleStatus.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclRuleStatus.setDescription('The status of rule application. success: rule applied successfully on all slots failed: failed to apply rule on all slots partialSuccess: failed to apply rule on some slots ')
h3cPfilterAclRuleCountStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("success", 1), ("failed", 2), ("partialSuccess", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclRuleCountStatus.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclRuleCountStatus.setDescription("The status of enabling rule's hardware count. If hardware count is not enabled, it returns success. success: enable hardware count successfully on all slots failed: failed to enable hardware count on all slots partialSuccess: failed to enable hardware count on some slots ")
h3cPfilterAclRuleMatchPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclRuleMatchPackets.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclRuleMatchPackets.setDescription('The number of packets matched.')
h3cPfilterAclRuleMatchBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterAclRuleMatchBytes.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAclRuleMatchBytes.setDescription('The number of bytes matched.')
h3cPfilterStatisticSumTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5), )
if mibBuilder.loadTexts: h3cPfilterStatisticSumTable.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterStatisticSumTable.setDescription("A table of ACL rule's sum statistics information, accumulated by all entity application on all slots. ")
h3cPfilterStatisticSumEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5, 1), ).setIndexNames((0, "H3C-ACL-MIB", "h3cPfilterSumDirection"), (0, "H3C-ACL-MIB", "h3cPfilterSumAclType"), (0, "H3C-ACL-MIB", "h3cPfilterSumAclIndex"), (0, "H3C-ACL-MIB", "h3cPfilterSumRuleIndex"))
if mibBuilder.loadTexts: h3cPfilterStatisticSumEntry.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterStatisticSumEntry.setDescription("ACL rule's sum statistics information entry.")
h3cPfilterSumDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5, 1, 1), DirectionType())
if mibBuilder.loadTexts: h3cPfilterSumDirection.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterSumDirection.setDescription('The direction of application.')
h3cPfilterSumAclType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ipv4", 1), ("ipv6", 2))))
if mibBuilder.loadTexts: h3cPfilterSumAclType.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterSumAclType.setDescription('ACL type, IPv4 or IPv6.')
h3cPfilterSumAclIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2000, 5999)))
if mibBuilder.loadTexts: h3cPfilterSumAclIndex.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterSumAclIndex.setDescription('The ACL group index. Basic type: 2000..2999 Advanced type: 3000..3999 MAC type: 4000..4999 User type: 5000..5999 ')
h3cPfilterSumRuleIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65534)))
if mibBuilder.loadTexts: h3cPfilterSumRuleIndex.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterSumRuleIndex.setDescription('The ACL rule index.')
h3cPfilterSumRuleMatchPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterSumRuleMatchPackets.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterSumRuleMatchPackets.setDescription('The sum number of packets matched the ACL rule.')
h3cPfilterSumRuleMatchBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 3, 5, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cPfilterSumRuleMatchBytes.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterSumRuleMatchBytes.setDescription('The sum number of bytes matched the ACL rule.')
h3cAclPacketfilterTrapObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4))
h3cPfilterInterface = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 1), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPfilterInterface.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterInterface.setDescription('The interface which policy apply.')
h3cPfilterDirection = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 2), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPfilterDirection.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterDirection.setDescription('Inbound or outbound.')
h3cPfilterACLNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 3), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPfilterACLNumber.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterACLNumber.setDescription('ACL number.')
h3cPfilterAction = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 4), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPfilterAction.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterAction.setDescription('Permit or deny.')
h3cMACfilterSourceMac = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 5), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cMACfilterSourceMac.setStatus('current')
if mibBuilder.loadTexts: h3cMACfilterSourceMac.setDescription('Source MAC address.')
h3cMACfilterDestinationMac = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 6), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cMACfilterDestinationMac.setStatus('current')
if mibBuilder.loadTexts: h3cMACfilterDestinationMac.setDescription('Destination MAC address.')
h3cPfilterPacketNumber = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 7), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPfilterPacketNumber.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterPacketNumber.setDescription('The number of packets permitted or denied by ACL.')
h3cPfilterReceiveInterface = MibScalar((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 4, 8), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: h3cPfilterReceiveInterface.setStatus('current')
if mibBuilder.loadTexts: h3cPfilterReceiveInterface.setDescription('The interface where packet come from.')
h3cAclPacketfilterTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 5))
h3cPfilterTrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 5, 0))
h3cMACfilterTrap = NotificationType((1, 3, 6, 1, 4, 1, 2011, 10, 2, 8, 5, 0, 1)).setObjects(("H3C-ACL-MIB", "h3cPfilterInterface"), ("H3C-ACL-MIB", "h3cPfilterDirection"), ("H3C-ACL-MIB", "h3cPfilterACLNumber"), ("H3C-ACL-MIB", "h3cPfilterAction"), ("H3C-ACL-MIB", "h3cMACfilterSourceMac"), ("H3C-ACL-MIB", "h3cMACfilterDestinationMac"), ("H3C-ACL-MIB", "h3cPfilterPacketNumber"), ("H3C-ACL-MIB", "h3cPfilterReceiveInterface"))
if mibBuilder.loadTexts: h3cMACfilterTrap.setStatus('current')
if mibBuilder.loadTexts: h3cMACfilterTrap.setDescription('This notification is generated when a packet was processed by MAC address filter, but not every packet will generate one notification, the same notification only generate once in 30 seconds.')
mibBuilder.exportSymbols("H3C-ACL-MIB", h3cAclIfRowStatus=h3cAclIfRowStatus, h3cAclIPAclAdvancedDestAddr=h3cAclIPAclAdvancedDestAddr, h3cAclNumberGroupRowStatus=h3cAclNumberGroupRowStatus, h3cAclEnUserIPv6String=h3cAclEnUserIPv6String, h3cAclEnUserLog=h3cAclEnUserLog, h3cAclMACLog=h3cAclMACLog, h3cAclMACRowStatus=h3cAclMACRowStatus, h3cAclNumGroupMatchOrder=h3cAclNumGroupMatchOrder, h3cAclIPAclAdvancedCount=h3cAclIPAclAdvancedCount, h3cAclAdvancedCount=h3cAclAdvancedCount, h3cAclPacketfilterTrapObjects=h3cAclPacketfilterTrapObjects, h3cAclNumberGroupStep=h3cAclNumberGroupStep, h3cAclIPAclBasicCounting=h3cAclIPAclBasicCounting, h3cAclBasicSrcIp=h3cAclBasicSrcIp, h3cAclAdvancedDestPort2=h3cAclAdvancedDestPort2, h3cAclLinkVlanPri=h3cAclLinkVlanPri, h3cAclUserRuleStr=h3cAclUserRuleStr, h3cAclResourceChip=h3cAclResourceChip, h3cAclActiveIpAclNum=h3cAclActiveIpAclNum, h3cAclLinkLsapCode=h3cAclLinkLsapCode, h3cAclIDSSrcMac=h3cAclIDSSrcMac, h3cAclIDSRowStatus=h3cAclIDSRowStatus, h3cAclIPAclAdvancedSrcAddrType=h3cAclIPAclAdvancedSrcAddrType, h3cPfilterScalarGroup=h3cPfilterScalarGroup, h3cAclIfTimeRangeName=h3cAclIfTimeRangeName, h3cAclNameGroupMatchOrder=h3cAclNameGroupMatchOrder, h3cAclIPAclBasicFragmentFlag=h3cAclIPAclBasicFragmentFlag, h3cAclIPAclAdvancedSrcPort1=h3cAclIPAclAdvancedSrcPort1, h3cPfilterReceiveInterface=h3cPfilterReceiveInterface, h3cAclIPAclAdvancedCounting=h3cAclIPAclAdvancedCounting, h3cAclAdvancedAct=h3cAclAdvancedAct, h3cAclAdvancedTos=h3cAclAdvancedTos, h3cAclNameGroupRowStatus=h3cAclNameGroupRowStatus, h3cAclNumberGroupIndex=h3cAclNumberGroupIndex, h3cPfilterApplyDirection=h3cPfilterApplyDirection, h3cAclIfAct=h3cAclIfAct, h3cAclMib2EntityType=h3cAclMib2EntityType, h3cPfilterDefaultAction=h3cPfilterDefaultAction, h3cAclLinkEntry=h3cAclLinkEntry, h3cAclIPAclAdvancedSrcPort2=h3cAclIPAclAdvancedSrcPort2, h3cAclMib2ProcessingStatus=h3cAclMib2ProcessingStatus, h3cPfilterApplyTable=h3cPfilterApplyTable, h3cAclNumGroupEntry=h3cAclNumGroupEntry, h3cAclIPAclBasicComment=h3cAclIPAclBasicComment, h3cAclEnUserCounting=h3cAclEnUserCounting, h3cAclEnUserEntry=h3cAclEnUserEntry, h3cAclActiveVlanID=h3cAclActiveVlanID, h3cAclNameGroupCreateName=h3cAclNameGroupCreateName, h3cAclLinkSubitem=h3cAclLinkSubitem, h3cAclIDSProtocol=h3cAclIDSProtocol, h3cAclEnUserTimeRangeName=h3cAclEnUserTimeRangeName, h3cAclResourceTotal=h3cAclResourceTotal, h3cAclAdvancedRuleTable=h3cAclAdvancedRuleTable, h3cAclIDSSrcWild=h3cAclIDSSrcWild, h3cAclIPAclAdvancedTCPFlagValue=h3cAclIPAclAdvancedTCPFlagValue, h3cAclUserSubitem=h3cAclUserSubitem, h3cAclNameGroupEntry=h3cAclNameGroupEntry, h3cPfilterInterface=h3cPfilterInterface, h3cAclAdvancedDestIp=h3cAclAdvancedDestIp, h3cAclMACLsapMask=h3cAclMACLsapMask, h3cAclAdvancedSrcPort2=h3cAclAdvancedSrcPort2, h3cAclIDSDestIp=h3cAclIDSDestIp, h3cAclEnUserTable=h3cAclEnUserTable, h3cAcl=h3cAcl, h3cAclIPAclAdvancedSrcOp=h3cAclIPAclAdvancedSrcOp, h3cAclIPAclAdvancedReflective=h3cAclIPAclAdvancedReflective, h3cPfilterSumRuleMatchBytes=h3cPfilterSumRuleMatchBytes, h3cAclNumGroupCountClear=h3cAclNumGroupCountClear, h3cAclAdvancedSrcWild=h3cAclAdvancedSrcWild, h3cAclIDSEntry=h3cAclIDSEntry, h3cAclLinkDestMacWild=h3cAclLinkDestMacWild, h3cAclMib2ModuleIndex=h3cAclMib2ModuleIndex, h3cAclIPAclBasicTable=h3cAclIPAclBasicTable, h3cAclNumberGroupRuleCounter=h3cAclNumberGroupRuleCounter, h3cAclIPAclAdvancedVpnInstanceName=h3cAclIPAclAdvancedVpnInstanceName, h3cPfilterApplyCountClear=h3cPfilterApplyCountClear, h3cAclLinkDestIfIndex=h3cAclLinkDestIfIndex, h3cAclBasicAct=h3cAclBasicAct, 
h3cAclEnUserAct=h3cAclEnUserAct, h3cPfilterApplyAclType=h3cPfilterApplyAclType, h3cAclIPAclAdvancedDestWild=h3cAclIPAclAdvancedDestWild, h3cAclAdvancedSrcOp=h3cAclAdvancedSrcOp, h3cAclNumGroupTable=h3cAclNumGroupTable, h3cAclLinkSrcMacWild=h3cAclLinkSrcMacWild, DirectionType=DirectionType, h3cAclIfAclNum=h3cAclIfAclNum, h3cAclBasicCountClear=h3cAclBasicCountClear, h3cAclIPAclAdvancedDestAny=h3cAclIPAclAdvancedDestAny, h3cAclMACCos=h3cAclMACCos, TCPFlag=TCPFlag, h3cAclIDSDestPort=h3cAclIDSDestPort, h3cAclIfLog=h3cAclIfLog, h3cAclBasicSubitem=h3cAclBasicSubitem, h3cAclLinkDestVlanId=h3cAclLinkDestVlanId, h3cAclIfEnable=h3cAclIfEnable, h3cAclResourceUsageEntry=h3cAclResourceUsageEntry, h3cAclLinkL2LabelRangeOp=h3cAclLinkL2LabelRangeOp, h3cAclIPAclBasicSrcAny=h3cAclIPAclBasicSrcAny, h3cAclIPAclAdvancedRouteTypeValue=h3cAclIPAclAdvancedRouteTypeValue, h3cAclEnUserComment=h3cAclEnUserComment, h3cPfilterProcessingStatus=h3cPfilterProcessingStatus, h3cAclIfRuleEntry=h3cAclIfRuleEntry, h3cPfilterApplyAclIndex=h3cPfilterApplyAclIndex, h3cAclIPAclBasicSrcAddr=h3cAclIPAclBasicSrcAddr, h3cAclIPAclAdvancedLog=h3cAclIPAclAdvancedLog, h3cAclMACSrcMac=h3cAclMACSrcMac, h3cPfilterApplyEntry=h3cPfilterApplyEntry, h3cAclAdvancedPrecedence=h3cAclAdvancedPrecedence, h3cAclIPAclAdvancedDestPort1=h3cAclIPAclAdvancedDestPort1, h3cAclUserRuleMask=h3cAclUserRuleMask, h3cAclNumberGroupMatchOrder=h3cAclNumberGroupMatchOrder, h3cAclLinkDestMac=h3cAclLinkDestMac, h3cAclIPAclAdvancedRowStatus=h3cAclIPAclAdvancedRowStatus, h3cAclMACEntry=h3cAclMACEntry, h3cAclAdvancedIcmpCode=h3cAclAdvancedIcmpCode, h3cAclEnUserRowStatus=h3cAclEnUserRowStatus, h3cAclAdvancedLog=h3cAclAdvancedLog, h3cAclActiveAclIndex=h3cAclActiveAclIndex, h3cAclNumberGroupType=h3cAclNumberGroupType, h3cAclMibObjects=h3cAclMibObjects, h3cAclIfCountClear=h3cAclIfCountClear, h3cAclLinkAct=h3cAclLinkAct, h3cAclLinkSrcMac=h3cAclLinkSrcMac, h3cAclIPAclAdvancedComment=h3cAclIPAclAdvancedComment, h3cAclNameGroupIndex=h3cAclNameGroupIndex, h3cAclLinkTimeRangeName=h3cAclLinkTimeRangeName, h3cAclIPAclAdvancedEnable=h3cAclIPAclAdvancedEnable, h3cAclLinkTypeMask=h3cAclLinkTypeMask, h3cAclIPAclBasicRuleIndex=h3cAclIPAclBasicRuleIndex, h3cAclPacketfilterTrap=h3cAclPacketfilterTrap, h3cAclBasicTimeRangeName=h3cAclBasicTimeRangeName, h3cAclActiveIpAclSubitem=h3cAclActiveIpAclSubitem, h3cAclIDSSrcPort=h3cAclIDSSrcPort, h3cPfilterDirection=h3cPfilterDirection, h3cAclLinkTypeCode=h3cAclLinkTypeCode, h3cAclAdvancedTimeRangeName=h3cAclAdvancedTimeRangeName, h3cAclMib2Mode=h3cAclMib2Mode, h3cPfilterAclGroupDenyBytes=h3cPfilterAclGroupDenyBytes, h3cAclMib2CharacteristicsIndex=h3cAclMib2CharacteristicsIndex, h3cAclEnUserAclGroup=h3cAclEnUserAclGroup, h3cMACfilterDestinationMac=h3cMACfilterDestinationMac, h3cAclAdvancedSrcIp=h3cAclAdvancedSrcIp, h3cAclIfSubitem=h3cAclIfSubitem, h3cAclIPAclAdvancedTCPFlag=h3cAclIPAclAdvancedTCPFlag, h3cAclIPAclAdvancedDestPort2=h3cAclIPAclAdvancedDestPort2, h3cAclMACCounting=h3cAclMACCounting, h3cAclAdvancedDestWild=h3cAclAdvancedDestWild, h3cAclLinkL2LabelRangeBegin=h3cAclLinkL2LabelRangeBegin, h3cPfilterAclRuleIndex=h3cPfilterAclRuleIndex, h3cAclMib2EntityIndex=h3cAclMib2EntityIndex, h3cAclResourceUsagePercent=h3cAclResourceUsagePercent, h3cAclNumGroupSubitemNum=h3cAclNumGroupSubitemNum, h3cAclMACTypeMask=h3cAclMACTypeMask, h3cAclPortRange=h3cAclPortRange, h3cAclEnUserCountClear=h3cAclEnUserCountClear, h3cAclActiveEntry=h3cAclActiveEntry, h3cAclEnUserIPv4String=h3cAclEnUserIPv4String, h3cAclIPAclAdvancedTos=h3cAclIPAclAdvancedTos, 
h3cAclBasicSrcWild=h3cAclBasicSrcWild, h3cPfilterAclGroupPermitPkts=h3cPfilterAclGroupPermitPkts, h3cAclIfCount=h3cAclIfCount, CounterClear=CounterClear, h3cAclUserTable=h3cAclUserTable, h3cAclIPAclGroup=h3cAclIPAclGroup, h3cPfilterApplySequence=h3cPfilterApplySequence, h3cAclIPAclAdvancedIcmpType=h3cAclIPAclAdvancedIcmpType, h3cAclEnUserEnable=h3cAclEnUserEnable, h3cAclLinkRowStatus=h3cAclLinkRowStatus, AddressFlag=AddressFlag, h3cPfilterSumAclIndex=h3cPfilterSumAclIndex, h3cAclBasicLog=h3cAclBasicLog, h3cAclActiveLinkAclNum=h3cAclActiveLinkAclNum, h3cAclBasicRuleEntry=h3cAclBasicRuleEntry, h3cAclMACDestMac=h3cAclMACDestMac, h3cAclResourceConfigured=h3cAclResourceConfigured, h3cPfilterAclGroupPermitBytes=h3cPfilterAclGroupPermitBytes, h3cAclMACEnable=h3cAclMACEnable, h3cAclLinkTable=h3cAclLinkTable, h3cAclLinkAclNum=h3cAclLinkAclNum, h3cAclIPAclAdvancedRuleIndex=h3cAclIPAclAdvancedRuleIndex, h3cAclUserEnable=h3cAclUserEnable, h3cAclActiveLinkAclSubitem=h3cAclActiveLinkAclSubitem, h3cPfilterSumDirection=h3cPfilterSumDirection, h3cAclIPAclAdvancedFlowLabel=h3cAclIPAclAdvancedFlowLabel, h3cPfilterApplyObjIndex=h3cPfilterApplyObjIndex, h3cAclIPAclBasicTimeRangeName=h3cAclIPAclBasicTimeRangeName, h3cAclIDSTable=h3cAclIDSTable, h3cAclIPAclAdvancedDestPrefix=h3cAclIPAclAdvancedDestPrefix, h3cAclAdvancedDestOp=h3cAclAdvancedDestOp, h3cPfilterSumRuleMatchPackets=h3cPfilterSumRuleMatchPackets, h3cAclIPAclAdvancedDscp=h3cAclIPAclAdvancedDscp, h3cAclActiveTable=h3cAclActiveTable, h3cAclIPAclAdvancedDestOp=h3cAclIPAclAdvancedDestOp, h3cAclUserTimeRangeName=h3cAclUserTimeRangeName, h3cPfilterStatisticSumEntry=h3cPfilterStatisticSumEntry, h3cAclEnUserMplsString=h3cAclEnUserMplsString, h3cAclIDSDestMac=h3cAclIDSDestMac, h3cAclLinkVlanTag=h3cAclLinkVlanTag, h3cAclAdvancedSrcPort1=h3cAclAdvancedSrcPort1, h3cAclEnUserCount=h3cAclEnUserCount, h3cAclIfRuleTable=h3cAclIfRuleTable, h3cAclMib2ObjectsCapabilities=h3cAclMib2ObjectsCapabilities, h3cPfilterAclRuleMatchPackets=h3cPfilterAclRuleMatchPackets, h3cAclResourceGroup=h3cAclResourceGroup, h3cAclIPAclAdvancedEntry=h3cAclIPAclAdvancedEntry, h3cAclEnUserL4String=h3cAclEnUserL4String, h3cAclIDSAct=h3cAclIDSAct, DSCPValue=DSCPValue, h3cAclBasicAclNum=h3cAclBasicAclNum, h3cAclNumberGroupEntry=h3cAclNumberGroupEntry, h3cPfilterAclGroupCountStatus=h3cPfilterAclGroupCountStatus, h3cPfilterTrapPrefix=h3cPfilterTrapPrefix, h3cAclResourceUsageTable=h3cAclResourceUsageTable, h3cPfilterAclGroupStatus=h3cPfilterAclGroupStatus, h3cAclMib2CapabilityEntry=h3cAclMib2CapabilityEntry, h3cAclBasicRuleTable=h3cAclBasicRuleTable, h3cAclUserRowStatus=h3cAclUserRowStatus, h3cAclIPAclAdvancedAddrFlag=h3cAclIPAclAdvancedAddrFlag, h3cAclIPAclBasicCountClear=h3cAclIPAclBasicCountClear, h3cAclBasicCount=h3cAclBasicCount, h3cPfilterStatisticSumTable=h3cPfilterStatisticSumTable, h3cAclIPAclAdvancedTCPFlagMask=h3cAclIPAclAdvancedTCPFlagMask, h3cAclIPAclAdvancedAct=h3cAclIPAclAdvancedAct, h3cAclActiveRowStatus=h3cAclActiveRowStatus, h3cPfilterAction=h3cPfilterAction, h3cAclEnUserRuleIndex=h3cAclEnUserRuleIndex, RuleAction=RuleAction, h3cPfilterAclGroupRunInfoTable=h3cPfilterAclGroupRunInfoTable, PortOp=PortOp, h3cAclActiveRuntime=h3cAclActiveRuntime, h3cAclLinkSrcVlanId=h3cAclLinkSrcVlanId, h3cAclAdvancedProtocol=h3cAclAdvancedProtocol, h3cAclMib2CharacteristicsDesc=h3cAclMib2CharacteristicsDesc, h3cAclIPAclBasicSrcAddrType=h3cAclIPAclBasicSrcAddrType, h3cAclIPAclBasicRouteTypeAny=h3cAclIPAclBasicRouteTypeAny, h3cAclIPAclBasicEntry=h3cAclIPAclBasicEntry, 
h3cAclEnUserStartString=h3cAclEnUserStartString, h3cAclIPAclAdvancedTable=h3cAclIPAclAdvancedTable, h3cAclNumGroupDescription=h3cAclNumGroupDescription, h3cAclResourceType=h3cAclResourceType, h3cAclAdvancedEstablish=h3cAclAdvancedEstablish, h3cAclUserVlanTag=h3cAclUserVlanTag, h3cAclIPAclBasicCount=h3cAclIPAclBasicCount, h3cMACfilterSourceMac=h3cMACfilterSourceMac, h3cPfilterAclGroupDenyPkts=h3cPfilterAclGroupDenyPkts, h3cAclAdvancedIcmpType=h3cAclAdvancedIcmpType, h3cMACfilterTrap=h3cMACfilterTrap, h3cAclIDSSrcIp=h3cAclIDSSrcIp, h3cAclMACAct=h3cAclMACAct, h3cAclAdvancedRowStatus=h3cAclAdvancedRowStatus, h3cAclNameGroupSubitemNum=h3cAclNameGroupSubitemNum, h3cAclIDSDestWild=h3cAclIDSDestWild, h3cAclIfAny=h3cAclIfAny, h3cAclIPAclAdvancedTimeRangeName=h3cAclIPAclAdvancedTimeRangeName)
mibBuilder.exportSymbols("H3C-ACL-MIB", h3cPfilterAclGroupRunInfoEntry=h3cPfilterAclGroupRunInfoEntry, h3cAclIPAclBasicVpnInstanceName=h3cAclIPAclBasicVpnInstanceName, h3cAclNumGroupRowStatus=h3cAclNumGroupRowStatus, h3cAclIDSDenyTime=h3cAclIDSDenyTime, h3cAclAdvancedRuleEntry=h3cAclAdvancedRuleEntry, h3cAclUserEntry=h3cAclUserEntry, h3cAclAdvancedDscp=h3cAclAdvancedDscp, h3cPfilterApplyHardCount=h3cPfilterApplyHardCount, h3cAclMACDestMacWild=h3cAclMACDestMacWild, h3cAclIPAclBasicRowStatus=h3cAclIPAclBasicRowStatus, h3cPfilterACLNumber=h3cPfilterACLNumber, h3cAclMACTimeRangeName=h3cAclMACTimeRangeName, h3cAclIPAclAdvancedPrecedence=h3cAclIPAclAdvancedPrecedence, PYSNMP_MODULE_ID=h3cAcl, h3cAclNumberGroupCountClear=h3cAclNumberGroupCountClear, h3cAclMib2CharacteristicsValue=h3cAclMib2CharacteristicsValue, h3cAclMode=h3cAclMode, h3cAclResourceTypeDescription=h3cAclResourceTypeDescription, h3cPfilterApplyObjType=h3cPfilterApplyObjType, h3cAclNumberGroupName=h3cAclNumberGroupName, h3cAclMACLsapCode=h3cAclMACLsapCode, h3cAclMACCountClear=h3cAclMACCountClear, h3cAclMACSrcMacWild=h3cAclMACSrcMacWild, h3cAclEnUserL5String=h3cAclEnUserL5String, h3cAclNameGroupTypes=h3cAclNameGroupTypes, h3cAclMACTypeCode=h3cAclMACTypeCode, h3cAclMACRuleIndex=h3cAclMACRuleIndex, h3cAclLinkSrcAny=h3cAclLinkSrcAny, h3cAclMib2NodesGroup=h3cAclMib2NodesGroup, h3cAclIPAclAdvancedSrcWild=h3cAclIPAclAdvancedSrcWild, h3cAclLinkMplsExp=h3cAclLinkMplsExp, h3cAclAdvancedAclNum=h3cAclAdvancedAclNum, FragmentFlag=FragmentFlag, h3cPfilterAclRuleRunInfoTable=h3cPfilterAclRuleRunInfoTable, h3cPfilterAclRuleRunInfoEntry=h3cPfilterAclRuleRunInfoEntry, h3cAclMib2CapabilityTable=h3cAclMib2CapabilityTable, h3cAclIPAclBasicSrcPrefix=h3cAclIPAclBasicSrcPrefix, h3cAclPacketFilterObjects=h3cAclPacketFilterObjects, h3cPfilterAclRuleStatus=h3cPfilterAclRuleStatus, h3cAclUserFormatType=h3cAclUserFormatType, h3cAclNameGroupTable=h3cAclNameGroupTable, h3cAclResourceChassis=h3cAclResourceChassis, h3cAclLinkFormatType=h3cAclLinkFormatType, h3cAclActiveUserAclSubitem=h3cAclActiveUserAclSubitem, h3cAclIPAclAdvancedSrcAddr=h3cAclIPAclAdvancedSrcAddr, h3cAclIPAclAdvancedDestAddrType=h3cAclIPAclAdvancedDestAddrType, h3cAclLinkEnable=h3cAclLinkEnable, h3cAclNumGroupAclNum=h3cAclNumGroupAclNum, h3cAclActiveIfIndex=h3cAclActiveIfIndex, h3cAclResourceReserved=h3cAclResourceReserved, h3cAclIPAclAdvancedCountClear=h3cAclIPAclAdvancedCountClear, h3cAclLinkSrcIfIndex=h3cAclLinkSrcIfIndex, h3cAclAdvancedFragments=h3cAclAdvancedFragments, h3cPfilterApplyRowStatus=h3cPfilterApplyRowStatus, h3cAclLinkL2LabelRangeEnd=h3cAclLinkL2LabelRangeEnd, h3cAclIPAclBasicEnable=h3cAclIPAclBasicEnable, h3cAclLinkProtocol=h3cAclLinkProtocol, h3cAclIPAclAdvancedSrcAny=h3cAclIPAclAdvancedSrcAny, h3cPfilterSumAclType=h3cPfilterSumAclType, h3cAclMACComment=h3cAclMACComment, h3cAclIDSName=h3cAclIDSName, h3cAclIPAclBasicSrcWild=h3cAclIPAclBasicSrcWild, h3cAclIPAclAdvancedFragmentFlag=h3cAclIPAclAdvancedFragmentFlag, h3cAclNumberGroupDescription=h3cAclNumberGroupDescription, h3cPfilterSumRuleIndex=h3cPfilterSumRuleIndex, h3cAclBasicFragments=h3cAclBasicFragments, h3cAclMACTable=h3cAclMACTable, h3cAclIPAclAdvancedIcmpCode=h3cAclIPAclAdvancedIcmpCode, h3cAclIfIndex=h3cAclIfIndex, h3cAclAdvancedDestPort1=h3cAclAdvancedDestPort1, h3cAclBasicRowStatus=h3cAclBasicRowStatus, h3cAclBasicEnable=h3cAclBasicEnable, h3cAclMib2Version=h3cAclMib2Version, h3cAclUserAct=h3cAclUserAct, h3cAclIPAclAdvancedRouteTypeAny=h3cAclIPAclAdvancedRouteTypeAny, h3cPfilterPacketNumber=h3cPfilterPacketNumber, 
h3cPfilterAclRuleMatchBytes=h3cPfilterAclRuleMatchBytes, h3cAclEnUserL2String=h3cAclEnUserL2String, h3cAclMACCount=h3cAclMACCount, h3cAclAdvancedCountClear=h3cAclAdvancedCountClear, h3cPfilterAclRuleCountStatus=h3cPfilterAclRuleCountStatus, h3cAclLinkDestAny=h3cAclLinkDestAny, h3cAclIPAclBasicLog=h3cAclIPAclBasicLog, h3cAclAdvancedEnable=h3cAclAdvancedEnable, h3cAclUserAclNum=h3cAclUserAclNum, h3cAclNumberGroupTable=h3cAclNumberGroupTable, h3cAclActiveUserAclNum=h3cAclActiveUserAclNum, h3cAclIPAclBasicRouteTypeValue=h3cAclIPAclBasicRouteTypeValue, h3cAclActiveDirection=h3cAclActiveDirection, h3cAclAdvancedSubitem=h3cAclAdvancedSubitem, h3cAclLinkLsapMask=h3cAclLinkLsapMask, h3cAclResourceSlot=h3cAclResourceSlot, h3cAclMib2Objects=h3cAclMib2Objects, h3cAclMib2GlobalGroup=h3cAclMib2GlobalGroup, h3cAclMACAclGroup=h3cAclMACAclGroup, h3cAclIPAclBasicAct=h3cAclIPAclBasicAct, h3cAclIPAclAdvancedProtocol=h3cAclIPAclAdvancedProtocol, h3cAclIPAclAdvancedSrcPrefix=h3cAclIPAclAdvancedSrcPrefix)
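# --- Illustrative usage sketch (editor's addition, not part of the generated MIB module) ---
# The module above registers its managed objects with exportSymbols().  A minimal,
# hedged example of loading it back through pysnmp's MibBuilder is shown below; the
# MIB directory path is an assumption and must point at wherever this compiled
# H3C-ACL-MIB.py file is actually installed.
from pysnmp.smi import builder

def _example_load_h3c_acl_mib(mib_dir='/usr/share/pysnmp/mibs'):
    mib_builder = builder.MibBuilder()
    mib_builder.addMibSources(builder.DirMibSource(mib_dir))
    mib_builder.loadModules('H3C-ACL-MIB')
    # importSymbols returns the objects registered by exportSymbols above.
    (acl_mode_obj,) = mib_builder.importSymbols('H3C-ACL-MIB', 'h3cAclMode')
    return acl_mode_obj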
| 142.658582
| 11,397
| 0.784019
|
e46e86b808a12dd82c45a94241200612f1e297d2
| 745
|
py
|
Python
|
combo_cases_and_deaths/setup.py
|
JedGrabman/covidcast-indicators
|
d2a5a232431c8392c54bfc301dcb9beecc541b97
|
[
"MIT"
] | null | null | null |
combo_cases_and_deaths/setup.py
|
JedGrabman/covidcast-indicators
|
d2a5a232431c8392c54bfc301dcb9beecc541b97
|
[
"MIT"
] | null | null | null |
combo_cases_and_deaths/setup.py
|
JedGrabman/covidcast-indicators
|
d2a5a232431c8392c54bfc301dcb9beecc541b97
|
[
"MIT"
] | null | null | null |
from setuptools import setup
from setuptools import find_packages
required = [
"pandas",
"pytest",
"pytest-cov",
"pylint",
"delphi-utils",
"covidcast"
]
setup(
name="delphi_combo_cases_and_deaths",
version="0.1.0",
description="A combined signal for cases and deaths using JHU for Puerto Rico and USA Facts everywhere else",
author="Jingjing Tang, Kathryn Mazaitis",
author_email="krivard@cs.cmu.edu",
url="https://github.com/cmu-delphi/covidcast-indicators",
install_requires=required,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.7",
],
packages=find_packages(),
)
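# --- Illustrative sketch (editor's addition, not part of the original setup.py) ---
# A hedged way to sanity-check that the runtime requirements declared above resolve
# to importable modules.  The distribution-name -> import-name mapping is an
# assumption (e.g. "pytest-cov" imports as "pytest_cov", "delphi-utils" as
# "delphi_utils"); adjust it if the actual packages differ.
def _check_requirements_importable():
    import importlib
    import_names = ["pandas", "pytest", "pytest_cov", "pylint", "delphi_utils", "covidcast"]
    missing = []
    for name in import_names:
        try:
            importlib.import_module(name)
        except ImportError:
            missing.append(name)
    return missing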
| 26.607143
| 113
| 0.668456
|
c26ca1573bd36635f0f58ecada3416123cd69422
| 4,840
|
py
|
Python
|
fnl/fnlx.py
|
decorator-factory/python-fnl
|
20db64d5f983c002846df125c55080a62e1b577e
|
[
"MIT"
] | null | null | null |
fnl/fnlx.py
|
decorator-factory/python-fnl
|
20db64d5f983c002846df125c55080a62e1b577e
|
[
"MIT"
] | null | null | null |
fnl/fnlx.py
|
decorator-factory/python-fnl
|
20db64d5f983c002846df125c55080a62e1b577e
|
[
"MIT"
] | null | null | null |
import json
import fnl
from . import entities as e
from . import entity_types as et
from .definitions import fn
from context_manager_patma import match
from typing import Dict, List, NamedTuple, Iterable
from enum import Enum
exports: Dict[str, e.Entity] = {}
class TagKind(Enum):
Open = 0
ClosedWithSlash = 1
ClosedWithoutSlash = 2
class TagInfo(NamedTuple):
classes: List[str]
options: List[str]
body: List[e.Entity]
kind: TagKind
@property
def as_option_string(self):
if self.classes == []:
return " ".join(self.options)
else:
class_str = 'class="' + " ".join(self.classes) + '"'
return " ".join([class_str, *self.options])
def parse_html_options(args: Iterable[e.Entity]):
"""Separates actual elements from HTML options: class names, ID names etc."""
classes = []
options = []
body = []
tag_kind = TagKind.Open
for arg in args:
with match(arg) as case:
with case('Quoted(Name("/"))') as [m]:
tag_kind = TagKind.ClosedWithSlash
with case('Quoted(Name("."))') as [m]:
tag_kind = TagKind.ClosedWithoutSlash
with case('Quoted(Name(Cons(".", class_name)))') as [m]:
classes.append(m.class_name)
with case('Quoted(Name(Cons("#", id_name)))') as [m]:
options.append(f'id="{m.id_name}"')
with case('Quoted(Name(name)|String(name))') as [m]:
options.append(m.name)
with case('Quoted(Sexpr(Name(name), String(value)))') as [m]:
options.append(f"{m.name}={json.dumps(m.value)}")
with case('Quoted(other)') as [m]:
raise TypeError(f"Expected name or call, got {m.other}")
with case('element') as [m]:
body.append(m.element)
return TagInfo(classes, options, body, tag_kind)
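# Illustrative example (editor's addition): how TagInfo.as_option_string assembles
# the final HTML attribute string from the pieces collected by parse_html_options.
# It only relies on the TagInfo/TagKind definitions above, so it can stay as an
# uncalled helper; the sample values are hypothetical.
def _example_tag_info_option_string():
    info = TagInfo(classes=["note", "wide"], options=['id="intro"'], body=[], kind=TagKind.Open)
    # Class names are folded into a single class="..." attribute, then the
    # remaining options are appended verbatim.
    assert info.as_option_string == 'class="note wide" id="intro"'
    return info.as_option_string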
@fn(exports, "+")
def div():
"""
Part of the 'fnl.x' module.
Shortcut for %%(tt "b &div")%%
"""
def _div(*args: e.Entity):
info = parse_html_options(args)
return e.BlockTag("div", info.as_option_string, tuple(info.body)) # type: ignore
yield ("(λ ...&[name]|&[(name str)]|inline|block . block)", _div)
_BLOCK_TAGS = frozenset((
"address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl",
"dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h6",
"header", "hr", "li", "main", "nav", "noscript", "ol", "p", "pre", "section",
"table", "tfoot", "ul", "video"
))
_INLINE_TAGS = frozenset((
"a", "abbr", "acronym", "b", "bdo", "big", "br", "button", "cite", "code",
"dfn", "em", "i", "img", "input", "kbd", "label", "map", "object", "output",
"q", "samp", "script", "select", "small", "span", "strong", "sub", "sup",
"textarea", "time", "tt", "var"
))
@fn(exports, "b")
def block_tag():
"""
Part of the 'fnl.x' module.
Creates a block element. See the 'Quoted expressions' tutorial for more info.
"""
def _block_tag(name_arg: e.Entity, *options: e.Entity):
with match(name_arg) as case:
with case('Quoted(Name(name))|String(name)') as [m]:
name: str = m.name
if name in _INLINE_TAGS:
raise TypeError(f"<{name}> is an inline tag")
info = parse_html_options(options)
if info.kind == TagKind.ClosedWithSlash:
return e.ClosedBlockTag(name, info.as_option_string, include_slash=True)
elif info.kind == TagKind.ClosedWithoutSlash:
return e.ClosedBlockTag(name, info.as_option_string, include_slash=False)
else:
return e.BlockTag(name, info.as_option_string, tuple(info.body)) # type: ignore
yield ("(λ str|&[name] ...&[str]|&[name]|&[(name str)]|inline|block . block)", _block_tag)
@fn(exports, "i")
def inline_tag():
"""
Part of the 'fnl.x' module.
Creates an inline element. See the 'Quoted expressions' tutorial for more info.
"""
def _inline_tag(name_arg: e.Entity, *options: e.Entity):
with match(name_arg) as case:
with case('Quoted(Name(name))|String(name)') as [m]:
name: str = m.name
if name in _BLOCK_TAGS:
raise TypeError(f"<{name}> is an inline tag")
info = parse_html_options(options)
if info.kind == TagKind.ClosedWithSlash:
return e.ClosedInlineTag(name, info.as_option_string, include_slash=True)
elif info.kind == TagKind.ClosedWithoutSlash:
return e.ClosedInlineTag(name, info.as_option_string, include_slash=False)
else:
return e.InlineTag(name, info.as_option_string, tuple(info.body)) # type: ignore
yield ("(λ str|&[name] ...&[name]|&[(name str)]|inline . inline)", _inline_tag)
| 32.05298
| 94
| 0.585537
|
6e2ca108ed1e4737bdbcaf16c619d6f48be50061
| 20,374
|
py
|
Python
|
kratos/tests/test_exact_integration.py
|
clazaro/Kratos
|
b947b82c90dfcbf13d60511427f85990d36b90be
|
[
"BSD-4-Clause"
] | 2
|
2020-12-22T11:50:11.000Z
|
2021-09-15T11:36:30.000Z
|
kratos/tests/test_exact_integration.py
|
clazaro/Kratos
|
b947b82c90dfcbf13d60511427f85990d36b90be
|
[
"BSD-4-Clause"
] | 3
|
2021-08-18T16:12:20.000Z
|
2021-09-02T07:36:15.000Z
|
kratos/tests/test_exact_integration.py
|
clazaro/Kratos
|
b947b82c90dfcbf13d60511427f85990d36b90be
|
[
"BSD-4-Clause"
] | 1
|
2017-05-02T00:52:44.000Z
|
2017-05-02T00:52:44.000Z
|
import KratosMultiphysics
import KratosMultiphysics.KratosUnittest as KratosUnittest
class TestExactIntegration(KratosUnittest.TestCase):
def setUp(self):
pass
# Test exact integration in 2D
# LINE
def test_line_exact_integration_1(self):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Line 1
model_part.CreateNewNode(1, 0.00, 0.00, 0.00)
model_part.CreateNewNode(2, 1.00, 0.00, 0.00)
cond1 = model_part.CreateNewCondition("LineCondition2D2N", 1, [1, 2], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility2D2N(2)
# Line 2
model_part.CreateNewNode(3, 0.00, 0.001, 0.00)
model_part.CreateNewNode(4, 1.00, 0.001, 0.00)
cond2 = model_part.CreateNewCondition("LineCondition2D2N", 2, [3, 4], model_part.GetProperties()[1])
normal = cond2.GetGeometry().UnitNormal()
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
matrix_solution = KratosMultiphysics.Matrix()
solution = exact_integration.TestGetExactIntegration(cond1, cond2, matrix_solution)
# Debug
#if solution:
#print("Integration accomplished", matrix_solution)
self.assertTrue(solution)
self.assertAlmostEqual(matrix_solution[0, 0], -0.57735026918963)
self.assertAlmostEqual(matrix_solution[0, 1], 1.0)
self.assertAlmostEqual(matrix_solution[1, 0], 0.57735026918963)
self.assertAlmostEqual(matrix_solution[1, 1], 1.0)
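    # Editor's note on the expected values above (not part of the original test):
    # +/-0.57735026918963 is the two-point Gauss-Legendre abscissa
    # 1/sqrt(3) = 0.5773502691896258..., mapped onto the overlapping segment, and
    # the accompanying weights are 1.0 when the slave line is fully covered by the
    # master (compare test_line_exact_integration_2 below, where a half overlap
    # yields points 0.2113/0.7887 with weights of 0.5).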
def test_line_exact_integration_2(self):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Line 1
model_part.CreateNewNode(1, 0.00, 0.00, 0.00)
model_part.CreateNewNode(2, 1.00, 0.00, 0.00)
cond1 = model_part.CreateNewCondition("LineCondition2D2N", 1, [1, 2], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility2D2N(2)
# Line 2
model_part.CreateNewNode(3, 0.50, 0.001, 0.00)
model_part.CreateNewNode(4, 1.50, 0.001, 0.00)
cond2 = model_part.CreateNewCondition("LineCondition2D2N", 2, [3, 4], model_part.GetProperties()[1])
normal = cond2.GetGeometry().UnitNormal()
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
matrix_solution = KratosMultiphysics.Matrix()
solution = exact_integration.TestGetExactIntegration(cond1, cond2, matrix_solution)
# Debug
#if solution:
#print("Integration accomplished", matrix_solution)
self.assertTrue(solution)
self.assertAlmostEqual(matrix_solution[0, 0], 0.21132486540517492)
self.assertAlmostEqual(matrix_solution[0, 1], 0.5)
self.assertAlmostEqual(matrix_solution[1, 0], 0.7886751345947951)
self.assertAlmostEqual(matrix_solution[1, 1], 0.5)
def test_line_exact_integration_3(self):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Line 1
model_part.CreateNewNode(1, 0.00, -0.5, 0.00)
model_part.CreateNewNode(2, 1.00, 0.5, 0.00)
cond1 = model_part.CreateNewCondition("LineCondition2D2N", 1, [1, 2], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility2D2N(2)
# Line 2
model_part.CreateNewNode(3, 0.0, 0.5, 0.00)
model_part.CreateNewNode(4, 1.0, 0.5, 0.00)
cond2 = model_part.CreateNewCondition("LineCondition2D2N", 2, [3, 4], model_part.GetProperties()[1])
normal = cond2.GetGeometry().UnitNormal()
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
matrix_solution = KratosMultiphysics.Matrix()
solution = exact_integration.TestGetExactIntegration(cond1, cond2, matrix_solution)
# Debug
#if solution:
#print("Integration accomplished", matrix_solution)
self.assertTrue(solution)
self.assertAlmostEqual(matrix_solution[0, 0], 0.21132486540517847)
self.assertAlmostEqual(matrix_solution[1, 0], 0.7886751345948)
self.assertAlmostEqual(matrix_solution[0, 1], 1.0 / (2.0**0.5))
self.assertAlmostEqual(matrix_solution[1, 1], 1.0 / (2.0**0.5))
# Test exact integration in 3D
# TRIANGLE
def _test_triangle_exact_integration_1(self, use_delaunator = False):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Triangle 1
model_part.CreateNewNode(1, 0.00, 0.00, 0.00)
model_part.CreateNewNode(2, 1.00, 0.00, 0.00)
model_part.CreateNewNode(3, 0.00, 1.00, 0.00)
cond1 = model_part.CreateNewCondition("SurfaceCondition3D3N", 1, [1, 2, 3], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility3D3N(2,1.0e6, 0, 1.0, use_delaunator)
# Triangle 2
normal[2] = -1.0
model_part.CreateNewNode(4, 0.00, 0.00, 0.01)
model_part.CreateNewNode(5, 1.00, 0.00, 0.01)
model_part.CreateNewNode(6, 0.00, 1.00, 0.01)
cond2 = model_part.CreateNewCondition("SurfaceCondition3D3N", 2, [4, 5, 6], model_part.GetProperties()[1])
normal = cond2.GetGeometry().UnitNormal()
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(5).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(6).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
matrix_solution = KratosMultiphysics.Matrix()
solution = exact_integration.TestGetExactIntegration(cond1, cond2, matrix_solution)
## Debug
#exact_integration.TestIODebug(model_part, "GiD")
##exact_integration.TestIODebug(model_part, "VTK")
#if solution:
#print("Integration accomplished", matrix_solution)
self.assertTrue(solution)
self.assertAlmostEqual(matrix_solution[0, 0], 1.0 / 6.0)
self.assertAlmostEqual(matrix_solution[0, 1], 1.0 / 6.0)
self.assertAlmostEqual(matrix_solution[0, 2], 1.0 / 6.0)
self.assertAlmostEqual(matrix_solution[1, 0], 4.0 / 6.0)
self.assertAlmostEqual(matrix_solution[1, 1], 1.0 / 6.0)
self.assertAlmostEqual(matrix_solution[1, 2], 1.0 / 6.0)
self.assertAlmostEqual(matrix_solution[2, 0], 1.0 / 6.0)
self.assertAlmostEqual(matrix_solution[2, 1], 4.0 / 6.0)
self.assertAlmostEqual(matrix_solution[2, 2], 1.0 / 6.0)
def test_triangle_exact_integration_1(self):
self._test_triangle_exact_integration_1(False)
    def test_triangle_exact_integration_1_delaunator(self):
self._test_triangle_exact_integration_1(True)
def _test_triangle_exact_integration_2(self, use_delaunator = False):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Triangle 1
model_part.CreateNewNode(1, 0.00, 0.00, 0.00)
model_part.CreateNewNode(2, 1.00, 0.00, 0.00)
model_part.CreateNewNode(3, 0.00, 1.00, 0.00)
cond1 = model_part.CreateNewCondition("SurfaceCondition3D3N", 1, [1, 2, 3], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility3D3N(2,1.0e6, 0, 1.0, use_delaunator)
# Triangle 2
model_part.CreateNewNode(4, 0.00, 0.00, 0.01)
model_part.CreateNewNode(5, 1.00, 0.00, 0.01)
model_part.CreateNewNode(6, 1.00, 1.00, 0.01)
cond2 = model_part.CreateNewCondition("SurfaceCondition3D3N", 2, [4, 5, 6], model_part.GetProperties()[1])
normal = cond2.GetGeometry().UnitNormal()
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(5).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(6).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
solution = exact_integration.TestGetExactAreaIntegration(cond1, cond2)
self.assertAlmostEqual(solution, 0.25)
def test_triangle_exact_integration_2(self):
self._test_triangle_exact_integration_2(False)
    def test_triangle_exact_integration_2_delaunator(self):
self._test_triangle_exact_integration_2(True)
def _test_triangle_exact_integration_3(self, use_delaunator = False):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Triangle 1 and 2
model_part.CreateNewNode(1, 0.00, 0.00, 0.00)
model_part.CreateNewNode(2, 1.00, 0.00, 0.00)
model_part.CreateNewNode(3, 0.00, 1.00, 0.00)
model_part.CreateNewNode(4, 1.00, 1.00, 0.00)
cond1 = model_part.CreateNewCondition("SurfaceCondition3D3N", 1, [1, 2, 3], model_part.GetProperties()[1])
cond2 = model_part.CreateNewCondition("SurfaceCondition3D3N", 2, [2, 4, 3], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility3D3N(2,1.0e6, 0, 1.0, use_delaunator)
# Triangle 3 and 4
model_part.CreateNewNode(5, 0.00, 0.00, 0.01)
model_part.CreateNewNode(6, 1.00, 0.00, 0.01)
model_part.CreateNewNode(7, 0.00, 1.00, 0.01)
model_part.CreateNewNode(8, 1.00, 1.00, 0.01)
cond3 = model_part.CreateNewCondition("SurfaceCondition3D3N", 3, [5, 6, 8], model_part.GetProperties()[1])
cond4 = model_part.CreateNewCondition("SurfaceCondition3D3N", 4, [5, 8, 7], model_part.GetProperties()[1])
normal = cond3.GetGeometry().UnitNormal()
cond3.SetValue(KratosMultiphysics.NORMAL, normal)
cond4.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(5).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(6).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(7).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(8).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
solution = exact_integration.TestGetExactAreaIntegration(cond1, cond3)
self.assertAlmostEqual(solution, 0.25)
solution = exact_integration.TestGetExactAreaIntegration(cond1, cond4)
self.assertAlmostEqual(solution, 0.25)
solution = exact_integration.TestGetExactAreaIntegration(cond2, cond3)
self.assertAlmostEqual(solution, 0.25)
solution = exact_integration.TestGetExactAreaIntegration(cond2, cond4)
self.assertAlmostEqual(solution, 0.25)
def test_triangle_exact_integration_3(self):
self._test_triangle_exact_integration_3(False)
def test_triangle_exact_integration_3_delaunator(self):
self._test_triangle_exact_integration_3(True)
# QUADRILATERAL
def _test_quadrilateral_exact_integration_1(self, use_delaunator = False):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Quadrilateral 1
model_part.CreateNewNode(1, 0.00, 0.00, 0.00)
model_part.CreateNewNode(2, 1.00, 0.00, 0.00)
model_part.CreateNewNode(3, 1.00, 1.00, 0.00)
model_part.CreateNewNode(4, 0.00, 1.00, 0.00)
cond1 = model_part.CreateNewCondition("SurfaceCondition3D4N", 1, [1, 2, 3, 4], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility3D4N(2,1.0e6, 3, 1.0, use_delaunator)
# Quadrilateral 2
model_part.CreateNewNode(5, 0.00, 0.00, 0.01)
model_part.CreateNewNode(6, 1.00, 0.00, 0.01)
model_part.CreateNewNode(7, 1.00, 1.00, 0.01)
model_part.CreateNewNode(8, 0.00, 1.00, 0.01)
cond2 = model_part.CreateNewCondition("SurfaceCondition3D4N", 2, [5, 6, 7, 8], model_part.GetProperties()[1])
normal = cond2.GetGeometry().UnitNormal()
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(5).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(6).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(7).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(8).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
solution = exact_integration.TestGetExactAreaIntegration(cond1, cond2)
self.assertAlmostEqual(solution, 1.0)
def test_quadrilateral_exact_integration_1(self):
self._test_quadrilateral_exact_integration_1(False)
def test_quadrilateral_exact_integration_1_delaunator(self):
self._test_quadrilateral_exact_integration_1(True)
def _test_quadrilateral_exact_integration_2(self, use_delaunator = False):
current_model = KratosMultiphysics.Model()
model_part = current_model.CreateModelPart("Main")
model_part.SetBufferSize(3)
model_part.AddProperties(KratosMultiphysics.Properties(1))
model_part.AddNodalSolutionStepVariable(KratosMultiphysics.NORMAL)
normal = KratosMultiphysics.Vector(3)
# Quadrilateral 1
model_part.CreateNewNode(1, 0.00, 0.00, 0.00)
model_part.CreateNewNode(2, 1.00, 0.00, 0.00)
model_part.CreateNewNode(3, 1.00, 1.00, 0.00)
model_part.CreateNewNode(4, 0.00, 1.00, 0.00)
cond1 = model_part.CreateNewCondition("SurfaceCondition3D4N", 1, [1, 2, 3, 4], model_part.GetProperties()[1])
normal = cond1.GetGeometry().UnitNormal()
cond1.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(1).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(2).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(3).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(4).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
# Creating the utility:
exact_integration = KratosMultiphysics.ExactMortarIntegrationUtility3D4N(2,1.0e6, 0, 1.0, use_delaunator)
# Quadrilateral 2
normal[2] = -1.0
model_part.CreateNewNode(5, 0.50, 0.50, 0.01)
model_part.CreateNewNode(6, 1.50, 0.50, 0.01)
model_part.CreateNewNode(7, 1.50, 1.50, 0.01)
model_part.CreateNewNode(8, 0.50, 1.50, 0.01)
cond2 = model_part.CreateNewCondition("SurfaceCondition3D4N", 2, [5, 6, 7, 8], model_part.GetProperties()[1])
normal = cond2.GetGeometry().UnitNormal()
cond2.SetValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(5).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(6).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(7).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
model_part.GetNode(8).SetSolutionStepValue(KratosMultiphysics.NORMAL, normal)
solution = exact_integration.TestGetExactAreaIntegration(cond1, cond2)
self.assertAlmostEqual(solution, 0.25)
def test_quadrilateral_exact_integration_2(self):
self._test_quadrilateral_exact_integration_2(False)
def test_quadrilateral_exact_integration_2_delaunator(self):
self._test_quadrilateral_exact_integration_2(True)
if __name__ == '__main__':
KratosUnittest.main()
| 47.053118
| 117
| 0.714587
|
f6ef41fa89cc98faf93c353870deefa90752a7a9
| 5,285
|
py
|
Python
|
aiida/cmdline/commands/cmd_user.py
|
tomzhang/aiida_core
|
949810e9f3daff0f748c5c9aa1dde4f5222bb49b
|
[
"BSD-2-Clause"
] | 1
|
2019-04-29T12:39:31.000Z
|
2019-04-29T12:39:31.000Z
|
aiida/cmdline/commands/cmd_user.py
|
tomzhang/aiida_core
|
949810e9f3daff0f748c5c9aa1dde4f5222bb49b
|
[
"BSD-2-Clause"
] | null | null | null |
aiida/cmdline/commands/cmd_user.py
|
tomzhang/aiida_core
|
949810e9f3daff0f748c5c9aa1dde4f5222bb49b
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
"""
This allows one to set up and configure a user from the command line.
"""
from __future__ import absolute_import
from functools import partial
import click
from aiida.cmdline.commands.cmd_verdi import verdi
from aiida.cmdline.params.types.user import UserParamType
from aiida.cmdline.utils.decorators import with_dbenv
from aiida.cmdline.params import options
@verdi.group('user')
def verdi_user():
"""Inspect and manage users."""
pass
def get_default(value, ctx):
"""
Get the default argument using a user instance property
:param value: The name of the property to use
:param ctx: The click context (which will be used to get the user)
:return: The default value, or None
"""
user = ctx.params['user']
value = getattr(user, value)
# In our case the empty string means there is no default
if value == "":
return None
return value
PASSWORD_UNCHANGED = '***' # noqa
@verdi_user.command()
@click.argument('user', metavar='USER', type=UserParamType(create=True))
@options.NON_INTERACTIVE()
@click.option(
'--first-name',
prompt='First name',
type=str,
contextual_default=partial(get_default, 'first_name'),
cls=options.interactive.InteractiveOption)
@click.option(
'--last-name',
prompt='Last name',
type=str,
contextual_default=partial(get_default, 'last_name'),
cls=options.interactive.InteractiveOption)
@click.option(
'--institution',
prompt='Institution',
type=str,
contextual_default=partial(get_default, 'institution'),
cls=options.interactive.InteractiveOption)
@click.option(
'--password',
prompt='Password',
hide_input=True,
required=False,
type=str,
default=PASSWORD_UNCHANGED,
confirmation_prompt=True,
cls=options.interactive.InteractiveOption)
@with_dbenv()
def configure(user, first_name, last_name, institution, password, non_interactive):
"""
Create or update a user. The email address is taken as the user identifier.
"""
# pylint: disable=W0612,W0613
if first_name is not None:
user.first_name = first_name
if last_name is not None:
user.last_name = last_name
if institution is not None:
user.institution = institution
if password != PASSWORD_UNCHANGED:
user.password = password
if user.is_stored:
action = 'updated'
else:
action = 'created'
user.store()
click.echo(">> User {} {} ({}) {}. <<".format(user.first_name, user.last_name, user.email, action))
if not user.has_usable_password():
click.echo("** NOTE: no password set for this user, ")
click.echo(" so they will not be able to login")
click.echo(" via the REST API and the Web Interface.")
# pylint: disable=too-many-branches
@verdi_user.command('list')
@click.option('--color', is_flag=True, help='Show results with colors', default=False)
@with_dbenv()
def user_list(color):
"""
List all the users.
:param color: Show the list using colors
"""
from aiida.common.utils import get_configured_user_email
from aiida.common.exceptions import ConfigurationError
from aiida.orm.backend import construct_backend
backend = construct_backend()
try:
current_user = get_configured_user_email()
except ConfigurationError:
current_user = None
if current_user is None:
click.echo("### No default user configured yet, run 'verdi install'! ###", err=True)
for user in backend.users.all():
name_pieces = []
if user.first_name:
name_pieces.append(user.first_name)
if user.last_name:
name_pieces.append(user.last_name)
full_name = " ".join(name_pieces)
if full_name:
full_name = " {}".format(full_name)
institution_str = " ({})".format(user.institution) if user.institution else ""
permissions_list = []
if not user.has_usable_password():
permissions_list.append("NO_PWD")
color_id = 'black' # Dark gray
else:
color_id = 'blue' # Blue
permissions_str = ",".join(permissions_list)
if permissions_str:
permissions_str = " [{}]".format(permissions_str)
if user.email == current_user:
symbol = ">"
color_id = 'red'
else:
symbol = "*"
if not color:
color_id = None
click.secho("{}{}".format(symbol, user.email), fg=color_id, bold=True, nl=False)
click.secho(":{}{}{}".format(full_name, institution_str, permissions_str), fg=color_id)
| 32.423313
| 103
| 0.620246
|
274dea3e036eb754e7b8fe458d1f35bb40d13fcc
| 2,750
|
py
|
Python
|
examples/process_param_sweep_sims.py
|
georgegunter/flow
|
15848ec9bafd250364a51fa162786037645b19bf
|
[
"MIT"
] | null | null | null |
examples/process_param_sweep_sims.py
|
georgegunter/flow
|
15848ec9bafd250364a51fa162786037645b19bf
|
[
"MIT"
] | null | null | null |
examples/process_param_sweep_sims.py
|
georgegunter/flow
|
15848ec9bafd250364a51fa162786037645b19bf
|
[
"MIT"
] | null | null | null |
import sys
import os
import numpy as np
from copy import deepcopy
import csv
import ray
def get_sim_data_dict_i24(csv_path):
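# Descriptive note (added for clarity): this reads a per-timestep simulation
# CSV and groups its rows by vehicle id (column 1), keeping only rows with
# time > 300 s whose edge is neither 'Eastbound_On_1' nor 'Eastbound_Off_2'.
# The returned dict maps each vehicle id to its list of raw CSV rows.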
row_num = 1
curr_veh_id = 'id'
sim_dict = {}
curr_veh_data = []
with open(csv_path, newline='') as csvfile:
csvreader = csv.reader(csvfile, delimiter=',')
for row in csvreader:
if(row_num > 1):
# Don't read header
if(curr_veh_id != row[1]):
#Add in new data to the dictionary:
#Store old data:
if(len(curr_veh_data)>0):
sim_dict[curr_veh_id] = curr_veh_data
#Reset where the data is being stashed:
curr_veh_data = []
curr_veh_id = row[1] # Set new veh id
#Allocate space for storing:
#sim_dict[curr_veh_id] = []
curr_veh_id = row[1]
time = float(row[0])
edge = row[-9]
include_data = (time>300 and edge != 'Eastbound_On_1' and edge != 'Eastbound_Off_2')
if(include_data):
curr_veh_data.append(row)
# sys.stdout.write('\r'+'Veh id: '+curr_veh_id+ ' row: ' +str(row_num)+'\r')
row_num += 1
#Add the very last vehicle's information:
if(len(curr_veh_data)>0):
sim_dict[curr_veh_id] = curr_veh_data
# sys.stdout.write('\r'+'Veh id: '+curr_veh_id+ ' row: ' +str(row_num)+'\r')
print('Data loaded.')
return sim_dict
def write_sim_results(csv_path,file_write_path):
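# Descriptive note (added for clarity): for each vehicle found by
# get_sim_data_dict_i24 this computes the mean speed and speed standard
# deviation (column 4) plus a flag for whether the vehicle was ever marked
# malicious (second-to-last column), and writes one row per vehicle to
# file_write_path as [veh_id, mean_speed, speed_std, is_malicious].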
sim_data_dict = get_sim_data_dict_i24(csv_path)
data_list = []
veh_ids = list(sim_data_dict.keys())
for veh_id in veh_ids:
temp_veh_data = np.array(sim_data_dict[veh_id])
speeds = temp_veh_data[:,4].astype(float)
is_malicious_vals = temp_veh_data[:,-2].astype(float)
mean_speed = np.mean(speeds)
speed_std = np.std(speeds)
is_malicious = np.sum(is_malicious_vals)>0
data_list.append([veh_id,mean_speed,speed_std,is_malicious])
with open(file_write_path, 'w+', newline='') as file:
write = csv.writer(file,delimiter=',')
write.writerows(data_list)
print('Written: '+file_write_path)
@ray.remote
def write_sim_results_ray(csv_path,file_write_path):
write_sim_results(csv_path,file_write_path)
if __name__ == '__main__':
ray.init(num_cpus=5)
repo_path = '/Volumes/My Passport for Mac/benign_parameter_sweep/'
results_repo_path = '/Volumes/My Passport for Mac/benign_parameter_sweep/Sim_Results'
file_names = os.listdir(repo_path)
csv_paths = []
csv_file_names = []
for file in file_names:
if(file[-3:]=='csv'):
csv_paths.append(os.path.join(repo_path,file))
csv_file_names.append(file)
temp = []
for i,csv_path in enumerate(csv_paths):
file_name = csv_file_names[i][:-4]+'_results.csv'
file_write_path = os.path.join(results_repo_path,file_name)
temp.append(write_sim_results_ray.remote(csv_path,file_write_path))
temp_data = ray.get(temp)
print('Finished writing all files.')
| 26.960784
| 88
| 0.705091
|
afae709ce5a9d1f52d0645819e9b85c3e7befd13
| 255
|
py
|
Python
|
tests/manage.py
|
jmons/ramlwrap
|
43506cde5aa1d5717b217273ab460e877ef25db1
|
[
"MIT"
] | 9
|
2016-10-24T09:00:53.000Z
|
2021-12-24T10:15:14.000Z
|
tests/manage.py
|
jmons/ramlwrap
|
43506cde5aa1d5717b217273ab460e877ef25db1
|
[
"MIT"
] | 33
|
2017-03-14T11:36:59.000Z
|
2021-12-27T11:20:09.000Z
|
tests/manage.py
|
jmons/ramlwrap
|
43506cde5aa1d5717b217273ab460e877ef25db1
|
[
"MIT"
] | 4
|
2017-03-30T09:58:22.000Z
|
2020-02-18T12:42:28.000Z
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "RamlWrapTest.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 23.181818
| 76
| 0.776471
|
87137f8839e4ca1e8e53ec5b61ef86e5b578fc2d
| 6,420
|
py
|
Python
|
tests/matchers/test_classification_match.py
|
phillips96/similarity
|
3794f288f17f47f1f90b5368e5c0eeac1e81e10d
|
[
"Apache-2.0"
] | 706
|
2021-09-04T02:11:05.000Z
|
2022-03-31T13:29:14.000Z
|
tests/matchers/test_classification_match.py
|
phillips96/similarity
|
3794f288f17f47f1f90b5368e5c0eeac1e81e10d
|
[
"Apache-2.0"
] | 119
|
2021-09-01T22:32:40.000Z
|
2022-03-30T22:39:27.000Z
|
tests/matchers/test_classification_match.py
|
phillips96/similarity
|
3794f288f17f47f1f90b5368e5c0eeac1e81e10d
|
[
"Apache-2.0"
] | 57
|
2021-09-04T02:11:14.000Z
|
2022-03-31T13:29:15.000Z
|
import re
from typing import Tuple
import numpy as np
import pytest
import tensorflow as tf
from tensorflow_similarity.matchers import ClassificationMatch
from tensorflow_similarity.types import FloatTensor, IntTensor
attributes = ("tp", "fp", "tn", "fn", "count")
class ConcreteClassificationMatch(ClassificationMatch):
def derive_match(
self, lookup_labels: IntTensor, lookup_distances: FloatTensor
) -> Tuple[FloatTensor, FloatTensor]:
return lookup_labels, lookup_distances
class BadClassificationMatch(ClassificationMatch):
"Derive match should return 2D tensors, but here we return 1D."
def derive_match(
self, lookup_labels: IntTensor, lookup_distances: FloatTensor
) -> Tuple[FloatTensor, FloatTensor]:
return (
tf.reshape(lookup_labels, (-1,)),
tf.reshape(lookup_distances, (-1,)),
)
def test_compile():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
# Pass distance_thresholds as a 1D tensor.
distance_thresholds = tf.constant([1, 2, 3, 4, 5, 6, 7])
cm.compile(distance_thresholds=distance_thresholds)
expected_dt = tf.cast(distance_thresholds, dtype="float32")
assert cm.name == "foo"
assert cm.canonical_name == "bar"
assert tf.math.reduce_all(
tf.shape(cm.distance_thresholds) == tf.shape(expected_dt)
)
assert tf.math.reduce_all(
tf.math.equal(cm.distance_thresholds, expected_dt)
)
def test_compute_match_indicators():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
# Pass distance_thresholds as a 1D tensor.
distance_thresholds = tf.constant([1.0, 2.0])
cm.compile(distance_thresholds=distance_thresholds)
query_labels = tf.constant([10, 20, 10, 20])
lookup_labels = tf.constant([[10], [20], [30], [40]])
lookup_distances = tf.constant([[1.0], [1.0], [2.0], [2.0]])
match_mask, distance_mask = cm._compute_match_indicators(
query_labels, lookup_labels, lookup_distances
)
np.testing.assert_array_equal(
match_mask.numpy(), np.array([[True], [True], [False], [False]])
)
np.testing.assert_array_equal(
distance_mask.numpy(),
np.array([[True, True], [True, True], [False, True], [False, True]]),
)
def test_compute_match_indicators_1d():
"""Check that we handle 1D derive match results."""
cm = BadClassificationMatch(name="foo", canonical_name="bar")
# Pass distance_thresholds as a 1D tensor.
distance_thresholds = tf.constant([1.0, 2.0])
cm.compile(distance_thresholds=distance_thresholds)
query_labels = tf.constant([10, 20, 10, 20])
lookup_labels = tf.constant([[10], [20], [30], [40]])
lookup_distances = tf.constant([[1.0], [1.0], [2.0], [2.0]])
match_mask, distance_mask = cm._compute_match_indicators(
query_labels, lookup_labels, lookup_distances
)
np.testing.assert_array_equal(
match_mask.numpy(), np.array([[True], [True], [False], [False]])
)
np.testing.assert_array_equal(
distance_mask.numpy(),
np.array([[True, True], [True, True], [False, True], [False, True]]),
)
def test_compute_count():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
# Pass distance_thresholds as a 1D tensor.
distance_thresholds = tf.constant([1.0, 2.0])
cm.compile(distance_thresholds=distance_thresholds)
query_labels = tf.constant([10, 20, 10, 20])
lookup_labels = tf.constant([[10], [20], [30], [40]])
lookup_distances = tf.constant([[1.0], [1.0], [2.0], [2.0]])
cm.compute_count(query_labels, lookup_labels, lookup_distances)
np.testing.assert_array_equal(cm.tp.numpy(), np.array([2, 2]))
np.testing.assert_array_equal(cm.fp.numpy(), np.array([0, 2]))
np.testing.assert_array_equal(cm.tn.numpy(), np.array([2, 0]))
np.testing.assert_array_equal(cm.fn.numpy(), np.array([0, 0]))
assert cm.count == 4
@pytest.mark.parametrize("attribute", attributes, ids=attributes)
def test_attribute_asserts(attribute):
"""Uninitialized attrs should through a ValueError."""
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
msg = "Matcher.compute_count() must be called before accessing the counts."
with pytest.raises(AttributeError, match=re.escape(msg)):
getattr(cm, attribute)
def test_check_shape_valid():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
queries = tf.constant([[1], [2], [3]])
ll = tf.constant([[1], [2], [3]])
ld = tf.constant([[0.1], [0.2], [0.3]])
assert cm._check_shape(queries, ll, ld)
def test_check_shape_invalid_queries():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
queries = tf.constant([[1], [2], [3], [4]])
ll = tf.constant([[1], [2], [3]])
ld = tf.constant([[0.1], [0.2], [0.3]])
msg = "Number of query labels must match the number of lookup_label sets."
with pytest.raises(ValueError, match=re.escape(msg)):
cm._check_shape(queries, ll, ld)
def test_check_shape_invalid_lookup_rank():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
queries = tf.constant([[1], [2], [3]])
ll = tf.constant([1, 2, 3])
ld = tf.constant([[0.1], [0.2], [0.3]])
msg = "lookup_labels must be a 2D tensor of shape [len(query_labels), K]."
with pytest.raises(ValueError, match=re.escape(msg)):
cm._check_shape(queries, ll, ld)
def test_check_shape_invalid_distance_rank():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
queries = tf.constant([[1], [2], [3]])
ll = tf.constant([[1], [2], [3]])
ld = tf.constant([0.1, 0.2, 0.3])
msg = (
"lookup_distances must be a 2D tensor of shape "
"[len(query_labels), K]."
)
with pytest.raises(ValueError, match=re.escape(msg)):
cm._check_shape(queries, ll, ld)
def test_check_shape_labels_dist_mismatch():
cm = ConcreteClassificationMatch(name="foo", canonical_name="bar")
queries = tf.constant([[1], [2], [3]])
ll = tf.constant([[1], [2], [3]])
ld = tf.constant([[0.1, 0.2], [0.2, 0.3], [0.3, 0.4]])
msg = (
"Number of number of lookup labels must match the number "
"of lookup distances."
)
with pytest.raises(ValueError, match=re.escape(msg)):
cm._check_shape(queries, ll, ld)
| 32.261307
| 79
| 0.661215
|
0bbfff14eeef42b5fcb9316cb03104b5f9b5ac91
| 1,264
|
py
|
Python
|
nnvm/tvm/apps/extension/tests/test_ext.py
|
CynthiaProtector/helo
|
ad9e22363a92389b3fa519ecae9061c6ead28b05
|
[
"Apache-2.0"
] | 22
|
2019-02-20T12:42:20.000Z
|
2021-12-25T06:09:46.000Z
|
nnvm/tvm/apps/extension/tests/test_ext.py
|
CynthiaProtector/helo
|
ad9e22363a92389b3fa519ecae9061c6ead28b05
|
[
"Apache-2.0"
] | 4
|
2019-04-01T07:36:04.000Z
|
2022-03-24T03:11:26.000Z
|
nnvm/tvm/apps/extension/tests/test_ext.py
|
CynthiaProtector/helo
|
ad9e22363a92389b3fa519ecae9061c6ead28b05
|
[
"Apache-2.0"
] | 7
|
2019-03-20T16:04:37.000Z
|
2021-04-28T18:40:11.000Z
|
import tvm_ext
import tvm
import numpy as np
def test_bind_add():
def add(a, b):
return a + b
f = tvm_ext.bind_add(add, 1)
assert f(2) == 3
def test_ext_dev():
n = 10
A = tvm.placeholder((n,), name='A')
B = tvm.compute((n,), lambda *i: A(*i) + 1.0, name='B')
s = tvm.create_schedule(B.op)
def check_llvm():
if not tvm.module.enabled("llvm"):
return
f = tvm.build(s, [A, B], "ext_dev", "llvm")
ctx = tvm.ext_dev(0)
# launch the kernel.
a = tvm.nd.array(np.random.uniform(size=n).astype(A.dtype), ctx)
b = tvm.nd.array(np.zeros(n, dtype=B.dtype), ctx)
f(a, b)
np.testing.assert_allclose(b.asnumpy(), a.asnumpy() + 1)
check_llvm()
def test_sym_add():
a = tvm.var('a')
b = tvm.var('b')
c = tvm_ext.sym_add(a, b)
assert c.a == a and c.b == b
def test_ext_vec():
ivec = tvm_ext.ivec_create(1, 2, 3)
assert(isinstance(ivec, tvm_ext.IntVec))
assert ivec[0] == 1
assert ivec[1] == 2
def ivec_cb(v2):
assert(isinstance(v2, tvm_ext.IntVec))
assert v2[2] == 3
tvm.convert(ivec_cb)(ivec)
if __name__ == "__main__":
test_ext_dev()
test_ext_vec()
test_bind_add()
test_sym_add()
| 24.307692
| 72
| 0.567247
|
89802780bb1e9157efa65ed027fa1ee95649e3fa
| 2,128
|
py
|
Python
|
python/interview_questions/longest_repeated_substring.py
|
rcanepa/cs-fundamentals
|
b362fc206417501e53a5739df1edf7568901eef8
|
[
"MIT"
] | null | null | null |
python/interview_questions/longest_repeated_substring.py
|
rcanepa/cs-fundamentals
|
b362fc206417501e53a5739df1edf7568901eef8
|
[
"MIT"
] | null | null | null |
python/interview_questions/longest_repeated_substring.py
|
rcanepa/cs-fundamentals
|
b362fc206417501e53a5739df1edf7568901eef8
|
[
"MIT"
] | null | null | null |
"""Longest repeated substring (or LCP = longest common prefix in a suffix array).
Problem: find the longest repeated substring inside a string.
Steps:
1. Create suffixes. This should be linear in time and space, but it isn't.
Slicing strings in Python (with slice or [a:b]) is a linear operation
with regard to the size of the string. As a result, this implementation
runs in quadratic time, O(N^2).
2. Sort suffixes. This should be N * log(N) in time.
3. Find LCP between adjacent suffixes.
Usage:
This script can be used by reading data from the standard input. Example:
cat ~/manifesto.txt | python3 -m interview_questions.longest_repeated_substring
"""
import sys
import time
def lcp(s1, s2):
"""Return the length of the longest common prefix
between strings `s1` and `s2`."""
comp = 0
for i in range(min(len(s1), len(s2))):
if s1[i] != s2[i]:
break
comp += 1
return comp
def lrs(text):
"""Return the longest repeated substring using a Suffix Array."""
# Step 1: create the suffixes array.
suffixes = []
for i in range(len(text)):
suffixes.append(text[i:])
# Step 2: sort the suffixes array.
sorted_suffixes = sorted(suffixes)
# Step 3: find the longest repeated substring.
result = ""
for i in range(len(sorted_suffixes) - 1):
l = lcp(sorted_suffixes[i], sorted_suffixes[i + 1])
if l > len(result):
result = sorted_suffixes[i][:l]
return result
if __name__ == "__main__":
s = ""
t0 = time.time()
for line in sys.stdin:
s += line
t1 = time.time()
print("################################################################################")
print('-> Took {:.3f}ms to read the file.'.format((t1 - t0) * 1000))
t0 = time.time()
r = lrs(s)
t1 = time.time()
print('-> Took {:.3f}ms to find the longest repeated substring in the file.'.format((t1 - t0) * 1000))
print("################################################################################")
print("The longest repeated substring is:")
print(r)
| 31.294118
| 103
| 0.577068
|
34176b51c02959053b90ad062a51e6997b5647ad
| 4,160
|
py
|
Python
|
tools/make_test_data.py
|
bclau/ceilometer
|
90ad86c08494596dfa03c8cbfcea2c2be58fc8dc
|
[
"Apache-2.0"
] | null | null | null |
tools/make_test_data.py
|
bclau/ceilometer
|
90ad86c08494596dfa03c8cbfcea2c2be58fc8dc
|
[
"Apache-2.0"
] | null | null | null |
tools/make_test_data.py
|
bclau/ceilometer
|
90ad86c08494596dfa03c8cbfcea2c2be58fc8dc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Author: Doug Hellmann <doug.hellmann@dreamhost.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command line tool for creating test data for ceilometer.
"""
import argparse
import datetime
import logging
import sys
from oslo.config import cfg
from ceilometer.publisher import rpc
from ceilometer import sample
from ceilometer import storage
from ceilometer.openstack.common import timeutils
def main():
cfg.CONF([], project='ceilometer')
parser = argparse.ArgumentParser(
description='generate metering data',
)
parser.add_argument(
'--interval',
default=10,
type=int,
help='the period between events, in minutes',
)
parser.add_argument(
'--start',
default=31,
help='the number of days in the past to start timestamps',
)
parser.add_argument(
'--end',
default=2,
help='the number of days into the future to continue timestamps',
)
parser.add_argument(
'--type',
choices=('gauge', 'cumulative'),
default='gauge',
help='counter type',
)
parser.add_argument(
'--unit',
default=None,
help='counter unit',
)
parser.add_argument(
'--project',
help='project id of owner',
)
parser.add_argument(
'--user',
help='user id of owner',
)
parser.add_argument(
'resource',
help='the resource id for the meter data',
)
parser.add_argument(
'counter',
help='the counter name for the meter data',
)
parser.add_argument(
'volume',
help='the amount to attach to the meter',
type=int,
default=1,
)
args = parser.parse_args()
# Set up logging to use the console
console = logging.StreamHandler(sys.stderr)
console.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(message)s')
console.setFormatter(formatter)
root_logger = logging.getLogger('')
root_logger.addHandler(console)
root_logger.setLevel(logging.DEBUG)
# Connect to the metering database
conn = storage.get_connection(cfg.CONF)
# Find the user and/or project for a real resource
if not (args.user or args.project):
for r in conn.get_resources():
if r['resource_id'] == args.resource:
args.user = r['user_id']
args.project = r['project_id']
break
# Compute start and end timestamps for the
# new data.
timestamp = timeutils.parse_isotime(args.start)
end = timeutils.parse_isotime(args.end)
increment = datetime.timedelta(minutes=args.interval)
# Generate events
n = 0
while timestamp <= end:
c = sample.Sample(name=args.counter,
type=args.type,
unit=args.unit,
volume=args.volume,
user_id=args.user,
project_id=args.project,
resource_id=args.resource,
timestamp=timestamp,
resource_metadata={},
source='artificial',
)
data = rpc.meter_message_from_counter(
c,
cfg.CONF.publisher_rpc.metering_secret)
conn.record_metering_data(data)
n += 1
timestamp = timestamp + increment
print('Added %d new events' % n)
return 0
if __name__ == '__main__':
main()
| 28.493151
| 75
| 0.602644
|