Dataset schema (column: type, range):
- hexsha: string, length 40
- size: int64, 3 to 1.03M
- ext: string, 10 classes
- lang: string, 1 class
- max_stars_repo_path: string, length 3 to 972
- max_stars_repo_name: string, length 6 to 130
- max_stars_repo_head_hexsha: string, length 40 to 78
- max_stars_repo_licenses: list, length 1 to 10
- max_stars_count: int64, 1 to 191k, nullable
- max_stars_repo_stars_event_min_datetime: string, length 24, nullable
- max_stars_repo_stars_event_max_datetime: string, length 24, nullable
- max_issues_repo_path: string, length 3 to 972
- max_issues_repo_name: string, length 6 to 130
- max_issues_repo_head_hexsha: string, length 40 to 78
- max_issues_repo_licenses: list, length 1 to 10
- max_issues_count: int64, 1 to 116k, nullable
- max_issues_repo_issues_event_min_datetime: string, length 24, nullable
- max_issues_repo_issues_event_max_datetime: string, length 24, nullable
- max_forks_repo_path: string, length 3 to 972
- max_forks_repo_name: string, length 6 to 130
- max_forks_repo_head_hexsha: string, length 40 to 78
- max_forks_repo_licenses: list, length 1 to 10
- max_forks_count: int64, 1 to 105k, nullable
- max_forks_repo_forks_event_min_datetime: string, length 24, nullable
- max_forks_repo_forks_event_max_datetime: string, length 24, nullable
- content: string, length 3 to 1.03M
- avg_line_length: float64, 1.13 to 941k
- max_line_length: int64, 2 to 941k
- alphanum_fraction: float64, 0 to 1
hexsha: 5c629102950de0908ab0512a2741f2b444ea39e8 | size: 12,653 | ext: py | lang: Python
repo_path (stars/issues/forks): site-packages/osc_lib/tests/cli/test_parseractions.py | repo_name: hariza17/freezer_libraries | head_hexsha: e0bd890eba5e7438976fb3b4d66c41c128bab790 | licenses: ["PSF-2.0"]
max_stars_count, max_issues_count, max_forks_count and event datetimes: null
# Copyright 2012-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import argparse
from osc_lib.cli import parseractions
from osc_lib.tests import utils
class TestKeyValueAction(utils.TestCase):
def setUp(self):
super(TestKeyValueAction, self).setUp()
self.parser = argparse.ArgumentParser()
# Set up our typical usage
self.parser.add_argument(
'--property',
metavar='<key=value>',
action=parseractions.KeyValueAction,
default={'green': '20%', 'format': '#rgb'},
help='Property to store for this volume '
'(repeat option to set multiple properties)',
)
def test_good_values(self):
results = self.parser.parse_args([
'--property', 'red=',
'--property', 'green=100%',
'--property', 'blue=50%',
])
actual = getattr(results, 'property', {})
# All should pass through unmolested
expect = {'red': '', 'green': '100%', 'blue': '50%', 'format': '#rgb'}
self.assertEqual(expect, actual)
def test_error_values(self):
data_list = [
['--property', 'red', ],
['--property', '=', ],
['--property', '=red', ]
]
for data in data_list:
self.assertRaises(argparse.ArgumentTypeError,
self.parser.parse_args, data)
class TestMultiKeyValueAction(utils.TestCase):
def setUp(self):
super(TestMultiKeyValueAction, self).setUp()
self.parser = argparse.ArgumentParser()
# Set up our typical usage
self.parser.add_argument(
'--test',
metavar='req1=xxx,req2=yyy',
action=parseractions.MultiKeyValueAction,
dest='test',
default=None,
required_keys=['req1', 'req2'],
optional_keys=['opt1', 'opt2'],
help='Test'
)
def test_good_values(self):
results = self.parser.parse_args([
'--test', 'req1=aaa,req2=bbb',
'--test', 'req1=,req2=',
])
actual = getattr(results, 'test', [])
expect = [
{'req1': 'aaa', 'req2': 'bbb'},
{'req1': '', 'req2': ''},
]
self.assertItemsEqual(expect, actual)
def test_empty_required_optional(self):
self.parser.add_argument(
'--test-empty',
metavar='req1=xxx,req2=yyy',
action=parseractions.MultiKeyValueAction,
dest='test_empty',
default=None,
required_keys=[],
optional_keys=[],
help='Test'
)
results = self.parser.parse_args([
'--test-empty', 'req1=aaa,req2=bbb',
'--test-empty', 'req1=,req2=',
])
actual = getattr(results, 'test_empty', [])
expect = [
{'req1': 'aaa', 'req2': 'bbb'},
{'req1': '', 'req2': ''},
]
self.assertItemsEqual(expect, actual)
def test_error_values_with_comma(self):
data_list = [
['--test', 'mmm,nnn=zzz', ],
['--test', 'nnn=zzz,=', ],
['--test', 'nnn=zzz,=zzz', ]
]
for data in data_list:
self.assertRaises(argparse.ArgumentTypeError,
self.parser.parse_args, data)
def test_error_values_without_comma(self):
self.assertRaises(
argparse.ArgumentTypeError,
self.parser.parse_args,
[
'--test', 'mmmnnn',
]
)
def test_missing_key(self):
self.assertRaises(
argparse.ArgumentTypeError,
self.parser.parse_args,
[
'--test', 'req2=ddd',
]
)
def test_invalid_key(self):
self.assertRaises(
argparse.ArgumentTypeError,
self.parser.parse_args,
[
'--test', 'req1=aaa,req2=bbb,aaa=req1',
]
)
def test_required_keys_not_list(self):
self.assertRaises(
TypeError,
self.parser.add_argument,
'--test-required-dict',
metavar='req1=xxx,req2=yyy',
action=parseractions.MultiKeyValueAction,
dest='test_required_dict',
default=None,
required_keys={'aaa': 'bbb'},
optional_keys=['opt1', 'opt2'],
help='Test'
)
def test_optional_keys_not_list(self):
self.assertRaises(
TypeError,
self.parser.add_argument,
'--test-optional-dict',
metavar='req1=xxx,req2=yyy',
action=parseractions.MultiKeyValueAction,
dest='test_optional_dict',
default=None,
required_keys=['req1', 'req2'],
optional_keys={'aaa': 'bbb'},
help='Test'
)
class TestMultiKeyValueCommaAction(utils.TestCase):
def setUp(self):
super(TestMultiKeyValueCommaAction, self).setUp()
self.parser = argparse.ArgumentParser()
# Typical usage
self.parser.add_argument(
'--test',
metavar='req1=xxx,yyy',
action=parseractions.MultiKeyValueCommaAction,
dest='test',
default=None,
required_keys=['req1'],
optional_keys=['opt2'],
help='Test',
)
def test_mkvca_required(self):
results = self.parser.parse_args([
'--test', 'req1=aaa,bbb',
])
actual = getattr(results, 'test', [])
expect = [
{'req1': 'aaa,bbb'},
]
self.assertItemsEqual(expect, actual)
results = self.parser.parse_args([
'--test', 'req1=',
])
actual = getattr(results, 'test', [])
expect = [
{'req1': ''},
]
self.assertItemsEqual(expect, actual)
results = self.parser.parse_args([
'--test', 'req1=aaa,bbb',
'--test', 'req1=',
])
actual = getattr(results, 'test', [])
expect = [
{'req1': 'aaa,bbb'},
{'req1': ''},
]
self.assertItemsEqual(expect, actual)
def test_mkvca_optional(self):
results = self.parser.parse_args([
'--test', 'req1=aaa,bbb',
])
actual = getattr(results, 'test', [])
expect = [
{'req1': 'aaa,bbb'},
]
self.assertItemsEqual(expect, actual)
results = self.parser.parse_args([
'--test', 'req1=aaa,bbb',
'--test', 'req1=,opt2=ccc',
])
actual = getattr(results, 'test', [])
expect = [
{'req1': 'aaa,bbb'},
{'req1': '', 'opt2': 'ccc'},
]
self.assertItemsEqual(expect, actual)
try:
results = self.parser.parse_args([
'--test', 'req1=aaa,bbb',
'--test', 'opt2=ccc',
])
self.fail('ArgumentTypeError should be raised')
except argparse.ArgumentTypeError as e:
self.assertEqual(
'Missing required keys req1.\nRequired keys are: req1',
str(e),
)
def test_mkvca_multiples(self):
results = self.parser.parse_args([
'--test', 'req1=aaa,bbb,opt2=ccc',
])
actual = getattr(results, 'test', [])
expect = [{
'req1': 'aaa,bbb',
'opt2': 'ccc',
}]
self.assertItemsEqual(expect, actual)
def test_mkvca_no_required_optional(self):
self.parser.add_argument(
'--test-empty',
metavar='req1=xxx,yyy',
action=parseractions.MultiKeyValueCommaAction,
dest='test_empty',
default=None,
required_keys=[],
optional_keys=[],
help='Test',
)
results = self.parser.parse_args([
'--test-empty', 'req1=aaa,bbb',
])
actual = getattr(results, 'test_empty', [])
expect = [
{'req1': 'aaa,bbb'},
]
self.assertItemsEqual(expect, actual)
results = self.parser.parse_args([
'--test-empty', 'xyz=aaa,bbb',
])
actual = getattr(results, 'test_empty', [])
expect = [
{'xyz': 'aaa,bbb'},
]
self.assertItemsEqual(expect, actual)
def test_mkvca_invalid_key(self):
try:
self.parser.parse_args([
'--test', 'req1=aaa,bbb=',
])
self.fail('ArgumentTypeError should be raised')
except argparse.ArgumentTypeError as e:
self.assertIn(
'Invalid keys bbb specified.\nValid keys are:',
str(e),
)
try:
self.parser.parse_args([
'--test', 'nnn=aaa',
])
self.fail('ArgumentTypeError should be raised')
except argparse.ArgumentTypeError as e:
self.assertIn(
'Invalid keys nnn specified.\nValid keys are:',
str(e),
)
def test_mkvca_value_no_key(self):
try:
self.parser.parse_args([
'--test', 'req1=aaa,=bbb',
])
self.fail('ArgumentTypeError should be raised')
except argparse.ArgumentTypeError as e:
self.assertEqual(
"A key must be specified before '=': =bbb",
str(e),
)
try:
self.parser.parse_args([
'--test', '=nnn',
])
self.fail('ArgumentTypeError should be raised')
except argparse.ArgumentTypeError as e:
self.assertEqual(
"A key must be specified before '=': =nnn",
str(e),
)
try:
self.parser.parse_args([
'--test', 'nnn',
])
self.fail('ArgumentTypeError should be raised')
except argparse.ArgumentTypeError as e:
self.assertIn(
'A key=value pair is required:',
str(e),
)
def test_mkvca_required_keys_not_list(self):
self.assertRaises(
TypeError,
self.parser.add_argument,
'--test-required-dict',
metavar='req1=xxx',
action=parseractions.MultiKeyValueCommaAction,
dest='test_required_dict',
default=None,
required_keys={'aaa': 'bbb'},
optional_keys=['opt1', 'opt2'],
help='Test',
)
def test_mkvca_optional_keys_not_list(self):
self.assertRaises(
TypeError,
self.parser.add_argument,
'--test-optional-dict',
metavar='req1=xxx',
action=parseractions.MultiKeyValueCommaAction,
dest='test_optional_dict',
default=None,
required_keys=['req1', 'req2'],
optional_keys={'aaa': 'bbb'},
help='Test',
)
class TestNonNegativeAction(utils.TestCase):
def setUp(self):
super(TestNonNegativeAction, self).setUp()
self.parser = argparse.ArgumentParser()
# Set up our typical usage
self.parser.add_argument(
'--foo',
metavar='<foo>',
type=int,
action=parseractions.NonNegativeAction,
)
def test_negative_values(self):
self.assertRaises(
argparse.ArgumentTypeError,
self.parser.parse_args,
"--foo -1".split()
)
def test_zero_values(self):
results = self.parser.parse_args(
'--foo 0'.split()
)
actual = getattr(results, 'foo', None)
self.assertEqual(actual, 0)
def test_positive_values(self):
results = self.parser.parse_args(
'--foo 1'.split()
)
actual = getattr(results, 'foo', None)
self.assertEqual(actual, 1)
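# Illustrative usage (not part of the original test module): a minimal, hedged
# sketch of the pattern these tests exercise. KeyValueAction accumulates
# repeated "--property key=value" options into a single dict; the option
# values below are made up.
def _example_key_value_usage():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--property',
        action=parseractions.KeyValueAction,
        default={},
    )
    ns = parser.parse_args(['--property', 'color=red', '--property', 'size=10'])
    return ns.property  # {'color': 'red', 'size': '10'}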
avg_line_length: 29.494172 | max_line_length: 78 | alphanum_fraction: 0.512606

hexsha: f8cd39e556009aa435acb8857309b318c7a0e36c | size: 2,369 | ext: py | lang: Python
repo_path (stars/issues/forks): src/primaires/scripting/actions/interrompre.py | repo_name: stormi/tsunami | head_hexsha: bdc853229834b52b2ee8ed54a3161a1a3133d926 | licenses: ["BSD-3-Clause"]
max_stars_count, max_issues_count, max_forks_count and event datetimes: null
# -*- coding: utf-8 -*-
# Copyright (c) 2012 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'action interrompre."""
from primaires.scripting.action import Action
from primaires.scripting.exceptions import InterrompreCommande
class ClasseAction(Action):
"""Interrompt le script et ce que fait le joueur.
Utilisée dans un script de salle sort.avant par exemple, cette action
empêche le joueur de se déplacer.
"""
@classmethod
def init_types(cls):
cls.ajouter_types(cls.interrompre)
cls.ajouter_types(cls.interrompre_msg, "str")
@staticmethod
def interrompre():
"""Interrompt le script."""
raise InterrompreCommande
@staticmethod
def interrompre_msg(message):
"""Interrompt le script en renvoyant le message au joueur."""
raise InterrompreCommande(message)
avg_line_length: 40.152542 | max_line_length: 79 | alphanum_fraction: 0.747573

hexsha: b2de191335c4557c3479be2cfd98c0e0ebeeded3 | size: 2,124 | ext: py | lang: Python
repo_path (stars/issues/forks): pypowerbi/import_class.py | repo_name: brunompacheco/pypowerbi | head_hexsha: 647951b7b0127a83c98427b0d58e380dc622e3b8 | licenses: ["MIT"]
max_stars_count, max_issues_count, max_forks_count and event datetimes: null
# -*- coding: future_fstrings -*-
from .dataset import Dataset
from .report import Report
class Import:
# json keys
id_key = 'id'
name_key = 'name'
created_timedate_key = 'createdDateTime'
datasets_key = 'datasets'
import_state_key = 'importState'
reports_key = 'reports'
updated_datetime_key = 'updatedDateTime'
source_key = 'source'
connection_type_key = 'connectionType'
value_key = 'value'
# import state values
import_state_succeeded = 'Succeeded'
import_state_publishing = 'Publishing'
def __init__(self, import_id, name=None, created_datetime=None, datasets=None, import_state=None,
reports=None, updated_datetime=None, source=None, connection_type=None):
self.id = import_id
self.name = name
self.created_datetime = created_datetime
self.datasets = datasets
self.import_state = import_state
self.reports = reports
self.updated_datetime = updated_datetime
self.source = source
self.connection_type = connection_type
@classmethod
def from_dict(cls, dictionary):
import_id = dictionary.get(cls.id_key)
if import_id is None:
raise RuntimeError("Import dictionary has no id key")
name = dictionary.get(cls.name_key)
created_datetime = dictionary.get(cls.created_timedate_key)
if cls.datasets_key in dictionary:
datasets = [Dataset.from_dict(x) for x in dictionary.get(cls.datasets_key)]
else:
datasets = None
import_state = dictionary.get(cls.import_state_key)
if cls.reports_key in dictionary:
reports = [Report.from_dict(x) for x in dictionary.get(cls.reports_key)]
else:
reports = None
updated_datetime = dictionary.get(cls.updated_datetime_key)
source = dictionary.get(cls.source_key)
connection_type = dictionary.get(cls.connection_type_key)
return cls(import_id, name, created_datetime, datasets, import_state,
reports, updated_datetime, source, connection_type)
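# Illustrative usage (not part of pypowerbi itself): a hedged sketch of feeding
# Import.from_dict the kind of JSON dictionary a Power BI import response
# contains. Only keys defined above are used; the id and name values are made up.
def _example_import_from_dict():
    data = {
        Import.id_key: '00000000-0000-0000-0000-000000000000',
        Import.name_key: 'sales.pbix',
        Import.import_state_key: Import.import_state_succeeded,
    }
    imported = Import.from_dict(data)
    return imported.id, imported.name, imported.import_state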
avg_line_length: 33.714286 | max_line_length: 101 | alphanum_fraction: 0.673258

hexsha: 87ef3df3ce4e823c45042f9e8658d3a6423f98c0 | size: 8,582 | ext: py | lang: Python
repo_path (stars/issues/forks): assopy/forms.py | repo_name: zevaverbach/epcon | head_hexsha: 8352c030ee0d4197f559cdb58a54ee45c7a4471a | licenses: ["BSD-2-Clause"]
max_stars_count, max_issues_count, max_forks_count and event datetimes: null
from django import forms
from django.conf import settings as dsettings
from django.utils.translation import ugettext as _
from assopy import models
from assopy import settings
from conference import models as cmodels
import logging
log = logging.getLogger('assopy.forms')
# autostrip - http://djangosnippets.org/snippets/956/
# the reason for this abomination?
# http://code.djangoproject.com/ticket/6362
def autostrip(cls):
fields = [(key, value) for key, value in cls.base_fields.items() if isinstance(value, forms.CharField)]
for field_name, field_object in fields:
def get_clean_func(original_clean):
return lambda value: original_clean(value and value.strip())
clean_func = get_clean_func(getattr(field_object, 'clean'))
setattr(field_object, 'clean', clean_func)
return cls
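# Illustrative only (not in the original epcon code): a hedged sketch of what
# autostrip does. It wraps every CharField's clean() so surrounding whitespace
# is stripped before validation. The form name and input value below are made
# up, and the example assumes Django settings are configured as in the
# surrounding project.
@autostrip
class _ExampleStrippedForm(forms.Form):
    name = forms.CharField(max_length=32)
# _ExampleStrippedForm(data={'name': '  Ada  '}).is_valid() yields
# cleaned_data['name'] == 'Ada': whitespace removed by the wrapped clean().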
PRIVACY_POLICY_CHECKBOX = """
I consent to the use of my data subject to the <a href='/privacy/'>EuroPython
data privacy policy</a>
""".strip()
PRIVACY_POLICY_ERROR = """
You need to consent to use of your data before we can continue
""".strip()
class Profile(forms.ModelForm):
first_name = forms.CharField(
label=_('First Name'),
help_text=_('Please do not enter a company name here.<br />You will be able to specify billing details during the checkout.'),
max_length=32,)
last_name = forms.CharField(
label=_('Last Name'),
max_length=32,)
class Meta:
model = models.AssopyUser
fields = ('first_name', 'last_name')
def __init__(self, *args, **kwargs):
o = kwargs.get('instance')
if o:
initial = kwargs.get('initial', {})
if 'first_name' not in initial:
initial['first_name'] = o.user.first_name
if 'last_name' not in initial:
initial['last_name'] = o.user.last_name
kwargs['initial'] = initial
super(Profile, self).__init__(*args, **kwargs)
def save(self, commit=True):
data = self.cleaned_data
self.instance.user.first_name = data['first_name']
self.instance.user.last_name = data['last_name']
u = super(Profile, self).save(commit=commit)
if commit:
self.instance.user.save()
return u
Profile = autostrip(Profile)
class BillingData(forms.ModelForm):
class Meta:
model = models.AssopyUser
exclude = ('user', 'token', 'assopy_id')
def _required(self, name):
data = self.cleaned_data.get(name, '')
try:
data = data.strip()
except AttributeError:  # non-string values have no strip()
pass
if not data:
raise forms.ValidationError('this field is required')
return data
clean_country = lambda self: self._required('country')
clean_address = lambda self: self._required('address')
def clean_card_name(self):
data = self.cleaned_data.get('card_name', '')
if not data:
return self.instance.name()
else:
return data
BillingData = autostrip(BillingData)
class FormTickets(forms.Form):
payment = forms.ChoiceField(choices=(('paypal', 'PayPal'),('bank', 'Bank')))
order_type = forms.ChoiceField(
choices=(
('non-deductible', _('Personal Purchase')),
('deductible', _('Company Purchase'))),
initial='non-deductible')
def __init__(self, *args, **kwargs):
super(FormTickets, self).__init__(*args, **kwargs)
for t in self.available_fares():
field = forms.IntegerField(
label=t.name,
min_value=0,
required=False,
)
field.fare = t
self.fields[t.code] = field
def available_fares(self):
return cmodels.Fare.objects.available()
def clean(self):
fares = dict( (x.code, x) for x in self.available_fares() )
data = self.cleaned_data
o = []
total = 0
for k, q in list(data.items()):  # copy: entries may be deleted below
if k not in fares:
continue
if not q:
continue
total += q
f = fares[k]
if not f.valid():
self._errors[k] = self.error_class(['Invalid fare'])
del data[k]
continue
o.append((f, {'qty': q}))
data['tickets'] = o
return data
class RefundItemForm(forms.Form):
reason = forms.CharField(
label=_("Reason"),
max_length=200,
help_text=_("""Please enter the reason of your refund request"""),
widget=forms.Textarea)
paypal = forms.EmailField(
label=_("Your paypal address"),
help_text=_("""If you prefer to receive payment via paypal"""),
required=False)
bank = forms.CharField(
label=_("Bank routing information"),
help_text=_("""Please specify IBAN, BIC and bank address (if in Europe) or any needed information for a worldwide transfer"""),
required=False,
widget=forms.Textarea)
def __init__(self, item, *args, **kw):
super(RefundItemForm, self).__init__(*args, **kw)
self.item = item
def clean(self):
data = self.cleaned_data
if self.item.refund_type() == 'payment':
if not data.get('paypal') and not data.get('bank'):
raise forms.ValidationError('Please specify at least one of the paypal account or the bank details')
return data
if 'paypal.standard.ipn' in dsettings.INSTALLED_APPS:
from paypal.standard.forms import PayPalPaymentsForm
from paypal.standard.widgets import ValueHiddenInput
from paypal.standard.conf import POSTBACK_ENDPOINT, SANDBOX_POSTBACK_ENDPOINT
class PayPalForm(PayPalPaymentsForm):
#Do not prompt buyers for a shipping address.
#Allowable values are:
#
#0 – prompt for an address, but do not require one
#1 – do not prompt for an address
#2 – prompt for an address, and require one
no_shipping = forms.IntegerField(initial=1)
address_override = forms.IntegerField(initial=0)
def __init__(self, order, *args, **kwargs):
from django.db import models
initial = settings.PAYPAL_DEFAULT_FORM_CONTEXT(order)
initial.update({'cmd':self.CMD_CHOICES[1][0]})
kwargs['initial'] = initial
super(PayPalForm, self).__init__(*args, **kwargs)
items = list(order.orderitem_set \
.filter(price__gte=0).values('code','description','price') \
.annotate(count=models.Count('price')) \
.order_by('-price'))
discount = order.total(apply_discounts=False) - order.total()
if discount > 0:
self.fields['discount_amount_cart'] = forms.IntegerField(
widget=ValueHiddenInput(),
initial= discount
)
self.fields['upload'] = forms.IntegerField(
widget=ValueHiddenInput(),
initial=1
)
for n, item in enumerate(items, start=1):
self.fields['item_name_%d' % n ] = forms.CharField(
widget=ValueHiddenInput(),
initial=settings.PAYPAL_ITEM_NAME(item)
)
self.fields['quantity_%d' % n ] = forms.CharField(
widget=ValueHiddenInput(),
initial=item['count']
)
self.fields['amount_%d' % n ] = forms.CharField(
widget=ValueHiddenInput(),
initial=item['price']
)
def paypal_url(self):
return SANDBOX_POSTBACK_ENDPOINT if getattr(dsettings, 'PAYPAL_TEST', False) else POSTBACK_ENDPOINT
def as_url_args(self):
import urllib.request, urllib.parse, urllib.error
data = dict(
[(f.field.widget.attrs.get('name', f.html_name), f.value())
for f in self if f.value()]
)
return urllib.parse.urlencode(data)
avg_line_length: 36.832618 | max_line_length: 135 | alphanum_fraction: 0.557795

hexsha: 21c57f3283299ea716976a14b65ae63d5e4926ce | size: 26,862 | ext: py | lang: Python
repo_path (stars/issues/forks): djangobb_forum/migrations/0001_initial.py | repo_name: dboczek/DjangoBB | head_hexsha: 80c42274839714f0cc6c4529ba1b44ae5e4e03c1 | licenses: ["BSD-3-Clause"]
max_stars_count: 15 (2015-02-26T13:59:30.000Z to 2021-11-08T09:50:47.000Z) | max_issues_count: 52 (2015-01-08T21:57:43.000Z to 2021-03-25T07:39:20.000Z) | max_forks_count: 18 (2015-01-20T00:11:28.000Z to 2021-09-04T18:03:14.000Z)
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Category'
db.create_table('djangobb_forum_category', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=80)),
('position', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
))
db.send_create_signal('djangobb_forum', ['Category'])
# Adding M2M table for field groups on 'Category'
db.create_table('djangobb_forum_category_groups', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('category', models.ForeignKey(orm['djangobb_forum.category'], null=False)),
('group', models.ForeignKey(orm['auth.group'], null=False))
))
db.create_unique('djangobb_forum_category_groups', ['category_id', 'group_id'])
# Adding model 'Forum'
db.create_table('djangobb_forum_forum', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(related_name='forums', to=orm['djangobb_forum.Category'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=80)),
('position', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(default='', blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('post_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
('topic_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
('last_post', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='last_forum_post', null=True, to=orm['djangobb_forum.Post'])),
))
db.send_create_signal('djangobb_forum', ['Forum'])
# Adding M2M table for field moderators on 'Forum'
db.create_table('djangobb_forum_forum_moderators', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('forum', models.ForeignKey(orm['djangobb_forum.forum'], null=False)),
('user', models.ForeignKey(orm['auth.user'], null=False))
))
db.create_unique('djangobb_forum_forum_moderators', ['forum_id', 'user_id'])
# Adding model 'Topic'
db.create_table('djangobb_forum_topic', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('forum', self.gf('django.db.models.fields.related.ForeignKey')(related_name='topics', to=orm['djangobb_forum.Forum'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('views', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
('sticky', self.gf('django.db.models.fields.BooleanField')(default=False)),
('closed', self.gf('django.db.models.fields.BooleanField')(default=False)),
('post_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
('last_post', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='last_topic_post', null=True, to=orm['djangobb_forum.Post'])),
))
db.send_create_signal('djangobb_forum', ['Topic'])
# Adding M2M table for field subscribers on 'Topic'
db.create_table('djangobb_forum_topic_subscribers', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('topic', models.ForeignKey(orm['djangobb_forum.topic'], null=False)),
('user', models.ForeignKey(orm['auth.user'], null=False))
))
db.create_unique('djangobb_forum_topic_subscribers', ['topic_id', 'user_id'])
# Adding model 'Post'
db.create_table('djangobb_forum_post', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('topic', self.gf('django.db.models.fields.related.ForeignKey')(related_name='posts', to=orm['djangobb_forum.Topic'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='posts', to=orm['auth.User'])),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('updated_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('markup', self.gf('django.db.models.fields.CharField')(default='bbcode', max_length=15)),
('body', self.gf('django.db.models.fields.TextField')()),
('body_html', self.gf('django.db.models.fields.TextField')()),
('user_ip', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
))
db.send_create_signal('djangobb_forum', ['Post'])
# Adding model 'Reputation'
db.create_table('djangobb_forum_reputation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('from_user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reputations_from', to=orm['auth.User'])),
('to_user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reputations_to', to=orm['auth.User'])),
('post', self.gf('django.db.models.fields.related.ForeignKey')(related_name='post', to=orm['djangobb_forum.Post'])),
('time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('sign', self.gf('django.db.models.fields.IntegerField')(default=0)),
('reason', self.gf('django.db.models.fields.TextField')(max_length=1000)),
))
db.send_create_signal('djangobb_forum', ['Reputation'])
# Adding unique constraint on 'Reputation', fields ['from_user', 'post']
db.create_unique('djangobb_forum_reputation', ['from_user_id', 'post_id'])
# Adding model 'Profile'
db.create_table('djangobb_forum_profile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('djangobb_forum.fields.AutoOneToOneField')(related_name='forum_profile', unique=True, to=orm['auth.User'])),
('status', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('site', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
('jabber', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('icq', self.gf('django.db.models.fields.CharField')(max_length=12, blank=True)),
('msn', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('aim', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('yahoo', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('location', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('signature', self.gf('django.db.models.fields.TextField')(default='', max_length=1024, blank=True)),
('time_zone', self.gf('django.db.models.fields.FloatField')(default=3.0)),
('language', self.gf('django.db.models.fields.CharField')(default='', max_length=5)),
('avatar', self.gf('djangobb_forum.fields.ExtendedImageField')(default='', max_length=100, blank=True)),
('theme', self.gf('django.db.models.fields.CharField')(default='default', max_length=80)),
('show_avatar', self.gf('django.db.models.fields.BooleanField')(default=True)),
('show_signatures', self.gf('django.db.models.fields.BooleanField')(default=True)),
('privacy_permission', self.gf('django.db.models.fields.IntegerField')(default=1)),
('markup', self.gf('django.db.models.fields.CharField')(default='bbcode', max_length=15)),
('post_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
))
db.send_create_signal('djangobb_forum', ['Profile'])
# Adding model 'PostTracking'
db.create_table('djangobb_forum_posttracking', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('djangobb_forum.fields.AutoOneToOneField')(to=orm['auth.User'], unique=True)),
('topics', self.gf('djangobb_forum.fields.JSONField')(null=True)),
('last_read', self.gf('django.db.models.fields.DateTimeField')(null=True)),
))
db.send_create_signal('djangobb_forum', ['PostTracking'])
# Adding model 'Report'
db.create_table('djangobb_forum_report', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('reported_by', self.gf('django.db.models.fields.related.ForeignKey')(related_name='reported_by', to=orm['auth.User'])),
('post', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['djangobb_forum.Post'])),
('zapped', self.gf('django.db.models.fields.BooleanField')(default=False)),
('zapped_by', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='zapped_by', null=True, to=orm['auth.User'])),
('created', self.gf('django.db.models.fields.DateTimeField')(blank=True)),
('reason', self.gf('django.db.models.fields.TextField')(default='', max_length='1000', blank=True)),
))
db.send_create_signal('djangobb_forum', ['Report'])
# Adding model 'Ban'
db.create_table('djangobb_forum_ban', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='ban_users', unique=True, to=orm['auth.User'])),
('ban_start', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('ban_end', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('reason', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('djangobb_forum', ['Ban'])
# Adding model 'Attachment'
db.create_table('djangobb_forum_attachment', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('post', self.gf('django.db.models.fields.related.ForeignKey')(related_name='attachments', to=orm['djangobb_forum.Post'])),
('size', self.gf('django.db.models.fields.IntegerField')()),
('content_type', self.gf('django.db.models.fields.CharField')(max_length=255)),
('path', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name', self.gf('django.db.models.fields.TextField')()),
('hash', self.gf('django.db.models.fields.CharField')(default='', max_length=40, db_index=True, blank=True)),
))
db.send_create_signal('djangobb_forum', ['Attachment'])
def backwards(self, orm):
# Removing unique constraint on 'Reputation', fields ['from_user', 'post']
db.delete_unique('djangobb_forum_reputation', ['from_user_id', 'post_id'])
# Deleting model 'Category'
db.delete_table('djangobb_forum_category')
# Removing M2M table for field groups on 'Category'
db.delete_table('djangobb_forum_category_groups')
# Deleting model 'Forum'
db.delete_table('djangobb_forum_forum')
# Removing M2M table for field moderators on 'Forum'
db.delete_table('djangobb_forum_forum_moderators')
# Deleting model 'Topic'
db.delete_table('djangobb_forum_topic')
# Removing M2M table for field subscribers on 'Topic'
db.delete_table('djangobb_forum_topic_subscribers')
# Deleting model 'Post'
db.delete_table('djangobb_forum_post')
# Deleting model 'Reputation'
db.delete_table('djangobb_forum_reputation')
# Deleting model 'Profile'
db.delete_table('djangobb_forum_profile')
# Deleting model 'PostTracking'
db.delete_table('djangobb_forum_posttracking')
# Deleting model 'Report'
db.delete_table('djangobb_forum_report')
# Deleting model 'Ban'
db.delete_table('djangobb_forum_ban')
# Deleting model 'Attachment'
db.delete_table('djangobb_forum_attachment')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'djangobb_forum.attachment': {
'Meta': {'object_name': 'Attachment'},
'content_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'hash': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [], {}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['djangobb_forum.Post']"}),
'size': ('django.db.models.fields.IntegerField', [], {})
},
'djangobb_forum.ban': {
'Meta': {'object_name': 'Ban'},
'ban_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'ban_start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.TextField', [], {}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'ban_users'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'djangobb_forum.category': {
'Meta': {'ordering': "['position']", 'object_name': 'Category'},
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'djangobb_forum.forum': {
'Meta': {'ordering': "['position']", 'object_name': 'Forum'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'forums'", 'to': "orm['djangobb_forum.Category']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_forum_post'", 'null': 'True', 'to': "orm['djangobb_forum.Post']"}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'topic_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'djangobb_forum.post': {
'Meta': {'ordering': "['created']", 'object_name': 'Post'},
'body': ('django.db.models.fields.TextField', [], {}),
'body_html': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'markup': ('django.db.models.fields.CharField', [], {'default': "'bbcode'", 'max_length': '15'}),
'topic': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['djangobb_forum.Topic']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}),
'user_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'})
},
'djangobb_forum.posttracking': {
'Meta': {'object_name': 'PostTracking'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'topics': ('djangobb_forum.fields.JSONField', [], {'null': 'True'}),
'user': ('djangobb_forum.fields.AutoOneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'djangobb_forum.profile': {
'Meta': {'object_name': 'Profile'},
'aim': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'avatar': ('djangobb_forum.fields.ExtendedImageField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'icq': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jabber': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '5'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'markup': ('django.db.models.fields.CharField', [], {'default': "'bbcode'", 'max_length': '15'}),
'msn': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'privacy_permission': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'show_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_signatures': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'signature': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'site': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'theme': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '80'}),
'time_zone': ('django.db.models.fields.FloatField', [], {'default': '3.0'}),
'user': ('djangobb_forum.fields.AutoOneToOneField', [], {'related_name': "'forum_profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'yahoo': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'})
},
'djangobb_forum.report': {
'Meta': {'object_name': 'Report'},
'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djangobb_forum.Post']"}),
'reason': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': "'1000'", 'blank': 'True'}),
'reported_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reported_by'", 'to': "orm['auth.User']"}),
'zapped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'zapped_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'zapped_by'", 'null': 'True', 'to': "orm['auth.User']"})
},
'djangobb_forum.reputation': {
'Meta': {'unique_together': "(('from_user', 'post'),)", 'object_name': 'Reputation'},
'from_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reputations_from'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post'", 'to': "orm['djangobb_forum.Post']"}),
'reason': ('django.db.models.fields.TextField', [], {'max_length': '1000'}),
'sign': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reputations_to'", 'to': "orm['auth.User']"})
},
'djangobb_forum.topic': {
'Meta': {'ordering': "['-updated']", 'object_name': 'Topic'},
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'forum': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'topics'", 'to': "orm['djangobb_forum.Forum']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_topic_post'", 'null': 'True', 'to': "orm['djangobb_forum.Post']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'sticky': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'subscriptions'", 'blank': 'True', 'to': "orm['auth.User']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
}
}
complete_apps = ['djangobb_forum']
avg_line_length: 72.016086 | max_line_length: 187 | alphanum_fraction: 0.597684

hexsha: 5fab3c8a29e9b481a9c96cefec36989f38582e2d | size: 2,600 | ext: py | lang: Python
repo_path (stars/issues/forks): test/connectivity/acts/framework/acts/signals.py | repo_name: Keneral/atools | head_hexsha: 055e76621340c7dced125e9de56e2645b5e1cdfb | licenses: ["Unlicense"]
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2018-02-24T19:13:01.000Z to 2018-02-24T19:13:01.000Z)
#!/usr/bin/env python3.4
#
# Copyright 2016 - The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module is where all the test signal classes and related utilities live.
"""
import functools
import json
def generated_test(func):
"""A decorator used to suppress result reporting for the test case that
kicks off a group of generated test cases.
Returns:
What the decorated function returns.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
func(*args, **kwargs)
raise TestSilent(
"Result reporting for %s is suppressed" % func.__name__)
return wrapper
class TestSignalError(Exception):
"""Raised when an error occurs inside a test signal."""
class TestSignal(Exception):
"""Base class for all test result control signals."""
def __init__(self, details, extras=None):
if not isinstance(details, str):
raise TestSignalError("Message has to be a string.")
super(TestSignal, self).__init__(details)
self.details = details
try:
json.dumps(extras)
self.extras = extras
except TypeError:
raise TestSignalError(("Extras must be json serializable. %s "
"is not.") % extras)
def __str__(self):
return "Details=%s, Extras=%s" % (self.details, self.extras)
class TestFailure(TestSignal):
"""Raised when a test has failed."""
class TestPass(TestSignal):
"""Raised when a test has passed."""
class TestSkip(TestSignal):
"""Raised when a test has been skipped."""
class TestSilent(TestSignal):
"""Raised when a test should not be reported. This should only be used for
generated test cases.
"""
class TestAbortClass(TestSignal):
"""Raised when all subsequent test cases within the same test class should
be aborted.
"""
class TestAbortAll(TestSignal):
"""Raised when all subsequent test cases should be aborted."""
class ControllerError(Exception):
"""Raised when an error occured in controller classes."""
avg_line_length: 32.911392 | max_line_length: 79 | alphanum_fraction: 0.682308

hexsha: 28fd9c30761c9ec1c19d28fef061abe87cba3c94 | size: 206 | ext: py | lang: Python
repo_path (stars/issues/forks): cogdl/loggers/base_logger.py | repo_name: li-ziang/cogdl | head_hexsha: 60022d3334e3abae2d2a505e6e049a26acf10f39 | licenses: ["MIT"]
max_stars_count: 1,072 (2019-08-02T05:46:21.000Z to 2022-03-31T07:51:53.000Z) | max_issues_count: 96 (2019-08-05T17:27:22.000Z to 2022-03-03T08:36:57.000Z) | max_forks_count: 299 (2019-08-08T07:33:10.000Z to 2022-03-31T09:30:07.000Z)
class Logger:
def __init__(self, log_path):
self.log_path = log_path
def start(self):
pass
def note(self, metrics, step=None):
pass
def finish(self):
pass
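# Illustrative only (not part of cogdl): a hedged sketch of how the hooks are
# intended to be used by a concrete logger. The plain-text file format here is
# an assumption, not cogdl's actual logger implementation.
class _ExampleFileLogger(Logger):
    def start(self):
        # Open the destination when logging begins.
        self._fh = open(self.log_path, "a")

    def note(self, metrics, step=None):
        # Record one line per call, e.g. "step=10 metrics={'loss': 0.5}".
        self._fh.write("step=%s metrics=%s\n" % (step, metrics))

    def finish(self):
        self._fh.close()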
avg_line_length: 15.846154 | max_line_length: 39 | alphanum_fraction: 0.567961

hexsha: 86712c56c66b3441805ad088ef0b8c8ce0b28bc0 | size: 10,074 | ext: py | lang: Python
repo_path (stars/issues/forks): src/MainPanel.py | stars repo_name: DaniW42/Hector9000 | issues/forks repo_name: assgex/Hector9000 | head_hexsha: b9161c71ae23f1671787298f8fae6c503b50d6e6 | licenses: ["MIT"]
max_stars_count: 7 (2019-07-22T10:09:59.000Z to 2019-11-16T12:28:38.000Z) | max_issues_count: null | max_forks_count: 7 (2019-07-22T19:56:17.000Z to 2019-11-16T10:48:39.000Z)
import time
import json
from kivy.core.text import Label
from drinks import drink_list, ingredients
from kivy.properties import StringProperty, ListProperty
from kivy.uix.progressbar import ProgressBar
from functools import partial
from kivy.uix.label import Label
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.popup import Popup
from kivy.uix.button import Button
from kivy.uix.screenmanager import ScreenManager, Screen, FadeTransition
from kivy.uix.image import Image
from kivy.clock import Clock
from database import Database
from HectorConfig import config
from pygame import mixer
from HectorHardware import HectorHardware
## For LND script (if file exists)
from pathlib import Path
import subprocess
## logging
import logging
log_format = "%(asctime)s::%(levelname)s::%(name)s::"\
"%(filename)s::%(lineno)d::%(message)s"
logging.basicConfig(filename="/home/pi/log/cocktail.log", level='DEBUG', format=log_format) ###TODO: put log location into config
class MainPanel(Screen):
buttonText = ListProperty([StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty()])
image = ListProperty([StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty(),
StringProperty()])
buttonColor = ListProperty([ListProperty(),
ListProperty(),
ListProperty(),
ListProperty(),
ListProperty(),
ListProperty(),
ListProperty(),
ListProperty()])
db = None
drinkOnScreen = None
screenPage = None
maxScreenPage = None
lightning = True
def __init__(self, **kwargs):
super(MainPanel, self).__init__(**kwargs)
self.db = Database("h9k")
self.db.createIfNotExists()
self.screenPage = 1
items = len(drink_list) % 8
self.maxScreenPage = (len(drink_list) // 8)
if items > 0:
self.maxScreenPage += 1
self.drinkOnScreen = list()
self.drinkOnScreen = drink_list[:8]
self.fillButtons(self.drinkOnScreen)
self.initVent()
def initVent(self):
print("Prepare vets.")
h = HectorHardware(config)
h.light_on()
time.sleep(1)
h.arm_in()
h.pump_stop()
for vnum in range(24):
print("Vent %d closing..." % (vnum,))
time.sleep(1)
h.valve_close(vnum)
h.light_off()
def isalcoholic(self, drink):
for ing, _ in drink["recipe"]:
if ingredients[ing][1]: return True
return False
def fillButtons(self, drinks):
countDrinksOnScreen = len(drinks)
count = 0
while count < countDrinksOnScreen:
self.buttonText[count] = drinks[count]['name']
self.image[count] = drinks[count]['image']
if self.buttonText[count].startswith("..."):
self.buttonColor[count] = [.3, .3, .3, 1]
elif self.isalcoholic(drinks[count]):
self.buttonColor[count] = [1, 0, 0, 1]
else: # non-alcoholic
self.buttonColor[count] = [0, 1, 0, 1]
count += 1
while count < 8:
self.buttonText[count] = ''
self.buttonColor[count] = [1, 1, 1, 1]
count += 1
def choiceDrink(self, *args):
self.readPumpConfiguration()
if len(self.drinkOnScreen) -1 < args[0]:
print("no drinks found.")
return
## Start Script to create Invoice
if self.lightning:
print("start lnd-invoicetoqr.sh")
subprocess.call("lnd/lnd-invoicetoqr.sh")
print("end lnd-invoicetoqr.sh")
root = BoxLayout(orientation='vertical')
root2 = BoxLayout()
if self.lightning:
root2.add_widget(Image(source='lnd/temp/tempQRCode.png'))
else:
root2.add_widget(Image(source='img/empty-glass.png'))
list_ing = "Ingredients:\n"
for ing in self.drinkOnScreen[args[0]]["recipe"]:
list_ing = list_ing + ingredients[ing[0]][0] + ": " + str(ing[1]) + "\n"
root2.add_widget(
Label(text=list_ing + '\nPlease be sure\nthat a glass with min 200 ml \nis placed onto the black fixture.', font_size='20sp'))
root.add_widget(root2)
if not self.lightning:
contentOK = Button(text='OK', font_size=60, size_hint_y=0.15)
root.add_widget(contentOK)
contentCancel = Button(text='Cancel', font_size=60, size_hint_y=0.15)
root.add_widget(contentCancel)
popup = Popup(title=self.drinkOnScreen[args[0]]["name"], content=root,
auto_dismiss=False)
def closeme(button):
popup.dismiss()
Clock.schedule_once(partial(self.doGiveDrink, args[0]), .01)
if not self.lightning:
contentOK.bind(on_press=closeme)
def cancelme(button):
popup.dismiss()
contentCancel.bind(on_press=cancelme)
## Beginn Function to periodically check the payment using lnd-checkinvoice1.sh
def checkPayment(parent):
print("start check script")
## while loop to check if lnd-checkinvoice1.sh returns SETTLED, if not wait for a second and start over
paymentSettled = False
counter = 0
while paymentSettled == False:
## run lnd-checkinvoice1.sh and write output to variable s
s = subprocess.check_output(["sh","lnd/lnd-checkinvoice1.sh"])
print(s)
counter +=1
print( counter )
## check if s is 'SETTLED', if so, close popup and start doGiveDrink
if (b'SETTLED' in s):
paymentSettled = True
popup.dismiss()
Clock.schedule_once(partial(self.doGiveDrink, args[0]), .01)
elif (counter > 60):
paymentSettled = True
popup.dismiss()
Clock.schedule_once( partial( self.doGiveDrink, args[0] ), .01 )
else:
## if not 'SETTLED' wait a second and start over
paymentSettled = False
time.sleep(1)
pass
pass
print("end check script")
## End Function to periodically check the payment using lnd-checkinvoice1.sh
## start 'checkPayment-loop' when loading popup
if self.lightning:
popup.bind(on_open=checkPayment)
popup.open()
def doGiveDrink(self, drink, intervaltime):
root = BoxLayout(orientation='vertical')
content = Label(text='Take a break -- \nYour \n\n' + self.drinkOnScreen[drink]["name"]+'\n\nwill be mixed.', font_size='40sp')
root.add_widget(content)
popup = Popup(title='Life, the Universe, and Everything. There is an answer.', content=root,
auto_dismiss=False)
if (self.drinkOnScreen[drink]["sound"]):
mixer.init()
mixer.music.load(self.drinkOnScreen[drink]["sound"])
mixer.music.play()
def makeDrink(parentwindow):
drinks = self.drinkOnScreen[drink]
hector = HectorHardware(config)
hector.light_on()
time.sleep(1)
hector.arm_out()
            for ingredient in drinks["recipe"]:
                hector.valve_dose(pumpList[ingredient[0]], ingredient[1])
                time.sleep(.1)
                print("Pump index: ", pumpList[ingredient[0]])
                print("Ingredient: ", ingredient[0])
                print("Output in ml: ", ingredient[1])
                self.db.countUpIngredient(ingredient[0], ingredient[1])
time.sleep(1)
self.db.countUpDrink(drinks["name"])
hector.arm_in()
hector.light_off()
hector.finger(1)
hector.ping(3)
hector.finger(0)
print(drinks["name"])
parentwindow.dismiss()
popup.bind(on_open=makeDrink)
popup.open()
def back(self):
if self.screenPage == 1:
self.screenPage = self.maxScreenPage
else:
self.screenPage -= 1
self.drinkOnScreen = drink_list[(self.screenPage * 8) - 8:8 * self.screenPage]
self.fillButtons(self.drinkOnScreen)
def forward(self):
if self.screenPage == self.maxScreenPage:
self.screenPage = 1
else:
self.screenPage += 1
i = (self.screenPage * 8) - 8
self.drinkOnScreen = drink_list[i:8 * self.screenPage]
self.fillButtons(self.drinkOnScreen)
def readPumpConfiguration(self):
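        # Map each ingredient id ('value') to its servo channel as configured in servo_config.json.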
x = json.load(open('servo_config.json'))
global pumpList
pumpList = {}
for key in x:
chan = x[key]
pumpList[chan['value']] = chan['channel']
return pumpList
| 34.737931
| 139
| 0.528588
|
59120d373ccf4335d60fb9ded6693a446f8df97a
| 442
|
py
|
Python
|
DjangoCRUD/venv/Scripts/pip3.7-script.py
|
Dawwie/Django
|
8c0382d5e44e125d9c5b52742f8dc07008e0b8b7
|
[
"MIT"
] | null | null | null |
DjangoCRUD/venv/Scripts/pip3.7-script.py
|
Dawwie/Django
|
8c0382d5e44e125d9c5b52742f8dc07008e0b8b7
|
[
"MIT"
] | null | null | null |
DjangoCRUD/venv/Scripts/pip3.7-script.py
|
Dawwie/Django
|
8c0382d5e44e125d9c5b52742f8dc07008e0b8b7
|
[
"MIT"
] | null | null | null |
#!"E:\Pobrane\Programowanie\Pycharm 2018.3.4\Projects\DjangoCRUD\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.7'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.7')()
)
| 34
| 89
| 0.674208
|
af60024a11b9bc4bbb464de703d547517d2906f0
| 5,176
|
py
|
Python
|
plugins/holland.backup.mysql_lvm/holland/backup/mysql_lvm/plugin/raw/plugin.py
|
Alibloke/holland
|
e630b511a95ed8e36205e8300e632018918223ff
|
[
"BSD-3-Clause"
] | null | null | null |
plugins/holland.backup.mysql_lvm/holland/backup/mysql_lvm/plugin/raw/plugin.py
|
Alibloke/holland
|
e630b511a95ed8e36205e8300e632018918223ff
|
[
"BSD-3-Clause"
] | null | null | null |
plugins/holland.backup.mysql_lvm/holland/backup/mysql_lvm/plugin/raw/plugin.py
|
Alibloke/holland
|
e630b511a95ed8e36205e8300e632018918223ff
|
[
"BSD-3-Clause"
] | null | null | null |
"""MySQL LVM snapshot backups"""
import os
import logging
from holland.core.util.path import directory_size
from holland.core.backup import BackupError
from holland.lib.lvm import LogicalVolume, CallbackFailuresError, LVMCommandError, relpath, getmount
from holland.lib.mysql.client import MySQLError
from holland.lib.mysql.client.base import MYSQL_CLIENT_CONFIG_STRING
from holland.backup.mysql_lvm.plugin.common import build_snapshot, connect_simple, _dry_run
from holland.backup.mysql_lvm.plugin.raw.util import setup_actions
from holland.lib.compression import COMPRESSION_CONFIG_STRING
LOG = logging.getLogger(__name__)
CONFIGSPEC = (
"""
[mysql-lvm]
# default: mysql lv + _snapshot
snapshot-name = string(default=None)
# default: minimum of 20% of mysql lv or mysql vg free size
snapshot-size = string(default=None)
# default: temporary directory
snapshot-mountpoint = string(default=None)
# default: no
innodb-recovery = boolean(default=no)
# ignore errors due to strange innodb configurations
force-innodb-backup = boolean(default=no)
# default: flush tables with read lock by default
lock-tables = boolean(default=yes)
# default: do an extra (non-locking) flush tables before
# run flush tables with read lock
extra-flush-tables = boolean(default=yes)
# default: create tar file from snapshot
archive-method = option(dir,tar,default="tar")
[mysqld]
mysqld-exe = force_list(default=list('mysqld', '/usr/libexec/mysqld'))
user = string(default='mysql')
innodb-buffer-pool-size = string(default=128M)
tmpdir = string(default=None)
[tar]
exclude = force_list(default='mysql.sock')
post-args = string(default=None)
pre-args = string(default=None)
"""
+ MYSQL_CLIENT_CONFIG_STRING
+ COMPRESSION_CONFIG_STRING
)
CONFIGSPEC = CONFIGSPEC.splitlines()
class MysqlLVMBackup(object):
"""
A Holland Backup plugin suitable for performing LVM snapshots of a
filesystem underlying a live MySQL instance.
This plugin produces tar archives of a MySQL data directory.
"""
CONFIGSPEC = CONFIGSPEC
def __init__(self, name, config, target_directory, dry_run=False):
self.config = config
self.config.validate_config(self.configspec())
LOG.debug("Validated config: %r", self.config)
self.name = name
self.target_directory = target_directory
self.dry_run = dry_run
self.client = connect_simple(self.config["mysql:client"])
def estimate_backup_size(self):
"""Estimate the backup size this plugin will produce
This is currently the total directory size of the MySQL datadir
"""
try:
self.client.connect()
datadir = self.client.show_variable("datadir")
self.client.disconnect()
except MySQLError as exc:
raise BackupError("[%d] %s" % exc.args)
return directory_size(datadir)
def configspec(self):
"""INI Spec for the configuration values this plugin supports"""
return self.CONFIGSPEC
def backup(self):
"""Run a backup by running through a LVM snapshot against the device
the MySQL datadir resides on
"""
# connect to mysql and lookup what we're supposed to snapshot
try:
self.client.connect()
datadir = os.path.realpath(self.client.show_variable("datadir"))
except MySQLError as exc:
raise BackupError("[%d] %s" % exc.args)
LOG.info("Backing up %s via snapshot", datadir)
# lookup the logical volume mysql's datadir sits on
try:
volume = LogicalVolume.lookup_from_fspath(datadir)
except LookupError as exc:
raise BackupError("Failed to lookup logical volume for %s: %s" % (datadir, str(exc)))
except Exception as ex:
raise BackupError("Failed to lookup logical volume for %s: %s" % (datadir, str(ex)))
# create a snapshot manager
snapshot = build_snapshot(self.config["mysql-lvm"], volume, suppress_tmpdir=self.dry_run)
# calculate where the datadirectory on the snapshot will be located
rpath = relpath(datadir, getmount(datadir))
snap_datadir = os.path.abspath(os.path.join(snapshot.mountpoint, rpath))
# setup actions to perform at each step of the snapshot process
setup_actions(
snapshot=snapshot,
config=self.config,
client=self.client,
snap_datadir=snap_datadir,
spooldir=self.target_directory,
)
if self.dry_run:
return _dry_run(self.target_directory, volume, snapshot, datadir)
try:
snapshot.start(volume)
except CallbackFailuresError as exc:
for callback, error in exc.errors:
LOG.error("%s: %s", callback, error)
raise BackupError("Error occurred during snapshot process. Aborting.")
except LVMCommandError as exc:
# Something failed in the snapshot process
raise BackupError(str(exc))
except BaseException as ex:
LOG.debug(ex)
return None
| 34.738255
| 100
| 0.676198
|
604d14b1570f4a4b1b1d12c13fef9048767d1fba
| 84
|
py
|
Python
|
main.py
|
us-upal/The_Self_Taught_Programmer
|
1ccdd519a964c01ec1c892a22fd5b8c4cce2267f
|
[
"MIT"
] | null | null | null |
main.py
|
us-upal/The_Self_Taught_Programmer
|
1ccdd519a964c01ec1c892a22fd5b8c4cce2267f
|
[
"MIT"
] | null | null | null |
main.py
|
us-upal/The_Self_Taught_Programmer
|
1ccdd519a964c01ec1c892a22fd5b8c4cce2267f
|
[
"MIT"
] | null | null | null |
print("hello world")
for _ in range(10):
print("hello world") #hello world *10
| 28
| 41
| 0.654762
|
6c3df2671a95387912ef92ea0bc0da5afe6d1312
| 7,656
|
py
|
Python
|
tests/python/topi/python/test_topi_math.py
|
shengxinhu/tvm
|
06c443e9959452c6da3a911fe0c11e08c5554477
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 4,640
|
2017-08-17T19:22:15.000Z
|
2019-11-04T15:29:46.000Z
|
tests/python/topi/python/test_topi_math.py
|
shengxinhu/tvm
|
06c443e9959452c6da3a911fe0c11e08c5554477
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 2,863
|
2017-08-17T19:55:50.000Z
|
2019-11-04T17:18:41.000Z
|
tests/python/topi/python/test_topi_math.py
|
shengxinhu/tvm
|
06c443e9959452c6da3a911fe0c11e08c5554477
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1,352
|
2017-08-17T19:30:38.000Z
|
2019-11-04T16:09:29.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
import numpy as np
import pytest
import scipy
from scipy import special
import tvm
import tvm.testing
import tvm.topi.testing
from tvm import te, topi
from tvm.topi import utils
def test_util():
x = tvm.tir.const(100, "int32")
assert utils.get_const_int(x) == 100
assert utils.get_const_tuple((x, x)) == (100, 100)
ewise_operations = {
"floor": {"topi": topi.floor, "ref": np.floor, "input_range": (-100, 100)},
"ceil": {"topi": topi.ceil, "ref": np.ceil, "input_range": (-100, 100)},
"sign": {
"topi": topi.sign,
"ref": np.sign,
"input_range": (-100, 100),
"skip_name_check": True,
},
"trunc": {"topi": topi.trunc, "ref": np.trunc, "input_range": (-100, 100)},
"fabs": {"topi": topi.abs, "ref": np.fabs, "input_range": (-100, 100)},
"round": {"topi": topi.round, "ref": np.round, "input_range": (-100, 100), "check_round": True},
"exp": {"topi": topi.exp, "ref": np.exp, "input_range": (-1, 1)},
"tanh": {
"topi": topi.tanh,
"ref": np.tanh,
"input_range": (-10, 10),
"shape": (128, 128),
"dtype": ["float32", "float64"],
},
"sigmoid": {
"topi": topi.sigmoid,
"ref": lambda x: 1 / (1 + np.exp(-x)),
"input_range": (-1, 1),
},
"log": {"topi": topi.log, "ref": np.log, "input_range": (0, 100)},
"sqrt": {"topi": topi.sqrt, "ref": np.sqrt, "input_range": (0, 100)},
"rsqrt": {
"topi": topi.rsqrt,
"ref": lambda x: np.ones_like(x) / np.sqrt(x),
"input_range": (0, 100),
"skip_name_check": True,
},
"cos": {"topi": topi.cos, "ref": np.cos, "input_range": (-2.0 * np.pi, 2.0 * np.pi)},
"tan": {
"topi": topi.tan,
"ref": np.tan,
"input_range": (-2.0 * np.pi, 2.0 * np.pi),
"dtypes": ["float32", "float64"],
},
"sin": {"topi": topi.sin, "ref": np.sin, "input_range": (-2.0 * np.pi, 2.0 * np.pi)},
"erf": {"topi": topi.erf, "ref": scipy.special.erf, "input_range": (-0.1, 0.1)},
"isnan": {
"topi": topi.isnan,
"ref": np.isnan,
"input_range": (-1, 1),
"replace_with_nan": True,
},
"isfinite": {
"topi": topi.isfinite,
"ref": np.isfinite,
"input_range": (0, 1),
"shape": (8, 8),
"skip_name_check": True,
"replace_with_nan": True,
"replace_with_inf": True,
"dtypes": ["float32", "float64", "int32", "int16"],
},
"isinf": {
"topi": topi.isinf,
"ref": np.isinf,
"input_range": (0, 1),
"shape": (8, 8),
"skip_name_check": True,
"replace_with_nan": True,
"replace_with_inf": True,
"dtypes": ["float32", "float64", "int32", "int16"],
},
"fast_exp": {
"topi": topi.fast_exp,
"ref": np.exp,
"skip_name_check": True,
"input_range": (-88, 88),
"step": 0.01,
},
"fast_erf": {
"topi": topi.fast_erf,
"ref": scipy.special.erf,
"skip_name_check": True,
"input_range": (-10, 10),
"step": 0.01,
"dtypes": ["float32", "float16"],
"cast_output": True,
"tolerance": [1e-5, 1e-1],
},
"fast_tanh": {
"topi": topi.fast_tanh,
"ref": np.tanh,
"skip_name_check": True,
"input_range": (-10, 10),
"step": 0.01,
},
}
topi_name, dtype, tolerance = tvm.testing.parameters(
*[
(name, dtype, config.get("tolerance", [1e-5] * len(dtype))[i])
for name, config in ewise_operations.items()
for i, dtype in enumerate(config.get("dtypes", ["float32"]))
]
)
@tvm.testing.fixture(cache_return_value=True)
def ewise_ref_data(topi_name, dtype):
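    # Random input drawn from the op's declared input range, plus its NumPy/SciPy reference output
    # (with optional NaN/Inf injection for the predicate ops).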
config = ewise_operations[topi_name]
input_range = config["input_range"]
shape = config.get("shape", (20, 3))
a_np = np.random.uniform(*input_range, size=shape).astype(dtype)
if dtype.startswith("float"):
if config.get("replace_with_nan", False):
a_np.ravel()[np.random.choice(a_np.size, int(a_np.size * 0.5), replace=False)] = np.nan
if config.get("replace_with_inf", False):
a_np.ravel()[
np.random.choice(a_np.size, int(a_np.size * 0.5), replace=False)
] = np.infty
# avoid round check too close to boundary
if topi_name == "round":
a_np += ((np.abs(np.fmod(a_np, 1)) - 0.5) < 1e-6) * 1e-4
b_np = config["ref"](a_np)
if config.get("cast_output", False):
b_np = b_np.astype(dtype)
return a_np, b_np
def test_ewise(target, dev, topi_name, dtype, tolerance, ewise_ref_data):
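    # Apply the TOPI op to a symbolically-shaped placeholder, compile it with the injective schedule
    # for the target, and compare the result against the reference data.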
target = tvm.target.Target(target)
if target.kind.name == "vulkan" and topi_name in ["tan", "erf", "isnan", "isfinite", "isinf"]:
pytest.xfail(f"Vulkan runtime doesn't support {topi_name} yet")
topi_op = ewise_operations[topi_name]["topi"]
skip_name_check = ewise_operations[topi_name].get("skip_name_check", False)
m = te.var("m")
l = te.var("l")
A = te.placeholder((m, l), dtype=dtype, name="A")
B = topi_op(A)
assert tuple(B.shape) == tuple(A.shape)
if not skip_name_check:
assert B.op.body[0].op.name == "tir." + topi_name
a_np, b_np = ewise_ref_data
with tvm.target.Target(target):
s = tvm.topi.testing.get_injective_schedule(target)(B)
foo = tvm.build(s, [A, B], target, name=topi_name)
a = tvm.nd.array(a_np, dev)
b = tvm.nd.array(np.zeros_like(b_np), dev)
foo(a, b)
tvm.testing.assert_allclose(b.numpy(), b_np, rtol=tolerance, atol=tolerance)
from_dtype, to_dtype = tvm.testing.parameters(
("int32", "float32"),
("int32", "float64"),
("int32", "bool"),
("float32", "int32"),
("float32", "float64"),
("float32", "bool"),
("bool", "float32"),
("bool", "int32"),
)
@tvm.testing.fixture(cache_return_value=True)
def cast_ref_data(from_dtype, to_dtype):
shape = (5, 4)
input_range = (-100, 100)
if from_dtype == "bool":
a_np = np.random.choice([True, False], size=shape)
else:
a_np = np.random.uniform(*input_range, size=shape).astype(from_dtype)
if to_dtype == "bool":
a_np = a_np - a_np[2, 3]
b_np = a_np.astype(to_dtype)
return a_np, b_np
def test_cast(target, dev, cast_ref_data, from_dtype, to_dtype):
m = te.var("m")
l = te.var("l")
A = te.placeholder((m, l), dtype=from_dtype, name="A")
B = topi.cast(A, to_dtype)
a_np, b_np = cast_ref_data
with tvm.target.Target(target):
s = tvm.topi.testing.get_injective_schedule(target)(B)
foo = tvm.build(s, [A, B], target)
a = tvm.nd.array(a_np, dev)
b = tvm.nd.empty(b_np.shape, dtype=to_dtype, device=dev)
foo(a, b)
tvm.testing.assert_allclose(b.numpy(), b_np)
if __name__ == "__main__":
tvm.testing.main()
| 30.995951
| 100
| 0.583072
|
30521fbaa38774fdb0ed7cb902a10df1ad93eeac
| 2,833
|
py
|
Python
|
test/functional/wallet_zapwallettxes.py
|
BioA3/BioA3
|
a7ad7021121aaa468b11a9925972e315cea70f50
|
[
"MIT"
] | null | null | null |
test/functional/wallet_zapwallettxes.py
|
BioA3/BioA3
|
a7ad7021121aaa468b11a9925972e315cea70f50
|
[
"MIT"
] | null | null | null |
test/functional/wallet_zapwallettxes.py
|
BioA3/BioA3
|
a7ad7021121aaa468b11a9925972e315cea70f50
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the zapwallettxes functionality.
- start two bioa3d nodes
- create two transactions on node 0 - one is confirmed and one is unconfirmed.
- restart node 0 and verify that both the confirmed and the unconfirmed
transactions are still available.
- restart node 0 with zapwallettxes and persistmempool, and verify that both
the confirmed and the unconfirmed transactions are still available.
- restart node 0 with just zapwallettxes and verify that the confirmed
transactions are still available, but that the unconfirmed transaction has
been zapped.
"""
from test_framework.test_framework import BioA3TestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
wait_until,
)
class ZapWalletTXesTest (BioA3TestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
self.nodes[1].generate(101)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 250)
# This transaction will be confirmed
txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
# This transaction will not be confirmed
txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)
# Confirmed and unconfirmed transactions are now in the wallet.
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet.
self.stop_node(0)
self.start_node(0)
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed
# transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
self.stop_node(0)
self.start_node(0, ["-zapwallettxes=2"])
        # tx1 is still available because it was confirmed
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
# This will raise an exception because the unconfirmed transaction has been zapped
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2)
if __name__ == '__main__':
ZapWalletTXesTest().main()
| 39.901408
| 112
| 0.710201
|
d2c8effd068b1b2ba95997af01aa12a589157d2f
| 540
|
py
|
Python
|
rockstreet/artista/models.py
|
CeMenezesJunior/DevWebDjango
|
7b442411b6444f1c7c3c781ef8e5fdad73a476e0
|
[
"MIT"
] | null | null | null |
rockstreet/artista/models.py
|
CeMenezesJunior/DevWebDjango
|
7b442411b6444f1c7c3c781ef8e5fdad73a476e0
|
[
"MIT"
] | null | null | null |
rockstreet/artista/models.py
|
CeMenezesJunior/DevWebDjango
|
7b442411b6444f1c7c3c781ef8e5fdad73a476e0
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.urls import reverse
class Artista(models.Model):
nome = models.CharField(max_length=70, db_index=True, unique=True)
descricao = models.CharField(max_length=100)
imagem = models.CharField(max_length=100, blank=True)
slug = models.SlugField(max_length=70)
class Meta:
        db_table = 'artista'
ordering = ('nome',)
def __str__(self):
return self.nome
def get_absolute_url(self):
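        # Canonical URL of this artist's detail page, built from its id and slug.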
return reverse('artista:artistadetail',args=[self.id,self.slug])
| 28.421053
| 72
| 0.696296
|
7db30ba28ce1598afe6c3a63c621d97ccd5e96f6
| 2,663
|
py
|
Python
|
astropy/io/misc/tests/test_pickle_helpers.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | 3
|
2018-03-20T15:09:16.000Z
|
2021-05-27T11:17:33.000Z
|
astropy/io/misc/tests/test_pickle_helpers.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/io/misc/tests/test_pickle_helpers.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
from .. import fnpickle, fnunpickle
from ....extern.six.moves import range
def test_fnpickling_simple(tmpdir):
"""
    Tests the `fnpickle` and `fnunpickle` functions' basic operation by
pickling and unpickling a string, using both a filename and a
file.
"""
fn = str(tmpdir.join('test1.pickle'))
obj1 = 'astring'
fnpickle(obj1, fn)
res = fnunpickle(fn)
assert obj1 == res
# try without cPickle
fnpickle(obj1, fn, usecPickle=False)
res = fnunpickle(fn, usecPickle=False)
assert obj1 == res
# now try with a file-like object instead of a string
with open(fn, 'wb') as f:
fnpickle(obj1, f)
with open(fn, 'rb') as f:
res = fnunpickle(f)
assert obj1 == res
# same without cPickle
with open(fn, 'wb') as f:
fnpickle(obj1, f, usecPickle=False)
with open(fn, 'rb') as f:
res = fnunpickle(f, usecPickle=False)
assert obj1 == res
class ToBePickled(object):
def __init__(self, item):
self.item = item
def __eq__(self, other):
if isinstance(other, ToBePickled):
return self.item == other.item
else:
return False
def test_fnpickling_class(tmpdir):
"""
    Tests the `fnpickle` and `fnunpickle` functions' ability to pickle
and unpickle custom classes.
"""
fn = str(tmpdir.join('test2.pickle'))
obj1 = 'astring'
obj2 = ToBePickled(obj1)
fnpickle(obj2, fn)
res = fnunpickle(fn)
assert res == obj2
def test_fnpickling_protocol(tmpdir):
"""
    Tests the `fnpickle` and `fnunpickle` functions' ability to pickle
    and unpickle pickle files from all protocols.
"""
import pickle
obj1 = 'astring'
obj2 = ToBePickled(obj1)
for p in range(pickle.HIGHEST_PROTOCOL + 1):
fn = str(tmpdir.join('testp{}.pickle'.format(p)))
fnpickle(obj2, fn, protocol=p)
res = fnunpickle(fn)
assert res == obj2
def test_fnpickling_many(tmpdir):
"""
    Tests the `fnpickle` and `fnunpickle` functions' ability to pickle
and unpickle multiple objects from a single file.
"""
fn = str(tmpdir.join('test3.pickle'))
# now try multiples
obj3 = 328.3432
obj4 = 'blahblahfoo'
fnpickle(obj3, fn)
fnpickle(obj4, fn, append=True)
res = fnunpickle(fn, number=-1)
assert len(res) == 2
assert res[0] == obj3
assert res[1] == obj4
fnpickle(obj4, fn, append=True)
res = fnunpickle(fn, number=2)
assert len(res) == 2
with pytest.raises(EOFError):
fnunpickle(fn, number=5)
| 24.431193
| 70
| 0.626361
|
7b3300e3157bb51fc605dcb61fda49d5f9e9378d
| 49,454
|
py
|
Python
|
src/full_node/full_node_api.py
|
nup002/chia-blockchain
|
93adb84f29c60bf06d30493c104be9329d7886dc
|
[
"Apache-2.0"
] | null | null | null |
src/full_node/full_node_api.py
|
nup002/chia-blockchain
|
93adb84f29c60bf06d30493c104be9329d7886dc
|
[
"Apache-2.0"
] | null | null | null |
src/full_node/full_node_api.py
|
nup002/chia-blockchain
|
93adb84f29c60bf06d30493c104be9329d7886dc
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import dataclasses
import time
import src.server.ws_connection as ws
from typing import AsyncGenerator, List, Optional, Tuple, Callable, Dict
from chiabip158 import PyBIP158
from blspy import G2Element, AugSchemeMPL
from src.consensus.block_creation import create_unfinished_block
from src.consensus.pot_iterations import (
calculate_ip_iters,
calculate_sp_iters,
calculate_iterations_quality,
)
from src.full_node.full_node import FullNode
from src.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin
from src.full_node.signage_point import SignagePoint
from src.consensus.sub_block_record import SubBlockRecord
from src.protocols import (
farmer_protocol,
full_node_protocol,
timelord_protocol,
wallet_protocol,
)
from src.protocols.full_node_protocol import RejectSubBlocks
from src.protocols.wallet_protocol import RejectHeaderRequest, PuzzleSolutionResponse, RejectHeaderBlocks
from src.server.outbound_message import Message, NodeType, OutboundMessage
from src.types.coin import Coin, hash_coin_list
from src.types.end_of_slot_bundle import EndOfSubSlotBundle
from src.types.full_block import FullBlock
from src.types.header_block import HeaderBlock
from src.types.mempool_inclusion_status import MempoolInclusionStatus
from src.types.mempool_item import MempoolItem
from src.types.pool_target import PoolTarget
from src.types.program import Program
from src.types.sized_bytes import bytes32
from src.types.spend_bundle import SpendBundle
from src.types.unfinished_block import UnfinishedBlock
from src.util.api_decorators import api_request, peer_required
from src.util.errors import Err
from src.util.ints import uint64, uint128, uint8, uint32
from src.types.peer_info import PeerInfo
from src.util.merkle_set import MerkleSet
OutboundMessageGenerator = AsyncGenerator[OutboundMessage, None]
class FullNodeAPI:
full_node: FullNode
def __init__(self, full_node):
self.full_node = full_node
def _set_state_changed_callback(self, callback: Callable):
self.full_node.state_changed_callback = callback
@property
def server(self):
return self.full_node.server
@property
def log(self):
return self.full_node.log
@peer_required
@api_request
async def request_peers(self, _request: full_node_protocol.RequestPeers, peer: ws.WSChiaConnection):
if peer.peer_server_port is None:
return None
peer_info = PeerInfo(peer.peer_host, peer.peer_server_port)
if self.full_node.full_node_peers is not None:
msg = await self.full_node.full_node_peers.request_peers(peer_info)
return msg
@peer_required
@api_request
async def respond_peers(
self, request: full_node_protocol.RespondPeers, peer: ws.WSChiaConnection
) -> Optional[Message]:
if self.full_node.full_node_peers is not None:
if peer.connection_type is NodeType.INTRODUCER:
is_full_node = False
else:
is_full_node = True
await self.full_node.full_node_peers.respond_peers(request, peer.get_peer_info(), is_full_node)
if peer.connection_type is NodeType.INTRODUCER:
await peer.close()
return None
@peer_required
@api_request
async def new_peak(self, request: full_node_protocol.NewPeak, peer: ws.WSChiaConnection) -> Optional[Message]:
"""
A peer notifies us that they have added a new peak to their blockchain. If we don't have it,
we can ask for it.
"""
return await self.full_node.new_peak(request, peer)
@api_request
async def new_transaction(self, transaction: full_node_protocol.NewTransaction) -> Optional[Message]:
"""
A peer notifies us of a new transaction.
Requests a full transaction if we haven't seen it previously, and if the fees are enough.
"""
# Ignore if syncing
if self.full_node.sync_store.get_sync_mode():
return None
# Ignore if already seen
if self.full_node.mempool_manager.seen(transaction.transaction_id):
return None
if self.full_node.mempool_manager.is_fee_enough(transaction.fees, transaction.cost):
request_tx = full_node_protocol.RequestTransaction(transaction.transaction_id)
msg = Message("request_transaction", request_tx)
return msg
return None
@api_request
async def request_transaction(self, request: full_node_protocol.RequestTransaction) -> Optional[Message]:
""" Peer has requested a full transaction from us. """
# Ignore if syncing
if self.full_node.sync_store.get_sync_mode():
return None
spend_bundle = self.full_node.mempool_manager.get_spendbundle(request.transaction_id)
if spend_bundle is None:
return None
transaction = full_node_protocol.RespondTransaction(spend_bundle)
msg = Message("respond_transaction", transaction)
self.log.info(f"sending transaction (tx_id: {spend_bundle.name()}) to peer")
return msg
@peer_required
@api_request
async def respond_transaction(
self, tx: full_node_protocol.RespondTransaction, peer: ws.WSChiaConnection
) -> Optional[Message]:
"""
Receives a full transaction from peer.
If tx is added to mempool, send tx_id to others. (new_transaction)
"""
# Ignore if syncing
if self.full_node.sync_store.get_sync_mode():
return None
async with self.full_node.blockchain.lock:
# Ignore if we have already added this transaction
if self.full_node.mempool_manager.get_spendbundle(tx.transaction.name()) is not None:
return None
cost, status, error = await self.full_node.mempool_manager.add_spendbundle(tx.transaction)
if status == MempoolInclusionStatus.SUCCESS:
self.log.info(f"Added transaction to mempool: {tx.transaction.name()}")
fees = tx.transaction.fees()
assert fees >= 0
assert cost is not None
new_tx = full_node_protocol.NewTransaction(
tx.transaction.name(),
cost,
uint64(tx.transaction.fees()),
)
message = Message("new_transaction", new_tx)
await self.server.send_to_all_except([message], NodeType.FULL_NODE, peer.peer_node_id)
else:
self.log.warning(
f"Was not able to add transaction with id {tx.transaction.name()}, {status} error: {error}"
)
return None
@api_request
async def request_proof_of_weight(self, request: full_node_protocol.RequestProofOfWeight) -> Optional[Message]:
if request.tip not in self.full_node.blockchain.sub_blocks:
self.log.error(f"got weight proof request for unknown peak {request.tip}")
return None
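        # If another coroutine is already generating a weight proof for this tip, wait for it to finish first.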
if request.tip in self.full_node.pow_creation:
event = self.full_node.pow_creation[request.tip]
await event.wait()
wp = await self.full_node.weight_proof_handler.get_proof_of_weight(request.tip)
else:
event = asyncio.Event()
self.full_node.pow_creation[request.tip] = event
wp = await self.full_node.weight_proof_handler.get_proof_of_weight(request.tip)
event.set()
tips = list(self.full_node.pow_creation.keys())
if len(tips) > 4:
# Remove old from cache
for i in range(0, 4):
self.full_node.pow_creation.pop(tips[i])
if wp is None:
self.log.error(f"failed creating weight proof for peak {request.tip}")
return None
return Message("respond_proof_of_weight", full_node_protocol.RespondProofOfWeight(wp, request.tip))
@api_request
@peer_required
async def respond_proof_of_weight(
self,
response: full_node_protocol.RespondProofOfWeight,
peer: ws.WSChiaConnection,
) -> Optional[Message]:
if peer.peer_node_id not in self.full_node.pow_pending:
self.log.warning("weight proof not in pending request list")
return None
validated, fork_point = self.full_node.weight_proof_handler.validate_weight_proof(response.wp)
if not validated:
raise Exception("bad weight proof, disconnecting peer")
# get tip params
tip_weight = response.wp.recent_chain_data[-1].reward_chain_sub_block.weight
tip_height = response.wp.recent_chain_data[-1].reward_chain_sub_block.sub_block_height
self.full_node.sync_store.add_potential_peak(response.tip, tip_height, tip_weight)
self.full_node.sync_store.add_potential_fork_point(response.tip, fork_point)
return Message(
"request_sub_block",
full_node_protocol.RequestSubBlock(uint32(tip_height), True),
)
@api_request
async def request_sub_block(self, request: full_node_protocol.RequestSubBlock) -> Optional[Message]:
if request.sub_height not in self.full_node.blockchain.sub_height_to_hash:
return None
block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(
self.full_node.blockchain.sub_height_to_hash[request.sub_height]
)
if block is not None:
if not request.include_transaction_block:
block = dataclasses.replace(block, transactions_generator=None)
msg = Message("respond_sub_block", full_node_protocol.RespondSubBlock(block))
return msg
return None
@api_request
async def request_sub_blocks(self, request: full_node_protocol.RequestSubBlocks) -> Optional[Message]:
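        # Ignore inverted ranges and requests spanning more than 32 sub-blocks.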
if request.end_sub_height < request.start_sub_height or request.end_sub_height - request.start_sub_height > 32:
return None
for i in range(request.start_sub_height, request.end_sub_height + 1):
if i not in self.full_node.blockchain.sub_height_to_hash:
reject = RejectSubBlocks(request.start_sub_height, request.end_sub_height)
msg = Message("reject_sub_blocks", reject)
return msg
blocks = []
for i in range(request.start_sub_height, request.end_sub_height + 1):
block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(
self.full_node.blockchain.sub_height_to_hash[uint32(i)]
)
if block is None:
reject = RejectSubBlocks(request.start_sub_height, request.end_sub_height)
msg = Message("reject_sub_blocks", reject)
return msg
if not request.include_transaction_block:
block = dataclasses.replace(block, transactions_generator=None)
blocks.append(block)
msg = Message(
"respond_sub_blocks",
full_node_protocol.RespondSubBlocks(request.start_sub_height, request.end_sub_height, blocks),
)
return msg
@api_request
async def reject_sub_blocks(self, request: full_node_protocol.RequestSubBlocks):
self.log.info(f"reject_sub_blocks {request.start_sub_height} {request.end_sub_height}")
pass
@api_request
async def respond_sub_blocks(self, request: full_node_protocol.RespondSubBlocks):
pass
@api_request
@peer_required
async def respond_sub_block(
self,
respond_sub_block: full_node_protocol.RespondSubBlock,
peer: ws.WSChiaConnection,
) -> Optional[Message]:
"""
Receive a full block from a peer full node (or ourselves).
"""
if self.full_node.sync_store.get_sync_mode():
return await self.full_node.respond_sub_block(respond_sub_block, peer)
else:
async with self.full_node.timelord_lock:
return await self.full_node.respond_sub_block(respond_sub_block, peer)
@api_request
async def new_unfinished_sub_block(
self, new_unfinished_sub_block: full_node_protocol.NewUnfinishedSubBlock
) -> Optional[Message]:
# Ignore if syncing
if self.full_node.sync_store.get_sync_mode():
return None
if (
self.full_node.full_node_store.get_unfinished_block(new_unfinished_sub_block.unfinished_reward_hash)
is not None
):
return None
msg = Message(
"request_unfinished_sub_block",
full_node_protocol.RequestUnfinishedSubBlock(new_unfinished_sub_block.unfinished_reward_hash),
)
return msg
@api_request
async def request_unfinished_sub_block(
self, request_unfinished_sub_block: full_node_protocol.RequestUnfinishedSubBlock
) -> Optional[Message]:
unfinished_block: Optional[UnfinishedBlock] = self.full_node.full_node_store.get_unfinished_block(
request_unfinished_sub_block.unfinished_reward_hash
)
if unfinished_block is not None:
msg = Message(
"respond_unfinished_sub_block",
full_node_protocol.RespondUnfinishedSubBlock(unfinished_block),
)
return msg
return None
@peer_required
@api_request
async def respond_unfinished_sub_block(
self,
respond_unfinished_sub_block: full_node_protocol.RespondUnfinishedSubBlock,
peer: ws.WSChiaConnection,
) -> Optional[Message]:
if self.full_node.sync_store.get_sync_mode():
return None
await self.full_node.respond_unfinished_sub_block(respond_unfinished_sub_block, peer)
return None
@api_request
async def new_signage_point_or_end_of_sub_slot(
self, new_sp: full_node_protocol.NewSignagePointOrEndOfSubSlot
) -> Optional[Message]:
# Ignore if syncing
if self.full_node.sync_store.get_sync_mode():
return None
if (
self.full_node.full_node_store.get_signage_point_by_index(
new_sp.challenge_hash,
new_sp.index_from_challenge,
new_sp.last_rc_infusion,
)
is not None
):
return None
if self.full_node.full_node_store.have_newer_signage_point(
new_sp.challenge_hash, new_sp.index_from_challenge, new_sp.last_rc_infusion
):
return None
if new_sp.index_from_challenge == 0 and new_sp.prev_challenge_hash is not None:
if self.full_node.full_node_store.get_sub_slot(new_sp.prev_challenge_hash) is None:
# If this is an end of sub slot, and we don't have the prev, request the prev instead
full_node_request = full_node_protocol.RequestSignagePointOrEndOfSubSlot(
new_sp.prev_challenge_hash, uint8(0), new_sp.last_rc_infusion
)
return Message("request_signage_point_or_end_of_sub_slot", full_node_request)
if new_sp.index_from_challenge > 0:
if (
new_sp.challenge_hash != self.full_node.constants.FIRST_CC_CHALLENGE
and self.full_node.full_node_store.get_sub_slot(new_sp.challenge_hash) is None
):
                # If this is a normal signage point and we don't have the end of sub slot, request the end of sub slot
full_node_request = full_node_protocol.RequestSignagePointOrEndOfSubSlot(
new_sp.challenge_hash, uint8(0), new_sp.last_rc_infusion
)
return Message("request_signage_point_or_end_of_sub_slot", full_node_request)
# Otherwise (we have the prev or the end of sub slot), request it normally
full_node_request = full_node_protocol.RequestSignagePointOrEndOfSubSlot(
new_sp.challenge_hash, new_sp.index_from_challenge, new_sp.last_rc_infusion
)
return Message("request_signage_point_or_end_of_sub_slot", full_node_request)
@api_request
async def request_signage_point_or_end_of_sub_slot(
self, request: full_node_protocol.RequestSignagePointOrEndOfSubSlot
) -> Optional[Message]:
if request.index_from_challenge == 0:
sub_slot: Optional[Tuple[EndOfSubSlotBundle, int, uint128]] = self.full_node.full_node_store.get_sub_slot(
request.challenge_hash
)
if sub_slot is not None:
return Message(
"respond_end_of_sub_slot",
full_node_protocol.RespondEndOfSubSlot(sub_slot[0]),
)
else:
if self.full_node.full_node_store.get_sub_slot(request.challenge_hash) is None:
if request.challenge_hash != self.full_node.constants.FIRST_CC_CHALLENGE:
self.log.warning(f"Don't have challenge hash {request.challenge_hash}")
sp: Optional[SignagePoint] = self.full_node.full_node_store.get_signage_point_by_index(
request.challenge_hash,
request.index_from_challenge,
request.last_rc_infusion,
)
if sp is not None:
assert (
sp.cc_vdf is not None
and sp.cc_proof is not None
and sp.rc_vdf is not None
and sp.rc_proof is not None
)
full_node_response = full_node_protocol.RespondSignagePoint(
request.index_from_challenge,
sp.cc_vdf,
sp.cc_proof,
sp.rc_vdf,
sp.rc_proof,
)
return Message("respond_signage_point", full_node_response)
else:
self.log.warning(f"Don't have signage point {request}")
return None
@peer_required
@api_request
async def respond_signage_point(
self, request: full_node_protocol.RespondSignagePoint, peer: ws.WSChiaConnection
) -> Optional[Message]:
if self.full_node.sync_store.get_sync_mode():
return None
async with self.full_node.timelord_lock:
# Already have signage point
if (
self.full_node.full_node_store.get_signage_point(request.challenge_chain_vdf.output.get_hash())
is not None
):
return None
peak = self.full_node.blockchain.get_peak()
if peak is not None and peak.sub_block_height > self.full_node.constants.MAX_SUB_SLOT_SUB_BLOCKS:
sub_slot_iters = peak.sub_slot_iters
difficulty = uint64(peak.weight - self.full_node.blockchain.sub_blocks[peak.prev_hash].weight)
next_sub_slot_iters = self.full_node.blockchain.get_next_slot_iters(peak.header_hash, True)
next_difficulty = self.full_node.blockchain.get_next_difficulty(peak.header_hash, True)
sub_slots_for_peak = await self.full_node.blockchain.get_sp_and_ip_sub_slots(peak.header_hash)
assert sub_slots_for_peak is not None
ip_sub_slot: Optional[EndOfSubSlotBundle] = sub_slots_for_peak[1]
else:
sub_slot_iters = self.full_node.constants.SUB_SLOT_ITERS_STARTING
difficulty = self.full_node.constants.DIFFICULTY_STARTING
next_sub_slot_iters = sub_slot_iters
next_difficulty = difficulty
ip_sub_slot = None
added = self.full_node.full_node_store.new_signage_point(
request.index_from_challenge,
self.full_node.blockchain.sub_blocks,
self.full_node.blockchain.get_peak(),
next_sub_slot_iters,
SignagePoint(
request.challenge_chain_vdf,
request.challenge_chain_proof,
request.reward_chain_vdf,
request.reward_chain_proof,
),
)
if added:
self.log.info(
f"⏲️ Finished signage point {request.index_from_challenge}/"
f"{self.full_node.constants.NUM_SPS_SUB_SLOT}: "
f"{request.challenge_chain_vdf.output.get_hash()} "
)
sub_slot_tuple = self.full_node.full_node_store.get_sub_slot(request.challenge_chain_vdf.challenge)
if sub_slot_tuple is not None:
prev_challenge = sub_slot_tuple[0].challenge_chain.challenge_chain_end_of_slot_vdf.challenge
else:
prev_challenge = None
# Notify nodes of the new signage point
broadcast = full_node_protocol.NewSignagePointOrEndOfSubSlot(
prev_challenge,
request.challenge_chain_vdf.challenge,
request.index_from_challenge,
request.reward_chain_vdf.challenge,
)
msg = Message("new_signage_point_or_end_of_sub_slot", broadcast)
await self.server.send_to_all_except([msg], NodeType.FULL_NODE, peer.peer_node_id)
if peak is not None and peak.sub_block_height > self.full_node.constants.MAX_SUB_SLOT_SUB_BLOCKS:
# Makes sure to potentially update the difficulty if we are past the peak (into a new sub-slot)
assert ip_sub_slot is not None
if request.challenge_chain_vdf.challenge != ip_sub_slot.challenge_chain.get_hash():
difficulty = next_difficulty
sub_slot_iters = next_sub_slot_iters
# Notify farmers of the new signage point
broadcast_farmer = farmer_protocol.NewSignagePoint(
request.challenge_chain_vdf.challenge,
request.challenge_chain_vdf.output.get_hash(),
request.reward_chain_vdf.output.get_hash(),
difficulty,
sub_slot_iters,
request.index_from_challenge,
)
msg = Message("new_signage_point", broadcast_farmer)
await self.server.send_to_all([msg], NodeType.FARMER)
else:
self.log.warning(
f"Signage point {request.index_from_challenge} not added, CC challenge: "
f"{request.challenge_chain_vdf.challenge}, RC challenge: {request.reward_chain_vdf.challenge}"
)
return None
@peer_required
@api_request
async def respond_end_of_sub_slot(
self, request: full_node_protocol.RespondEndOfSubSlot, peer: ws.WSChiaConnection
) -> Optional[Message]:
if self.full_node.sync_store.get_sync_mode():
return None
msg, _ = await self.full_node.respond_end_of_sub_slot(request, peer)
return msg
@peer_required
@api_request
async def request_mempool_transactions(
self,
request: full_node_protocol.RequestMempoolTransactions,
peer: ws.WSChiaConnection,
) -> Optional[Message]:
received_filter = PyBIP158(bytearray(request.filter))
items: List[MempoolItem] = await self.full_node.mempool_manager.get_items_not_in_filter(received_filter)
for item in items:
transaction = full_node_protocol.RespondTransaction(item.spend_bundle)
msg = Message("respond_transaction", transaction)
await peer.send_message(msg)
return None
# FARMER PROTOCOL
@api_request
async def declare_proof_of_space(self, request: farmer_protocol.DeclareProofOfSpace) -> Optional[Message]:
"""
Creates a block body and header, with the proof of space, coinbase, and fee targets provided
by the farmer, and sends the hash of the header data back to the farmer.
"""
async with self.full_node.timelord_lock:
if request.pool_target is None or request.pool_signature is None:
raise ValueError("Adaptable pool protocol not yet available.")
sp_vdfs: Optional[SignagePoint] = self.full_node.full_node_store.get_signage_point(
request.challenge_chain_sp
)
if sp_vdfs is None:
self.log.warning(f"Received proof of space for an unknown signage point {request.challenge_chain_sp}")
return None
if request.signage_point_index > 0:
assert sp_vdfs.rc_vdf is not None
if sp_vdfs.rc_vdf.output.get_hash() != request.reward_chain_sp:
self.log.info(
f"Received proof of space for a potentially old signage point {request.challenge_chain_sp}. "
f"Current sp: {sp_vdfs.rc_vdf.output.get_hash()}"
)
return None
if request.signage_point_index == 0:
cc_challenge_hash: bytes32 = request.challenge_chain_sp
else:
assert sp_vdfs.cc_vdf is not None
cc_challenge_hash = sp_vdfs.cc_vdf.challenge
pos_sub_slot: Optional[Tuple[EndOfSubSlotBundle, int, uint128]] = None
if request.challenge_hash != self.full_node.constants.FIRST_CC_CHALLENGE:
# Checks that the proof of space is a response to a recent challenge and valid SP
pos_sub_slot = self.full_node.full_node_store.get_sub_slot(cc_challenge_hash)
if pos_sub_slot is None:
self.log.warning(f"Received proof of space for an unknown sub slot: {request}")
return None
total_iters_pos_slot: uint128 = pos_sub_slot[2]
else:
total_iters_pos_slot = uint128(0)
assert cc_challenge_hash == request.challenge_hash
# Now we know that the proof of space has a signage point either:
# 1. In the previous sub-slot of the peak (overflow)
# 2. In the same sub-slot as the peak
# 3. In a future sub-slot that we already know of
# Checks that the proof of space is valid
quality_string: Optional[bytes32] = request.proof_of_space.verify_and_get_quality_string(
self.full_node.constants, cc_challenge_hash, request.challenge_chain_sp
)
assert quality_string is not None and len(quality_string) == 32
# Grab best transactions from Mempool for given tip target
async with self.full_node.blockchain.lock:
peak: Optional[SubBlockRecord] = self.full_node.blockchain.get_peak()
if peak is None:
spend_bundle: Optional[SpendBundle] = None
else:
spend_bundle = await self.full_node.mempool_manager.create_bundle_from_mempool(peak.header_hash)
def get_plot_sig(to_sign, _) -> G2Element:
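            # Supply the farmer-provided signature for whichever signage point is being signed;
            # any other message gets the infinity element.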
if to_sign == request.challenge_chain_sp:
return request.challenge_chain_sp_signature
elif to_sign == request.reward_chain_sp:
return request.reward_chain_sp_signature
return G2Element.infinity()
def get_pool_sig(_1, _2) -> G2Element:
return request.pool_signature
prev_sb: Optional[SubBlockRecord] = self.full_node.blockchain.get_peak()
# Finds the previous sub block from the signage point, ensuring that the reward chain VDF is correct
if prev_sb is not None:
if request.signage_point_index == 0:
if pos_sub_slot is None:
self.log.warning("Pos sub slot is None")
return None
rc_challenge = pos_sub_slot[0].reward_chain.end_of_slot_vdf.challenge
else:
assert sp_vdfs.rc_vdf is not None
rc_challenge = sp_vdfs.rc_vdf.challenge
# Backtrack through empty sub-slots
for eos, _, _ in reversed(self.full_node.full_node_store.finished_sub_slots):
if eos is not None and eos.reward_chain.get_hash() == rc_challenge:
rc_challenge = eos.reward_chain.end_of_slot_vdf.challenge
found = False
attempts = 0
while prev_sb is not None and attempts < 10:
if prev_sb.reward_infusion_new_challenge == rc_challenge:
found = True
break
if prev_sb.finished_reward_slot_hashes is not None and len(prev_sb.finished_reward_slot_hashes) > 0:
if prev_sb.finished_reward_slot_hashes[-1] == rc_challenge:
# This sub-block includes a sub-slot which is where our SP vdf starts. Go back one more
# to find the prev sub block
prev_sb = self.full_node.blockchain.sub_blocks.get(prev_sb.prev_hash, None)
found = True
break
prev_sb = self.full_node.blockchain.sub_blocks.get(prev_sb.prev_hash, None)
attempts += 1
if not found:
self.log.warning("Did not find a previous block with the correct reward chain hash")
return None
try:
finished_sub_slots: List[EndOfSubSlotBundle] = self.full_node.full_node_store.get_finished_sub_slots(
prev_sb, self.full_node.blockchain.sub_blocks, cc_challenge_hash
)
if (
len(finished_sub_slots) > 0
and pos_sub_slot is not None
and finished_sub_slots[-1] != pos_sub_slot[0]
):
self.log.error("Have different sub-slots than is required to farm this sub-block")
return None
except ValueError as e:
self.log.warning(f"Value Error: {e}")
return None
if prev_sb is None:
pool_target = PoolTarget(
self.full_node.constants.GENESIS_PRE_FARM_POOL_PUZZLE_HASH,
uint32(0),
)
else:
pool_target = request.pool_target
if peak is None or peak.sub_block_height <= self.full_node.constants.MAX_SUB_SLOT_SUB_BLOCKS:
difficulty = self.full_node.constants.DIFFICULTY_STARTING
sub_slot_iters = self.full_node.constants.SUB_SLOT_ITERS_STARTING
else:
difficulty = uint64(peak.weight - self.full_node.blockchain.sub_blocks[peak.prev_hash].weight)
sub_slot_iters = peak.sub_slot_iters
for sub_slot in finished_sub_slots:
if sub_slot.challenge_chain.new_difficulty is not None:
difficulty = sub_slot.challenge_chain.new_difficulty
if sub_slot.challenge_chain.new_sub_slot_iters is not None:
sub_slot_iters = sub_slot.challenge_chain.new_sub_slot_iters
required_iters: uint64 = calculate_iterations_quality(
quality_string,
request.proof_of_space.size,
difficulty,
request.challenge_chain_sp,
)
sp_iters: uint64 = calculate_sp_iters(self.full_node.constants, sub_slot_iters, request.signage_point_index)
ip_iters: uint64 = calculate_ip_iters(
self.full_node.constants,
sub_slot_iters,
request.signage_point_index,
required_iters,
)
self.log.info("Starting to make the unfinished sub-block")
unfinished_block: UnfinishedBlock = create_unfinished_block(
self.full_node.constants,
total_iters_pos_slot,
sub_slot_iters,
request.signage_point_index,
sp_iters,
ip_iters,
request.proof_of_space,
cc_challenge_hash,
request.farmer_puzzle_hash,
pool_target,
get_plot_sig,
get_pool_sig,
sp_vdfs,
uint64(int(time.time())),
b"",
spend_bundle,
prev_sb,
self.full_node.blockchain.sub_blocks,
finished_sub_slots,
)
self.log.info("Made the unfinished sub-block")
if prev_sb is not None:
height: uint32 = uint32(prev_sb.sub_block_height + 1)
else:
height = uint32(0)
self.full_node.full_node_store.add_candidate_block(quality_string, height, unfinished_block)
foliage_sb_data_hash = unfinished_block.foliage_sub_block.foliage_sub_block_data.get_hash()
if unfinished_block.is_block():
foliage_block_hash = unfinished_block.foliage_sub_block.foliage_block_hash
else:
foliage_block_hash = bytes([0] * 32)
message = farmer_protocol.RequestSignedValues(
quality_string,
foliage_sb_data_hash,
foliage_block_hash,
)
return Message("request_signed_values", message)
@api_request
async def signed_values(self, farmer_request: farmer_protocol.SignedValues) -> Optional[Message]:
"""
Signature of header hash, by the harvester. This is enough to create an unfinished
block, which only needs a Proof of Time to be finished. If the signature is valid,
we call the unfinished_block routine.
"""
candidate: Optional[UnfinishedBlock] = self.full_node.full_node_store.get_candidate_block(
farmer_request.quality_string
)
if candidate is None:
self.log.warning(f"Quality string {farmer_request.quality_string} not found in database")
return None
if not AugSchemeMPL.verify(
candidate.reward_chain_sub_block.proof_of_space.plot_public_key,
candidate.foliage_sub_block.foliage_sub_block_data.get_hash(),
farmer_request.foliage_sub_block_signature,
):
self.log.warning("Signature not valid. There might be a collision in plots. Ignore this during tests.")
return None
fsb2 = dataclasses.replace(
candidate.foliage_sub_block,
foliage_sub_block_signature=farmer_request.foliage_sub_block_signature,
)
if candidate.is_block():
fsb2 = dataclasses.replace(fsb2, foliage_block_signature=farmer_request.foliage_block_signature)
new_candidate = dataclasses.replace(candidate, foliage_sub_block=fsb2)
if not self.full_node.has_valid_pool_sig(new_candidate):
self.log.warning("Trying to make a pre-farm block but height is not 0")
return None
# Propagate to ourselves (which validates and does further propagations)
request = full_node_protocol.RespondUnfinishedSubBlock(new_candidate)
await self.full_node.respond_unfinished_sub_block(request, None, True)
return None
# TIMELORD PROTOCOL
@api_request
async def new_infusion_point_vdf(self, request: timelord_protocol.NewInfusionPointVDF) -> Optional[Message]:
if self.full_node.sync_store.get_sync_mode():
return None
# Lookup unfinished blocks
return await self.full_node.new_infusion_point_vdf(request)
@peer_required
@api_request
async def new_signage_point_vdf(
self, request: timelord_protocol.NewSignagePointVDF, peer: ws.WSChiaConnection
) -> None:
if self.full_node.sync_store.get_sync_mode():
return None
full_node_message = full_node_protocol.RespondSignagePoint(
request.index_from_challenge,
request.challenge_chain_sp_vdf,
request.challenge_chain_sp_proof,
request.reward_chain_sp_vdf,
request.reward_chain_sp_proof,
)
await self.respond_signage_point(full_node_message, peer)
@peer_required
@api_request
async def new_end_of_sub_slot_vdf(
self, request: timelord_protocol.NewEndOfSubSlotVDF, peer: ws.WSChiaConnection
) -> Optional[Message]:
if self.full_node.sync_store.get_sync_mode():
return None
if (
self.full_node.full_node_store.get_sub_slot(request.end_of_sub_slot_bundle.challenge_chain.get_hash())
is not None
):
return None
# Calls our own internal message to handle the end of sub slot, and potentially broadcasts to other peers.
full_node_message = full_node_protocol.RespondEndOfSubSlot(request.end_of_sub_slot_bundle)
msg, added = await self.full_node.respond_end_of_sub_slot(full_node_message, peer)
if not added:
self.log.error(
f"Was not able to add end of sub-slot: "
f"{request.end_of_sub_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge}. "
f"Re-sending new-peak to timelord"
)
await self.full_node.send_peak_to_timelords()
return None
else:
return msg
@api_request
async def request_sub_block_header(self, request: wallet_protocol.RequestSubBlockHeader) -> Optional[Message]:
if request.sub_height not in self.full_node.blockchain.sub_height_to_hash:
msg = Message("reject_sub_block_header", RejectHeaderRequest(request.sub_height))
return msg
block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(
self.full_node.blockchain.sub_height_to_hash[request.sub_height]
)
if block is not None:
header_block: HeaderBlock = await block.get_block_header()
msg = Message(
"respond_sub_block_header",
wallet_protocol.RespondSubBlockHeader(header_block),
)
return msg
return None
@api_request
async def request_additions(self, request: wallet_protocol.RequestAdditions) -> Optional[Message]:
block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(request.header_hash)
if (
block is None
or block.is_block() is False
or block.sub_block_height not in self.full_node.blockchain.sub_height_to_hash
):
reject = wallet_protocol.RejectAdditionsRequest(request.sub_height, request.header_hash)
msg = Message("reject_additions_request", reject)
return msg
assert block is not None and block.foliage_block is not None
_, additions = await block.tx_removals_and_additions()
puzzlehash_coins_map: Dict[bytes32, List[Coin]] = {}
for coin in additions + list(block.get_included_reward_coins()):
if coin.puzzle_hash in puzzlehash_coins_map:
puzzlehash_coins_map[coin.puzzle_hash].append(coin)
else:
puzzlehash_coins_map[coin.puzzle_hash] = [coin]
coins_map: List[Tuple[bytes32, List[Coin]]] = []
proofs_map: List[Tuple[bytes32, bytes, Optional[bytes]]] = []
if request.puzzle_hashes is None:
for puzzle_hash, coins in puzzlehash_coins_map.items():
coins_map.append((puzzle_hash, coins))
response = wallet_protocol.RespondAdditions(block.sub_block_height, block.header_hash, coins_map, None)
else:
# Create addition Merkle set
addition_merkle_set = MerkleSet()
# Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
for puzzle, coins in puzzlehash_coins_map.items():
addition_merkle_set.add_already_hashed(puzzle)
addition_merkle_set.add_already_hashed(hash_coin_list(coins))
assert addition_merkle_set.get_root() == block.foliage_block.additions_root
for puzzle_hash in request.puzzle_hashes:
result, proof = addition_merkle_set.is_included_already_hashed(puzzle_hash)
if puzzle_hash in puzzlehash_coins_map:
coins_map.append((puzzle_hash, puzzlehash_coins_map[puzzle_hash]))
hash_coin_str = hash_coin_list(puzzlehash_coins_map[puzzle_hash])
result_2, proof_2 = addition_merkle_set.is_included_already_hashed(hash_coin_str)
assert result
assert result_2
proofs_map.append((puzzle_hash, proof, proof_2))
else:
coins_map.append((puzzle_hash, []))
assert not result
proofs_map.append((puzzle_hash, proof, None))
response = wallet_protocol.RespondAdditions(
block.sub_block_height, block.header_hash, coins_map, proofs_map
)
msg = Message("respond_additions", response)
return msg
@api_request
async def request_removals(self, request: wallet_protocol.RequestRemovals) -> Optional[Message]:
block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(request.header_hash)
if (
block is None
or block.is_block() is False
or block.sub_block_height != request.sub_height
or block.sub_block_height not in self.full_node.blockchain.sub_height_to_hash
or self.full_node.blockchain.sub_height_to_hash[block.sub_block_height] != block.header_hash
):
reject = wallet_protocol.RejectRemovalsRequest(request.sub_height, request.header_hash)
msg = Message("reject_removals_request", reject)
return msg
assert block is not None and block.foliage_block is not None
all_removals, _ = await block.tx_removals_and_additions()
coins_map: List[Tuple[bytes32, Optional[Coin]]] = []
proofs_map: List[Tuple[bytes32, bytes]] = []
# If there are no transactions, respond with empty lists
if block.transactions_generator is None:
proofs: Optional[List]
if request.coin_names is None:
proofs = None
else:
proofs = []
response = wallet_protocol.RespondRemovals(block.height, block.header_hash, [], proofs)
elif request.coin_names is None or len(request.coin_names) == 0:
for removal in all_removals:
cr = await self.full_node.coin_store.get_coin_record(removal)
assert cr is not None
coins_map.append((cr.coin.name(), cr.coin))
response = wallet_protocol.RespondRemovals(block.height, block.header_hash, coins_map, None)
else:
assert block.transactions_generator
removal_merkle_set = MerkleSet()
for coin_name in all_removals:
removal_merkle_set.add_already_hashed(coin_name)
assert removal_merkle_set.get_root() == block.foliage_block.removals_root
for coin_name in request.coin_names:
result, proof = removal_merkle_set.is_included_already_hashed(coin_name)
proofs_map.append((coin_name, proof))
if coin_name in all_removals:
cr = await self.full_node.coin_store.get_coin_record(coin_name)
assert cr is not None
coins_map.append((coin_name, cr.coin))
assert result
else:
coins_map.append((coin_name, None))
assert not result
response = wallet_protocol.RespondRemovals(block.height, block.header_hash, coins_map, proofs_map)
msg = Message("respond_removals", response)
return msg
@api_request
async def send_transaction(self, request: wallet_protocol.SendTransaction) -> Optional[Message]:
# Ignore if syncing
if self.full_node.sync_store.get_sync_mode():
status = MempoolInclusionStatus.FAILED
error: Optional[Err] = Err.UNKNOWN
else:
async with self.full_node.blockchain.lock:
cost, status, error = await self.full_node.mempool_manager.add_spendbundle(request.transaction)
if status == MempoolInclusionStatus.SUCCESS:
self.log.info(f"Added transaction to mempool: {request.transaction.name()}")
# Only broadcast successful transactions, not pending ones. Otherwise it's a DOS
# vector.
fees = request.transaction.fees()
assert fees >= 0
assert cost is not None
new_tx = full_node_protocol.NewTransaction(
request.transaction.name(),
cost,
uint64(request.transaction.fees()),
)
msg = Message("new_transaction", new_tx)
await self.full_node.server.send_to_all([msg], NodeType.FULL_NODE)
else:
self.log.warning(
f"Wasn't able to add transaction with id {request.transaction.name()}, "
f"status {status} error: {error}"
)
error_name = error.name if error is not None else None
if status == MempoolInclusionStatus.SUCCESS:
response = wallet_protocol.TransactionAck(request.transaction.name(), status, error_name)
else:
# If if failed/pending, but it previously succeeded (in mempool), this is idempotence, return SUCCESS
if self.full_node.mempool_manager.get_spendbundle(request.transaction.name()) is not None:
response = wallet_protocol.TransactionAck(
request.transaction.name(), MempoolInclusionStatus.SUCCESS, None
)
else:
response = wallet_protocol.TransactionAck(request.transaction.name(), status, error_name)
msg = Message("transaction_ack", response)
return msg
@api_request
async def request_puzzle_solution(self, request: wallet_protocol.RequestPuzzleSolution) -> Optional[Message]:
coin_name = request.coin_name
sub_height = request.sub_height
coin_record = await self.full_node.coin_store.get_coin_record(coin_name)
reject = wallet_protocol.RejectPuzzleSolution(coin_name, sub_height)
reject_msg = Message("reject_puzzle_solution", reject)
if coin_record is None or coin_record.spent_block_index != sub_height:
return reject_msg
header_hash = self.full_node.blockchain.sub_height_to_hash[sub_height]
block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(header_hash)
if block is None or block.transactions_generator is None:
return reject_msg
error, puzzle, solution = get_puzzle_and_solution_for_coin(block.transactions_generator, coin_name)
if error is not None:
return reject_msg
pz = Program.to(puzzle)
sol = Program.to(solution)
wrapper = PuzzleSolutionResponse(coin_name, sub_height, pz, sol)
response = wallet_protocol.RespondPuzzleSolution(wrapper)
response_msg = Message("respond_puzzle_solution", response)
return response_msg
@api_request
async def request_header_blocks(self, request: wallet_protocol.RequestHeaderBlocks) -> Optional[Message]:
if request.end_sub_height < request.start_sub_height or request.end_sub_height - request.start_sub_height > 32:
return None
for i in range(request.start_sub_height, request.end_sub_height + 1):
if i not in self.full_node.blockchain.sub_height_to_hash:
reject = RejectHeaderBlocks(request.start_sub_height, request.end_sub_height)
msg = Message("reject_header_blocks_request", reject)
return msg
blocks: List[HeaderBlock] = []
for i in range(request.start_sub_height, request.end_sub_height + 1):
block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(
self.full_node.blockchain.sub_height_to_hash[uint32(i)]
)
if block is None:
reject = RejectHeaderBlocks(request.start_sub_height, request.end_sub_height)
msg = Message("reject_header_blocks_request", reject)
return msg
blocks.append(await block.get_block_header())
msg = Message(
"respond_header_blocks",
wallet_protocol.RespondHeaderBlocks(request.start_sub_height, request.end_sub_height, blocks),
)
return msg
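# Hedged sketch (editor's addition): the handler above rejects ranges wider than 32
# sub-blocks, so a client paging through history has to split its request. A minimal,
# pure-Python helper for that split; only the start/end constraint checked above is assumed.
def _header_block_ranges(start_sub_height, end_sub_height, page_size=32):
    ranges = []
    current = start_sub_height
    while current <= end_sub_height:
        upper = min(current + page_size, end_sub_height)
        ranges.append((current, upper))
        current = upper + 1
    return ranges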
| 46.089469
| 120
| 0.641182
|
5d0b5298314e4d455126348b27873ae058700386
| 3,618
|
py
|
Python
|
python/tink/integration/gcpkms/_gcp_kms_aead_test.py
|
sgammon/tink
|
852e689f057794beb4784833d1af71c4a25920af
|
[
"Apache-2.0"
] | null | null | null |
python/tink/integration/gcpkms/_gcp_kms_aead_test.py
|
sgammon/tink
|
852e689f057794beb4784833d1af71c4a25920af
|
[
"Apache-2.0"
] | null | null | null |
python/tink/integration/gcpkms/_gcp_kms_aead_test.py
|
sgammon/tink
|
852e689f057794beb4784833d1af71c4a25920af
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tink.python.tink.integration.gcp_kms_aead."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import absltest
from tink import core
from tink.integration import gcpkms
from tink.testing import helper
CREDENTIAL_PATH = os.path.join(helper.get_tink_src_path(),
'testdata/credential.json')
KEY_URI = 'gcp-kms://projects/tink-test-infrastructure/locations/global/keyRings/unit-and-integration-testing/cryptoKeys/aead-key'
LOCAL_KEY_URI = 'gcp-kms://projects/tink-test-infrastructure/locations/europe-west1/keyRings/unit-and-integration-test/cryptoKeys/aead-key'
BAD_KEY_URI = 'aws-kms://arn:aws:kms:us-east-2:235739564943:key/3ee50705-5a82-4f5b-9753-05c4f473922f'
if 'TEST_SRCDIR' in os.environ:
# Set root certificates for gRPC in Bazel Test which are needed on MacOS
os.environ['GRPC_DEFAULT_SSL_ROOTS_FILE_PATH'] = os.path.join(
os.environ['TEST_SRCDIR'], 'google_root_pem/file/downloaded')
class GcpKmsAeadTest(absltest.TestCase):
def test_encrypt_decrypt(self):
gcp_client = gcpkms.GcpKmsClient(KEY_URI, CREDENTIAL_PATH)
aead = gcp_client.get_aead(KEY_URI)
plaintext = b'helloworld'
ciphertext = aead.encrypt(plaintext, b'')
self.assertEqual(plaintext, aead.decrypt(ciphertext, b''))
plaintext = b'hello'
associated_data = b'world'
ciphertext = aead.encrypt(plaintext, associated_data)
self.assertEqual(plaintext, aead.decrypt(ciphertext, associated_data))
def test_encrypt_decrypt_localized_uri(self):
gcp_client = gcpkms.GcpKmsClient(LOCAL_KEY_URI, CREDENTIAL_PATH)
aead = gcp_client.get_aead(LOCAL_KEY_URI)
plaintext = b'helloworld'
ciphertext = aead.encrypt(plaintext, b'')
self.assertEqual(plaintext, aead.decrypt(ciphertext, b''))
plaintext = b'hello'
associated_data = b'world'
ciphertext = aead.encrypt(plaintext, associated_data)
self.assertEqual(plaintext, aead.decrypt(ciphertext, associated_data))
def test_encrypt_with_bad_uri(self):
with self.assertRaises(core.TinkError):
gcp_client = gcpkms.GcpKmsClient(KEY_URI, CREDENTIAL_PATH)
gcp_client.get_aead(BAD_KEY_URI)
def test_corrupted_ciphertext(self):
gcp_client = gcpkms.GcpKmsClient(KEY_URI, CREDENTIAL_PATH)
aead = gcp_client.get_aead(KEY_URI)
plaintext = b'helloworld'
ciphertext = aead.encrypt(plaintext, b'')
self.assertEqual(plaintext, aead.decrypt(ciphertext, b''))
# Corrupt each byte once and check that decryption fails
# NOTE: Only starting at 4th byte here, as the 3rd byte is malleable
# (see b/146633745).
for byte_idx in range(3, len(ciphertext)):
tmp_ciphertext = list(ciphertext)
tmp_ciphertext[byte_idx] ^= 1
corrupted_ciphertext = bytes(tmp_ciphertext)
with self.assertRaises(core.TinkError):
aead.decrypt(corrupted_ciphertext, b'')
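# Hedged usage sketch (editor's addition): a minimal round trip outside the test harness,
# using only the client calls already exercised above. KEY_URI and CREDENTIAL_PATH are the
# module-level test constants; no other API surface is assumed.
def _example_round_trip():
  gcp_client = gcpkms.GcpKmsClient(KEY_URI, CREDENTIAL_PATH)
  aead = gcp_client.get_aead(KEY_URI)
  ciphertext = aead.encrypt(b'example plaintext', b'example associated data')
  return aead.decrypt(ciphertext, b'example associated data')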
if __name__ == '__main__':
# TODO(b/154273145): re-enable this.
pass
# absltest.main()
| 38.084211
| 139
| 0.74848
|
54c6751d9c4902ab65bd45b764554dfb87a3ccc7
| 2,259
|
py
|
Python
|
github_comparison/settings.py
|
osama-mohamed/github_comparison_django
|
5cf13c0891e492ec2322d6cd813d2b50cab362d1
|
[
"MIT"
] | 3
|
2018-05-02T20:37:11.000Z
|
2020-10-15T17:19:26.000Z
|
github_comparison/settings.py
|
osama-mohamed/github_comparison_django
|
5cf13c0891e492ec2322d6cd813d2b50cab362d1
|
[
"MIT"
] | 1
|
2019-06-10T21:35:13.000Z
|
2019-06-10T21:35:13.000Z
|
github_comparison/settings.py
|
osama-mohamed/github_comparison_django
|
5cf13c0891e492ec2322d6cd813d2b50cab362d1
|
[
"MIT"
] | null | null | null |
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'b#b94r&k^ek8f#=c3y&p=%0pv5h@vyhsrt_*8xm8p4z4eik34k'
DEBUG = False
ALLOWED_HOSTS = ['localhost']
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'github.apps.GithubConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'github_comparison.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'github_comparison.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'short',
'HOST': 'localhost',
'PORT': '3306',
'USER': 'OSAMA',
'PASSWORD': 'OSAMA',
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
| 25.1
| 91
| 0.653386
|
f5e9c2739af7a1ae8b2c52614e46816aeb558de3
| 4,344
|
py
|
Python
|
cifar10_train.py
|
dotrungkien/face_recognition
|
52c552c4f73850e62db88d0dc7271d73e4150180
|
[
"MIT"
] | null | null | null |
cifar10_train.py
|
dotrungkien/face_recognition
|
52c552c4f73850e62db88d0dc7271d73e4150180
|
[
"MIT"
] | null | null | null |
cifar10_train.py
|
dotrungkien/face_recognition
|
52c552c4f73850e62db88d0dc7271d73e4150180
|
[
"MIT"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A binary to train CIFAR-10 using a single GPU.
Accuracy:
cifar10_train.py achieves ~86% accuracy after 100K steps (256 epochs of
data) as judged by cifar10_eval.py.
Speed: With batch_size 128.
System | Step Time (sec/batch) | Accuracy
------------------------------------------------------------------
1 Tesla K20m | 0.35-0.60 | ~86% at 60K steps (5 hours)
1 Tesla K40m | 0.25-0.35 | ~86% at 100K steps (4 hours)
Usage:
Please see the tutorial and website for how to download the CIFAR-10
data set, compile the program and train the model.
http://tensorflow.org/tutorials/deep_cnn/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import time
import tensorflow as tf
import cifar10
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('train_dir', 'tmp/cifar10_train',
"""Directory where to write event logs """
"""and checkpoint.""")
tf.app.flags.DEFINE_integer('max_steps', 10000,
"""Number of batches to run.""")
tf.app.flags.DEFINE_boolean('log_device_placement', False,
"""Whether to log device placement.""")
tf.app.flags.DEFINE_integer('log_frequency', 10,
"""How often to log results to the console.""")
def train():
"""Train CIFAR-10 for a number of steps."""
with tf.Graph().as_default():
global_step = tf.contrib.framework.get_or_create_global_step()
# Get images and labels for CIFAR-10.
images, labels = cifar10.distorted_inputs()
# Build a Graph that computes the logits predictions from the
# inference model.
logits = cifar10.inference(images)
# Calculate loss.
loss = cifar10.loss(logits, labels)
# Build a Graph that trains the model with one batch of examples and
# updates the model parameters.
train_op = cifar10.train(loss, global_step)
class _LoggerHook(tf.train.SessionRunHook):
"""Logs loss and runtime."""
def begin(self):
self._step = -1
self._start_time = time.time()
def before_run(self, run_context):
self._step += 1
return tf.train.SessionRunArgs(loss) # Asks for loss value.
def after_run(self, run_context, run_values):
if self._step % FLAGS.log_frequency == 0:
current_time = time.time()
duration = current_time - self._start_time
self._start_time = current_time
loss_value = run_values.results
examples_per_sec = FLAGS.log_frequency * FLAGS.batch_size / duration
sec_per_batch = float(duration / FLAGS.log_frequency)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), self._step, loss_value,
examples_per_sec, sec_per_batch))
with tf.train.MonitoredTrainingSession(
checkpoint_dir=FLAGS.train_dir,
hooks=[tf.train.StopAtStepHook(last_step=FLAGS.max_steps),
tf.train.NanTensorHook(loss),
_LoggerHook()],
config=tf.ConfigProto(
log_device_placement=FLAGS.log_device_placement)) as mon_sess:
while not mon_sess.should_stop():
mon_sess.run(train_op)
def main(argv=None): # pylint: disable=unused-argument
#cifar10.maybe_download_and_extract()
if tf.gfile.Exists(FLAGS.train_dir):
tf.gfile.DeleteRecursively(FLAGS.train_dir)
tf.gfile.MakeDirs(FLAGS.train_dir)
train()
if __name__ == '__main__':
tf.app.run()
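# Hedged helper (editor's addition): a short run with the flags defined above overridden
# programmatically. The flag names and train() are exactly those declared in this file; the
# step count is an arbitrary example value.
def quick_train(steps=1000):
  FLAGS.max_steps = steps
  if tf.gfile.Exists(FLAGS.train_dir):
    tf.gfile.DeleteRecursively(FLAGS.train_dir)
  tf.gfile.MakeDirs(FLAGS.train_dir)
  train()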
| 36.504202
| 80
| 0.645718
|
6058642bb2e5bb521e6a47ffad5125dcb052fb19
| 3,596
|
py
|
Python
|
gmane.py
|
rovelee/gmane
|
47d00d7ad5ce8b7c1fe472e3f7bcd642aee2c437
|
[
"MIT"
] | null | null | null |
gmane.py
|
rovelee/gmane
|
47d00d7ad5ce8b7c1fe472e3f7bcd642aee2c437
|
[
"MIT"
] | null | null | null |
gmane.py
|
rovelee/gmane
|
47d00d7ad5ce8b7c1fe472e3f7bcd642aee2c437
|
[
"MIT"
] | null | null | null |
import re
import sqlite3
import ssl
import time
from urllib.request import urlopen
from gtools import parsemaildate
# Base URL of the test data site
baseurl = 'http://mbox.dr-chuck.net/sakai.devel/'
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
# Connect to the database; create it in the current directory if it does not exist
conn = sqlite3.connect('content.sqlite')
cur = conn.cursor()
# Create the Messages table if it does not exist
sql_create = '''CREATE TABLE IF NOT EXISTS Messages
(id INTEGER UNIQUE, email TEXT, sent_at TEXT,
subject TEXT, headers TEXT, body TEXT)'''
cur.execute(sql_create)
# Look up the id at which to resume crawling
sql_query = 'SELECT max(id) FROM Messages'
cur.execute(sql_query)
start = None
try:
row = cur.fetchone()
if row is not None:
start = row[0]
except Exception as e:
    print('Database select of start id failed with:', e)
if start is None:
start = 0
many = 0
fail = 0
count = 0
# Fetch and store messages
while True:
    # Stop the loop after more than five fetch errors
if fail > 5:
break
    # Ask how many messages to fetch
if many < 1:
sval = input('How many messages:')
if len(sval) < 1:
break
try:
many = int(sval)
except:
print('Type number!')
continue
    # Start fetching data
many -= 1
start += 1
url = baseurl + str(start) + '/' + str(start+1)
    # Retrieve the page content
text = 'None'
try:
        # Time out if the page takes more than 30 seconds to open
document = urlopen(url, None, 30, context=ctx)
text = document.read().decode()
        # Handle the various errors:
        # a status code other than 200 means the request failed
if document.getcode() != 200:
print("Error code=", document.getcode(), url)
break
    # Exit with Ctrl+C
except KeyboardInterrupt:
print('')
print('Program interrupted by user...')
break
    # Any other exception
except Exception as e:
print("Unable to retrieve or parse page", url)
print("Error", e)
fail = fail + 1
continue
    # If the text does not start with "From", the content is malformed
if not text.startswith('From'):
print(text)
print("Did not find From ")
fail = fail + 1
if fail > 5: break
continue
    # Find the boundary between the header and the body
pos = text.find("\n\n")
if pos > 0:
header = text[:pos]
body = text[pos+2:]
else:
        # Malformed content
print(text)
print("Could not find break between headers and body")
fail += 1
continue
    # Start processing the message
count += 1
    # Use regular expressions to extract the email, sent_at and subject values
# From: "Glenn R. Golden" <ggolden@umich.edu>
emails = re.findall('From: .* <(.+@.+)>', header)
if len(emails) == 1:
email = emails[0]
email = email.strip().lower()
else:
emails = re.findall('From: .* (.+@.+) ', header)
if len(emails) == 1:
email = emails[0]
email = email.strip().lower()
    sent_at = None
y = re.findall('Date: .*, (.*)', header)
if len(y) == 1:
tdate = y[0]
tdate = tdate[:26]
try:
sent_at = parsemaildate(tdate)
except:
print(text)
print("Parse fail", tdate)
fail = fail + 1
if fail > 5: break
continue
subject = None
z = re.findall('Subject: (.*)', header)
    if len(z) == 1:
        subject = z[0].strip().lower()
# Reset the fail counter
fail = 0
print(" ",start, email, sent_at, subject)
cur.execute('''INSERT OR IGNORE INTO Messages (id, email, sent_at, subject, headers, body)
VALUES ( ?, ?, ?, ?, ?, ? )''', (start, email, sent_at, subject, header, body))
if count % 50 == 0: conn.commit()
if count % 100 == 0: time.sleep(1)
conn.commit()
cur.close()
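# Hedged sketch (editor's addition): reading back what the crawler stored. Only the
# Messages schema created above is assumed; a new connection is opened because the one
# above has already been used up.
def summarize_messages(db_path='content.sqlite'):
    check_conn = sqlite3.connect(db_path)
    check_cur = check_conn.cursor()
    check_cur.execute('SELECT COUNT(*), MAX(id) FROM Messages')
    total, max_id = check_cur.fetchone()
    check_cur.close()
    check_conn.close()
    return total, max_id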
| 25.323944
| 94
| 0.555617
|
7ac63e86f253fc09da9092a43d2a4f568107587f
| 1,791
|
py
|
Python
|
administration/src/embedded/mfrc_service.py
|
shivamvku/flakrfid
|
559198d23907eea6e87f38fac1c5fb5c2b6fbca8
|
[
"MIT"
] | null | null | null |
administration/src/embedded/mfrc_service.py
|
shivamvku/flakrfid
|
559198d23907eea6e87f38fac1c5fb5c2b6fbca8
|
[
"MIT"
] | 1
|
2019-05-13T16:19:36.000Z
|
2019-05-19T11:21:22.000Z
|
administration/src/embedded/mfrc_service.py
|
shivamvku/flakrfid
|
559198d23907eea6e87f38fac1c5fb5c2b6fbca8
|
[
"MIT"
] | null | null | null |
def load_src(name, fdir, fpath):
import os, imp
res_full_path = os.path.join(
os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))), fdir, fpath)
return imp.load_source(name, res_full_path)
load_src("MFRC522", "rfid_python_lib", "MFRC522.py")
import MFRC522
import time
class ServiceMFRC:
def __init__(self):
self.continue_reading = True
self.MIFAREReader = MFRC522.MFRC522()
self.message = ""
self.counter = 30
def end_read(self):
self.continue_reading = False
self.counter = -1
print "Ctrl+C captured, ending read."
self.MIFAREReader.GPIO_CLEEN()
def do_read(self):
self.continue_reading = True
while self.continue_reading and self.counter > 0:
# print('Reader TTL: %s' % self.counter)
(status, TagType) = self.MIFAREReader.MFRC522_Request(self.MIFAREReader.PICC_REQIDL)
if status == self.MIFAREReader.MI_OK:
self.message += "Card detected. "
(status, backData) = self.MIFAREReader.MFRC522_Anticoll()
if status == self.MIFAREReader.MI_OK:
self.message += (
"Card read UID: " + str(backData[0]) + "," + str(backData[1]) + "," + str(backData[2]) + "," + str(
backData[3]) + "," + str(backData[4])
)
self.end_read()
return {
'message': self.message,
'data': reduce(lambda x, y: str(x) + str(y), backData)
}
self.counter -= 1
time.sleep(0.5)
print('No tag data found...')
return {
'message': 'No tag data detected...',
'data': '00000000'
}
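# Hedged usage sketch (editor's addition): how a caller might drive ServiceMFRC. do_read()
# polls the reader up to 30 times at 0.5 s intervals before giving up, as coded above.
def example_read():
    service = ServiceMFRC()
    result = service.do_read()
    return result['message'], result['data']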
| 35.117647
| 119
| 0.546622
|
cde50345c13d57d925371087799b61ab1bb4b186
| 20,791
|
py
|
Python
|
src/onegov/winterthur/daycare.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/winterthur/daycare.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/winterthur/daycare.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
import chameleon
import textwrap
import yaml
from babel.numbers import format_decimal
from cached_property import cached_property
from collections import defaultdict
from collections import OrderedDict
from decimal import Decimal, localcontext
from onegov.core.utils import Bunch
from onegov.core.utils import normalize_for_url
from onegov.directory import DirectoryCollection
from onegov.form import Form
from onegov.org.models import Organisation
from onegov.org.models.directory import ExtendedDirectoryEntryCollection
from onegov.winterthur import _
from ordered_set import OrderedSet
from wtforms.fields import Field, BooleanField, SelectField
from wtforms.fields.html5 import DecimalField
from wtforms.validators import NumberRange, InputRequired, ValidationError
from wtforms.widgets.core import HTMLString
SERVICE_DAYS = {
'mo': 0,
'di': 1,
'mi': 2,
'do': 3,
'fr': 4,
'sa': 5,
'so': 6,
}
SERVICE_DAYS_LABELS = {
0: _("Monday"),
1: _("Tuesday"),
2: _("Wednesday"),
3: _("Thursday"),
4: _("Friday"),
5: _("Saturday"),
6: _("Sunday"),
}
# http://babel.pocoo.org/en/latest/numbers.html#pattern-syntax
FORMAT = '#,##0.00########'
def round_to(n, precision):
assert isinstance(precision, str)
precision = Decimal(precision)
correction = Decimal('0.5') if n >= 0 else Decimal('-0.5')
return int(n / precision + correction) * precision
def format_precise(amount):
if not amount:
return '0.00'
with localcontext() as ctx:
ctx.prec = 28
return format_decimal(amount, format=FORMAT, locale='de_CH')
def format_1_cent(amount):
return format_precise(round_to(amount, '0.01'))
def format_5_cents(amount):
return format_precise(round_to(amount, '0.05'))
class Daycare(object):
def __init__(self, id, title, rate, weeks):
self.id = id
self.title = title
self.rate = Decimal(rate)
self.weeks = weeks
@property
def factor(self):
return Decimal(self.weeks) / Decimal('12')
class Services(object):
def __init__(self, definition):
if definition:
self.available = OrderedDict(self.parse_definition(definition))
else:
self.available = OrderedDict()
self.selected = defaultdict(set)
@classmethod
def from_org(cls, org):
if 'daycare_settings' not in org.meta:
return cls(None)
if 'services' not in org.meta['daycare_settings']:
return cls(None)
return cls(org.meta['daycare_settings']['services'])
@classmethod
def from_session(cls, session):
return cls.from_org(session.query(Organisation).one())
@staticmethod
def parse_definition(definition):
for service in yaml.safe_load(definition):
service_id = normalize_for_url(service['titel'])
days = (d.strip() for d in service['tage'].split(','))
yield service_id, Bunch(
id=service_id,
title=service['titel'],
percentage=Decimal(service['prozent']),
days=OrderedSet(SERVICE_DAYS[d.lower()[:2]] for d in days),
)
def select(self, service_id, day):
self.selected[service_id].add(day)
def deselect(self, service_id, day):
self.selected[service_id].remove(day)
def is_selected(self, service_id, day):
if service_id not in self.selected:
return False
return day in self.selected[service_id]
@property
def total(self):
""" Returns the total percentage of used services. """
return sum(
self.available[s].percentage * len(self.selected[s])
for s in self.selected
)
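# Hedged sketch (editor's addition): a services definition in the YAML shape consumed by
# Services.parse_definition above (German keys 'titel', 'tage', 'prozent'; day names are
# matched on their first two letters). The concrete values are invented for illustration,
# and the service id 'ganzer-tag' assumes normalize_for_url lowercases and hyphenates.
EXAMPLE_SERVICES_DEFINITION = """
- titel: Ganzer Tag
  tage: Montag, Dienstag, Mittwoch, Donnerstag, Freitag
  prozent: 100
- titel: Halber Tag
  tage: Montag, Dienstag
  prozent: 50
"""
def _example_services():
    services = Services(EXAMPLE_SERVICES_DEFINITION)
    services.select('ganzer-tag', 0)  # book the full-day service on Monday
    return services.total             # Decimal('100') for this single selection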
class Result(object):
def __init__(self, title, amount=None, note=None, operation=None,
important=False, currency='CHF', output_format=None):
self.title = title
self.amount = amount
self.note = textwrap.dedent(note or '').strip(' \n')
self.operation = operation
self.important = important
self.currency = currency
self.output_format = output_format or format_1_cent
def __bool__(self):
return bool(self.amount)
@property
def readable_amount(self):
return self.output_format(self.amount)
class Block(object):
def __init__(self, id, title):
self.id = id
self.title = title
self.results = []
self.total = Decimal(0)
def op(self, title, amount=None, note=None, operation=None,
important=False, currency='CHF', output_format=None,
total_places=2, amount_places=2):
if amount == 0:
amount = Decimal('0')
def limit_total(total):
return total.quantize(Decimal(f'0.{"0" * (total_places - 1)}1'))
def limit_amount(amount):
return amount.quantize(Decimal(f'0.{"0" * (amount_places - 1)}1'))
if operation is None:
assert amount is not None
self.total = amount
elif operation == '+':
assert amount is not None
self.total += amount
elif operation == '=':
amount = self.total if amount is None else amount
self.total = max(amount, Decimal('0'))
elif operation == '-':
assert amount is not None
self.total -= amount
elif operation in ('*', 'x', '×', '⋅'):
assert amount is not None
self.total *= amount
elif operation in ('/', '÷'):
assert amount is not None
self.total /= amount
# limit the amount and the total after the operation, not before
self.total = limit_total(self.total)
amount = limit_amount(amount)
self.results.append(Result(
title=title,
amount=amount,
note=note,
operation=operation,
important=important,
currency=currency,
output_format=output_format,
))
return self.total
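# Hedged sketch (editor's addition): how Block.op chains a small calculation. The amounts
# are invented; the operations ('+', '-', '=') are a subset of those handled above.
def _example_block():
    block = Block('example', "Beispielrechnung")
    block.op(title="Start", amount=Decimal('100'))
    block.op(title="Zuschlag", amount=Decimal('25'), operation='+')
    block.op(title="Rabatt", amount=Decimal('10'), operation='-')
    return block.op(title="Total", operation='=')  # Decimal('115.00')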
class DirectoryDaycareAdapter(object):
def __init__(self, directory):
self.directory = directory
@cached_property
def fieldmap(self):
fieldmap = {
'daycare_rate': None,
'daycare_weeks': None,
'daycare_url': None,
}
for field in self.directory.basic_fields:
if 'tarif' in field.label.lower():
fieldmap['daycare_rate'] = field.id
continue
if 'woche' in field.label.lower():
fieldmap['daycare_weeks'] = field.id
continue
if 'web' in field.label.lower():
fieldmap['daycare_url'] = field.id
continue
return fieldmap
def as_daycare(self, entry):
return Daycare(
id=entry.id,
title=entry.title,
rate=entry.values[self.fieldmap['daycare_rate']],
weeks=entry.values[self.fieldmap['daycare_weeks']],
)
class Settings(object):
def __init__(self, organisation):
settings = organisation.meta.get('daycare_settings', {})
for key, value in settings.items():
setattr(self, key, value)
def is_valid(self):
keys = (
'directory',
'max_income',
'max_rate',
'max_subsidy',
'max_wealth',
'min_income',
'min_rate',
'rebate',
'services',
'wealth_premium',
)
for key in keys:
if not hasattr(self, key):
return False
return True
def factor(self, daycare):
min_day_rate = daycare.rate - self.min_rate
min_day_rate = min(min_day_rate, self.max_subsidy)
factor = min_day_rate / (self.max_income - self.min_income)
factor = factor.quantize(Decimal('0.000000001'))
return factor
class DaycareSubsidyCalculator(object):
def __init__(self, session):
self.session = session
@cached_property
def organisation(self):
return self.session.query(Organisation).one()
@cached_property
def settings(self):
return Settings(self.organisation)
@cached_property
def directory(self):
return DirectoryCollection(self.session).by_id(self.settings.directory)
@cached_property
def daycares(self):
adapter = DirectoryDaycareAdapter(self.directory)
items = ExtendedDirectoryEntryCollection(self.directory).query()
items = (i for i in items if i.access == 'public')
items = {i.id.hex: adapter.as_daycare(i) for i in items}
return items
def daycare_by_title(self, title):
return next(d for d in self.daycares.values() if d.title == title)
def calculate(self, *args, **kwargs):
return self.calculate_precisely(*args, **kwargs)
def calculate_precisely(self, daycare, services, income, wealth, rebate):
""" Creates a detailed calculation of the subsidy paid by Winterthur.
        The result is a list of tables with explanations.
:param daycare:
The selected daycare (a :class:`Daycare` instance).
:param services:
Services used (a :class:`Services` instance)
:param income:
The income as a decimal.
:param wealth:
The wealth as decimal.
:param rebate:
True if a rebate is applied
Note, due to the specific nature of the content here, which is probably
not going to be translated, we use German. For consistency we want to
limit this, but with Winterthur these kinds of things crop up as the
wording is quite specific and adding translations would just make
this a lot harder.
"""
cfg = self.settings
fmt = format_precise
# Base Rate
# ---------
base = Block('base', "Berechnungsgrundlage für die Elternbeiträge")
base.op(
title="Steuerbares Einkommen",
amount=income,
note="""
Steuerbares Einkommen gemäss letzter Veranlagung.
""")
base.op(
title="Vermögenszuschlag",
amount=max(
(wealth - cfg.max_wealth)
* cfg.wealth_premium
/ Decimal('100'),
0),
operation="+",
note=f"""
Der Vermögenszuschlag beträgt
{fmt(cfg.wealth_premium).rstrip('0').rstrip('.')}% des
Vermögens, für das tatsächlich Steuern anfallen
(ab {fmt(cfg.max_wealth)} CHF).
""")
base.op(
title="Massgebendes Gesamteinkommen",
operation="=")
base.op(
title="Abzüglich Minimaleinkommen",
operation="-",
amount=cfg.min_income)
base.op(
title="Berechnungsgrundlage",
operation="=")
# Gross Contribution
# ------------------
gross = Block('gross', "Berechnung des Brutto-Elternbeitrags")
gross.op(
title="Übertrag",
amount=base.total)
gross.op(
title="Faktor",
amount=cfg.factor(daycare),
currency=None,
operation="×",
note="""
Ihr Elternbeitrag wird aufgrund eines Faktors berechnet
(Kita-Reglement Art. 20 Abs 3).
""",
output_format=format_precise,
amount_places=10)
gross.op(
title="Einkommensabhängiger Elternbeitragsbestandteil",
operation="=")
gross.op(
title="Mindestbeitrag Eltern",
amount=cfg.min_rate,
operation="+")
gross.op(
title="Elternbeitrag brutto",
operation="=",
amount=min(gross.total, daycare.rate))
# Rebate
# ------
rebate = gross.total * cfg.rebate / 100 if rebate else 0
net = Block('net', "Berechnung des Rabatts")
net.op(
title="Übertrag",
amount=gross.total)
net.op(
title="Rabatt",
amount=rebate,
operation="-",
note=f"""
Bei einem Betreuungsumfang von insgesamt mehr als 2 ganzen
Tagen pro Woche gilt ein Rabatt von
{fmt(cfg.rebate).rstrip('0').rstrip('.')}%.
""")
net.op(
title="Elternbeitrag netto",
operation="=",
amount=max(cfg.min_rate, gross.total - rebate))
# Actual contribution
# -------------------
actual = Block('actual', (
"Berechnung des Elternbeitrags und des "
"städtischen Beitrags pro Tag"
))
actual.op(
title="Übertrag",
amount=net.total)
actual.op(
title="Zusatzbeitrag Eltern",
amount=max(daycare.rate - cfg.max_rate, 0),
operation="+",
note=f"""
Zusatzbeitrag für Kitas, deren Tagestarif über
{cfg.max_rate} CHF liegt.
""")
parent_share_per_day = actual.op(
title="Elternbeitrag pro Tag",
operation="=",
note="""
Ihr Beitrag pro Tag (100%) und Kind.
""",
important=True)
city_share_per_day = actual.op(
title="Städtischer Beitrag pro Tag",
amount=max(daycare.rate - parent_share_per_day, Decimal('0.00')),
important=True,
note="""
Städtischer Beitrag für Ihr Kind pro Tag.
""")
# Monthly contribution
# --------------------
monthly = Block(
'monthly', (
"Berechnung des Elternbeitrags und des städtischen "
"Beitrags pro Monat"
)
)
monthly.op(
title="Wochentarif",
amount=parent_share_per_day * services.total / 100,
note="""
Wochentarif: Elternbeiträge der gewählten Betreuungstage.
""")
monthly.op(
title="Faktor",
amount=daycare.factor,
currency=None,
operation="×",
note="""
Faktor für jährliche Öffnungswochen Ihrer Kita.
""",
output_format=format_precise,
amount_places=4)
parent_share_per_month = monthly.op(
title="Elternbeitrag pro Monat",
operation="=",
important=True,
output_format=format_5_cents)
city_share_per_month = monthly.op(
title="Städtischer Beitrag pro Monat",
amount=city_share_per_day * services.total / 100 * daycare.factor,
important=True,
output_format=format_5_cents)
# Services table
# --------------
def services_table():
total = Decimal(0)
total_percentage = Decimal(0)
for day in SERVICE_DAYS.values():
for service_id in services.selected:
if day in services.selected[service_id]:
service = services.available[service_id]
cost = parent_share_per_day * service.percentage / 100
total += cost
total_percentage += service.percentage
label = SERVICE_DAYS_LABELS[day]
yield (label, service.title, format_5_cents(cost))
yield (_("Total"), None, format_5_cents(total))
total = round_to(parent_share_per_month, '0.05')\
+ round_to(city_share_per_month, '0.05')
return Bunch(
blocks=(base, gross, net, actual, monthly),
parent_share_per_month=format_5_cents(parent_share_per_month),
city_share_per_month=format_5_cents(city_share_per_month),
total_per_month=format_5_cents(total),
agenda=tuple(services_table()),
)
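# Hedged usage sketch (editor's addition): driving the calculator end to end. The daycare
# title and the income/wealth figures are invented; the arguments are those documented in
# calculate_precisely above, and the returned Bunch exposes the fields built just above.
def _example_calculation(session):
    calculator = DaycareSubsidyCalculator(session)
    daycare = calculator.daycare_by_title('Kita Beispiel')
    services = Services.from_session(session)
    result = calculator.calculate(
        daycare, services, income=Decimal('75000'), wealth=Decimal('100000'), rebate=True)
    return result.parent_share_per_month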
class DaycareServicesWidget(object):
template = chameleon.PageTemplate("""
<table class="daycare-services">
<thead>
<tr>
<th></th>
<th tal:repeat="service this.services.available.values()">
<div class="daycare-services-title">
${service.title}
</div>
<div class="daycare-services-percentage">
${service.percentage}%
</div>
</th>
</tr>
</thead>
<tbody>
<tr tal:repeat="day this.days">
<th>
<strong class="show-for-small-only">
${this.day_label(day)[:2]}
</strong>
<strong class="show-for-medium-up">
${this.day_label(day)}
</strong>
</th>
<td tal:repeat="svc this.services.available.values()">
<label>
<input
type="checkbox"
id="${svc.id}-${day}"
name="${this.field.name}"
value="${svc.id}-${day}"
tal:attributes="
checked this.is_selected(svc, day)
"
/>
</label>
</td>
</tr>
</tbody>
</table
""")
def __call__(self, field, **kwargs):
self.field = field
self.services = field.services
return HTMLString(self.template.render(this=self))
def is_selected(self, service, day):
return self.services.is_selected(service.id, day)
def day_label(self, day):
return self.field.meta.request.translate(SERVICE_DAYS_LABELS[day])
@property
def days(self):
days = OrderedSet()
for service in self.services.available.values():
for day in service.days:
days.add(day)
return days
class DaycareServicesField(Field):
widget = DaycareServicesWidget()
@cached_property
def services(self):
return Services.from_session(self.meta.request.session)
def process_formdata(self, valuelist):
for value in valuelist:
service_id, day = value.rsplit('-', maxsplit=1)
self.services.select(service_id, int(day))
def pre_validate(self, form):
for day in SERVICE_DAYS.values():
days = sum(
1 for id in self.services.available
if self.services.is_selected(id, day)
)
if days > 1:
raise ValidationError(_("Each day may only be selected once."))
class DaycareSubsidyCalculatorForm(Form):
daycare = SelectField(
label=_("Select Daycare"),
validators=(InputRequired(), ),
choices=(), )
services = DaycareServicesField(
label=_("Care"),
validators=(InputRequired(), ))
income = DecimalField(
label=_("Definite Taxable income"),
validators=(InputRequired(), NumberRange(min=0)))
wealth = DecimalField(
label=_("Definite Taxable wealth"),
validators=(InputRequired(), NumberRange(min=0)))
rebate = BooleanField(
label=_("Rebate"),
description=_(
"Does at least one child in your household attend the same "
"daycare for more than two whole days a week?"
))
def on_request(self):
self.daycare.choices = tuple(self.daycare_choices)
@property
def daycare_choices(self):
def choice(daycare):
label = _((
"${title} / day rate CHF ${rate} / "
"${weeks} weeks open per year"
), mapping={
'title': daycare.title,
'rate': daycare.rate,
'weeks': daycare.weeks
})
return (daycare.id.hex, self.request.translate(label))
for daycare in self.model.daycares.values():
yield choice(daycare)
@property
def selected_daycare(self):
for daycare in self.model.daycares.values():
if daycare.id.hex == self.daycare.data:
return daycare
| 28.876389
| 79
| 0.547304
|
d54b5accf069f15dabd1388e391b4b8ccf41319e
| 24,131
|
py
|
Python
|
models/relu_not_concat.py
|
dishen12/RFB_aspp
|
d968ad3cca1ff048212bc2d0c179557edfd1241c
|
[
"MIT"
] | null | null | null |
models/relu_not_concat.py
|
dishen12/RFB_aspp
|
d968ad3cca1ff048212bc2d0c179557edfd1241c
|
[
"MIT"
] | null | null | null |
models/relu_not_concat.py
|
dishen12/RFB_aspp
|
d968ad3cca1ff048212bc2d0c179557edfd1241c
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from layers import *
import torchvision.transforms as transforms
import torchvision.models as models
import torch.backends.cudnn as cudnn
import os
class BasicConv(nn.Module):
def __init__(self, in_planes, out_planes, kernel_size, stride=1, padding=0, dilation=1, groups=1, relu=True, bn=True, bias=False):
super(BasicConv, self).__init__()
self.out_channels = out_planes
self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias)
self.bn = nn.BatchNorm2d(out_planes,eps=1e-5, momentum=0.01, affine=True) if bn else None
self.relu = nn.ReLU(inplace=True) if relu else None
def forward(self, x):
x = self.conv(x)
if self.bn is not None:
x = self.bn(x)
if self.relu is not None:
x = self.relu(x)
return x
class Aspp_b_2_mid_concat_relu(nn.Module):
"""
    ASPP combining serial (stacked) and parallel (branch) dilated convolutions; each level
    branches out, similar to an FPN. Every layer adds BN; in this variant a ReLU is applied
    between the stacked dilated convolutions and the intermediate outputs are concatenated
    stage by stage before the final 1x1 convolution.
"""
def __init__(self,in_planes,out_planes,stride=1,scale=0.1,rate=[6,3,2,1]):
#rate 1 2 5 9
# 2 4 10 18
# 3 6 15 27
super(Aspp_b_2_mid_concat_relu,self).__init__()
self.scale = scale
self.out_channels = out_planes
self.rate = rate
        inter_planes = in_planes // 8  # this divisor may need fine-tuning; it was originally 8
if(len(rate)==4):
self.branch0 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[0], dilation=rate[0], relu=False)
)
self.branch0_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[0], dilation=2*rate[0], relu=False)
self.branch0_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[0], dilation=3*rate[0], relu=False)
self.branch1 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[1], dilation=rate[1], relu=False))
self.branch1_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[1], dilation=2*rate[1], relu=False)
self.branch1_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[1], dilation=3*rate[1], relu=False)
self.branch2 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[2], dilation=rate[2], relu=False))
self.branch2_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[2], dilation=2*rate[2], relu=False)
self.branch2_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[2], dilation=3*rate[2], relu=False)
self.branch3 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[3], dilation=rate[3], relu=False))
self.branch3_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[3], dilation=2*rate[3], relu=False)
self.branch3_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[3], dilation=3*rate[3], relu=False)
self.ConvLinear = BasicConv(24*inter_planes,out_planes,kernel_size=1,stride=1,relu=False)
self.shortcut = BasicConv(in_planes,out_planes,kernel_size=1,stride=stride, relu=False)
self.relu = nn.ReLU(inplace=False)
elif(len(rate)==3):
self.branch0 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[0], dilation=rate[0], relu=False)
)
self.branch0_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[0], dilation=2*rate[0], relu=False)
self.branch0_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[0], dilation=3*rate[0], relu=False)
self.branch1 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[1], dilation=rate[1], relu=False))
self.branch1_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[1], dilation=2*rate[1], relu=False)
self.branch1_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[1], dilation=3*rate[1], relu=False)
self.branch2 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[2], dilation=rate[2], relu=False))
            self.branch2_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[2], dilation=2*rate[2], relu=False)
            self.branch2_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[2], dilation=3*rate[2], relu=False)
self.ConvLinear = BasicConv(18*inter_planes,out_planes,kernel_size=1,stride=1,relu=False)
self.shortcut = BasicConv(in_planes,out_planes,kernel_size=1,stride=stride, relu=False)
self.relu = nn.ReLU(inplace=False)
else:
print("error! the rate is incorrect!")
def forward(self,x):
# some thing there
if(len(self.rate)==4):
x0 = self.branch0(x)
x0_r = self.relu(x0)
x01 = self.branch0_1(x0_r)
x01_r = self.relu(x01)
x02 = self.branch0_2(x01_r)
#print("0",x0.size(),x01.size(),x02.size())
x1 = self.branch1(x)
x1_r = self.relu(x1)
x11 = self.branch1_1(x1_r)
x11_r = self.relu(x11)
x12 = self.branch1_2(x11_r)
#print("1",x1.size(),x11.size(),x12.size())
x2 = self.branch2(x)
x2_r = self.relu(x2)
#print("x2",x2.size())
x21 = self.branch2_1(x2_r)
x21_r = self.relu(x21)
#print("x21",x21.size())
x22 = self.branch2_2(x21_r)
#print("x22",x22.size())
#print("2",x2.size(),x21.size(),x22.size())
x3 = self.branch3(x)
x3_r = self.relu(x3)
x31 = self.branch3_1(x3_r)
x31_r = self.relu(x31)
x32 = self.branch3_2(x31_r)
#print("3",x3.size(),x31.size(),x32.size())
#mid concat
out1 = torch.cat((x0,x1,x2,x3),1)
#out1 = self.relu(out1)
out2 = torch.cat((x01,x11,x21,x31),1)
#out2 = self.relu(out2)
out3 = torch.cat((x02,x12,x22,x32),1)
#out3 = self.relu(out3)
out = torch.cat((out1,out2,out3),1)
#out = torch.cat((x0,x01,x02,x1,x11,x12,x2,x21,x22,x3,x31,x32),1)
out = self.ConvLinear(out)
short = self.shortcut(x)
#print("the size of shortcut is:",short.size())
out = out*self.scale + short
out = self.relu(out)
return out
elif(len(self.rate)==3):
x0 = self.branch0(x)
x01 = self.branch0_1(x0)
x02 = self.branch0_2(x01)
x1 = self.branch1(x)
x11 = self.branch1_1(x1)
x12 = self.branch1_2(x11)
x2 = self.branch2(x)
x21 = self.branch2_1(x2)
x22 = self.branch2_2(x21)
out = torch.cat((x0,x01,x02,x1,x11,x12,x2,x21,x22),1)
out = self.ConvLinear(out)
short = self.shortcut(x)
out = out*self.scale + short
out = self.relu(out)
return out
else:
print("error!")
return
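# Hedged sketch (editor's addition): a quick shape check of the block defined above. The
# channel count and input size are invented; with stride=1 and matching padding/dilation
# the spatial size is preserved.
def _aspp_smoke_test():
    block = Aspp_b_2_mid_concat_relu(512, 512, stride=1, scale=1, rate=[6, 3, 2, 1])
    x = torch.randn(1, 512, 38, 38)
    out = block(x)
    return out.size()  # expected: torch.Size([1, 512, 38, 38])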
class Aspp_b_2(nn.Module):
"""
    ASPP combining serial (stacked) and parallel (branch) dilated convolutions; each level
    branches out, similar to an FPN. Note that every layer adds BN but no ReLU; a ReLU is
    only applied at the very end.
"""
def __init__(self,in_planes,out_planes,stride=1,scale=0.1,rate=[6,3,2,1]):
#rate 1 2 5 9
# 2 4 10 18
# 3 6 15 27
super(Aspp_b_2,self).__init__()
self.scale = scale
self.out_channels = out_planes
self.rate = rate
        inter_planes = in_planes // 8  # this divisor may need fine-tuning; it was originally 8
if(len(rate)==4):
self.branch0 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[0], dilation=rate[0], relu=False)
)
self.branch0_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[0], dilation=2*rate[0], relu=False)
self.branch0_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[0], dilation=3*rate[0], relu=False)
self.branch1 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[1], dilation=rate[1], relu=False))
self.branch1_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[1], dilation=2*rate[1], relu=False)
self.branch1_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[1], dilation=3*rate[1], relu=False)
self.branch2 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[2], dilation=rate[2], relu=False))
self.branch2_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[2], dilation=2*rate[2], relu=False)
self.branch2_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[2], dilation=3*rate[2], relu=False)
self.branch3 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[3], dilation=rate[3], relu=False))
self.branch3_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[3], dilation=2*rate[3], relu=False)
self.branch3_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[3], dilation=3*rate[3], relu=False)
self.ConvLinear = BasicConv(24*inter_planes,out_planes,kernel_size=1,stride=1,relu=False)
self.shortcut = BasicConv(in_planes,out_planes,kernel_size=1,stride=stride, relu=False)
self.relu = nn.ReLU(inplace=False)
elif(len(rate)==3):
self.branch0 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[0], dilation=rate[0], relu=False)
)
self.branch0_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[0], dilation=2*rate[0], relu=False)
self.branch0_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[0], dilation=3*rate[0], relu=False)
self.branch1 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[1], dilation=rate[1], relu=False))
self.branch1_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[1], dilation=2*rate[1], relu=False)
self.branch1_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[1], dilation=3*rate[1], relu=False)
self.branch2 = nn.Sequential(
BasicConv(in_planes, 2*inter_planes, kernel_size=1, stride=stride),
BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=rate[2], dilation=rate[2], relu=False))
            self.branch2_1 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=2*rate[2], dilation=2*rate[2], relu=False)
            self.branch2_2 = BasicConv(2*inter_planes, 2*inter_planes, kernel_size=3, stride=1, padding=3*rate[2], dilation=3*rate[2], relu=False)
self.ConvLinear = BasicConv(18*inter_planes,out_planes,kernel_size=1,stride=1,relu=False)
self.shortcut = BasicConv(in_planes,out_planes,kernel_size=1,stride=stride, relu=False)
self.relu = nn.ReLU(inplace=False)
else:
print("error! the rate is incorrect!")
def forward(self,x):
# some thing there
if(len(self.rate)==4):
x0 = self.branch0(x)
x01 = self.branch0_1(x0)
x02 = self.branch0_2(x01)
#print("0",x0.size(),x01.size(),x02.size())
x1 = self.branch1(x)
x11 = self.branch1_1(x1)
x12 = self.branch1_2(x11)
#print("1",x1.size(),x11.size(),x12.size())
x2 = self.branch2(x)
#print("x2",x2.size())
x21 = self.branch2_1(x2)
#print("x21",x21.size())
x22 = self.branch2_2(x21)
#print("x22",x22.size())
#print("2",x2.size(),x21.size(),x22.size())
x3 = self.branch3(x)
x31 = self.branch3_1(x3)
x32 = self.branch3_2(x31)
#print("3",x3.size(),x31.size(),x32.size())
out = torch.cat((x0,x01,x02,x1,x11,x12,x2,x21,x22,x3,x31,x32),1)
out = self.ConvLinear(out)
short = self.shortcut(x)
#print("the size of shortcut is:",short.size())
out = out*self.scale + short
out = self.relu(out)
return out
elif(len(self.rate)==3):
x0 = self.branch0(x)
x01 = self.branch0_1(x0)
x02 = self.branch0_2(x01)
x1 = self.branch1(x)
x11 = self.branch1_1(x1)
x12 = self.branch1_2(x11)
x2 = self.branch2(x)
x21 = self.branch2_1(x2)
x22 = self.branch2_2(x21)
out = torch.cat((x0,x01,x02,x1,x11,x12,x2,x21,x22),1)
out = self.ConvLinear(out)
short = self.shortcut(x)
out = out*self.scale + short
out = self.relu(out)
return out
else:
print("error!")
return
class RFBNet(nn.Module):
"""RFB Net for object detection
The network is based on the SSD architecture.
Each multibox layer branches into
1) conv2d for class conf scores
2) conv2d for localization predictions
3) associated priorbox layer to produce default bounding
boxes specific to the layer's feature map size.
See: https://arxiv.org/pdf/1711.07767.pdf for more details on RFB Net.
Args:
phase: (string) Can be "test" or "train"
base: VGG16 layers for input, size of either 300 or 512
extras: extra layers that feed to multibox loc and conf layers
head: "multibox head" consists of loc and conf conv layers
"""
def __init__(self, phase, size, base, extras, head, num_classes,Rate=[9,5,2,1]):
super(RFBNet, self).__init__()
self.phase = phase
self.num_classes = num_classes
self.size = size
if size == 300:
self.indicator = 3
elif size == 512:
self.indicator = 5
else:
print("Error: Sorry only SSD300 and SSD512 are supported!")
return
# vgg network
self.base = nn.ModuleList(base)
# conv_4
#self.Norm = BasicRFB_a(512,512,stride = 1,scale=1.0)
self.Norm = Aspp_b_2_mid_concat_relu(512,512,stride=1,scale=1,rate=Rate)
#self.aspp_a_7 = Aspp_b_2(1024,1024,stride=1,scale=1,rate=Rate)
self.extras = nn.ModuleList(extras)
self.loc = nn.ModuleList(head[0])
self.conf = nn.ModuleList(head[1])
if self.phase == 'test':
self.softmax = nn.Softmax(dim=-1)
def forward(self, x):
"""Applies network layers and ops on input image(s) x.
Args:
x: input image or batch of images. Shape: [batch,3*batch,300,300].
Return:
Depending on phase:
test:
list of concat outputs from:
1: softmax layers, Shape: [batch*num_priors,num_classes]
2: localization layers, Shape: [batch,num_priors*4]
3: priorbox layers, Shape: [2,num_priors*4]
train:
list of concat outputs from:
1: confidence layers, Shape: [batch*num_priors,num_classes]
2: localization layers, Shape: [batch,num_priors*4]
3: priorbox layers, Shape: [2,num_priors*4]
"""
sources = list()
loc = list()
conf = list()
# apply vgg up to conv4_3 relu
for k in range(23):
x = self.base[k](x)
#s = self.Norm(x)
s = self.Norm(x)
sources.append(s)
# apply vgg up to fc7
for k in range(23, len(self.base)):
x = self.base[k](x)
# apply extra layers and cache source layer outputs
for k, v in enumerate(self.extras):
x = v(x)
if k < self.indicator or k%2 ==0:
sources.append(x)
# apply multibox head to source layers
for (x, l, c) in zip(sources, self.loc, self.conf):
loc.append(l(x).permute(0, 2, 3, 1).contiguous())
conf.append(c(x).permute(0, 2, 3, 1).contiguous())
#print([o.size() for o in loc])
loc = torch.cat([o.view(o.size(0), -1) for o in loc], 1)
conf = torch.cat([o.view(o.size(0), -1) for o in conf], 1)
if self.phase == "test":
output = (
loc.view(loc.size(0), -1, 4), # loc preds
self.softmax(conf.view(-1, self.num_classes)), # conf preds
)
else:
output = (
loc.view(loc.size(0), -1, 4),
conf.view(conf.size(0), -1, self.num_classes),
)
return output
def load_weights(self, base_file):
other, ext = os.path.splitext(base_file)
if ext == '.pkl' or '.pth':
print('Loading weights into state dict...')
self.load_state_dict(torch.load(base_file))
print('Finished!')
else:
print('Sorry only .pth and .pkl files supported.')
# This function is derived from torchvision VGG make_layers()
# https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py
def vgg(cfg, i, batch_norm=False):
layers = []
in_channels = i
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
elif v == 'C':
layers += [nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True)]
else:
conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
if batch_norm:
layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
else:
layers += [conv2d, nn.ReLU(inplace=True)]
in_channels = v
pool5 = nn.MaxPool2d(kernel_size=3, stride=1, padding=1)
conv6 = nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6)
conv7 = nn.Conv2d(1024, 1024, kernel_size=1)
layers += [pool5, conv6,
nn.ReLU(inplace=True), conv7, nn.ReLU(inplace=True)]
return layers
base = {
'300': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M',
512, 512, 512],
'512': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M',
512, 512, 512],
}
def add_extras(size, cfg, i, batch_norm=False,Rate=[6,3,2,1]):
# Extra layers added to VGG for feature scaling
layers = []
in_channels = i
flag = False
for k, v in enumerate(cfg):
if in_channels != 'S':
if v == 'S':
if in_channels == 256 and size == 512:
layers += [Aspp_b_2_mid_concat_relu(in_channels,cfg[k+1],stride=2,scale=1,rate=Rate)]
else:
layers += [Aspp_b_2_mid_concat_relu(in_channels,cfg[k+1],stride=2,scale=1,rate=Rate)]
else:
layers += [Aspp_b_2_mid_concat_relu(in_channels,v,scale=1,rate=Rate)]
in_channels = v
if size == 512:
layers += [BasicConv(256,128,kernel_size=1,stride=1)]
layers += [BasicConv(128,256,kernel_size=4,stride=1,padding=1)]
elif size ==300:
layers += [BasicConv(256,128,kernel_size=1,stride=1)]
layers += [BasicConv(128,256,kernel_size=3,stride=1)]
layers += [BasicConv(256,128,kernel_size=1,stride=1)]
layers += [BasicConv(128,256,kernel_size=3,stride=1)]
else:
print("Error: Sorry only RFBNet300 and RFBNet512 are supported!")
return
return layers
extras = {
'300': [1024, 'S', 512, 'S', 256],
'512': [1024, 'S', 512, 'S', 256, 'S', 256,'S',256],
}
def multibox(size, vgg, extra_layers, cfg, num_classes):
loc_layers = []
conf_layers = []
vgg_source = [-2]
for k, v in enumerate(vgg_source):
if k == 0:
loc_layers += [nn.Conv2d(512,
cfg[k] * 4, kernel_size=3, padding=1)]
conf_layers +=[nn.Conv2d(512,
cfg[k] * num_classes, kernel_size=3, padding=1)]
else:
loc_layers += [nn.Conv2d(vgg[v].out_channels,
cfg[k] * 4, kernel_size=3, padding=1)]
conf_layers += [nn.Conv2d(vgg[v].out_channels,
cfg[k] * num_classes, kernel_size=3, padding=1)]
i = 1
indicator = 0
if size == 300:
indicator = 3
elif size == 512:
indicator = 5
else:
print("Error: Sorry only RFBNet300 and RFBNet512 are supported!")
return
for k, v in enumerate(extra_layers):
if k < indicator or k%2== 0:
loc_layers += [nn.Conv2d(v.out_channels, cfg[i]
* 4, kernel_size=3, padding=1)]
conf_layers += [nn.Conv2d(v.out_channels, cfg[i]
* num_classes, kernel_size=3, padding=1)]
i +=1
return vgg, extra_layers, (loc_layers, conf_layers)
mbox = {
'300': [6, 6, 6, 6, 4, 4], # number of boxes per feature map location
'512': [6, 6, 6, 6, 6, 4, 4],
}
def build_net(phase, size=300, num_classes=21,rate="6,3,2,1"):
Rate = [int(i) for i in rate.strip().split(",")]
print("the rate is ",Rate)
if phase != "test" and phase != "train":
print("Error: Phase not recognized")
return
if size != 300 and size != 512:
print("Error: Sorry only RFBNet300 and RFBNet512 are supported!")
return
return RFBNet(phase, size, *multibox(size, vgg(base[str(size)], 3),
add_extras(size, extras[str(size)], 1024,Rate=Rate),
mbox[str(size)], num_classes), num_classes,Rate)
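# Hedged usage sketch (editor's addition): constructing the detector via build_net exactly
# as defined above; 21 classes is the VOC-style default already used in the signature.
def _example_build():
    return build_net('train', size=300, num_classes=21, rate="6,3,2,1")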
| 47.974155
| 154
| 0.583482
|
a0704cc67924ee3176fe445c007dcfedda0569d7
| 4,583
|
py
|
Python
|
yardstick/benchmark/runners/iteration.py
|
mythwm/yardstick-wm
|
319ced11df92456b42c80cfd6e53c66dbd22a746
|
[
"Apache-2.0"
] | 1
|
2019-12-08T21:57:31.000Z
|
2019-12-08T21:57:31.000Z
|
yardstick/benchmark/runners/iteration.py
|
mythwm/yardstick-wm
|
319ced11df92456b42c80cfd6e53c66dbd22a746
|
[
"Apache-2.0"
] | null | null | null |
yardstick/benchmark/runners/iteration.py
|
mythwm/yardstick-wm
|
319ced11df92456b42c80cfd6e53c66dbd22a746
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# yardstick comment: this is a modified copy of
# rally/rally/benchmark/runners/constant.py
"""A runner that runs a configurable number of times before it returns
"""
from __future__ import absolute_import
import os
import multiprocessing
import logging
import traceback
import time
from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
def _worker_process(queue, cls, method_name, scenario_cfg,
context_cfg, aborted, output_queue):
sequence = 1
runner_cfg = scenario_cfg['runner']
interval = runner_cfg.get("interval", 1)
iterations = runner_cfg.get("iterations", 1)
run_step = runner_cfg.get("run_step", "setup,run,teardown")
delta = runner_cfg.get("delta", 2)
LOG.info("worker START, iterations %d times, class %s", iterations, cls)
runner_cfg['runner_id'] = os.getpid()
benchmark = cls(scenario_cfg, context_cfg)
if "setup" in run_step:
benchmark.setup()
method = getattr(benchmark, method_name)
sla_action = None
if "sla" in scenario_cfg:
sla_action = scenario_cfg["sla"].get("action", "assert")
if "run" in run_step:
while True:
LOG.debug("runner=%(runner)s seq=%(sequence)s START",
{"runner": runner_cfg["runner_id"],
"sequence": sequence})
data = {}
errors = ""
try:
result = method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
raise
elif sla_action == "monitor":
LOG.warning("SLA validation failed: %s", assertion.args)
errors = assertion.args
elif sla_action == "rate-control":
try:
scenario_cfg['options']['rate']
except KeyError:
scenario_cfg.setdefault('options', {})
scenario_cfg['options']['rate'] = 100
scenario_cfg['options']['rate'] -= delta
sequence = 1
continue
except Exception as e:
errors = traceback.format_exc()
LOG.exception(e)
raise
else:
if result:
output_queue.put(result)
time.sleep(interval)
benchmark_output = {
'timestamp': time.time(),
'sequence': sequence,
'data': data,
'errors': errors
}
queue.put(benchmark_output)
LOG.debug("runner=%(runner)s seq=%(sequence)s END",
{"runner": runner_cfg["runner_id"],
"sequence": sequence})
sequence += 1
if (errors and sla_action is None) or \
(sequence > iterations or aborted.is_set()):
LOG.info("worker END")
break
if "teardown" in run_step:
benchmark.teardown()
class IterationRunner(base.Runner):
"""Run a scenario for a configurable number of times
If the scenario ends before the time has elapsed, it will be started again.
Parameters
iterations - amount of times the scenario will be run for
type: int
unit: na
default: 1
interval - time to wait between each scenario invocation
type: int
unit: seconds
default: 1 sec
"""
__execution_type__ = 'Iteration'
def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
args=(self.result_queue, cls, method, scenario_cfg,
context_cfg, self.aborted, self.output_queue))
self.process.start()
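# --- Illustrative runner configuration (added sketch, not in the original file) ---
# The "runner" section of a scenario consumed by _worker_process above could look
# like the dict below; the key names match the runner_cfg.get(...) calls in this
# module, while the surrounding scenario definition is hypothetical.
EXAMPLE_RUNNER_CFG = {
    "type": "Iteration",               # runner selector (matched against __execution_type__)
    "iterations": 10,                  # run the scenario 10 times
    "interval": 1,                     # sleep 1 second between invocations
    "run_step": "setup,run,teardown",  # which phases to execute
    "delta": 2,                        # rate decrement used by the "rate-control" SLA action
}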
| 31.826389
| 78
| 0.580624
|
13046201545d50b8d73d5fb76826c76a83d51425
| 2,363
|
py
|
Python
|
www/mf.py
|
MAGENTAFACES/MAGENTAFACES
|
96908a8233efbe2e7eaa9e4221928cd63035ad00
|
[
"MIT"
] | null | null | null |
www/mf.py
|
MAGENTAFACES/MAGENTAFACES
|
96908a8233efbe2e7eaa9e4221928cd63035ad00
|
[
"MIT"
] | null | null | null |
www/mf.py
|
MAGENTAFACES/MAGENTAFACES
|
96908a8233efbe2e7eaa9e4221928cd63035ad00
|
[
"MIT"
] | null | null | null |
import os
from jinja2 import Environment, FileSystemLoader
from markdown import markdown
from random import random
from werkzeug.exceptions import HTTPException, NotFound
from werkzeug.routing import Map, Rule
from werkzeug.wrappers import Request, Response
from werkzeug.wsgi import SharedDataMiddleware
class MagentaFaces(object):
def __init__(self):
path = os.path.join(os.path.dirname(__file__), 'templates')
self.jenv = Environment(loader=FileSystemLoader(path), autoescape=False)
self.url_map = Map([Rule('/', endpoint='mf')])
def mf(self, request, **context):
path = os.path.join(os.path.dirname(__file__), 'prose')
hlist = []
for x in range(0,15):
h = "h%04d.md" % x
hlist.append(h)
f = open(os.path.join(path, hlist[int(random() * (len(hlist)))]), 'r')
try:
head = unicode(f.read(), 'utf-8')
finally:
f.close()
blam = ['diz', 'daz', 'dux']
html = blam[int(random() * 3)] + '.html'
prlist = []
for x in range(0,31):
p = "p%04d.md" % x
prlist.append(p)
f = open(os.path.join(path, prlist[int(random() * len(prlist))]), 'r')
try:
data = unicode(f.read(), 'utf-8')
finally:
f.close()
prose = markdown(data)
return self.render_template(html, head=head, prose=prose)
def render_template(self, template_name, **context):
t = self.jenv.get_template(template_name)
return Response(t.render(context), mimetype='text/html')
def dispatch(self, request):
adapter = self.url_map.bind_to_environ(request.environ)
try:
endpoint, values = adapter.match()
return getattr(self, endpoint)(request, **values)
        except NotFound:
            return self.render_template("daz.html", head="these faces are magenta", prose='and so are we')
        except HTTPException as e:
            return e
def wsgi_app(self, environ, start_response):
request = Request(environ)
response = self.dispatch(request)
return response(environ, start_response)
def __call__(self, environ, start_response):
return self.wsgi_app(environ, start_response)
def make_app(with_static=True):
app = MagentaFaces()
if with_static:
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/static': os.path.join(os.path.dirname(__file__), 'static')
})
return app
if __name__ == '__main__':
from werkzeug.serving import run_simple
app = make_app()
run_simple('127.0.0.1', 5000, app, use_debugger=True, use_reloader=True)
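# --- Added note (illustrative, not part of the original file) -----------------
# Routing follows werkzeug's Map/Rule pattern: dispatch() looks up each Rule's
# endpoint name as a method on MagentaFaces. A hypothetical extra page would be
# wired up by extending the map and adding a matching method:
#     self.url_map = Map([Rule('/', endpoint='mf'),
#                         Rule('/about', endpoint='about')])
#     def about(self, request, **context):
#         return self.render_template('daz.html', head='about', prose='...')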
| 29.5375
| 97
| 0.703766
|
f67b41f7e7c050f6c19852a8d7fc25e955f3354a
| 460
|
py
|
Python
|
leetCode/algorithms/medium/flatten_nested_list_iterator.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 25
|
2015-01-21T16:39:18.000Z
|
2021-05-24T07:01:24.000Z
|
leetCode/algorithms/medium/flatten_nested_list_iterator.py
|
gauravsingh58/algo
|
397859a53429e7a585e5f6964ad24146c6261326
|
[
"WTFPL"
] | 2
|
2020-09-30T19:39:36.000Z
|
2020-10-01T17:15:16.000Z
|
leetCode/algorithms/medium/flatten_nested_list_iterator.py
|
ferhatelmas/algo
|
a7149c7a605708bc01a5cd30bf5455644cefd04d
|
[
"WTFPL"
] | 15
|
2015-01-21T16:39:27.000Z
|
2020-10-01T17:00:22.000Z
|
from collections import deque
class NestedIterator(object):
def __init__(self, nestedList):
self.q = deque([])
self.processList(nestedList)
def processList(self, ls):
for e in ls:
if e.isInteger():
self.q.append(e.getInteger())
else:
self.processList(e.getList())
def next(self):
return self.q.popleft()
def hasNext(self):
return bool(self.q)
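# --- Usage sketch (added illustration; _NestedIntegerStub is a hypothetical ---
# stand-in for LeetCode's NestedInteger interface, just to exercise the class).
class _NestedIntegerStub(object):
    def __init__(self, value):
        self._value = value
    def isInteger(self):
        return not isinstance(self._value, list)
    def getInteger(self):
        return self._value
    def getList(self):
        return [_NestedIntegerStub(v) for v in self._value]
if __name__ == "__main__":
    it = NestedIterator([_NestedIntegerStub(v) for v in [1, [2, [3]], 4]])
    flat = []
    while it.hasNext():
        flat.append(it.next())
    print(flat)  # expected output: [1, 2, 3, 4]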
| 21.904762
| 45
| 0.56087
|
2d140b44032324330d071a467d267e5f9a15ed3c
| 24,945
|
py
|
Python
|
eosim/gui/visualize/vis2dframe.py
|
EarthObservationSimulator/eosim-gui
|
3067026f5f32be214e9ec2c4461a734ad25bb6a4
|
[
"Apache-2.0"
] | null | null | null |
eosim/gui/visualize/vis2dframe.py
|
EarthObservationSimulator/eosim-gui
|
3067026f5f32be214e9ec2c4461a734ad25bb6a4
|
[
"Apache-2.0"
] | null | null | null |
eosim/gui/visualize/vis2dframe.py
|
EarthObservationSimulator/eosim-gui
|
3067026f5f32be214e9ec2c4461a734ad25bb6a4
|
[
"Apache-2.0"
] | null | null | null |
"""
.. module:: vis2dframe
:synopsis: *Module to handle visualization with X-Y plots.*
The module contains the class ``Vis2DFrame`` to build the frame in which the user enters the plotting parameters.
A time-interval of interest is to be specified, and the X, Y data corresponding to this time-interval shall be plotted.
A single x-variable (belonging to a satellite) is selected (see the class ``Plot2DVisVars`` for list of possible variables).
Multiple y-variables may be selected to be plotted on the same figure.
The module currently only allows plotting of satellite orbit-propagation parameters (and hence association of only the satellite
(no need of sensor) with the variable is sufficient).
"""
from tkinter import ttk
import tkinter as tk
import tkinter.filedialog, tkinter.messagebox
from eosim import config
import orbitpy, instrupy
import pandas as pd
import numpy as np
from matplotlib.backends.backend_tkagg import (
FigureCanvasTkAgg, NavigationToolbar2Tk)
# Implement the default Matplotlib key bindings.
from matplotlib.backend_bases import key_press_handler
from matplotlib.figure import Figure
import logging
logger = logging.getLogger(__name__)
class Plot2DVisVars(instrupy.util.EnumEntity):
""" This class holds and handles the variables which can be plotted (either on x or y axis).
The class-variables are all the variables make up all the possible variables which can be plotted.
The class also includes two functions which aid in the retrieval of the variable-data from the OrbitPy datafiles.
"""
TIME = "Time"
ALT = "Altitude [km]"
INC = "Inclination [deg]"
TA = "True Anomaly [km]"
RAAN = "RAAN [deg]"
AOP = "AOP [deg]"
ECC = "ECC"
SPD = "ECI Speed [km/s]"
ECIX = "ECI X-position [km]"
ECIY = "ECI Y-position [km]"
ECIZ = "ECI Z-position [km]"
VX = "ECI X Velocity [km/s]"
VY = "ECI Y Velocity [km/s]"
VZ = "ECI Z Velocity [km/s]"
LAT = "Latitude [deg]"
LON = "Longitude [deg]"
@classmethod
def get_orbitpy_file_column_header(cls, var):
""" Function returns the OrbitPy column header (label) corresponding to the input variable.
If not present, ``False`` is returned indicating a "derived" variable.
"""
if(var==cls.ECIX):
return "x [km]"
elif(var==cls.ECIY):
return "y [km]"
elif(var==cls.ECIZ):
return "z [km]"
elif(var==cls.VX):
return "vx [km/s]"
elif(var==cls.VY):
return "vy [km/s]"
elif(var==cls.VZ):
return "vz [km/s]"
elif(var==cls.INC):
return "inc [deg]"
elif(var==cls.RAAN):
return "raan [deg]"
elif(var==cls.AOP):
return "aop [deg]"
elif(var==cls.TA):
return "ta [deg]"
elif(var==cls.ECC):
return "ecc"
else:
return False # could be a derived variable
@classmethod
def get_data_from_orbitpy_file(cls, sat_df, sat_id, var, step_size, epoch_JDUT1):
""" Extract the variable data from the input orbit-propagation data.
:param sat_df: Dataframe corresponding to the orbit-propagation data.
:paramtype sat_df: :class:`pandas.DataFrame`
:param sat_id: Satellite identifier.
:paramtype sat_id: str or int
:param var: Variable of interest to be plotted (on either the X or Y axis).
:paramtype var: class-variable of the ``Plot2DVisVars`` class.
:param step_size: step-size
:paramtype step_size: float
:param epoch_JDUT1: Epoch in Julian Date UT1 at which the input data is referenced.
:paramtype epoch_JDUT1: float
:return: Tuple containing the variable plot-name (label) and the corresponding data to be plotted.
:rtype: tuple
"""
_header = Plot2DVisVars.get_orbitpy_file_column_header(var)
if(_header is not False):
if _header == sat_df.index.name:
data = sat_df.index
else:
data = sat_df[_header]
else:
# a derived variable
if(var == cls.TIME):
data = np.array(sat_df.index) * step_size # index = "time index"
_header = 'time [s]'
elif(var == cls.ALT):
sat_dist = []
sat_dist = np.array(sat_df["x [km]"])*np.array(sat_df["x [km]"]) + np.array(sat_df["y [km]"])*np.array(sat_df["y [km]"]) + np.array(sat_df["z [km]"])*np.array(sat_df["z [km]"])
sat_dist = np.sqrt(sat_dist)
data = np.array(sat_dist) - instrupy.util.Constants.radiusOfEarthInKM
_header = 'alt [km]'
elif(var==cls.SPD):
data = np.array(sat_df["vx [km/s]"])*np.array(sat_df["vx [km/s]"]) + np.array(sat_df["vy [km/s]"])*np.array(sat_df["vy [km/s]"]) + np.array(sat_df["vz [km/s]"])*np.array(sat_df["vz [km/s]"])
data = np.sqrt(data)
_header = 'speed [km/s]'
elif(var==cls.LAT):
lat = np.zeros((len(sat_df["x [km]"]), 1)) # make empty result array
sat_df_index = list(sat_df.index)
sat_df_x = list(sat_df["x [km]"])
sat_df_y = list(sat_df["y [km]"])
sat_df_z = list(sat_df["z [km]"])
for k in range(0,len(sat_df["x [km]"])):
time = epoch_JDUT1 + sat_df_index[k] * step_size * 1/86400
[lat[k], _x, _y] = instrupy.util.GeoUtilityFunctions.eci2geo([sat_df_x[k], sat_df_y[k], sat_df_z[k]], time)
data = lat
_header = 'latitude [deg]'
elif(var==cls.LON):
lon = np.zeros((len(sat_df["x [km]"]), 1)) # make empty result array
sat_df_index = list(sat_df.index)
sat_df_x = list(sat_df["x [km]"])
sat_df_y = list(sat_df["y [km]"])
sat_df_z = list(sat_df["z [km]"])
for k in range(0,len(sat_df["x [km]"])):
time = epoch_JDUT1 + sat_df_index[k] * step_size * 1/86400
[lon[k], _x, _y] = instrupy.util.GeoUtilityFunctions.eci2geo([sat_df_x[k], sat_df_y[k], sat_df_z[k]], time)
data = lon
_header = 'longitude [deg]'
return (str(sat_id)+'.'+_header, data)
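# --- Illustrative call (added sketch; the toy DataFrame below is hypothetical) --
# Given a small propagation DataFrame indexed by "time index" with ECI position
# columns, the altitude series is derived exactly as in get_data_from_orbitpy_file:
#     toy = pd.DataFrame({"x [km]": [7000.0, 7000.0],
#                         "y [km]": [0.0, 10.0],
#                         "z [km]": [0.0, 0.0]})
#     toy.index.name = "time index"
#     label, alt = Plot2DVisVars.get_data_from_orbitpy_file(
#         sat_df=toy, sat_id="sat1", var=Plot2DVisVars.ALT,
#         step_size=1.0, epoch_JDUT1=2459270.5)
#     # label == "sat1.alt [km]"; alt == sqrt(x^2 + y^2 + z^2) - Earth radius [km]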
class TwoDimVisPlotAttributes():
""" Container class to hold and handle the plot attributes which are specified by the user.
"""
def __init__(self, x_sat_id=None, x_var=None, y_sat_id=None, y_var=None, time_start=None, time_end=None):
self.x_sat_id = x_sat_id if x_sat_id is not None else None # x-variable satellite-identifier
self.x_var = x_var if x_var is not None else None # x-variable
self.y_sat_id = y_sat_id if y_sat_id is not None else list() # y-variable satellite-identifier. Is a list to accommodate multiple plots over the same x-axis.
self.y_var = y_var if y_var is not None else list() # y-variable. Is a list to accommodate multiple plots over the same x-axis.
self.time_start = time_start if time_start is not None else None
self.time_end = time_end if time_end is not None else None
def update_x_variables(self, x_sat_id, x_var):
self.x_sat_id = x_sat_id
self.x_var = x_var
def update_y_variables(self, y_sat_id, y_var):
self.y_sat_id.append(y_sat_id)
self.y_var.append(y_var)
def reset_y_variables(self):
self.y_sat_id = list()
self.y_var = list()
def update_time_interval(self, time_start, time_end):
self.time_start = time_start
self.time_end = time_end
def get_x_variables(self):
return [self.x_sat_id, self.x_var]
def get_y_variables(self):
return [self.y_sat_id, self.y_var]
def get_time_interval(self):
return [self.time_start, self.time_end]
class Vis2DFrame(ttk.Frame):
""" Primary class to create the frame and the widgets."""
def __init__(self, win, tab):
self.two_dim_vis_plt_attr = TwoDimVisPlotAttributes() # instance variable storing the 2D plot attributes
# 2d plots frame
vis_2d_frame = ttk.Frame(tab)
vis_2d_frame.pack(expand = True, fill ="both", padx=10, pady=10)
vis_2d_frame.rowconfigure(0,weight=1)
vis_2d_frame.rowconfigure(1,weight=1)
vis_2d_frame.columnconfigure(0,weight=1)
vis_2d_frame.columnconfigure(1,weight=1)
vis_2d_time_frame = ttk.LabelFrame(vis_2d_frame, text='Set Time Interval', labelanchor='n')
vis_2d_time_frame.grid(row=0, column=0, sticky='nswe', rowspan=2, padx=(40,0))
vis_2d_time_frame.rowconfigure(0,weight=1)
vis_2d_time_frame.rowconfigure(1,weight=1)
vis_2d_time_frame.rowconfigure(2,weight=1)
vis_2d_time_frame.columnconfigure(0,weight=1)
vis_2d_time_frame.columnconfigure(1,weight=1)
vis_2d_xaxis_frame = ttk.LabelFrame(vis_2d_frame, text='Set X-variable', labelanchor='n')
vis_2d_xaxis_frame.grid(row=0, column=1, sticky='nswe')
vis_2d_xaxis_frame.columnconfigure(0,weight=1)
vis_2d_xaxis_frame.columnconfigure(1,weight=1)
vis_2d_xaxis_frame.rowconfigure(0,weight=1)
vis_2d_yaxis_frame = ttk.LabelFrame(vis_2d_frame, text='Set Y-variable(s)', labelanchor='n')
vis_2d_yaxis_frame.grid(row=1, column=1, sticky='nswe')
vis_2d_yaxis_frame.columnconfigure(0,weight=1)
vis_2d_yaxis_frame.columnconfigure(1,weight=1)
vis_2d_yaxis_frame.rowconfigure(0,weight=1)
vis_2d_plot_frame = ttk.Frame(vis_2d_frame)
vis_2d_plot_frame.grid(row=2, column=0, columnspan=2, sticky='nswe', pady=(10,2))
vis_2d_plot_frame.columnconfigure(0,weight=1)
vis_2d_plot_frame.columnconfigure(1,weight=1)
vis_2d_plot_frame.rowconfigure(0,weight=1)
# 2D vis frame
ttk.Label(vis_2d_time_frame, text="Time (hh:mm:ss) from mission-epoch", wraplength="110", justify='center').grid(row=0, column=0,columnspan=2,ipady=5)
ttk.Label(vis_2d_time_frame, text="From").grid(row=1, column=0, sticky='ne')
self.vis_2d_time_from_entry = ttk.Entry(vis_2d_time_frame, width=10, takefocus = False)
self.vis_2d_time_from_entry.grid(row=1, column=1, sticky='nw', padx=10)
self.vis_2d_time_from_entry.insert(0,'00:00:00')
self.vis_2d_time_from_entry.bind("<FocusIn>", lambda args: self.vis_2d_time_from_entry.delete('0', 'end'))
ttk.Label(vis_2d_time_frame, text="To").grid(row=2, column=0, sticky='ne')
self.vis_2d_time_to_entry = ttk.Entry(vis_2d_time_frame, width=10, takefocus = False)
self.vis_2d_time_to_entry.grid(row=2, column=1, sticky='nw', padx=10)
self.vis_2d_time_to_entry.insert(0,'10:00:00')
self.vis_2d_time_to_entry.bind("<FocusIn>", lambda args: self.vis_2d_time_to_entry.delete('0', 'end'))
vis_2d_x_sel_var_btn = ttk.Button(vis_2d_xaxis_frame, text="X.Var", command=self.click_select_xvar_btn)
vis_2d_x_sel_var_btn.grid(row=0, column=0)
self.vis_2d_x_sel_var_disp = tk.Text(vis_2d_xaxis_frame, state='disabled',height = 1, width = 3, background="light grey")
self.vis_2d_x_sel_var_disp.grid(row=0, column=1, sticky='nsew', padx=20, pady=20)
vis_2d_y_sel_var_btn = ttk.Button(vis_2d_yaxis_frame, text="Y.Var(s)", command=self.click_select_yvar_btn)
vis_2d_y_sel_var_btn.grid(row=0, column=0)
self.vis_2d_y_sel_var_disp = tk.Text(vis_2d_yaxis_frame, state='disabled',height = 2, width = 3, background="light grey")
self.vis_2d_y_sel_var_disp.grid(row=0, column=1, sticky='nsew', padx=20, pady=20)
plot_btn = ttk.Button(vis_2d_plot_frame, text="Plot", command=lambda: self.click_plot_btn(plot=True))
plot_btn.grid(row=0, column=0, sticky='e', padx=20)
export_btn = ttk.Button(vis_2d_plot_frame, text="Export", command=lambda: self.click_plot_btn(export=True))
export_btn.grid(row=0, column=1, sticky='w', padx=20)
def click_select_xvar_btn(self):
""" Create window to ask what should be the x-variable. Only 1 x-variable selection per plot is allowed (for obvious reasons)."""
select_xvar_win = tk.Toplevel()
select_xvar_win.rowconfigure(0,weight=1)
select_xvar_win.rowconfigure(1,weight=1)
select_xvar_win.columnconfigure(0,weight=1)
select_xvar_win.columnconfigure(1,weight=1)
select_sat_win_frame = ttk.LabelFrame(select_xvar_win, text='Select Satellite')
select_sat_win_frame.grid(row=0, column=0, padx=10, pady=10)
select_var_frame = ttk.LabelFrame(select_xvar_win, text='Select Variable')
select_var_frame.grid(row=0, column=1, padx=10, pady=10)
okcancel_frame = ttk.Label(select_xvar_win)
okcancel_frame.grid(row=1, column=0, columnspan=2, padx=10, pady=10)
# place the widgets in the frame
        available_sats = [x._id for x in config.mission.spacecraft]  # get all available satellite-ids for which outputs are available
        sats_combo_box = ttk.Combobox(select_sat_win_frame,
                                      values=available_sats)
        sats_combo_box.current(0)
        sats_combo_box.grid(row=0, column=0)
self._2dvis_xvar= tk.StringVar() # using self so that the variable is retained even after exit from the function, make sure variable name is unique
j = 0
k = 0
for _var in list(Plot2DVisVars):
var_rbtn = ttk.Radiobutton(select_var_frame, text=_var, variable=self._2dvis_xvar, value=_var)
var_rbtn.grid(row=j, column=k, sticky='w')
j = j + 1
if(j==5):
j=0
k=k+1
def click_ok_btn():
self.two_dim_vis_plt_attr.update_x_variables(sats_combo_box.get(), self._2dvis_xvar.get())
[sats, xvars] = self.two_dim_vis_plt_attr.get_x_variables()
# write the selected variable in the display window for user
xvars_str = str(sats + '.' + xvars)
self.vis_2d_x_sel_var_disp.configure(state='normal')
self.vis_2d_x_sel_var_disp.delete(1.0,'end')
self.vis_2d_x_sel_var_disp.insert(1.0, xvars_str)
self.vis_2d_x_sel_var_disp.configure(state='disabled')
select_xvar_win.destroy()
ok_btn = ttk.Button(okcancel_frame, text="Ok", command=click_ok_btn, width=15)
ok_btn.grid(row=0, column=0, sticky ='e')
cancel_btn = ttk.Button(okcancel_frame, text="Exit", command=select_xvar_win.destroy, width=15)
cancel_btn.grid(row=0, column=1, sticky ='w')
def click_select_yvar_btn(self):
""" Create window to ask what should be the y-variable(s). Multiple variables can be configured."""
# reset any previously configured y-variables
self.two_dim_vis_plt_attr.reset_y_variables()
# create window to ask which satellite
select_yvar_win = tk.Toplevel()
select_yvar_win.rowconfigure(0,weight=1)
select_yvar_win.rowconfigure(1,weight=1)
select_yvar_win.columnconfigure(0,weight=1)
select_yvar_win.columnconfigure(1,weight=1)
select_sat_win_frame = ttk.LabelFrame(select_yvar_win, text='Select Satellite')
select_sat_win_frame.grid(row=0, column=0, padx=10, pady=10)
select_var_frame = ttk.LabelFrame(select_yvar_win, text='Select Variable')
select_var_frame.grid(row=0, column=1, padx=10, pady=10)
okcancel_frame = ttk.Label(select_yvar_win)
okcancel_frame.grid(row=1, column=0, columnspan=2, padx=10, pady=10)
# place the widgets in the frame
available_sats = [x._id for x in config.mission.spacecraft]# get all available satellite-ids for which outputs are available
sats_combo_box = ttk.Combobox(select_sat_win_frame,
values=available_sats)
sats_combo_box.current(0)
sats_combo_box.grid(row=0, column=0)
self._2dvis_yvar= tk.StringVar() # using self so that the variable is retained even after exit from the function, make sure variable name is unique
j = 0
k = 0
for _var in list(Plot2DVisVars):
var_rbtn = ttk.Radiobutton(select_var_frame, text=_var, variable=self._2dvis_yvar, value=_var)
var_rbtn.grid(row=j, column=k, sticky='w')
j = j + 1
if(j==5):
j=0
k=k+1
def click_ok_btn():
self.two_dim_vis_plt_attr.update_y_variables(sats_combo_box.get(), self._2dvis_yvar.get())
def click_exit_btn():
self.vis_2d_y_sel_var_disp.configure(state='normal')
self.vis_2d_y_sel_var_disp.delete(1.0,'end')
# write the selected variable in the display window for user
[sats, yvars] = self.two_dim_vis_plt_attr.get_y_variables()
yvars_str = [str(sats[k]+'.'+yvars[k]) for k in range(0,len(sats))]
self.vis_2d_y_sel_var_disp.insert(1.0,' '.join(yvars_str))
self.vis_2d_y_sel_var_disp.configure(state='disabled')
select_yvar_win.destroy()
ok_btn = ttk.Button(okcancel_frame, text="Add", command=click_ok_btn, width=15)
ok_btn.grid(row=0, column=0, sticky ='e')
cancel_btn = ttk.Button(okcancel_frame, text="Exit", command=click_exit_btn, width=15)
cancel_btn.grid(row=0, column=1, sticky ='w')
def update_time_interval_in_attributes_variable(self):
""" Update the time-interval of interest from the user-input."""
# read the plotting time interval
time_start = str(self.vis_2d_time_from_entry.get()).split(":") # split and reverse list
time_start.reverse()
# convert to seconds
x = 0
for k in range(0,len(time_start)):
x = x + float(time_start[k]) * (60**k)
time_start_s = x
time_end = str(self.vis_2d_time_to_entry.get()).split(":") # split and reverse list
time_end.reverse()
# convert to seconds
x = 0
for k in range(0,len(time_end)):
x = x + float(time_end[k]) * (60**k)
time_end_s = x
self.two_dim_vis_plt_attr.update_time_interval(time_start_s, time_end_s)
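    # Worked example (added note): the loops above convert "hh:mm:ss" by reversing
    # the fields and weighting field k by 60**k, e.g. "01:30:15" ->
    # 15*60**0 + 30*60**1 + 1*60**2 = 15 + 1800 + 3600 = 5415 seconds.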
def click_plot_btn(self, export=False, plot=False):
""" Make X-Y scatter plots of the variables indicated in :code:`two_dim_vis_plt_attr` instance variable.
"""
# get the time-interval of interest
self.update_time_interval_in_attributes_variable()
[time_start_s, time_end_s] = self.two_dim_vis_plt_attr.get_time_interval()
# get the x-axis data
[x_sat_id, x_var] = self.two_dim_vis_plt_attr.get_x_variables()
# search for the orbit-propagation data corresponding to the satellite with identifier = x_sat_id
x_sat_prop_out_info = orbitpy.util.OutputInfoUtility.locate_output_info_object_in_list(out_info_list=config.mission.outputInfo,
out_info_type=orbitpy.util.OutputInfoUtility.OutputInfoType.PropagatorOutputInfo,
spacecraft_id=x_sat_id
)
x_sat_state_fp = x_sat_prop_out_info.stateCartFile
x_sat_kepstate_fp = x_sat_prop_out_info.stateKeplerianFile
# read the epoch and time-step size and fix the start and stop indices
(epoch_JDUT1, step_size, duration) = orbitpy.util.extract_auxillary_info_from_state_file(x_sat_state_fp)
logger.debug("epoch_JDUT1 is " + str(epoch_JDUT1))
logger.debug("step_size is " + str(step_size))
time_start_index = int(time_start_s/step_size)
time_end_index = int(time_end_s/step_size)
# Get the orbit-propagation data.
# Cartesian ECI state file
x_sat_state_df = pd.read_csv(x_sat_state_fp,skiprows = [0,1,2,3])
x_sat_state_df.set_index('time index', inplace=True)
# Keplerian state file
x_sat_kepstate_df = pd.read_csv(x_sat_kepstate_fp,skiprows = [0,1,2,3])
x_sat_kepstate_df.set_index('time index', inplace=True)
# check if the user-specified time interval is within bounds
min_time_index = min(x_sat_state_df.index)
max_time_index = max(x_sat_state_df.index)
if(time_start_index < min_time_index or time_start_index > max_time_index or
time_end_index < min_time_index or time_end_index > max_time_index or
time_start_index > time_end_index):
logger.info("Please enter valid time-interval.")
return
# get data only in the relevant time-interval
x_sat_state_df = x_sat_state_df.iloc[time_start_index:time_end_index]
x_sat_kepstate_df = x_sat_kepstate_df.iloc[time_start_index:time_end_index]
x_sat_df = pd.concat([x_sat_state_df, x_sat_kepstate_df], axis=1)
# make empty dataframe to store the plot related data
plt_data = pd.DataFrame(index=x_sat_state_df.index)
# extract the x-variable from the orbit-propagation data
(_xvarname, _xdata) = Plot2DVisVars.get_data_from_orbitpy_file(sat_df=x_sat_df, sat_id=x_sat_id, var=x_var, step_size=step_size, epoch_JDUT1=epoch_JDUT1)
plt_data[_xvarname] = _xdata
# iterate over the list of y-vars
[y_sat_id, y_var] = self.two_dim_vis_plt_attr.get_y_variables()
num_y_vars = len(y_var)
for k in range(0,num_y_vars):
# extract the y-variable data from of the particular satellite
# search for the orbit-propagation data corresponding to the satellite with identifier = y_sat_id[k]
y_sat_prop_out_info = orbitpy.util.OutputInfoUtility.locate_output_info_object_in_list(out_info_list=config.mission.outputInfo,
out_info_type=orbitpy.util.OutputInfoUtility.OutputInfoType.PropagatorOutputInfo,
spacecraft_id=y_sat_id[k]
)
y_sat_state_fp = y_sat_prop_out_info.stateCartFile
y_sat_kepstate_fp = y_sat_prop_out_info.stateKeplerianFile
# load the cartesian eci state data, get data only in the relevant time-interval
y_sat_state_df = pd.read_csv(y_sat_state_fp, skiprows = [0,1,2,3])
y_sat_state_df.set_index('time index', inplace=True)
y_sat_state_df = y_sat_state_df.iloc[time_start_index:time_end_index]
# load the keplerian state data, get data only in the relevant time-interval
y_sat_kepstate_df = pd.read_csv(y_sat_kepstate_fp, skiprows = [0,1,2,3])
y_sat_kepstate_df.set_index('time index', inplace=True)
y_sat_kepstate_df = y_sat_kepstate_df.iloc[time_start_index:time_end_index]
y_sat_df = pd.concat([y_sat_state_df, y_sat_kepstate_df], axis=1)
# add new column with the y-data
(_yvarname, _ydata) = Plot2DVisVars.get_data_from_orbitpy_file(sat_df=y_sat_df, sat_id=y_sat_id[k], var=y_var[k], step_size=step_size, epoch_JDUT1=epoch_JDUT1)
plt_data[_yvarname] = _ydata
if(export is True):
vis2d_data_fp = tkinter.filedialog.asksaveasfile()
plt_data.to_csv(vis2d_data_fp)
if(plot is True):
fig_win = tk.Toplevel()
fig = Figure(figsize=(5, 4), dpi=100)
ax = fig.add_subplot(111)
_lgnd=[]
for k in range(0,num_y_vars):
ax.scatter(plt_data.iloc[:,0],plt_data.iloc[:,k+1])
_lgnd.append(plt_data.columns[k+1]) # pylint: disable=E1136 # pylint/issues/3139
ax.set_xlabel(plt_data.columns[0]) # pylint: disable=E1136 # pylint/issues/3139
ax.set_ylabel('Y-axis')
ax.legend(_lgnd)
canvas = FigureCanvasTkAgg(fig, master=fig_win) # A tk.DrawingArea.
canvas.draw()
canvas.get_tk_widget().pack(side=tkinter.TOP, fill=tkinter.BOTH, expand=1)
toolbar = NavigationToolbar2Tk(canvas, fig_win)
toolbar.update()
canvas.get_tk_widget().pack(side=tkinter.TOP, fill=tkinter.BOTH, expand=1)
| 49.39604
| 206
| 0.636119
|
6eee5a86a178cbf54d0b8ca80443d611ab78f80f
| 11,832
|
py
|
Python
|
pywren/state.py
|
Pkanjan37/Lightweight_big_data_framework_serverless
|
4a1489429a71d488f449f9dffbeca85ead31db14
|
[
"Apache-2.0"
] | null | null | null |
pywren/state.py
|
Pkanjan37/Lightweight_big_data_framework_serverless
|
4a1489429a71d488f449f9dffbeca85ead31db14
|
[
"Apache-2.0"
] | null | null | null |
pywren/state.py
|
Pkanjan37/Lightweight_big_data_framework_serverless
|
4a1489429a71d488f449f9dffbeca85ead31db14
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2018 PyWren Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import print_function
import logging
import time
import enum
from tblib import pickling_support
try:
from six import reraise
from six.moves import cPickle as pickle
except:
import pickle
from pywren import wrenconfig
from pywren.storage import storage, storage_utils
from pywren.jobrunner import stepFunctionbuilder
pickling_support.install()
logger = logging.getLogger(__name__)
class JobState(enum.Enum):
new = 1
invoked = 2
running = 3
success = 4
error = 5
class ResponseStateFuture:
"""
Object representing the result of a PyWren invocation. Returns the status of the
execution and the result when available.
"""
GET_RESULT_SLEEP_SECS = 4
    def __init__(self, input_set, storage_path, statemachine_arn, storage_instance,
                 output_path, intermediate_bucket=None, output_path_list=None):
self.input_set = input_set
self._exception = Exception()
self._return_val = None
self._traceback = None
self._call_invoker_result = None
self.run_status = None
self.invoke_status = None
self.status_query_count = 0
self.storage = storage_instance
self.storage_path = storage_path
self.output_bucket = "output-bucky"
self.intermediate_bucket = intermediate_bucket
        self.statemachine_arn = statemachine_arn
        if output_path_list is None:
            self.output_path = output_path
        else:
            self.output_path = output_path_list
def _set_state(self, new_state):
## FIXME add state machine
self._state = new_state
    def _set_sm_arn(self, stateMachine):
        self.statemachine_arn = stateMachine
    def _get_sm_arn(self):
        return self.statemachine_arn
def _get_output_pth(self):
return self.output_path
def _get_intermediate_bucket(self):
return self.intermediate_bucket
def cancel(self, storage_handler=None):
# TODO Figure out a better way for this function to have
# access to a custom storage handler
if storage_handler is None:
storage_config = wrenconfig.extract_storage_config(wrenconfig.default())
storage_handler = storage.Storage(storage_config)
storage_handler.put_cancelled(self.callset_id,
self.call_id, "CANCEL")
def cancelled(self):
raise NotImplementedError("Cannot cancel dispatched jobs")
def running(self):
raise NotImplementedError()
def done(self):
if self._state in [JobState.success, JobState.error]:
return True
return self.result(check_only=True)
def succeeded(self):
return self._state == JobState.success
def errored(self):
return self._state == JobState.error
def result_state(self,Mode="ALL"):
stepFunc = stepFunctionbuilder.StateFunctionWrapper()
succ,fail,undone = stepFunc.wait(self.statemachine_arn,self.input_set,stepFunc.ALL_COMPLETED)
print("State succ<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
print(succ)
print(len(succ))
print("State fail<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
print(fail)
print(len(fail))
print("State undone<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
print(undone)
print(len(undone))
if Mode=="ALL":
if(len(succ)>0 and len(fail)<1):
print("Suppose to be here????????????")
print(self.output_path)
output = self.storage.get_state_output(self.output_path,Mode)
else:
print("ORRRRRRRRRRR here????????????")
output = stepFunc.buildStateChecker(self.statemachine_arn)
else:
if(len(succ)>0):
print("Suppose to be here????????????")
print(self.output_path)
output = self.storage.get_state_output(self.output_path,Mode)
else:
print("ORRRRRRRRRRR here????????????")
output = stepFunc.buildStateChecker(self.statemachine_arn)
return output
def wait_state(self):
stepFunc = stepFunctionbuilder.StateFunctionWrapper()
succ,fail,undone = stepFunc.wait(self.statemachine_arn,self.input_set,stepFunc.ALL_COMPLETED)
return "Complete"
def result(self, timeout=None, check_only=False,
throw_except=True, storage_handler=None):
"""
check_only = True implies we only check if the job is completed.
# FIXME check_only is the worst API and should be refactored
# out to be part of done()
From the python docs:
Return the value returned by the call. If the call hasn't yet
completed then this method will wait up to timeout seconds. If
the call hasn't completed in timeout seconds then a
TimeoutError will be raised. timeout can be an int or float.If
timeout is not specified or None then there is no limit to the
wait time.
Return the value returned by the call.
If the call raised an exception, this method will raise the same exception
If the future is cancelled before completing then CancelledError will be raised.
:param timeout: This method will wait up to timeout seconds before raising
a TimeoutError if function hasn't completed. If None, wait indefinitely. Default None.
:param check_only: Return None immediately if job is not complete. Default False.
:param throw_except: Reraise exception if call raised. Default true.
:param storage_handler: Storage handler to poll cloud storage. Default None.
:return: Result of the call.
:raises CancelledError: If the job is cancelled before completed.
:raises TimeoutError: If job is not complete after `timeout` seconds.
"""
if self._state == JobState.new:
raise ValueError("job not yet invoked")
if check_only:
if self._state == JobState.success or self._state == JobState.error:
return True
if self._state == JobState.success:
return self._return_val
if self._state == JobState.error:
if throw_except:
raise self._exception
else:
return None
if storage_handler is None:
storage_config = wrenconfig.extract_storage_config(wrenconfig.default())
storage_handler = storage.Storage(storage_config)
storage_utils.check_storage_path(storage_handler.get_storage_config(), self.storage_path)
call_status = storage_handler.get_call_status(self.callset_id, self.call_id)
self.status_query_count += 1
## FIXME implement timeout
if timeout is not None:
raise NotImplementedError()
if check_only:
if call_status is None:
return False
else:
return True
while call_status is None:
time.sleep(self.GET_RESULT_SLEEP_SECS)
call_status = storage_handler.get_call_status(self.callset_id, self.call_id)
self.status_query_count += 1
self._invoke_metadata['status_done_timestamp'] = time.time()
self._invoke_metadata['status_query_count'] = self.status_query_count
self.run_status = call_status # this is the remote status information
self.invoke_status = self._invoke_metadata # local status information
if call_status['exception'] is not None:
# the wrenhandler had an exception
exception_str = call_status['exception']
exception_args = call_status['exception_args']
if exception_args[0] == "WRONGVERSION":
if throw_except:
raise Exception("Pywren version mismatch: remote " + \
"expected version {}, local library is version {}".format(
exception_args[2], exception_args[3]))
return None
elif exception_args[0] == "OUTATIME":
if throw_except:
raise Exception("process ran out of time")
return None
elif exception_args[0] == "CANCELLED":
if throw_except:
raise Exception("job was cancelled")
elif exception_args[0] == "RETCODE":
if throw_except:
raise Exception("python process failed, returned a non-zero return code"
"(check stdout for information)")
return None
else:
if throw_except:
if 'exception_traceback' in call_status:
logger.error(call_status['exception_traceback'])
raise Exception(exception_str, *exception_args)
return None
# FIXME this shouldn't be called if check_only is True
call_output_time = time.time()
call_invoker_result = pickle.loads(storage_handler.get_call_output(
self.callset_id, self.call_id))
call_output_time_done = time.time()
self._invoke_metadata['download_output_time'] = call_output_time_done - call_output_time
self._invoke_metadata['download_output_timestamp'] = call_output_time_done
call_success = call_invoker_result['success']
logger.info("ResponseFuture.result() {} {} call_success {}".format(self.callset_id,
self.call_id,
call_success))
self._call_invoker_result = call_invoker_result
if call_success:
self._return_val = call_invoker_result['result']
self._set_state(JobState.success)
return self._return_val
else:
self._set_state(JobState.error)
self._exception = call_invoker_result['result']
self._traceback = (call_invoker_result['exc_type'],
call_invoker_result['exc_value'],
call_invoker_result['exc_traceback'])
if throw_except:
if call_invoker_result.get('pickle_fail', False):
logging.warning(
"there was an error pickling. The original exception: " + \
"{}\nThe pickling exception: {}".format(
call_invoker_result['exc_value'],
str(call_invoker_result['pickle_exception'])))
reraise(Exception, call_invoker_result['exc_value'],
call_invoker_result['exc_traceback'])
else:
# reraise the exception
reraise(*self._traceback)
else:
return None # nothing, don't raise, no value
def exception(self, timeout=None):
raise NotImplementedError()
def add_done_callback(self, fn):
raise NotImplementedError()
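# --- Usage sketch (added illustration; `fut` stands for a future returned by ---
# the step-function execution path, everything else is as defined above):
#     fut.wait_state()                        # block until the state machine finishes
#     output = fut.result_state(Mode="ALL")   # fetch results from storage, or a status report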
| 37.443038
| 141
| 0.61452
|
db8cb334a4c58b04bf31a26064c4ed311a5bde4e
| 3,140
|
py
|
Python
|
src/train.py
|
joelsjoyt/Animal-Classifier
|
dd4e93a5e50631a82dd2284dea18cb32d8927b82
|
[
"MIT"
] | 4
|
2020-11-09T03:48:30.000Z
|
2021-07-12T23:54:45.000Z
|
src/train.py
|
joelsjoyt/Animal-Classifier
|
dd4e93a5e50631a82dd2284dea18cb32d8927b82
|
[
"MIT"
] | null | null | null |
src/train.py
|
joelsjoyt/Animal-Classifier
|
dd4e93a5e50631a82dd2284dea18cb32d8927b82
|
[
"MIT"
] | null | null | null |
import torch
import copy
from torch import nn, optim
from torch.optim import lr_scheduler
import time
def train(dataloaders, dataset_sizes, model, device):
def train_model(model, criterion, optimizer, scheduler, num_epochs=10):
since = time.time()
best_model_wts = copy.deepcopy(model.state_dict())
best_acc = 0.0
for epoch in range(num_epochs):
print("Start of an epoch")
print('Epoch {}/{}'.format(epoch, num_epochs - 1))
print('-' * 10)
# Each epoch has a training and validation phase
for phase in ['train', 'val']:
if phase == 'train':
print("Train")
model.train() # Set model to training mode
else:
print("Validation")
model.eval() # Set model to evaluate mode
running_loss = 0.0
running_corrects = 0.0
# Iterate over data.
for inputs, labels in dataloaders[phase]:
inputs = inputs.to(device)
labels = labels.to(device)
optimizer.zero_grad()
with torch.set_grad_enabled(phase == 'train'):
outputs = model(inputs)
_, preds = torch.max(outputs, 1)
loss = criterion(outputs, labels)
if phase == 'train':
loss.backward()
optimizer.step()
running_loss += loss.item() * inputs.size(0)
running_corrects += torch.sum(preds == labels.data)
if phase == 'train':
scheduler.step()
epoch_loss = running_loss / dataset_sizes[phase]
epoch_acc = running_corrects.double() / dataset_sizes[phase]
print('{} Loss: {:.4f} Acc: {:.4f}'.format(
phase, epoch_loss, epoch_acc))
if phase == 'val' and epoch_acc > best_acc:
best_acc = epoch_acc
best_model_wts = copy.deepcopy(model.state_dict())
print()
time_elapsed = time.time() - since
print('Training complete in {:.0f}m {:.0f}s'.format(
time_elapsed // 60, time_elapsed % 60))
print('Best val Acc: {:4f}'.format(best_acc))
# load best model weights
model.load_state_dict(best_model_wts)
return model
# Training Hyperparameters
print("Prefered epochs is 20")
num_epochs = int(input("Enter your desired epochs: \t"))
model = model.to(device)
loss_fn = nn.CrossEntropyLoss()
# Optimisation of model parameters
optimizer_ft = optim.Adam(model.parameters(), lr=0.001)
# Decay LR by a factor of 0.1 every 7 epochs
exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=7, gamma=0.1)
model_ft = train_model(model, loss_fn, optimizer_ft, exp_lr_scheduler, num_epochs)
return model_ft, optimizer_ft
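# --- Usage sketch (added illustration; the model and dataloaders below are -----
# hypothetical, e.g. built with torchvision and torch.utils.data.DataLoader):
#     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
#     model = torchvision.models.resnet18(pretrained=True)
#     model.fc = nn.Linear(model.fc.in_features, num_animal_classes)
#     model_ft, optimizer_ft = train(dataloaders, dataset_sizes, model, device)
# dataloaders / dataset_sizes are dicts keyed by 'train' and 'val', as consumed
# by the phase loop above.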
| 35.681818
| 86
| 0.53121
|
37f9ff8bc26db8542a141dff508c61498bb0d17f
| 7,134
|
py
|
Python
|
src/datadog_api_client/v2/model/role_relationships.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | null | null | null |
src/datadog_api_client/v2/model/role_relationships.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | null | null | null |
src/datadog_api_client/v2/model/role_relationships.py
|
rchenzheng/datadog-api-client-python
|
2e86ac098c6f0c7fdd90ed218224587c0f8eafef
|
[
"Apache-2.0"
] | null | null | null |
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
import re # noqa: F401
import sys # noqa: F401
from datadog_api_client.v2.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from datadog_api_client.v2.model.relationship_to_permissions import RelationshipToPermissions
from datadog_api_client.v2.model.relationship_to_users import RelationshipToUsers
globals()["RelationshipToPermissions"] = RelationshipToPermissions
globals()["RelationshipToUsers"] = RelationshipToUsers
class RoleRelationships(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
"permissions": (RelationshipToPermissions,), # noqa: E501
"users": (RelationshipToUsers,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
"permissions": "permissions", # noqa: E501
"users": "users", # noqa: E501
}
_composed_schemas = {}
required_properties = set(
[
"_data_store",
"_check_type",
"_spec_property_naming",
"_path_to_item",
"_configuration",
"_visited_composed_classes",
]
)
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""RoleRelationships - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
permissions (RelationshipToPermissions): [optional] # noqa: E501
users (RelationshipToUsers): [optional] # noqa: E501
"""
_check_type = kwargs.pop("_check_type", True)
_spec_property_naming = kwargs.pop("_spec_property_naming", False)
_path_to_item = kwargs.pop("_path_to_item", ())
_configuration = kwargs.pop("_configuration", None)
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
% (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
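# --- Usage sketch (added illustration, not part of the generated file) ---------
# The model is normally instantiated by the client's deserializer, but it can be
# built directly; the attribute names follow attribute_map above, and no-arg
# constructors for the related models are assumed here:
#     rels = RoleRelationships(
#         permissions=RelationshipToPermissions(),
#         users=RelationshipToUsers(),
#     )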
| 40.534091
| 108
| 0.59672
|
e2c841234a29fb6b2b0ca97b7d50bec9c98f2c8e
| 171
|
py
|
Python
|
sample_user_model/sample_user_model/routes.py
|
JSchatzman/sample_user_model
|
9a15b1940ab340c717aeab053f75ff66c183abbc
|
[
"MIT"
] | null | null | null |
sample_user_model/sample_user_model/routes.py
|
JSchatzman/sample_user_model
|
9a15b1940ab340c717aeab053f75ff66c183abbc
|
[
"MIT"
] | null | null | null |
sample_user_model/sample_user_model/routes.py
|
JSchatzman/sample_user_model
|
9a15b1940ab340c717aeab053f75ff66c183abbc
|
[
"MIT"
] | null | null | null |
def includeme(config):
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('home', '/')
config.add_route('register', '/register')
| 28.5
| 66
| 0.690058
|
22f12f269dc7aabd06fe7da78fb8c701a8b9ff06
| 1,681
|
py
|
Python
|
test/test_user_api.py
|
MPW1412/kimai-python
|
7c89b0866b85fbc4b1092b30eca21f1be48db533
|
[
"MIT"
] | 6
|
2019-12-19T16:01:58.000Z
|
2022-01-19T18:10:16.000Z
|
test/test_user_api.py
|
MPW1412/kimai-python
|
7c89b0866b85fbc4b1092b30eca21f1be48db533
|
[
"MIT"
] | 4
|
2020-05-16T23:33:15.000Z
|
2021-07-06T20:53:32.000Z
|
test/test_user_api.py
|
MPW1412/kimai-python
|
7c89b0866b85fbc4b1092b30eca21f1be48db533
|
[
"MIT"
] | 3
|
2020-05-16T23:14:13.000Z
|
2021-06-30T08:53:11.000Z
|
# coding: utf-8
"""
Kimai 2 - API Docs
JSON API for the Kimai 2 time-tracking software. Read more about its usage in the [API documentation](https://www.kimai.org/documentation/rest-api.html) and then download a [Swagger file](doc.json) for import e.g. in Postman. Be aware: it is not yet considered stable and BC breaks might happen. # noqa: E501
OpenAPI spec version: 0.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import kimai_python
from kimai_python.api.user_api import UserApi # noqa: E501
from kimai_python.rest import ApiException
class TestUserApi(unittest.TestCase):
"""UserApi unit test stubs"""
def setUp(self):
self.api = kimai_python.api.user_api.UserApi() # noqa: E501
def tearDown(self):
pass
def test_api_users_get(self):
"""Test case for api_users_get
Returns the collection of all registered users # noqa: E501
"""
pass
def test_api_users_id_get(self):
"""Test case for api_users_id_get
Return one user entity # noqa: E501
"""
pass
def test_api_users_id_patch(self):
"""Test case for api_users_id_patch
Update an existing user # noqa: E501
"""
pass
def test_api_users_me_get(self):
"""Test case for api_users_me_get
Return the current user entity # noqa: E501
"""
pass
def test_api_users_post(self):
"""Test case for api_users_post
Creates a new user # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
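# --- Added sketch (illustrative): a stub above could be filled in roughly like --
# this, assuming a configured client and that the generated UserApi exposes
# api_users_get() for the listed operation:
#     def test_api_users_get(self):
#         users = self.api.api_users_get()
#         self.assertIsNotNone(users)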
| 24.014286
| 314
| 0.650208
|
122103829b1db813c327eb1e4aa94407c1218f89
| 3,450
|
py
|
Python
|
grr/test/grr_response_test/lib/api_helpers.py
|
BA7JCM/grr
|
c6f3b19e73e1d76a195d3c9a63e894ace6ea2508
|
[
"Apache-2.0"
] | null | null | null |
grr/test/grr_response_test/lib/api_helpers.py
|
BA7JCM/grr
|
c6f3b19e73e1d76a195d3c9a63e894ace6ea2508
|
[
"Apache-2.0"
] | null | null | null |
grr/test/grr_response_test/lib/api_helpers.py
|
BA7JCM/grr
|
c6f3b19e73e1d76a195d3c9a63e894ace6ea2508
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""Helper API-client-based functions for self-contained tests."""
import time
from typing import Tuple
import requests
from grr_api_client import api
from grr_response_core import config
from grr_response_core.lib import config_lib
class Error(Exception):
"""Module-specific base error class."""
class APIEndpointTimeoutError(Error):
"""Raised when API endpoint doesn't come online in time."""
class ClientEnrollmentTimeoutError(Error):
"""Raised when a client does not enroll in time."""
class ClientVersionTimeoutError(Error):
"""Raised then a client doesn't report a specific version in time."""
def GetFleetspeakPortsFromConfig(config_path: str) -> Tuple[int, int]:
"""Gets Fleetspeak frontend and admin ports from GRR config."""
conf = config_lib.LoadConfig(config.CONFIG.MakeNewConfig(), config_path)
frontend_port = int(
conf["Server.fleetspeak_message_listen_address"].rsplit(":")[-1])
admin_port = int(conf["Server.fleetspeak_server"].rsplit(":")[-1])
return frontend_port, admin_port
def GetAdminUIPortFromConfig(config_path: str) -> int:
"""Gets the AdminUI.port setting from a given config file."""
conf = config_lib.LoadConfig(config.CONFIG.MakeNewConfig(), config_path)
return conf["AdminUI.port"]
_WAIT_TIMEOUT_SECS = 150
_CHECK_INTERVAL = 1
def WaitForAPIEndpoint(port: int) -> api.GrrApi:
"""Waits for API endpoint to come online."""
api_endpoint = "http://localhost:%d" % port
start_time = time.time()
while time.time() - start_time < _WAIT_TIMEOUT_SECS:
try:
grrapi = api.InitHttp(api_endpoint=api_endpoint)
grrapi.ListGrrBinaries()
return grrapi
except (requests.exceptions.ConnectionError, ConnectionRefusedError):
print("Connection error (%s), waiting..." % api_endpoint)
time.sleep(_CHECK_INTERVAL)
continue
raise APIEndpointTimeoutError("API endpoint %s didn't come up." %
api_endpoint)
def WaitForClientToEnroll(grrapi: api.GrrApi) -> str:
"""Waits for an already started client to enroll.
  If the client doesn't enroll within the timeout, ClientEnrollmentTimeoutError is raised.
Args:
grrapi: GRR API object.
Returns:
A string with an enrolled client's id.
Raises:
ClientEnrollmentTimeoutError: if the client fails to enroll in time.
"""
start_time = time.time()
while time.time() - start_time < _WAIT_TIMEOUT_SECS:
clients = list(grrapi.SearchClients(query="."))
if clients:
return clients[0].client_id
print("No clients enrolled, waiting...")
time.sleep(_CHECK_INTERVAL)
raise ClientEnrollmentTimeoutError("Client didn't enroll.")
def KillClient(grrapi: api.GrrApi, client_id: str):
"""Kills a given client."""
f = grrapi.Client(client_id).CreateFlow("Kill")
f.WaitUntilDone(timeout=60)
def WaitForClientVersionGreaterThan(api_client_obj, min_version):
"""Waits until the client version becomes greater than a given value."""
start_time = time.time()
while time.time() - start_time < _WAIT_TIMEOUT_SECS:
version = api_client_obj.Get().data.agent_info.client_version
if version > min_version:
print("Got expected client version %d." % version)
return version
print("Got client version: %d, must be > %d" % (version, min_version))
time.sleep(_CHECK_INTERVAL)
raise ClientVersionTimeoutError(
"Timed out while waiting for the client version > %d." % min_version)
| 29.487179
| 77
| 0.722029
|
91bc6586e6b2b8170f0592ddaad2568d3354ef3b
| 8,849
|
py
|
Python
|
tracking/TrackerEnvironment.py
|
jccaicedo/localization-agent
|
d280acf355307b74e68dca9ec80ab293f0d18642
|
[
"MIT"
] | 8
|
2016-11-20T19:43:45.000Z
|
2020-12-09T04:58:05.000Z
|
tracking/TrackerEnvironment.py
|
jccaicedo/localization-agent
|
d280acf355307b74e68dca9ec80ab293f0d18642
|
[
"MIT"
] | 45
|
2015-05-04T20:41:05.000Z
|
2017-07-17T12:04:13.000Z
|
tracking/TrackerEnvironment.py
|
jccaicedo/localization-agent
|
d280acf355307b74e68dca9ec80ab293f0d18642
|
[
"MIT"
] | 9
|
2016-11-20T19:43:46.000Z
|
2020-09-01T21:01:54.000Z
|
__author__ = "Juan C. Caicedo, caicedo@illinois.edu"
from pybrain.utilities import Named
from pybrain.rl.environments.environment import Environment
import BoxSearchState as bs
import ConvNet as cn
import random
import numpy as np
import json
import utils as cu
import libDetection as det
import RLConfig as config
def sigmoid(x, a=1.0, b=0.0):
return 1.0/(1.0 + np.exp(-a*x + b))
def tanh(x, a=5, b=0.5, c=2.0):
return c*np.tanh(a*x + b)
TEST_TIME_OUT = config.geti('testTimeOut')
class BoxSearchEnvironment(Environment, Named):
def __init__(self, imageList, mode, groundTruthFile=None):
self.mode = mode
self.cnn = cn.ConvNet()
self.testRecord = None
self.idx = -1
self.imageList = [x.strip() for x in open(imageList)]
self.groundTruth = cu.loadBoxIndexFile(groundTruthFile)
#self.imageList = self.rankImages()
#self.imageList = self.imageList[0:10]
allImgs = set([x.strip() for x in open(config.get('allImagesList'))])
self.negativeSamples = list(allImgs.difference(set(self.groundTruth.keys())))
self.negativeEpisode = False
if self.mode == 'train':
self.negativeProbability = config.getf('negativeEpisodeProb')
random.shuffle(self.imageList)
self.loadNextEpisode()
def performAction(self, action):
self.state.performAction(action)
def loadNextEpisode(self):
self.episodeDone = False
self.negativeEpisode = False
if self.selectNegativeSample(): return
# Save actions performed during this episode
if self.mode == 'test' and self.testRecord != None:
with open(config.get('testMemory') + self.imageList[self.idx] + '.txt', 'w') as outfile:
json.dump(self.testRecord, outfile)
# Load a new episode
self.idx += 1
if self.idx < len(self.imageList):
# Initialize state
previousImageName = str(int(self.imageList[self.idx])-1)
print 'Preparing starting image {}'.format(previousImageName)
self.cnn.prepareImage(previousImageName)
print 'Initial box for {} at {}'.format(previousImageName, self.groundTruth[previousImageName])
self.startingActivations = self.cnn.getActivations( self.groundTruth[previousImageName][0])
self.cnn.prepareImage(self.imageList[self.idx])
self.state = bs.BoxSearchState(self.imageList[self.idx], groundTruth=self.groundTruth)
print 'Environment::LoadNextEpisode => Image',self.idx,self.imageList[self.idx],'('+str(self.state.visibleImage.size[0])+','+str(self.state.visibleImage.size[1])+')'
else:
if self.mode == 'train':
random.shuffle(self.imageList)
self.idx = -1
self.loadNextEpisode()
else:
print 'No more images available'
# Restart record for new episode
if self.mode == 'test':
self.testRecord = {'boxes':[], 'actions':[], 'values':[], 'rewards':[], 'scores':[]}
def selectNegativeSample(self):
if self.mode == 'train' and random.random() < self.negativeProbability:
idx = random.randint(0,len(self.negativeSamples)-1)
self.cnn.prepareImage(self.negativeSamples[idx])
self.state = bs.BoxSearchState(self.negativeSamples[idx], groundTruth=self.groundTruth)
print 'Environment::LoadNextEpisode => Random Negative:',idx,self.negativeSamples[idx]
self.negativeEpisode = True
def updatePostReward(self, reward, allDone, cover):
if self.mode == 'test':
self.testRecord['boxes'].append( self.state.box )
self.testRecord['actions'].append( self.state.actionChosen )
self.testRecord['values'].append( self.state.actionValue )
self.testRecord['rewards'].append( reward )
self.testRecord['scores'].append( self.scores[:] )
if self.state.actionChosen == bs.PLACE_LANDMARK:
#negImg = random.randint(0,len(self.negativeSamples)-1)
self.cnn.coverRegion(self.state.box) #, self.negativeSamples[negImg])
self.state.reset()
if self.state.stepsWithoutLandmark > TEST_TIME_OUT:
self.state.reset()
elif self.mode == 'train':
# We do not cover false landmarks during training
if self.state.actionChosen == bs.PLACE_LANDMARK and len(cover) > 0:
# During training we only cover a carefully selected part of the ground truth box to avoid conflicts with other boxes.
#negImg = random.randint(0,len(self.negativeSamples)-1)
self.cnn.coverRegion(cover) #, self.negativeSamples[negImg])
self.state.reset()
if allDone:
self.episodeDone = True
# Terminate episode with a single detected instance
#if self.state.actionChosen == bs.PLACE_LANDMARK:
# self.episodeDone = True
def getSensors(self):
# Make a vector represenation of the action that brought the agent to this state (9 features)
prevAction = np.zeros( (bs.NUM_ACTIONS) )
prevAction[self.state.actionChosen] = 1.0
# Compute features of visible region (4096 + 21)
activations = self.cnn.getActivations(self.state.box)
# Concatenate all info in the state representation vector
print activations[config.get('convnetLayer')].shape, prevAction.shape, self.startingActivations[config.get('convnetLayer')].shape
state = np.hstack( (activations[config.get('convnetLayer')], self.startingActivations[config.get('convnetLayer')], prevAction) )
self.scores = activations['prob'].tolist()
return {'image':self.imageList[self.idx], 'state':state, 'negEpisode':self.negativeEpisode}
def sampleAction(self):
return self.state.sampleNextAction()
def rankImages(self):
keys = self.groundTruth.keys()
keys.sort()
# Rank by number of objects in the scene (from many to few)
objectCounts = [len(self.groundTruth[k]) for k in keys]
countRank = np.argsort(objectCounts)[::-1]
countDist = dict([(i,0) for i in range(max(objectCounts)+1)])
for o in objectCounts:
countDist[o] += 1
print 'Distribution of object counts (# objects vs # images):',countDist
print 'Images with largest number of objects:',[keys[i] for i in countRank[0:10]]
# Rank by object size (from small to large)
minObjectArea = [ min(map(det.area, self.groundTruth[k])) for k in keys ]
smallRank = np.argsort(minObjectArea)
intervals = [ (500*400/i) for i in range(1,21) ]
sizeDist = dict([ (i,0) for i in intervals ])
for a in minObjectArea:
counted = False
for r in intervals:
if a >= r:
sizeDist[r] += 1
counted = True
break
if not counted: sizeDist[r] += 1
print 'Distribution of smallest objects area (area vs # images):',[ (i,sizeDist[i]) for i in intervals]
print 'Images with the smallest objects:',[keys[i] for i in smallRank[0:10]]
# Rank by object size (from large to small)
maxObjectArea = [ max(map(det.area, self.groundTruth[k])) for k in keys ]
bigRank = np.argsort(maxObjectArea)[::-1]
intervals = [ (500*400/i) for i in range(1,21) ]
sizeDist = dict([ (i,0) for i in intervals ])
for a in maxObjectArea:
counted = False
for r in intervals:
if a >= r:
sizeDist[r] += 1
counted = True
break
if not counted: sizeDist[r] += 1
print 'Distribution of biggest objects area (area vs # images):',[ (i,sizeDist[i]) for i in intervals]
print 'Images with the biggest objects:',[keys[i] for i in bigRank[0:10]]
# Rank images by instance occlusion (from very occluded to isolated)
maxInstanceOcclusion = []
for k in keys:
if len(self.groundTruth[k]) == 1:
maxInstanceOcclusion.append(0)
else:
maxIoU = 0
for i in range(len(self.groundTruth[k])):
for j in range(i+1,len(self.groundTruth[k])):
iou = det.IoU(self.groundTruth[k][i], self.groundTruth[k][j])
if iou > maxIoU:
maxIoU = iou
maxInstanceOcclusion.append(maxIoU)
occlusionRank = np.argsort(maxInstanceOcclusion)[::-1]
intervals = [ 1.0/i for i in range(1,21) ]
occlusionDist = dict([(i,0) for i in intervals])
for o in maxInstanceOcclusion:
counted = False
for r in intervals:
if o >= r:
occlusionDist[r] += 1
counted = True
break
if not counted: occlusionDist[r] += 1
print 'Distribution of object occlusion (occlusion vs # images):',[(i,occlusionDist[i]) for i in intervals]
print 'Images with the most occluded objects:',[keys[i] for i in occlusionRank[0:10]]
# Rank combination
rank = dict([(k,0) for k in keys])
for i in range(len(keys)):
rank[ keys[ countRank[i] ] ] += i
rank[ keys[ smallRank[i]] ] += i
rank[ keys[ occlusionRank[i] ] ] += i
values = [ rank[i] for i in keys ]
complexityRank = np.argsort(values)
print 'More complex images:',[keys[i] for i in complexityRank[0:10]]
return [keys[i] for i in occlusionRank]
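The occlusion ranking above leans on det.IoU, which lives in the project's detection utilities and is not shown in this excerpt. Below is a minimal standalone sketch of an intersection-over-union helper, assuming [x1, y1, x2, y2] box coordinates; the project's own det module may use a different layout.

def iou(boxA, boxB):
    # Overlap rectangle between the two boxes
    x1, y1 = max(boxA[0], boxB[0]), max(boxA[1], boxB[1])
    x2, y2 = min(boxA[2], boxB[2]), min(boxA[3], boxB[3])
    inter = max(0.0, x2 - x1) * max(0.0, y2 - y1)
    areaA = (boxA[2] - boxA[0]) * (boxA[3] - boxA[1])
    areaB = (boxB[2] - boxB[0]) * (boxB[3] - boxB[1])
    union = areaA + areaB - inter
    return inter / union if union > 0 else 0.0

# Two boxes sharing half of each area: iou([0, 0, 10, 10], [5, 0, 15, 10]) is about 0.33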
avg_line_length: 42.748792 | max_line_length: 171 | alphanum_fraction: 0.666177

hexsha: 5320632097d6db84757136f5e591e5df0e7bb5f1 | size: 3,624 | ext: py | lang: Python
repo_path: tests/test_visitors/test_ast/test_naming/test_naming.py
repo_name: nixphix/wemake-python-styleguide | repo_head_hexsha: 95f16ff9394393444685391f957fdce04a6177d6 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# -*- coding: utf-8 -*-
import pytest
from wemake_python_styleguide.constants import VARIABLE_NAMES_BLACKLIST
from wemake_python_styleguide.violations.naming import (
ConsecutiveUnderscoresInNameViolation,
PrivateNameViolation,
TooShortNameViolation,
UnderscoredNumberNameViolation,
WrongVariableNameViolation,
)
from wemake_python_styleguide.visitors.ast.naming import WrongNameVisitor
@pytest.mark.parametrize('wrong_name', VARIABLE_NAMES_BLACKLIST)
def test_wrong_variable_name(
assert_errors,
assert_error_text,
parse_ast_tree,
naming_template,
default_options,
mode,
wrong_name,
):
"""Ensures that wrong names are not allowed."""
tree = parse_ast_tree(mode(naming_template.format(wrong_name)))
visitor = WrongNameVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [WrongVariableNameViolation])
assert_error_text(visitor, wrong_name)
def test_short_variable_name(
assert_errors,
assert_error_text,
parse_ast_tree,
naming_template,
default_options,
mode,
):
"""Ensures that short names are not allowed."""
short_name = 'y'
tree = parse_ast_tree(mode(naming_template.format(short_name)))
visitor = WrongNameVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [TooShortNameViolation])
assert_error_text(visitor, short_name)
def test_private_variable_name(
assert_errors,
assert_error_text,
parse_ast_tree,
naming_template,
default_options,
mode,
):
"""Ensures that private names are not allowed."""
private_name = '__private'
tree = parse_ast_tree(mode(naming_template.format(private_name)))
visitor = WrongNameVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [PrivateNameViolation])
assert_error_text(visitor, private_name)
@pytest.mark.parametrize('underscored_name', [
'with__underscore',
'multiple__under__score',
'triple___underscore',
])
def test_underscored_variable_name(
assert_errors,
assert_error_text,
parse_ast_tree,
naming_template,
default_options,
mode,
underscored_name,
):
"""Ensures that underscored names are not allowed."""
tree = parse_ast_tree(mode(naming_template.format(underscored_name)))
visitor = WrongNameVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [ConsecutiveUnderscoresInNameViolation])
assert_error_text(visitor, underscored_name)
@pytest.mark.parametrize('number_suffix', [
'number_5',
'between_45_letters',
'with_multiple_groups_4_5',
])
def test_number_prefix_variable_name(
assert_errors,
assert_error_text,
parse_ast_tree,
naming_template,
default_options,
mode,
number_suffix,
):
"""Ensures that number suffix names are not allowed."""
tree = parse_ast_tree(mode(naming_template.format(number_suffix)))
visitor = WrongNameVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [UnderscoredNumberNameViolation])
assert_error_text(visitor, number_suffix)
@pytest.mark.parametrize('correct_name', [
'snake_case',
'_protected_or_unused',
'with_number5',
'xy',
])
def test_naming_correct(
assert_errors,
parse_ast_tree,
naming_template,
default_options,
mode,
correct_name,
):
"""Ensures that correct names are allowed."""
tree = parse_ast_tree(mode(naming_template.format(correct_name)))
visitor = WrongNameVisitor(default_options, tree=tree)
visitor.run()
assert_errors(visitor, [])
avg_line_length: 25.342657 | max_line_length: 73 | alphanum_fraction: 0.738687

hexsha: 7b3c549cf71f430d1e220ce54ae69ab8e0ae4c6d | size: 369 | ext: py | lang: Python
repo_path: modules/visual_feature_refinement.py
repo_name: codeboy5/cvpr20-scatter-text-recognizer | repo_head_hexsha: 4bd6cfbd4d7f64ce11864514f6b6b0646267c285 | licenses: ["Apache-2.0"]
max_stars_count: 63 (2020-10-16T09:28:05.000Z to 2022-03-27T15:52:16.000Z)
max_issues_count: 7 (2020-09-18T03:31:05.000Z to 2022-03-03T00:20:27.000Z)
max_forks_count: 8 (2020-09-18T03:13:55.000Z to 2022-02-27T13:34:33.000Z)
import torch
import torch.nn as nn
class Refinement(nn.Module):
def __init__(self, input_size, output_size):
super(Refinement, self).__init__()
self.iid = nn.Linear(input_size, input_size)
self.decoder = nn.Linear(input_size, output_size)
def forward(self, input):
x = self.iid(input)
x = self.decoder(x)
return x
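A minimal usage sketch of the module above; the 512-dimensional feature size and the batch size of 8 are illustrative assumptions, not values taken from the original repository.

if __name__ == "__main__":
    refiner = Refinement(input_size=512, output_size=512)
    feats = torch.randn(8, 512)   # a batch of visual feature vectors
    refined = refiner(feats)      # two stacked linear layers, no nonlinearity in between
    print(refined.shape)          # torch.Size([8, 512])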
avg_line_length: 33.545455 | max_line_length: 57 | alphanum_fraction: 0.653117

hexsha: 68e2a50b61ebabbea8046d574da99ea24f3147b2 | size: 1,309 | ext: py | lang: Python
repo_path: pype/plugins/blender/create/create_action.py
repo_name: tokejepsen/pype | repo_head_hexsha: 8f2b2b631cc5d3ad93eeb5ad3bc6110d32466ed3 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
"""Create an animation asset."""
import bpy
from avalon import api
from avalon.blender import Creator, lib
import pype.blender.plugin
class CreateAction(Creator):
"""Action output for character rigs"""
name = "actionMain"
label = "Action"
family = "action"
icon = "male"
def process(self):
asset = self.data["asset"]
subset = self.data["subset"]
name = pype.blender.plugin.asset_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
self.data['task'] = api.Session.get('AVALON_TASK')
lib.imprint(collection, self.data)
if (self.options or {}).get("useSelection"):
for obj in lib.get_selection():
if (obj.animation_data is not None
and obj.animation_data.action is not None):
empty_obj = bpy.data.objects.new(name=name,
object_data=None)
empty_obj.animation_data_create()
empty_obj.animation_data.action = obj.animation_data.action
empty_obj.animation_data.action.name = name
collection.objects.link(empty_obj)
return collection
avg_line_length: 31.926829 | max_line_length: 79 | alphanum_fraction: 0.594347

hexsha: 0131760fe6f56c145398edf040ce388f908f65b2 | size: 1,513 | ext: py | lang: Python
repo_path: devt/generate_A1_SourceCode.py
repo_name: mdstepha/SimIMA | repo_head_hexsha: f4ef42022a8503de3dd657c673e34598f29ae807 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
#!/usr/local/bin/python3
# This script creates file 'A1_SourceCode.tex' in current directory (for thesis writing)
import os
classes = os.listdir('../src/classes')
classes = [x for x in classes if x.endswith('.m')]
classes.sort()
funns = os.listdir('../src/functions') + os.listdir('../src/functions/devt')
funns = [x for x in funns if x.endswith('.m')]
funns = funns + ['getSimvmaPath.m', 'initialize.m', 'sl_customization.m']
funns.sort()
content = """\chapter{Source Code}
\label{chapter:appendix-source-code}
This appendix presents the MATLAB implementation of various classes and functions used in the project.
"""
content += "\n\n\section{Class Definitions}\n\label{section:class-definitions}\n"
for i, x in enumerate(classes):
# print(x)
# if i!=0:
# content += f"\n\\newpage"
x = x[:-2] # removing trailing .m
x_latex = x.replace('_', '\_')
content += f"\n\lstinputlisting[caption={{{x_latex}.m class definition}}, captionpos=t,label={{lst:code-{x}}}]{{Codes/classes/{x}.m}}"
content += "\n\n\\newpage\n\section{Function Definitions}\n\label{section:function-definitions}\n"
for i, x in enumerate(funns):
# print(x)
# if i!=0:
# content += f"\n\\newpage"
x = x[:-2] # removing trailing .m
x_latex = x.replace('_', '\_')
content += f"\n\lstinputlisting[caption={{{x_latex}.m function definition}}, captionpos=t,label={{lst:code-{x}}}]{{Codes/functions/{x}.m}}"
with open('A1_SourceCode.tex', 'w') as file:
file.write(content)
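For reference, a hedged illustration of the listing command that the loops above emit; the file name my_helper.m is made up.

example = 'my_helper.m'[:-2]                 # strip the trailing .m, as above
example_latex = example.replace('_', '\\_')  # escape underscores for the caption only
print(f"\\lstinputlisting[caption={{{example_latex}.m function definition}}, "
      f"captionpos=t,label={{lst:code-{example}}}]{{Codes/functions/{example}.m}}")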
avg_line_length: 32.891304 | max_line_length: 143 | alphanum_fraction: 0.656312

hexsha: bf95387b6930a49fb7d5548f274fbcd4f4426c9d | size: 4,963 | ext: py | lang: Python
repo_path: applications/RANSApplication/tests/test_RANSApplication_mpi.py
repo_name: lkusch/Kratos | repo_head_hexsha: e8072d8e24ab6f312765185b19d439f01ab7b27b | licenses: ["BSD-4-Clause"]
max_stars_count: 778 (2017-01-27T16:29:17.000Z to 2022-03-30T03:01:51.000Z)
max_issues_count: 6,634 (2017-01-15T22:56:13.000Z to 2022-03-31T15:03:36.000Z)
max_forks_count: 224 (2017-02-07T14:12:49.000Z to 2022-03-06T23:09:34.000Z)
# Importing the Kratos Library
import KratosMultiphysics as KM
if not KM.IsDistributedRun():
raise Exception("This test script can only be executed in MPI!")
# Import Kratos "wrapper" for unittests
import KratosMultiphysics.KratosUnittest as KratosUnittest
# Import the tests or test_classes to create the suits
# process test_classes
from custom_process_tests import CustomProcessTest
# flow solver test_classes
from incompressible_potential_flow_solver_formulation_tests import IncompressiblePotentialFlowSolverFormulationTest
from monolithic_velocity_pressure_formulation_tests import MonolithicVelocityPressureFormulationTest
from fractional_step_velocity_pressure_formulation_tests import FractionalStepVelocityPressureFormulationTest
# turbulence model test_classes
### k-epsilon test_classes
from monolithic_k_epsilon_formulation_tests import MonolithicKEpsilonTest
from fractional_step_k_epsilon_formulation_tests import FractionalStepKEpsilonTest
### k_omega test_classes
from monolithic_k_omega_formulation_tests import MonolithicKOmegaTest
from fractional_step_k_omega_formulation_tests import FractionalStepKOmegaTest
### k_omega test_classes
from monolithic_k_omega_sst_formulation_tests import MonolithicKOmegaSSTTest
from fractional_step_k_omega_sst_formulation_tests import FractionalStepKOmegaSSTTest
def AssembleTestSuites():
''' Populates the test suites to run.
Populates the test suites to run. At least, it should populate the suites:
"small", "nightly" and "all"
Return
------
suites: A dictionary of suites
The set of suites with its test_cases added.
'''
suites = KratosUnittest.KratosSuites
### Small MPI tests ########################################################
smallMPISuite = suites['mpi_small']
# adding custom process tests
# smallMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([CustomProcessTest]))
# add symbolic mpi small tests for mpi small suite
# smallMPISuite.addTest(FractionalStepKOmegaSSTTest("testRfcVelocityTransient"))
# smallMPISuite.addTest(MonolithicKOmegaSSTTest("testRfcVelocityTransient"))
# smallMPISuite.addTest(FractionalStepKOmegaSSTTest("testVMSRfcVelocityTransient"))
# smallMPISuite.addTest(MonolithicKOmegaSSTTest("testVMSRfcVelocityTransient"))
# smallMPISuite.addTest(MonolithicKOmegaSSTTest("testQSVMSRfcVelocityTransient"))
### Nightly MPI tests ######################################################
nightlyMPISuite = suites['mpi_nightly']
nightlyMPISuite.addTests(smallMPISuite)
# adding incompressible potential flow solver tests
# nightlyMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([IncompressiblePotentialFlowSolverFormulationTest]))
# adding monolithic flow solver tests
# nightlyMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([MonolithicVelocityPressureFormulationTest]))
# adding fractional step flow solver tests
# nightlyMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([FractionalStepVelocityPressureFormulationTest]))
# adding monolithic k-epsilon high re tests
# nightlyMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([MonolithicKEpsilonTest]))
# adding fractional step k-epsilon high re tests
# nightlyMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([FractionalStepKEpsilonTest]))
# adding monolithic k-omega high re tests
# nightlyMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([MonolithicKOmegaTest]))
# adding fractional step k-omega high re tests
# nightlyMPISuite.addTests(KratosUnittest.TestLoader().loadTestsFromTestCases([FractionalStepKOmegaTest]))
# adding monolithic k-omega-sst high re tests
# nightlyMPISuite.addTest(MonolithicKOmegaSSTTest("testAfcTkeSteady"))
# nightlyMPISuite.addTest(MonolithicKOmegaSSTTest("testAfcVelocitySteady"))
# nightlyMPISuite.addTest(MonolithicKOmegaSSTTest("testRfcTkeSteady"))
# nightlyMPISuite.addTest(MonolithicKOmegaSSTTest("testRfcVelocitySteady"))
# nightlyMPISuite.addTest(MonolithicKOmegaSSTTest("testRfcTkeTransient"))
# adding fractional step k-omega-sst high re tests
# nightlyMPISuite.addTest(FractionalStepKOmegaSSTTest("testAfcTkeSteady"))
# nightlyMPISuite.addTest(FractionalStepKOmegaSSTTest("testAfcVelocitySteady"))
# nightlyMPISuite.addTest(FractionalStepKOmegaSSTTest("testRfcTkeSteady"))
# nightlyMPISuite.addTest(FractionalStepKOmegaSSTTest("testRfcVelocitySteady"))
# nightlyMPISuite.addTest(FractionalStepKOmegaSSTTest("testRfcTkeTransient"))
### Full MPI set ###########################################################
allMPISuite = suites['mpi_all']
allMPISuite.addTests(nightlyMPISuite) # already contains the smallMPISuite
return suites
if __name__ == '__main__':
KratosUnittest.runTests(AssembleTestSuites())
avg_line_length: 46.820755 | max_line_length: 134 | alphanum_fraction: 0.786218

hexsha: 24e003b9a40db41e449ba30125d64451acb784a3 | size: 1,075 | ext: py | lang: Python
repo_path: backend/auth.py
repo_name: tagboy07/cs498-Final | repo_head_hexsha: 13cb3d2fd8785e5169bc756bc12f3cecddca4776 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
import ldap
import sys
from bottle import post, request, run
# Connect to UIUC ActiveDirectory over StartTLS
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
ldap.set_option(ldap.OPT_REFERRALS, 0)
def login():
l = ldap.initialize("ldap://ad.uillinois.edu")
l.set_option(ldap.OPT_REFERRALS, 0)
l.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
l.set_option(ldap.OPT_X_TLS,ldap.OPT_X_TLS_DEMAND)
l.set_option( ldap.OPT_X_TLS_DEMAND, True )
l.set_option( ldap.OPT_DEBUG_LEVEL, 255 )
l.start_tls_s()
username = 'CN=' + 'mcheung3' + ',OU=People,DC=ad,DC=uillinois,DC=edu'
password = 'test'
try:
l.simple_bind_s(username, password)
except ldap.INVALID_CREDENTIALS:
# -1 means the username or password was incorrect
l.unbind_s();
return "-1"
except ldap.LDAPError as e:
# -2 means there was some other error
print(e)
l.unbind_s();
return "-2"
# 0 means login successful
l.unbind_s();
return "0"
if(login() != "0"):
sys.exit("Login Failed")
avg_line_length: 29.054054 | max_line_length: 74 | alphanum_fraction: 0.667907

hexsha: dee5205fe92f111267c24401200767028e1eb64b | size: 837 | ext: py | lang: Python
repo_path: packs/asserts/actions/object_key_number_equals.py
repo_name: AnushkaKamerkar/st2tests | repo_head_hexsha: 19988c079ac39963bce160c616cacdb7915038e8 | licenses: ["Apache-2.0"]
max_stars_count: 4 (2015-08-26T12:06:30.000Z to 2017-11-04T16:15:07.000Z)
max_issues_count: 90 (2015-06-06T01:16:20.000Z to 2021-10-30T12:10:39.000Z)
max_forks_count: 14 (2015-06-15T01:48:04.000Z to 2022-01-06T03:23:45.000Z)
import sys
from st2actions.runners.pythonrunner import Action
__all__ = [
'AssertObjectKeyIntEquals'
]
class AssertObjectKeyIntEquals(Action):
def run(self, object, key, value):
if not isinstance(object, dict):
raise ValueError('object should be of type "dict".')
if key not in object:
sys.stderr.write('KEY %s DOESN\'T EXIST.' % key)
raise ValueError('Key %s doesn\'t exist in object %s' % (key, object))
result = (int(object[key]) == int(value))
if result:
sys.stdout.write('EQUAL.')
else:
sys.stdout.write('NOT EQUAL.')
sys.stderr.write(' Expected: %s, Original: %s' % (value, object[key]))
raise ValueError('Value not equal. Expected "%s", got "%s". ' % (value, object[key]))
return result
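A standalone sketch of the same key/value check, kept free of the StackStorm Action runner so it can be exercised anywhere; the function name and the sample dictionaries are illustrative only.

def object_key_int_equals(obj, key, value):
    # Mirrors the logic above: the key must exist and both sides are coerced to int.
    if not isinstance(obj, dict):
        raise ValueError('object should be of type "dict".')
    if key not in obj:
        raise ValueError('Key %s doesn\'t exist in object %s' % (key, obj))
    return int(obj[key]) == int(value)

assert object_key_int_equals({'count': '5'}, 'count', 5) is True
assert object_key_int_equals({'count': 4}, 'count', '5') is False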
avg_line_length: 33.48 | max_line_length: 97 | alphanum_fraction: 0.591398

hexsha: 528943c7b0c216aceee95fa9ff5f7d73e56f8d6a | size: 1,427 | ext: py | lang: Python
repo_path: core/migrations/0006_auto_20190421_1833.py
repo_name: MakuZo/nutrigo | repo_head_hexsha: e50e10d497bcf9e01294565d42012d777f5c98d0 | licenses: ["MIT"]
max_stars_count: 30 (2019-03-28T18:01:58.000Z to 2022-02-26T02:19:28.000Z)
max_issues_count: 8 (2019-06-06T19:33:08.000Z to 2022-02-10T13:10:34.000Z)
max_forks_count: 10 (2019-04-04T19:19:28.000Z to 2021-06-05T05:29:40.000Z)
# Generated by Django 2.1.7 on 2019-04-21 16:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("core", "0005_foodnutrition")]
operations = [
migrations.RenameField(
model_name="foodweight", old_name="weight", new_name="value"
),
migrations.RemoveField(model_name="food", name="desc_long"),
migrations.RemoveField(model_name="food", name="desc_short"),
migrations.RemoveField(model_name="food", name="manufac_name"),
migrations.RemoveField(model_name="food", name="refuse_perc"),
migrations.RemoveField(model_name="food", name="sci_name"),
migrations.RemoveField(model_name="foodnutrition", name="max_val"),
migrations.RemoveField(model_name="foodnutrition", name="min_val"),
migrations.RemoveField(model_name="foodweight", name="data_points"),
migrations.RemoveField(model_name="foodweight", name="deviation"),
migrations.RemoveField(model_name="foodweight", name="seq"),
migrations.AddField(
model_name="food",
name="description",
field=models.CharField(blank=True, max_length=200, null=True),
),
migrations.AddField(
model_name="food",
name="name",
field=models.CharField(default="null", max_length=100),
preserve_default=False,
),
]
avg_line_length: 39.638889 | max_line_length: 76 | alphanum_fraction: 0.650315

hexsha: e895b2704b40fe4c08535dffa7787d4280f042fd | size: 13,234 | ext: py | lang: Python
repo_path: server/ahj_app/tests/test_admin.py
repo_name: reepoi/ahj-registry | repo_head_hexsha: d4498bccfe114b19acca4f931d29f30fbc65a803 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
from django.contrib.auth import hashers
from django.http import QueryDict
import ahj_app.admin.actions as admin_actions
import ahj_app.admin.form as admin_form
from django.utils import timezone
from fixtures import *
import pytest
import datetime
from ahj_app.models import AHJ, User, APIToken, AHJUserMaintains, Edit, Location, Address
from ahj_app.models_field_enums import LocationDeterminationMethod
from ahj_app.views_edits import apply_edits
@pytest.mark.django_db
def test_get_value_or_primary_key():
ldm = LocationDeterminationMethod.objects.create(Value='GPS')
location = Location.objects.create(Description='desc', LocationDeterminationMethod=ldm)
address = Address.objects.create(LocationID=location)
assert admin_actions.get_value_or_primary_key(location, 'Description') == 'desc'
assert admin_actions.get_value_or_primary_key(location, 'LocationDeterminationMethod') == 'GPS'
assert admin_actions.get_value_or_primary_key(address, 'LocationID') == location.LocationID
assert admin_actions.get_value_or_primary_key(address, 'AddressType') == ''
@pytest.mark.parametrize(
'password', [
('new_user_password')
]
)
@pytest.mark.django_db
def test_reset_password(password, create_user):
user = create_user()
admin_actions.reset_password(user, password)
salt = user.password.split('$')[2]
assert hashers.make_password(password, salt) == user.password
@pytest.mark.django_db
def test_partition_by_field_users_by_api_token(create_user):
for x in range(0, 10):
if x % 2 == 0:
create_user().api_token.delete()
else:
create_user()
user_queryset = User.objects.all()
those_with_field_value, those_without_field_value = admin_actions.partition_by_field(user_queryset, 'api_token', None)
assert None in those_with_field_value.values_list('api_token', flat=True)
assert None not in those_without_field_value.values_list('api_token', flat=True)
@pytest.mark.django_db
def test_process_generate_api_token_data(create_user):
form_prefix = 'form-{0}'
post_data_dict = {}
post_query_dict = dict_make_query_dict(post_data_dict)
users = []
dates = []
for x in range(5):
user = create_user()
date = timezone.now() + datetime.timedelta(days=x)
date_strs = str(date.date()).split('-')
post_query_dict.update({'user_to_form': f'{user.UserID}.{form_prefix.format(x)}',
f'{form_prefix.format(x)}-ExpirationDate_year': date_strs[0],
f'{form_prefix.format(x)}-ExpirationDate_month': date_strs[1],
f'{form_prefix.format(x)}-ExpirationDate_day': date_strs[2]})
users.append(user)
dates.append(date)
results = admin_actions.process_generate_api_token_data(post_query_dict)
for x in range(len(users)):
assert results[x]['user'].UserID == users[x].UserID
assert results[x]['expires'].date() == dates[x].date()
@pytest.mark.parametrize(
'form_value, expected_output', [
('On', True),
('Off', False),
('DoNothing', None)
]
)
def test_set_toggle(form_value, expected_output):
assert admin_actions.set_toggle(form_value) == expected_output
@pytest.mark.parametrize(
'form_value, expected_output', [
('on', True),
('off', False),
('other_value', False)
]
)
def test_set_delete(form_value, expected_output):
assert admin_actions.set_delete(form_value) == expected_output
@pytest.mark.parametrize(
'delete', [
True,
False,
None
]
)
@pytest.mark.django_db
def test_delete_toggle_api_token_is_deleted(delete, create_user_with_active_api_token):
user = create_user_with_active_api_token()
admin_actions.delete_toggle_api_token(user, delete=delete)
assert APIToken.objects.filter(user=user).exists() != (delete if delete is not None else False)
@pytest.mark.parametrize(
'toggle', [
True,
False,
None
]
)
@pytest.mark.django_db
def test_delete_toggle_api_token_is_toggled(toggle, create_user_with_active_api_token):
user = create_user_with_active_api_token()
admin_actions.delete_toggle_api_token(user, toggle=toggle)
assert APIToken.objects.get(user=user).is_active == (toggle if toggle is not None else True)
@pytest.mark.django_db
def test_delete_toggle_api_token_user_has_no_api_token(create_user):
user = create_user()
user.api_token.delete()
admin_actions.delete_toggle_api_token(user, toggle=True, delete=False)
assert not APIToken.objects.filter(user=user).exists()
def dict_make_query_dict(given_dict):
qd = QueryDict('', mutable=True)
qd.update(given_dict)
return qd
@pytest.mark.parametrize(
'expect_toggle, expect_delete', [
(None, None),
(None, True),
(None, False),
(True, None),
(True, True),
(True, False),
(False, None),
(False, True),
(False, False)
]
)
@pytest.mark.django_db
def test_process_delete_toggle_api_token_data(expect_toggle, expect_delete, create_user):
if expect_toggle:
toggle_text = 'On'
elif expect_toggle is False:
toggle_text = 'Off'
else:
toggle_text = 'DoNothing'
if expect_delete:
delete_text = 'on'
else:
delete_text = ''
users = []
form_prefix = 'form-{0}'
post_data_dict = {}
post_query_dict = dict_make_query_dict(post_data_dict)
for x in range(5):
user = create_user()
users.append(user)
post_query_dict.update({'user_to_form': f'{user.UserID}.{form_prefix.format(x)}',
f'{form_prefix.format(x)}-toggle': toggle_text,
f'{form_prefix.format(x)}-delete_token': delete_text})
results = admin_actions.process_delete_toggle_api_token_data(post_query_dict)
for x in range(len(users)):
assert results[x]['user'].UserID == users[x].UserID
assert results[x]['toggle'] == expect_toggle
assert results[x]['delete'] == (expect_delete if expect_delete is not None else False)
@pytest.mark.parametrize(
'num_existing, num_kept, num_new', [
# Remove all
(3, 0, 0),
# Keep all
(3, 3, 0),
# Add all new
(0, 0, 3),
# Remove one
(3, 2, 0),
# Remove one, add new one
(3, 2, 1),
# Add one
(2, 2, 1)
]
)
@pytest.mark.django_db
def test_assign_ahj_official_status(num_existing, num_kept, num_new, ahj_obj_factory, create_user):
"""
num_existing: number of AHJs a user is an AHJ Official of
num_kept: number of AHJs a user is still an AHJ Official of
num_new: number of AHJs a user is newly assigned as an AHJ Official of
"""
user = create_user()
num_existing_ahjs = []
num_kept_ahjs = []
num_new_ahjs = []
# Add the starting relations for what the user is an AHJ Official of
for x in range(num_existing):
ahj = ahj_obj_factory()
num_existing_ahjs.append(ahj)
AHJUserMaintains.objects.create(UserID=user, AHJPK=ahj, MaintainerStatus=True)
# Track which AHJs the user should still be an AHJ Official of
for x in range(num_kept):
num_kept_ahjs.append(num_existing_ahjs[x])
# Track the AHJs the user is newly assigned to be an AHJ Official of
for x in range(num_new):
ahj = ahj_obj_factory()
num_new_ahjs.append(ahj)
# Test applying the changes
admin_form.assign_ahj_official_status(user, num_kept_ahjs + num_new_ahjs)
all_time_assigned_ahjs = AHJUserMaintains.objects.filter(UserID=user)
assigned_ahjs = all_time_assigned_ahjs.filter(MaintainerStatus=True).values_list('AHJPK', flat=True)
former_ahjs = all_time_assigned_ahjs.filter(MaintainerStatus=False).values_list('AHJPK', flat=True)
for ahj in num_kept_ahjs + num_new_ahjs:
assert ahj.AHJPK in assigned_ahjs
for ahj in (num_existing_ahjs[num_kept:] if num_kept < len(num_existing_ahjs) else []):
assert ahj.AHJPK in former_ahjs
@pytest.mark.django_db
def test_assign_ahj_official_status__reassign_ahj(create_user, ahj_obj):
user = create_user()
assignment = AHJUserMaintains.objects.create(UserID=user, AHJPK=ahj_obj, MaintainerStatus=False)
admin_form.assign_ahj_official_status(user, [ahj_obj])
assignment = AHJUserMaintains.objects.get(MaintainerID=assignment.MaintainerID)
assert assignment.MaintainerStatus is True
@pytest.mark.parametrize(
'date_str', [
str(timezone.now()),
str(timezone.make_aware(datetime.datetime(1, 1, 1))),
''
]
)
@pytest.mark.django_db
def test_set_date_from_str(date_str):
try:
date = timezone.make_aware(datetime.datetime.strptime(date_str, '%Y-%m-%d'))
except ValueError:
date = None
result = admin_actions.set_date_from_str(date_str)
assert result == date
@pytest.mark.parametrize(
'date_effective', [
timezone.now(),
timezone.now() + datetime.timedelta(days=1),
timezone.make_aware(datetime.datetime(1, 1, 1))
]
)
@pytest.mark.django_db
def test_process_approve_edits_data(date_effective, create_user, ahj_obj):
form_prefix = 'form-{0}'
post_data_dict = {}
post_query_dict = dict_make_query_dict(post_data_dict)
edits = []
approving_user = create_user()
for x in range(5):
user = create_user()
edit = Edit.objects.create(AHJPK=ahj_obj, ChangedBy=user, EditType='A', SourceTable='AHJ',
SourceColumn='BuildingCode', SourceRow=ahj_obj.pk,
DateRequested=timezone.now())
edits.append(edit)
date_strs = str(date_effective.date()).split('-')
post_query_dict.update({'edit_to_form': f'{edit.EditID}.{form_prefix.format(x)}',
f'{form_prefix.format(x)}-DateEffective_year': date_strs[0],
f'{form_prefix.format(x)}-DateEffective_month': date_strs[1],
f'{form_prefix.format(x)}-DateEffective_day': date_strs[2]})
results = admin_actions.process_approve_edits_data(post_query_dict, approving_user)
for x in range(len(edits)):
assert results[x]['edit'].EditID == edits[x].EditID
assert results[x]['approved_by'].UserID == approving_user.UserID
if date_effective <= timezone.now():
date_effective = timezone.now()
assert results[x]['date_effective'].date() == date_effective.date()
assert results[x]['apply_now'] == (date_effective.date() == datetime.date.today())
@pytest.mark.django_db
def test_process_approve_edits_data_invalid_date_effective(create_user, ahj_obj):
form_prefix = 'form-{0}'
post_data_dict = {}
post_query_dict = dict_make_query_dict(post_data_dict)
edits = []
approving_user = create_user()
for x in range(5):
user = create_user()
edit = Edit.objects.create(AHJPK=ahj_obj, ChangedBy=user, EditType='U', SourceTable='AHJ',
SourceColumn='AHJName', SourceRow=ahj_obj.pk, NewValue='NewName',
DateRequested=timezone.now())
edits.append(edit)
post_query_dict.update({'edit_to_form': f'{edit.EditID}.{form_prefix.format(x)}',
f'{form_prefix.format(x)}-DateEffective_year': '',
f'{form_prefix.format(x)}-DateEffective_month': '',
f'{form_prefix.format(x)}-DateEffective_day': ''})
results = admin_actions.process_approve_edits_data(post_query_dict, approving_user)
assert len(results) == 0
@pytest.mark.parametrize(
'apply_now', [
True,
False
]
)
@pytest.mark.django_db
def test_approve_edit(apply_now, create_user, ahj_obj):
user = create_user()
edit = Edit.objects.create(AHJPK=ahj_obj, ChangedBy=user, EditType='U', SourceTable='AHJ',
SourceColumn='AHJName', SourceRow=ahj_obj.pk, NewValue='NewName',
DateRequested=timezone.now())
admin_actions.approve_edit(edit, user, timezone.now(), apply_now)
edit = Edit.objects.get(EditID=edit.EditID)
ahj = AHJ.objects.get(AHJPK=ahj_obj.pk)
assert edit.ApprovedBy.UserID == user.UserID
assert edit.DateEffective.date() == datetime.date.today()
assert edit.ReviewStatus == 'A'
if apply_now:
assert ahj.AHJName == 'NewName'
else:
ahj = AHJ.objects.get(AHJPK=ahj_obj.pk)
assert ahj.AHJName != 'NewName'
# NOTE: apply_edits is tested separately in test_view_edits.py
apply_edits()
ahj = AHJ.objects.get(AHJPK=ahj_obj.pk)
assert ahj.AHJName == 'NewName'
@pytest.mark.django_db
def test_build_url_parameters_for_change_list_filtering(ahj_obj_factory):
ahj1 = ahj_obj_factory()
ahj2 = ahj_obj_factory()
ahj3 = ahj_obj_factory()
assert admin_actions.build_url_parameters_for_change_list_filtering(AHJ.objects.all(), [admin_actions.field_key_pair('AHJPK', 'AHJPK')]) == f'?AHJPK={ahj1.pk},{ahj2.pk},{ahj3.pk}&'
avg_line_length: 37.490085 | max_line_length: 184 | alphanum_fraction: 0.671301

hexsha: 9856b2314e979ad4ba4dcd46636e42a1351b7f99 | size: 24,654 | ext: py | lang: Python
repo_path: src/som/compiler/ast/parser.py | repo_head_hexsha: 1efc698577830ff3fcd1607e7155d9c6423e8804 | licenses: ["MIT"]
max_stars_repo_name: smarr/RTruffleSOM | max_stars_count: 9 (2015-02-03T23:24:23.000Z to 2020-06-28T23:49:59.000Z)
max_issues_repo_name: SOM-st/RTruffleSOM | max_issues_count: null
max_forks_repo_name: SOM-st/RTruffleSOM | max_forks_count: 2 (2016-08-28T23:25:20.000Z to 2016-08-30T16:49:50.000Z)
from rpython.rlib.rbigint import rbigint
from rpython.rlib.rstring import ParseStringOverflowError
from rtruffle.source_section import SourceSection
from ..parse_error import ParseError, ParseErrorSymList
from ...interpreter.ast.nodes.block_node import BlockNode, BlockNodeWithContext
from ...interpreter.ast.nodes.global_read_node import UninitializedGlobalReadNode
from ...interpreter.ast.nodes.literal_node import LiteralNode
from ...interpreter.ast.nodes.message.uninitialized_node import UninitializedMessageNode
from ...interpreter.ast.nodes.return_non_local_node import ReturnNonLocalNode
from ...interpreter.ast.nodes.sequence_node import SequenceNode
from ..lexer import Lexer
from ..symbol import Symbol
from .method_generation_context import MethodGenerationContext
class Parser(object):
_single_op_syms = [Symbol.Not, Symbol.And, Symbol.Or, Symbol.Star,
Symbol.Div, Symbol.Mod, Symbol.Plus, Symbol.Equal,
Symbol.More, Symbol.Less, Symbol.Comma, Symbol.At,
Symbol.Per, Symbol.NONE]
_binary_op_syms = [Symbol.Or, Symbol.Comma, Symbol.Minus, Symbol.Equal,
Symbol.Not, Symbol.And, Symbol.Or, Symbol.Star,
Symbol.Div, Symbol.Mod, Symbol.Plus, Symbol.Equal,
Symbol.More, Symbol.Less, Symbol.Comma, Symbol.At,
Symbol.Per, Symbol.NONE]
_keyword_selector_syms = [Symbol.Keyword, Symbol.KeywordSequence]
def __init__(self, reader, file_name, universe):
self._universe = universe
self._source_reader = reader
self._file_name = file_name
self._lexer = Lexer(reader)
self._sym = Symbol.NONE
self._text = None
self._next_sym = Symbol.NONE
self._get_symbol_from_lexer()
def classdef(self, cgenc):
cgenc.set_name(self._universe.symbol_for(self._text))
self._expect(Symbol.Identifier)
self._expect(Symbol.Equal)
self._superclass(cgenc)
self._expect(Symbol.NewTerm)
self._instance_fields(cgenc)
while (self._sym_is_identifier() or self._sym == Symbol.Keyword or
self._sym == Symbol.OperatorSequence or
self._sym_in(self._binary_op_syms)):
mgenc = MethodGenerationContext(self._universe)
mgenc.set_holder(cgenc)
mgenc.add_argument("self")
method_body = self._method(mgenc)
cgenc.add_instance_method(mgenc.assemble(method_body))
if self._accept(Symbol.Separator):
cgenc.set_class_side(True)
self._class_fields(cgenc)
while (self._sym_is_identifier() or
self._sym == Symbol.Keyword or
self._sym == Symbol.OperatorSequence or
self._sym_in(self._binary_op_syms)):
mgenc = MethodGenerationContext(self._universe)
mgenc.set_holder(cgenc)
mgenc.add_argument("self")
method_body = self._method(mgenc)
cgenc.add_class_method(mgenc.assemble(method_body))
self._expect(Symbol.EndTerm)
def _superclass(self, cgenc):
if self._sym == Symbol.Identifier:
super_name = self._universe.symbol_for(self._text)
self._accept(Symbol.Identifier)
else:
super_name = self._universe.symbol_for("Object")
cgenc.set_super_name(super_name)
# Load the super class, if it is not nil (break the dependency cycle)
if super_name.get_embedded_string() != "nil":
super_class = self._universe.load_class(super_name)
if not super_class:
raise ParseError("Super class %s could not be loaded"
% super_name.get_embedded_string(), Symbol.NONE, self)
cgenc.set_instance_fields_of_super(
super_class.get_instance_fields())
cgenc.set_class_fields_of_super(
super_class.get_class(self._universe).get_instance_fields())
else:
# TODO: figure out what this is
#raise Exception("What is going on here, not in Java, and I don't think we still got a 'class' field")
# WARNING:
# We hardcode here the field names for Class
# since Object class superclass = Class
# We avoid here any kind of dynamic solution to avoid further
# complexity. However, that makes it static, it is going to make it
# harder to change the definition of Class and Object
field_names_of_class = ["class", "superClass", "name",
"instanceFields", "instanceInvokables"]
field_names = self._universe.new_array_with_strings(field_names_of_class)
cgenc.set_class_fields_of_super(field_names)
def _sym_in(self, symbol_list):
return self._sym in symbol_list
def _sym_is_identifier(self):
return self._sym == Symbol.Identifier or self._sym == Symbol.Primitive
def _accept(self, s):
if self._sym == s:
self._get_symbol_from_lexer()
return True
return False
def _accept_one_of(self, symbol_list):
if self._sym_in(symbol_list):
self._get_symbol_from_lexer()
return True
return False
def _expect(self, s):
if self._accept(s):
return True
raise ParseError("Unexpected symbol. Expected %(expected)s, but found "
"%(found)s", s, self)
def _expect_one_of(self, symbol_list):
if self._accept_one_of(symbol_list):
return True
raise ParseErrorSymList("Unexpected symbol. Expected one of "
"%(expected)s, but found %(found)s",
symbol_list, self)
def _instance_fields(self, cgenc):
if self._accept(Symbol.Or):
while self._sym_is_identifier():
var = self._variable()
cgenc.add_instance_field(self._universe.symbol_for(var))
self._expect(Symbol.Or)
def _class_fields(self, cgenc):
if self._accept(Symbol.Or):
while self._sym_is_identifier():
var = self._variable()
cgenc.add_class_field(self._universe.symbol_for(var))
self._expect(Symbol.Or)
def _get_source_section(self, coord):
return SourceSection(
self._source_reader, "method", coord,
self._lexer.get_number_of_characters_read(),
self._file_name)
def _assign_source(self, node, coord):
node.assign_source_section(self._get_source_section(coord))
return node
def _method(self, mgenc):
self._pattern(mgenc)
self._expect(Symbol.Equal)
if self._sym == Symbol.Primitive:
mgenc.set_primitive()
return self._primitive_block()
else:
return self._method_block(mgenc)
def _primitive_block(self):
self._expect(Symbol.Primitive)
return None
def _pattern(self, mgenc):
if self._sym_is_identifier():
self._unary_pattern(mgenc)
elif self._sym == Symbol.Keyword:
self._keyword_pattern(mgenc)
else:
self._binary_pattern(mgenc)
def _unary_pattern(self, mgenc):
mgenc.set_signature(self._unary_selector())
def _binary_pattern(self, mgenc):
mgenc.set_signature(self._binary_selector())
mgenc.add_argument_if_absent(self._argument())
def _keyword_pattern(self, mgenc):
kw = self._keyword()
mgenc.add_argument_if_absent(self._argument())
while self._sym == Symbol.Keyword:
kw += self._keyword()
mgenc.add_argument_if_absent(self._argument())
mgenc.set_signature(self._universe.symbol_for(kw))
def _method_block(self, mgenc):
self._expect(Symbol.NewTerm)
method_body = self._block_contents(mgenc)
self._expect(Symbol.EndTerm)
return method_body
def _unary_selector(self):
return self._universe.symbol_for(self._identifier())
def _binary_selector(self):
s = self._text
if self._accept(Symbol.Or): pass
elif self._accept(Symbol.Comma): pass
elif self._accept(Symbol.Minus): pass
elif self._accept(Symbol.Equal): pass
elif self._accept_one_of(self._single_op_syms): pass
elif self._accept(Symbol.OperatorSequence): pass
else: self._expect(Symbol.NONE)
return self._universe.symbol_for(s)
def _identifier(self):
s = self._text
is_primitive = self._accept(Symbol.Primitive)
if not is_primitive:
self._expect(Symbol.Identifier)
return s
def _keyword(self):
s = self._text
self._expect(Symbol.Keyword)
return s
def _argument(self):
return self._variable()
def _block_contents(self, mgenc):
if self._accept(Symbol.Or):
self._locals(mgenc)
self._expect(Symbol.Or)
return self._block_body(mgenc)
def _locals(self, mgenc):
while self._sym_is_identifier():
mgenc.add_local_if_absent(self._variable())
def _block_body(self, mgenc):
coordinate = self._lexer.get_source_coordinate()
expressions = []
while True:
if self._accept(Symbol.Exit):
expressions.append(self._result(mgenc))
return self._create_sequence_node(coordinate, expressions)
elif self._sym == Symbol.EndBlock:
return self._create_sequence_node(coordinate, expressions)
elif self._sym == Symbol.EndTerm:
# the end of the method has been found (EndTerm) - make it
# implicitly return "self"
self_exp = self._variable_read(mgenc, "self")
self_coord = self._lexer.get_source_coordinate()
self._assign_source(self_exp, self_coord)
expressions.append(self_exp)
return self._create_sequence_node(coordinate, expressions)
expressions.append(self._expression(mgenc))
self._accept(Symbol.Period)
def _create_sequence_node(self, coordinate, expressions):
if not expressions:
nil_exp = UninitializedGlobalReadNode(
self._universe.symbol_for("nil"), self._universe)
return self._assign_source(nil_exp, coordinate)
if len(expressions) == 1:
return expressions[0]
return SequenceNode(expressions[:], self._get_source_section(coordinate))
def _result(self, mgenc):
exp = self._expression(mgenc)
coord = self._lexer.get_source_coordinate()
self._accept(Symbol.Period)
if mgenc.is_block_method():
node = ReturnNonLocalNode(mgenc.get_outer_self_context_level(),
exp, self._universe)
mgenc.make_catch_non_local_return()
return self._assign_source(node, coord)
else:
return exp
def _expression(self, mgenc):
self._peek_for_next_symbol_from_lexer()
if self._next_sym == Symbol.Assign:
return self._assignation(mgenc)
else:
return self._evaluation(mgenc)
def _assignation(self, mgenc):
return self._assignments(mgenc)
def _assignments(self, mgenc):
coord = self._lexer.get_source_coordinate()
if not self._sym_is_identifier():
raise ParseError("Assignments should always target variables or"
" fields, but found instead a %(found)s",
Symbol.Identifier, self)
variable = self._assignment()
self._peek_for_next_symbol_from_lexer()
if self._next_sym == Symbol.Assign:
value = self._assignments(mgenc)
else:
value = self._evaluation(mgenc)
exp = self._variable_write(mgenc, variable, value)
return self._assign_source(exp, coord)
def _assignment(self):
var_name = self._variable()
self._expect(Symbol.Assign)
return var_name
def _evaluation(self, mgenc):
exp = self._primary(mgenc)
if (self._sym_is_identifier() or
self._sym == Symbol.Keyword or
self._sym == Symbol.OperatorSequence or
self._sym_in(self._binary_op_syms)):
exp = self._messages(mgenc, exp)
return exp
def _primary(self, mgenc):
if self._sym_is_identifier():
coordinate = self._lexer.get_source_coordinate()
var_name = self._variable()
var_read = self._variable_read(mgenc, var_name)
return self._assign_source(var_read, coordinate)
if self._sym == Symbol.NewTerm:
return self._nested_term(mgenc)
if self._sym == Symbol.NewBlock:
coordinate = self._lexer.get_source_coordinate()
bgenc = MethodGenerationContext(self._universe)
bgenc.set_is_block_method(True)
bgenc.set_holder(mgenc.get_holder())
bgenc.set_outer(mgenc)
block_body = self._nested_block(bgenc)
block_method = bgenc.assemble(block_body)
mgenc.add_embedded_block_method(block_method)
if bgenc.requires_context():
result = BlockNodeWithContext(block_method, self._universe)
else:
result = BlockNode(block_method, self._universe)
return self._assign_source(result, coordinate)
return self._literal()
def _variable(self):
return self._identifier()
def _messages(self, mgenc, receiver):
msg = receiver
while self._sym_is_identifier():
msg = self._unary_message(msg)
while (self._sym == Symbol.OperatorSequence or
self._sym_in(self._binary_op_syms)):
msg = self._binary_message(mgenc, msg)
if self._sym == Symbol.Keyword:
msg = self._keyword_message(mgenc, msg)
return msg
def _unary_message(self, receiver):
coord = self._lexer.get_source_coordinate()
selector = self._unary_selector()
msg = UninitializedMessageNode(selector, self._universe, receiver, [])
return self._assign_source(msg, coord)
def _binary_message(self, mgenc, receiver):
coord = self._lexer.get_source_coordinate()
selector = self._binary_selector()
operand = self._binary_operand(mgenc)
msg = UninitializedMessageNode(selector, self._universe, receiver,
[operand])
return self._assign_source(msg, coord)
def _binary_operand(self, mgenc):
operand = self._primary(mgenc)
while self._sym_is_identifier():
operand = self._unary_message(operand)
return operand
def _keyword_message(self, mgenc, receiver):
coord = self._lexer.get_source_coordinate()
arguments = []
keyword = []
while self._sym == Symbol.Keyword:
keyword.append(self._keyword())
arguments.append(self._formula(mgenc))
selector = self._universe.symbol_for("".join(keyword))
msg = UninitializedMessageNode(selector, self._universe, receiver,
arguments[:])
return self._assign_source(msg, coord)
def _formula(self, mgenc):
operand = self._binary_operand(mgenc)
while (self._sym == Symbol.OperatorSequence or
self._sym_in(self._binary_op_syms)):
operand = self._binary_message(mgenc, operand)
return operand
def _nested_term(self, mgenc):
self._expect(Symbol.NewTerm)
exp = self._expression(mgenc)
self._expect(Symbol.EndTerm)
return exp
def _literal(self):
coord = self._lexer.get_source_coordinate()
if self._sym == Symbol.Pound:
self._peek_for_next_symbol_from_lexer_if_necessary()
if self._next_sym == Symbol.NewTerm:
val = self._literal_array()
else:
val = self._literal_symbol()
elif self._sym == Symbol.STString:
val = self._literal_string()
else:
is_negative = self._is_negative_number()
if self._sym == Symbol.Integer:
val = self._literal_integer(is_negative)
elif self._sym != Symbol.Double:
raise ParseError("Unexpected symbol. Expected %(expected)s, "
"but found %(found)s", self._sym, self)
else:
val = self._literal_double(is_negative)
lit = LiteralNode(val)
self._assign_source(lit, coord)
return lit
def _is_negative_number(self):
is_negative = False
if self._sym == Symbol.Minus:
self._expect(Symbol.Minus)
is_negative = True
return is_negative
def _literal_number(self):
if self._sym == Symbol.Minus:
return self._negative_decimal()
else:
return self._literal_decimal(False)
def _literal_decimal(self, negate_value):
if self._sym == Symbol.Integer:
return self._literal_integer(negate_value)
else:
if self._sym == Symbol.Double:
return self._literal_double(negate_value)
else:
raise ParseError("Could not parse double. "
"Expected a number but got '%s'" % self._text,
Symbol.Double, self)
def _negative_decimal(self):
self._expect(Symbol.Minus)
return self._literal_decimal(True)
def _literal_integer(self, negate_value):
try:
i = int(self._text)
if negate_value:
i = 0 - i
result = self._universe.new_integer(i)
except ParseStringOverflowError:
bigint = rbigint.fromstr(self._text)
if negate_value:
bigint.sign = -1
result = self._universe.new_biginteger(bigint)
except ValueError:
raise ParseError("Could not parse integer. "
"Expected a number but got '%s'" % self._text,
Symbol.NONE, self)
self._expect(Symbol.Integer)
return result
def _literal_double(self, negate_value):
try:
f = float(self._text)
if negate_value:
f = 0.0 - f
except ValueError:
raise ParseError("Could not parse double. "
"Expected a number but got '%s'" % self._text,
Symbol.NONE, self)
self._expect(Symbol.Double)
return self._universe.new_double(f)
def _literal_symbol(self):
self._expect(Symbol.Pound)
if self._sym == Symbol.STString:
s = self._string()
return self._universe.symbol_for(s)
else:
return self._selector()
def _literal_string(self):
s = self._string()
return self._universe.new_string(s)
def _literal_array(self):
literals = []
self._expect(Symbol.Pound)
self._expect(Symbol.NewTerm)
while self._sym != Symbol.EndTerm:
literals.append(self._get_object_for_current_literal())
self._expect(Symbol.EndTerm)
return self._universe.new_array_from_list(literals[:])
def _get_object_for_current_literal(self):
if self._sym == Symbol.Pound:
self._peek_for_next_symbol_from_lexer_if_necessary()
if self._next_sym == Symbol.NewTerm:
return self._literal_array()
else:
return self._literal_symbol()
elif self._sym == Symbol.STString:
return self._literal_string()
elif self._sym == Symbol.Integer:
return self._literal_integer(self._is_negative_number())
elif self._sym == Symbol.Double:
return self._literal_double(self._is_negative_number())
else:
raise ParseError("Could not parse literal array value",
Symbol.NONE, self)
def _selector(self):
if (self._sym == Symbol.OperatorSequence or
self._sym_in(self._single_op_syms)):
return self._binary_selector()
if (self._sym == Symbol.Keyword or
self._sym == Symbol.KeywordSequence):
return self._keyword_selector()
return self._unary_selector()
def _keyword_selector(self):
s = self._text
self._expect_one_of(self._keyword_selector_syms)
symb = self._universe.symbol_for(s)
return symb
def _string(self):
s = self._text
self._expect(Symbol.STString)
return s
def _nested_block(self, mgenc):
self._expect(Symbol.NewBlock)
mgenc.add_argument_if_absent("$blockSelf")
if self._sym == Symbol.Colon:
self._block_pattern(mgenc)
# generate Block signature
block_sig = ("$blockMethod@" +
str(self._lexer.get_current_line_number()) +
"@" + str(self._lexer.get_current_column()))
arg_size = mgenc.get_number_of_arguments()
block_sig += ":" * (arg_size - 1)
mgenc.set_signature(self._universe.symbol_for(block_sig))
expressions = self._block_contents(mgenc)
self._expect(Symbol.EndBlock)
return expressions
def _block_pattern(self, mgenc):
self._block_arguments(mgenc)
self._expect(Symbol.Or)
def _block_arguments(self, mgenc):
self._expect(Symbol.Colon)
mgenc.add_argument_if_absent(self._argument())
while self._sym == Symbol.Colon:
self._accept(Symbol.Colon)
mgenc.add_argument_if_absent(self._argument())
def _variable_read(self, mgenc, variable_name):
# 'super' needs to be handled separately
if variable_name == "super":
variable = mgenc.get_variable("self")
return variable.get_super_read_node(
mgenc.get_outer_self_context_level(),
mgenc.get_holder().get_name(),
mgenc.get_holder().is_class_side(),
self._universe)
# first lookup in local variables, or method arguments
variable = mgenc.get_variable(variable_name)
if variable:
return variable.get_read_node(
mgenc.get_context_level(variable_name))
# otherwise, it might be an object field
var_symbol = self._universe.symbol_for(variable_name)
field_read = mgenc.get_object_field_read(var_symbol)
if field_read:
return field_read
# nope, so, it is a global?
return mgenc.get_global_read(var_symbol)
def _variable_write(self, mgenc, variable_name, exp):
if variable_name == "self":
raise ParseError(
"It is not possible to write to `self`, it is a pseudo variable", Symbol.NONE, self)
if variable_name == "super":
raise ParseError(
"It is not possible to write to `super`, it is a pseudo variable", Symbol.NONE, self)
variable = mgenc.get_variable(variable_name)
if variable:
return variable.get_write_node(
mgenc.get_context_level(variable_name), exp)
field_name = self._universe.symbol_for(variable_name)
field_write = mgenc.get_object_field_write(field_name, exp)
if field_write:
return field_write
else:
raise RuntimeError("Neither a variable nor a field found in current"
" scope that is named " + variable_name + ".")
def _get_symbol_from_lexer(self):
self._sym = self._lexer.get_sym()
self._text = self._lexer.get_text()
def _peek_for_next_symbol_from_lexer_if_necessary(self):
if not self._lexer.get_peek_done():
self._peek_for_next_symbol_from_lexer()
def _peek_for_next_symbol_from_lexer(self):
self._next_sym = self._lexer.peek()
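The class above is a hand-written recursive-descent parser: _accept consumes the current symbol only when it matches, while _expect turns a mismatch into a parse error. A toy, self-contained sketch of that pattern over a plain token list follows; the grammar and the names are illustrative, not SOM's.

class ToyParser(object):
    def __init__(self, tokens):
        self._tokens = list(tokens)
        self._pos = 0

    def _current(self):
        return self._tokens[self._pos] if self._pos < len(self._tokens) else None

    def _accept(self, sym):
        # Consume the current token only when it matches.
        if self._current() == sym:
            self._pos += 1
            return True
        return False

    def _expect(self, sym):
        # Like _accept, but a mismatch is a hard parse error.
        if self._accept(sym):
            return True
        raise ValueError("expected %r, found %r" % (sym, self._current()))

# Parse the token stream for "( x )":
p = ToyParser(["(", "x", ")"])
p._expect("(")
p._accept("x")
p._expect(")")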
avg_line_length: 36.578635 | max_line_length: 114 | alphanum_fraction: 0.608786

hexsha: c365690a70ec83486147a8b1088f84d257cfa741 | size: 888 | ext: py | lang: Python
repo_path: tests/test_pylint.py
repo_name: sea-kg/roadmapgen2d | repo_head_hexsha: 9c707402c89e6f7a443284ea8e9275ffa9ab10fb | licenses: ["MIT"]
max_stars_count: 1 (2021-05-25T18:46:15.000Z to 2021-05-25T18:46:15.000Z)
max_issues_count: 7 (2021-05-25T06:19:57.000Z to 2021-05-27T03:04:56.000Z)
max_forks_count: null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Test server api leaks"""
# Copyright (c) 2020 Evgenii Sopov <mrseakg@gmail.com>
# pylint: disable=relative-beyond-top-level,wrong-import-position,import-error
import os
from subprocess import Popen,PIPE,STDOUT
def test_pylint_library():
""" Test pylint library """
current_dir = os.path.dirname(os.path.abspath(__file__))
list_files_for_pylint = [
'../roadmapgen2d/',
]
for _filepath in list_files_for_pylint:
_filepath = os.path.join(current_dir, _filepath)
with Popen(
["python3", "-m", "pylint", _filepath],
stderr=STDOUT,
stdout=PIPE
) as p_out:
output = p_out.communicate()[0]
exit_code = p_out.returncode
if exit_code != 0:
print(output.decode("utf-8"))
assert exit_code == 0
avg_line_length: 29.6 | max_line_length: 78 | alphanum_fraction: 0.609234

hexsha: 10da9bd3eaea1e2e851ed298018810388f8ca7fb | size: 3,524 | ext: py | lang: Python
repo_path: source/intapi/views.py | repo_head_hexsha: 5c9dbcacf81020cf0c1960e337bdd33113acd597 | licenses: ["BSD-3-Clause"]
max_stars_repo_name: mverleg/svsite | max_stars_count: null
max_issues_repo_name: mverleg/svsite | max_issues_count: 142 (2015-06-05T07:53:09.000Z to 2020-03-31T18:37:07.000Z)
max_forks_repo_name: mdilli/svsite | max_forks_count: null
from json import dumps
from collections import OrderedDict
from django.conf import settings
from django.contrib.auth import authenticate, get_user_model
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from base.views import render_cms_special
from member.models import Team
def api_view(func):
def func_with_header(request, *args, **kwargs):
if not getattr(settings, 'INTEGRATION_KEYS', None):
return HttpResponse('integration key not set on the server; service cannot be used', status=501)
if request.method != 'POST':
return HttpResponse('send a POST request', status=405)
if 'key' not in request.POST:
return HttpResponse('your request does not include an integration key', status=400)
if request.POST['key'].strip() not in settings.INTEGRATION_KEYS:
return HttpResponse('incorrect key', status=403)
data = func(request, *args, **kwargs)
resp = HttpResponse(dumps(data, indent=2, sort_keys=False), content_type='application/json')
resp['Allow'] = 'POST'
return resp
return func_with_header
def api_info(request):
return render_cms_special(request, 'api_info.html', {
'DOMAIN': settings.SITE_URL,
'INTEGRATION_KEYS_COUNT': len(getattr(settings, 'INTEGRATION_KEYS', ())),
'INTEGRATION_ALLOW_EMAIL': getattr(settings, 'INTEGRATION_ALLOW_EMAIL', None),
})
@csrf_exempt
@api_view
def user_list_api(request):
users = get_user_model().objects.filter(is_active=True)
if 'email' in request.POST:
if not getattr(settings, 'INTEGRATION_ALLOW_EMAIL', False):
return HttpResponse('email listing is turned off on the server; service cannot be used', status=501)
return OrderedDict((user.username, user.email) for user in users)
return list(user.username for user in users)
@csrf_exempt
@api_view
def team_list_api(request):
teams = Team.objects.filter(listed=True)
return list(team.name for team in teams)
@csrf_exempt
@api_view
def user_details_api(request):
if 'username' not in request.POST or 'password' not in request.POST:
return HttpResponse('your request does not include `username` and `password`', status=400)
user = authenticate(username=request.POST['username'], password=request.POST['password'])
if not user:
if not get_user_model().objects.filter(username=request.POST['username']):
return HttpResponse('user `{0:s}` does not exist'.format(request.POST['username']), status=404)
return HttpResponse('incorrect password', status=403)
if not user.is_active:
return HttpResponse('the account `{0:s}` has been disabled'.format(user.username), status=403)
bday = None
if user.birthday:
bday = user.birthday.strftime('%Y-%m-%d')
return OrderedDict((
('username', user.username),
('first_name', user.first_name),
('last_name', user.last_name),
('email', user.email),
('birthday', bday),
('teams', {role.team.name: role.title for role in user.role_throughs}),
))
@csrf_exempt
@api_view
def team_details_api(request):
if 'teamname' not in request.POST:
return HttpResponse('your request does not include team `name`', status=400)
try:
team = Team.objects.get(name=request.POST['teamname'])
except Team.DoesNotExist:
return HttpResponse('team `{0:s}` does not exist'.format(request.POST['teamname']), status=404)
return OrderedDict((
('hidden', not team.listed),
('teamname', team.name),
('description', team.description),
('leaders', [admin.username for admin in team.admins.all()]),
('members', {role.member.username: role.title for role in team.role_throughs}),
))
| 35.959184
| 103
| 0.745176
|
c9bb49cc7cc98186ee794cc20d3353327d510211
| 112
|
py
|
Python
|
readthedocs/builds/signals.py
|
gr2m/readthedocs.org
|
38e73cd73efb76461d28a5d9737731b7d7349297
|
[
"MIT"
] | 1
|
2019-01-05T09:49:52.000Z
|
2019-01-05T09:49:52.000Z
|
readthedocs/builds/signals.py
|
himynamesdave/readthedocs.org
|
38e73cd73efb76461d28a5d9737731b7d7349297
|
[
"MIT"
] | null | null | null |
readthedocs/builds/signals.py
|
himynamesdave/readthedocs.org
|
38e73cd73efb76461d28a5d9737731b7d7349297
|
[
"MIT"
] | 1
|
2019-01-05T09:49:54.000Z
|
2019-01-05T09:49:54.000Z
|
"""Build signals"""
import django.dispatch
build_complete = django.dispatch.Signal(providing_args=['build'])
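# Illustrative usage sketch: a hypothetical receiver for the signal above; a
# sender would fire it with ``build_complete.send(sender=None, build=some_build)``.
def _example_on_build_complete(sender, build, **kwargs):
    """Hypothetical handler reacting to a finished build (not connected here)."""
    pass
# build_complete.connect(_example_on_build_complete)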
| 16
| 65
| 0.758929
|
024759a66f11aa1c7a72889c02dc189c270c559e
| 7,986
|
py
|
Python
|
qa/rpc-tests/txn_clone.py
|
otherdeniz/othercoin
|
611af232c0cd2025c4464c0444b650bc6c2444f6
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/txn_clone.py
|
otherdeniz/othercoin
|
611af232c0cd2025c4464c0444b650bc6c2444f6
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/txn_clone.py
|
otherdeniz/othercoin
|
611af232c0cd2025c4464c0444b650bc6c2444f6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test proper accounting with an equivalent malleability clone
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
return super(TxnMallTest, self).setup_network(True)
def run_test(self):
# All nodes should start with 12,500 OTC:
starting_balance = 12500
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
self.nodes[0].settxfee(.001)
node0_address_foo = self.nodes[0].getnewaddress("foo")
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 12190)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress("bar")
fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 290)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(""),
starting_balance - 12190 - 290 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendfrom("foo", node1_address, 400, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 200, 0)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs)
# 3 hex manipulations on the clone are required
# manipulation 1. sequence is at version+#inputs+input+sigstub
posseq = 2*(4+1+36+1)
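        # (2 hex chars per byte: 4-byte version + 1-byte input count
        #  + 36-byte outpoint + 1-byte scriptSig length stub = 42 bytes -> offset 84)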
seqbe = '%08x' % rawtx1["vin"][0]["sequence"]
clone_raw = clone_raw[:posseq] + seqbe[6:8] + seqbe[4:6] + seqbe[2:4] + seqbe[0:2] + clone_raw[posseq + 8:]
# manipulation 2. createrawtransaction randomizes the order of its outputs, so swap them if necessary.
# output 0 is at version+#inputs+input+sigstub+sequence+#outputs
# 400 OTC serialized is 00902f5009000000
pos0 = 2*(4+1+36+1+4+1)
hex400 = "00902f5009000000"
output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
if (rawtx1["vout"][0]["value"] == 400 and clone_raw[pos0 : pos0 + 16] != hex400 or
rawtx1["vout"][0]["value"] != 400 and clone_raw[pos0 : pos0 + 16] == hex400):
output0 = clone_raw[pos0 : pos0 + output_len]
output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
# manipulation 3. locktime is after outputs
poslt = pos0 + 2 * output_len
ltbe = '%08x' % rawtx1["locktime"]
clone_raw = clone_raw[:poslt] + ltbe[6:8] + ltbe[4:6] + ltbe[2:4] + ltbe[0:2] + clone_raw[poslt + 8:]
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 500OTC for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block: expected += 500
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
# foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo", 0), 12190 + tx1["amount"] + tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar", 0), 290 + tx2["amount"] + tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 1000 OTC for 2 matured,
# less possible orphaned matured subsidy
expected += 1000
if (self.options.mine_block):
expected -= 500
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*", 0), expected)
# Check node0's individual account balances.
# "foo" should have been debited by the equivalent clone of tx1
assert_equal(self.nodes[0].getbalance("foo"), 12190 + tx1["amount"] + tx1["fee"])
# "bar" should have been debited by (possibly unconfirmed) tx2
assert_equal(self.nodes[0].getbalance("bar", 0), 290 + tx2["amount"] + tx2["fee"])
# "" should have starting balance, less funding txes, plus subsidies
assert_equal(self.nodes[0].getbalance("", 0), starting_balance
- 12190
+ fund_foo_tx["fee"]
- 290
+ fund_bar_tx["fee"]
+ 1000)
# Node1's "from0" account balance
assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
if __name__ == '__main__':
TxnMallTest().main()
| 48.108434
| 115
| 0.609191
|
18262cb6008bbf0c7cb73cf9e8ea28e9667ec828
| 509
|
py
|
Python
|
lib/steps/ImportProject.py
|
lastcolour/Tacos
|
fe2b65250bfa74613151ae2dc6a91eb30f254844
|
[
"MIT"
] | null | null | null |
lib/steps/ImportProject.py
|
lastcolour/Tacos
|
fe2b65250bfa74613151ae2dc6a91eb30f254844
|
[
"MIT"
] | null | null | null |
lib/steps/ImportProject.py
|
lastcolour/Tacos
|
fe2b65250bfa74613151ae2dc6a91eb30f254844
|
[
"MIT"
] | null | null | null |
from .Step import Step
class ImportProject(Step):
def __init__(self):
Step.__init__(self)
self._projectFile = None
def serialize(self, jsonData):
self._projectFile = jsonData["project_file"]
def run(self):
from lib.ProjectBuilder import ProjectBuilder
builder = ProjectBuilder()
project = builder.build(self._projectFile, None)
if not project:
return False
project.setParent(self._project)
return project.run()
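# Illustrative usage sketch (the file path below is hypothetical): the step is
# configured from a JSON fragment of the form {"project_file": "..."} and then run.
#
#   step = ImportProject()
#   step.serialize({"project_file": "projects/example_project.json"})
#   step.run()  # builds and runs the referenced project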
| 28.277778
| 56
| 0.646365
|
84020389b1d419e11037ac2e589f70408ba5da57
| 26,113
|
py
|
Python
|
keras_wrapper/utils.py
|
PRHLT/multimodal_keras_wrapper
|
0a088f36e5d4251ce465974f07d1a7f21b80203e
|
[
"MIT"
] | 7
|
2018-04-08T03:06:24.000Z
|
2019-04-24T07:56:38.000Z
|
keras_wrapper/utils.py
|
PRHLT/multimodal_keras_wrapper
|
0a088f36e5d4251ce465974f07d1a7f21b80203e
|
[
"MIT"
] | null | null | null |
keras_wrapper/utils.py
|
PRHLT/multimodal_keras_wrapper
|
0a088f36e5d4251ce465974f07d1a7f21b80203e
|
[
"MIT"
] | 7
|
2017-12-04T09:06:35.000Z
|
2021-04-19T07:47:46.000Z
|
# -*- coding: utf-8 -*-
import copy
import sys
from six import iteritems
import numpy as np
import logging
from multiprocessing import Pipe  # used by MultiprocessQueue when multiprocess_type == 'Pipe'
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(message)s', datefmt='%d/%m/%Y %H:%M:%S')
logger = logging.getLogger(__name__)
if sys.version_info.major == 2:
from itertools import imap as map
def checkParameters(input_params,
default_params,
hard_check=False):
"""Validates a set of input parameters and uses the default ones if not specified.
:param input_params: Input parameters.
:param default_params: Default parameters
:param hard_check: If True, raise exception if a parameter is not valid.
:return:
"""
valid_params = [key for key in default_params]
params = dict()
# Check input parameters' validity
for key, val in iteritems(input_params):
if key in valid_params:
params[key] = val
elif hard_check:
raise ValueError("Parameter '" + key + "' is not a valid parameter.")
# Use default parameters if not provided
for key, default_val in iteritems(default_params):
if key not in params:
params[key] = default_val
return params
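# Illustrative usage sketch (the parameter names below are hypothetical):
# unknown keys are dropped and missing keys fall back to the defaults.
def _example_check_parameters():
    defaults = {'learning_rate': 0.01, 'n_epochs': 10}
    merged = checkParameters({'learning_rate': 0.1}, defaults)
    assert merged == {'learning_rate': 0.1, 'n_epochs': 10}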
class MultiprocessQueue():
"""
Wrapper class for encapsulating the behaviour of some multiprocessing
communication structures.
See how Queues and Pipes work in the following link
https://docs.python.org/2/library/multiprocessing.html#multiprocessing-examples
"""
def __init__(self,
manager,
multiprocess_type='Queue'):
if multiprocess_type != 'Queue' and multiprocess_type != 'Pipe':
raise NotImplementedError(
'Not valid multiprocessing queue of type ' + multiprocess_type)
self.type = multiprocess_type
if multiprocess_type == 'Queue':
self.queue = eval('manager.' + multiprocess_type + '()')
else:
self.queue = eval(multiprocess_type + '()')
def put(self,
elem):
if self.type == 'Queue':
self.queue.put(elem)
elif self.type == 'Pipe':
self.queue[1].send(elem)
def get(self):
if self.type == 'Queue':
return self.queue.get()
elif self.type == 'Pipe':
return self.queue[0].recv()
def qsize(self):
if self.type == 'Queue':
return self.queue.qsize()
elif self.type == 'Pipe':
return -1
def empty(self):
if self.type == 'Queue':
return self.queue.empty()
elif self.type == 'Pipe':
return not self.queue[0].poll()
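# Illustrative usage sketch: wrapping a manager-backed Queue (spawns a helper
# process, so call this from a main program rather than at import time).
def _example_multiprocess_queue():
    from multiprocessing import Manager
    queue = MultiprocessQueue(Manager(), multiprocess_type='Queue')
    queue.put('hello')
    assert queue.get() == 'hello'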
def bbox(img,
mode='max'):
"""
Returns a bounding box covering all the non-zero area in the image.
:param img: Image on which print the bounding box
:param mode: "width_height" returns width in [2] and height in [3], "max" returns xmax in [2] and ymax in [3]
:return:
"""
rows = np.any(img, axis=1)
cols = np.any(img, axis=0)
y, ymax = np.where(rows)[0][[0, -1]]
x, xmax = np.where(cols)[0][[0, -1]]
if mode == 'width_height':
return x, y, xmax - x, ymax - y
elif mode == 'max':
return x, y, xmax, ymax
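# Illustrative usage sketch: a 5x5 mask with a 2x2 non-zero patch.
def _example_bbox():
    img = np.zeros((5, 5))
    img[1:3, 2:4] = 1.
    assert bbox(img, mode='max') == (2, 1, 3, 2)
    assert bbox(img, mode='width_height') == (2, 1, 1, 1)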
def simplifyDataset(ds,
id_classes,
n_classes=50):
"""
:param ds:
:param id_classes:
:param n_classes:
:return:
"""
logger.info("Simplifying %s from %d to %d classes." % (str(ds.name), len(ds.classes), n_classes))
ds.classes[id_classes] = ds.classes[id_classes][:n_classes]
id_labels = ds.ids_outputs[ds.types_outputs.index('categorical')]
# reduce each data split
for s in ['train', 'val', 'test']:
kept_Y = dict()
kept_X = dict()
labels_set = getattr(ds, 'Y_' + s)[id_labels]
for i, y in list(enumerate(labels_set)):
if y < n_classes:
for id_out in ds.ids_outputs:
y_split = getattr(ds, 'Y_' + s)
sample = y_split[id_out][i]
try:
kept_Y[id_out].append(sample)
except Exception:
kept_Y[id_out] = []
kept_Y[id_out].append(sample)
for id_in in ds.ids_inputs:
# exec ('sample = ds.X_' + s + '[id_in][i]')
x_split = getattr(ds, 'X_' + s)
sample = x_split[id_in][i]
try:
kept_X[id_in].append(sample)
except Exception:
kept_X[id_in] = []
kept_X[id_in].append(sample)
setattr(ds, 'X_' + s, copy.copy(kept_X))
setattr(ds, 'Y_' + s, copy.copy(kept_Y))
setattr(ds, 'len_' + s, len(kept_Y[id_labels]))
def average_models(models,
output_model,
weights=None,
custom_objects=None):
from keras_wrapper.saving import loadModel, saveModel
if not isinstance(models, list):
raise AssertionError('You must give a list of models to average.')
if len(models) == 0:
raise AssertionError('You provided an empty list of models to average!')
model_weights = np.asarray([1. / len(models)] * len(models),
dtype=np.float32) if (weights is None) or (weights == []) else np.asarray(weights,
dtype=np.float32)
if len(model_weights) != len(models):
raise AssertionError(
'You must give a list of weights of the same size than the list of models.')
loaded_models = [loadModel(m,
-1,
full_path=True,
custom_objects=custom_objects) for m in models]
# Check that all models are compatible
if not all([hasattr(loaded_model, 'model') for loaded_model in loaded_models]):
raise AssertionError('Not all models have the attribute "model".')
if not (all([hasattr(loaded_model, 'model_init') for loaded_model in
loaded_models]) or all(
[not hasattr(loaded_model, 'model_init') for loaded_model in
loaded_models])):
raise AssertionError('Not all models have the attribute "model_init".')
if not (all([hasattr(loaded_model, 'model_next') for loaded_model in
loaded_models]) or all(
[not hasattr(loaded_model, 'model_next') for loaded_model in
loaded_models])):
raise AssertionError('Not all models have the attribute "model_next".')
# Check all layers are the same
if not (all([[str(loaded_models[0].model.weights[i]) == str(loaded_model.model.weights[i]) for i in
range(len(loaded_models[0].model.weights))] for loaded_model in loaded_models])):
raise AssertionError('Not all models have the same weights!')
if hasattr(loaded_models[0], 'model_init') and getattr(loaded_models[0], 'model_init') is not None:
        if not all([[str(loaded_models[0].model_init.weights[i]) == str(loaded_model.model_init.weights[i]) for i in
                     range(len(loaded_models[0].model_init.weights))] for loaded_model in loaded_models]):
raise AssertionError('Not all model_inits have the same weights!')
if hasattr(loaded_models[0], 'model_next') and getattr(loaded_models[0], 'model_next') is not None:
if not all([[str(loaded_models[0].model_next.weights[i]) == str(loaded_model.model_next.weights[i]) for i in
range(len(loaded_models[0].model_next.weights))] for loaded_model in loaded_models]):
raise AssertionError('Not all model_nexts have the same weights!')
# Retrieve weights, weigh them and overwrite in model[0].
current_weights = loaded_models[0].model.get_weights()
loaded_models[0].model.set_weights(
[current_weights[matrix_index] * model_weights[0] for matrix_index in range(len(current_weights))])
# We have model_init
if hasattr(loaded_models[0], 'model_init') and getattr(loaded_models[0], 'model_init') is not None:
current_weights = loaded_models[0].model_init.get_weights()
loaded_models[0].model_init.set_weights(
[current_weights[matrix_index] * model_weights[0] for matrix_index in range(len(current_weights))])
# We have model_next
if hasattr(loaded_models[0], 'model_next') and getattr(loaded_models[0], 'model_next') is not None:
current_weights = loaded_models[0].model_next.get_weights()
loaded_models[0].model_next.set_weights(
[current_weights[matrix_index] * model_weights[0] for matrix_index in range(len(current_weights))])
# Weighted sum of all models
for m in range(1, len(models)):
current_weights = loaded_models[m].model.get_weights()
prev_weights = loaded_models[0].model.get_weights()
loaded_models[0].model.set_weights(
[current_weights[matrix_index] * model_weights[m] + prev_weights[matrix_index] for matrix_index in
range(len(current_weights))])
# We have model_init
if hasattr(loaded_models[0], 'model_init') and getattr(loaded_models[0], 'model_init') is not None:
current_weights = loaded_models[m].model_init.get_weights()
prev_weights = loaded_models[0].model_init.get_weights()
loaded_models[0].model_init.set_weights(
[current_weights[matrix_index] * model_weights[m] + prev_weights[matrix_index] for matrix_index in
range(len(current_weights))])
# We have model_next
if hasattr(loaded_models[0], 'model_next') and getattr(loaded_models[0], 'model_next') is not None:
current_weights = loaded_models[m].model_next.get_weights()
prev_weights = loaded_models[0].model_next.get_weights()
loaded_models[0].model_next.set_weights(
[current_weights[matrix_index] * model_weights[m] + prev_weights[matrix_index] for matrix_index in
range(len(current_weights))])
# Save averaged model
saveModel(loaded_models[0], -1, path=output_model, full_path=True,
store_iter=False)
# Text-related utils
def one_hot_2_indices(preds,
pad_sequences=True,
verbose=0):
"""
    Converts a one-hot codification into an index-based one
:param preds: Predictions codified as one-hot vectors.
:param pad_sequences: Whether we should pad sequence or not
:param verbose: Verbosity level, by default 0.
    :return: List of converted predictions
"""
if verbose > 0:
logger.info('Converting one hot prediction into indices...')
preds = list(map(lambda x: np.argmax(x, axis=1), preds))
if pad_sequences:
preds = [pred[:sum([int(elem > 0) for elem in pred]) + 1] for pred in preds]
return preds
def indices_2_one_hot(indices,
n):
"""
Converts a list of indices into one hot codification
:param indices: list of indices
:param n: integer. Size of the vocabulary
:return: numpy array with shape (len(indices),
n)
"""
one_hot = np.zeros((len(indices), n), dtype=np.int8)
for i in range(len(indices)):
if indices[i] >= n:
raise ValueError("Index out of bounds when converting to one hot")
one_hot[i, indices[i]] = 1
return one_hot
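# Illustrative usage sketch: two indices encoded over a vocabulary of size 3.
def _example_indices_2_one_hot():
    one_hot = indices_2_one_hot([0, 2], n=3)
    assert one_hot.tolist() == [[1, 0, 0], [0, 0, 1]]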
# From keras.utils.np_utils
def to_categorical(y,
num_classes=None):
"""Converts a class vector (integers) to binary class matrix.
E.g. for use with categorical_crossentropy.
# Arguments
y: class vector to be converted into a matrix
(integers from 0 to num_classes).
num_classes: total number of classes.
# Returns
A binary matrix representation of the input.
"""
y = np.array(y,
dtype='int')
input_shape = y.shape
if input_shape and input_shape[-1] == 1 and len(input_shape) > 1:
input_shape = tuple(input_shape[:-1])
y = y.ravel()
if not num_classes:
num_classes = np.max(y) + 1
n = y.shape[0]
categorical = np.zeros((n,
num_classes))
categorical[np.arange(n), y] = 1
output_shape = input_shape + (num_classes,)
categorical = np.reshape(categorical, output_shape)
return categorical
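# Illustrative usage sketch: three class labels turned into a binary matrix.
def _example_to_categorical():
    categorical = to_categorical([0, 2, 1], num_classes=3)
    assert categorical.tolist() == [[1., 0., 0.], [0., 0., 1.], [0., 1., 0.]]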
def categorical_probas_to_classes(p):
return np.argmax(p,
axis=1)
# ------------------------------------------------------- #
# DECODING FUNCTIONS
# Functions for decoding predictions
# ------------------------------------------------------- #
def decode_predictions_one_hot(preds,
index2word,
pad_sequences=True,
verbose=0):
"""
Decodes predictions following a one-hot codification.
:param preds: Predictions codified as one-hot vectors.
:param index2word: Mapping from word indices into word characters.
:param verbose: Verbosity level, by default 0.
:return: List of decoded predictions
"""
if verbose > 0:
logger.info('Decoding one hot prediction ...')
preds = list(map(lambda prediction: np.argmax(prediction, axis=1), preds))
PAD = '<pad>'
flattened_answer_pred = [list(map(lambda index: index2word[index], pred)) for
pred in preds]
answer_pred_matrix = np.asarray(flattened_answer_pred)
answer_pred = []
for a_no in answer_pred_matrix:
end_token_pos = [j for j, x in list(enumerate(a_no)) if x == PAD]
end_token_pos = None if len(end_token_pos) == 0 or not pad_sequences else end_token_pos[0]
a_no = [a.decode('utf-8') if isinstance(a,
str) and sys.version_info.major == 2 else a
for a in a_no]
tmp = u' '.join(a_no[:end_token_pos])
answer_pred.append(tmp)
return answer_pred
def decode_predictions(preds,
temperature,
index2word,
sampling_type,
verbose=0):
"""
Decodes predictions
:param preds: Predictions codified as the output of a softmax activation function.
:param temperature: Temperature for sampling.
:param index2word: Mapping from word indices into word characters.
:param sampling_type: 'max_likelihood' or 'multinomial'.
:param verbose: Verbosity level, by default 0.
:return: List of decoded predictions.
"""
if verbose > 0:
logger.info('Decoding prediction ...')
flattened_preds = preds.reshape(-1, preds.shape[-1])
flattened_answer_pred = list(map(lambda index: index2word[index], sampling(scores=flattened_preds,
sampling_type=sampling_type,
temperature=temperature)))
answer_pred_matrix = np.asarray(flattened_answer_pred).reshape(preds.shape[:-1])
answer_pred = []
EOS = '<eos>'
PAD = '<pad>'
for a_no in answer_pred_matrix:
if len(a_no.shape) > 1: # only process word by word if our prediction has more than one output
init_token_pos = 0
end_token_pos = [j for j, x in list(enumerate(a_no)) if x == EOS or x == PAD]
end_token_pos = None if len(end_token_pos) == 0 else end_token_pos[0]
a_no = [a.decode('utf-8') if isinstance(a, str) and sys.version_info.major == 2 else a
for a in a_no]
tmp = u' '.join(a_no[init_token_pos:end_token_pos])
else:
tmp = a_no[:-1]
answer_pred.append(tmp)
return answer_pred
def decode_categorical(preds,
index2word,
verbose=0):
"""
Decodes predictions
:param preds: Predictions codified as the output of a softmax activation function.
:param index2word: Mapping from word indices into word characters.
:return: List of decoded predictions.
"""
if verbose > 0:
logger.info('Decoding prediction ...')
word_indices = categorical_probas_to_classes(preds)
return [index2word.get(word) for word in word_indices]
def decode_multilabel(preds,
index2word,
min_val=0.5,
get_probs=False,
verbose=0):
"""
Decodes predictions
:param preds: Predictions codified as the output of a softmax activation function.
:param index2word: Mapping from word indices into word characters.
:param min_val: Minimum value needed for considering a positive prediction.
:param get_probs: additionally return probability for each predicted label
:param verbose: Verbosity level, by default 0.
:return: List of decoded predictions.
"""
if verbose > 0:
logger.info('Decoding prediction ...')
answer_pred = []
probs_pred = []
for pred in preds:
current_pred = []
current_probs = []
for ind, word in list(enumerate(pred)):
if word >= min_val:
current_pred.append(index2word[ind])
current_probs.append(word)
answer_pred.append(current_pred)
probs_pred.append(current_probs)
if get_probs:
return answer_pred, probs_pred
else:
return answer_pred
def replace_unknown_words(src_word_seq,
trg_word_seq,
hard_alignment,
unk_symbol,
glossary=None,
heuristic=0,
mapping=None,
verbose=0):
"""
Replaces unknown words from the target sentence according to some heuristic.
Borrowed from: https://github.com/sebastien-j/LV_groundhog/blob/master/experiments/nmt/replace_UNK.py
:param src_word_seq: Source sentence words
:param trg_word_seq: Hypothesis words
:param hard_alignment: Target-Source alignments
:param glossary: Hard-coded substitutions.
:param unk_symbol: Symbol in trg_word_seq to replace
:param heuristic: Heuristic (0, 1, 2)
:param mapping: External alignment dictionary
:param verbose: Verbosity level
:return: trg_word_seq with replaced unknown words
"""
trans_words = trg_word_seq
new_trans_words = []
mapping = mapping or {}
for j in range(len(trans_words)):
current_word = trans_words[j]
if glossary is not None and glossary.get(
src_word_seq[hard_alignment[j]]) is not None:
current_word = glossary.get(src_word_seq[hard_alignment[j]])
new_trans_words.append(current_word)
elif current_word == unk_symbol:
current_src = src_word_seq[hard_alignment[j]]
if isinstance(current_src, str) and sys.version_info.major == 2:
current_src = current_src.decode('utf-8')
if heuristic == 0: # Copy (ok when training with large vocabularies on en->fr, en->de)
new_trans_words.append(current_src)
elif heuristic == 1:
# Use the most likely translation (with t-table). If not found, copy the source word.
# Ok for small vocabulary (~30k) models
if mapping.get(current_src) is not None:
new_trans_words.append(mapping[current_src])
else:
new_trans_words.append(current_src)
elif heuristic == 2:
# Use t-table if the source word starts with a lowercase letter. Otherwise copy
# Sometimes works better than other heuristics
if mapping.get(current_src) is not None and current_src[0].islower():
new_trans_words.append(mapping[current_src])
else:
new_trans_words.append(current_src)
else:
new_trans_words.append(current_word)
return new_trans_words
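# Illustrative usage sketch (the words below are hypothetical): with heuristic 0
# the aligned source word is copied over the unknown symbol.
def _example_replace_unknown_words():
    replaced = replace_unknown_words(src_word_seq=['la', 'casa'],
                                     trg_word_seq=['the', '<unk>'],
                                     hard_alignment=[0, 1],
                                     unk_symbol='<unk>',
                                     heuristic=0)
    assert replaced == ['the', 'casa']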
def decode_predictions_beam_search(preds,
index2word,
glossary=None,
alphas=None,
heuristic=0,
x_text=None,
unk_symbol='<unk>',
pad_sequences=False,
mapping=None,
verbose=0):
"""
Decodes predictions from the BeamSearch method.
:param preds: Predictions codified as word indices.
:param index2word: Mapping from word indices into word characters.
:param alphas: Attention model weights: Float matrix with shape (I, J) (I: number of target items; J: number of source items).
:param heuristic: Replace unknown words heuristic (0, 1 or 2)
:param x_text: Source text (for unk replacement)
:param unk_symbol: Unknown words symbol
:param pad_sequences: Whether we should make a zero-pad on the input sequence.
:param mapping: Source-target dictionary (for unk_replace heuristics 1 and 2)
:param verbose: Verbosity level, by default 0.
:return: List of decoded predictions
"""
if verbose > 0:
logger.info('Decoding beam search prediction ...')
if alphas is not None:
if x_text is None:
raise AssertionError('When using POS_UNK, you must provide the input '
'text to decode_predictions_beam_search!')
if verbose > 0:
logger.info('Using heuristic %d' % heuristic)
if pad_sequences:
preds = [pred[:sum([int(elem > 0) for elem in pred]) + 1] for pred in preds]
flattened_predictions = [list(map(lambda x: index2word[x], pred)) for pred in
preds]
final_predictions = []
if alphas is not None:
x_text = list(map(lambda x: x.split(), x_text))
hard_alignments = list(
map(lambda alignment, x_sentence: np.argmax(
alignment[:, :max(1, len(x_sentence))], axis=1), alphas,
x_text))
for i, a_no in list(enumerate(flattened_predictions)):
if unk_symbol in a_no or glossary is not None:
a_no = replace_unknown_words(x_text[i],
a_no,
hard_alignments[i],
unk_symbol,
glossary=glossary,
heuristic=heuristic,
mapping=mapping,
verbose=verbose)
a_no = [a.decode('utf-8') if isinstance(a,
str) and sys.version_info.major == 2 else a
for a in a_no]
tmp = u' '.join(a_no[:-1])
final_predictions.append(tmp)
else:
for a_no in flattened_predictions:
a_no = [a.decode('utf-8') if isinstance(a,
str) and sys.version_info.major == 2 else a
for a in a_no]
tmp = u' '.join(a_no[:-1])
final_predictions.append(tmp)
return final_predictions
def sampling(scores,
sampling_type='max_likelihood',
temperature=1.):
"""
Sampling words (each sample is drawn from a categorical distribution).
Or picks up words that maximize the likelihood.
:param scores: array of size #samples x #classes;
every entry determines a score for sample i having class j
:param sampling_type:
:param temperature: Predictions temperature. The higher, the flatter probabilities. Hence more random outputs.
:return: set of indices chosen as output, a vector of size #samples
"""
if isinstance(scores, dict):
scores = scores['output']
if sampling_type == 'multinomial':
preds = np.asarray(scores).astype('float64')
preds = np.log(preds) / temperature
exp_preds = np.exp(preds)
preds = exp_preds / np.sum(exp_preds)
probas = np.random.multinomial(1, preds, 1)
return np.argmax(probas)
elif sampling_type == 'max_likelihood':
return np.argmax(scores, axis=-1)
else:
raise NotImplementedError()
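# Illustrative usage sketch: max-likelihood sampling just takes the argmax per row.
def _example_sampling():
    scores = np.array([[0.1, 0.7, 0.2],
                       [0.8, 0.1, 0.1]])
    assert sampling(scores, sampling_type='max_likelihood').tolist() == [1, 0]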
# Data structures-related utils
def flatten_list_of_lists(list_of_lists):
"""
Flattens a list of lists
:param list_of_lists: List of lists
    :return: Flattened list
"""
return [item for sublist in list_of_lists for item in sublist]
def flatten(my_list):
"""
    Flatten an arbitrarily nested list (more general than flatten_list_of_lists, but also less efficient).
:param my_list:
:return:
"""
if not my_list:
return my_list
return flatten(my_list[0]) + (flatten(my_list[1:]) if len(my_list) > 1 else []) if isinstance(my_list,
list) else [
my_list]
def key_with_max_val(d):
""" a) create a list of the dict's keys and values;
b) return the key with the max value"""
d = dict((k, v) for k, v in iteritems(d) if isinstance(v, (int, float, complex)))
v = list(d.values())
k = list(d.keys())
if d == {}:
return -1
else:
return k[v.index(max(v))]
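# Illustrative usage sketch: non-numeric values are ignored before taking the maximum.
def _example_key_with_max_val():
    assert key_with_max_val({'bleu': 0.3, 'ter': 0.7, 'name': 'model'}) == 'ter'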
def print_dict(d, header=''):
"""
Formats a dictionary for printing.
:param d: Dictionary to print.
:return: String containing the formatted dictionary.
"""
obj_str = str(header) + '{ \n\t'
obj_str += "\n\t".join([str(key) + ": " + str(d[key]) for key in sorted(d.keys())])
obj_str += '\n'
obj_str += '}'
return obj_str
| 39.745814
| 130
| 0.588443
|
9d57d560c7b9f4c6c832dde6698cab1565aa0fb1
| 829
|
py
|
Python
|
configloader/image/setup.py
|
SGeetansh/dffml
|
04647bdcadef2f7e7b59cdd8ac1e89f17ef1095b
|
[
"MIT"
] | 1
|
2019-03-11T17:24:17.000Z
|
2019-03-11T17:24:17.000Z
|
configloader/image/setup.py
|
SGeetansh/dffml
|
04647bdcadef2f7e7b59cdd8ac1e89f17ef1095b
|
[
"MIT"
] | 24
|
2020-05-20T23:29:57.000Z
|
2021-04-14T04:18:21.000Z
|
configloader/image/setup.py
|
SGeetansh/dffml
|
04647bdcadef2f7e7b59cdd8ac1e89f17ef1095b
|
[
"MIT"
] | 1
|
2020-05-06T19:07:02.000Z
|
2020-05-06T19:07:02.000Z
|
import os
import importlib.util
from setuptools import setup
# Boilerplate to load commonalities
spec = importlib.util.spec_from_file_location(
"setup_common", os.path.join(os.path.dirname(__file__), "setup_common.py")
)
common = importlib.util.module_from_spec(spec)
spec.loader.exec_module(common)
common.KWARGS["install_requires"] += [
"opencv-python>=4.2.0.34",
# See https://github.com/intel/dffml/issues/816
"numpy>=1.16.2,<1.19.0",
]
common.KWARGS["entry_points"] = {
"dffml.configloader": [
f"png = {common.IMPORT_NAME}.configloader:PNGConfigLoader",
f"jpg = {common.IMPORT_NAME}.configloader:JPGConfigLoader",
f"jpeg = {common.IMPORT_NAME}.configloader:JPEGConfigLoader",
f"tiff = {common.IMPORT_NAME}.configloader:TIFFConfigLoader",
]
}
setup(**common.KWARGS)
| 30.703704
| 78
| 0.71532
|
1581b5edf184ae04f6cd66fd8365e2cb0c9587d4
| 2,946
|
py
|
Python
|
octopusapi/__init__.py
|
marcelocure/octopusapi
|
0d761b73460ae6ad761ba1479141322f078fd1b7
|
[
"MIT"
] | null | null | null |
octopusapi/__init__.py
|
marcelocure/octopusapi
|
0d761b73460ae6ad761ba1479141322f078fd1b7
|
[
"MIT"
] | null | null | null |
octopusapi/__init__.py
|
marcelocure/octopusapi
|
0d761b73460ae6ad761ba1479141322f078fd1b7
|
[
"MIT"
] | 1
|
2016-02-26T19:28:08.000Z
|
2016-02-26T19:28:08.000Z
|
import metadata
import json
import config
import logging
from wsgiref import simple_server
import falcon
from middleware import AuthMiddleware, RequireJSON, JSONTranslator, StorageError, max_body
__version__ = metadata.version
__author__ = metadata.authors[0]
__license__ = metadata.license
__copyright__ = metadata.copyright
class Field(object):
def __init__(self, name, type, description, valid_values=None):
self.name = name
self.type = type
self.description = description
self.valid_values = valid_values
class Resource(object):
def __init__(self, name, fields, get=None, post=None, put=None, delete=None, links=[]):
self.name = name
self.fields = fields
self.links = links
self.get = get
self.post = post
self.put = put
self.delete = delete
self.allowed_methods = []
if self.get:
self.allowed_methods.append('get')
if self.post:
self.allowed_methods.append('post')
if self.put:
self.allowed_methods.append('put')
if self.delete:
self.allowed_methods.append('delete')
def validate_contract(self, req):
fields = map(lambda field: field.name, self.fields)
request_fields = req.context['doc'][self.name].keys()
result = filter(lambda key: key in fields, request_fields)
all_fields_informed = lambda result: len(result) == len(self.fields)
if not all_fields_informed(result):
raise falcon.HTTPBadRequest('Invalid input fields', 'The fields contained in the request body are not valid.')
def on_get(self, req, resp, id=None):
if not self.get:
raise falcon.HTTPMethodNotAllowed(self.allowed_methods)
self.get(req, resp)
def on_put(self, req, resp, id=None):
if not self.put:
raise falcon.HTTPMethodNotAllowed(self.allowed_methods)
self.put(req, resp)
def on_delete(self, req, resp, id=None):
if not self.delete:
raise falcon.HTTPMethodNotAllowed(self.allowed_methods)
self.delete(req, resp)
@falcon.before(max_body(64 * 1024))
def on_post(self, req, resp):
if not self.post:
raise falcon.HTTPMethodNotAllowed(self.allowed_methods)
self.validate_contract(req)
self.post(req, resp)
class OctopusApp(object):
def __init__(self, app_name, resources, config):
self.resources = resources
self.app_name = app_name
self.config = config
def validate_resource(self, resource):
        if not (resource.get or resource.post or resource.put or resource.delete):
            raise Exception('Resource {0} has no HTTP verb handling'.format(resource.name))
def load_resource(self, app, resource):
self.validate_resource(resource)
app.add_route('/{0}/{1}/'.format(self.app_name, resource.name), resource)
def run_server(self):
app = falcon.API(middleware=[AuthMiddleware(), RequireJSON(), JSONTranslator()])
map(lambda resource: self.load_resource(app, resource), self.resources)
app.add_error_handler(StorageError, StorageError.handle)
httpd = simple_server.make_server(self.config.host, self.config.port, app)
httpd.serve_forever()
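# Illustrative usage sketch (handler, field and config values are hypothetical;
# ``app_config`` stands in for any object exposing ``host`` and ``port``):
#
#   def get_orders(req, resp):
#       resp.body = '{"orders": []}'
#
#   orders = Resource('orders', fields=[Field('id', 'int', 'Order identifier')],
#                     get=get_orders)
#   OctopusApp('shop', resources=[orders], config=app_config).run_server()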
| 28.601942
| 113
| 0.746096
|
52e08d231e314618d47a232ea302aa0d28587cc7
| 3,513
|
py
|
Python
|
tests/sim_norm.py
|
jschiavon/optispd
|
fb3f904a1f1099d31cbcaf27dfc63e5a9e77c9f5
|
[
"MIT"
] | null | null | null |
tests/sim_norm.py
|
jschiavon/optispd
|
fb3f904a1f1099d31cbcaf27dfc63e5a9e77c9f5
|
[
"MIT"
] | null | null | null |
tests/sim_norm.py
|
jschiavon/optispd
|
fb3f904a1f1099d31cbcaf27dfc63e5a9e77c9f5
|
[
"MIT"
] | null | null | null |
import jax.numpy as jnp
from jax import jit, random, grad
from jax.scipy.special import logsumexp
from jax.scipy.stats import multivariate_normal as mvn
from jax.scipy.stats import norm
from jax.ops import index_update, index
from jax.config import config
config.update('jax_enable_x64', True)
from scipy.optimize import minimize
from time import time
from tqdm import tqdm
import pandas as pd
from optispd.minimizer import minimizer
from optispd.manifold import SPD, Euclidean, Product
seed = 0
rng = random.PRNGKey(seed)
N = 1000
tol = 1e-4
ps = [2, 5, 10, 25, 50, 75, 100]
n_rep = 50
def ll(X, y):
datapart = jnp.trace(jnp.linalg.solve(X, jnp.matmul(y.T, y)))
return 0.5 * (N * jnp.linalg.slogdet(X)[1] + datapart)
def ll_chol(X, y):
p = y.shape[-1]
cov = index_update(
jnp.zeros(shape=(p, p)),
jnp.triu_indices(p),
X).T
    logdet = 2. * jnp.sum(jnp.log(jnp.diag(cov)))  # log|Sigma| for Sigma = L L^T
sol = jnp.linalg.solve(cov, y.T)
return 0.5 * (N * logdet + jnp.einsum('ij,ij', sol, sol))
def optimization(kind='rcg', man=None, fun=None, gra=None, init=None, mle=0):
if kind == 'rcg':
optim = minimizer(man, method='rcg', tol=tol, verbosity=0)
res = optim.solve(fun, gra, init)
return res.nit, res.time, jnp.abs(res.fun - mle)
if kind == 'rlbfgs':
optim = minimizer(man, method='rlbfgs', tol=tol, verbosity=0)
res = optim.solve(fun, gra, init)
return res.nit, res.time, jnp.abs(res.fun - mle)
if kind == 'chol':
# print('start cholesky opt')
start = time()
init = jnp.linalg.cholesky(init)
init = init.T[jnp.triu_indices_from(init)]
res = minimize(fun, init, method='cg', jac=gra, tol=tol, options={'maxiter':1000})
# print('finished cholesky opt')
return res['nit'], time() - start, jnp.abs(res['fun'] - mle)
def run(manifold, p, k):
k, key = random.split(k)
tmean = random.normal(key, shape=(p,))
k, key = random.split(k)
tcov = random.normal(key, shape=(p, p))
tcov = tcov @ tcov.T
k, key = random.split(k)
data = random.multivariate_normal(key, mean=tmean, cov=tcov, shape=(N,))
s_mu = jnp.mean(data, axis=0)
s_cov = jnp.dot((data - s_mu).T, data - s_mu) / N
MLE = jnp.append(jnp.append(s_cov + jnp.outer(s_mu, s_mu),
jnp.array([s_mu]), axis=0),
jnp.array([jnp.append(s_mu, 1)]).T, axis=1)
mle_chol = jnp.linalg.cholesky(MLE)
mle_chol = mle_chol.T[jnp.triu_indices_from(mle_chol)]
data = jnp.concatenate([data.T, jnp.ones(shape=(1, N))], axis=0).T
fun = jit(lambda x: ll(x, data))
gra = jit(grad(fun))
init = jnp.identity(p + 1)
ll_mle = fun(MLE)
res_cg = optimization('rcg', manifold, fun=fun, gra=gra, init=init, mle=ll_mle)
res_bfgs = optimization('rlbfgs', manifold, fun=fun, gra=gra, init=init, mle=ll_mle)
fun = jit(lambda x: ll_chol(x, data))
gra = jit(grad(fun))
ll_mle_chol = fun(mle_chol)
res_cho = optimization('chol', fun=fun, gra=gra, init=init, mle=ll_mle_chol)
return p, *res_cg, *res_bfgs, *res_cho
res = []
for p in tqdm(ps):
man = SPD(p+1)
rng, *keys = random.split(rng, n_rep + 1)
for key in tqdm(keys):
res.append(run(man, p, key))
df = pd.DataFrame(data=res, columns=['p',
'cg_it', 'cg_time', 'cg_fun',
'bfgs_it', 'bfgs_time', 'bfgs_fun',
'chol_it', 'chol_time', 'chol_fun'])
df.to_csv('simulations/normal.csv', index=False)
| 30.284483
| 90
| 0.615998
|
4cb79754ef8fd992dd652fb7579469e249d73aa3
| 1,998
|
py
|
Python
|
app/backend/src/couchers/servicers/jail.py
|
a-ch-chu/couchers
|
a3f6d619854b94e7f1144807f60f50f81bfa38c9
|
[
"MIT"
] | null | null | null |
app/backend/src/couchers/servicers/jail.py
|
a-ch-chu/couchers
|
a3f6d619854b94e7f1144807f60f50f81bfa38c9
|
[
"MIT"
] | null | null | null |
app/backend/src/couchers/servicers/jail.py
|
a-ch-chu/couchers
|
a3f6d619854b94e7f1144807f60f50f81bfa38c9
|
[
"MIT"
] | null | null | null |
import logging
import grpc
from couchers import errors
from couchers.db import session_scope
from couchers.models import User
from couchers.utils import create_coordinate
from pb import jail_pb2, jail_pb2_grpc
logger = logging.getLogger(__name__)
class Jail(jail_pb2_grpc.JailServicer):
"""
The Jail servicer.
API calls allowed for users who need to complete some tasks before being
fully active
"""
def _get_jail_info(self, user):
res = jail_pb2.JailInfoRes(
has_not_accepted_tos=user.accepted_tos != 1,
has_not_added_location=user.is_missing_location,
)
# if any of the bools in res are true, we're jailed
jailed = False
for field in res.DESCRIPTOR.fields:
if getattr(res, field.name):
jailed = True
res.jailed = jailed
# double check
assert user.is_jailed == jailed
return res
def JailInfo(self, request, context):
with session_scope() as session:
user = session.query(User).filter(User.id == context.user_id).one()
return self._get_jail_info(user)
def AcceptTOS(self, request, context):
with session_scope() as session:
user = session.query(User).filter(User.id == context.user_id).one()
if user.accepted_tos == 1 and not request.accept:
context.abort(grpc.StatusCode.FAILED_PRECONDITION, errors.CANT_UNACCEPT_TOS)
user.accepted_tos = 1 if request.accept else 0
session.commit()
return self._get_jail_info(user)
def SetLocation(self, request, context):
with session_scope() as session:
user = session.query(User).filter(User.id == context.user_id).one()
user.city = request.city
user.geom = create_coordinate(request.lat, request.lng)
user.geom_radius = request.radius
session.commit()
return self._get_jail_info(user)
| 29.382353
| 92
| 0.645145
|
915983776fa36363096ca91f25c8f88dedf7160e
| 23,710
|
py
|
Python
|
cinder/tests/unit/api/test_common.py
|
UbuntuEvangelist/cinder
|
cbb55074de48176cbaa3f31a5b1d595b8aad7aa8
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/api/test_common.py
|
UbuntuEvangelist/cinder
|
cbb55074de48176cbaa3f31a5b1d595b8aad7aa8
|
[
"Apache-2.0"
] | 1
|
2021-03-21T11:38:29.000Z
|
2021-03-21T11:38:29.000Z
|
cinder/tests/unit/api/test_common.py
|
UbuntuEvangelist/cinder
|
cbb55074de48176cbaa3f31a5b1d595b8aad7aa8
|
[
"Apache-2.0"
] | 15
|
2017-01-12T10:35:10.000Z
|
2019-04-19T08:22:10.000Z
|
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suites for 'common' code used throughout the OpenStack HTTP API.
"""
import mock
from testtools import matchers
import webob
import webob.exc
from oslo_config import cfg
from cinder.api import common
from cinder import test
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
CONF = cfg.CONF
class LimiterTest(test.TestCase):
"""Unit tests for the `cinder.api.common.limited` method.
This method takes in a list of items and, depending on the 'offset'
and 'limit' GET params, returns a subset or complete set of the given
items.
"""
def setUp(self):
"""Run before each test."""
super(LimiterTest, self).setUp()
self.tiny = list(range(1))
self.small = list(range(10))
self.medium = list(range(1000))
self.large = list(range(10000))
def test_limiter_offset_zero(self):
"""Test offset key works with 0."""
req = webob.Request.blank('/?offset=0')
self.assertEqual(self.tiny, common.limited(self.tiny, req))
self.assertEqual(self.small, common.limited(self.small, req))
self.assertEqual(self.medium, common.limited(self.medium, req))
self.assertEqual(self.large[:1000], common.limited(self.large, req))
def test_limiter_offset_medium(self):
"""Test offset key works with a medium sized number."""
req = webob.Request.blank('/?offset=10')
self.assertEqual([], common.limited(self.tiny, req))
self.assertEqual(self.small[10:], common.limited(self.small, req))
self.assertEqual(self.medium[10:], common.limited(self.medium, req))
self.assertEqual(self.large[10:1010], common.limited(self.large, req))
def test_limiter_offset_over_max(self):
"""Test offset key works with a number over 1000 (max_limit)."""
req = webob.Request.blank('/?offset=1001')
self.assertEqual([], common.limited(self.tiny, req))
self.assertEqual([], common.limited(self.small, req))
self.assertEqual([], common.limited(self.medium, req))
self.assertEqual(
self.large[1001:2001], common.limited(self.large, req))
def test_limiter_offset_blank(self):
"""Test offset key works with a blank offset."""
req = webob.Request.blank('/?offset=')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_offset_bad(self):
"""Test offset key works with a BAD offset."""
req = webob.Request.blank(u'/?offset=\u0020aa')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_nothing(self):
"""Test request with no offset or limit."""
req = webob.Request.blank('/')
self.assertEqual(self.tiny, common.limited(self.tiny, req))
self.assertEqual(self.small, common.limited(self.small, req))
self.assertEqual(self.medium, common.limited(self.medium, req))
self.assertEqual(self.large[:1000], common.limited(self.large, req))
def test_limiter_limit_zero(self):
"""Test limit of zero."""
req = webob.Request.blank('/?limit=0')
self.assertEqual(self.tiny, common.limited(self.tiny, req))
self.assertEqual(self.small, common.limited(self.small, req))
self.assertEqual(self.medium, common.limited(self.medium, req))
self.assertEqual(self.large[:1000], common.limited(self.large, req))
def test_limiter_limit_bad(self):
"""Test with a bad limit."""
req = webob.Request.blank(u'/?limit=hello')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_limit_medium(self):
"""Test limit of 10."""
req = webob.Request.blank('/?limit=10')
self.assertEqual(self.tiny, common.limited(self.tiny, req))
self.assertEqual(self.small, common.limited(self.small, req))
self.assertEqual(self.medium[:10], common.limited(self.medium, req))
self.assertEqual(self.large[:10], common.limited(self.large, req))
def test_limiter_limit_over_max(self):
"""Test limit of 3000."""
req = webob.Request.blank('/?limit=3000')
self.assertEqual(self.tiny, common.limited(self.tiny, req))
self.assertEqual(self.small, common.limited(self.small, req))
self.assertEqual(self.medium, common.limited(self.medium, req))
self.assertEqual(self.large[:1000], common.limited(self.large, req))
def test_limiter_limit_and_offset(self):
"""Test request with both limit and offset."""
items = list(range(2000))
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(items[1:4], common.limited(items, req))
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(items[3:1003], common.limited(items, req))
req = webob.Request.blank('/?offset=3&limit=1500')
self.assertEqual(items[3:1003], common.limited(items, req))
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual([], common.limited(items, req))
def test_limiter_custom_max_limit(self):
"""Test a max_limit other than 1000."""
items = list(range(2000))
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(
items[1:4], common.limited(items, req, max_limit=2000))
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(
items[3:], common.limited(items, req, max_limit=2000))
req = webob.Request.blank('/?offset=3&limit=2500')
self.assertEqual(
items[3:], common.limited(items, req, max_limit=2000))
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual([], common.limited(items, req, max_limit=2000))
def test_limiter_negative_limit(self):
"""Test a negative limit."""
req = webob.Request.blank('/?limit=-3000')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_negative_offset(self):
"""Test a negative offset."""
req = webob.Request.blank('/?offset=-30')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
class PaginationParamsTest(test.TestCase):
"""Unit tests for `cinder.api.common.get_pagination_params` method.
This method takes in a request object and returns 'marker' and 'limit'
GET params.
"""
def test_nonnumerical_limit(self):
"""Test nonnumerical limit param."""
req = webob.Request.blank('/?limit=hello')
self.assertRaises(
webob.exc.HTTPBadRequest, common.get_pagination_params,
req.GET.copy())
@mock.patch.object(common, 'CONF')
def test_no_params(self, mock_cfg):
"""Test no params."""
mock_cfg.osapi_max_limit = 100
req = webob.Request.blank('/')
expected = (None, 100, 0)
self.assertEqual(expected,
common.get_pagination_params(req.GET.copy()))
def test_valid_marker(self):
"""Test valid marker param."""
marker = '263abb28-1de6-412f-b00b-f0ee0c4333c2'
req = webob.Request.blank('/?marker=' + marker)
expected = (marker, CONF.osapi_max_limit, 0)
self.assertEqual(expected,
common.get_pagination_params(req.GET.copy()))
def test_valid_limit(self):
"""Test valid limit param."""
req = webob.Request.blank('/?limit=10')
expected = (None, 10, 0)
self.assertEqual(expected,
common.get_pagination_params(req.GET.copy()))
def test_invalid_limit(self):
"""Test invalid limit param."""
req = webob.Request.blank('/?limit=-2')
self.assertRaises(
webob.exc.HTTPBadRequest, common.get_pagination_params,
req.GET.copy())
def test_valid_limit_and_marker(self):
"""Test valid limit and marker parameters."""
marker = '263abb28-1de6-412f-b00b-f0ee0c4333c2'
req = webob.Request.blank('/?limit=20&marker=%s' % marker)
expected = (marker, 20, 0)
self.assertEqual(expected,
common.get_pagination_params(req.GET.copy()))
class SortParamUtilsTest(test.TestCase):
def test_get_sort_params_defaults(self):
"""Verifies the default sort key and direction."""
sort_keys, sort_dirs = common.get_sort_params({})
self.assertEqual(['created_at'], sort_keys)
self.assertEqual(['desc'], sort_dirs)
def test_get_sort_params_override_defaults(self):
"""Verifies that the defaults can be overriden."""
sort_keys, sort_dirs = common.get_sort_params({}, default_key='key1',
default_dir='dir1')
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
def test_get_sort_params_single_value_sort_param(self):
"""Verifies a single sort key and direction."""
params = {'sort': 'key1:dir1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
def test_get_sort_params_single_value_old_params(self):
"""Verifies a single sort key and direction."""
params = {'sort_key': 'key1', 'sort_dir': 'dir1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
def test_get_sort_params_single_with_default_sort_param(self):
"""Verifies a single sort value with a default direction."""
params = {'sort': 'key1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
# Direction should be defaulted
self.assertEqual(['desc'], sort_dirs)
def test_get_sort_params_single_with_default_old_params(self):
"""Verifies a single sort value with a default direction."""
params = {'sort_key': 'key1'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
# Direction should be defaulted
self.assertEqual(['desc'], sort_dirs)
def test_get_sort_params_multiple_values(self):
"""Verifies multiple sort parameter values."""
params = {'sort': 'key1:dir1,key2:dir2,key3:dir3'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'dir2', 'dir3'], sort_dirs)
def test_get_sort_params_multiple_not_all_dirs(self):
"""Verifies multiple sort keys without all directions."""
params = {'sort': 'key1:dir1,key2,key3:dir3'}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
# Second key is missing the direction, should be defaulted
self.assertEqual(['dir1', 'desc', 'dir3'], sort_dirs)
def test_get_sort_params_multiple_override_default_dir(self):
"""Verifies multiple sort keys and overriding default direction."""
params = {'sort': 'key1:dir1,key2,key3'}
sort_keys, sort_dirs = common.get_sort_params(params,
default_dir='foo')
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'foo', 'foo'], sort_dirs)
def test_get_sort_params_params_modified(self):
"""Verifies that the input sort parameter are modified."""
params = {'sort': 'key1:dir1,key2:dir2,key3:dir3'}
common.get_sort_params(params)
self.assertEqual({}, params)
params = {'sort_key': 'key1', 'sort_dir': 'dir1'}
common.get_sort_params(params)
self.assertEqual({}, params)
def test_get_sort_params_random_spaces(self):
"""Verifies that leading and trailing spaces are removed."""
params = {'sort': ' key1 : dir1,key2: dir2 , key3 '}
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'dir2', 'desc'], sort_dirs)
def test_get_params_mix_sort_and_old_params(self):
"""An exception is raised if both types of sorting params are given."""
for params in ({'sort': 'k1', 'sort_key': 'k1'},
{'sort': 'k1', 'sort_dir': 'd1'},
{'sort': 'k1', 'sort_key': 'k1', 'sort_dir': 'd2'}):
self.assertRaises(webob.exc.HTTPBadRequest,
common.get_sort_params,
params)
class MiscFunctionsTest(test.TestCase):
def test_remove_major_version_from_href(self):
fixture = 'http://www.testsite.com/v1/images'
expected = 'http://www.testsite.com/images'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href(self):
fixture = 'http://www.testsite.com/v1.1/images'
expected = 'http://www.testsite.com/images'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_2(self):
fixture = 'http://www.testsite.com/v1.1/'
expected = 'http://www.testsite.com/'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_3(self):
fixture = 'http://www.testsite.com/v10.10'
expected = 'http://www.testsite.com'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_4(self):
fixture = 'http://www.testsite.com/v1.1/images/v10.5'
expected = 'http://www.testsite.com/images/v10.5'
actual = common.remove_version_from_href(fixture)
self.assertEqual(expected, actual)
def test_remove_version_from_href_bad_request(self):
fixture = 'http://www.testsite.com/1.1/images'
self.assertRaises(ValueError,
common.remove_version_from_href,
fixture)
def test_remove_version_from_href_bad_request_2(self):
fixture = 'http://www.testsite.com/v/images'
self.assertRaises(ValueError,
common.remove_version_from_href,
fixture)
def test_remove_version_from_href_bad_request_3(self):
fixture = 'http://www.testsite.com/v1.1images'
self.assertRaises(ValueError,
common.remove_version_from_href,
fixture)
class TestCollectionLinks(test.TestCase):
"""Tests the _get_collection_links method."""
def _validate_next_link(self, item_count, osapi_max_limit, limit,
should_link_exist):
req = webob.Request.blank('/?limit=%s' % limit if limit else '/')
link_return = [{"rel": "next", "href": "fake_link"}]
self.flags(osapi_max_limit=osapi_max_limit)
if limit is None:
limited_list_size = min(item_count, osapi_max_limit)
else:
limited_list_size = min(item_count, osapi_max_limit, limit)
limited_list = [{"uuid": str(i)} for i in range(limited_list_size)]
builder = common.ViewBuilder()
def get_pagination_params(params, max_limit=CONF.osapi_max_limit,
original_call=common.get_pagination_params):
return original_call(params, max_limit)
def _get_limit_param(params, max_limit=CONF.osapi_max_limit,
original_call=common._get_limit_param):
return original_call(params, max_limit)
with mock.patch.object(common, 'get_pagination_params',
get_pagination_params), \
mock.patch.object(common, '_get_limit_param',
_get_limit_param), \
mock.patch.object(common.ViewBuilder, '_generate_next_link',
return_value=link_return) as href_link_mock:
results = builder._get_collection_links(req, limited_list,
mock.sentinel.coll_key,
item_count, "uuid")
if should_link_exist:
href_link_mock.assert_called_once_with(limited_list, "uuid",
req,
mock.sentinel.coll_key)
self.assertThat(results, matchers.HasLength(1))
else:
self.assertFalse(href_link_mock.called)
self.assertThat(results, matchers.HasLength(0))
def test_items_equals_osapi_max_no_limit(self):
item_count = 5
osapi_max_limit = 5
limit = None
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_equals_osapi_max_greater_than_limit(self):
item_count = 5
osapi_max_limit = 5
limit = 4
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_equals_osapi_max_equals_limit(self):
item_count = 5
osapi_max_limit = 5
limit = 5
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_equals_osapi_max_less_than_limit(self):
item_count = 5
osapi_max_limit = 5
limit = 6
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_less_than_osapi_max_no_limit(self):
item_count = 5
osapi_max_limit = 7
limit = None
should_link_exist = False
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_limit_less_than_items_less_than_osapi_max(self):
item_count = 5
osapi_max_limit = 7
limit = 4
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_limit_equals_items_less_than_osapi_max(self):
item_count = 5
osapi_max_limit = 7
limit = 5
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_less_than_limit_less_than_osapi_max(self):
item_count = 5
osapi_max_limit = 7
limit = 6
should_link_exist = False
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_less_than_osapi_max_equals_limit(self):
item_count = 5
osapi_max_limit = 7
limit = 7
should_link_exist = False
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_less_than_osapi_max_less_than_limit(self):
item_count = 5
osapi_max_limit = 7
limit = 8
should_link_exist = False
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_greater_than_osapi_max_no_limit(self):
item_count = 5
osapi_max_limit = 3
limit = None
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_limit_less_than_items_greater_than_osapi_max(self):
item_count = 5
osapi_max_limit = 3
limit = 2
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_greater_than_osapi_max_equals_limit(self):
item_count = 5
osapi_max_limit = 3
limit = 3
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_greater_than_limit_greater_than_osapi_max(self):
item_count = 5
osapi_max_limit = 3
limit = 4
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_items_equals_limit_greater_than_osapi_max(self):
item_count = 5
osapi_max_limit = 3
limit = 5
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
def test_limit_greater_than_items_greater_than_osapi_max(self):
item_count = 5
osapi_max_limit = 3
limit = 6
should_link_exist = True
self._validate_next_link(item_count, osapi_max_limit, limit,
should_link_exist)
class LinkPrefixTest(test.TestCase):
def test_update_link_prefix(self):
vb = common.ViewBuilder()
result = vb._update_link_prefix("http://192.168.0.243:24/",
"http://127.0.0.1/volume")
self.assertEqual("http://127.0.0.1/volume", result)
result = vb._update_link_prefix("http://foo.x.com/v1",
"http://new.prefix.com")
self.assertEqual("http://new.prefix.com/v1", result)
result = vb._update_link_prefix(
"http://foo.x.com/v1",
"http://new.prefix.com:20455/new_extra_prefix")
self.assertEqual("http://new.prefix.com:20455/new_extra_prefix/v1",
result)
class RequestUrlTest(test.TestCase):
def test_get_request_url_no_forward(self):
app_url = 'http://127.0.0.1/v2;param?key=value#frag'
request = type('', (), {
'application_url': app_url,
'headers': {}
})
result = common.get_request_url(request)
self.assertEqual(app_url, result)
def test_get_request_url_forward(self):
request = type('', (), {
'application_url': 'http://127.0.0.1/v2;param?key=value#frag',
'headers': {'X-Forwarded-Host': '192.168.0.243:24'}
})
result = common.get_request_url(request)
self.assertEqual('http://192.168.0.243:24/v2;param?key=value#frag',
result)
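    # Editor's note: a minimal, hypothetical sketch of the behaviour the two
    # tests above describe for common.get_request_url() (not the real code):
    #
    #   from urllib.parse import urlsplit, urlunsplit
    #
    #   def _get_request_url_sketch(request):
    #       url = request.application_url
    #       forwarded = request.headers.get('X-Forwarded-Host')
    #       if forwarded:
    #           parts = urlsplit(url)
    #           url = urlunsplit(
    #               (parts.scheme, forwarded, parts.path, parts.query, parts.fragment))
    #       return url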
| 41.378709 | 79 | 0.624926 |
526f85faefffb7a27b2a9d26caa193a6161572c1 | 731 | py | Python |
docs/conf.py | ewerybody/svg.charts | eb77a381f0721b3d59ae9461765ac9e9cffef586 | ["MIT"] | null | null | null |
docs/conf.py | ewerybody/svg.charts | eb77a381f0721b3d59ae9461765ac9e9cffef586 | ["MIT"] | null | null | null |
docs/conf.py | ewerybody/svg.charts | eb77a381f0721b3d59ae9461765ac9e9cffef586 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
extensions = ['sphinx.ext.autodoc', 'jaraco.packaging.sphinx', 'rst.linker']
master_doc = "index"
link_files = {
'../CHANGES.rst': dict(
using=dict(GH='https://github.com'),
replace=[
dict(
pattern=r'(Issue #|\B#)(?P<issue>\d+)',
url='{package_url}/issues/{issue}',
),
dict(
pattern=r'^(?m)((?P<scm_version>v?\d+(\.\d+){1,2}))\n[-=]+\n',
with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n',
),
dict(
pattern=r'PEP[- ](?P<pep_number>\d+)',
url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/',
),
],
)
}
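# Editor's note: an illustrative (hypothetical) rendering of what the first
# replace rule above does to CHANGES.rst during the docs build, assuming the
# package URL resolves to the project's GitHub page:
#
#   import re
#   text = 'Fixed rendering of axis labels (#42).'
#   linked = re.sub(
#       r'(Issue #|\B#)(?P<issue>\d+)',
#       r'https://github.com/ewerybody/svg.charts/issues/\g<issue>',
#       text)
#   # linked == 'Fixed rendering of axis labels
#   #            (https://github.com/ewerybody/svg.charts/issues/42).'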
| 28.115385 | 78 | 0.44186 |
4abf42ec5755348ef4414f50a7e108aacf831343 | 101,692 | py | Python |
pysnmp/TIMETRA-LOG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | ["Apache-2.0"] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z |
pysnmp/TIMETRA-LOG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | ["Apache-2.0"] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z |
pysnmp/TIMETRA-LOG-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | ["Apache-2.0"] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z |
#
# PySNMP MIB module TIMETRA-LOG-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TIMETRA-LOG-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:09:43 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
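# Editor's note: like all pysmi-generated modules, this file is not meant to
# be imported directly — `mibBuilder` below is injected by pysnmp's MIB
# loader at load time, and the `if mibBuilder.loadTexts:` guards only attach
# DESCRIPTION strings when the loader asks for them.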
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
SnmpSecurityLevel, SnmpMessageProcessingModel, SnmpAdminString = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpSecurityLevel", "SnmpMessageProcessingModel", "SnmpAdminString")
snmpNotifyEntry, = mibBuilder.importSymbols("SNMP-NOTIFICATION-MIB", "snmpNotifyEntry")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
sysDescr, sysObjectID = mibBuilder.importSymbols("SNMPv2-MIB", "sysDescr", "sysObjectID")
MibIdentifier, Counter32, iso, NotificationType, TimeTicks, Unsigned32, Counter64, Bits, Gauge32, ModuleIdentity, IpAddress, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "Counter32", "iso", "NotificationType", "TimeTicks", "Unsigned32", "Counter64", "Bits", "Gauge32", "ModuleIdentity", "IpAddress", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity")
StorageType, TruthValue, DateAndTime, TextualConvention, DisplayString, TimeStamp, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "StorageType", "TruthValue", "DateAndTime", "TextualConvention", "DisplayString", "TimeStamp", "RowStatus")
TFilterAction, TFilterActionOrDefault = mibBuilder.importSymbols("TIMETRA-FILTER-MIB", "TFilterAction", "TFilterActionOrDefault")
tmnxSRConfs, timetraSRMIBModules, tmnxSRNotifyPrefix, tmnxSRObjs = mibBuilder.importSymbols("TIMETRA-GLOBAL-MIB", "tmnxSRConfs", "timetraSRMIBModules", "tmnxSRNotifyPrefix", "tmnxSRObjs")
TItemDescription, TQueueId, TQueueIdOrAll, TmnxOperState, TmnxActionType, TmnxAccPlcyQECounters, THsmdaCounterIdOrZeroOrAll, TmnxAdminState, TmnxAccPlcyOECounters, TmnxAccPlcyQICounters, TNamedItem, TmnxAccPlcyAACounters, TNamedItemOrEmpty, THsmdaCounterIdOrZero, TmnxAccPlcyOICounters = mibBuilder.importSymbols("TIMETRA-TC-MIB", "TItemDescription", "TQueueId", "TQueueIdOrAll", "TmnxOperState", "TmnxActionType", "TmnxAccPlcyQECounters", "THsmdaCounterIdOrZeroOrAll", "TmnxAdminState", "TmnxAccPlcyOECounters", "TmnxAccPlcyQICounters", "TNamedItem", "TmnxAccPlcyAACounters", "TNamedItemOrEmpty", "THsmdaCounterIdOrZero", "TmnxAccPlcyOICounters")
timetraLogMIBModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 6527, 1, 1, 3, 12))
timetraLogMIBModule.setRevisions(('2011-02-01 00:00', '2009-02-28 00:00', '2008-01-01 00:00', '2007-01-01 00:00', '2006-03-15 00:00', '2005-01-24 00:00', '2004-05-27 00:00', '2004-01-15 00:00', '2003-08-15 00:00', '2003-01-20 00:00', '2001-11-10 00:00',))
if mibBuilder.loadTexts: timetraLogMIBModule.setLastUpdated('201102010000Z')
if mibBuilder.loadTexts: timetraLogMIBModule.setOrganization('Alcatel-Lucent')
tmnxLogObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12))
tmnxLogNotificationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1))
tmnxLogNotifyPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12))
tmnxLogNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0))
tmnxLogConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12))
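# Editor's note: a minimal sketch of how a module like this is typically
# consumed (assumes the compiled MIB is on the builder's search path):
#
#   from pysnmp.smi import builder, view
#   mib_builder = builder.MibBuilder()
#   mib_builder.loadTexts = True                 # keep DESCRIPTION texts
#   mib_builder.loadModules('TIMETRA-LOG-MIB')
#   mib_view = view.MibViewController(mib_builder)
#   oid, label, suffix = mib_view.getNodeName(
#       (1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 2))   # tmnxLogMaxLogs, defined below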
class TmnxPerceivedSeverity(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))
namedValues = NamedValues(("none", 0), ("cleared", 1), ("indeterminate", 2), ("critical", 3), ("major", 4), ("minor", 5), ("warning", 6))
class TmnxSyslogId(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 10)
class TmnxSyslogIdOrEmpty(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 10), )
class TmnxSyslogFacility(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23))
namedValues = NamedValues(("kernel", 0), ("user", 1), ("mail", 2), ("systemd", 3), ("auth", 4), ("syslogd", 5), ("printer", 6), ("netnews", 7), ("uucp", 8), ("cron", 9), ("authpriv", 10), ("ftp", 11), ("ntp", 12), ("logaudit", 13), ("logalert", 14), ("cron2", 15), ("local0", 16), ("local1", 17), ("local2", 18), ("local3", 19), ("local4", 20), ("local5", 21), ("local6", 22), ("local7", 23))
class TmnxUdpPort(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 65535)
class TmnxSyslogSeverity(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))
namedValues = NamedValues(("emergency", 0), ("alert", 1), ("critical", 2), ("error", 3), ("warning", 4), ("notice", 5), ("info", 6), ("debug", 7))
class TmnxLogFileId(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 99)
class TmnxLogFileType(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2))
namedValues = NamedValues(("none", 0), ("eventLog", 1), ("accountingPolicy", 2))
class TmnxLogIdIndex(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 100)
class TmnxCFlash(TextualConvention, Unsigned32):
status = 'current'
class TmnxLogFilterId(TextualConvention, Unsigned32):
status = 'current'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 1001)
class TmnxLogFilterEntryId(TextualConvention, Unsigned32):
status = 'current'
subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(1, 999)
class TmnxLogFilterOperator(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))
namedValues = NamedValues(("off", 1), ("equal", 2), ("notEqual", 3), ("lessThan", 4), ("lessThanOrEqual", 5), ("greaterThan", 6), ("greaterThanOrEqual", 7))
class TmnxEventNumber(TextualConvention, Unsigned32):
status = 'current'
tmnxLogMaxLogs = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 2), Unsigned32().clone(15)).setUnits('logs').setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogMaxLogs.setStatus('current')
tmnxLogFileIdTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3), )
if mibBuilder.loadTexts: tmnxLogFileIdTable.setStatus('current')
tmnxLogFileIdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogFileId"))
if mibBuilder.loadTexts: tmnxLogFileIdEntry.setStatus('current')
tmnxLogFileId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 1), TmnxLogFileId())
if mibBuilder.loadTexts: tmnxLogFileId.setStatus('current')
tmnxLogFileIdRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFileIdRowStatus.setStatus('current')
tmnxLogFileIdStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFileIdStorageType.setStatus('current')
tmnxLogFileIdRolloverTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 10080)).clone(1440)).setUnits('minutes').setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFileIdRolloverTime.setStatus('current')
tmnxLogFileIdRetainTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 500)).clone(12)).setUnits('hours').setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFileIdRetainTime.setStatus('current')
tmnxLogFileIdAdminLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 6), TmnxCFlash()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFileIdAdminLocation.setStatus('current')
tmnxLogFileIdOperLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 7), TmnxCFlash()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogFileIdOperLocation.setStatus('current')
tmnxLogFileIdDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 8), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFileIdDescription.setStatus('current')
tmnxLogFileIdLogType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 9), TmnxLogFileType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogFileIdLogType.setStatus('current')
tmnxLogFileIdLogId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogFileIdLogId.setStatus('current')
tmnxLogFileIdPathName = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogFileIdPathName.setStatus('current')
tmnxLogFileIdCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 12), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogFileIdCreateTime.setStatus('current')
tmnxLogFileIdBackupLoc = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 3, 1, 13), TmnxCFlash()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFileIdBackupLoc.setStatus('current')
tmnxLogApTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4), )
if mibBuilder.loadTexts: tmnxLogApTable.setStatus('current')
tmnxLogApEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogApPolicyId"))
if mibBuilder.loadTexts: tmnxLogApEntry.setStatus('current')
tmnxLogApPolicyId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99)))
if mibBuilder.loadTexts: tmnxLogApPolicyId.setStatus('current')
tmnxLogApRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApRowStatus.setStatus('current')
tmnxLogApStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApStorageType.setStatus('current')
tmnxLogApAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 4), TmnxAdminState().clone('outOfService')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApAdminStatus.setStatus('current')
tmnxLogApOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 5), TmnxOperState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogApOperStatus.setStatus('current')
tmnxLogApInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 120)).clone(5)).setUnits('minutes').setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApInterval.setStatus('current')
tmnxLogApDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 7), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApDescription.setStatus('current')
tmnxLogApDefault = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 8), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApDefault.setStatus('current')
tmnxLogApRecord = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61))).clone(namedValues=NamedValues(("none", 0), ("svcIngressOctet", 1), ("svcEgressOctet", 2), ("svcIngressPkt", 3), ("svcEgressPkt", 4), ("netIngressOctet", 5), ("netEgressOctet", 6), ("netIngressPkt", 7), ("netEgressPkt", 8), ("compactSvcInOctet", 9), ("combinedSvcIngress", 10), ("combinedNetInEgOctet", 11), ("combinedSvcInEgOctet", 12), ("completeSvcInEg", 13), ("combinedSvcSdpInEg", 14), ("completeSvcSdpInEg", 15), ("completeSubscrIngrEgr", 16), ("bsxProtocol", 17), ("bsxApplication", 18), ("bsxAppGroup", 19), ("bsxSubscriberProtocol", 20), ("bsxSubscriberApplication", 21), ("bsxSubscriberAppGroup", 22), ("customRecordSubscriber", 23), ("customRecordService", 24), ("customRecordAa", 25), ("queueGroupOctets", 26), ("queueGroupPackets", 27), ("combinedQueueGroup", 28), ("combinedMplsLspIngress", 29), ("combinedMplsLspEgress", 30), ("combinedLdpLspEgress", 31), ("saa", 32), ("video", 33), ("kpiSystem", 34), ("kpiBearerMgmt", 35), ("kpiBearerTraffic", 36), ("kpiRefPoint", 37), ("kpiPathMgmt", 38), ("kpiIom3", 39), ("kciSystem", 40), ("kciBearerMgmt", 41), ("kciPathMgmt", 42), ("completeKpi", 43), ("completeKci", 44), ("kpiBearerGroup", 45), ("kpiRefPathGroup", 46), ("kpiKciBearerMgmt", 47), ("kpiKciPathMgmt", 48), ("kpiKciSystem", 49), ("completeKpiKci", 50), ("aaPerformance", 51), ("netInfIngressOct", 52), ("netInfIngressPkt", 53), ("combinedNetInfIngress", 54), ("accessEgressPkt", 55), ("accessEgressOct", 56), ("combinedAccessEgress", 57), ("combinedNetEgress", 58), ("combinedSvcEgress", 59), ("combinedSvcInEgPkt", 60), ("combinedNetInEgPkt", 61))).clone('none')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApRecord.setStatus('current')
tmnxLogApToFileId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 10), TmnxLogFileId()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApToFileId.setStatus('current')
tmnxLogApPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("none", 0), ("access", 1), ("network", 2), ("sdp", 3), ("subscriber", 4), ("appAssure", 5), ("qgrp", 6), ("saa", 7), ("mplsLspIngr", 8), ("mplsLspEgr", 9), ("ldpLspEgr", 10), ("video", 11), ("mobileGateway", 12), ("networkIf", 13), ("accessport", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogApPortType.setStatus('current')
tmnxLogApDefaultInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 12), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApDefaultInterval.setStatus('current')
tmnxLogApDataLossCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogApDataLossCount.setStatus('current')
tmnxLogApLastDataLossTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 14), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogApLastDataLossTimeStamp.setStatus('current')
tmnxLogApToFileType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 4, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("fileId", 0), ("noFile", 1))).clone('fileId')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApToFileType.setStatus('current')
tmnxLogIdTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5), )
if mibBuilder.loadTexts: tmnxLogIdTable.setStatus('current')
tmnxLogIdEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogIdIndex"))
if mibBuilder.loadTexts: tmnxLogIdEntry.setStatus('current')
tmnxLogIdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 1), TmnxLogIdIndex())
if mibBuilder.loadTexts: tmnxLogIdIndex.setStatus('current')
tmnxLogIdRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdRowStatus.setStatus('current')
tmnxLogIdStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdStorageType.setStatus('current')
tmnxLogIdAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 4), TmnxAdminState().clone('inService')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdAdminStatus.setStatus('current')
tmnxLogIdOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 5), TmnxOperState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogIdOperStatus.setStatus('current')
tmnxLogIdDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 6), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdDescription.setStatus('current')
tmnxLogIdFilterId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 7), TmnxLogFilterId()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdFilterId.setStatus('current')
tmnxLogIdSource = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 8), Bits().clone(namedValues=NamedValues(("main", 0), ("security", 1), ("change", 2), ("debugTrace", 3), ("li", 4)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdSource.setStatus('current')
tmnxLogIdDestination = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("none", 0), ("console", 1), ("syslog", 2), ("snmpTraps", 3), ("file", 4), ("memory", 5))).clone('none')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdDestination.setStatus('current')
tmnxLogIdFileId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 10), TmnxLogFileId()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdFileId.setStatus('current')
tmnxLogIdSyslogId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 11), TmnxSyslogIdOrEmpty()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdSyslogId.setStatus('current')
tmnxLogIdMaxMemorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 12), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(50, 3000), )).clone(100)).setUnits('events').setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdMaxMemorySize.setStatus('current')
tmnxLogIdConsoleSession = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 13), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdConsoleSession.setStatus('current')
tmnxLogIdForwarded = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogIdForwarded.setStatus('current')
tmnxLogIdDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogIdDropped.setStatus('current')
tmnxLogIdTimeFormat = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 5, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("utc", 1), ("local", 2))).clone('utc')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogIdTimeFormat.setStatus('current')
tmnxLogFilterTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 6), )
if mibBuilder.loadTexts: tmnxLogFilterTable.setStatus('current')
tmnxLogFilterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 6, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogFilterId"))
if mibBuilder.loadTexts: tmnxLogFilterEntry.setStatus('current')
tmnxLogFilterId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 6, 1, 1), TmnxLogFilterId().subtype(subtypeSpec=ValueRangeConstraint(1, 1001)))
if mibBuilder.loadTexts: tmnxLogFilterId.setStatus('current')
tmnxLogFilterRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 6, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterRowStatus.setStatus('current')
tmnxLogFilterDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 6, 1, 3), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterDescription.setStatus('current')
tmnxLogFilterDefaultAction = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 6, 1, 4), TFilterAction().clone('forward')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterDefaultAction.setStatus('current')
tmnxLogFilterInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 6, 1, 5), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogFilterInUse.setStatus('current')
tmnxLogFilterParamsTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7), )
if mibBuilder.loadTexts: tmnxLogFilterParamsTable.setStatus('current')
tmnxLogFilterParamsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogFilterId"), (0, "TIMETRA-LOG-MIB", "tmnxLogFilterParamsIndex"))
if mibBuilder.loadTexts: tmnxLogFilterParamsEntry.setStatus('current')
tmnxLogFilterParamsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 1), TmnxLogFilterEntryId())
if mibBuilder.loadTexts: tmnxLogFilterParamsIndex.setStatus('current')
tmnxLogFilterParamsRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsRowStatus.setStatus('current')
tmnxLogFilterParamsDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 3), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsDescription.setStatus('current')
tmnxLogFilterParamsAction = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 4), TFilterActionOrDefault().clone('default')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsAction.setStatus('current')
tmnxLogFilterParamsApplication = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 5), TNamedItemOrEmpty().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsApplication.setStatus('current')
tmnxLogFilterParamsApplOperator = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 6), TmnxLogFilterOperator().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsApplOperator.setStatus('current')
tmnxLogFilterParamsNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 7), TmnxEventNumber()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsNumber.setStatus('current')
tmnxLogFilterParamsNumberOperator = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 8), TmnxLogFilterOperator().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsNumberOperator.setStatus('current')
tmnxLogFilterParamsSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 9), TmnxPerceivedSeverity().clone('none')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsSeverity.setStatus('current')
tmnxLogFilterParamsSeverityOperator = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 10), TmnxLogFilterOperator().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsSeverityOperator.setStatus('current')
tmnxLogFilterParamsSubject = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 11), TNamedItemOrEmpty().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsSubject.setStatus('current')
tmnxLogFilterParamsSubjectOperator = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 12), TmnxLogFilterOperator().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsSubjectOperator.setStatus('current')
tmnxLogFilterParamsSubjectRegexp = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 13), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsSubjectRegexp.setStatus('current')
tmnxLogFilterParamsRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 14), TNamedItemOrEmpty().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsRouter.setStatus('current')
tmnxLogFilterParamsRouterOperator = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 15), TmnxLogFilterOperator().clone('off')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsRouterOperator.setStatus('current')
tmnxLogFilterParamsRouterRegexp = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 7, 1, 16), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogFilterParamsRouterRegexp.setStatus('current')
tmnxSyslogTargetTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8), )
if mibBuilder.loadTexts: tmnxSyslogTargetTable.setStatus('current')
tmnxSyslogTargetEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxSyslogTargetIndex"))
if mibBuilder.loadTexts: tmnxSyslogTargetEntry.setStatus('current')
tmnxSyslogTargetIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 1), TmnxSyslogId())
if mibBuilder.loadTexts: tmnxSyslogTargetIndex.setStatus('current')
tmnxSyslogTargetRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetRowStatus.setStatus('current')
tmnxSyslogTargetDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 3), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetDescription.setStatus('current')
tmnxSyslogTargetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 4), IpAddress().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetAddress.setStatus('obsolete')
tmnxSyslogTargetUdpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 5), TmnxUdpPort().clone(514)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetUdpPort.setStatus('current')
tmnxSyslogTargetFacility = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 6), TmnxSyslogFacility().clone('local7')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetFacility.setStatus('current')
tmnxSyslogTargetSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 7), TmnxSyslogSeverity().clone('info')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetSeverity.setStatus('current')
tmnxSyslogTargetMessagePrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 8), TNamedItemOrEmpty().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetMessagePrefix.setStatus('current')
tmnxSyslogTargetMessagesDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSyslogTargetMessagesDropped.setStatus('current')
tmnxSyslogTargetAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 10), InetAddressType().clone('unknown')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetAddrType.setStatus('current')
tmnxSyslogTargetAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 8, 1, 11), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), ValueSizeConstraint(20, 20), )).clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSyslogTargetAddr.setStatus('current')
tmnxEventAppTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 9), )
if mibBuilder.loadTexts: tmnxEventAppTable.setStatus('current')
tmnxEventAppEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 9, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxEventAppIndex"))
if mibBuilder.loadTexts: tmnxEventAppEntry.setStatus('current')
tmnxEventAppIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 9, 1, 1), Unsigned32())
if mibBuilder.loadTexts: tmnxEventAppIndex.setStatus('current')
tmnxEventAppName = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 9, 1, 2), TNamedItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxEventAppName.setStatus('current')
tmnxEventTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10), )
if mibBuilder.loadTexts: tmnxEventTable.setStatus('current')
tmnxEventEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxEventAppIndex"), (0, "TIMETRA-LOG-MIB", "tmnxEventID"))
if mibBuilder.loadTexts: tmnxEventEntry.setStatus('current')
tmnxEventID = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 1), Unsigned32())
if mibBuilder.loadTexts: tmnxEventID.setStatus('current')
tmnxEventName = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 2), TNamedItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxEventName.setStatus('current')
tmnxEventSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 3), TmnxPerceivedSeverity()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventSeverity.setStatus('current')
tmnxEventControl = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 4), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventControl.setStatus('current')
tmnxEventCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxEventCounter.setStatus('current')
tmnxEventDropCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxEventDropCount.setStatus('current')
tmnxEventReset = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 7), TmnxActionType().clone('notApplicable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventReset.setStatus('current')
tmnxEventThrottle = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 10, 1, 8), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventThrottle.setStatus('current')
tmnxSnmpTrapGroupTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11), )
if mibBuilder.loadTexts: tmnxSnmpTrapGroupTable.setStatus('obsolete')
tmnxSnmpTrapGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxStgIndex"), (0, "TIMETRA-LOG-MIB", "tmnxStgDestAddress"), (0, "TIMETRA-LOG-MIB", "tmnxStgDestPort"))
if mibBuilder.loadTexts: tmnxSnmpTrapGroupEntry.setStatus('obsolete')
tmnxStgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 1), TmnxLogIdIndex())
if mibBuilder.loadTexts: tmnxStgIndex.setStatus('obsolete')
tmnxStgDestAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 2), IpAddress().clone(hexValue="00000000"))
if mibBuilder.loadTexts: tmnxStgDestAddress.setStatus('obsolete')
tmnxStgDestPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 3), TmnxUdpPort().clone(162))
if mibBuilder.loadTexts: tmnxStgDestPort.setStatus('obsolete')
tmnxStgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStgRowStatus.setStatus('obsolete')
tmnxStgDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 5), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStgDescription.setStatus('obsolete')
tmnxStgVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 6), SnmpMessageProcessingModel().clone(3)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStgVersion.setStatus('obsolete')
tmnxStgNotifyCommunity = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32)).clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStgNotifyCommunity.setStatus('obsolete')
tmnxStgSecurityLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 11, 1, 8), SnmpSecurityLevel().clone('noAuthNoPriv')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStgSecurityLevel.setStatus('obsolete')
tmnxEventTest = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 12), TmnxActionType().clone('notApplicable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventTest.setStatus('current')
tmnxEventThrottleLimit = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 13), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 20000)).clone(2000)).setUnits('events').setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventThrottleLimit.setStatus('current')
tmnxEventThrottleInterval = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 14), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 1200)).clone(1)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventThrottleInterval.setStatus('current')
tmnxSnmpSetErrsMax = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 15), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSnmpSetErrsMax.setStatus('current')
tmnxSnmpSetErrsTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16), )
if mibBuilder.loadTexts: tmnxSnmpSetErrsTable.setStatus('current')
tmnxSnmpSetErrsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxSseAddressType"), (0, "TIMETRA-LOG-MIB", "tmnxSseAddress"), (0, "TIMETRA-LOG-MIB", "tmnxSseSnmpPort"), (0, "TIMETRA-LOG-MIB", "tmnxSseRequestId"))
if mibBuilder.loadTexts: tmnxSnmpSetErrsEntry.setStatus('current')
tmnxSseAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 1), InetAddressType())
if mibBuilder.loadTexts: tmnxSseAddressType.setStatus('current')
tmnxSseAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 2), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), )))
if mibBuilder.loadTexts: tmnxSseAddress.setStatus('current')
tmnxSseSnmpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 3), TmnxUdpPort())
if mibBuilder.loadTexts: tmnxSseSnmpPort.setStatus('current')
tmnxSseRequestId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 4), Unsigned32())
if mibBuilder.loadTexts: tmnxSseRequestId.setStatus('current')
tmnxSseVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 5), SnmpMessageProcessingModel()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseVersion.setStatus('current')
tmnxSseSeverityLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 6), TmnxPerceivedSeverity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseSeverityLevel.setStatus('current')
tmnxSseModuleId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseModuleId.setStatus('current')
tmnxSseModuleName = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 8), TNamedItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseModuleName.setStatus('current')
tmnxSseErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseErrorCode.setStatus('current')
tmnxSseErrorName = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseErrorName.setStatus('current')
tmnxSseErrorMsg = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseErrorMsg.setStatus('current')
tmnxSseExtraText = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 320))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseExtraText.setStatus('current')
tmnxSseTimestamp = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 16, 1, 13), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxSseTimestamp.setStatus('current')
tmnxSnmpTrapLogTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 17), )
if mibBuilder.loadTexts: tmnxSnmpTrapLogTable.setStatus('current')
tmnxSnmpTrapLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 17, 1), )
snmpNotifyEntry.registerAugmentions(("TIMETRA-LOG-MIB", "tmnxSnmpTrapLogEntry"))
tmnxSnmpTrapLogEntry.setIndexNames(*snmpNotifyEntry.getIndexNames())
if mibBuilder.loadTexts: tmnxSnmpTrapLogEntry.setStatus('current')
tmnxSnmpTrapLogDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 17, 1, 1), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxSnmpTrapLogDescription.setStatus('current')
tmnxSnmpTrapDestTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18), )
if mibBuilder.loadTexts: tmnxSnmpTrapDestTable.setStatus('current')
tmnxSnmpTrapDestEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxStdIndex"), (1, "TIMETRA-LOG-MIB", "tmnxStdName"))
if mibBuilder.loadTexts: tmnxSnmpTrapDestEntry.setStatus('current')
tmnxStdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 1), TmnxLogIdIndex())
if mibBuilder.loadTexts: tmnxStdIndex.setStatus('current')
tmnxStdName = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 28)))
if mibBuilder.loadTexts: tmnxStdName.setStatus('current')
tmnxStdRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdRowStatus.setStatus('current')
tmnxStdRowLastChanged = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 4), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxStdRowLastChanged.setStatus('current')
tmnxStdDestAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 5), InetAddressType().clone('unknown')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdDestAddrType.setStatus('current')
tmnxStdDestAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 6), InetAddress().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(4, 4), ValueSizeConstraint(16, 16), ValueSizeConstraint(20, 20), )).clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdDestAddr.setStatus('current')
tmnxStdDestPort = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 7), TmnxUdpPort().clone(162)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdDestPort.setStatus('current')
tmnxStdDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 8), TItemDescription().clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdDescription.setStatus('current')
tmnxStdVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 9), SnmpMessageProcessingModel().clone(3)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdVersion.setStatus('current')
tmnxStdNotifyCommunity = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 31)).clone(hexValue="")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdNotifyCommunity.setStatus('current')
tmnxStdSecurityLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 11), SnmpSecurityLevel().clone('noAuthNoPriv')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdSecurityLevel.setStatus('current')
tmnxStdReplay = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 12), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxStdReplay.setStatus('current')
tmnxStdReplayStart = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 13), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxStdReplayStart.setStatus('current')
tmnxStdReplayLastTime = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 18, 1, 14), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxStdReplayLastTime.setStatus('current')
tmnxStdMaxTargets = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 19), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(10, 100)).clone(25)).setUnits('trap-targets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxStdMaxTargets.setStatus('current')
tmnxLogApCustRecordTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20), )
if mibBuilder.loadTexts: tmnxLogApCustRecordTable.setStatus('current')
tmnxLogApCustRecordEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1), )
tmnxLogApEntry.registerAugmentions(("TIMETRA-LOG-MIB", "tmnxLogApCustRecordEntry"))
tmnxLogApCustRecordEntry.setIndexNames(*tmnxLogApEntry.getIndexNames())
if mibBuilder.loadTexts: tmnxLogApCustRecordEntry.setStatus('current')
tmnxLogApCrLastChanged = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 1), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogApCrLastChanged.setStatus('current')
tmnxLogApCrSignChangeDelta = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 2), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeDelta.setStatus('current')
tmnxLogApCrSignChangeQueue = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 3), TQueueIdOrAll()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeQueue.setStatus('current')
tmnxLogApCrSignChangeOCntr = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 4), THsmdaCounterIdOrZeroOrAll()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeOCntr.setStatus('current')
tmnxLogApCrSignChangeQICounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 5), TmnxAccPlcyQICounters().clone(hexValue="0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeQICounters.setStatus('current')
tmnxLogApCrSignChangeQECounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 6), TmnxAccPlcyQECounters().clone(hexValue="0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeQECounters.setStatus('current')
tmnxLogApCrSignChangeOICounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 7), TmnxAccPlcyOICounters().clone(hexValue="0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeOICounters.setStatus('current')
tmnxLogApCrSignChangeOECounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 8), TmnxAccPlcyOECounters().clone(hexValue="0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeOECounters.setStatus('current')
tmnxLogApCrSignChangeAACounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 9), TmnxAccPlcyAACounters().clone(hexValue="0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrSignChangeAACounters.setStatus('current')
tmnxLogApCrAACounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 20, 1, 10), TmnxAccPlcyAACounters().clone(hexValue="0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogApCrAACounters.setStatus('current')
tmnxLogApCustRecordQueueTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 21), )
if mibBuilder.loadTexts: tmnxLogApCustRecordQueueTable.setStatus('current')
tmnxLogApCustRecordQueueEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 21, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogApPolicyId"), (0, "TIMETRA-LOG-MIB", "tmnxLogApCrQueueId"))
if mibBuilder.loadTexts: tmnxLogApCustRecordQueueEntry.setStatus('current')
tmnxLogApCrQueueId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 21, 1, 1), TQueueId().subtype(subtypeSpec=ValueRangeConstraint(1, 32)))
if mibBuilder.loadTexts: tmnxLogApCrQueueId.setStatus('current')
tmnxLogApCrQueueRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 21, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApCrQueueRowStatus.setStatus('current')
tmnxLogApCrQueueLastChanged = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 21, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogApCrQueueLastChanged.setStatus('current')
tmnxLogApCrQueueICounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 21, 1, 4), TmnxAccPlcyQICounters().clone(hexValue="0")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApCrQueueICounters.setStatus('current')
tmnxLogApCrQueueECounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 21, 1, 5), TmnxAccPlcyQECounters().clone(hexValue="0")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApCrQueueECounters.setStatus('current')
tmnxLogApCrOverrideCntrTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 22), )
if mibBuilder.loadTexts: tmnxLogApCrOverrideCntrTable.setStatus('current')
tmnxLogApCrOverrideCntrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 22, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogApPolicyId"), (0, "TIMETRA-LOG-MIB", "tmnxLogApCrOverrideCntrId"))
if mibBuilder.loadTexts: tmnxLogApCrOverrideCntrEntry.setStatus('current')
tmnxLogApCrOverrideCntrId = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 22, 1, 1), THsmdaCounterIdOrZero().subtype(subtypeSpec=ValueRangeConstraint(1, 8)))
if mibBuilder.loadTexts: tmnxLogApCrOverrideCntrId.setStatus('current')
tmnxLogApCrOverrideCntrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 22, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApCrOverrideCntrRowStatus.setStatus('current')
tmnxLogApCrOverrideCntrLastChngd = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 22, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogApCrOverrideCntrLastChngd.setStatus('current')
tmnxLogApCrOverrideCntrICounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 22, 1, 4), TmnxAccPlcyOICounters().clone(hexValue="0")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApCrOverrideCntrICounters.setStatus('current')
tmnxLogApCrOverrideCntrECounters = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 22, 1, 5), TmnxAccPlcyOECounters().clone(hexValue="0")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: tmnxLogApCrOverrideCntrECounters.setStatus('current')
tmnxEventPrimaryRoutePref = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inband", 1), ("outband", 2))).clone('outband')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventPrimaryRoutePref.setStatus('current')
tmnxEventSecondaryRoutePref = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inband", 1), ("outband", 2), ("none", 3))).clone('inband')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxEventSecondaryRoutePref.setStatus('current')
tmnxLogConfigEventsDamped = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 25), TruthValue().clone('true')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogConfigEventsDamped.setStatus('current')
tmnxLogEventHistoryObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26))
tmnxLogEventHistGeneralObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 1))
tmnxLogExRbkOpTblLastChange = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 1, 1), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpTblLastChange.setStatus('current')
tmnxLogExRbkOpMaxEntries = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 10)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogExRbkOpMaxEntries.setStatus('current')
tmnxLogExecRollbackOpTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3), )
if mibBuilder.loadTexts: tmnxLogExecRollbackOpTable.setStatus('current')
tmnxLogExecRollbackOpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogExRbkOpIndex"))
if mibBuilder.loadTexts: tmnxLogExecRollbackOpEntry.setStatus('current')
tmnxLogExRbkOpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 1), Unsigned32())
if mibBuilder.loadTexts: tmnxLogExRbkOpIndex.setStatus('current')
tmnxLogExRbkOpLastChanged = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 2), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpLastChanged.setStatus('current')
tmnxLogExRbkOpType = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("unknown", 0), ("exec", 1), ("rollback", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpType.setStatus('current')
tmnxLogExRbkOpStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("unknown", 0), ("inProgress", 1), ("success", 2), ("failed", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpStatus.setStatus('current')
tmnxLogExRbkOpBegin = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpBegin.setStatus('current')
tmnxLogExRbkOpEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 6), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpEnd.setStatus('current')
tmnxLogExRbkOpFile = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpFile.setStatus('current')
tmnxLogExRbkOpUser = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 8), TNamedItem()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpUser.setStatus('current')
tmnxLogExRbkOpNumEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 3, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkOpNumEvents.setStatus('current')
tmnxLogExecRollbackEventTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 4), )
if mibBuilder.loadTexts: tmnxLogExecRollbackEventTable.setStatus('current')
tmnxLogExecRollbackEventEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 4, 1), ).setIndexNames((0, "TIMETRA-LOG-MIB", "tmnxLogExRbkOpIndex"), (0, "TIMETRA-LOG-MIB", "tmnxLogExRbkEventIndex"))
if mibBuilder.loadTexts: tmnxLogExecRollbackEventEntry.setStatus('current')
tmnxLogExRbkEventIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 4, 1, 1), Unsigned32())
if mibBuilder.loadTexts: tmnxLogExRbkEventIndex.setStatus('current')
tmnxLogExRbkEventOID = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 4, 1, 2), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tmnxLogExRbkEventOID.setStatus('current')
tmnxLogExRbkNotifyObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 5))
tmnxLogExecRollbackOpIndex = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 26, 5, 1), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogExecRollbackOpIndex.setStatus('current')
tmnxLogColdStartWaitTime = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 27), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 300))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogColdStartWaitTime.setStatus('current')
tmnxLogRouteRecoveryWaitTime = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 28), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: tmnxLogRouteRecoveryWaitTime.setStatus('current')
tmnxLogFileDeletedLogId = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 1), TmnxLogIdIndex()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogFileDeletedLogId.setStatus('current')
tmnxLogFileDeletedFileId = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 2), TmnxLogFileId()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogFileDeletedFileId.setStatus('current')
tmnxLogFileDeletedLogType = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 3), TmnxLogFileType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogFileDeletedLogType.setStatus('current')
tmnxLogFileDeletedLocation = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 4), TmnxCFlash()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogFileDeletedLocation.setStatus('current')
tmnxLogFileDeletedName = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 5), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogFileDeletedName.setStatus('current')
tmnxLogFileDeletedCreateTime = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 6), DateAndTime()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogFileDeletedCreateTime.setStatus('current')
tmnxLogTraceErrorTitle = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogTraceErrorTitle.setStatus('current')
tmnxLogTraceErrorSubject = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogTraceErrorSubject.setStatus('current')
tmnxLogTraceErrorMessage = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 9), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogTraceErrorMessage.setStatus('current')
tmnxLogThrottledEventID = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 10), ObjectIdentifier()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogThrottledEventID.setStatus('current')
tmnxLogThrottledEvents = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 11), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogThrottledEvents.setStatus('current')
tmnxSysLogTargetId = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 12), TmnxSyslogId()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxSysLogTargetId.setStatus('current')
tmnxSysLogTargetProblemDescr = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 13), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxSysLogTargetProblemDescr.setStatus('current')
tmnxLogNotifyApInterval = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 120))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxLogNotifyApInterval.setStatus('current')
tmnxStdReplayStartEvent = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 15), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxStdReplayStartEvent.setStatus('current')
tmnxStdReplayEndEvent = MibScalar((1, 3, 6, 1, 4, 1, 6527, 3, 1, 2, 12, 1, 16), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tmnxStdReplayEndEvent.setStatus('current')
tmnxLogSpaceContention = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 1)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileIdRolloverTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdRetainTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdAdminLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdBackupLoc"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdOperLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogType"))
if mibBuilder.loadTexts: tmnxLogSpaceContention.setStatus('current')
tmnxLogAdminLocFailed = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 2)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileIdAdminLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdBackupLoc"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdOperLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogType"))
if mibBuilder.loadTexts: tmnxLogAdminLocFailed.setStatus('current')
tmnxLogBackupLocFailed = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 3)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileIdAdminLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdBackupLoc"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdOperLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogType"))
if mibBuilder.loadTexts: tmnxLogBackupLocFailed.setStatus('current')
tmnxLogFileRollover = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 4)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileIdRolloverTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdRetainTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdAdminLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdBackupLoc"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdOperLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogType"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdPathName"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdCreateTime"))
if mibBuilder.loadTexts: tmnxLogFileRollover.setStatus('current')
tmnxLogFileDeleted = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 5)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedFileId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogType"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedName"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedCreateTime"))
if mibBuilder.loadTexts: tmnxLogFileDeleted.setStatus('current')
tmnxTestEvent = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 6)).setObjects(("SNMPv2-MIB", "sysDescr"), ("SNMPv2-MIB", "sysObjectID"))
if mibBuilder.loadTexts: tmnxTestEvent.setStatus('current')
tmnxLogTraceError = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 7)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogTraceErrorTitle"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorMessage"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorSubject"))
if mibBuilder.loadTexts: tmnxLogTraceError.setStatus('current')
tmnxLogEventThrottled = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 8)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogThrottledEventID"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEvents"))
if mibBuilder.loadTexts: tmnxLogEventThrottled.setStatus('current')
tmnxSysLogTargetProblem = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 9)).setObjects(("TIMETRA-LOG-MIB", "tmnxSysLogTargetId"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetProblemDescr"))
if mibBuilder.loadTexts: tmnxSysLogTargetProblem.setStatus('current')
tmnxLogAccountingDataLoss = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 10)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileIdRolloverTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdRetainTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdAdminLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdBackupLoc"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdOperLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogId"), ("TIMETRA-LOG-MIB", "tmnxLogNotifyApInterval"))
if mibBuilder.loadTexts: tmnxLogAccountingDataLoss.setStatus('current')
tmnxStdEventsReplayed = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 11)).setObjects(("TIMETRA-LOG-MIB", "tmnxStdDestAddrType"), ("TIMETRA-LOG-MIB", "tmnxStdDestAddr"), ("TIMETRA-LOG-MIB", "tmnxStdReplayStartEvent"), ("TIMETRA-LOG-MIB", "tmnxStdReplayEndEvent"), ("TIMETRA-LOG-MIB", "tmnxStdReplayStart"))
if mibBuilder.loadTexts: tmnxStdEventsReplayed.setStatus('current')
tmnxLogEventOverrun = NotificationType((1, 3, 6, 1, 4, 1, 6527, 3, 1, 3, 12, 0, 12)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogThrottledEventID"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEvents"))
if mibBuilder.loadTexts: tmnxLogEventOverrun.setStatus('current')
tmnxLogCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1))
tmnxLogGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2))
tmnxLogV4v0Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 4)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV4v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogGroup"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsR2r1Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationR3r0Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV4v0Compliance = tmnxLogV4v0Compliance.setStatus('obsolete')
tmnxLogV5v0Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 5)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV5v0Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV5v0Compliance = tmnxLogV5v0Compliance.setStatus('obsolete')
tmnxLogV6v0Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 6)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapDestV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV6v0Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV6v0Compliance = tmnxLogV6v0Compliance.setStatus('obsolete')
tmnxLogV6v1Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 7)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapDestV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyV6v1Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV6v1Compliance = tmnxLogV6v1Compliance.setStatus('current')
tmnxLogV7v0Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 8)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapDestV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyV6v1Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyCRV7v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogRoutePreferenceV7v0Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV7v0Compliance = tmnxLogV7v0Compliance.setStatus('obsolete')
tmnxLogV9v0Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 9)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyV6v1Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyCRV7v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapDestV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV9v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogRoutePreferenceV7v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogEventDampedV8v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogApV9v0Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV9v0Compliance = tmnxLogV9v0Compliance.setStatus('obsolete')
tmnxLogV8v0Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 10)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapDestV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyV6v1Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyCRV7v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogRoutePreferenceV7v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogEventDampedV8v0Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV8v0Compliance = tmnxLogV8v0Compliance.setStatus('obsolete')
tmnxLogV10v0Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 1, 11)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogGlobalGroup"), ("TIMETRA-LOG-MIB", "tmnxLogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyGroup"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyV6v1Group"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingPolicyCRV7v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdGroup"), ("TIMETRA-LOG-MIB", "tmnxLogSyslogV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpTrapDestV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsGroup"), ("TIMETRA-LOG-MIB", "tmnxLogEventsV5v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV6v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogNotificationV9v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogRoutePreferenceV7v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogEventDampedV8v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogApV9v0Group"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpGroup"), ("TIMETRA-LOG-MIB", "tmnxLogApExtGroup"), ("TIMETRA-LOG-MIB", "tmnxLogAppRouteNotifV10v0Group"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV10v0Compliance = tmnxLogV10v0Compliance.setStatus('current')
tmnxLogGlobalGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 1)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogMaxLogs"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogGlobalGroup = tmnxLogGlobalGroup.setStatus('current')
tmnxLogAccountingPolicyGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 3)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogApRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogApStorageType"), ("TIMETRA-LOG-MIB", "tmnxLogApAdminStatus"), ("TIMETRA-LOG-MIB", "tmnxLogApOperStatus"), ("TIMETRA-LOG-MIB", "tmnxLogApInterval"), ("TIMETRA-LOG-MIB", "tmnxLogApDescription"), ("TIMETRA-LOG-MIB", "tmnxLogApDefault"), ("TIMETRA-LOG-MIB", "tmnxLogApRecord"), ("TIMETRA-LOG-MIB", "tmnxLogApToFileId"), ("TIMETRA-LOG-MIB", "tmnxLogApPortType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogAccountingPolicyGroup = tmnxLogAccountingPolicyGroup.setStatus('current')
tmnxLogFileIdGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 4)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileIdRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdStorageType"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdRolloverTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdRetainTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdAdminLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdOperLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdDescription"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogType"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdPathName"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdCreateTime"), ("TIMETRA-LOG-MIB", "tmnxLogFileIdBackupLoc"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogFileIdGroup = tmnxLogFileIdGroup.setStatus('current')
tmnxLogSyslogGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 5)).setObjects(("TIMETRA-LOG-MIB", "tmnxSyslogTargetRowStatus"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetDescription"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetAddress"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetUdpPort"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetFacility"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetSeverity"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetMessagePrefix"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetMessagesDropped"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogSyslogGroup = tmnxLogSyslogGroup.setStatus('obsolete')
tmnxSnmpTrapGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 6)).setObjects(("TIMETRA-LOG-MIB", "tmnxStgRowStatus"), ("TIMETRA-LOG-MIB", "tmnxStgDescription"), ("TIMETRA-LOG-MIB", "tmnxStgVersion"), ("TIMETRA-LOG-MIB", "tmnxStgNotifyCommunity"), ("TIMETRA-LOG-MIB", "tmnxStgSecurityLevel"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxSnmpTrapGroup = tmnxSnmpTrapGroup.setStatus('obsolete')
tmnxLogEventsR2r1Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 10)).setObjects(("TIMETRA-LOG-MIB", "tmnxEventAppName"), ("TIMETRA-LOG-MIB", "tmnxEventName"), ("TIMETRA-LOG-MIB", "tmnxEventSeverity"), ("TIMETRA-LOG-MIB", "tmnxEventControl"), ("TIMETRA-LOG-MIB", "tmnxEventCounter"), ("TIMETRA-LOG-MIB", "tmnxEventDropCount"), ("TIMETRA-LOG-MIB", "tmnxEventReset"), ("TIMETRA-LOG-MIB", "tmnxEventTest"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogEventsR2r1Group = tmnxLogEventsR2r1Group.setStatus('obsolete')
tmnxLogNotifyObjsR3r0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 13)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedFileId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogType"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedName"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedCreateTime"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorTitle"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorMessage"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotifyObjsR3r0Group = tmnxLogNotifyObjsR3r0Group.setStatus('obsolete')
tmnxLogNotificationR3r0Group = NotificationGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 14)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogSpaceContention"), ("TIMETRA-LOG-MIB", "tmnxLogAdminLocFailed"), ("TIMETRA-LOG-MIB", "tmnxLogBackupLocFailed"), ("TIMETRA-LOG-MIB", "tmnxLogFileRollover"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeleted"), ("TIMETRA-LOG-MIB", "tmnxTestEvent"), ("TIMETRA-LOG-MIB", "tmnxLogTraceError"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotificationR3r0Group = tmnxLogNotificationR3r0Group.setStatus('obsolete')
tmnxLogV4v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 15)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogIdRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogIdStorageType"), ("TIMETRA-LOG-MIB", "tmnxLogIdAdminStatus"), ("TIMETRA-LOG-MIB", "tmnxLogIdOperStatus"), ("TIMETRA-LOG-MIB", "tmnxLogIdDescription"), ("TIMETRA-LOG-MIB", "tmnxLogIdFilterId"), ("TIMETRA-LOG-MIB", "tmnxLogIdSource"), ("TIMETRA-LOG-MIB", "tmnxLogIdDestination"), ("TIMETRA-LOG-MIB", "tmnxLogIdFileId"), ("TIMETRA-LOG-MIB", "tmnxLogIdSyslogId"), ("TIMETRA-LOG-MIB", "tmnxLogIdMaxMemorySize"), ("TIMETRA-LOG-MIB", "tmnxLogIdConsoleSession"), ("TIMETRA-LOG-MIB", "tmnxLogIdForwarded"), ("TIMETRA-LOG-MIB", "tmnxLogIdDropped"), ("TIMETRA-LOG-MIB", "tmnxLogIdTimeFormat"), ("TIMETRA-LOG-MIB", "tmnxLogFilterRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogFilterDescription"), ("TIMETRA-LOG-MIB", "tmnxLogFilterDefaultAction"), ("TIMETRA-LOG-MIB", "tmnxLogFilterInUse"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsDescription"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsAction"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsApplication"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsApplOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsNumber"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsNumberOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSeverity"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSeverityOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSubject"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSubjectOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSubjectRegexp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV4v0Group = tmnxLogV4v0Group.setStatus('obsolete')
tmnxSnmpSetErrsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 16)).setObjects(("TIMETRA-LOG-MIB", "tmnxSnmpSetErrsMax"), ("TIMETRA-LOG-MIB", "tmnxSseVersion"), ("TIMETRA-LOG-MIB", "tmnxSseSeverityLevel"), ("TIMETRA-LOG-MIB", "tmnxSseModuleId"), ("TIMETRA-LOG-MIB", "tmnxSseModuleName"), ("TIMETRA-LOG-MIB", "tmnxSseErrorCode"), ("TIMETRA-LOG-MIB", "tmnxSseErrorName"), ("TIMETRA-LOG-MIB", "tmnxSseErrorMsg"), ("TIMETRA-LOG-MIB", "tmnxSseExtraText"), ("TIMETRA-LOG-MIB", "tmnxSseTimestamp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxSnmpSetErrsGroup = tmnxSnmpSetErrsGroup.setStatus('current')
tmnxLogEventsV5v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 17)).setObjects(("TIMETRA-LOG-MIB", "tmnxEventAppName"), ("TIMETRA-LOG-MIB", "tmnxEventName"), ("TIMETRA-LOG-MIB", "tmnxEventSeverity"), ("TIMETRA-LOG-MIB", "tmnxEventControl"), ("TIMETRA-LOG-MIB", "tmnxEventCounter"), ("TIMETRA-LOG-MIB", "tmnxEventDropCount"), ("TIMETRA-LOG-MIB", "tmnxEventReset"), ("TIMETRA-LOG-MIB", "tmnxEventThrottle"), ("TIMETRA-LOG-MIB", "tmnxEventTest"), ("TIMETRA-LOG-MIB", "tmnxEventThrottleLimit"), ("TIMETRA-LOG-MIB", "tmnxEventThrottleInterval"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogEventsV5v0Group = tmnxLogEventsV5v0Group.setStatus('current')
tmnxLogNotifyObjsV5v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 18)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedFileId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogType"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedName"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedCreateTime"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorTitle"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorMessage"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEventID"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEvents"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetId"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetProblemDescr"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotifyObjsV5v0Group = tmnxLogNotifyObjsV5v0Group.setStatus('obsolete')
tmnxLogNotificationV5v0Group = NotificationGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 19)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogSpaceContention"), ("TIMETRA-LOG-MIB", "tmnxLogAdminLocFailed"), ("TIMETRA-LOG-MIB", "tmnxLogBackupLocFailed"), ("TIMETRA-LOG-MIB", "tmnxLogFileRollover"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeleted"), ("TIMETRA-LOG-MIB", "tmnxTestEvent"), ("TIMETRA-LOG-MIB", "tmnxLogTraceError"), ("TIMETRA-LOG-MIB", "tmnxLogEventThrottled"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetProblem"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotificationV5v0Group = tmnxLogNotificationV5v0Group.setStatus('obsolete')
tmnxLogSyslogV5v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 20)).setObjects(("TIMETRA-LOG-MIB", "tmnxSyslogTargetRowStatus"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetDescription"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetUdpPort"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetFacility"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetSeverity"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetMessagePrefix"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetMessagesDropped"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetAddrType"), ("TIMETRA-LOG-MIB", "tmnxSyslogTargetAddr"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogSyslogV5v0Group = tmnxLogSyslogV5v0Group.setStatus('current')
tmnxSnmpTrapV5v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 21)).setObjects(("TIMETRA-LOG-MIB", "tmnxSnmpTrapLogDescription"), ("TIMETRA-LOG-MIB", "tmnxStdRowStatus"), ("TIMETRA-LOG-MIB", "tmnxStdRowLastChanged"), ("TIMETRA-LOG-MIB", "tmnxStdDestAddrType"), ("TIMETRA-LOG-MIB", "tmnxStdDestAddr"), ("TIMETRA-LOG-MIB", "tmnxStdDestPort"), ("TIMETRA-LOG-MIB", "tmnxStdDescription"), ("TIMETRA-LOG-MIB", "tmnxStdVersion"), ("TIMETRA-LOG-MIB", "tmnxStdNotifyCommunity"), ("TIMETRA-LOG-MIB", "tmnxStdSecurityLevel"), ("TIMETRA-LOG-MIB", "tmnxStdMaxTargets"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxSnmpTrapV5v0Group = tmnxSnmpTrapV5v0Group.setStatus('current')
tmnxLogV5v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 22)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogIdRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogIdStorageType"), ("TIMETRA-LOG-MIB", "tmnxLogIdAdminStatus"), ("TIMETRA-LOG-MIB", "tmnxLogIdOperStatus"), ("TIMETRA-LOG-MIB", "tmnxLogIdDescription"), ("TIMETRA-LOG-MIB", "tmnxLogIdFilterId"), ("TIMETRA-LOG-MIB", "tmnxLogIdSource"), ("TIMETRA-LOG-MIB", "tmnxLogIdDestination"), ("TIMETRA-LOG-MIB", "tmnxLogIdFileId"), ("TIMETRA-LOG-MIB", "tmnxLogIdSyslogId"), ("TIMETRA-LOG-MIB", "tmnxLogIdMaxMemorySize"), ("TIMETRA-LOG-MIB", "tmnxLogIdConsoleSession"), ("TIMETRA-LOG-MIB", "tmnxLogIdForwarded"), ("TIMETRA-LOG-MIB", "tmnxLogIdDropped"), ("TIMETRA-LOG-MIB", "tmnxLogIdTimeFormat"), ("TIMETRA-LOG-MIB", "tmnxLogFilterRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogFilterDescription"), ("TIMETRA-LOG-MIB", "tmnxLogFilterDefaultAction"), ("TIMETRA-LOG-MIB", "tmnxLogFilterInUse"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsDescription"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsAction"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsApplication"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsApplOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsNumber"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsNumberOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSeverity"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSeverityOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSubject"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSubjectOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsSubjectRegexp"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsRouter"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsRouterOperator"), ("TIMETRA-LOG-MIB", "tmnxLogFilterParamsRouterRegexp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogV5v0Group = tmnxLogV5v0Group.setStatus('current')
tmnxLogObsoleteObjsV5v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 23)).setObjects(("TIMETRA-LOG-MIB", "tmnxSyslogTargetAddress"), ("TIMETRA-LOG-MIB", "tmnxStgRowStatus"), ("TIMETRA-LOG-MIB", "tmnxStgDescription"), ("TIMETRA-LOG-MIB", "tmnxStgVersion"), ("TIMETRA-LOG-MIB", "tmnxStgNotifyCommunity"), ("TIMETRA-LOG-MIB", "tmnxStgSecurityLevel"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogObsoleteObjsV5v0Group = tmnxLogObsoleteObjsV5v0Group.setStatus('current')
tmnxLogNotifyObjsV6v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 24)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedFileId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogType"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedName"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedCreateTime"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorTitle"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorMessage"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEventID"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEvents"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetId"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetProblemDescr"), ("TIMETRA-LOG-MIB", "tmnxLogNotifyApInterval"), ("TIMETRA-LOG-MIB", "tmnxStdReplayStartEvent"), ("TIMETRA-LOG-MIB", "tmnxStdReplayEndEvent"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotifyObjsV6v0Group = tmnxLogNotifyObjsV6v0Group.setStatus('obsolete')
tmnxLogNotificationV6v0Group = NotificationGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 25)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogSpaceContention"), ("TIMETRA-LOG-MIB", "tmnxLogAdminLocFailed"), ("TIMETRA-LOG-MIB", "tmnxLogBackupLocFailed"), ("TIMETRA-LOG-MIB", "tmnxLogFileRollover"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeleted"), ("TIMETRA-LOG-MIB", "tmnxTestEvent"), ("TIMETRA-LOG-MIB", "tmnxLogTraceError"), ("TIMETRA-LOG-MIB", "tmnxLogEventThrottled"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetProblem"), ("TIMETRA-LOG-MIB", "tmnxLogAccountingDataLoss"), ("TIMETRA-LOG-MIB", "tmnxStdEventsReplayed"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotificationV6v0Group = tmnxLogNotificationV6v0Group.setStatus('current')
tmnxSnmpTrapDestV6v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 26)).setObjects(("TIMETRA-LOG-MIB", "tmnxStdReplay"), ("TIMETRA-LOG-MIB", "tmnxStdReplayStart"), ("TIMETRA-LOG-MIB", "tmnxStdReplayLastTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxSnmpTrapDestV6v0Group = tmnxSnmpTrapDestV6v0Group.setStatus('current')
tmnxLogAccountingPolicyV6v1Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 27)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogApDefaultInterval"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogAccountingPolicyV6v1Group = tmnxLogAccountingPolicyV6v1Group.setStatus('current')
tmnxLogAccountingPolicyCRV7v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 28)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogApCrLastChanged"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeDelta"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeQueue"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeOCntr"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeQICounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeQECounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeOICounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeOECounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrSignChangeAACounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrAACounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrQueueRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogApCrQueueLastChanged"), ("TIMETRA-LOG-MIB", "tmnxLogApCrQueueICounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrQueueECounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrOverrideCntrRowStatus"), ("TIMETRA-LOG-MIB", "tmnxLogApCrOverrideCntrLastChngd"), ("TIMETRA-LOG-MIB", "tmnxLogApCrOverrideCntrICounters"), ("TIMETRA-LOG-MIB", "tmnxLogApCrOverrideCntrECounters"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogAccountingPolicyCRV7v0Group = tmnxLogAccountingPolicyCRV7v0Group.setStatus('current')
tmnxLogRoutePreferenceV7v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 29)).setObjects(("TIMETRA-LOG-MIB", "tmnxEventPrimaryRoutePref"), ("TIMETRA-LOG-MIB", "tmnxEventSecondaryRoutePref"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogRoutePreferenceV7v0Group = tmnxLogRoutePreferenceV7v0Group.setStatus('current')
tmnxLogNotifyObjsV8v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 30)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedFileId"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLogType"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedLocation"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedName"), ("TIMETRA-LOG-MIB", "tmnxLogFileDeletedCreateTime"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorTitle"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorSubject"), ("TIMETRA-LOG-MIB", "tmnxLogTraceErrorMessage"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEventID"), ("TIMETRA-LOG-MIB", "tmnxLogThrottledEvents"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetId"), ("TIMETRA-LOG-MIB", "tmnxSysLogTargetProblemDescr"), ("TIMETRA-LOG-MIB", "tmnxLogNotifyApInterval"), ("TIMETRA-LOG-MIB", "tmnxStdReplayStartEvent"), ("TIMETRA-LOG-MIB", "tmnxStdReplayEndEvent"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotifyObjsV8v0Group = tmnxLogNotifyObjsV8v0Group.setStatus('current')
tmnxLogNotificationV9v0Group = NotificationGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 31)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogEventOverrun"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotificationV9v0Group = tmnxLogNotificationV9v0Group.setStatus('current')
tmnxLogEventDampedV8v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 32)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogConfigEventsDamped"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogEventDampedV8v0Group = tmnxLogEventDampedV8v0Group.setStatus('current')
tmnxLogApV9v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 33)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogApDataLossCount"), ("TIMETRA-LOG-MIB", "tmnxLogApLastDataLossTimeStamp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogApV9v0Group = tmnxLogApV9v0Group.setStatus('current')
tmnxLogExRbkOpGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 34)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogExRbkOpTblLastChange"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpMaxEntries"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpLastChanged"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpType"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpStatus"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpBegin"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpEnd"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpFile"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpUser"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkOpNumEvents"), ("TIMETRA-LOG-MIB", "tmnxLogExRbkEventOID"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogExRbkOpGroup = tmnxLogExRbkOpGroup.setStatus('current')
tmnxLogNotifyObjsV10v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 35)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogExecRollbackOpIndex"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogNotifyObjsV10v0Group = tmnxLogNotifyObjsV10v0Group.setStatus('current')
tmnxLogApExtGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 36)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogApToFileType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogApExtGroup = tmnxLogApExtGroup.setStatus('current')
tmnxLogAppRouteNotifV10v0Group = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 3, 1, 1, 12, 2, 37)).setObjects(("TIMETRA-LOG-MIB", "tmnxLogColdStartWaitTime"), ("TIMETRA-LOG-MIB", "tmnxLogRouteRecoveryWaitTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
tmnxLogAppRouteNotifV10v0Group = tmnxLogAppRouteNotifV10v0Group.setStatus('current')
mibBuilder.exportSymbols("TIMETRA-LOG-MIB", tmnxLogFileDeletedName=tmnxLogFileDeletedName, tmnxEventAppIndex=tmnxEventAppIndex, TmnxLogFilterEntryId=TmnxLogFilterEntryId, tmnxLogFileIdBackupLoc=tmnxLogFileIdBackupLoc, tmnxEventCounter=tmnxEventCounter, tmnxStdReplay=tmnxStdReplay, tmnxLogApCrSignChangeDelta=tmnxLogApCrSignChangeDelta, tmnxLogIdForwarded=tmnxLogIdForwarded, tmnxLogGroups=tmnxLogGroups, tmnxLogApStorageType=tmnxLogApStorageType, tmnxLogFileIdStorageType=tmnxLogFileIdStorageType, tmnxStdDestAddr=tmnxStdDestAddr, tmnxLogApRowStatus=tmnxLogApRowStatus, tmnxEventThrottleLimit=tmnxEventThrottleLimit, tmnxLogCompliances=tmnxLogCompliances, tmnxLogApCrQueueLastChanged=tmnxLogApCrQueueLastChanged, tmnxSnmpTrapLogEntry=tmnxSnmpTrapLogEntry, tmnxLogExRbkOpIndex=tmnxLogExRbkOpIndex, tmnxStdDescription=tmnxStdDescription, tmnxLogApCrOverrideCntrId=tmnxLogApCrOverrideCntrId, tmnxSyslogTargetMessagePrefix=tmnxSyslogTargetMessagePrefix, tmnxLogFilterParamsApplication=tmnxLogFilterParamsApplication, tmnxLogV8v0Compliance=tmnxLogV8v0Compliance, tmnxLogIdMaxMemorySize=tmnxLogIdMaxMemorySize, tmnxSnmpSetErrsGroup=tmnxSnmpSetErrsGroup, tmnxLogConfigEventsDamped=tmnxLogConfigEventsDamped, tmnxSseModuleName=tmnxSseModuleName, tmnxLogFilterInUse=tmnxLogFilterInUse, tmnxLogNotifyObjsV8v0Group=tmnxLogNotifyObjsV8v0Group, tmnxSseRequestId=tmnxSseRequestId, tmnxLogFilterDefaultAction=tmnxLogFilterDefaultAction, TmnxLogFileId=TmnxLogFileId, tmnxLogApDataLossCount=tmnxLogApDataLossCount, tmnxStgDestAddress=tmnxStgDestAddress, tmnxStdDestPort=tmnxStdDestPort, tmnxStdReplayStartEvent=tmnxStdReplayStartEvent, tmnxLogApDefaultInterval=tmnxLogApDefaultInterval, tmnxLogThrottledEventID=tmnxLogThrottledEventID, tmnxLogExRbkEventOID=tmnxLogExRbkEventOID, tmnxLogIdIndex=tmnxLogIdIndex, TmnxSyslogSeverity=TmnxSyslogSeverity, tmnxEventAppEntry=tmnxEventAppEntry, tmnxLogNotificationV6v0Group=tmnxLogNotificationV6v0Group, tmnxLogFileIdRolloverTime=tmnxLogFileIdRolloverTime, tmnxLogApRecord=tmnxLogApRecord, tmnxEventDropCount=tmnxEventDropCount, tmnxSseModuleId=tmnxSseModuleId, tmnxLogFileDeletedLogType=tmnxLogFileDeletedLogType, tmnxStgDescription=tmnxStgDescription, tmnxSyslogTargetIndex=tmnxSyslogTargetIndex, tmnxLogExecRollbackEventTable=tmnxLogExecRollbackEventTable, tmnxLogNotifyObjsV10v0Group=tmnxLogNotifyObjsV10v0Group, tmnxLogAccountingPolicyV6v1Group=tmnxLogAccountingPolicyV6v1Group, tmnxLogNotifyPrefix=tmnxLogNotifyPrefix, tmnxLogExRbkOpStatus=tmnxLogExRbkOpStatus, tmnxLogAppRouteNotifV10v0Group=tmnxLogAppRouteNotifV10v0Group, tmnxLogRouteRecoveryWaitTime=tmnxLogRouteRecoveryWaitTime, tmnxSnmpTrapGroupTable=tmnxSnmpTrapGroupTable, tmnxLogIdStorageType=tmnxLogIdStorageType, tmnxLogFilterParamsRouterRegexp=tmnxLogFilterParamsRouterRegexp, tmnxLogBackupLocFailed=tmnxLogBackupLocFailed, tmnxLogV4v0Group=tmnxLogV4v0Group, PYSNMP_MODULE_ID=timetraLogMIBModule, tmnxStgDestPort=tmnxStgDestPort, tmnxLogApAdminStatus=tmnxLogApAdminStatus, tmnxLogExRbkOpGroup=tmnxLogExRbkOpGroup, tmnxStdReplayEndEvent=tmnxStdReplayEndEvent, tmnxLogTraceError=tmnxLogTraceError, tmnxLogV5v0Group=tmnxLogV5v0Group, tmnxSnmpTrapV5v0Group=tmnxSnmpTrapV5v0Group, tmnxLogApCrSignChangeQueue=tmnxLogApCrSignChangeQueue, tmnxLogApCrOverrideCntrICounters=tmnxLogApCrOverrideCntrICounters, TmnxSyslogId=TmnxSyslogId, tmnxLogAccountingPolicyCRV7v0Group=tmnxLogAccountingPolicyCRV7v0Group, tmnxSnmpSetErrsMax=tmnxSnmpSetErrsMax, tmnxLogExRbkNotifyObjects=tmnxLogExRbkNotifyObjects, tmnxLogTraceErrorSubject=tmnxLogTraceErrorSubject, 
tmnxLogApCrOverrideCntrECounters=tmnxLogApCrOverrideCntrECounters, tmnxLogFilterParamsTable=tmnxLogFilterParamsTable, tmnxLogExRbkOpType=tmnxLogExRbkOpType, tmnxLogFilterParamsRouterOperator=tmnxLogFilterParamsRouterOperator, tmnxSnmpTrapGroupEntry=tmnxSnmpTrapGroupEntry, tmnxSseTimestamp=tmnxSseTimestamp, tmnxEventSecondaryRoutePref=tmnxEventSecondaryRoutePref, tmnxLogExRbkOpMaxEntries=tmnxLogExRbkOpMaxEntries, tmnxLogExRbkOpEnd=tmnxLogExRbkOpEnd, tmnxEventAppName=tmnxEventAppName, tmnxLogV10v0Compliance=tmnxLogV10v0Compliance, tmnxLogFileIdLogId=tmnxLogFileIdLogId, TmnxPerceivedSeverity=TmnxPerceivedSeverity, tmnxStgIndex=tmnxStgIndex, tmnxLogExecRollbackOpTable=tmnxLogExecRollbackOpTable, tmnxLogColdStartWaitTime=tmnxLogColdStartWaitTime, tmnxLogIdDescription=tmnxLogIdDescription, tmnxEventThrottleInterval=tmnxEventThrottleInterval, tmnxEventPrimaryRoutePref=tmnxEventPrimaryRoutePref, tmnxLogApToFileId=tmnxLogApToFileId, tmnxLogIdDestination=tmnxLogIdDestination, tmnxSnmpSetErrsEntry=tmnxSnmpSetErrsEntry, TmnxLogIdIndex=TmnxLogIdIndex, tmnxLogFilterParamsNumberOperator=tmnxLogFilterParamsNumberOperator, tmnxLogApCustRecordQueueEntry=tmnxLogApCustRecordQueueEntry, tmnxLogNotificationV5v0Group=tmnxLogNotificationV5v0Group, tmnxEventControl=tmnxEventControl, tmnxLogAccountingDataLoss=tmnxLogAccountingDataLoss, tmnxLogTraceErrorTitle=tmnxLogTraceErrorTitle, tmnxLogExRbkOpNumEvents=tmnxLogExRbkOpNumEvents, tmnxSyslogTargetDescription=tmnxSyslogTargetDescription, tmnxLogFileIdEntry=tmnxLogFileIdEntry, tmnxEventReset=tmnxEventReset, tmnxLogApCrSignChangeAACounters=tmnxLogApCrSignChangeAACounters, tmnxSseErrorName=tmnxSseErrorName, TmnxLogFilterOperator=TmnxLogFilterOperator, tmnxLogObsoleteObjsV5v0Group=tmnxLogObsoleteObjsV5v0Group, tmnxLogFilterParamsAction=tmnxLogFilterParamsAction, tmnxLogFileId=tmnxLogFileId, tmnxSyslogTargetMessagesDropped=tmnxSyslogTargetMessagesDropped, tmnxLogExecRollbackEventEntry=tmnxLogExecRollbackEventEntry, tmnxLogFileIdRowStatus=tmnxLogFileIdRowStatus, tmnxLogV7v0Compliance=tmnxLogV7v0Compliance, tmnxStgRowStatus=tmnxStgRowStatus, tmnxLogSyslogV5v0Group=tmnxLogSyslogV5v0Group, TmnxUdpPort=TmnxUdpPort, tmnxEventName=tmnxEventName, tmnxEventAppTable=tmnxEventAppTable, tmnxLogFileIdRetainTime=tmnxLogFileIdRetainTime, tmnxSnmpTrapDestV6v0Group=tmnxSnmpTrapDestV6v0Group, tmnxLogFilterParamsSubject=tmnxLogFilterParamsSubject, tmnxLogObjs=tmnxLogObjs, tmnxLogIdAdminStatus=tmnxLogIdAdminStatus, tmnxLogMaxLogs=tmnxLogMaxLogs, tmnxLogIdTable=tmnxLogIdTable, tmnxLogNotifications=tmnxLogNotifications, tmnxLogFilterParamsSeverity=tmnxLogFilterParamsSeverity, tmnxSyslogTargetTable=tmnxSyslogTargetTable, tmnxSseErrorCode=tmnxSseErrorCode, tmnxLogEventHistGeneralObjs=tmnxLogEventHistGeneralObjs, tmnxSysLogTargetId=tmnxSysLogTargetId, tmnxLogSyslogGroup=tmnxLogSyslogGroup, tmnxStdRowLastChanged=tmnxStdRowLastChanged, tmnxSnmpTrapDestEntry=tmnxSnmpTrapDestEntry, tmnxLogApCrOverrideCntrEntry=tmnxLogApCrOverrideCntrEntry, tmnxLogFilterTable=tmnxLogFilterTable, tmnxLogV4v0Compliance=tmnxLogV4v0Compliance, tmnxLogTraceErrorMessage=tmnxLogTraceErrorMessage, tmnxLogEventDampedV8v0Group=tmnxLogEventDampedV8v0Group, timetraLogMIBModule=timetraLogMIBModule, tmnxLogIdSyslogId=tmnxLogIdSyslogId, tmnxLogFilterParamsRowStatus=tmnxLogFilterParamsRowStatus, tmnxSyslogTargetSeverity=tmnxSyslogTargetSeverity, tmnxLogIdFileId=tmnxLogIdFileId, tmnxLogIdEntry=tmnxLogIdEntry, tmnxStgVersion=tmnxStgVersion, tmnxLogApInterval=tmnxLogApInterval, tmnxLogExRbkOpFile=tmnxLogExRbkOpFile, 
tmnxLogFileDeletedLocation=tmnxLogFileDeletedLocation, tmnxLogApEntry=tmnxLogApEntry, tmnxLogNotifyObjsV6v0Group=tmnxLogNotifyObjsV6v0Group, tmnxEventEntry=tmnxEventEntry, tmnxLogFilterParamsIndex=tmnxLogFilterParamsIndex, tmnxLogFilterId=tmnxLogFilterId, tmnxLogExRbkEventIndex=tmnxLogExRbkEventIndex, TmnxLogFileType=TmnxLogFileType, tmnxLogFileIdCreateTime=tmnxLogFileIdCreateTime, tmnxLogV9v0Compliance=tmnxLogV9v0Compliance, tmnxSseAddress=tmnxSseAddress, tmnxEventSeverity=tmnxEventSeverity, tmnxLogFilterParamsSubjectOperator=tmnxLogFilterParamsSubjectOperator, tmnxStdNotifyCommunity=tmnxStdNotifyCommunity, tmnxLogApCrQueueRowStatus=tmnxLogApCrQueueRowStatus, tmnxLogConformance=tmnxLogConformance, tmnxSyslogTargetAddress=tmnxSyslogTargetAddress, tmnxEventTable=tmnxEventTable, tmnxLogApCustRecordQueueTable=tmnxLogApCustRecordQueueTable, tmnxStdEventsReplayed=tmnxStdEventsReplayed, tmnxLogGlobalGroup=tmnxLogGlobalGroup, tmnxLogNotifyObjsV5v0Group=tmnxLogNotifyObjsV5v0Group, tmnxLogExecRollbackOpIndex=tmnxLogExecRollbackOpIndex, tmnxLogFilterParamsApplOperator=tmnxLogFilterParamsApplOperator, tmnxLogFileDeletedLogId=tmnxLogFileDeletedLogId, tmnxLogIdFilterId=tmnxLogIdFilterId, tmnxLogFilterParamsNumber=tmnxLogFilterParamsNumber, tmnxEventID=tmnxEventID, tmnxLogFileDeletedFileId=tmnxLogFileDeletedFileId, tmnxLogFilterParamsDescription=tmnxLogFilterParamsDescription, tmnxSseSeverityLevel=tmnxSseSeverityLevel, TmnxSyslogFacility=TmnxSyslogFacility, TmnxEventNumber=TmnxEventNumber, tmnxLogAccountingPolicyGroup=tmnxLogAccountingPolicyGroup, tmnxLogFileIdAdminLocation=tmnxLogFileIdAdminLocation, tmnxTestEvent=tmnxTestEvent, tmnxLogExRbkOpTblLastChange=tmnxLogExRbkOpTblLastChange, tmnxStdDestAddrType=tmnxStdDestAddrType, tmnxStdReplayLastTime=tmnxStdReplayLastTime, tmnxLogApCrLastChanged=tmnxLogApCrLastChanged, tmnxLogApToFileType=tmnxLogApToFileType, tmnxLogApCrSignChangeQICounters=tmnxLogApCrSignChangeQICounters, tmnxLogFilterDescription=tmnxLogFilterDescription, tmnxLogEventHistoryObjs=tmnxLogEventHistoryObjs, tmnxLogIdConsoleSession=tmnxLogIdConsoleSession, tmnxSyslogTargetUdpPort=tmnxSyslogTargetUdpPort, tmnxSseSnmpPort=tmnxSseSnmpPort, tmnxStgSecurityLevel=tmnxStgSecurityLevel, tmnxLogV6v1Compliance=tmnxLogV6v1Compliance, tmnxSnmpSetErrsTable=tmnxSnmpSetErrsTable, tmnxSseVersion=tmnxSseVersion, tmnxLogApPolicyId=tmnxLogApPolicyId, tmnxLogApDescription=tmnxLogApDescription, tmnxLogApCrOverrideCntrRowStatus=tmnxLogApCrOverrideCntrRowStatus, tmnxSysLogTargetProblem=tmnxSysLogTargetProblem, tmnxLogIdOperStatus=tmnxLogIdOperStatus, tmnxLogApCrSignChangeOICounters=tmnxLogApCrSignChangeOICounters, tmnxLogFilterParamsSeverityOperator=tmnxLogFilterParamsSeverityOperator, tmnxLogApPortType=tmnxLogApPortType, tmnxSseErrorMsg=tmnxSseErrorMsg, tmnxLogApOperStatus=tmnxLogApOperStatus, tmnxLogApCustRecordTable=tmnxLogApCustRecordTable, tmnxLogFileIdGroup=tmnxLogFileIdGroup, tmnxLogApTable=tmnxLogApTable, tmnxLogAdminLocFailed=tmnxLogAdminLocFailed, tmnxSyslogTargetAddrType=tmnxSyslogTargetAddrType, tmnxLogEventThrottled=tmnxLogEventThrottled, tmnxStdVersion=tmnxStdVersion, tmnxLogNotifyObjsR3r0Group=tmnxLogNotifyObjsR3r0Group, tmnxEventThrottle=tmnxEventThrottle, tmnxLogApCrSignChangeOECounters=tmnxLogApCrSignChangeOECounters, tmnxSnmpTrapLogDescription=tmnxSnmpTrapLogDescription, tmnxLogApCrQueueECounters=tmnxLogApCrQueueECounters, tmnxSyslogTargetRowStatus=tmnxSyslogTargetRowStatus, tmnxLogApDefault=tmnxLogApDefault, tmnxLogApCrOverrideCntrLastChngd=tmnxLogApCrOverrideCntrLastChngd, 
tmnxLogApCustRecordEntry=tmnxLogApCustRecordEntry, tmnxLogExRbkOpUser=tmnxLogExRbkOpUser, tmnxLogRoutePreferenceV7v0Group=tmnxLogRoutePreferenceV7v0Group, tmnxLogFileIdPathName=tmnxLogFileIdPathName, tmnxSnmpTrapDestTable=tmnxSnmpTrapDestTable, tmnxSnmpTrapGroup=tmnxSnmpTrapGroup, tmnxLogNotificationV9v0Group=tmnxLogNotificationV9v0Group, tmnxLogFileIdDescription=tmnxLogFileIdDescription, tmnxLogFileIdLogType=tmnxLogFileIdLogType, tmnxLogFilterParamsRouter=tmnxLogFilterParamsRouter, tmnxEventTest=tmnxEventTest, tmnxLogFileIdTable=tmnxLogFileIdTable, tmnxLogApCrQueueICounters=tmnxLogApCrQueueICounters, TmnxLogFilterId=TmnxLogFilterId, tmnxSseExtraText=tmnxSseExtraText, tmnxLogFileRollover=tmnxLogFileRollover, tmnxLogApCrOverrideCntrTable=tmnxLogApCrOverrideCntrTable, tmnxLogIdTimeFormat=tmnxLogIdTimeFormat, tmnxLogSpaceContention=tmnxLogSpaceContention, tmnxSyslogTargetFacility=tmnxSyslogTargetFacility, tmnxLogExecRollbackOpEntry=tmnxLogExecRollbackOpEntry, tmnxStdName=tmnxStdName, tmnxStdReplayStart=tmnxStdReplayStart)
mibBuilder.exportSymbols("TIMETRA-LOG-MIB", tmnxLogApCrQueueId=tmnxLogApCrQueueId, tmnxLogFileIdOperLocation=tmnxLogFileIdOperLocation, tmnxLogNotificationObjects=tmnxLogNotificationObjects, tmnxLogIdDropped=tmnxLogIdDropped, tmnxLogFilterRowStatus=tmnxLogFilterRowStatus, tmnxStdRowStatus=tmnxStdRowStatus, tmnxLogFilterParamsEntry=tmnxLogFilterParamsEntry, tmnxLogExRbkOpBegin=tmnxLogExRbkOpBegin, tmnxLogApExtGroup=tmnxLogApExtGroup, tmnxLogEventsV5v0Group=tmnxLogEventsV5v0Group, tmnxLogIdRowStatus=tmnxLogIdRowStatus, tmnxLogApLastDataLossTimeStamp=tmnxLogApLastDataLossTimeStamp, tmnxLogApCrSignChangeQECounters=tmnxLogApCrSignChangeQECounters, tmnxSnmpTrapLogTable=tmnxSnmpTrapLogTable, tmnxLogThrottledEvents=tmnxLogThrottledEvents, tmnxLogApCrAACounters=tmnxLogApCrAACounters, tmnxLogFileDeletedCreateTime=tmnxLogFileDeletedCreateTime, tmnxLogNotifyApInterval=tmnxLogNotifyApInterval, tmnxSyslogTargetEntry=tmnxSyslogTargetEntry, tmnxLogEventOverrun=tmnxLogEventOverrun, tmnxLogIdSource=tmnxLogIdSource, tmnxSseAddressType=tmnxSseAddressType, tmnxStgNotifyCommunity=tmnxStgNotifyCommunity, tmnxLogFilterEntry=tmnxLogFilterEntry, tmnxStdSecurityLevel=tmnxStdSecurityLevel, tmnxStdIndex=tmnxStdIndex, tmnxLogV6v0Compliance=tmnxLogV6v0Compliance, tmnxLogApV9v0Group=tmnxLogApV9v0Group, tmnxLogFilterParamsSubjectRegexp=tmnxLogFilterParamsSubjectRegexp, tmnxSyslogTargetAddr=tmnxSyslogTargetAddr, tmnxLogExRbkOpLastChanged=tmnxLogExRbkOpLastChanged, tmnxLogV5v0Compliance=tmnxLogV5v0Compliance, tmnxLogNotificationR3r0Group=tmnxLogNotificationR3r0Group, TmnxCFlash=TmnxCFlash, tmnxLogEventsR2r1Group=tmnxLogEventsR2r1Group, TmnxSyslogIdOrEmpty=TmnxSyslogIdOrEmpty, tmnxSysLogTargetProblemDescr=tmnxSysLogTargetProblemDescr, tmnxLogApCrSignChangeOCntr=tmnxLogApCrSignChangeOCntr, tmnxStdMaxTargets=tmnxStdMaxTargets, tmnxLogFileDeleted=tmnxLogFileDeleted)
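# The module above is pysnmp-generated Python for TIMETRA-LOG-MIB. A minimal loading
# sketch, assuming the compiled module sits in a hypothetical directory that is added
# to the MIB search path ('/path/to/compiled/mibs' is a placeholder, not a real path):
from pysnmp.smi import builder

mib_builder = builder.MibBuilder()
mib_builder.addMibSources(builder.DirMibSource('/path/to/compiled/mibs'))
mib_builder.loadModules('TIMETRA-LOG-MIB')

# Resolve one of the exported notification objects by name and print its OID.
(tmnxLogFileRollover,) = mib_builder.importSymbols('TIMETRA-LOG-MIB', 'tmnxLogFileRollover')
print(tmnxLogFileRollover.getName())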
| 149.108504
| 11,632
| 0.757149
|
a97bbb14468b7c51a0541d43c1777bf9b6413366
| 1,554
|
py
|
Python
|
cogdl/data/extract.py
|
YuHuang42/cogdl
|
36eafd4a2ced8a513643b99a3e63e9919c38717c
|
[
"MIT"
] | 824
|
2020-11-30T14:38:07.000Z
|
2022-03-19T10:14:04.000Z
|
cogdl/data/extract.py
|
YuHuang42/cogdl
|
36eafd4a2ced8a513643b99a3e63e9919c38717c
|
[
"MIT"
] | 38
|
2020-12-21T12:32:57.000Z
|
2022-01-31T02:32:05.000Z
|
cogdl/data/extract.py
|
YuHuang42/cogdl
|
36eafd4a2ced8a513643b99a3e63e9919c38717c
|
[
"MIT"
] | 85
|
2020-12-21T05:16:09.000Z
|
2022-03-28T08:44:22.000Z
|
from __future__ import print_function
import os.path as osp
import tarfile
import zipfile
import bz2
import gzip
def maybe_log(path, log=True):
if log:
print("Extracting", path)
def extract_tar(path, folder, mode="r:gz", log=True):
r"""Extracts a tar archive to a specific folder.
Args:
path (string): The path to the tar archive.
folder (string): The folder.
mode (string, optional): The compression mode. (default: :obj:`"r:gz"`)
log (bool, optional): If :obj:`False`, will not print anything to the
console. (default: :obj:`True`)
"""
maybe_log(path, log)
with tarfile.open(path, mode) as f:
f.extractall(folder)
def extract_zip(path, folder, log=True):
r"""Extracts a zip archive to a specific folder.
Args:
        path (string): The path to the zip archive.
folder (string): The folder.
log (bool, optional): If :obj:`False`, will not print anything to the
console. (default: :obj:`True`)
"""
maybe_log(path, log)
with zipfile.ZipFile(path, "r") as f:
f.extractall(folder)
def extract_bz2(path, folder, log=True):
maybe_log(path, log)
with bz2.open(path, "r") as r:
with open(osp.join(folder, ".".join(path.split(".")[:-1])), "wb") as w:
w.write(r.read())
def extract_gz(path, folder, log=True):
maybe_log(path, log)
with gzip.open(path, "r") as r:
with open(osp.join(folder, ".".join(path.split(".")[:-1])), "wb") as w:
w.write(r.read())
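# The four helpers above share the same (path, folder) calling pattern. A small
# dispatch sketch, using made-up archive and folder names purely for illustration
# (they are not part of cogdl):
archive = "downloads/dataset.tar.gz"
target = "data/dataset"
if archive.endswith((".tar.gz", ".tgz")):
    extract_tar(archive, target, mode="r:gz")
elif archive.endswith(".zip"):
    extract_zip(archive, target)
elif archive.endswith(".bz2"):
    extract_bz2(archive, target)
elif archive.endswith(".gz"):
    extract_gz(archive, target)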
| 27.75
| 79
| 0.606821
|
bfc9849842519e3c5e472c596fc1488e9aeb9663
| 363
|
py
|
Python
|
src/quakestats/core/q3toql/api.py
|
LaudateCorpus1/quakestats
|
d4e44d593e6c7334628d34b5ec648ade5976003e
|
[
"MIT"
] | 21
|
2018-04-24T09:33:01.000Z
|
2022-03-05T10:53:45.000Z
|
src/quakestats/core/q3toql/api.py
|
brabiega/quakestats
|
1628720350a1e4e40ebebdb7988785663892f0be
|
[
"MIT"
] | 42
|
2018-04-13T18:09:19.000Z
|
2021-08-05T20:23:22.000Z
|
src/quakestats/core/q3toql/api.py
|
LaudateCorpus1/quakestats
|
d4e44d593e6c7334628d34b5ec648ade5976003e
|
[
"MIT"
] | 8
|
2018-06-12T18:07:39.000Z
|
2021-08-28T02:26:17.000Z
|
"""
Base module for Q3 game parser
"""
import logging
from quakestats.core.q3parser.parser import (
Q3Game,
)
from quakestats.core.q3toql.transform import (
Q3toQL,
QuakeGame,
)
logger = logging.getLogger(__name__)
class Q3toQLAPI():
def transform(self, q3game: Q3Game) -> QuakeGame:
tf = Q3toQL()
return tf.transform(q3game)
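# A usage sketch, assuming a Q3Game instance has already been produced elsewhere by
# quakestats' Q3 log parser; building that instance is outside this module, so
# 'parsed_game' below is a stand-in name.
api = Q3toQLAPI()
ql_game = api.transform(parsed_game)  # returns a QuakeGame
logger.debug("transformed game: %s", ql_game)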
| 16.5
| 53
| 0.683196
|
c32a246c900b9c07ab2c10928db432e23a4f2f0a
| 6,200
|
py
|
Python
|
models/train_classifier.py
|
jcylim/Disaster_Response_Pipeline
|
0a43e5dfbb8c518e6c46916325ff354fcef593aa
|
[
"FTL",
"CNRI-Python",
"blessing"
] | null | null | null |
models/train_classifier.py
|
jcylim/Disaster_Response_Pipeline
|
0a43e5dfbb8c518e6c46916325ff354fcef593aa
|
[
"FTL",
"CNRI-Python",
"blessing"
] | null | null | null |
models/train_classifier.py
|
jcylim/Disaster_Response_Pipeline
|
0a43e5dfbb8c518e6c46916325ff354fcef593aa
|
[
"FTL",
"CNRI-Python",
"blessing"
] | null | null | null |
import sys
import pandas as pd
from sqlalchemy import create_engine
import pickle
import nltk
nltk.download(['punkt', 'wordnet', 'averaged_perceptron_tagger'])
import re
import numpy as np
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.multioutput import MultiOutputClassifier
# custom transformer
class StartingVerbExtractor(BaseEstimator, TransformerMixin):
'''
    Custom transformer that flags messages whose first sentence starts with a verb
    (VB/VBP) or with 'RT'; used as an extra feature to improve analysis performance
'''
def starting_verb(self, text):
sentence_list = nltk.sent_tokenize(text)
for sentence in sentence_list:
pos_tags = nltk.pos_tag(tokenize(sentence))
first_word, first_tag = pos_tags[0]
if first_tag in ['VB', 'VBP'] or first_word == 'RT':
return True
return False
def fit(self, x, y=None):
return self
def transform(self, X):
X_tagged = pd.Series(X).apply(self.starting_verb)
return pd.DataFrame(X_tagged)
def load_data(database_filepath):
'''
Load dataset, input set, and labels set from SQLite database.
Arguments:
database_filepath: path to database where dataset is saved to (String)
Returns:
X: feature dataset (Pandas Series)
        y: label dataset (Pandas DataFrame)
category_names: list of column names (Pandas Index)
'''
engine = create_engine('sqlite:///' + database_filepath)
df = pd.read_sql_table('df',engine)
# load feature set (X), label set (Y), and column names
X = df['message']
y = df.iloc[:,4:]
category_names = y.columns
return X, y, category_names
def tokenize(text):
'''
Tokenize text to enable NLP.
Arguments:
text: English text to be tokenized for ML (List)
Returns:
clean_tokens: tokenized text for ML (List)
'''
    url_regex = r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
detected_urls = re.findall(url_regex, text)
for url in detected_urls:
text = text.replace(url, "urlplaceholder")
tokens = word_tokenize(text)
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok).lower().strip()
clean_tokens.append(clean_tok)
return clean_tokens
def build_model():
'''
Build ML pipeline that includes GridSearch, FeatureUnion, pipeline with CountVectorizer and TfidfTransformer, StartingVerbExtractor, and AdaBoostClassifier for analysis.
Returns:
model: ML pipeline that contains NLP processes and classifier (Scikit Pipeline)
'''
# parameters for grid search to improve pipeline performance
parameters = {
'features__text_pipeline__vect__ngram_range': ((1, 1), (1, 2)),
'features__text_pipeline__vect__max_df': (0.5, 0.75),
'features__text_pipeline__vect__max_features': (None, 5000),
'features__text_pipeline__tfidf__use_idf': (True, False)
}
pipeline = Pipeline([
('features', FeatureUnion([
('text_pipeline', Pipeline([
('vect', CountVectorizer(tokenizer=tokenize)),
('tfidf', TfidfTransformer())
])),
('starting_verb', StartingVerbExtractor())
])),
('clf', MultiOutputClassifier(AdaBoostClassifier()))
])
model = GridSearchCV(pipeline, param_grid=parameters)
return model
def evaluate_model(model, X_test, Y_test, category_names):
'''
Evaluate performance of ML pipeline by displaying multiple scores.
Arguments:
model: ML pipeline to be evaluated (Scikit Pipeline)
X_test: test feature dataset (Pandas Series)
        Y_test: test label dataset (Pandas DataFrame)
category_names: list of column names (List)
'''
# model predictions
y_pred = model.predict(X_test)
# Overall accuracy of model
accuracy = (y_pred == Y_test).mean()
print("Overall Accuracy:", accuracy.mean())
# scores report
y_pred_df = pd.DataFrame(y_pred, columns=category_names)
for col in category_names:
print('Attribute: {}\n'.format(col))
print(classification_report(Y_test[col], y_pred_df[col]))
def save_model(model, model_filepath):
'''
    Save the trained ML pipeline to a pickle file.
Arguments:
model: ML pipeline to be saved (Scikit Pipeline)
model_filepath: name of pickle file the model is saved to (String)
'''
filename = model_filepath
pickle.dump(model, open(filename, 'wb'))
def main():
if len(sys.argv) == 3:
database_filepath, model_filepath = sys.argv[1:]
print('Loading data...\n DATABASE: {}'.format(database_filepath))
X, Y, category_names = load_data(database_filepath)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)
print('Building model...')
model = build_model()
print('Training model...')
model.fit(X_train, Y_train)
print('Evaluating model...')
evaluate_model(model, X_test, Y_test, category_names)
print('Saving model...\n MODEL: {}'.format(model_filepath))
save_model(model, model_filepath)
print('Trained model saved!')
else:
print('Please provide the filepath of the disaster messages database '\
'as the first argument and the filepath of the pickle file to '\
'save the model to as the second argument. \n\nExample: python '\
'train_classifier.py ../data/DisasterResponse.db classifier.pkl')
if __name__ == '__main__':
main()
| 35.428571
| 173
| 0.667903
|
b1633fde1875dd106b72ce2096b2e4d263eb30c1
| 862
|
py
|
Python
|
chapter 8/sampleCode15.py
|
DTAIEB/Thoughtful-Data-Science
|
8b80e8f3e33b6fdc6672ecee1f27e0b983b28241
|
[
"Apache-2.0"
] | 15
|
2018-06-01T19:18:32.000Z
|
2021-11-28T03:31:35.000Z
|
chapter 8/sampleCode15.py
|
chshychen/Thoughtful-Data-Science
|
8b80e8f3e33b6fdc6672ecee1f27e0b983b28241
|
[
"Apache-2.0"
] | 1
|
2018-12-17T02:01:42.000Z
|
2018-12-17T02:01:42.000Z
|
chapter 8/sampleCode15.py
|
chshychen/Thoughtful-Data-Science
|
8b80e8f3e33b6fdc6672ecee1f27e0b983b28241
|
[
"Apache-2.0"
] | 10
|
2018-09-23T02:45:45.000Z
|
2022-03-12T15:32:05.000Z
|
[[BaseSubApp]]
def add_ticker_selection_markup(refresh_ids):
def deco(fn):
def wrap(self, *args, **kwargs):
return """
<div class="row" style="text-align:center">
<div class="btn-group btn-group-toggle" style="border-bottom:2px solid #eeeeee" data-toggle="buttons">
{%for ticker, state in this.parent_pixieapp.tickers.items()%}
<label class="btn btn-secondary {%if this.parent_pixieapp.active_ticker == ticker%}active{%endif%}"
pd_refresh=\"""" + ",".join(refresh_ids) + """\" pd_script="self.parent_pixieapp.set_active_ticker('{{ticker}}')">
<input type="radio" {%if this.parent_pixieapp.active_ticker == ticker%}checked{%endif%}>
{{ticker}}
</label>
{%endfor%}
</div>
</div>
""" + fn(self, *args, **kwargs)
return wrap
return deco
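# Hypothetical usage sketch -- the sub-app class, route, and div id below are illustrative and
# not part of this snippet. The [[BaseSubApp]] marker suggests the decorator is meant to live on
# BaseSubApp, so a PixieApp route can prepend the ticker toggle bar to its own fragment:
#
# class StockMovingAverageSubApp(BaseSubApp):
#     @route()
#     @BaseSubApp.add_ticker_selection_markup(['chart_div'])
#     def main_screen(self):
#         return '<div id="chart_div" pd_render_onload>...</div>'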
| 43.1
| 126
| 0.603248
|
1cc466a1e1aca20a8038dbf223996f90f76ca31b
| 1,011
|
py
|
Python
|
build/lib/tests/generic_test.py
|
eltoto1219/vltk
|
e84c0efe9062eb864604d96345f71483816340aa
|
[
"Apache-2.0"
] | null | null | null |
build/lib/tests/generic_test.py
|
eltoto1219/vltk
|
e84c0efe9062eb864604d96345f71483816340aa
|
[
"Apache-2.0"
] | null | null | null |
build/lib/tests/generic_test.py
|
eltoto1219/vltk
|
e84c0efe9062eb864604d96345f71483816340aa
|
[
"Apache-2.0"
] | null | null | null |
import os
import unittest
from vltk import get_data
TEST_PATH = os.path.dirname(os.path.realpath(__file__))
class TestGeneric(unittest.TestCase):
    # set up random things like schema, etc.
# for tests, we will want to test each new method, plus a test extraction. The test extraction will have to be first
# in order of most general to most specific tests
def test_extraction_single_dir(self):
# okay so the extraction single dir will
pass
    def test_extraction_multi_dir(self):
pass
def add_text_dataset(self):
pass
def test_create_column_text(self):
pass
def test_append_column_text(self):
pass
def test_remove_column_text(self):
pass
def test_create_labeled_column_text(self):
pass
def test_append_labeled_column_text(self):
pass
def test_remove_labeled_column_text(self):
pass
'''
useful methods:
save_to_disk
datasets.concatenate_datasets
load from disk
'''
| 20.22
| 120
| 0.694362
|
2bd2c11c953dc4b41de49912844b0de63987451e
| 292
|
py
|
Python
|
AD18-flask-admin-image-demo/app/extensions.py
|
AngelLiang/Flask-Demos
|
cf0a74885b873cb2583b3870ccdf3508d3af602e
|
[
"MIT"
] | 3
|
2020-06-17T05:44:48.000Z
|
2021-09-11T02:49:38.000Z
|
AD18-flask-admin-image-demo/app/extensions.py
|
AngelLiang/Flask-Demos
|
cf0a74885b873cb2583b3870ccdf3508d3af602e
|
[
"MIT"
] | 3
|
2021-06-08T20:57:03.000Z
|
2022-02-23T14:54:59.000Z
|
AD18-flask-admin-image-demo/app/extensions.py
|
AngelLiang/Flask-Demos
|
cf0a74885b873cb2583b3870ccdf3508d3af602e
|
[
"MIT"
] | 6
|
2020-06-17T05:44:56.000Z
|
2022-03-29T12:53:05.000Z
|
from flask_sqlalchemy import SQLAlchemy
from flask_admin import Admin
db = SQLAlchemy()
admin = Admin(template_mode='bootstrap3')
def register_extensions(app):
db.init_app(app)
admin.init_app(app)
from app.admin_ import register_modelviews
register_modelviews(admin, app)
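# Minimal wiring sketch, assuming a conventional Flask application factory (the factory itself
# lives elsewhere in this demo; the config value below is illustrative):
#
# from flask import Flask
# from app.extensions import register_extensions
#
# def create_app():
#     app = Flask(__name__)
#     app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///demo.db'
#     register_extensions(app)
#     return app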
| 20.857143
| 46
| 0.773973
|
d3e555ab9178f9ec05d3921383f6b03a54441142
| 1,923
|
py
|
Python
|
setup.py
|
NJACKWinterOfCode/UltimateSecurityCam
|
9f288b90d94043060ea3e6f6617e20e35aefc9af
|
[
"MIT"
] | 10
|
2018-11-22T20:04:39.000Z
|
2020-11-20T18:32:28.000Z
|
setup.py
|
NJACKWinterOfCode/UltimateSecurityCam
|
9f288b90d94043060ea3e6f6617e20e35aefc9af
|
[
"MIT"
] | 53
|
2018-11-22T18:52:52.000Z
|
2019-01-10T11:39:24.000Z
|
setup.py
|
NJACKWinterOfCode/UltimateSecurityCam
|
9f288b90d94043060ea3e6f6617e20e35aefc9af
|
[
"MIT"
] | 15
|
2018-11-23T18:15:43.000Z
|
2019-02-22T16:00:29.000Z
|
from setuptools import setup, find_packages
from codecs import open
from os import path
dir_path = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(dir_path, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
requirements = ['numpy', 'matplotlib', 'pandas', 'sklearn', 'opencv-python', 'argparse', 'pygame', 'opencv-contrib-python']
setup(
name='Ultimate Security Cam',
version = '1.0',
author= 'Nitesh Chaudhry',
author_email= 'niteshbinladen@gmail.com',
url= 'https://github.com/NIteshx2/UltimateSecurityCam',
description =
'''
An easy-to-build, un-hack-able security camera which is impossible to fool.
## Step by step guide
-----------------------
    - Installation of all the required dependencies is completed first.
- The code is made to run via terminal/IDE.
- Sequence of code:
- The code first initializes a three seconds waiting camera window.
- The main code runs to detect movements and record the complete
video footage.
- All the configurations of the video clip are recorded (like Date
and Time, camera fps, maximum object movement recorded at a time,
duration, etc.)
- The video clip and configuration data is saved for future
reference and the code terminates.
''',
long_description = long_description,
#Listing Dependencies that it has
install_requires = requirements,
#LICENSE Info
license= 'The MIT License 2018',
#INFO about where package can run
classifiers=[
'Intended Audience :: Developers and users who wish to use image filters',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Operating System :: Windows',
'Operating System :: Linux',
]
)
| 33.736842
| 123
| 0.720749
|
10e85378e0f3f94eabf1e635c769dead4c112b78
| 3,604
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/desulfonatronospirathiodismutans.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/desulfonatronospirathiodismutans.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/desulfonatronospirathiodismutans.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Desulfonatronospira thiodismutans.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def DesulfonatronospiraThiodismutans(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Desulfonatronospira thiodismutans graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
        Whether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
    cache_path: str = "graphs/string"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.0
- homology.v11.5
- physical.links.v11.0
- physical.links.v11.5
- links.v11.0
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of Desulfonatronospira thiodismutans graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="DesulfonatronospiraThiodismutans",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
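# Illustrative retrieval call (requires the ensmallen bindings and network access; the keyword
# values shown are simply the documented defaults):
#   graph = DesulfonatronospiraThiodismutans(directed=False, version="links.v11.5")
#   print(graph)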
| 33.37037
| 223
| 0.682575
|
50a9c3e0e6b13a425b6eb2fef51a235bd86c6e39
| 353
|
py
|
Python
|
tests/samples/hello-cython/setup.py
|
mlasch/scikit-build
|
664dd9c41cc54047d6d648b0466d525573da5a94
|
[
"MIT"
] | 299
|
2015-10-19T22:45:08.000Z
|
2022-03-30T21:15:55.000Z
|
tests/samples/hello-cython/setup.py
|
mlasch/scikit-build
|
664dd9c41cc54047d6d648b0466d525573da5a94
|
[
"MIT"
] | 588
|
2015-09-17T04:26:59.000Z
|
2022-03-29T14:51:54.000Z
|
tests/samples/hello-cython/setup.py
|
mlasch/scikit-build
|
664dd9c41cc54047d6d648b0466d525573da5a94
|
[
"MIT"
] | 102
|
2015-10-19T22:45:13.000Z
|
2022-03-20T21:09:08.000Z
|
from skbuild import setup
setup(
name="hello-cython",
version="1.2.3",
description="a minimal example package (cython version)",
author='The scikit-build team',
license="MIT",
packages=['hello_cython'],
# The extra '/' was *only* added to check that scikit-build can handle it.
package_dir={'hello_cython': 'hello/'},
)
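# Typical build/installation commands for a scikit-build sample like this one (illustrative;
# CMake and a working C compiler must be available for the Cython extension):
#   pip install .
#   python setup.py bdist_wheel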
| 27.153846
| 78
| 0.660057
|
a0c4626c26ef18abd58bf146714cf77b54404470
| 13,460
|
py
|
Python
|
orca/modules/checkpointer.py
|
IBM/urcanet
|
ce3f41eba23c24506ea2cf9e77cd3898a4eafbaf
|
[
"Apache-2.0"
] | 2
|
2020-03-21T19:09:24.000Z
|
2020-07-08T07:52:55.000Z
|
orca/modules/checkpointer.py
|
IBM/urcanet
|
ce3f41eba23c24506ea2cf9e77cd3898a4eafbaf
|
[
"Apache-2.0"
] | null | null | null |
orca/modules/checkpointer.py
|
IBM/urcanet
|
ce3f41eba23c24506ea2cf9e77cd3898a4eafbaf
|
[
"Apache-2.0"
] | 3
|
2020-03-21T19:12:16.000Z
|
2020-11-17T10:02:44.000Z
|
from typing import Dict, Optional, List, Tuple, Union, Iterable, Any, NamedTuple
import logging
import os
import re
import shutil
import time
import torch
from allennlp.training.trainer import Trainer, TrainerPieces
from allennlp.training.trainer_base import TrainerBase
from allennlp.training.checkpointer import Checkpointer
from allennlp.common import Params
from allennlp.common.checks import ConfigurationError
from allennlp.common.util import (dump_metrics, gpu_memory_mb, parse_cuda_device, peak_memory_mb,
get_frozen_and_tunable_parameter_names, lazy_groups_of)
from allennlp.common.tqdm import Tqdm
from allennlp.data.instance import Instance
from allennlp.data.iterators.data_iterator import DataIterator, TensorDict
from allennlp.data.vocabulary import Vocabulary
from allennlp.models.model import Model
from allennlp.nn import util as nn_util
from allennlp.training.checkpointer import Checkpointer
from allennlp.training.learning_rate_schedulers import LearningRateScheduler
from allennlp.training.momentum_schedulers import MomentumScheduler
from allennlp.training.metric_tracker import MetricTracker
from allennlp.training.optimizers import Optimizer
from allennlp.training.tensorboard_writer import TensorboardWriter
from allennlp.training.trainer_base import TrainerBase
from allennlp.training import util as training_util
from allennlp.training.moving_average import MovingAverage
logger = logging.getLogger(__name__)
class ModifiedCheckpointer(Checkpointer):
def __init__(self,
serialization_dir: str = None,
keep_serialized_model_every_num_seconds: int = None,
num_serialized_models_to_keep: int = 20,
minimal_save: bool = True) -> None:
super().__init__(serialization_dir=serialization_dir,
keep_serialized_model_every_num_seconds=keep_serialized_model_every_num_seconds,
num_serialized_models_to_keep=num_serialized_models_to_keep)
self._minimal_save = minimal_save
def save_checkpoint(self,
epoch: Union[int, str],
model_state: Dict[str, Any],
training_states: Dict[str, Any],
is_best_so_far: bool) -> None:
if self._serialization_dir is not None:
model_path = os.path.join(self._serialization_dir, "model_state_epoch_{}.th".format(epoch))
if not self._minimal_save:
torch.save(model_state, model_path)
training_path = os.path.join(self._serialization_dir,
"training_state_epoch_{}.th".format(epoch))
if not self._minimal_save:
torch.save({**training_states, "epoch": epoch}, training_path)
if is_best_so_far:
logger.info("Best validation performance so far. "
"Copying weights to '%s/best.th'.", self._serialization_dir)
if not self._minimal_save:
shutil.copyfile(model_path, os.path.join(self._serialization_dir, "best.th"))
else:
best_model_path = os.path.join(self._serialization_dir, "best.th")
torch.save(model_state, best_model_path)
if self._num_serialized_models_to_keep and self._num_serialized_models_to_keep >= 0:
self._serialized_paths.append((time.time(), model_path, training_path))
if len(self._serialized_paths) > self._num_serialized_models_to_keep:
paths_to_remove = self._serialized_paths.pop(0)
# Check to see if we should keep this checkpoint, if it has been longer
# then self._keep_serialized_model_every_num_seconds since the last
# kept checkpoint.
remove_path = True
if self._keep_serialized_model_every_num_seconds is not None:
save_time = paths_to_remove[0]
time_since_checkpoint_kept = save_time - self._last_permanent_saved_checkpoint_time
if time_since_checkpoint_kept > self._keep_serialized_model_every_num_seconds:
# We want to keep this checkpoint.
remove_path = False
self._last_permanent_saved_checkpoint_time = save_time
if remove_path:
for fname in paths_to_remove[1:]:
if os.path.isfile(fname):
os.remove(fname)
@TrainerBase.register("modified_trainer")
class ModifiedTrainer(Trainer):
def __init__(self,
model,
optimizer: torch.optim.Optimizer,
iterator,
train_dataset,
validation_dataset = None,
patience: Optional[int] = None,
validation_metric: str = "-loss",
validation_iterator = None,
shuffle: bool = True,
num_epochs: int = 20,
serialization_dir: Optional[str] = None,
num_serialized_models_to_keep: int = 20,
keep_serialized_model_every_num_seconds: int = None,
checkpointer: Checkpointer = None,
model_save_interval: float = None,
cuda_device: Union[int, List] = -1,
grad_norm: Optional[float] = None,
grad_clipping: Optional[float] = None,
learning_rate_scheduler = None,
momentum_scheduler = None,
summary_interval: int = 100,
histogram_interval: int = None,
should_log_parameter_statistics: bool = True,
should_log_learning_rate: bool = False,
log_batch_size_period: Optional[int] = None,
moving_average = None,
minimal_save = False) -> None:
super().__init__(model=model,
optimizer=optimizer,
iterator=iterator,
train_dataset=train_dataset,
validation_dataset=validation_dataset,
patience=patience,
validation_metric=validation_metric,
validation_iterator=validation_iterator,
shuffle=shuffle,
num_epochs=num_epochs,
serialization_dir=serialization_dir,
num_serialized_models_to_keep=num_serialized_models_to_keep,
keep_serialized_model_every_num_seconds=keep_serialized_model_every_num_seconds,
checkpointer=checkpointer,
model_save_interval=model_save_interval,
cuda_device=cuda_device,
grad_norm=grad_norm,
grad_clipping=grad_clipping,
learning_rate_scheduler=learning_rate_scheduler,
momentum_scheduler=momentum_scheduler,
summary_interval=summary_interval,
histogram_interval=histogram_interval,
should_log_parameter_statistics=should_log_parameter_statistics,
should_log_learning_rate=should_log_learning_rate,
log_batch_size_period=log_batch_size_period,
moving_average=moving_average)
self._checkpointer = ModifiedCheckpointer(serialization_dir,
keep_serialized_model_every_num_seconds,
num_serialized_models_to_keep,
minimal_save)
@classmethod
def from_params(cls, params, serialization_dir, recover):
pieces = TrainerPieces.from_params(params, serialization_dir, recover)
return cls.from_params_old(model=pieces.model,
serialization_dir=serialization_dir,
iterator=pieces.iterator,
train_data=pieces.train_dataset,
validation_data=pieces.validation_dataset,
params=pieces.params,
validation_iterator=pieces.validation_iterator)
@classmethod
def from_params_old(cls, # type: ignore
model,
serialization_dir: str,
iterator,
train_data,
validation_data,
params,
validation_iterator = None) -> 'Trainer':
# pylint: disable=arguments-differ
patience = params.pop_int("patience", None)
validation_metric = params.pop("validation_metric", "-loss")
shuffle = params.pop_bool("shuffle", True)
num_epochs = params.pop_int("num_epochs", 20)
cuda_device = parse_cuda_device(params.pop("cuda_device", -1))
grad_norm = params.pop_float("grad_norm", None)
grad_clipping = params.pop_float("grad_clipping", None)
lr_scheduler_params = params.pop("learning_rate_scheduler", None)
momentum_scheduler_params = params.pop("momentum_scheduler", None)
if isinstance(cuda_device, list):
model_device = cuda_device[0]
else:
model_device = cuda_device
if model_device >= 0:
# Moving model to GPU here so that the optimizer state gets constructed on
# the right device.
model = model.cuda(model_device)
parameters = [[n, p] for n, p in model.named_parameters() if p.requires_grad]
optimizer = Optimizer.from_params(parameters, params.pop("optimizer"))
if "moving_average" in params:
moving_average = MovingAverage.from_params(params.pop("moving_average"), parameters=parameters)
else:
moving_average = None
if lr_scheduler_params:
lr_scheduler = LearningRateScheduler.from_params(optimizer, lr_scheduler_params)
else:
lr_scheduler = None
if momentum_scheduler_params:
momentum_scheduler = MomentumScheduler.from_params(optimizer, momentum_scheduler_params)
else:
momentum_scheduler = None
if 'checkpointer' in params:
if 'keep_serialized_model_every_num_seconds' in params or \
'num_serialized_models_to_keep' in params:
raise ConfigurationError(
"Checkpointer may be initialized either from the 'checkpointer' key or from the "
"keys 'num_serialized_models_to_keep' and 'keep_serialized_model_every_num_seconds'"
" but the passed config uses both methods.")
checkpointer = Checkpointer.from_params(params.pop("checkpointer"))
else:
num_serialized_models_to_keep = params.pop_int("num_serialized_models_to_keep", 20)
keep_serialized_model_every_num_seconds = params.pop_int(
"keep_serialized_model_every_num_seconds", None)
checkpointer = Checkpointer(
serialization_dir=serialization_dir,
num_serialized_models_to_keep=num_serialized_models_to_keep,
keep_serialized_model_every_num_seconds=keep_serialized_model_every_num_seconds)
model_save_interval = params.pop_float("model_save_interval", None)
summary_interval = params.pop_int("summary_interval", 100)
histogram_interval = params.pop_int("histogram_interval", None)
should_log_parameter_statistics = params.pop_bool("should_log_parameter_statistics", True)
should_log_learning_rate = params.pop_bool("should_log_learning_rate", False)
log_batch_size_period = params.pop_int("log_batch_size_period", None)
        minimal_save = params.pop_bool("minimal_save", False)
params.assert_empty(cls.__name__)
return cls(model, optimizer, iterator,
train_data, validation_data,
patience=patience,
validation_metric=validation_metric,
validation_iterator=validation_iterator,
shuffle=shuffle,
num_epochs=num_epochs,
serialization_dir=serialization_dir,
cuda_device=cuda_device,
grad_norm=grad_norm,
grad_clipping=grad_clipping,
learning_rate_scheduler=lr_scheduler,
momentum_scheduler=momentum_scheduler,
checkpointer=checkpointer,
model_save_interval=model_save_interval,
summary_interval=summary_interval,
histogram_interval=histogram_interval,
should_log_parameter_statistics=should_log_parameter_statistics,
should_log_learning_rate=should_log_learning_rate,
log_batch_size_period=log_batch_size_period,
moving_average=moving_average,
minimal_save=minimal_save)
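# Illustrative AllenNLP experiment-config fragment selecting this trainer (the surrounding
# dataset_reader/model sections are omitted and the numeric values are examples only):
#
# "trainer": {
#     "type": "modified_trainer",
#     "num_epochs": 20,
#     "patience": 5,
#     "minimal_save": true
# }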
| 53.201581
| 108
| 0.614562
|
b2bd8daba547b4f1b3f7c006905ae2a828c5a5f4
| 479
|
py
|
Python
|
nomadgram/users/migrations/0007_auto_20180322_0216.py
|
zlyanz13/Yonwongram
|
a340f8ef215d3d8967e6977f89f46fbe2cc1a337
|
[
"MIT"
] | null | null | null |
nomadgram/users/migrations/0007_auto_20180322_0216.py
|
zlyanz13/Yonwongram
|
a340f8ef215d3d8967e6977f89f46fbe2cc1a337
|
[
"MIT"
] | null | null | null |
nomadgram/users/migrations/0007_auto_20180322_0216.py
|
zlyanz13/Yonwongram
|
a340f8ef215d3d8967e6977f89f46fbe2cc1a337
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.3 on 2018-03-21 17:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0006_auto_20180322_0118'),
]
operations = [
migrations.AlterField(
model_name='user',
name='gender',
field=models.CharField(choices=[('female', 'Female'), ('not-specified', 'Not sepcified'), ('male', 'Male')], max_length=80, null=True),
),
]
| 25.210526
| 147
| 0.597077
|
30e20ee779276b7cd765c7c160fab127f16d7df8
| 7,078
|
py
|
Python
|
cime/config/e3sm/tests.py
|
sarats/E3SM
|
d6b52862bd04daecdaa71aba23d4952ae16d4b90
|
[
"FTL",
"zlib-acknowledgement",
"RSA-MD"
] | null | null | null |
cime/config/e3sm/tests.py
|
sarats/E3SM
|
d6b52862bd04daecdaa71aba23d4952ae16d4b90
|
[
"FTL",
"zlib-acknowledgement",
"RSA-MD"
] | null | null | null |
cime/config/e3sm/tests.py
|
sarats/E3SM
|
d6b52862bd04daecdaa71aba23d4952ae16d4b90
|
[
"FTL",
"zlib-acknowledgement",
"RSA-MD"
] | null | null | null |
# Here are the tests belonging to e3sm suites. Format is
# <test>.<grid>.<compset>.
# suite_name -> (inherits_from, timelimit, [test [, mods[, machines]]])
# To elaborate, if no mods are needed, a string representing the testname is all that is needed.
# If testmods are needed, a 2-ple must be provided (test, mods)
# If you want to restrict the test mods to certain machines, then a 3-ple is needed (test, mods, [machines])
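# For example, in the suites below "ERS.f19_f19.ICLM45" is a bare test name,
# ("ERS.f19_g16.I1850CLM45", "clm-betr") attaches the "clm-betr" testmods, and a third element
# would restrict that test/mods pair to the listed machines.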
_TESTS = {
"e3sm_land_developer" : (None, "0:45:00",
("ERS.f19_f19.ICLM45",
"ERS.f19_f19.I1850CLM45CN",
"ERS.f09_g16.I1850CLM45CN",
"ERS.f19_f19.I20TRCLM45CN",
"SMS_Ld1.hcru_hcru.I1850CRUCLM45CN",
("ERS.f19_g16.I1850CNECACNTBC" ,"clm-eca"),
("ERS.f19_g16.I1850CNECACTCBC" ,"clm-eca"),
("SMS_Ly2_P1x1.1x1_smallvilleIA.ICLM45CNCROP", "clm-force_netcdf_pio"),
("ERS_Ld3.f45_f45.ICLM45ED","clm-fates"),
("ERS.f19_g16.I1850CLM45","clm-betr"),
("ERS.f19_g16.I1850CLM45","clm-vst"),
("ERS.f09_g16.I1850CLM45CN","clm-bgcinterface"),
"ERS.ne11_oQU240.I20TRCLM45",
("ERS.f19_g16.I1850CNRDCTCBC","clm-rd"),
("ERS.f19_g16.I1850GSWCNPECACNTBC","clm-eca_f19_g16_I1850GSWCNPECACNTBC"),
("ERS.f19_g16.I20TRGSWCNPECACNTBC","clm-eca_f19_g16_I20TRGSWCNPECACNTBC"),
("ERS.f19_g16.I1850GSWCNPRDCTCBC","clm-ctc_f19_g16_I1850GSWCNPRDCTCBC"),
("ERS.f19_g16.I20TRGSWCNPRDCTCBC","clm-ctc_f19_g16_I20TRGSWCNPRDCTCBC"),
"ERS.f09_g16.ICLM45BC")
),
"e3sm_atm_developer" : (None, None,
("ERP_Ln9.ne4_ne4.FC5AV1C-L",
("SMS_Ln9.ne4_ne4.FC5AV1C-L", "cam-outfrq9s"),
("SMS.ne4_ne4.FC5AV1C-L", "cam-cosplite"),
"SMS_R_Ld5.T42_T42.FSCM5A97",
"SMS_D_Ln5.ne4_ne4.FC5AV1C-L")
),
"e3sm_atm_integration" : (None, None,
("ERP_Ln9.ne4_ne4.FC5AV1C-L-AQUAP",
("SMS_Ld1.ne4_ne4.FC5AV1C-L-AQUAP","cam-clubb_only"),
("PET_Ln5.ne4_ne4.FC5AV1C-L","allactive-mach-pet"),
"PEM_Ln5.ne4_ne4.FC5AV1C-L",
("SMS_D_Ln5.ne4_ne4.FC5AV1C-L", "cam-cosplite_nhtfrq5"),
("ERS_Ld5.ne4_ne4.FC5AV1C-L", "cam-rrtmgp"),
"REP_Ln5.ne4_ne4.FC5AV1C-L")
),
#atmospheric tests for extra coverage
"e3sm_atm_extra_coverage" : (None, None,
("SMS_Lm1.ne4_ne4.FC5AV1C-L",
"ERS_Ld31.ne4_ne4.FC5AV1C-L",
"ERP_Lm3.ne4_ne4.FC5AV1C-L",
"SMS_D_Ln5.ne30_ne30.FC5AV1C-L",
("ERP_Ln5.ne30_ne30.FC5AV1C-L"),
"SMS_Ly1.ne4_ne4.FC5AV1C-L")
),
#atmospheric tests for hi-res
"e3sm_atm_hi_res" : (None, "01:30:00",
(
"SMS.ne120_ne120.FC5AV1C-H01A",
)),
#atmospheric tests to mimic low res production runs
"e3sm_atm_prod" : (None, None,
(("SMS_Ln5.ne30_ne30.FC5AV1C-L", "cam-cosplite"),
)
),
#atmospheric nbfb tests
"e3sm_atm_nbfb" : (None, None,
("PGN_P1x1.ne4_ne4.FC5AV1C-L",
"TSC.ne4_ne4.FC5AV1C-L")
),
"e3sm_developer" : (("e3sm_land_developer","e3sm_atm_developer"), "0:45:00",
("ERS.f19_g16_rx1.A",
"ERS.ne30_g16_rx1.A",
"SEQ.f19_g16.X",
"ERIO.ne30_g16_rx1.A",
"HOMME_P24.f19_g16_rx1.A",
"NCK.f19_g16_rx1.A",
"SMS.ne30_f19_g16_rx1.A",
"ERS_Ld5.T62_oQU120.CMPASO-NYF",
"ERS.f09_g16_g.MALISIA",
"SMS.T62_oQU120_ais20.MPAS_LISIO_TEST",
"SMS.f09_g16_a.IGCLM45_MLI",
("SMS_P12x2.ne4_oQU240.A_WCYCL1850","allactive-mach_mods")
)),
"e3sm_integration" : (("e3sm_developer", "e3sm_atm_integration"),"03:00:00",
("ERS.ne11_oQU240.A_WCYCL1850",
("SMS_D_Ld1.ne30_oECv3_ICG.A_WCYCL1850S_CMIP6","allactive-v1cmip6"),
"ERS_Ln9.ne4_ne4.FC5AV1C-L",
#"ERT_Ld31.ne16_g37.B1850C5",#add this line back in with the new correct compset
"NCK.ne11_oQU240.A_WCYCL1850",
("PET.f19_g16.X","allactive-mach-pet"),
("PET.f45_g37_rx1.A","allactive-mach-pet"),
("PET_Ln9_PS.ne30_oECv3_ICG.A_WCYCL1850S","allactive-mach-pet"),
"PEM_Ln9.ne30_oECv3_ICG.A_WCYCL1850S",
"ERP_Ld3.ne30_oECv3_ICG.A_WCYCL1850S",
"SMS.f09_g16_a.MALI",
"SMS_D_Ln5.conusx4v1_conusx4v1.FC5AV1C-L",
("SMS.ne30_oECv3.BGCEXP_BCRC_CNPECACNT_1850","clm-bgcexp"),
("SMS.ne30_oECv3.BGCEXP_BCRC_CNPRDCTC_1850","clm-bgcexp"))
),
#e3sm tests for extra coverage
"e3sm_extra_coverage" : (("e3sm_atm_extra_coverage",), None,
("SMS_D_Ln5.enax4v1_enax4v1.FC5AV1C-L",
"SMS_D_Ln5.twpx4v1_twpx4v1.FC5AV1C-L")),
#e3sm tests for hi-res
"e3sm_hi_res" : (("e3sm_atm_hi_res",),None,
(
("SMS.ne120_oRRS18v3_ICG.A_WCYCL2000_H01AS", "cam-cosplite"),
"SMS.T62_oRRS30to10v3wLI.GMPAS-IAF",
)),
#e3sm tests for RRM grids
"e3sm_rrm" : (None, None,
("SMS_D_Ln5.conusx4v1_conusx4v1.FC5AV1C-L",
"SMS_D_Ln5.enax4v1_enax4v1.FC5AV1C-L",
"SMS_D_Ln5.twpx4v1_twpx4v1.FC5AV1C-L")
),
#e3sm tests to mimic production runs
"e3sm_prod" : (("e3sm_atm_prod",),None,
(
("SMS_Ld2.ne30_oECv3_ICG.A_WCYCL1850S_CMIP6","allactive-v1cmip6"),
)),
"fates" : (None, None,
("ERS_Ld9.1x1_brazil.ICLM45ED",
"ERS_D_Ld9.1x1_brazil.ICLM45ED",
"SMS_D_Lm6.1x1_brazil.ICLM45ED")
),
}
| 51.664234
| 110
| 0.476123
|
31b26e65fe4b3b964735058b017da9109e2f5ba5
| 9,982
|
py
|
Python
|
src/app_util/core.py
|
jnsougata/app_util
|
941421523d38973486321f26b38aa4735ca0a9b2
|
[
"MIT"
] | null | null | null |
src/app_util/core.py
|
jnsougata/app_util
|
941421523d38973486321f26b38aa4735ca0a9b2
|
[
"MIT"
] | null | null | null |
src/app_util/core.py
|
jnsougata/app_util
|
941421523d38973486321f26b38aa4735ca0a9b2
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
import sys
import discord
from .errors import *
from discord.ext import commands
from discord.http import Route
from .http_s import *
from dataclasses import dataclass
from .app import Overwrite, ApplicationCommandOrigin
from typing import List, Optional, Union, Any, Dict
from .enums import OptionType, ApplicationCommandType, PermissionType, try_enum
def _try_flake(snowflake: str) -> Union[int, None]:
try:
return int(snowflake)
except TypeError:
return None
def _make_qual(name: str, guild_id: Optional[int], ctype: ApplicationCommandType, ) -> str:
if guild_id:
post_fix = f'{name}_{guild_id}'
else:
post_fix = name
if ctype is ApplicationCommandType.CHAT_INPUT:
return '__CHAT__' + post_fix
if ctype is ApplicationCommandType.MESSAGE:
return '__MESSAGE__' + post_fix
if ctype is ApplicationCommandType.USER:
return '__USER__' + post_fix
@dataclass(frozen=True)
class InteractionData:
name: str = None
type: int = None
guild_id: Optional[str] = None
id: Union[int, str] = None
resolved: Optional[dict] = None
options: Optional[List[dict]] = None
# below are only used for type != 2
custom_id: Optional[str] = None
component_type: Optional[int] = None
values: Optional[list] = None
# only used for User Command & Message Command
target_id: Optional[str] = None
# only used for modals
components: Optional[List[dict]] = None
class Resolved:
def __init__(self, data: dict, c):
self.__c = c
self.data = data
self.client = c.client
@property
def users(self) -> Dict[int, discord.User]:
if self.data.get('users'):
return {int(key): discord.User(data=payload, state=self.client._connection)
for key, payload in self.data['users'].items()}
@property
def members(self) -> Dict[int, discord.Member]:
if self.data.get('members'):
return {int(key): self.__c.guild.get_member(int(key)) for key, _ in self.data['members'].items()}
@property
def roles(self) -> Dict[int, discord.Role]:
if self.data.get('roles'):
return {int(key): discord.Role(guild=self.__c.guild, data=payload, state=self.client._connection)
for key, payload in self.data['roles'].items()}
@property
def channels(self) -> Dict[int, discord.abc.GuildChannel]:
if self.data.get('channels'):
return {int(key): self.__c.guild.get_channel(int(key)) for key, _ in self.data['channels'].items()}
@property
def messages(self) -> Dict[int, discord.Message]:
if self.data.get('messages'):
return {int(key): discord.Message(data=payload, state=self.client._connection, channel=self.__c.channel)
for key, payload in self.data['messages'].items()}
@property
def attachments(self) -> Dict[int, discord.Attachment]:
if self.data.get('attachments'):
return {int(key): discord.Attachment(data=payload, state=self.client._connection)
for key, payload in self.data['attachments'].items()}
class DummyOption:
value = True
class SlashCommandOption:
def __init__(self, p, data: Dict[str, Any]):
self.data = data
self.guild = p.guild
self.client = p.client
self._resolved = p._resolved
def __repr__(self):
return f'<SlashCommandOption name={self.name} type={self.type}>'
@property
def name(self) -> str:
return self.data['name']
@property
def type(self):
value = self.data['type']
return try_enum(OptionType, value)
@staticmethod
def _hybrid(family: str, options: List[Dict[str, Any]]):
return [{'type': generic['type'], 'value': generic['value'], 'name': f'{family}_{generic["name"]}'}
for generic in options]
@property
def value(self) -> Any:
if self.type is OptionType.STRING:
return self.data.get('value')
elif self.type is OptionType.INTEGER:
return self.data.get('value')
elif self.type is OptionType.BOOLEAN:
return self.data.get('value')
elif self.type is OptionType.USER:
user_id = int(self.data.get('value'))
return self._resolved.users[user_id]
elif self.type is OptionType.CHANNEL:
channel_id = int(self.data.get('value'))
return self._resolved.channels[channel_id]
elif self.type is OptionType.ROLE:
role_id = int(self.data.get('value'))
return self._resolved.roles[role_id]
elif self.type is OptionType.MENTIONABLE:
target_id = int(self.data.get('value'))
map = {}
if not self.guild:
if self._resolved.users:
map.update(self._resolved.users)
if self._resolved.roles:
map.update(self._resolved.roles)
else:
if self._resolved.users:
map.update(self._resolved.users)
if self._resolved.roles:
map.update(self._resolved.roles)
if self._resolved.members:
map.update(self._resolved.members)
return map[target_id]
elif self.type is OptionType.NUMBER:
return self.data['value']
elif self.type is OptionType.ATTACHMENT:
attachment_id = int(self.data['value'])
return self._resolved.attachments[attachment_id]
else:
return self.data.get('value')
@property
def focused(self) -> bool:
return self.data.get('focused')
class ApplicationCommand:
def __init__(self, client: commands.Bot, data: dict):
self.__payload = data
self._client = client
self.id = int(data['id'])
self.guild_id = _try_flake(data.get('guild_id'))
self.name = data['name']
self.description = data['description']
self.type = try_enum(ApplicationCommandType, data['type'])
self._qual = _make_qual(self.name, self.guild_id, self.type)
self.application_id = int(data['application_id'])
self.options = data.get('options')
self.version = int(data['version'])
self.default_access = data['default_permission']
self.dm_access = self.default_access or False
self._permissions = data.get('permissions') or {}
self.overwrites = {}
self.__parse_permissions()
self.name_locale = data.get('name_localizations')
self.description_locale = data.get('description_localizations')
def __eq__(self, other):
return self.id == other.id
def __repr__(self):
return f'<ApplicationCommand id = {self.id} name = {self.name}>'
@classmethod
def _from_data(cls, client: commands.Bot, data: dict):
return cls(client, data)
@property
def guild_specific(self) -> bool:
if self.guild_id:
return True
return False
@property
def guild(self):
if self.guild_id:
return self._client.get_guild(self.guild_id)
return None
def overwrite_for(self, guild: discord.Guild, entity: Union[discord.Role, discord.User]) -> bool:
permission = self.overwrites.get(guild.id)
if permission is None:
return self.default_access
for_entity = permission.get(entity.id)
if for_entity is None:
return self.default_access
return for_entity['allowed']
    async def delete(self):
        # the HTTP delete is issued by delete_command (imported from .http_s); afterwards the
        # stale entry is dropped from the client-side cache
        await delete_command(self._client, self.id, self.guild_id)
        self._client._application_commands.pop(self.id)
def __parse_permissions(self):
for guild_id, perms in self._permissions.items():
for p in perms:
self.overwrites[int(guild_id)] = {int(p['id']): {'allowed': p['permission'], 'type': p['type']}}
def _cache_permissions(self, ows: dict, guild_id: int):
self._permissions[guild_id] = ows['permissions']
self.__parse_permissions()
    def _build_overwrites(self, guild_id: int):
        ows = self.overwrites.get(guild_id)
        if ows:
            return [{'id': str(entity_id), 'type': ovrt['type'], 'permission': ovrt['allowed']}
                    for entity_id, ovrt in ows.items()]
async def edit_overwrites(self, guild: discord.Guild, overwrites: List[Overwrite]):
payload = {'permissions': [o.to_dict() for o in overwrites]}
data = await put_overwrites(self._client, self.id, guild.id, payload)
self._cache_permissions(data, guild.id)
async def edit_overwrite_for(self, guild: discord.Guild, overwrite: Overwrite):
container = self._build_overwrites(guild.id)
new = overwrite.to_dict()
for ovrt in container:
if ovrt['id'] == new['id']:
container.remove(ovrt)
container.append(new)
payload = {'permissions': container}
data = await put_overwrites(self._client, self.id, guild.id, payload)
self._cache_permissions(data, guild.id)
async def update(self, new_command: ApplicationCommandOrigin) -> ApplicationCommand:
if new_command.type is self.type:
try:
data = await patch_existing_command(self._client, self, new_command)
except discord.errors.HTTPException as e:
raise e
else:
updated = self._from_data(self._client, data)
self._client._application_commands.pop(updated.id)
self._client._application_commands[updated.id] = updated
return updated
raise CommandTypeMismatched(
f'Type mismatched while editing command `{self.name}`. Expected: {self.type} & got: {new_command.type}')
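# Illustrative permission check, not part of the library API (the command, guild, and role
# objects are assumed to already be available from the running bot):
#   command = bot._application_commands[command_id]
#   if command.overwrite_for(guild, role):
#       ...  # the role may use the command in this guild (falls back to default_access)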
| 35.272085
| 116
| 0.623021
|
fef456260a8dbbe679342f52c5c663a9f52db585
| 4,500
|
py
|
Python
|
api/tests/connection_test.py
|
sthagen/facebook-pyre-check
|
cea188088c9632b10e0d0a658a8f1954f19413cd
|
[
"MIT"
] | null | null | null |
api/tests/connection_test.py
|
sthagen/facebook-pyre-check
|
cea188088c9632b10e0d0a658a8f1954f19413cd
|
[
"MIT"
] | null | null | null |
api/tests/connection_test.py
|
sthagen/facebook-pyre-check
|
cea188088c9632b10e0d0a658a8f1954f19413cd
|
[
"MIT"
] | null | null | null |
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import subprocess
import unittest
from pathlib import Path
from unittest.mock import call, MagicMock, patch
from ..connection import PyreConnection, PyreQueryError
class ConnectionApiTest(unittest.TestCase):
# pyre-ignore[56]
@patch.object(
PyreConnection,
"_validate_query_response",
side_effect=lambda response: response,
)
@patch("subprocess.run")
def test_query_server(
self, run: MagicMock, _validate_query_response: MagicMock
) -> None:
run_result = MagicMock()
run_result.returncode = 0
run.return_value = run_result
# We always start a server once when querying.
pyre_connection = PyreConnection(Path("/tmp"))
pyre_connection.server_initialized = False
pyre_connection.query_server("hi")
self.assertEqual(
run.call_args_list,
[
call(
["pyre", "--noninteractive", "incremental"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
call(
["pyre", "--noninteractive", "query", "hi"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
],
)
run.reset_mock()
pyre_connection = PyreConnection(Path("/tmp"))
pyre_connection.query_server("hi")
self.assertEqual(
run.call_args_list,
[
call(
["pyre", "--noninteractive", "incremental"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
call(
["pyre", "--noninteractive", "query", "hi"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
],
)
run.reset_mock()
pyre_connection = PyreConnection(Path("/tmp"))
pyre_connection.query_server("hi")
pyre_connection.query_server("bye")
self.assertEqual(
run.call_args_list,
[
call(
["pyre", "--noninteractive", "incremental"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
call(
["pyre", "--noninteractive", "query", "hi"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
call(
["pyre", "--noninteractive", "query", "bye"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
],
)
run.reset_mock()
with PyreConnection(Path("/tmp")) as pyre_connection:
pyre_connection.query_server("hi")
self.assertEqual(
run.call_args_list,
[
call(
["pyre", "--noninteractive", "incremental"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
call(
["pyre", "--noninteractive", "query", "hi"],
cwd="/tmp",
stdout=subprocess.PIPE,
),
call(["pyre", "--noninteractive", "stop"], check=True, cwd="/tmp"),
],
)
def test_validate_query_response(self) -> None:
with self.assertRaisesRegex(PyreQueryError, "Foo"):
PyreConnection._validate_query_response('{"error": "Foo"}')
with self.assertRaisesRegex(PyreQueryError, "is not valid JSON."):
PyreConnection._validate_query_response("asdf")
with self.assertRaisesRegex(PyreQueryError, "The server response is invalid."):
PyreConnection._validate_query_response("{}")
self.assertEqual(
PyreConnection._validate_query_response('{"response": "Foo"}'),
{"response": "Foo"},
)
def test_context_manager(self) -> None:
with patch.object(PyreConnection, "start_server") as start_server, patch.object(
PyreConnection, "stop_server"
) as stop_server:
with PyreConnection():
pass
start_server.assert_called_once_with()
stop_server.assert_called_once_with()
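# The suite runs like any unittest module, e.g. (module path shown is illustrative):
#   python -m unittest api.tests.connection_test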
| 33.834586
| 88
| 0.506444
|
8435b980d71886220bfaab308a4c2573102d5303
| 365
|
py
|
Python
|
Day_8/Day 8: Dictionaries and Maps.py
|
chmielak90/HackerRank
|
25aff18bb2a02a3af5c5e35f0a99fcac54b21ca2
|
[
"MIT"
] | null | null | null |
Day_8/Day 8: Dictionaries and Maps.py
|
chmielak90/HackerRank
|
25aff18bb2a02a3af5c5e35f0a99fcac54b21ca2
|
[
"MIT"
] | null | null | null |
Day_8/Day 8: Dictionaries and Maps.py
|
chmielak90/HackerRank
|
25aff18bb2a02a3af5c5e35f0a99fcac54b21ca2
|
[
"MIT"
] | null | null | null |
n = int(input().strip())
phoneBook = {}
for i in range(n):
add_value = [str(arr_temp) for arr_temp in input().strip().split(' ')]
phoneBook[add_value[0]] = add_value[1]
for j in range(n):
key_value = str(input().strip())
if key_value in phoneBook:
print('{}={}'.format(key_value, phoneBook[key_value]))
else:
print('Not found')
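# Worked example (illustrative input/output):
#   input : 2, then "sam 99912222" and "tom 11122222", then the queries "sam" and "edward"
#   output: "sam=99912222" and "Not found"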
| 28.076923
| 74
| 0.610959
|
8bdea8acb4beca1ca56596e53d19b0de0ee5163b
| 11,490
|
py
|
Python
|
code/python/QuotesAPIforDigitalPortals/v2/fds/sdk/QuotesAPIforDigitalPortals/model/basic_mic_operating_list_data.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/QuotesAPIforDigitalPortals/v2/fds/sdk/QuotesAPIforDigitalPortals/model/basic_mic_operating_list_data.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/QuotesAPIforDigitalPortals/v2/fds/sdk/QuotesAPIforDigitalPortals/model/basic_mic_operating_list_data.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
Prime Developer Trial
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.QuotesAPIforDigitalPortals.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.QuotesAPIforDigitalPortals.exceptions import ApiAttributeError
def lazy_import():
from fds.sdk.QuotesAPIforDigitalPortals.model.basic_mic_operating_list_data_filter import BasicMicOperatingListDataFilter
globals()['BasicMicOperatingListDataFilter'] = BasicMicOperatingListDataFilter
class BasicMicOperatingListData(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'filter': (BasicMicOperatingListDataFilter,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'filter': 'filter', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""BasicMicOperatingListData - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
filter (BasicMicOperatingListDataFilter): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""BasicMicOperatingListData - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
filter (BasicMicOperatingListDataFilter): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| 43.854962
| 125
| 0.581201
|
bfe589cead26bd0bf0f0780ea3af400ed4cc1848
| 3,088
|
py
|
Python
|
bbs/settings.py
|
forumPro/bbs
|
8e85d404ff483039128eb155012f552744cfcc7b
|
[
"MIT"
] | null | null | null |
bbs/settings.py
|
forumPro/bbs
|
8e85d404ff483039128eb155012f552744cfcc7b
|
[
"MIT"
] | null | null | null |
bbs/settings.py
|
forumPro/bbs
|
8e85d404ff483039128eb155012f552744cfcc7b
|
[
"MIT"
] | null | null | null |
"""
Django settings for bbs project.
Generated by 'django-admin startproject' using Django 1.11.15.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kjxnb)fjw3j&(t^+-d(mob10n0)$@(rb81erp6wute&-rbkwx0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bbs.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bbs.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| 25.520661
| 91
| 0.695596
|
88394a90cac88e400573dd749e0f10edffa14c2e
| 25,927
|
py
|
Python
|
src/fichier.py
|
Franckyi/Simulation-Telecom
|
e856b78bd1da487d52676fc97750be73858f3f30
|
[
"MIT"
] | null | null | null |
src/fichier.py
|
Franckyi/Simulation-Telecom
|
e856b78bd1da487d52676fc97750be73858f3f30
|
[
"MIT"
] | null | null | null |
src/fichier.py
|
Franckyi/Simulation-Telecom
|
e856b78bd1da487d52676fc97750be73858f3f30
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
"""
Runs simulations described in a JSON file
"""
import json
import sys
from pprint import pprint
import affichage
import canal
import codage
import echantillonnage
import modulation
import outils
import sequence
print
print "############################"
print "# #"
print "# fichier.py #"
print "# #"
print "############################"
print
if len(sys.argv) < 2:
print "Veuillez passer un fichier en paramètre"
exit(1)
fichier = sys.argv[1]
print "> Lecture du fichier"
data = json.load(open(fichier))
print "> Affichage du fichier"
print "#####"
pprint(data)
print "#####"
# Sequence
print "> Analyse de la séquence"
if 'sequence' not in data:
print "!!! Elément 'sequence' introuvable !!!"
exit(2)
if 'seq' in data['sequence']:
seq = sequence.sequence_chaine(data['sequence']['seq'])
elif 'repetitions' in data['sequence']:
seq = sequence.sequence_pseudo_aleatoire(data['sequence']['taille'], data['sequence']['repetitions'])
else:
seq = sequence.sequence_aleatoire(data['sequence']['taille'])
db = data['sequence']['db']
# Sequence display
print "> Analyse de l'affichage de la séquence"
if 'aff_sequence' not in data:
print "!!! Elément 'aff_sequence' introuvable !!!"
exit(2)
aff_sequence = data['aff_sequence']
aff_sequence_texte = aff_sequence['sequence']
aff_repartition = 'repartition' in aff_sequence
if aff_repartition:
bps = aff_sequence['repartition']['bps']
# Coding
has_codage = 'codage' in data
if has_codage:
print "> Analyse du codage"
fech_codage = data['codage']['fech']
ech_codage = echantillonnage.creer_echantillons(seq, db, fech_codage)
if data['codage']['type'] == 'nrz':
y_codage = codage.coder_nrz(seq, db, ech_codage, fech_codage, data['codage']['v0'], data['codage']['v1'])
elif data['codage']['type'] == 'rz':
y_codage = codage.coder_rz(seq, db, ech_codage, fech_codage, data['codage']['v0'], data['codage']['v1'])
elif data['codage']['type'] == 'manchester':
y_codage = codage.coder_manchester(seq, db, ech_codage, fech_codage, data['codage']['vp'], data['codage']['vm'])
elif data['codage']['type'] == '2b1q':
if 'vmax' in data['codage']:
y_codage = codage.coder_2b1q_max(seq, db, ech_codage, fech_codage, data['codage']['vmax'])
else:
y_codage = codage.coder_2b1q(seq, db, ech_codage, fech_codage, data['codage']['v00'], data['codage']['v01'],
data['codage']['v10'], data['codage']['v11'])
else:
print "!!! Codage '{}' inconnu !!!".format(data['codage']['type'])
exit(3)
# Affichage codage
print "> Analyse de l'affichage du codage"
if 'aff_codage' not in data:
print "!!! Elément 'aff_codage' introuvable !!!"
exit(2)
aff_codage = data['aff_codage']
aff_chronogramme_codage = 'chronogramme' in aff_codage
if aff_chronogramme_codage:
aff_chronogramme_codagej = aff_codage['chronogramme']
aff_chronogramme_codage_tmin = aff_chronogramme_codagej['tmin'] if 'tmin' in aff_chronogramme_codagej else None
aff_chronogramme_codage_tmax = aff_chronogramme_codagej['tmax'] if 'tmax' in aff_chronogramme_codagej else None
aff_chronogramme_codage_vmin = aff_chronogramme_codagej['vmin'] if 'vmin' in aff_chronogramme_codagej else None
aff_chronogramme_codage_vmax = aff_chronogramme_codagej['vmax'] if 'vmax' in aff_chronogramme_codagej else None
aff_chronogramme_codage_xlegend = aff_chronogramme_codagej[
'xlegend'] if 'xlegend' in aff_chronogramme_codagej else None
aff_chronogramme_codage_ylegend = aff_chronogramme_codagej[
'ylegend'] if 'ylegend' in aff_chronogramme_codagej else None
aff_chronogramme_codage_titre = aff_chronogramme_codagej[
'titre'] if 'titre' in aff_chronogramme_codagej else u"Chronogramme de la séquence codée"
aff_spectre_codage = 'spectre' in aff_codage
if aff_spectre_codage:
aff_spectre_codagej = aff_codage['spectre']
aff_spectre_codage_fmin = aff_spectre_codagej['fmin'] if 'fmin' in aff_spectre_codagej else None
aff_spectre_codage_fmax = aff_spectre_codagej['fmax'] if 'fmax' in aff_spectre_codagej else None
aff_spectre_codage_vmin = aff_spectre_codagej['vmin'] if 'vmin' in aff_spectre_codagej else None
aff_spectre_codage_vmax = aff_spectre_codagej['vmax'] if 'vmax' in aff_spectre_codagej else None
aff_spectre_codage_xlegend = aff_spectre_codagej['xlegend'] if 'xlegend' in aff_spectre_codagej else None
aff_spectre_codage_ylegend = aff_spectre_codagej['ylegend'] if 'ylegend' in aff_spectre_codagej else None
aff_spectre_codage_titre = aff_spectre_codagej[
'titre'] if 'titre' in aff_spectre_codagej else u"Spectre de la séquence codée"
aff_diagramme_oeil = 'diagramme_oeil' in aff_codage
if aff_diagramme_oeil:
aff_diagramme_oeilj = aff_codage['diagramme_oeil']
aff_diagramme_oeil_n = aff_diagramme_oeilj['nb_yeux']
aff_diagramme_oeil_titre = aff_diagramme_oeilj[
'titre'] if 'titre' in aff_diagramme_oeilj else u"Diagramme de l'oeil de la séquence codée"
# Affichage codage à travers le canal
print "> Analyse de l'affichage du codage à travers le canal"
if 'aff_codage_canal' not in data:
print "!!! Elément 'aff_codage_canal' introuvable !!!"
exit(2)
aff_codage_canal = data['aff_codage_canal']
aff_chronogramme_codage_canal = 'chronogramme' in aff_codage_canal
if aff_chronogramme_codage_canal:
aff_chronogramme_codage_canalj = aff_codage_canal['chronogramme']
aff_chronogramme_codage_canal_tmin = aff_chronogramme_codage_canalj[
'tmin'] if 'tmin' in aff_chronogramme_codage_canalj else None
aff_chronogramme_codage_canal_tmax = aff_chronogramme_codage_canalj[
'tmax'] if 'tmax' in aff_chronogramme_codage_canalj else None
aff_chronogramme_codage_canal_vmin = aff_chronogramme_codage_canalj[
'vmin'] if 'vmin' in aff_chronogramme_codage_canalj else None
aff_chronogramme_codage_canal_vmax = aff_chronogramme_codage_canalj[
'vmax'] if 'vmax' in aff_chronogramme_codage_canalj else None
aff_chronogramme_codage_canal_xlegend = aff_chronogramme_codage_canalj[
'xlegend'] if 'xlegend' in aff_chronogramme_codage_canalj else None
aff_chronogramme_codage_canal_ylegend = aff_chronogramme_codage_canalj[
'ylegend'] if 'ylegend' in aff_chronogramme_codage_canalj else None
aff_chronogramme_codage_canal_titre = aff_chronogramme_codage_canalj['titre'] \
if 'titre' in aff_chronogramme_codage_canalj else u"Chronogramme de la séquence codée à travers le canal"
aff_spectre_codage_canal = 'spectre' in aff_codage_canal
if aff_spectre_codage_canal:
aff_spectre_codage_canalj = aff_codage_canal['spectre']
aff_spectre_codage_canal_fmin = aff_spectre_codage_canalj[
'fmin'] if 'fmin' in aff_spectre_codage_canalj else None
aff_spectre_codage_canal_fmax = aff_spectre_codage_canalj[
'fmax'] if 'fmax' in aff_spectre_codage_canalj else None
aff_spectre_codage_canal_vmin = aff_spectre_codage_canalj[
'vmin'] if 'vmin' in aff_spectre_codage_canalj else None
aff_spectre_codage_canal_vmax = aff_spectre_codage_canalj[
'vmax'] if 'vmax' in aff_spectre_codage_canalj else None
aff_spectre_codage_canal_xlegend = aff_spectre_codage_canalj[
'xlegend'] if 'xlegend' in aff_spectre_codage_canalj else None
aff_spectre_codage_canal_ylegend = aff_spectre_codage_canalj[
'ylegend'] if 'ylegend' in aff_spectre_codage_canalj else None
aff_spectre_codage_canal_titre = aff_spectre_codage_canalj['titre'] \
if 'titre' in aff_spectre_codage_canalj else u"Spectre de la séquence codée à travers le canal"
aff_diagramme_oeil_canal = 'diagramme_oeil' in aff_codage_canal
if aff_diagramme_oeil_canal:
aff_diagramme_oeil_canalj = aff_codage_canal['diagramme_oeil']
aff_diagramme_oeil_canal_n = aff_diagramme_oeil_canalj['nb_yeux']
aff_diagramme_oeil_canal_titre = aff_diagramme_oeil_canalj['titre'] if 'titre' in aff_diagramme_oeil_canalj \
else u"Diagramme de l'oeil de la séquence codée à travers le canal"
# Modulation
has_modulation = 'modulation' in data
if has_modulation:
print "> Analyse de la modulation"
fech_modulation = data['modulation']['fech']
ech_modulation = echantillonnage.creer_echantillons(seq, db, fech_modulation)
if data['modulation']['type'] == 'ask':
y_modulation = modulation.moduler_ask(seq, db, ech_modulation, fech_modulation, data['modulation']['fp'],
data['modulation']['v'])
elif data['modulation']['type'] == 'fsk':
y_modulation = modulation.moduler_fsk(seq, db, ech_modulation, fech_modulation, data['modulation']['v'],
data['modulation']['f'])
elif data['modulation']['type'] == 'psk':
y_modulation = modulation.moduler_psk(seq, db, ech_modulation, fech_modulation, data['modulation']['v'],
data['modulation']['fp'], data['modulation']['p'])
# elif data['modulation']['type'] == 'maq':
# y_modulation = modulation.moduler_maq(seq, db, ech_modulation, fech_modulation, data['modulation']['v'],
# data['modulation']['fp'], data['modulation']['p'])
else:
print "!!! Modulation '{}' inconnue !!!".format(data['modulation']['type'])
exit(3)
# Affichage modulation
print "> Analyse de l'affichage de la modulation"
if 'aff_modulation' not in data:
print "!!! Elément 'aff_modulation' introuvable !!!"
exit(2)
aff_modulation = data['aff_modulation']
aff_chronogramme_modulation = 'chronogramme' in aff_modulation
if aff_chronogramme_modulation:
aff_chronogramme_modulationj = aff_modulation['chronogramme']
aff_chronogramme_modulation_tmin = aff_chronogramme_modulationj[
'tmin'] if 'tmin' in aff_chronogramme_modulationj else None
aff_chronogramme_modulation_tmax = aff_chronogramme_modulationj[
'tmax'] if 'tmax' in aff_chronogramme_modulationj else None
aff_chronogramme_modulation_vmin = aff_chronogramme_modulationj[
'vmin'] if 'vmin' in aff_chronogramme_modulationj else None
aff_chronogramme_modulation_vmax = aff_chronogramme_modulationj[
'vmax'] if 'vmax' in aff_chronogramme_modulationj else None
aff_chronogramme_modulation_xlegend = aff_chronogramme_modulationj[
'xlegend'] if 'xlegend' in aff_chronogramme_modulationj else None
aff_chronogramme_modulation_ylegend = aff_chronogramme_modulationj[
'ylegend'] if 'ylegend' in aff_chronogramme_modulationj else None
aff_chronogramme_modulation_titre = aff_chronogramme_modulationj[
'titre'] if 'titre' in aff_chronogramme_modulationj else u"Chronogramme de la porteuse modulée"
aff_spectre_modulation = 'spectre' in aff_modulation
if aff_spectre_modulation:
aff_spectre_modulationj = aff_modulation['spectre']
aff_spectre_modulation_fmin = aff_spectre_modulationj['fmin'] if 'fmin' in aff_spectre_modulationj else None
aff_spectre_modulation_fmax = aff_spectre_modulationj['fmax'] if 'fmax' in aff_spectre_modulationj else None
aff_spectre_modulation_vmin = aff_spectre_modulationj['vmin'] if 'vmin' in aff_spectre_modulationj else None
aff_spectre_modulation_vmax = aff_spectre_modulationj['vmax'] if 'vmax' in aff_spectre_modulationj else None
aff_spectre_modulation_xlegend = aff_spectre_modulationj[
'xlegend'] if 'xlegend' in aff_spectre_modulationj else None
aff_spectre_modulation_ylegend = aff_spectre_modulationj[
'ylegend'] if 'ylegend' in aff_spectre_modulationj else None
aff_spectre_modulation_titre = aff_spectre_modulationj[
'titre'] if 'titre' in aff_spectre_modulationj else u"Spectre de la porteuse modulée"
aff_constellation = 'constellation' in aff_modulation
if aff_constellation:
        aff_constellation_j = aff_modulation['constellation']
aff_constellation_titre = aff_constellation_j[
'titre'] if 'titre' in aff_constellation_j else u"Constellation de la porteuse modulée"
# Affichage canal
print "> Analyse de l'affichage de la modulation à travers le canal"
if 'aff_modulation_canal' not in data:
print "!!! Elément 'aff_modulation_canal' introuvable !!!"
exit(2)
aff_modulation_canal = data['aff_modulation_canal']
aff_chronogramme_modulation_canal = 'chronogramme' in aff_modulation_canal
if aff_chronogramme_modulation_canal:
aff_chronogramme_modulation_canalj = aff_modulation_canal['chronogramme']
aff_chronogramme_modulation_canal_tmin = aff_chronogramme_modulation_canalj[
'tmin'] if 'tmin' in aff_chronogramme_modulation_canalj else None
aff_chronogramme_modulation_canal_tmax = aff_chronogramme_modulation_canalj[
'tmax'] if 'tmax' in aff_chronogramme_modulation_canalj else None
aff_chronogramme_modulation_canal_vmin = aff_chronogramme_modulation_canalj[
'vmin'] if 'vmin' in aff_chronogramme_modulation_canalj else None
aff_chronogramme_modulation_canal_vmax = aff_chronogramme_modulation_canalj[
'vmax'] if 'vmax' in aff_chronogramme_modulation_canalj else None
aff_chronogramme_modulation_canal_xlegend = aff_chronogramme_modulation_canalj[
'xlegend'] if 'xlegend' in aff_chronogramme_modulation_canalj else None
aff_chronogramme_modulation_canal_ylegend = aff_chronogramme_modulation_canalj[
'ylegend'] if 'ylegend' in aff_chronogramme_modulation_canalj else None
aff_chronogramme_modulation_canal_titre = aff_chronogramme_modulation_canalj[
'titre'] if 'titre' in aff_chronogramme_modulation_canalj else \
u"Chronogramme de la porteuse modulée à travers le canal"
aff_spectre_modulation_canal = 'spectre' in aff_modulation_canal
if aff_spectre_modulation_canal:
aff_spectre_modulation_canalj = aff_modulation_canal['spectre']
aff_spectre_modulation_canal_fmin = aff_spectre_modulation_canalj[
'fmin'] if 'fmin' in aff_spectre_modulation_canalj else None
aff_spectre_modulation_canal_fmax = aff_spectre_modulation_canalj[
'fmax'] if 'fmax' in aff_spectre_modulation_canalj else None
aff_spectre_modulation_canal_vmin = aff_spectre_modulation_canalj[
'vmin'] if 'vmin' in aff_spectre_modulation_canalj else None
aff_spectre_modulation_canal_vmax = aff_spectre_modulation_canalj[
'vmax'] if 'vmax' in aff_spectre_modulation_canalj else None
aff_spectre_modulation_canal_xlegend = aff_spectre_modulation_canalj[
'xlegend'] if 'xlegend' in aff_spectre_modulation_canalj else None
aff_spectre_modulation_canal_ylegend = aff_spectre_modulation_canalj[
'ylegend'] if 'ylegend' in aff_spectre_modulation_canalj else None
aff_spectre_modulation_canal_titre = aff_spectre_modulation_canalj['titre'] \
if 'titre' in aff_spectre_modulation_canalj else u"Spectre de la porteuse modulée à travers le canal"
aff_constellation_canal = 'constellation' in aff_modulation_canal
if aff_constellation_canal:
        aff_constellation_canalj = aff_modulation_canal['constellation']
aff_constellation_canal_titre = aff_constellation_canalj['titre'] \
if 'titre' in aff_constellation_canalj else u"Constellation de la porteuse modulée à travers le canal"
# Canal
print "> Analyse du canal de transmission"
if 'canal' not in data:
print "!!! Elément 'canal' introuvable !!!"
exit(2)
has_bruit = 'bruit' in data['canal']
if has_bruit:
if data['canal']['bruit']['type'] == 'gaussien':
if has_codage:
y_codage_bruit = canal.bruit_gaussien(y_codage, data['canal']['bruit']['intensite'])
if has_modulation:
y_modulation_bruit = canal.bruit_gaussien(y_modulation, data['canal']['bruit']['intensite'])
elif data['canal']['bruit']['type'] == 'aleatoire':
if has_codage:
y_codage_bruit = canal.bruit_aleatoire(y_codage, data['canal']['bruit']['intensite'])
if has_modulation:
y_modulation_bruit = canal.bruit_aleatoire(y_modulation, data['canal']['bruit']['intensite'])
print "#####"
print "> Affichage"
fig = 0
if aff_sequence_texte:
print "Séquence : " + outils.chaine_binaire(seq)
if aff_repartition:
print "> Affichage de la répartition de la séquence"
affichage.figure_sequence(seq, fig, bps)
fig += 1
if has_codage:
xf_codage, yf_codage = outils.calculer_spectre(ech_codage, y_codage)
if has_bruit:
xf_codage_bruit, yf_codage_bruit = outils.calculer_spectre(ech_codage, y_codage_bruit)
if aff_chronogramme_codage:
aff_chronogramme_codage_tmin = min(
ech_codage) if aff_chronogramme_codage_tmin is None else aff_chronogramme_codage_tmin
aff_chronogramme_codage_tmax = max(
ech_codage) if aff_chronogramme_codage_tmax is None else aff_chronogramme_codage_tmax
        aff_chronogramme_codage_vmin = min(
            y_codage_bruit if has_bruit else y_codage) if aff_chronogramme_codage_vmin is None else aff_chronogramme_codage_vmin
        aff_chronogramme_codage_vmax = max(
            y_codage_bruit if has_bruit else y_codage) if aff_chronogramme_codage_vmax is None else aff_chronogramme_codage_vmax
if aff_spectre_codage:
        aff_spectre_codage_fmin = min(
            xf_codage_bruit if has_bruit else xf_codage) if aff_spectre_codage_fmin is None else aff_spectre_codage_fmin
        aff_spectre_codage_fmax = max(
            xf_codage_bruit if has_bruit else xf_codage) if aff_spectre_codage_fmax is None else aff_spectre_codage_fmax
        aff_spectre_codage_vmin = min(
            yf_codage_bruit if has_bruit else yf_codage) if aff_spectre_codage_vmin is None else aff_spectre_codage_vmin
        aff_spectre_codage_vmax = max(
            yf_codage_bruit if has_bruit else yf_codage) if aff_spectre_codage_vmax is None else aff_spectre_codage_vmax
y = y_codage_bruit if has_bruit else y_codage
xf = xf_codage_bruit if has_bruit else xf_codage
yf = yf_codage_bruit if has_bruit else yf_codage
aff_diagramme_oeil_vmin = min(y_codage_bruit) if has_bruit else min(y_codage)
aff_diagramme_oeil_vmax = max(y_codage_bruit) if has_bruit else max(y_codage)
if aff_chronogramme_codage:
print "> Affichage du chronogramme de la séquence codée"
affichage.figure_chronogramme(ech_codage, y_codage, fig, aff_chronogramme_codage_titre,
aff_chronogramme_codage_xlegend, aff_chronogramme_codage_ylegend,
aff_chronogramme_codage_tmin, aff_chronogramme_codage_tmax,
aff_chronogramme_codage_vmin, aff_chronogramme_codage_vmax)
fig += 1
if aff_spectre_codage:
print "> Affichage du spectre de la séquence codée"
affichage.figure_spectre(xf_codage, yf_codage, fig, aff_spectre_codage_titre,
aff_spectre_codage_xlegend, aff_spectre_codage_ylegend,
aff_spectre_codage_fmin, aff_spectre_codage_fmax,
aff_spectre_codage_vmin, aff_spectre_codage_vmax)
fig += 1
if aff_diagramme_oeil:
print "> Affichage du diagramme de l'oeil de la séquence codée"
affichage.figure_diagramme_oeil(ech_codage, y_codage, fig, seq, aff_diagramme_oeil_vmin,
aff_diagramme_oeil_vmax, aff_diagramme_oeil_n, aff_diagramme_oeil_titre)
fig += 1
if aff_chronogramme_codage_canal:
print "> Affichage du chronogramme de la séquence codée à travers le canal"
affichage.figure_chronogramme(ech_codage, y, fig, aff_chronogramme_codage_canal_titre,
aff_chronogramme_codage_canal_xlegend, aff_chronogramme_codage_canal_ylegend,
aff_chronogramme_codage_canal_tmin, aff_chronogramme_codage_canal_tmax,
aff_chronogramme_codage_canal_vmin, aff_chronogramme_codage_canal_vmax)
fig += 1
if aff_spectre_codage_canal:
print "> Affichage du spectre de la séquence codée à travers le canal"
affichage.figure_spectre(xf, yf, fig, aff_spectre_codage_canal_titre,
aff_spectre_codage_canal_xlegend, aff_spectre_codage_canal_ylegend,
aff_spectre_codage_canal_fmin, aff_spectre_codage_canal_fmax,
aff_spectre_codage_canal_vmin, aff_spectre_codage_canal_vmax)
fig += 1
if aff_diagramme_oeil_canal:
print "> Affichage du diagramme de l'oeil de la séquence codée à travers le canal"
affichage.figure_diagramme_oeil(ech_codage, y, fig, seq, aff_diagramme_oeil_vmin, aff_diagramme_oeil_vmax,
aff_diagramme_oeil_canal_n, aff_diagramme_oeil_canal_titre)
fig += 1
if has_modulation:
xf_modulation, yf_modulation = outils.calculer_spectre(ech_modulation, y_modulation)
if has_bruit:
xf_modulation_bruit, yf_modulation_bruit = outils.calculer_spectre(ech_modulation, y_modulation_bruit)
if aff_chronogramme_modulation:
aff_chronogramme_modulation_tmin = min(
ech_modulation) if aff_chronogramme_modulation_tmin is None else aff_chronogramme_modulation_tmin
aff_chronogramme_modulation_tmax = max(
ech_modulation) if aff_chronogramme_modulation_tmax is None else aff_chronogramme_modulation_tmax
        aff_chronogramme_modulation_vmin = min(
            y_modulation_bruit if has_bruit else y_modulation) if aff_chronogramme_modulation_vmin is None else aff_chronogramme_modulation_vmin
        aff_chronogramme_modulation_vmax = max(
            y_modulation_bruit if has_bruit else y_modulation) if aff_chronogramme_modulation_vmax is None else aff_chronogramme_modulation_vmax
if aff_spectre_modulation:
        aff_spectre_modulation_fmin = min(
            xf_modulation_bruit if has_bruit else xf_modulation) if aff_spectre_modulation_fmin is None else aff_spectre_modulation_fmin
        aff_spectre_modulation_fmax = max(
            xf_modulation_bruit if has_bruit else xf_modulation) if aff_spectre_modulation_fmax is None else aff_spectre_modulation_fmax
        aff_spectre_modulation_vmin = min(
            yf_modulation_bruit if has_bruit else yf_modulation) if aff_spectre_modulation_vmin is None else aff_spectre_modulation_vmin
        aff_spectre_modulation_vmax = max(
            yf_modulation_bruit if has_bruit else yf_modulation) if aff_spectre_modulation_vmax is None else aff_spectre_modulation_vmax
y = y_modulation_bruit if has_bruit else y_modulation
xf = xf_modulation_bruit if has_bruit else xf_modulation
yf = yf_modulation_bruit if has_bruit else yf_modulation
if aff_chronogramme_modulation:
print "> Affichage du chronogramme de la porteuse modulée"
affichage.figure_chronogramme(ech_modulation, y_modulation, fig, aff_chronogramme_modulation_titre,
aff_chronogramme_modulation_xlegend, aff_chronogramme_modulation_ylegend,
aff_chronogramme_modulation_tmin, aff_chronogramme_modulation_tmax,
aff_chronogramme_modulation_vmin, aff_chronogramme_modulation_vmax)
fig += 1
if aff_spectre_modulation:
print "> Affichage du spectre de la porteuse modulée"
affichage.figure_spectre(xf_modulation, yf_modulation, fig, aff_spectre_modulation_titre,
aff_spectre_modulation_xlegend, aff_spectre_modulation_ylegend,
aff_spectre_modulation_fmin, aff_spectre_modulation_fmax,
aff_spectre_modulation_vmin, aff_spectre_modulation_vmax)
fig += 1
if aff_constellation:
print "> Affichage de la constellation de la porteuse modulée"
affichage.figure_constellation()
fig += 1
if aff_chronogramme_modulation_canal:
print "> Affichage du chronogramme de la séquence codée à travers le canal"
affichage.figure_chronogramme(ech_modulation, y, fig, aff_chronogramme_modulation_canal_titre,
aff_chronogramme_modulation_canal_xlegend,
aff_chronogramme_modulation_canal_ylegend,
aff_chronogramme_modulation_canal_tmin, aff_chronogramme_modulation_canal_tmax,
aff_chronogramme_modulation_canal_vmin, aff_chronogramme_modulation_canal_vmax)
fig += 1
if aff_spectre_modulation_canal:
print "> Affichage du spectre de la séquence codée à travers le canal"
affichage.figure_spectre(xf, yf, fig, aff_spectre_modulation_canal_titre,
aff_spectre_modulation_canal_xlegend, aff_spectre_modulation_canal_ylegend,
aff_spectre_modulation_canal_fmin, aff_spectre_modulation_canal_fmax,
aff_spectre_modulation_canal_vmin, aff_spectre_modulation_canal_vmax)
fig += 1
if aff_constellation_canal:
print "> Affichage de la constellation de la porteuse modulée à travers le canal"
affichage.figure_constellation()
fig += 1
print "> Affichage en cours..."
affichage.afficher()
| 58.791383
| 120
| 0.705905
|
e00d5f56fd01113572f2015994ffceb4f789a39e
| 1,197
|
py
|
Python
|
chroma-manager/tests/unit/chroma_core/models/test_logmessage.py
|
GarimaVishvakarma/intel-chroma
|
fdf68ed00b13643c62eb7480754d3216d9295e0b
|
[
"MIT"
] | null | null | null |
chroma-manager/tests/unit/chroma_core/models/test_logmessage.py
|
GarimaVishvakarma/intel-chroma
|
fdf68ed00b13643c62eb7480754d3216d9295e0b
|
[
"MIT"
] | null | null | null |
chroma-manager/tests/unit/chroma_core/models/test_logmessage.py
|
GarimaVishvakarma/intel-chroma
|
fdf68ed00b13643c62eb7480754d3216d9295e0b
|
[
"MIT"
] | null | null | null |
from tests.unit.lib.iml_unit_test_case import IMLUnitTestCase
from chroma_core.models import LogMessage, MessageClass
class TestLogMessage(IMLUnitTestCase):
def test_classification(self):
'''
Test the classification code correctly classfies messages.
'''
test_messages = {'Lustre: Lustre output here': MessageClass.LUSTRE,
'LustreError: Lustre output here': MessageClass.LUSTRE_ERROR,
'[NOT A TIME STAMP ] Lustre: Lustre output here': MessageClass.NORMAL,
'[1234567A89] LustreError: Not A Time Stamp': MessageClass.NORMAL,
'[123456789.123456789A] LustreError: Not A Time Stamp': MessageClass.NORMAL,
'Nothing to see here': MessageClass.NORMAL}
for with_timestamp in [False, True]:
for test_message, message_class in test_messages.iteritems():
test_message = ('[9830337.7944560] ' if with_timestamp else '') + test_message
self.assertEqual(LogMessage.get_message_class(test_message),
message_class,
test_message)
| 46.038462
| 101
| 0.614871
|
5bd3c83470fb679e53c5821d62ace5cf351b8da3
| 13,292
|
py
|
Python
|
airflow/providers/mongo/hooks/mongo.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 79
|
2021-10-15T07:32:27.000Z
|
2022-03-28T04:10:19.000Z
|
airflow/providers/mongo/hooks/mongo.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 153
|
2021-10-15T05:23:46.000Z
|
2022-02-23T06:07:10.000Z
|
airflow/providers/mongo/hooks/mongo.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 23
|
2021-10-15T02:36:37.000Z
|
2022-03-17T02:59:27.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Mongo DB"""
from ssl import CERT_NONE
from types import TracebackType
from typing import List, Optional, Type
import pymongo
from pymongo import MongoClient, ReplaceOne
from airflow.hooks.base import BaseHook
class MongoHook(BaseHook):
"""
PyMongo Wrapper to Interact With Mongo Database
Mongo Connection Documentation
https://docs.mongodb.com/manual/reference/connection-string/index.html
    You can specify connection string options in the extra field of your connection
    https://docs.mongodb.com/manual/reference/connection-string/index.html#connection-string-options
    If you want to use a DNS seedlist, set `srv` to True.
ex.
{"srv": true, "replicaSet": "test", "ssl": true, "connectTimeoutMS": 30000}
"""
conn_name_attr = 'conn_id'
default_conn_name = 'mongo_default'
conn_type = 'mongo'
hook_name = 'MongoDB'
def __init__(self, conn_id: str = default_conn_name, *args, **kwargs) -> None:
super().__init__()
self.mongo_conn_id = conn_id
self.connection = self.get_connection(conn_id)
self.extras = self.connection.extra_dejson.copy()
self.client = None
srv = self.extras.pop('srv', False)
scheme = 'mongodb+srv' if srv else 'mongodb'
self.uri = '{scheme}://{creds}{host}{port}/{database}'.format(
scheme=scheme,
creds=f'{self.connection.login}:{self.connection.password}@' if self.connection.login else '',
host=self.connection.host,
port='' if self.connection.port is None else f':{self.connection.port}',
database=self.connection.schema,
)
def __enter__(self):
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
if self.client is not None:
self.close_conn()
def get_conn(self) -> MongoClient:
"""Fetches PyMongo Client"""
if self.client is not None:
return self.client
# Mongo Connection Options dict that is unpacked when passed to MongoClient
options = self.extras
# If we are using SSL disable requiring certs from specific hostname
if options.get('ssl', False):
options.update({'ssl_cert_reqs': CERT_NONE})
self.client = MongoClient(self.uri, **options)
return self.client
def close_conn(self) -> None:
"""Closes connection"""
client = self.client
if client is not None:
client.close()
self.client = None
def get_collection(
self, mongo_collection: str, mongo_db: Optional[str] = None
) -> pymongo.collection.Collection:
"""
Fetches a mongo collection object for querying.
Uses connection schema as DB unless specified.
"""
mongo_db = mongo_db if mongo_db is not None else self.connection.schema
mongo_conn: MongoClient = self.get_conn()
return mongo_conn.get_database(mongo_db).get_collection(mongo_collection)
def aggregate(
self, mongo_collection: str, aggregate_query: list, mongo_db: Optional[str] = None, **kwargs
) -> pymongo.command_cursor.CommandCursor:
"""
Runs an aggregation pipeline and returns the results
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.aggregate
https://api.mongodb.com/python/current/examples/aggregation.html
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.aggregate(aggregate_query, **kwargs)
def find(
self,
mongo_collection: str,
query: dict,
find_one: bool = False,
mongo_db: Optional[str] = None,
**kwargs,
) -> pymongo.cursor.Cursor:
"""
Runs a mongo find query and returns the results
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.find
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
if find_one:
return collection.find_one(query, **kwargs)
else:
return collection.find(query, **kwargs)
def insert_one(
self, mongo_collection: str, doc: dict, mongo_db: Optional[str] = None, **kwargs
) -> pymongo.results.InsertOneResult:
"""
Inserts a single document into a mongo collection
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.insert_one
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.insert_one(doc, **kwargs)
def insert_many(
        self, mongo_collection: str, docs: List[dict], mongo_db: Optional[str] = None, **kwargs
) -> pymongo.results.InsertManyResult:
"""
Inserts many docs into a mongo collection.
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.insert_many
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.insert_many(docs, **kwargs)
def update_one(
self,
mongo_collection: str,
filter_doc: dict,
update_doc: dict,
mongo_db: Optional[str] = None,
**kwargs,
) -> pymongo.results.UpdateResult:
"""
Updates a single document in a mongo collection.
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.update_one
:param mongo_collection: The name of the collection to update.
:type mongo_collection: str
:param filter_doc: A query that matches the documents to update.
:type filter_doc: dict
:param update_doc: The modifications to apply.
:type update_doc: dict
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
:type mongo_db: str
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.update_one(filter_doc, update_doc, **kwargs)
def update_many(
self,
mongo_collection: str,
filter_doc: dict,
update_doc: dict,
mongo_db: Optional[str] = None,
**kwargs,
) -> pymongo.results.UpdateResult:
"""
Updates one or more documents in a mongo collection.
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.update_many
:param mongo_collection: The name of the collection to update.
:type mongo_collection: str
:param filter_doc: A query that matches the documents to update.
:type filter_doc: dict
:param update_doc: The modifications to apply.
:type update_doc: dict
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
:type mongo_db: str
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.update_many(filter_doc, update_doc, **kwargs)
def replace_one(
self,
mongo_collection: str,
doc: dict,
filter_doc: Optional[dict] = None,
mongo_db: Optional[str] = None,
**kwargs,
) -> pymongo.results.UpdateResult:
"""
Replaces a single document in a mongo collection.
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.replace_one
.. note::
            If no ``filter_doc`` is given, it is assumed that the replacement
            document contains the ``_id`` field, which is then used as the filter.
:param mongo_collection: The name of the collection to update.
:type mongo_collection: str
:param doc: The new document.
:type doc: dict
:param filter_doc: A query that matches the documents to replace.
Can be omitted; then the _id field from doc will be used.
:type filter_doc: dict
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
:type mongo_db: str
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
if not filter_doc:
filter_doc = {'_id': doc['_id']}
return collection.replace_one(filter_doc, doc, **kwargs)
def replace_many(
self,
mongo_collection: str,
docs: List[dict],
filter_docs: Optional[List[dict]] = None,
mongo_db: Optional[str] = None,
upsert: bool = False,
collation: Optional[pymongo.collation.Collation] = None,
**kwargs,
) -> pymongo.results.BulkWriteResult:
"""
Replaces many documents in a mongo collection.
Uses bulk_write with multiple ReplaceOne operations
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.bulk_write
.. note::
            If no ``filter_docs`` are given, it is assumed that all
            replacement documents contain the ``_id`` field, which is then
            used as the filter.
:param mongo_collection: The name of the collection to update.
:type mongo_collection: str
:param docs: The new documents.
:type docs: list[dict]
:param filter_docs: A list of queries that match the documents to replace.
Can be omitted; then the _id fields from docs will be used.
:type filter_docs: list[dict]
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
:type mongo_db: str
:param upsert: If ``True``, perform an insert if no documents
match the filters for the replace operation.
:type upsert: bool
:param collation: An instance of
:class:`~pymongo.collation.Collation`. This option is only
supported on MongoDB 3.4 and above.
:type collation: pymongo.collation.Collation
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
if not filter_docs:
filter_docs = [{'_id': doc['_id']} for doc in docs]
requests = [
ReplaceOne(filter_docs[i], docs[i], upsert=upsert, collation=collation) for i in range(len(docs))
]
return collection.bulk_write(requests, **kwargs)
def delete_one(
self, mongo_collection: str, filter_doc: dict, mongo_db: Optional[str] = None, **kwargs
) -> pymongo.results.DeleteResult:
"""
Deletes a single document in a mongo collection.
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.delete_one
:param mongo_collection: The name of the collection to delete from.
:type mongo_collection: str
:param filter_doc: A query that matches the document to delete.
:type filter_doc: dict
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
:type mongo_db: str
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.delete_one(filter_doc, **kwargs)
def delete_many(
self, mongo_collection: str, filter_doc: dict, mongo_db: Optional[str] = None, **kwargs
) -> pymongo.results.DeleteResult:
"""
Deletes one or more documents in a mongo collection.
https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.delete_many
:param mongo_collection: The name of the collection to delete from.
:type mongo_collection: str
:param filter_doc: A query that matches the documents to delete.
:type filter_doc: dict
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
:type mongo_db: str
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.delete_many(filter_doc, **kwargs)
| 38.416185
| 116
| 0.658968
|
4f494942e4ed4d3ecb4b8d367fe2e21983a1367c
| 563
|
py
|
Python
|
src/asana/logic.py
|
isabella232/SGTM
|
3793d78e99f89e5f73bac5c44f9d8a18cac75fbf
|
[
"MIT"
] | 8
|
2020-12-05T00:13:03.000Z
|
2022-01-11T11:35:51.000Z
|
src/asana/logic.py
|
Asana/SGTM
|
0e9e236980ed68e80e021470da6374945bbac501
|
[
"MIT"
] | 12
|
2020-12-14T18:21:21.000Z
|
2022-03-29T17:06:20.000Z
|
src/asana/logic.py
|
isabella232/SGTM
|
3793d78e99f89e5f73bac5c44f9d8a18cac75fbf
|
[
"MIT"
] | 2
|
2021-06-27T09:32:55.000Z
|
2022-02-27T23:17:36.000Z
|
from src.github.models import PullRequest
from src.github.helpers import pull_request_has_label
from enum import Enum, unique
from src.config import SGTM_FEATURE__AUTOCOMPLETE_ENABLED
@unique
class AutocompleteLabel(Enum):
COMPLETE_ON_MERGE = "complete tasks on merge"
def should_autocomplete_tasks_on_merge(pull_request: PullRequest) -> bool:
return (
SGTM_FEATURE__AUTOCOMPLETE_ENABLED
and pull_request.merged()
and pull_request_has_label(
pull_request, AutocompleteLabel.COMPLETE_ON_MERGE.value
)
)
| 26.809524
| 74
| 0.765542
|
c52982055e46a09e15971d5a665fb55324773f7e
| 3,185
|
py
|
Python
|
matching/matching_helpers.py
|
seanmchu/algo-research
|
199964b7ce376a88e248349946538cb2159c4797
|
[
"MIT"
] | null | null | null |
matching/matching_helpers.py
|
seanmchu/algo-research
|
199964b7ce376a88e248349946538cb2159c4797
|
[
"MIT"
] | null | null | null |
matching/matching_helpers.py
|
seanmchu/algo-research
|
199964b7ce376a88e248349946538cb2159c4797
|
[
"MIT"
] | null | null | null |
import numpy as np
import math
import copy
import networkx as nx
import re
from classes import *
#student matching, seat matching, max rank --> matrix
def build_matrix(m_student,m_seat,maxrank):
matrix = np.zeros((len(m_student),len(m_seat)))
for i in range(0,len(m_student)):
if (m_student[i] != -1):
matrix[i][m_student[i]] = 1
return matrix
#matrix,edges --> int,int
def get_metrics(matrix,edges):
r1 = 0
r2 = 0
for i in range(len(matrix)):
for j in range(len(matrix[i])):
if (matrix[i][j] == 1):
if (edges[i][j] == 1):
r1 += 1
elif (edges[i][j] == 2):
r2 += 1
return r1,r2
#matrix --> void
def remove_k_edges(matrix,n_remove,m_student,k,gedges):
n_removed = 0
for i in range(len(matrix)):
if (n_removed == n_remove):
return 1
for j in range(len(matrix[i])):
if (matrix[i][j] and gedges[i][j] == k):
matrix[i][j] = 0
n_removed += 1
break
return 0
#M <- M ⊕ P
#matrix,path_s,path_v --> void (modifies matrix in place)
def symmdiff(matrix,path_s,path_v):
#First, build a list of edges based on path_s, path_v
edges = []
for i in range(0,len(path_s)):
for j in range(i - 1,i + 1):
if (j >= 0 and j < len(path_v)):
edges.append((path_s[i],path_v[j]))
for i,j in edges:
if (matrix[i][j] == 0):
matrix[i][j] = 1
else:
matrix[i][j] = 0
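# Example: with path_s = [s0, s1] and path_v = [v0, v1] the toggled edges are
# (s0, v0), (s1, v0) and (s1, v1); if only (s1, v0) was matched before, the matching
# becomes {(s0, v0), (s1, v1)}, i.e. it grows by one along the augmenting path.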
#Checks whether the seat is unmatched in the matrix
#matrix,seat --> Bool
def aug_path(matrix,seat):
for i in range(0,len(matrix)):
if(matrix[i][seat]):
return False
return True
#matrix,student,seat --> void
def remove_pair(matrix,student,seat):
matrix[student][seat] = 0
#matrix --> int
def get_matching_size(m):
size = 0
for i in m:
if (i != -1):
size += 1
return size
# matrix --> list
def find_unmatched_students(matrix):
m1, _ = matrix_to_matching(matrix)
l = []
for i in range(0,len(m1)):
if (m1[i] == -1):
l.append(i)
return l
#Converts matrix to matching in numpy syntax
def matrix_to_matching(matrix):
m1 = [-1] * len(matrix)
m2 = [-1] * len(matrix[0])
for i in range(0,len(matrix)):
for j in range(0,len(matrix[i])):
if (matrix[i][j] == 1):
m1[i] = j
m2[j] = i
return m1,m2
#Gets number of rank 1 and rank 2 seats filled in the matching
#Specifc for A-S algorithm, because uses networkx matching
def get_seat_metrics(edges,matching):
r1 = 0
r2 = 0
for left in matching["bridge"]:
if (matching['bridge'][left] != 0):
lindex = re.findall("(\d+)",left)[0]
for edge in matching[left]:
if(matching[left][edge] != 0):
rindex = re.findall("(\d+)",edge)[0]
if (edges[int(lindex)][int(rindex)] == 1):
r1 += 1
elif (edges[int(lindex)][int(rindex)] == 2):
r2 += 1
return r1, r2
| 27.222222
| 65
| 0.523391
|
ee8505e793f92726c460c0232431ff7df319e427
| 1,022
|
py
|
Python
|
examples/BigBoy_refactor/players/RandomEnvPlayer.py
|
attraylor/poke-env
|
05eb57800c16229ec683762e628aacb0b6dd9cc3
|
[
"MIT"
] | 4
|
2020-09-15T15:24:57.000Z
|
2021-03-02T19:48:24.000Z
|
examples/BigBoy_refactor/players/RandomEnvPlayer.py
|
attraylor/poke-env
|
05eb57800c16229ec683762e628aacb0b6dd9cc3
|
[
"MIT"
] | 10
|
2021-11-01T10:20:30.000Z
|
2022-03-29T10:27:25.000Z
|
examples/BigBoy_refactor/players/RandomEnvPlayer.py
|
attraylor/poke-env
|
05eb57800c16229ec683762e628aacb0b6dd9cc3
|
[
"MIT"
] | 1
|
2021-03-08T16:02:46.000Z
|
2021-03-08T16:02:46.000Z
|
from poke_env.player.env_player import (
Gen8EnvSinglePlayer,
)
import numpy as np
from poke_env.player_configuration import _create_player_configuration_from_player
class RandomEnvPlayer(Gen8EnvSinglePlayer):
def __init__(self,
name,
shortname,
team,
battle_format="gen8ou",
log_level = 0,
server_configuration=None,
save_replays=False):
self.shortname = shortname
self.name = name
pc = _create_player_configuration_from_player(self)
super().__init__(player_configuration = pc,
team=team,
battle_format=battle_format,
log_level = log_level,
server_configuration=server_configuration,
save_replays=save_replays)
def embed_battle(self, battle):
return np.array([0])
def select_action(self, state=None, action_mask=None, test=None, current_step=None):
if action_mask is not None:
action_indices = [i for i in range(len(action_mask)) if action_mask[i] == 1]
return np.random.choice(action_indices)
        else:  # shouldn't happen
return 0
| 28.388889
| 85
| 0.744618
|
2abb50d97df8a9f5cfd6bafbfd64969c078041b7
| 702
|
py
|
Python
|
keymint_keymake/exceptions.py
|
keymint/keymint_keymake
|
adc38e07ce5f16d6ba4b36294d7d2e8a361153f0
|
[
"Apache-2.0"
] | null | null | null |
keymint_keymake/exceptions.py
|
keymint/keymint_keymake
|
adc38e07ce5f16d6ba4b36294d7d2e8a361153f0
|
[
"Apache-2.0"
] | null | null | null |
keymint_keymake/exceptions.py
|
keymint/keymint_keymake
|
adc38e07ce5f16d6ba4b36294d7d2e8a361153f0
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class InvalidPermissionsXML(Exception):
pass
class InvalidGovernanceXML(Exception):
pass
| 31.909091
| 74
| 0.764957
|
0ede495483f48b3ddd3c80d53494045b5ef84947
| 1,995
|
py
|
Python
|
postgresqleu/braintreepayment/management/commands/send_braintree_logreport.py
|
bradfordboyle/pgeu-system
|
bbe70e7a94092c10f11a0f74fda23079532bb018
|
[
"MIT"
] | 11
|
2020-08-20T11:16:02.000Z
|
2022-03-12T23:25:04.000Z
|
postgresqleu/braintreepayment/management/commands/send_braintree_logreport.py
|
bradfordboyle/pgeu-system
|
bbe70e7a94092c10f11a0f74fda23079532bb018
|
[
"MIT"
] | 71
|
2019-11-18T10:11:22.000Z
|
2022-03-27T16:12:57.000Z
|
postgresqleu/braintreepayment/management/commands/send_braintree_logreport.py
|
bradfordboyle/pgeu-system
|
bbe70e7a94092c10f11a0f74fda23079532bb018
|
[
"MIT"
] | 18
|
2019-11-18T09:56:31.000Z
|
2022-01-08T03:16:43.000Z
|
# This script sends out reports of errors in the Braintree integration
# as a summary email.
#
# Copyright (C) 2015-2019, PostgreSQL Europe
#
from django.core.management.base import BaseCommand
from django.db import transaction
from django.conf import settings
from datetime import time
from io import StringIO
from postgresqleu.invoices.models import InvoicePaymentMethod
from postgresqleu.braintreepayment.models import BraintreeLog
from postgresqleu.mailqueue.util import send_simple_mail
class Command(BaseCommand):
help = 'Send log information about Braintree events'
class ScheduledJob:
scheduled_times = [time(23, 32), ]
internal = True
@classmethod
def should_run(self):
return InvoicePaymentMethod.objects.filter(active=True, classname='postgresqleu.util.payment.braintree.Braintree').exists()
def handle(self, *args, **options):
for method in InvoicePaymentMethod.objects.filter(active=True, classname='postgresqleu.util.payment.braintree.Braintree'):
self.send_for_method(method)
@transaction.atomic
def send_for_method(self, method):
pm = method.get_implementation()
lines = list(BraintreeLog.objects.filter(error=True, sent=False, paymentmethod=method).order_by('timestamp'))
if len(lines):
sio = StringIO()
sio.write("The following error events have been logged by the Braintree integration:\n\n")
for l in lines:
sio.write("%s: %20s: %s\n" % (l.timestamp, l.transid, l.message))
l.sent = True
l.save()
sio.write("\n\n\nAll these events have now been tagged as sent, and will no longer be\nprocessed by the system in any way.\n")
send_simple_mail(settings.INVOICE_SENDER_EMAIL,
pm.config('notification_receiver'),
'Braintree integration error report',
sio.getvalue())
| 39.117647
| 138
| 0.673684
|
c92e1452faae360cf94c8dd80df8222ba4aea7fc
| 697
|
py
|
Python
|
tests/test_opioid.py
|
yoshavit/whynot
|
e33e56bae377b65fe87feac5c6246ae38f4586e8
|
[
"MIT"
] | 376
|
2020-03-20T20:09:16.000Z
|
2022-03-29T09:53:33.000Z
|
tests/test_opioid.py
|
mrtzh/whynot
|
0668f0a0c1e80defec6e4678f85ed60f45226477
|
[
"MIT"
] | 5
|
2020-04-20T10:19:34.000Z
|
2021-11-03T09:36:28.000Z
|
tests/test_opioid.py
|
mrtzh/whynot
|
0668f0a0c1e80defec6e4678f85ed60f45226477
|
[
"MIT"
] | 41
|
2020-03-20T23:14:38.000Z
|
2022-03-09T06:02:01.000Z
|
"""Unit tests for opioid epidemic simulator."""
import whynot as wn
def test_config():
"""Ensure intervention update works as expected."""
intervention = wn.opioid.Intervention(time=2021, nonmedical_incidence=-0.12)
config = wn.opioid.Config()
assert config.nonmedical_incidence.intervention_val == 0.0
config = config.update(intervention)
assert config.nonmedical_incidence.intervention_val == -0.12
intervention = wn.opioid.Intervention(time=2021, illicit_incidence=1.2)
config = wn.opioid.Config()
assert config.illicit_incidence.intervention_val == 0.0
config = config.update(intervention)
assert config.illicit_incidence.intervention_val == 1.2
| 38.722222
| 80
| 0.747489
|
7507c655db5646a26e79a14aaa043ab5cc9e561a
| 853
|
py
|
Python
|
tofino_test_builds/09_smaller_than_table.test/codegen.py
|
gycsaba96/P4RROT
|
aa10d2063d566450674e4798e6f713e49877a604
|
[
"MIT"
] | null | null | null |
tofino_test_builds/09_smaller_than_table.test/codegen.py
|
gycsaba96/P4RROT
|
aa10d2063d566450674e4798e6f713e49877a604
|
[
"MIT"
] | null | null | null |
tofino_test_builds/09_smaller_than_table.test/codegen.py
|
gycsaba96/P4RROT
|
aa10d2063d566450674e4798e6f713e49877a604
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append('../../src/')
from p4rrot.generator_tools import *
from p4rrot.known_types import *
from p4rrot.core.commands import *
from p4rrot.tofino.commands import *
UID.reset()
fp = FlowProcessor(
istruct=[('a',uint32_t),('b',uint32_t),('x',bool_t)],
method='MODIFY'
)
fp.add(AssignConst('x',True,env=fp.get_env()))
fp.add(SmallerThanTable('x','a','b',env=fp.get_env()))
fp.add(If('x',env=fp.get_env()
,then_block=Block(env=fp.get_env()).add(Increment('a',5,env=fp.get_env()))
,else_block=Block(env=fp.get_env()).add(Decrement('a',5,env=fp.get_env()))
)
)
fs = FlowSelector(
'IPV4_UDP',
[(UdpDstPort,5555)],
fp
)
solution = Solution()
solution.add_flow_processor(fp)
solution.add_flow_selector(fs)
solution.get_generated_code().dump('test.p4app')
| 25.088235
| 82
| 0.645955
|
cdca50f24fb5f8be088c07ad5dc6ce10403a5512
| 4,151
|
py
|
Python
|
rbi2/inte5.py
|
spottedzebra/interpreter
|
b5b2a735d771fbfe2842e4c36176f2bc8c1761c3
|
[
"MIT"
] | 2
|
2016-10-22T11:55:07.000Z
|
2020-07-23T20:56:15.000Z
|
rbi2/inte5.py
|
mwhit74/interpreter
|
b5b2a735d771fbfe2842e4c36176f2bc8c1761c3
|
[
"MIT"
] | null | null | null |
rbi2/inte5.py
|
mwhit74/interpreter
|
b5b2a735d771fbfe2842e4c36176f2bc8c1761c3
|
[
"MIT"
] | null | null | null |
import string
import pdb
from collections import namedtuple
ADD, SUB, MUL, DIV, CHAR, NUM, EOF = ('ADD', 'SUB', 'MUL',
'DIV','CHAR', 'NUM', 'EOF')
WHITESPACE = string.whitespace
Token = namedtuple('Token',['token_type', 'token_value'])
class Lexer(object):
def __init__(self, text):
self.text = text
self.pos = 0
self.cur_char = self.text[self.pos]
def error(self):
raise ValueError('Invalid character')
def get_next_char(self):
self.pos += 1
if self.pos <= len(self.text) - 1:
self.cur_char = self.text[self.pos]
else:
self.cur_char = None
def get_whitespace(self):
value = ''
while self.cur_char != None and self.cur_char in WHITESPACE:
value = value + self.cur_char
self.get_next_char()
def get_num(self):
value = ''
while self.cur_char != None and self.cur_char.isdigit():
value = value + self.cur_char
self.get_next_char()
return int(value)
def get_chars(self):
value = ''
while self.cur_char != None and self.cur_char.isalpha():
value = value + self.cur_char
self.get_next_char()
return value
def get_next_token(self):
while self.cur_char != None:
if self.cur_char in WHITESPACE:
value = self.get_whitespace()
if self.cur_char.isdigit():
value = self.get_num()
return Token(NUM, value)
if self.cur_char.isalpha():
value = self.get_chars()
return Token(CHAR, value)
if self.cur_char == '+':
token = Token(ADD, self.cur_char)
self.get_next_char()
return token
if self.cur_char == '-':
token = Token(SUB, self.cur_char)
self.get_next_char()
return token
if self.cur_char == '*':
token = Token(MUL, self.cur_char)
self.get_next_char()
return token
if self.cur_char == '/':
token = Token(DIV, self.cur_char)
self.get_next_char()
return token
self.error()
return Token(EOF, None)
class Interpreter(object):
def __init__(self, lexer):
self.lexer = lexer
self.cur_token = self.lexer.get_next_token()
def error(self):
raise SyntaxError('Invalid syntax')
def check_token_type(self, token_type):
if self.cur_token.token_type == token_type:
self.cur_token = self.lexer.get_next_token()
else:
self.error()
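    # Grammar implemented by the methods below (informal sketch):
    #   expr1  : expr2 ((ADD | SUB) expr2)*
    #   expr2  : factor ((MUL | DIV) factor)*
    #   factor : NUM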
def expr1(self):
result = self.expr2()
while (self.cur_token.token_type != EOF and
self.cur_token.token_type in (ADD, SUB)):
if self.cur_token.token_type == ADD:
self.check_token_type(ADD)
result = result + self.expr2()
if self.cur_token.token_type == SUB:
self.check_token_type(SUB)
result = result - self.expr2()
return result
def expr2(self):
result = self.factor()
while (self.cur_token.token_type != EOF and
self.cur_token.token_type in (MUL, DIV)):
if self.cur_token.token_type == MUL:
self.check_token_type(MUL)
result = result * self.factor()
if self.cur_token.token_type == DIV:
self.check_token_type(DIV)
                result = result / self.factor()
return result
def factor(self):
value = self.cur_token.token_value
self.check_token_type(NUM)
return value
def main():
while True:
try:
text = input('calc>')
        except Exception:
continue
lexer = Lexer(text)
interpreter = Interpreter(lexer)
result = interpreter.expr1()
print(result)
if __name__ == "__main__":
main()
| 27.130719
| 68
| 0.528788
|
44a59b441b708b4af30b371f0a4e27ce7791b555
| 3,644
|
py
|
Python
|
src/leetcode_2058_find_the_minimum_and_maximum_number_of_nodes_between_critical_points.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
src/leetcode_2058_find_the_minimum_and_maximum_number_of_nodes_between_critical_points.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
src/leetcode_2058_find_the_minimum_and_maximum_number_of_nodes_between_critical_points.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
# @l2g 2058 python3
# [2058] Find the Minimum and Maximum Number of Nodes Between Critical Points
# Difficulty: Medium
# https://leetcode.com/problems/find-the-minimum-and-maximum-number-of-nodes-between-critical-points
#
# A critical point in a linked list is defined as either a local maxima or a local minima.
# A node is a local maxima if the current node has a value strictly greater than the previous node and the next node.
# A node is a local minima if the current node has a value strictly smaller than the previous node and the next node.
# Note that a node can only be a local maxima/minima if there exists both a previous node and a next node.
# Given a linked list head,return an array of length 2 containing [minDistance,
# maxDistance] where minDistance is the minimum distance between any two distinct critical points and maxDistance is the maximum distance between any two distinct critical points.
# If there are fewer than two critical points,return [-1,-1].
#
# Example 1:
#
#
# Input: head = [3,1]
# Output: [-1,-1]
# Explanation: There are no critical points in [3,1].
#
# Example 2:
#
#
# Input: head = [5,3,1,2,5,1,2]
# Output: [1,3]
# Explanation: There are three critical points:
# - [5,3,1,2,5,1,2]: The third node is a local minima because 1 is less than 3 and 2.
# - [5,3,1,2,5,1,2]: The fifth node is a local maxima because 5 is greater than 2 and 1.
# - [5,3,1,2,5,1,2]: The sixth node is a local minima because 1 is less than 5 and 2.
# The minimum distance is between the fifth and the sixth node. minDistance = 6 - 5 = 1.
# The maximum distance is between the third and the sixth node. maxDistance = 6 - 3 = 3.
#
# Example 3:
#
#
# Input: head = [1,3,2,2,3,2,2,2,7]
# Output: [3,3]
# Explanation: There are two critical points:
# - [1,3,2,2,3,2,2,2,7]: The second node is a local maxima because 3 is greater than 1 and 2.
# - [1,3,2,2,3,2,2,2,7]: The fifth node is a local maxima because 3 is greater than 2 and 2.
# Both the minimum and maximum distances are between the second and the fifth node.
# Thus, minDistance and maxDistance is 5 - 2 = 3.
# Note that the last node is not considered a local maxima because it does not have a next node.
#
# Example 4:
#
#
# Input: head = [2,3,3,2]
# Output: [-1,-1]
# Explanation: There are no critical points in [2,3,3,2].
#
#
# Constraints:
#
# The number of nodes in the list is in the range [2, 10^5].
# 1 <= Node.val <= 10^5
#
#
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
from typing import List, Optional
class Solution:
def nodesBetweenCriticalPoints(self, head: Optional[ListNode]) -> List[int]:
critical_points = []
prev = head
cur = head.next
next_node = head.next.next
pos = 1
while next_node:
# maxima
if prev.val < cur.val and next_node.val < cur.val:
critical_points.append(pos)
# minima
elif cur.val < prev.val and cur.val < next_node.val:
critical_points.append(pos)
prev, cur, next_node = prev.next, cur.next, next_node.next
pos += 1
if len(critical_points) < 2:
return [-1, -1]
min_dist = float("inf")
for i in range(1, len(critical_points)):
min_dist = min(min_dist, critical_points[i] - critical_points[i - 1])
max_dist = critical_points[-1] - critical_points[0]
return [min_dist, max_dist]
if __name__ == "__main__":
import os
import pytest
pytest.main([os.path.join("tests", "test_2058.py")])
| 35.038462
| 179
| 0.662733
|
6a2878aad6455f4029f2ade2e73fa5bfc9dd1f88
| 37,511
|
py
|
Python
|
detectron2/detectron2/modeling/roi_heads/roi_heads.py
|
sean-zhuh/SA-AutoAug
|
cb9403fe01cbd30d8b14bca106fd771586f1b89f
|
[
"BSD-2-Clause"
] | 100
|
2021-05-23T08:21:32.000Z
|
2022-03-31T17:47:56.000Z
|
detectron2/detectron2/modeling/roi_heads/roi_heads.py
|
sean-zhuh/SA-AutoAug
|
cb9403fe01cbd30d8b14bca106fd771586f1b89f
|
[
"BSD-2-Clause"
] | 7
|
2021-05-26T08:45:14.000Z
|
2021-12-02T08:23:34.000Z
|
detectron2/detectron2/modeling/roi_heads/roi_heads.py
|
sean-zhuh/SA-AutoAug
|
cb9403fe01cbd30d8b14bca106fd771586f1b89f
|
[
"BSD-2-Clause"
] | 11
|
2021-05-23T02:07:15.000Z
|
2022-02-28T13:14:45.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
import inspect
import logging
import numpy as np
from typing import Dict, List, Optional, Tuple
import torch
from torch import nn
from detectron2.config import configurable
from detectron2.layers import ShapeSpec, nonzero_tuple
from detectron2.structures import Boxes, ImageList, Instances, pairwise_iou
from detectron2.utils.events import get_event_storage
from detectron2.utils.registry import Registry
from ..backbone.resnet import BottleneckBlock, ResNet
from ..matcher import Matcher
from ..poolers import ROIPooler
from ..proposal_generator.proposal_utils import add_ground_truth_to_proposals
from ..sampling import subsample_labels
from .box_head import build_box_head
from .fast_rcnn import FastRCNNOutputLayers
from .keypoint_head import build_keypoint_head
from .mask_head import build_mask_head
ROI_HEADS_REGISTRY = Registry("ROI_HEADS")
ROI_HEADS_REGISTRY.__doc__ = """
Registry for ROI heads in a generalized R-CNN model.
ROIHeads take feature maps and region proposals, and
perform per-region computation.
The registered object will be called with `obj(cfg, input_shape)`.
The call is expected to return an :class:`ROIHeads`.
"""
logger = logging.getLogger(__name__)
def build_roi_heads(cfg, input_shape):
"""
Build ROIHeads defined by `cfg.MODEL.ROI_HEADS.NAME`.
"""
name = cfg.MODEL.ROI_HEADS.NAME
return ROI_HEADS_REGISTRY.get(name)(cfg, input_shape)
def select_foreground_proposals(
proposals: List[Instances], bg_label: int
) -> Tuple[List[Instances], List[torch.Tensor]]:
"""
Given a list of N Instances (for N images), each containing a `gt_classes` field,
return a list of Instances that contain only instances with `gt_classes != -1 &&
gt_classes != bg_label`.
Args:
proposals (list[Instances]): A list of N Instances, where N is the number of
images in the batch.
bg_label: label index of background class.
Returns:
list[Instances]: N Instances, each contains only the selected foreground instances.
list[Tensor]: N boolean vector, correspond to the selection mask of
each Instances object. True for selected instances.
"""
assert isinstance(proposals, (list, tuple))
assert isinstance(proposals[0], Instances)
assert proposals[0].has("gt_classes")
fg_proposals = []
fg_selection_masks = []
for proposals_per_image in proposals:
gt_classes = proposals_per_image.gt_classes
fg_selection_mask = (gt_classes != -1) & (gt_classes != bg_label)
fg_idxs = fg_selection_mask.nonzero().squeeze(1)
fg_proposals.append(proposals_per_image[fg_idxs])
fg_selection_masks.append(fg_selection_mask)
return fg_proposals, fg_selection_masks
def select_proposals_with_visible_keypoints(proposals: List[Instances]) -> List[Instances]:
"""
Args:
proposals (list[Instances]): a list of N Instances, where N is the
number of images.
Returns:
proposals: only contains proposals with at least one visible keypoint.
Note that this is still slightly different from Detectron.
In Detectron, proposals for training keypoint head are re-sampled from
all the proposals with IOU>threshold & >=1 visible keypoint.
Here, the proposals are first sampled from all proposals with
IOU>threshold, then proposals with no visible keypoint are filtered out.
This strategy seems to make no difference on Detectron and is easier to implement.
"""
ret = []
all_num_fg = []
for proposals_per_image in proposals:
# If empty/unannotated image (hard negatives), skip filtering for train
if len(proposals_per_image) == 0:
ret.append(proposals_per_image)
continue
gt_keypoints = proposals_per_image.gt_keypoints.tensor
# #fg x K x 3
vis_mask = gt_keypoints[:, :, 2] >= 1
xs, ys = gt_keypoints[:, :, 0], gt_keypoints[:, :, 1]
proposal_boxes = proposals_per_image.proposal_boxes.tensor.unsqueeze(dim=1) # #fg x 1 x 4
kp_in_box = (
(xs >= proposal_boxes[:, :, 0])
& (xs <= proposal_boxes[:, :, 2])
& (ys >= proposal_boxes[:, :, 1])
& (ys <= proposal_boxes[:, :, 3])
)
selection = (kp_in_box & vis_mask).any(dim=1)
selection_idxs = nonzero_tuple(selection)[0]
all_num_fg.append(selection_idxs.numel())
ret.append(proposals_per_image[selection_idxs])
storage = get_event_storage()
storage.put_scalar("keypoint_head/num_fg_samples", np.mean(all_num_fg))
return ret
class ROIHeads(torch.nn.Module):
"""
ROIHeads perform all per-region computation in an R-CNN.
It typically contains logic to
1. (in training only) match proposals with ground truth and sample them
2. crop the regions and extract per-region features using proposals
3. make per-region predictions with different heads
It can have many variants, implemented as subclasses of this class.
This base class contains the logic to match/sample proposals.
But it is not necessary to inherit this class if the sampling logic is not needed.
"""
@configurable
def __init__(
self,
*,
num_classes,
batch_size_per_image,
positive_fraction,
proposal_matcher,
proposal_append_gt=True
):
"""
NOTE: this interface is experimental.
Args:
num_classes (int): number of foreground classes (i.e. background is not included)
batch_size_per_image (int): number of proposals to sample for training
positive_fraction (float): fraction of positive (foreground) proposals
to sample for training.
proposal_matcher (Matcher): matcher that matches proposals and ground truth
proposal_append_gt (bool): whether to include ground truth as proposals as well
"""
super().__init__()
self.batch_size_per_image = batch_size_per_image
self.positive_fraction = positive_fraction
self.num_classes = num_classes
self.proposal_matcher = proposal_matcher
self.proposal_append_gt = proposal_append_gt
@classmethod
def from_config(cls, cfg):
return {
"batch_size_per_image": cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE,
"positive_fraction": cfg.MODEL.ROI_HEADS.POSITIVE_FRACTION,
"num_classes": cfg.MODEL.ROI_HEADS.NUM_CLASSES,
"proposal_append_gt": cfg.MODEL.ROI_HEADS.PROPOSAL_APPEND_GT,
# Matcher to assign box proposals to gt boxes
"proposal_matcher": Matcher(
cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS,
cfg.MODEL.ROI_HEADS.IOU_LABELS,
allow_low_quality_matches=False,
),
}
def _sample_proposals(
self, matched_idxs: torch.Tensor, matched_labels: torch.Tensor, gt_classes: torch.Tensor
) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Based on the matching between N proposals and M groundtruth,
sample the proposals and set their classification labels.
Args:
matched_idxs (Tensor): a vector of length N, each is the best-matched
gt index in [0, M) for each proposal.
matched_labels (Tensor): a vector of length N, the matcher's label
(one of cfg.MODEL.ROI_HEADS.IOU_LABELS) for each proposal.
gt_classes (Tensor): a vector of length M.
Returns:
Tensor: a vector of indices of sampled proposals. Each is in [0, N).
Tensor: a vector of the same length, the classification label for
each sampled proposal. Each sample is labeled as either a category in
[0, num_classes) or the background (num_classes).
"""
has_gt = gt_classes.numel() > 0
# Get the corresponding GT for each proposal
if has_gt:
gt_classes = gt_classes[matched_idxs]
# Label unmatched proposals (0 label from matcher) as background (label=num_classes)
gt_classes[matched_labels == 0] = self.num_classes
# Label ignore proposals (-1 label)
gt_classes[matched_labels == -1] = -1
else:
gt_classes = torch.zeros_like(matched_idxs) + self.num_classes
sampled_fg_idxs, sampled_bg_idxs = subsample_labels(
gt_classes, self.batch_size_per_image, self.positive_fraction, self.num_classes
)
sampled_idxs = torch.cat([sampled_fg_idxs, sampled_bg_idxs], dim=0)
return sampled_idxs, gt_classes[sampled_idxs]
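    # Worked example (illustrative): with num_classes=80 and
    #   matched_idxs   = [2, 0, 1]   (best-matching gt index per proposal)
    #   matched_labels = [1, 0, -1]  (fg / bg / ignore labels from the Matcher)
    #   gt_classes     = [15, 3, 7]  (per-gt category labels)
    # the per-proposal labels become [7, 80, -1]: proposal 0 keeps the class of its
    # matched gt, proposal 1 is relabeled background (= num_classes), and proposal 2
    # is marked ignore (-1), which `subsample_labels` never samples.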
@torch.no_grad()
def label_and_sample_proposals(
self, proposals: List[Instances], targets: List[Instances]
) -> List[Instances]:
"""
Prepare some proposals to be used to train the ROI heads.
It performs box matching between `proposals` and `targets`, and assigns
training labels to the proposals.
It returns ``self.batch_size_per_image`` random samples from proposals and groundtruth
boxes, with a fraction of positives that is no larger than
``self.positive_fraction``.
Args:
See :meth:`ROIHeads.forward`
Returns:
list[Instances]:
length `N` list of `Instances`s containing the proposals
sampled for training. Each `Instances` has the following fields:
- proposal_boxes: the proposal boxes
- gt_boxes: the ground-truth box that the proposal is assigned to
(this is only meaningful if the proposal has a label > 0; if label = 0
then the ground-truth box is random)
                Other fields such as "gt_classes" and "gt_masks" that are included in `targets`.
"""
gt_boxes = [x.gt_boxes for x in targets]
# Augment proposals with ground-truth boxes.
# In the case of learned proposals (e.g., RPN), when training starts
# the proposals will be low quality due to random initialization.
# It's possible that none of these initial
# proposals have high enough overlap with the gt objects to be used
# as positive examples for the second stage components (box head,
# cls head, mask head). Adding the gt boxes to the set of proposals
# ensures that the second stage components will have some positive
# examples from the start of training. For RPN, this augmentation improves
# convergence and empirically improves box AP on COCO by about 0.5
# points (under one tested configuration).
if self.proposal_append_gt:
proposals = add_ground_truth_to_proposals(gt_boxes, proposals)
proposals_with_gt = []
num_fg_samples = []
num_bg_samples = []
for proposals_per_image, targets_per_image in zip(proposals, targets):
has_gt = len(targets_per_image) > 0
match_quality_matrix = pairwise_iou(
targets_per_image.gt_boxes, proposals_per_image.proposal_boxes
)
matched_idxs, matched_labels = self.proposal_matcher(match_quality_matrix)
sampled_idxs, gt_classes = self._sample_proposals(
matched_idxs, matched_labels, targets_per_image.gt_classes
)
# Set target attributes of the sampled proposals:
proposals_per_image = proposals_per_image[sampled_idxs]
proposals_per_image.gt_classes = gt_classes
if has_gt:
sampled_targets = matched_idxs[sampled_idxs]
# We index all the attributes of targets that start with "gt_"
# and have not been added to proposals yet (="gt_classes").
                # NOTE: here the indexing wastes some compute, because heads
                # like masks, keypoints, etc., will filter the proposals again
                # (by foreground/background, or number of keypoints in the image, etc.),
                # so we essentially index the data twice.
for (trg_name, trg_value) in targets_per_image.get_fields().items():
if trg_name.startswith("gt_") and not proposals_per_image.has(trg_name):
proposals_per_image.set(trg_name, trg_value[sampled_targets])
# If no GT is given in the image, we don't know what a dummy gt value can be.
# Therefore the returned proposals won't have any gt_* fields, except for a
# gt_classes full of background label.
num_bg_samples.append((gt_classes == self.num_classes).sum().item())
num_fg_samples.append(gt_classes.numel() - num_bg_samples[-1])
proposals_with_gt.append(proposals_per_image)
# Log the number of fg/bg samples that are selected for training ROI heads
storage = get_event_storage()
storage.put_scalar("roi_head/num_fg_samples", np.mean(num_fg_samples))
storage.put_scalar("roi_head/num_bg_samples", np.mean(num_bg_samples))
return proposals_with_gt
def forward(
self,
images: ImageList,
features: Dict[str, torch.Tensor],
proposals: List[Instances],
targets: Optional[List[Instances]] = None,
) -> Tuple[List[Instances], Dict[str, torch.Tensor]]:
"""
Args:
images (ImageList):
features (dict[str,Tensor]): input data as a mapping from feature
map name to tensor. Axis 0 represents the number of images `N` in
the input data; axes 1-3 are channels, height, and width, which may
vary between feature maps (e.g., if a feature pyramid is used).
proposals (list[Instances]): length `N` list of `Instances`. The i-th
`Instances` contains object proposals for the i-th input image,
with fields "proposal_boxes" and "objectness_logits".
targets (list[Instances], optional): length `N` list of `Instances`. The i-th
`Instances` contains the ground-truth per-instance annotations
for the i-th input image. Specify `targets` during training only.
It may have the following fields:
- gt_boxes: the bounding box of each instance.
- gt_classes: the label for each instance with a category ranging in [0, #class].
- gt_masks: PolygonMasks or BitMasks, the ground-truth masks of each instance.
                - gt_keypoints: NxKx3, the ground-truth keypoints for each instance.
Returns:
list[Instances]: length `N` list of `Instances` containing the
detected instances. Returned during inference only; may be [] during training.
dict[str->Tensor]:
mapping from a named loss to a tensor storing the loss. Used during training only.
"""
raise NotImplementedError()
@ROI_HEADS_REGISTRY.register()
class Res5ROIHeads(ROIHeads):
"""
The ROIHeads in a typical "C4" R-CNN model, where
the box and mask head share the cropping and
the per-region feature computation by a Res5 block.
See :paper:`ResNet` Appendix A.
"""
@configurable
def __init__(
self,
*,
in_features: List[str],
pooler: ROIPooler,
res5: nn.Module,
box_predictor: nn.Module,
mask_head: Optional[nn.Module] = None,
**kwargs
):
"""
NOTE: this interface is experimental.
Args:
in_features (list[str]): list of backbone feature map names to use for
feature extraction
            pooler (ROIPooler): pooler to extract region features from backbone
res5 (nn.Sequential): a CNN to compute per-region features, to be used by
``box_predictor`` and ``mask_head``. Typically this is a "res5"
block from a ResNet.
box_predictor (nn.Module): make box predictions from the feature.
Should have the same interface as :class:`FastRCNNOutputLayers`.
mask_head (nn.Module): transform features to make mask predictions
"""
super().__init__(**kwargs)
self.in_features = in_features
self.pooler = pooler
if isinstance(res5, (list, tuple)):
res5 = nn.Sequential(*res5)
self.res5 = res5
self.box_predictor = box_predictor
self.mask_on = mask_head is not None
if self.mask_on:
self.mask_head = mask_head
@classmethod
def from_config(cls, cfg, input_shape):
# fmt: off
ret = super().from_config(cfg)
in_features = ret["in_features"] = cfg.MODEL.ROI_HEADS.IN_FEATURES
pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE
pooler_scales = (1.0 / input_shape[in_features[0]].stride, )
sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO
mask_on = cfg.MODEL.MASK_ON
# fmt: on
assert not cfg.MODEL.KEYPOINT_ON
assert len(in_features) == 1
ret["pooler"] = ROIPooler(
output_size=pooler_resolution,
scales=pooler_scales,
sampling_ratio=sampling_ratio,
pooler_type=pooler_type,
)
        # Compatibility with old moco code. Might be useful.
# See notes in StandardROIHeads.from_config
if not inspect.ismethod(cls._build_res5_block):
logger.warning(
"The behavior of _build_res5_block may change. "
"Please do not depend on private methods."
)
cls._build_res5_block = classmethod(cls._build_res5_block)
ret["res5"], out_channels = cls._build_res5_block(cfg)
ret["box_predictor"] = FastRCNNOutputLayers(
cfg, ShapeSpec(channels=out_channels, height=1, width=1)
)
if mask_on:
ret["mask_head"] = build_mask_head(
cfg,
ShapeSpec(channels=out_channels, width=pooler_resolution, height=pooler_resolution),
)
return ret
@classmethod
def _build_res5_block(cls, cfg):
# fmt: off
stage_channel_factor = 2 ** 3 # res5 is 8x res2
num_groups = cfg.MODEL.RESNETS.NUM_GROUPS
width_per_group = cfg.MODEL.RESNETS.WIDTH_PER_GROUP
bottleneck_channels = num_groups * width_per_group * stage_channel_factor
out_channels = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS * stage_channel_factor
stride_in_1x1 = cfg.MODEL.RESNETS.STRIDE_IN_1X1
norm = cfg.MODEL.RESNETS.NORM
assert not cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE[-1], \
"Deformable conv is not yet supported in res5 head."
# fmt: on
blocks = ResNet.make_stage(
BottleneckBlock,
3,
stride_per_block=[2, 1, 1],
in_channels=out_channels // 2,
bottleneck_channels=bottleneck_channels,
out_channels=out_channels,
num_groups=num_groups,
norm=norm,
stride_in_1x1=stride_in_1x1,
)
return nn.Sequential(*blocks), out_channels
def _shared_roi_transform(self, features, boxes):
x = self.pooler(features, boxes)
return self.res5(x)
def forward(self, images, features, proposals, targets=None):
"""
See :meth:`ROIHeads.forward`.
"""
del images
if self.training:
assert targets
proposals = self.label_and_sample_proposals(proposals, targets)
del targets
proposal_boxes = [x.proposal_boxes for x in proposals]
box_features = self._shared_roi_transform(
[features[f] for f in self.in_features], proposal_boxes
)
predictions = self.box_predictor(box_features.mean(dim=[2, 3]))
if self.training:
del features
losses = self.box_predictor.losses(predictions, proposals)
if self.mask_on:
proposals, fg_selection_masks = select_foreground_proposals(
proposals, self.num_classes
)
# Since the ROI feature transform is shared between boxes and masks,
# we don't need to recompute features. The mask loss is only defined
# on foreground proposals, so we need to select out the foreground
# features.
mask_features = box_features[torch.cat(fg_selection_masks, dim=0)]
del box_features
losses.update(self.mask_head(mask_features, proposals))
return [], losses
else:
pred_instances, _ = self.box_predictor.inference(predictions, proposals)
pred_instances = self.forward_with_given_boxes(features, pred_instances)
return pred_instances, {}
def forward_with_given_boxes(self, features, instances):
"""
Use the given boxes in `instances` to produce other (non-box) per-ROI outputs.
Args:
features: same as in `forward()`
instances (list[Instances]): instances to predict other outputs. Expect the keys
"pred_boxes" and "pred_classes" to exist.
Returns:
instances (Instances):
the same `Instances` object, with extra
fields such as `pred_masks` or `pred_keypoints`.
"""
assert not self.training
assert instances[0].has("pred_boxes") and instances[0].has("pred_classes")
if self.mask_on:
features = [features[f] for f in self.in_features]
x = self._shared_roi_transform(features, [x.pred_boxes for x in instances])
return self.mask_head(x, instances)
else:
return instances
@ROI_HEADS_REGISTRY.register()
class StandardROIHeads(ROIHeads):
"""
It's "standard" in a sense that there is no ROI transform sharing
or feature sharing between tasks.
Each head independently processes the input features by each head's
own pooler and head.
This class is used by most models, such as FPN and C5.
To implement more models, you can subclass it and implement a different
:meth:`forward()` or a head.
"""
@configurable
def __init__(
self,
*,
box_in_features: List[str],
box_pooler: ROIPooler,
box_head: nn.Module,
box_predictor: nn.Module,
mask_in_features: Optional[List[str]] = None,
mask_pooler: Optional[ROIPooler] = None,
mask_head: Optional[nn.Module] = None,
keypoint_in_features: Optional[List[str]] = None,
keypoint_pooler: Optional[ROIPooler] = None,
keypoint_head: Optional[nn.Module] = None,
train_on_pred_boxes: bool = False,
**kwargs
):
"""
NOTE: this interface is experimental.
Args:
box_in_features (list[str]): list of feature names to use for the box head.
            box_pooler (ROIPooler): pooler to extract region features for box head
box_head (nn.Module): transform features to make box predictions
box_predictor (nn.Module): make box predictions from the feature.
Should have the same interface as :class:`FastRCNNOutputLayers`.
mask_in_features (list[str]): list of feature names to use for the mask
pooler or mask head. None if not using mask head.
mask_pooler (ROIPooler): pooler to extract region features from image features.
The mask head will then take region features to make predictions.
If None, the mask head will directly take the dict of image features
defined by `mask_in_features`
mask_head (nn.Module): transform features to make mask predictions
keypoint_in_features, keypoint_pooler, keypoint_head: similar to ``mask_*``.
train_on_pred_boxes (bool): whether to use proposal boxes or
predicted boxes from the box head to train other heads.
"""
super().__init__(**kwargs)
# keep self.in_features for backward compatibility
self.in_features = self.box_in_features = box_in_features
self.box_pooler = box_pooler
self.box_head = box_head
self.box_predictor = box_predictor
self.mask_on = mask_in_features is not None
if self.mask_on:
self.mask_in_features = mask_in_features
self.mask_pooler = mask_pooler
self.mask_head = mask_head
self.keypoint_on = keypoint_in_features is not None
if self.keypoint_on:
self.keypoint_in_features = keypoint_in_features
self.keypoint_pooler = keypoint_pooler
self.keypoint_head = keypoint_head
self.train_on_pred_boxes = train_on_pred_boxes
@classmethod
def from_config(cls, cfg, input_shape):
ret = super().from_config(cfg)
ret["train_on_pred_boxes"] = cfg.MODEL.ROI_BOX_HEAD.TRAIN_ON_PRED_BOXES
# Subclasses that have not been updated to use from_config style construction
# may have overridden _init_*_head methods. In this case, those overridden methods
# will not be classmethods and we need to avoid trying to call them here.
# We test for this with ismethod which only returns True for bound methods of cls.
# Such subclasses will need to handle calling their overridden _init_*_head methods.
if inspect.ismethod(cls._init_box_head):
ret.update(cls._init_box_head(cfg, input_shape))
if inspect.ismethod(cls._init_mask_head):
ret.update(cls._init_mask_head(cfg, input_shape))
if inspect.ismethod(cls._init_keypoint_head):
ret.update(cls._init_keypoint_head(cfg, input_shape))
return ret
@classmethod
def _init_box_head(cls, cfg, input_shape):
# fmt: off
in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES
pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features)
sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO
pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE
# fmt: on
# If StandardROIHeads is applied on multiple feature maps (as in FPN),
# then we share the same predictors and therefore the channel counts must be the same
in_channels = [input_shape[f].channels for f in in_features]
# Check all channel counts are equal
assert len(set(in_channels)) == 1, in_channels
in_channels = in_channels[0]
box_pooler = ROIPooler(
output_size=pooler_resolution,
scales=pooler_scales,
sampling_ratio=sampling_ratio,
pooler_type=pooler_type,
)
# Here we split "box head" and "box predictor", which is mainly due to historical reasons.
# They are used together so the "box predictor" layers should be part of the "box head".
# New subclasses of ROIHeads do not need "box predictor"s.
box_head = build_box_head(
cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution)
)
box_predictor = FastRCNNOutputLayers(cfg, box_head.output_shape)
return {
"box_in_features": in_features,
"box_pooler": box_pooler,
"box_head": box_head,
"box_predictor": box_predictor,
}
@classmethod
def _init_mask_head(cls, cfg, input_shape):
if not cfg.MODEL.MASK_ON:
return {}
# fmt: off
in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES
pooler_resolution = cfg.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION
pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features)
sampling_ratio = cfg.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO
pooler_type = cfg.MODEL.ROI_MASK_HEAD.POOLER_TYPE
# fmt: on
in_channels = [input_shape[f].channels for f in in_features][0]
ret = {"mask_in_features": in_features}
ret["mask_pooler"] = (
ROIPooler(
output_size=pooler_resolution,
scales=pooler_scales,
sampling_ratio=sampling_ratio,
pooler_type=pooler_type,
)
if pooler_type
else None
)
if pooler_type:
shape = ShapeSpec(
channels=in_channels, width=pooler_resolution, height=pooler_resolution
)
else:
shape = {f: input_shape[f] for f in in_features}
ret["mask_head"] = build_mask_head(cfg, shape)
return ret
@classmethod
def _init_keypoint_head(cls, cfg, input_shape):
if not cfg.MODEL.KEYPOINT_ON:
return {}
# fmt: off
in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES
pooler_resolution = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION
pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) # noqa
sampling_ratio = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO
pooler_type = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_TYPE
# fmt: on
in_channels = [input_shape[f].channels for f in in_features][0]
ret = {"keypoint_in_features": in_features}
ret["keypoint_pooler"] = (
ROIPooler(
output_size=pooler_resolution,
scales=pooler_scales,
sampling_ratio=sampling_ratio,
pooler_type=pooler_type,
)
if pooler_type
else None
)
if pooler_type:
shape = ShapeSpec(
channels=in_channels, width=pooler_resolution, height=pooler_resolution
)
else:
shape = {f: input_shape[f] for f in in_features}
ret["keypoint_head"] = build_keypoint_head(cfg, shape)
return ret
def forward(
self,
images: ImageList,
features: Dict[str, torch.Tensor],
proposals: List[Instances],
targets: Optional[List[Instances]] = None,
) -> Tuple[List[Instances], Dict[str, torch.Tensor]]:
"""
See :class:`ROIHeads.forward`.
"""
del images
if self.training:
assert targets, "'targets' argument is required during training"
proposals = self.label_and_sample_proposals(proposals, targets)
del targets
if self.training:
losses = self._forward_box(features, proposals)
# Usually the original proposals used by the box head are used by the mask, keypoint
# heads. But when `self.train_on_pred_boxes is True`, proposals will contain boxes
# predicted by the box head.
losses.update(self._forward_mask(features, proposals))
losses.update(self._forward_keypoint(features, proposals))
return proposals, losses
else:
pred_instances = self._forward_box(features, proposals)
# During inference cascaded prediction is used: the mask and keypoints heads are only
# applied to the top scoring box detections.
pred_instances = self.forward_with_given_boxes(features, pred_instances)
return pred_instances, {}
def forward_with_given_boxes(
self, features: Dict[str, torch.Tensor], instances: List[Instances]
) -> List[Instances]:
"""
Use the given boxes in `instances` to produce other (non-box) per-ROI outputs.
        This is useful for downstream tasks where a box is known, but one needs to obtain
        other attributes (outputs of other heads).
Test-time augmentation also uses this.
Args:
features: same as in `forward()`
instances (list[Instances]): instances to predict other outputs. Expect the keys
"pred_boxes" and "pred_classes" to exist.
Returns:
list[Instances]:
the same `Instances` objects, with extra
fields such as `pred_masks` or `pred_keypoints`.
"""
assert not self.training
assert instances[0].has("pred_boxes") and instances[0].has("pred_classes")
instances = self._forward_mask(features, instances)
instances = self._forward_keypoint(features, instances)
return instances
def _forward_box(self, features: Dict[str, torch.Tensor], proposals: List[Instances]):
"""
Forward logic of the box prediction branch. If `self.train_on_pred_boxes is True`,
the function puts predicted boxes in the `proposal_boxes` field of `proposals` argument.
Args:
features (dict[str, Tensor]): mapping from feature map names to tensor.
Same as in :meth:`ROIHeads.forward`.
proposals (list[Instances]): the per-image object proposals with
their matching ground truth.
Each has fields "proposal_boxes", and "objectness_logits",
"gt_classes", "gt_boxes".
Returns:
In training, a dict of losses.
In inference, a list of `Instances`, the predicted instances.
"""
features = [features[f] for f in self.box_in_features]
box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals])
box_features = self.box_head(box_features)
predictions = self.box_predictor(box_features)
del box_features
if self.training:
losses = self.box_predictor.losses(predictions, proposals)
# proposals is modified in-place below, so losses must be computed first.
if self.train_on_pred_boxes:
with torch.no_grad():
pred_boxes = self.box_predictor.predict_boxes_for_gt_classes(
predictions, proposals
)
for proposals_per_image, pred_boxes_per_image in zip(proposals, pred_boxes):
proposals_per_image.proposal_boxes = Boxes(pred_boxes_per_image)
return losses
else:
pred_instances, _ = self.box_predictor.inference(predictions, proposals)
return pred_instances
def _forward_mask(self, features: Dict[str, torch.Tensor], instances: List[Instances]):
"""
Forward logic of the mask prediction branch.
Args:
features (dict[str, Tensor]): mapping from feature map names to tensor.
Same as in :meth:`ROIHeads.forward`.
instances (list[Instances]): the per-image instances to train/predict masks.
In training, they can be the proposals.
In inference, they can be the boxes predicted by R-CNN box head.
Returns:
In training, a dict of losses.
In inference, update `instances` with new fields "pred_masks" and return it.
"""
if not self.mask_on:
return {} if self.training else instances
if self.training:
# head is only trained on positive proposals.
instances, _ = select_foreground_proposals(instances, self.num_classes)
if self.mask_pooler is not None:
features = [features[f] for f in self.mask_in_features]
boxes = [x.proposal_boxes if self.training else x.pred_boxes for x in instances]
features = self.mask_pooler(features, boxes)
else:
features = {f: features[f] for f in self.mask_in_features}
return self.mask_head(features, instances)
def _forward_keypoint(self, features: Dict[str, torch.Tensor], instances: List[Instances]):
"""
Forward logic of the keypoint prediction branch.
Args:
features (dict[str, Tensor]): mapping from feature map names to tensor.
Same as in :meth:`ROIHeads.forward`.
instances (list[Instances]): the per-image instances to train/predict keypoints.
In training, they can be the proposals.
In inference, they can be the boxes predicted by R-CNN box head.
Returns:
In training, a dict of losses.
In inference, update `instances` with new fields "pred_keypoints" and return it.
"""
if not self.keypoint_on:
return {} if self.training else instances
if self.training:
# head is only trained on positive proposals with >=1 visible keypoints.
instances, _ = select_foreground_proposals(instances, self.num_classes)
instances = select_proposals_with_visible_keypoints(instances)
if self.keypoint_pooler is not None:
features = [features[f] for f in self.keypoint_in_features]
boxes = [x.proposal_boxes if self.training else x.pred_boxes for x in instances]
features = self.keypoint_pooler(features, boxes)
else:
features = dict([(f, features[f]) for f in self.keypoint_in_features])
return self.keypoint_head(features, instances)
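# Illustrative sketch (not from the original source): as the StandardROIHeads docstring
# suggests, new models can subclass it and override a single branch. "MyROIHeads" below
# is a hypothetical name.
#
#   @ROI_HEADS_REGISTRY.register()
#   class MyROIHeads(StandardROIHeads):
#       def _forward_box(self, features, proposals):
#           # custom logic before/after the standard box branch
#           return super()._forward_box(features, proposals)
#
# It is then selected with cfg.MODEL.ROI_HEADS.NAME = "MyROIHeads" before
# build_roi_heads(cfg, input_shape) is called.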
| 43.06659
| 100
| 0.640292
|
66be49f460c65065e6f7736da4876a7e889e97ba
| 1,864
|
py
|
Python
|
airflow/contrib/utils/log/task_handler_with_custom_formatter.py
|
fxdmhtt/airflow
|
cf88f7bc7bbd3e9bf110e98f025759a96c130235
|
[
"Apache-2.0"
] | 3
|
2019-10-03T21:08:15.000Z
|
2019-10-04T00:24:40.000Z
|
airflow/contrib/utils/log/task_handler_with_custom_formatter.py
|
fxdmhtt/airflow
|
cf88f7bc7bbd3e9bf110e98f025759a96c130235
|
[
"Apache-2.0"
] | 3
|
2020-03-08T15:43:38.000Z
|
2021-09-29T17:26:10.000Z
|
airflow/contrib/utils/log/task_handler_with_custom_formatter.py
|
upjohnc/airflow-upjohn-k8s
|
caadbc1618d73e054de99138b0892cea3a9327c4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 5
|
2017-06-19T19:55:47.000Z
|
2020-10-10T00:49:20.000Z
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from logging import StreamHandler
from airflow import configuration as conf
from airflow.utils.helpers import parse_template_string
class TaskHandlerWithCustomFormatter(StreamHandler):
    def __init__(self, stream):
        # `stream` is accepted for compatibility with logging configs but is not
        # forwarded; StreamHandler's default stream is used.
        super(TaskHandlerWithCustomFormatter, self).__init__()
        self.prefix_jinja_template = None
    def set_context(self, ti):
        if ti.raw:
            return
        prefix = conf.get('core', 'task_log_prefix_template')
        if prefix:
            _, self.prefix_jinja_template = parse_template_string(prefix)
            rendered_prefix = self._render_prefix(ti)
            # Prepend the rendered prefix only when a template is configured, so the
            # original format string is left untouched otherwise.
            self.setFormatter(logging.Formatter(rendered_prefix + ":" + self.formatter._fmt))
        self.setLevel(self.level)
def _render_prefix(self, ti):
if self.prefix_jinja_template:
jinja_context = ti.get_template_context()
return self.prefix_jinja_template.render(**jinja_context)
logging.warning("'task_log_prefix_template' is in invalid format, ignoring the variable value")
return ""
| 37.28
| 103
| 0.725322
|
b6ed4cbd48447eb6f928c864598323f1c095ae62
| 3,987
|
py
|
Python
|
pkuseg/config.py
|
hcg2011/pkuseg-python
|
dcfbb4a5fcbd11d421c9ec76d71fed8633e7d9af
|
[
"MIT"
] | 1
|
2019-01-22T10:15:13.000Z
|
2019-01-22T10:15:13.000Z
|
pkuseg/config.py
|
DavidAlphaFox/pkuseg-python
|
3975a94cccd9e8e635ca42689ef1d44e8f719c61
|
[
"MIT"
] | null | null | null |
pkuseg/config.py
|
DavidAlphaFox/pkuseg-python
|
3975a94cccd9e8e635ca42689ef1d44e8f719c61
|
[
"MIT"
] | null | null | null |
import os
import tempfile
class Config:
lineEnd = "\n"
biLineEnd = "\n\n"
triLineEnd = "\n\n\n"
undrln = "_"
blank = " "
tab = "\t"
star = "*"
slash = "/"
comma = ","
delimInFeature = "."
B = "B"
num = "0123456789.几二三四五六七八九十千万亿兆零1234567890%"
letter = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghigklmnopqrstuvwxyz/・-"
mark = "*"
def __init__(self):
# main setting
self.trainFile = os.path.join("data", "small_training.utf8")
self.testFile = os.path.join("data", "small_test.utf8")
self._tmp_dir = tempfile.TemporaryDirectory()
self.homepath = self._tmp_dir.name
self.tempFile = os.path.join(self.homepath, ".pkuseg", "temp")
self.readFile = os.path.join("data", "small_test.utf8")
self.outputFile = os.path.join("data", "small_test_output.utf8")
self.modelOptimizer = "crf.adf"
self.rate0 = 0.05 # init value of decay rate in SGD and ADF training
# self.reg = 1
# self.regs = [1]
# self.regList = self.regs.copy()
        self.random = 0  # 0 for 0-initialization of model weights, 1 for random init of model weights
        self.evalMetric = "f1"  # tok.acc (token accuracy), str.acc (string accuracy), f1 (F1-score)
        self.trainSizeScale = 1  # for scaling the size of training data
        self.ttlIter = 20  # number of training iterations
self.nUpdate = 10 # for ADF training
self.outFolder = os.path.join(self.tempFile, "output")
self.save = 1 # save model file
self.rawResWrite = True
self.miniBatch = 1 # mini-batch in stochastic training
self.nThread = 10 # number of processes
# ADF training
self.upper = 0.995 # was tuned for nUpdate = 10
self.lower = 0.6 # was tuned for nUpdate = 10
# global variables
self.metric = None
self.reg = 1
self.outDir = self.outFolder
self.testrawDir = "rawinputs/"
self.testinputDir = "inputs/"
self.tempDir = os.path.join(self.homepath, ".pkuseg", "temp")
self.testoutputDir = "entityoutputs/"
# self.GL_init = True
self.weightRegMode = "L2" # choosing weight regularizer: L2, L1)
self.c_train = os.path.join(self.tempFile, "train.conll.txt")
self.f_train = os.path.join(self.tempFile, "train.feat.txt")
self.c_test = os.path.join(self.tempFile, "test.conll.txt")
self.f_test = os.path.join(self.tempFile, "test.feat.txt")
self.fTune = "tune.txt"
self.fLog = "trainLog.txt"
self.fResSum = "summarizeResult.txt"
self.fResRaw = "rawResult.txt"
self.fOutput = "outputTag-{}.txt"
self.fFeatureTrain = os.path.join(self.tempFile, "ftrain.txt")
self.fGoldTrain = os.path.join(self.tempFile, "gtrain.txt")
self.fFeatureTest = os.path.join(self.tempFile, "ftest.txt")
self.fGoldTest = os.path.join(self.tempFile, "gtest.txt")
self.modelDir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "models", "ctb8"
)
self.fModel = os.path.join(self.modelDir, "model.txt")
# feature
self.numLetterNorm = True
self.featureTrim = 0
self.wordFeature = True
self.wordMax = 6
self.wordMin = 2
self.nLabel = 5
self.order = 1
def globalCheck(self):
if self.evalMetric == "f1":
self.metric = "f-score"
elif self.evalMetric == "tok.acc":
self.metric = "token-accuracy"
elif self.evalMetric == "str.acc":
self.metric = "string-accuracy"
else:
raise Exception("invalid eval metric")
assert self.rate0 > 0
assert self.trainSizeScale > 0
assert self.ttlIter > 0
assert self.nUpdate > 0
assert self.miniBatch > 0
assert self.reg > 0
config = Config()
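# Illustrative usage sketch (not part of the module): the module-level `config` above is
# typically adjusted before training and then validated; the values below are arbitrary.
#
#   config.evalMetric = "tok.acc"
#   config.ttlIter = 10
#   config.globalCheck()  # maps evalMetric to config.metric ("token-accuracy") and
#                         # asserts that the numeric settings are positive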
| 34.669565
| 88
| 0.58766
|
8e3ea17e47c960b6309f11d30b0dde96efbee551
| 3,633
|
py
|
Python
|
third_party/blink/tools/blinkpy/common/read_checksum_from_png_unittest.py
|
DamieFC/chromium
|
54ce2d3c77723697efd22cfdb02aea38f9dfa25c
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2020-10-18T02:33:40.000Z
|
2020-10-18T02:33:40.000Z
|
third_party/blink/tools/blinkpy/common/read_checksum_from_png_unittest.py
|
DamieFC/chromium
|
54ce2d3c77723697efd22cfdb02aea38f9dfa25c
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 3
|
2021-05-17T16:28:52.000Z
|
2021-05-21T22:42:22.000Z
|
third_party/blink/tools/blinkpy/common/read_checksum_from_png_unittest.py
|
DamieFC/chromium
|
54ce2d3c77723697efd22cfdb02aea38f9dfa25c
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from blinkpy.common import read_checksum_from_png
from six import StringIO
class ReadChecksumFromPngTest(unittest.TestCase):
def test_read_checksum(self):
# pylint: disable=line-too-long
# Test a file with the comment.
filehandle = StringIO(
'''\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x03 \x00\x00\x02X\x08\x02\x00\x00\x00\x15\x14\x15'\x00\x00\x00)tEXtchecksum\x003c4134fe2739880353f91c5b84cadbaaC\xb8?\xec\x00\x00\x16\xfeIDATx\x9c\xed\xdd[\x8cU\xe5\xc1\xff\xf15T\x18\x0ea,)\xa6\x80XZ<\x10\n\xd6H\xc4V\x88}\xb5\xa9\xd6r\xd5\x0bki0\xa6\xb5ih\xd2\xde\x98PHz\xd1\x02=\\q#\x01\x8b\xa5rJ\x8b\x88i\xacM\xc5h\x8cbMk(\x1ez@!\x0c\xd5\xd2\xc2\xb44\x1c\x848\x1dF(\xeb\x7f\xb1\xff\xd9\xef~g\xd6\xde3\xe0o\x10\xec\xe7sa6{\xd6z\xd6\xb3\xd7\xf3\xa8_7\xdbM[Y\x96\x05\x00\x009\xc3\xde\xeb\t\x00\x00\xbc\xdf\x08,\x00\x800\x81\x05\x00\x10&\xb0\x00\x00\xc2\x04\x16\x00@\x98\xc0\x02\x00\x08\x13X\x00\x00a\x02\x0b\x00 Lx01\x00\x84\t,\x00\x800\x81\x05\x00\x10\xd64\xb0\xda\x9a\xdb\xb6m\xdb\xb4i\xd3\xfa\x9fr\xf3\xcd7\x0f\xe5T\x07\xe5\xd4\xa9'''
)
checksum = read_checksum_from_png.read_checksum(filehandle)
self.assertEqual('3c4134fe2739880353f91c5b84cadbaa', checksum)
# Test a file without the comment.
filehandle = StringIO(
'''\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x03 \x00\x00\x02X\x08\x02\x00\x00\x00\x15\x14\x15'\x00\x00\x16\xfeIDATx\x9c\xed\xdd[\x8cU\xe5\xc1\xff\xf15T\x18\x0ea,)\xa6\x80XZ<\x10\n\xd6H\xc4V\x88}\xb5\xa9\xd6r\xd5\x0bki0\xa6\xb5ih\xd2\xde\x98PHz\xd1\x02=\\q#\x01\x8b\xa5rJ\x8b\x88i\xacM\xc5h\x8cbMk(\x1ez@!\x0c\xd5\xd2\xc2\xb44\x1c\x848\x1dF(\xeb\x7f\xb1\xff\xd9\xef~g\xd6\xde3\xe0o\x10\xec\xe7sa6{\xd6z\xd6\xb3\xd7\xf3\xa8_7\xdbM[Y\x96\x05\x00\x009\xc3\xde\xeb\t\x00\x00\xbc\xdf\x08,\x00\x800\x81\x05\x00\x10&\xb0\x00\x00\xc2\x04\x16\x00@\x98\xc0\x02\x00\x08\x13X\x00\x00a\x02\x0b\x00 Lx01\x00\x84\t,\x00\x800\x81\x05\x00\x10\xd64\xb0\xda\x9a\xdb\xb6m\xdb\xb4i\xd3\xfa\x9fr\xf3\xcd7\x0f\xe5T\x07\xe5\xd4\xa9S\x8b\x17/\x1e?~\xfc\xf8\xf1\xe3\xef\xbf\xff\xfe\xf7z:M5\xbb\x87\x17\xcbUZ\x8f|V\xd7\xbd\x10\xb6\xcd{b\x88\xf6j\xb3\x9b?\x14\x9b\xa1>\xe6\xf9\xd9\xcf\x00\x17\x93'''
)
checksum = read_checksum_from_png.read_checksum(filehandle)
self.assertIsNone(checksum)
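# Illustrative sketch (independent of read_checksum_from_png's implementation): the
# checksum in the first fixture above sits in a PNG tEXt chunk whose payload is
# "checksum\x00<32 hex digits>", so it could also be pulled out with a plain regex.
# The helper below is hypothetical.
#
#   import re
#
#   def checksum_from_text(data):
#       match = re.search(r'tEXtchecksum\x00([0-9a-f]{32})', data)
#       return match.group(1) if match else None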
| 77.297872
| 886
| 0.744839
|
5714976b130aafe0af740890564805f211d3ac32
| 68
|
py
|
Python
|
railgun/http/__init__.py
|
c-goosen/asyncio-railgun
|
23a234d0810ae7dd3c69504232ea7b021ca7c82c
|
[
"MIT"
] | null | null | null |
railgun/http/__init__.py
|
c-goosen/asyncio-railgun
|
23a234d0810ae7dd3c69504232ea7b021ca7c82c
|
[
"MIT"
] | null | null | null |
railgun/http/__init__.py
|
c-goosen/asyncio-railgun
|
23a234d0810ae7dd3c69504232ea7b021ca7c82c
|
[
"MIT"
] | null | null | null |
"""
High level methods and wrappers for http calls with Railgun
"""
| 17
| 59
| 0.735294
|
55823c15515bd4b320f39730d09c35d0971db051
| 16,122
|
py
|
Python
|
docs/matplotlib_ext/docscrape_sphinx.py
|
nenkoru/okama
|
1e202bc801aea8adaf4c2ad033cd51af0c957df5
|
[
"MIT"
] | 200
|
2015-02-12T16:56:28.000Z
|
2022-03-16T15:34:50.000Z
|
docs/matplotlib_ext/docscrape_sphinx.py
|
nenkoru/okama
|
1e202bc801aea8adaf4c2ad033cd51af0c957df5
|
[
"MIT"
] | 330
|
2015-01-01T09:15:43.000Z
|
2022-03-30T22:48:26.000Z
|
docs/matplotlib_ext/docscrape_sphinx.py
|
nenkoru/okama
|
1e202bc801aea8adaf4c2ad033cd51af0c957df5
|
[
"MIT"
] | 142
|
2015-01-21T01:05:14.000Z
|
2022-03-07T15:22:53.000Z
|
import re
import inspect
import textwrap
import pydoc
from collections.abc import Callable
import os
from jinja2 import FileSystemLoader
from jinja2.sandbox import SandboxedEnvironment
import sphinx
from sphinx.jinja2glue import BuiltinTemplateLoader
from .docscrape import NumpyDocString, FunctionDoc, ClassDoc, ObjDoc
from .xref import make_xref
IMPORT_MATPLOTLIB_RE = r'\b(import +matplotlib|from +matplotlib +import)\b'
class SphinxDocString(NumpyDocString):
def __init__(self, docstring, config={}):
NumpyDocString.__init__(self, docstring, config=config)
self.load_config(config)
def load_config(self, config):
self.use_plots = config.get('use_plots', False)
self.use_blockquotes = config.get('use_blockquotes', False)
self.class_members_toctree = config.get('class_members_toctree', True)
self.attributes_as_param_list = config.get('attributes_as_param_list', True)
self.xref_param_type = config.get('xref_param_type', False)
self.xref_aliases = config.get('xref_aliases', dict())
self.xref_ignore = config.get('xref_ignore', set())
self.template = config.get('template', None)
if self.template is None:
template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
template_loader = FileSystemLoader(template_dirs)
template_env = SandboxedEnvironment(loader=template_loader)
self.template = template_env.get_template('numpydoc_docstring.rst')
# string conversion routines
def _str_header(self, name, symbol='`'):
return ['.. rubric:: ' + name, '']
def _str_field_list(self, name):
return [':' + name + ':']
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' '*indent + line]
return out
def _str_signature(self):
return ['']
def _str_summary(self):
return self['Summary'] + ['']
def _str_extended_summary(self):
return self['Extended Summary'] + ['']
def _str_returns(self, name='Returns'):
named_fmt = '**%s** : %s'
unnamed_fmt = '%s'
out = []
if self[name]:
out += self._str_field_list(name)
out += ['']
for param in self[name]:
param_type = param.type
if param_type and self.xref_param_type:
param_type = make_xref(
param_type,
self.xref_aliases,
self.xref_ignore
)
if param.name:
out += self._str_indent([named_fmt % (param.name.strip(),
param_type)])
else:
out += self._str_indent([unnamed_fmt % param_type.strip()])
if not param.desc:
out += self._str_indent(['..'], 8)
else:
if self.use_blockquotes:
out += ['']
out += self._str_indent(param.desc, 8)
out += ['']
return out
def _escape_args_and_kwargs(self, name):
if name[:2] == '**':
return r'\*\*' + name[2:]
elif name[:1] == '*':
return r'\*' + name[1:]
else:
return name
def _process_param(self, param, desc, fake_autosummary):
"""Determine how to display a parameter
Emulates autosummary behavior if fake_autosummary
Parameters
----------
param : str
The name of the parameter
desc : list of str
The parameter description as given in the docstring. This is
ignored when autosummary logic applies.
fake_autosummary : bool
If True, autosummary-style behaviour will apply for params
that are attributes of the class and have a docstring.
Returns
-------
display_param : str
The marked up parameter name for display. This may include a link
to the corresponding attribute's own documentation.
desc : list of str
A list of description lines. This may be identical to the input
``desc``, if ``autosum is None`` or ``param`` is not a class
attribute, or it will be a summary of the class attribute's
docstring.
Notes
-----
This does not have the autosummary functionality to display a method's
signature, and hence is not used to format methods. It may be
complicated to incorporate autosummary's signature mangling, as it
relies on Sphinx's plugin mechanism.
"""
param = self._escape_args_and_kwargs(param.strip())
# param = param.strip()
# XXX: If changing the following, please check the rendering when param
# ends with '_', e.g. 'word_'
# See https://github.com/numpy/numpydoc/pull/144
display_param = '**%s**' % param
if not fake_autosummary:
return display_param, desc
param_obj = getattr(self._obj, param, None)
if not (callable(param_obj)
or isinstance(param_obj, property)
or inspect.isgetsetdescriptor(param_obj)
or inspect.ismemberdescriptor(param_obj)):
param_obj = None
obj_doc = pydoc.getdoc(param_obj)
if not (param_obj and obj_doc):
return display_param, desc
prefix = getattr(self, '_name', '')
if prefix:
link_prefix = '%s.' % prefix
else:
link_prefix = ''
# Referenced object has a docstring
display_param = ':obj:`%s <%s%s>`' % (param,
link_prefix,
param)
if obj_doc:
# Overwrite desc. Take summary logic of autosummary
desc = re.split(r'\n\s*\n', obj_doc.strip(), 1)[0]
# XXX: Should this have DOTALL?
# It does not in autosummary
m = re.search(r"^([A-Z].*?\.)(?:\s|$)",
' '.join(desc.split()))
if m:
desc = m.group(1).strip()
else:
desc = desc.partition('\n')[0]
desc = desc.split('\n')
return display_param, desc
def _str_param_list(self, name, fake_autosummary=False):
"""Generate RST for a listing of parameters or similar
Parameter names are displayed as bold text, and descriptions
are in blockquotes. Descriptions may therefore contain block
markup as well.
Parameters
----------
name : str
Section name (e.g. Parameters)
fake_autosummary : bool
When True, the parameter names may correspond to attributes of the
            object being documented, usually ``property`` instances on a class.
In this case, names will be linked to fuller descriptions.
Returns
-------
rst : list of str
"""
out = []
if self[name]:
out += self._str_field_list(name)
out += ['']
for param in self[name]:
display_param, desc = self._process_param(param.name,
param.desc,
fake_autosummary)
parts = []
if display_param:
parts.append(display_param)
param_type = param.type
if param_type:
param_type = param.type
if self.xref_param_type:
param_type = make_xref(
param_type,
self.xref_aliases,
self.xref_ignore
)
parts.append(param_type)
out += self._str_indent([' : '.join(parts)])
if desc and self.use_blockquotes:
out += ['']
elif not desc:
# empty definition
desc = ['..']
out += self._str_indent(desc, 8)
out += ['']
return out
def _str_member_list(self, name):
"""
Generate a member listing, autosummary:: table where possible,
and a table where not.
"""
out = []
if self[name]:
out += ['.. rubric:: %s' % name, '']
prefix = getattr(self, '_name', '')
if prefix:
prefix = '~%s.' % prefix
autosum = []
others = []
for param in self[name]:
param = param._replace(name=param.name.strip())
# Check if the referenced member can have a docstring or not
param_obj = getattr(self._obj, param.name, None)
if not (callable(param_obj)
or isinstance(param_obj, property)
or inspect.isdatadescriptor(param_obj)):
param_obj = None
if param_obj and pydoc.getdoc(param_obj):
# Referenced object has a docstring
autosum += [" %s%s" % (prefix, param.name)]
else:
others.append(param)
if autosum:
out += ['.. autosummary::']
if self.class_members_toctree:
out += [' :toctree:']
out += [''] + autosum
if others:
maxlen_0 = max(3, max([len(p.name) + 4 for p in others]))
hdr = "=" * maxlen_0 + " " + "=" * 10
fmt = '%%%ds %%s ' % (maxlen_0,)
out += ['', '', hdr]
for param in others:
name = "**" + param.name.strip() + "**"
desc = " ".join(x.strip()
for x in param.desc).strip()
if param.type:
desc = "(%s) %s" % (param.type, desc)
out += [fmt % (name, desc)]
out += [hdr]
out += ['']
return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
content = textwrap.dedent("\n".join(self[name])).split("\n")
out += content
out += ['']
return out
def _str_see_also(self, func_role):
out = []
if self['See Also']:
see_also = super()._str_see_also(func_role)
out = ['.. seealso::', '']
out += self._str_indent(see_also[2:])
return out
def _str_warnings(self):
out = []
if self['Warnings']:
out = ['.. warning::', '']
out += self._str_indent(self['Warnings'])
out += ['']
return out
def _str_index(self):
idx = self['index']
out = []
if len(idx) == 0:
return out
out += ['.. index:: %s' % idx.get('default', '')]
for section, references in idx.items():
if section == 'default':
continue
elif section == 'refguide':
out += [' single: %s' % (', '.join(references))]
else:
out += [' %s: %s' % (section, ','.join(references))]
out += ['']
return out
def _str_references(self):
out = []
if self['References']:
out += self._str_header('References')
if isinstance(self['References'], str):
self['References'] = [self['References']]
out.extend(self['References'])
out += ['']
# Latex collects all references to a separate bibliography,
# so we need to insert links to it
if sphinx.__version__ >= "0.6":
out += ['.. only:: latex', '']
else:
out += ['.. latexonly::', '']
items = []
for line in self['References']:
m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
if m:
items.append(m.group(1))
out += [' ' + ", ".join(["[%s]_" % item for item in items]), '']
return out
def _str_examples(self):
examples_str = "\n".join(self['Examples'])
if (self.use_plots and re.search(IMPORT_MATPLOTLIB_RE, examples_str)
and 'plot::' not in examples_str):
out = []
out += self._str_header('Examples')
out += ['.. plot::', '']
out += self._str_indent(self['Examples'])
out += ['']
return out
else:
return self._str_section('Examples')
def __str__(self, indent=0, func_role="obj"):
ns = {
'signature': self._str_signature(),
'index': self._str_index(),
'summary': self._str_summary(),
'extended_summary': self._str_extended_summary(),
'parameters': self._str_param_list('Parameters'),
'returns': self._str_returns('Returns'),
'yields': self._str_returns('Yields'),
'receives': self._str_returns('Receives'),
'other_parameters': self._str_param_list('Other Parameters'),
'raises': self._str_returns('Raises'),
'warns': self._str_returns('Warns'),
'warnings': self._str_warnings(),
'see_also': self._str_see_also(func_role),
'notes': self._str_section('Notes'),
'references': self._str_references(),
'examples': self._str_examples(),
'attributes':
self._str_param_list('Attributes', fake_autosummary=True)
if self.attributes_as_param_list
else self._str_member_list('Attributes'),
'methods': self._str_member_list('Methods'),
}
ns = dict((k, '\n'.join(v)) for k, v in ns.items())
rendered = self.template.render(**ns)
return '\n'.join(self._str_indent(rendered.split('\n'), indent))
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
def __init__(self, obj, doc=None, config={}):
self.load_config(config)
FunctionDoc.__init__(self, obj, doc=doc, config=config)
class SphinxClassDoc(SphinxDocString, ClassDoc):
def __init__(self, obj, doc=None, func_doc=None, config={}):
self.load_config(config)
ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config)
class SphinxObjDoc(SphinxDocString, ObjDoc):
def __init__(self, obj, doc=None, config={}):
self.load_config(config)
ObjDoc.__init__(self, obj, doc=doc, config=config)
# TODO: refactor to use docscrape.get_doc_object
def get_doc_object(obj, what=None, doc=None, config={}, builder=None):
if what is None:
if inspect.isclass(obj):
what = 'class'
elif inspect.ismodule(obj):
what = 'module'
elif isinstance(obj, Callable):
what = 'function'
else:
what = 'object'
template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')]
if builder is not None:
template_loader = BuiltinTemplateLoader()
template_loader.init(builder, dirs=template_dirs)
else:
template_loader = FileSystemLoader(template_dirs)
template_env = SandboxedEnvironment(loader=template_loader)
config['template'] = template_env.get_template('numpydoc_docstring.rst')
if what == 'class':
return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
config=config)
elif what in ('function', 'method'):
return SphinxFunctionDoc(obj, doc=doc, config=config)
else:
if doc is None:
doc = pydoc.getdoc(obj)
return SphinxObjDoc(obj, doc, config=config)
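# Illustrative usage sketch (not from the original source): rendering an object's
# numpydoc-style docstring to reST outside a Sphinx build; the target object is
# arbitrary.
#
#   import numpy as np
#   doc = get_doc_object(np.mean, config={'use_plots': False})
#   rst_text = str(doc)  # renders the numpydoc_docstring.rst template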
| 36.475113
| 84
| 0.521337
|
2dc64f3baa4ea619626d517c95224591d00d6678
| 15,781
|
py
|
Python
|
gogamechen3/api/wsgi/game/entity/async.py
|
lolizeppelin/gogamechen3
|
4ff06f9042f1bb0cc22e1cc0b342967a829ae0f8
|
[
"MIT"
] | null | null | null |
gogamechen3/api/wsgi/game/entity/async.py
|
lolizeppelin/gogamechen3
|
4ff06f9042f1bb0cc22e1cc0b342967a829ae0f8
|
[
"MIT"
] | null | null | null |
gogamechen3/api/wsgi/game/entity/async.py
|
lolizeppelin/gogamechen3
|
4ff06f9042f1bb0cc22e1cc0b342967a829ae0f8
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
import inspect
import time
import contextlib
from collections import OrderedDict
from sqlalchemy.sql import and_
from simpleutil.common.exceptions import InvalidArgument
from simpleutil.log import log as logging
from simpleutil.utils import jsonutils
from simpleutil.utils import argutils
from simpleutil.config import cfg
from simpleservice.ormdb.api import model_query
from goperation import threadpool
from goperation.utils import safe_func_wrapper
from goperation.manager import common as manager_common
from goperation.manager.api import rpcfinishtime
from goperation.manager.utils import resultutils
from goperation.manager.utils import targetutils
from goperation.manager.wsgi.entity.controller import EntityReuest
from gogamechen3 import common
from gogamechen3.api import endpoint_session
from gogamechen3.models import AppEntity
from gogamechen3.api.wsgi.utils import gmurl
from .base import AppEntityReuestBase
LOG = logging.getLogger(__name__)
entity_controller = EntityReuest()
CONF = cfg.CONF
@contextlib.contextmanager
def empty_context(*args, **kwargs):
yield
class AppEntityAsyncReuest(AppEntityReuestBase):
"""async ext function"""
OBJFILES = {'type': 'object',
'properties': {
common.APPFILE: {
'type': 'object',
'required': ['md5', 'timeout'],
'properties': {'md5': {'type': 'string', 'format': 'md5',
'description': '更新程序文件所需文件'},
'timeout': {'type': 'integer', 'minimum': 10, 'maxmum': 300,
'description': '更新超时时间'},
'backup': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否更新前备份程序,默认是'},
'revertable': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '程序文件是否可以回滚,默认是'},
'rollback': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否连带回滚(回滚前方已经成功的步骤),默认否'},
}},
common.DATADB: {
'type': 'object',
'required': ['md5', 'timeout'],
'properties': {
'md5': {'type': 'string', 'format': 'md5', 'description': '更新游戏库所需文件'},
'timeout': {'type': 'integer', 'minimum': 30, 'maxmum': 1200,
'description': '更新超时时间'},
'backup': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否更新前备份游戏数据库,默认否'},
'revertable': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '游戏库是否可以回滚,默认否'},
'rollback': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否连带回滚(回滚前方已经成功的步骤),默认否'}}},
common.LOGDB: {
'type': 'object',
'required': ['md5', 'timeout'],
'properties': {
'md5': {'type': 'string', 'format': 'md5', 'description': '更新日志库所需文件'},
'timeout': {'type': 'integer', 'minimum': 30, 'maxmum': 3600,
'description': '更新超时时间'},
'backup': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否更新前备份日志数据库,默认否'},
'revertable': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '日志库是否可以回滚,默认否'},
'rollback': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否连带回滚(回滚前方已经成功的步骤),默认否'}}},}
}
UPGRADE = {'type': 'object',
'required': ['request_time', 'finishtime', 'objfiles'],
'properties': {
'objfiles': OBJFILES,
'request_time': {'type': 'integer', 'description': '异步请求时间'},
'timeline': {'type': 'integer', 'description': '异步请求时间'},
'finishtime': {'type': 'integer', 'description': '异步请求完成时间'}}
}
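    # Illustrative example (not part of the API): a request body that would satisfy the
    # UPGRADE schema above, keyed by the common.APPFILE constant; all values are
    # arbitrary.
    #
    #   {
    #       "objfiles": {common.APPFILE: {"md5": "0123456789abcdef0123456789abcdef",
    #                                     "timeout": 120, "backup": True}},
    #       "request_time": 1540000000,
    #       "finishtime": 1540000300,
    #   }
    #
    # `upgrade` additionally checks that each objfile's timeout fits before `finishtime`.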
FLUSH = {'type': 'object',
'properties': {
common.GMSERVER: {'type': 'integer', 'minimum': 0,
'description': 'GM服务器位置更新, 区服专用参数'},
common.CROSSSERVER: {'type': 'integer', 'minimum': 0,
'description': '战场服务器位置更新, 区服专用参数'},
'opentime': {'type': 'integer', 'minimum': 0,
'description': '游戏服开服时间, 区服专用参数'},
'force': {'type': 'boolean',
'description': '忽略运行状态'}}
}
HOTFIX = {'type': 'object',
'required': [common.APPFILE],
'properties': {
common.APPFILE: {
'type': 'object',
'required': ['md5', 'timeout'],
'properties': {'md5': {'type': 'string', 'format': 'md5',
'description': '更新程序文件所需文件'},
'timeout': {'type': 'integer', 'minimum': 10, 'maxmum': 300,
'description': '更新超时时间'},
'backup': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否更新前备份程序,默认否'},
'revertable': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '程序文件是否可以回滚,默认是'},
'rollback': {'oneOf': [{'type': 'boolean'}, {'type': 'null'}],
'description': '是否连带回滚(回滚前方已经成功的步骤),默认否'},
'stream': {'oneOf': [{'type': 'string', "minLength": 6, "maxLength": 5000},
{'type': 'null'}],
'description': '直接以stream流文件发送文件'},
}}}
}
def _async_bluck_rpc(self, action, group_id, objtype, entity, body=None, context=None):
        caller = inspect.stack()[1][3]  # name of the public method that invoked this helper
body = body or {}
group_id = int(group_id)
context = context or empty_context
if entity == 'all':
entitys = 'all'
else:
entitys = argutils.map_to_int(entity)
asyncrequest = self.create_asyncrequest(body)
target = targetutils.target_endpoint(common.NAME)
session = endpoint_session(readonly=True)
query = model_query(session, AppEntity, filter=and_(AppEntity.group_id == group_id,
AppEntity.objtype == objtype))
emaps = dict()
for _entity in query:
if _entity.status <= common.DELETED:
continue
if _entity.status != common.OK and action != 'stop':
continue
emaps.setdefault(_entity.entity, _entity.agent_id)
if entitys == 'all':
entitys = emaps.keys()
agents = set(emaps.values())
else:
if entitys - set(emaps.keys()):
raise InvalidArgument('Some entitys not found or status is not active')
agents = set()
for entity in emaps:
if entity in entitys:
agents.add(emaps[entity])
with context(asyncrequest.request_id, entitys, agents):
async_ctxt = dict(pre_run=body.pop('pre_run', None),
after_run=body.pop('after_run', None),
post_run=body.pop('post_run', None))
rpc_ctxt = {}
rpc_ctxt.setdefault('agents', agents)
rpc_method = '%s_entitys' % action
rpc_args = dict(entitys=list(entitys))
rpc_args.update(body)
def wapper():
self.send_asyncrequest(asyncrequest, target,
rpc_ctxt, rpc_method, rpc_args, async_ctxt)
threadpool.add_thread(safe_func_wrapper, wapper, LOG)
return resultutils.results(result='gogamechen3 %s entitys %s spawning' % (objtype, caller),
data=[asyncrequest.to_dict()])
def start(self, req, group_id, objtype, entity, body=None):
return self._async_bluck_rpc('start', group_id, objtype, entity, body)
def stop(self, req, group_id, objtype, entity, body=None):
"""
        kill: force the entities to stop immediately
        notify: ask the game servers to shut down via the GM server
"""
body = body or {}
kill = body.get('kill', False)
notify = body.pop('notify', False)
if objtype == common.GAMESERVER and notify and not kill:
message = body.pop('message', '') or ''
delay = body.pop('delay', 3)
if delay:
if not isinstance(delay, (int, long)) or delay < 3:
raise InvalidArgument('Delay value error')
delay = min(delay, 60)
finishtime = rpcfinishtime()[0] + delay + 5
body.update({'finishtime': finishtime, 'delay': delay + 5})
url = gmurl(req, group_id, interface='closegameserver')
@contextlib.contextmanager
def context(reqeust_id, entitys, agents):
pre_run = {'executer': 'http',
'ekwargs': {'url': url, 'method': 'POST', 'async': False,
'json': OrderedDict(RealSvrIds=list(entitys), Msg=message, DelayTime=delay)}
}
body.update({'pre_run': pre_run})
yield
else:
context = None
body.pop('delay', None)
return self._async_bluck_rpc('stop', group_id, objtype, entity, body, context)
def status(self, req, group_id, objtype, entity, body=None):
return self._async_bluck_rpc('status', group_id, objtype, entity, body)
def upgrade(self, req, group_id, objtype, entity, body=None):
body = body or {}
jsonutils.schema_validate(body, self.UPGRADE)
objfiles = body.get('objfiles')
if not objfiles:
raise InvalidArgument('Not objfile found for upgrade')
request_time = body.get('request_time')
finishtime = body.get('finishtime')
timeline = body.get('timeline') or request_time
runtime = finishtime - request_time
for subtype in objfiles:
if subtype not in (common.APPFILE, common.DATADB, common.LOGDB):
raise InvalidArgument('json schema error')
objfile = objfiles[subtype]
if objfile.get('timeout') + request_time > finishtime:
raise InvalidArgument('%s timeout over finishtime' % subtype)
body.update({'timeline': timeline,
'deadline': finishtime + 3 + (runtime * 2)})
body.setdefault('objtype', objtype)
return self._async_bluck_rpc('upgrade', group_id, objtype, entity, body)
def flushconfig(self, req, group_id, objtype, entity, body=None):
body = body or {}
group_id = int(group_id)
jsonutils.schema_validate(body, self.FLUSH)
if objtype == common.GAMESERVER:
gm = body.pop(common.GMSERVER, 0)
cross = body.pop(common.CROSSSERVER, 0)
entitys = []
if gm:
entitys.append(gm)
if cross:
entitys.append(cross)
entitys = list(set(entitys))
if entitys:
chiefs = {}
session = endpoint_session()
query = model_query(session, AppEntity,
filter=and_(AppEntity.group_id == group_id,
AppEntity.entity.in_(entitys)))
gmsvr = crosssvr = None
for appserver in query:
if appserver.group_id != group_id:
raise InvalidArgument('Entity group value error')
if appserver.objtype == common.GMSERVER:
if appserver.entity != gm:
raise InvalidArgument('Find %s but entity is %d' % (common.GMSERVER, gm))
gmsvr = appserver
elif appserver.objtype == common.CROSSSERVER:
if appserver.entity != cross:
raise InvalidArgument('Find %s but entity is %d' % (common.CROSSSERVER, cross))
crosssvr = appserver
if gm and not gmsvr:
raise InvalidArgument('%s.%d can not be found' % (common.GMSERVER, gm))
if cross and not crosssvr:
raise InvalidArgument('%s.%d can not be found' % (common.CROSSSERVER, cross))
                # Fetch server info (ports/ip) for the selected entities
maps = entity_controller.shows(endpoint=common.NAME, entitys=entitys)
if gmsvr:
chiefs.setdefault(common.GMSERVER,
dict(entity=gmsvr.entity,
ports=maps.get(gmsvr.entity).get('ports'),
local_ip=maps.get(gmsvr.entity).get('metadata').get('local_ip')
))
if crosssvr:
chiefs.setdefault(common.CROSSSERVER,
dict(entity=crosssvr.entity,
ports=maps.get(crosssvr.entity).get('ports'),
local_ip=maps.get(crosssvr.entity).get('metadata').get('local_ip')
))
body.update({'chiefs': chiefs})
return self._async_bluck_rpc('flushconfig', group_id, objtype, entity, body)
def hotfix(self, req, group_id, objtype, entity, body=None):
group_id = int(group_id)
body = body or {}
if objtype != common.GAMESERVER:
raise InvalidArgument('Hotfix just for %s' % common.GAMESERVER)
jsonutils.schema_validate(body, self.HOTFIX)
body.setdefault('objtype', objtype)
url = gmurl(req, group_id, interface='hotupdateconfig?RealSvrIds=0')
@contextlib.contextmanager
def context(reqeust_id, entitys, agents):
post_run = {'executer': 'http',
'ekwargs': {'url': url, 'method': 'GET', 'async': False},
'condition': 'entitys',
'ckwargs': {'all': False,
'operator': '=',
'value': manager_common.RESULT_SUCCESS,
'counter': '>',
'count': 0
}
}
body.update({'post_run': post_run})
yield
return self._async_bluck_rpc('hotfix', group_id, objtype, entity, body, context)
| 48.112805
| 115
| 0.478804
|
4786d5539433e2087aac858d39f19bf2cb135ffe
| 5,371
|
py
|
Python
|
mrf_apps/mrf_size.py
|
rouault/mrf
|
8b757396c48709e2ac6fddd923631eebdc8acfd3
|
[
"Apache-2.0"
] | 67
|
2015-04-13T12:37:59.000Z
|
2022-01-20T20:01:17.000Z
|
mrf_apps/mrf_size.py
|
rouault/mrf
|
8b757396c48709e2ac6fddd923631eebdc8acfd3
|
[
"Apache-2.0"
] | 38
|
2016-01-20T20:12:15.000Z
|
2022-02-17T23:21:22.000Z
|
mrf_apps/mrf_size.py
|
rouault/mrf
|
8b757396c48709e2ac6fddd923631eebdc8acfd3
|
[
"Apache-2.0"
] | 28
|
2015-04-30T04:14:32.000Z
|
2021-09-26T12:16:07.000Z
|
#!/usr/bin/env python3
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------------
# Name: MRF_size
# Purpose: Visualize an MRF size index content
#
# Author: luci6974
#
# Created: 30/07/2015
# Copyright: (c) luci6974 2015-2017
#-------------------------------------------------------------------------------
'''Builds a GDAL vrt that visualizes the size of tiles in an MRF index'''
#
# Only trivial MRFs for now, flat files, default index name. Should be extended
# to support all MRFs
#
# It creates a gdal VRT file with a pixel per tile, where the pixel value
# is the size of the respective tile
# This is very useful to understand the state of an MRF
# Since most tiles are compressed, the size of the tile tends to be proportional
# to the entropy (information) of the data within the tile.
#
import xml.etree.ElementTree as XML
import sys
import os.path as path
def usage():
print('Takes one argument, a MRF file name, ' + \
'builds a .vrt that contains the tile size info')
def XMLprettify(elem, level=0):
'XML prettifier'
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
XMLprettify(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def attr(node, key, default):
return default if node is None or node.get(key) is None else node.get(key)
class PointXYZC(object):
def __init__(self, node, defaults = (-1, -1, 1, 1)):
key = 'x','y','z','c'
self.x, self.y, self.z, self.c = (
int(attr(node, key[i], defaults[i])) for i in range(4))
def __str__(self):
f = "PointXYZC ({p.x}, {p.y}, {p.z}, {p.c})"
return f.format(p = self)
class BBOX(object):
def __init__(self, node, defaults):
key = 'minx', 'miny', 'maxx', 'maxy'
self.minx, self.miny, self.maxx, self.maxy = (
float(attr(node, key[i], defaults[i])) for i in range(4))
def __str__(self):
f = "BBOX ({p.minx}, {p.miny}, {p.maxx}, {p.maxy})"
return f.format(p = self)
class MRF(object):
'MRF metadata reader'
def __init__(self, name):
try:
root = XML.parse(name).getroot()
except:
raise "Can't parse " + name
if root.tag != 'MRF_META':
            raise ValueError(name + ' is not an MRF metadata file')
self.name = name
#Get the basic raster info
self.size = PointXYZC(root.find('Raster/Size'))
self.pagesize = PointXYZC(root.find('Raster/PageSize'),
(512, 512, 1, self.size.c))
self.projection = root.find('GeoTags/Projection').text
self.bbox = BBOX(root.find('GeoTags/BoundingBox'),
(0, 0, self.size.x, self.size.y))
def geotransform(self):
'gdal style affine geotransform as a list'
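        # The six values follow GDAL's affine geotransform convention:
        #   [top-left x, pixel width (W-E), row rotation,
        #    top-left y, column rotation, pixel height (N-S, negative for north-up)]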
return [
self.bbox.minx, (self.bbox.maxx - self.bbox.minx)/self.size.x, 0,
self.bbox.maxy, 0, (self.bbox.miny - self.bbox.maxy)/self.size.y]
def VRT_Size(mrf):
'Builds and returns a gdal VRT XML tree'
xsz = int (1 + (mrf.size.x-1) / mrf.pagesize.x)
ysz = int (1 + (mrf.size.y-1) / mrf.pagesize.y)
root = XML.Element('VRTDataset', {
'rasterXSize':str(xsz),
'rasterYSize':str(ysz)
})
XML.SubElement(root,'SRS').text = mrf.projection
gt = mrf.geotransform()
# Adjust for pagesize
gt[1] *= mrf.pagesize.x
gt[5] *= mrf.pagesize.y
XML.SubElement(root,'GeoTransform').text = ",".join((str(x) for x in gt))
bands = int(mrf.size.c / mrf.pagesize.c)
for band in range(bands):
xband = XML.SubElement(root, 'VRTRasterBand', {
'band':str(band+1),
'dataType':'UInt32',
'subClass':'VRTRawRasterBand'
})
idxname = path.splitext(path.basename(mrf.name))[0] + '.idx'
XML.SubElement(xband,'SourceFilename', { 'relativetoVRT':"1" }).text =\
idxname
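        # Each MRF index record is assumed to be 16 bytes: a big-endian 64-bit tile
        # offset followed by a 64-bit tile size, so reading a UInt32 at byte 12 of a
        # record yields the low 32 bits of the tile size.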
XML.SubElement(xband,'ImageOffset').text = str(12 + 16 * band)
XML.SubElement(xband,'PixelOffset').text = str(16 * bands)
XML.SubElement(xband,'LineOffset').text = str(16 * xsz * bands)
XML.SubElement(xband,'NoDataValue').text = '0'
XML.SubElement(xband,'ByteOrder').text = 'MSB'
return XML.ElementTree(root)
def main():
if (len(sys.argv) != 2):
usage()
return
name = sys.argv[1]
outname = path.splitext(name)[0] + '_size.vrt'
vrt = VRT_Size(MRF(name))
XMLprettify(vrt.getroot())
vrt.write(outname)
if __name__ == '__main__':
main()
| 34.876623
| 80
| 0.590207
|
1b0f0e52e8341fbfc1d15af9d3e9ef096f00ea3f
| 2,386
|
py
|
Python
|
quart/flask_patch/__init__.py
|
SmartManoj/quart
|
317562ea660edb7159efc20fa57b95223d408ea0
|
[
"MIT"
] | 1
|
2020-08-09T19:45:14.000Z
|
2020-08-09T19:45:14.000Z
|
quart/flask_patch/__init__.py
|
SmartManoj/quart
|
317562ea660edb7159efc20fa57b95223d408ea0
|
[
"MIT"
] | null | null | null |
quart/flask_patch/__init__.py
|
SmartManoj/quart
|
317562ea660edb7159efc20fa57b95223d408ea0
|
[
"MIT"
] | null | null | null |
import quart.flask_patch.app
import quart.flask_patch.globals # noqa: F401
import quart.views # noqa: F401
from quart.flask_patch._patch import patch_all
patch_all()
from flask.app import Flask # noqa: E402, I100
from flask.blueprints import Blueprint # noqa: E402
from flask.config import Config # noqa: E402
from flask.ctx import ( # noqa: E402
after_this_request, copy_current_request_context, has_app_context, has_request_context,
)
from flask.exceptions import abort # noqa: E402
from flask.globals import ( # noqa: E402
_app_ctx_stack, _request_ctx_stack, current_app, g, request, session,
)
from flask.helpers import ( # noqa: E402
flash, get_flashed_messages, get_template_attribute, make_response, stream_with_context,
url_for,
)
from flask.json import jsonify # noqa: E402
from flask.signals import ( # noqa: E402
appcontext_popped, appcontext_pushed, appcontext_tearing_down, before_render_template,
got_request_exception, message_flashed, request_finished, request_started,
request_tearing_down, signals_available, template_rendered,
)
from flask.static import safe_join, send_file, send_from_directory # noqa: E402
from flask.templating import render_template, render_template_string # noqa: E402
from flask.typing import ResponseReturnValue # noqa: E402
from flask.utils import redirect # noqa: E402
from flask.wrappers import Request, Response # noqa: E402
from jinja2 import escape, Markup # noqa: E402
__all__ = (
'_app_ctx_stack', '_request_ctx_stack', 'abort', 'after_this_request',
'appcontext_popped', 'appcontext_pushed', 'appcontext_tearing_down',
'before_render_template', 'Blueprint', 'Config', 'copy_current_request_context',
'current_app', 'escape', 'flash', 'Flask', 'g', 'get_flashed_messages',
'get_template_attribute', 'got_request_exception', 'has_app_context', 'has_request_context',
'jsonify', 'make_response', 'Markup', 'message_flashed', 'redirect',
'render_template', 'render_template_string', 'request', 'Request', 'request_finished',
'request_started', 'request_tearing_down', 'Response', 'ResponseReturnValue', 'safe_join',
'send_file', 'send_from_directory', 'session', 'signals_available', 'stream_with_context',
'template_rendered', 'url_for',
)
import sys # noqa: E402, I100
json = sys.modules['flask.json']
sys.modules['flask'] = sys.modules[__name__]
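# Rough usage sketch (based on Quart's documented flask_patch behaviour, which this
# module implements): import this module before anything imports ``flask`` so that
# the aliasing of ``sys.modules['flask']`` above takes effect first.
#   import quart.flask_patch  # must come before any ``import flask``
#   from flask import Flask   # resolves to the patched Quart-backed objects
#   app = Flask(__name__)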
| 47.72
| 96
| 0.766974
|
f923b7bfb02bd6c9ae311e8fce8428e15e8bc175
| 13,719
|
py
|
Python
|
ipyelk/nx/transformer.py
|
nrbgt/ipyelk
|
58d06d0290f5b27e942af9e6036a56143604097b
|
[
"BSD-3-Clause"
] | null | null | null |
ipyelk/nx/transformer.py
|
nrbgt/ipyelk
|
58d06d0290f5b27e942af9e6036a56143604097b
|
[
"BSD-3-Clause"
] | null | null | null |
ipyelk/nx/transformer.py
|
nrbgt/ipyelk
|
58d06d0290f5b27e942af9e6036a56143604097b
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
from collections import defaultdict
from functools import lru_cache
from typing import Dict, Generator, Hashable, List, Optional, Tuple
import networkx as nx
import traitlets as T
from ..app import ElkTransformer
from ..diagram.elk_model import ElkExtendedEdge, ElkLabel, ElkNode, ElkPort
from .factors import get_factors, invert, keep
from .nx import Edge, EdgeMap, compact, get_roots, lowest_common_ancestor
logger = logging.getLogger(__name__)
BASE_LAYOUT_DEFAULTS = {
"hierarchyHandling": "INCLUDE_CHILDREN",
# "algorithm": "layered",
# "elk.edgeRouting": "POLYLINE",
# "elk.portConstraints": "FIXED_SIDE",
# "layering.strategy": "NETWORK_SIMPEX",
}
class XELK(ElkTransformer):
"""NetworkX DiGraphs to ELK dictionary structure"""
HIDDEN_ATTR = "hidden"
_hidden_edges: Optional[EdgeMap] = None
_visible_edges: Optional[EdgeMap] = None
source = T.Tuple(T.Instance(nx.Graph), T.Instance(nx.DiGraph, allow_none=True))
base_layout = T.Dict(kw=BASE_LAYOUT_DEFAULTS)
port_scale = T.Int(default_value=10)
text_scale = T.Float(default_value=10)
label_key = T.Unicode(default_value="label")
label_offset = T.Float(default_value=5)
def eid(self, node: Hashable) -> str:
"""Get the element id for a node in the main graph for use in elk
:param node: Node in main graph
:type node: Hashable
:return: Element ID
:rtype: str
"""
g, tree = self.source
if node is None:
return "root"
elif node in g:
return g.nodes[node].get("_id", f"{node}")
return f"{node}"
def port_id(self, node, port):
return f"{self.eid(node)}.{port}"
def edge_id(self, edge: Edge):
# TODO probably will need more sophisticated id generation in future
return "{}.{} -> {}.{}".format(
edge.source, edge.source_port, edge.target, edge.target_port
)
def clear_cached(self):
        # clear old cached info if starting at the top level transform
# TODO: look into ways to remove the need to have a cache like this
# NOTE: this is caused by a series of side effects
logger.debug("Clearing cached elk info")
self._nodes: Dict[Hashable, ElkNode] = {}
self._ports: Dict[Tuple[Hashable, Hashable], ElkPort] = {}
self._visible_edges, self._hidden_edges = self.collect_edges()
self.closest_common_visible.cache_clear()
self.closest_visible.cache_clear()
def transform(self, root=None):
"""Generate ELK dictionary structure
:param root: [description], defaults to None
:type root: [type], optional
:return: [description]
:rtype: [type]
"""
try:
g, tree = self.source
if root is None:
self.clear_cached()
elif is_hidden(tree, root, self.HIDDEN_ATTR):
                # bail if the node is hidden
return None
nodes = self._nodes
ports = self._ports
base_layout = self.base_layout
if base_layout is None:
base_layout = {}
layout = {}
# TODO: refactor this so you can specify node-specific layouts
# NOTE: add traitlet for it, and get based on node passed
layout.update(base_layout)
properties = None
labels = self.make_labels(root)
model_id = self.eid(root)
self._nodes[root] = ElkNode(
id=model_id,
labels=labels,
layoutOptions=layout,
children=compact(self.get_children(root)),
properties=properties,
)
if root is None:
# the top level of the transform
port_style = ["slack-port"]
edge_style = ["slack-edge"]
nodes, ports = self.process_edges(nodes, ports, self._visible_edges)
nodes, ports = self.process_edges(
nodes, ports, self._hidden_edges, edge_style, port_style
)
for (owner, _), port in ports.items():
node = nodes[owner]
if node.ports is None:
node.ports = []
node.ports += [port]
nodes = self.size_nodes(nodes)
except Exception as E:
logger.error("Error transforming elk graph")
raise E
return nodes[root] # top level node
def size_nodes(self, nodes: Dict[Hashable, ElkNode]) -> Dict[Hashable, ElkNode]:
for node in nodes.values():
node.width, node.height = self.get_node_size(node)
return nodes
def process_edges(
self, nodes, ports, edges: EdgeMap, edge_style=None, port_style=None
):
for owner, edge_list in edges.items():
for edge in edge_list:
node = nodes[owner]
if node.edges is None:
node.edges = []
node.edges += [self.make_edge(edge, edge_style)]
source_var = (edge.source, edge.source_port)
if source_var not in ports:
ports[source_var] = self.make_port(
edge.source, edge.source_port, port_style
)
target_var = (edge.target, edge.target_port)
if target_var not in ports:
ports[target_var] = self.make_port(
edge.target, edge.target_port, port_style
)
return nodes, ports
def make_edge(
self, edge: Edge, styles: Optional[List[str]] = None
) -> ElkExtendedEdge:
properties = None
if styles:
properties = dict(cssClasses=" ".join(styles))
return ElkExtendedEdge(
id=self.edge_id(edge),
sources=[self.port_id(edge.source, edge.source_port)],
targets=[self.port_id(edge.target, edge.target_port)],
properties=properties,
)
def make_port(self, owner, port, styles):
properties = None
if styles:
properties = dict(cssClasses=" ".join(styles))
return ElkPort(
id=self.port_id(owner, port),
height=0.5 * self.port_scale,
width=0.5 * self.port_scale,
properties=properties,
)
def get_children(self, node) -> Optional[List[ElkNode]]:
g, tree = self.source
attr = self.HIDDEN_ATTR
if node is None:
if tree is None:
# Nonhierarchical graph. Iterate over only the main graph
return [self.transform(root=node) for node in g.nodes()]
else:
# Hierarchical graph but no specified root...
# start transforming from each root in the forest
return [self.transform(root=node) for node in get_roots(tree, g)]
else:
if is_hidden(tree, node, attr):
# Node is not Visible
return None
if tree is not None:
# Node is visible and in the hierarchy
if node in tree:
return [
self.transform(root=child) for child in tree.neighbors(node)
]
return None
def get_node_size(self, node: ElkNode) -> Tuple[Optional[float], Optional[float]]:
height = 0
if node.ports:
height = (
1.25 * self.port_scale * len(node.ports)
) # max(len(ins), len(outs)) # max number of ports
height = max(18, height)
if node.labels:
width = (
self.text_scale * max(len(label.text or " ") for label in node.labels)
+ self.label_offset
)
else:
width = self.text_scale
return width, height
def make_labels(self, node) -> Optional[List[ElkLabel]]:
if node is None:
return None
g, tree = self.source
data = g.nodes[node]
name = data.get(self.label_key, data.get("_id", f"{node}"))
width = self.text_scale * len(name)
return [
ElkLabel(
id=f"{name}_label_{node}",
text=name,
width=width,
x=self.label_offset,
y=self.label_offset,
)
]
def collect_edges(self) -> Tuple[EdgeMap, EdgeMap]:
"""[summary]
:return: [description]
:rtype: Tuple[
Dict[Hashable, List[ElkExtendedEdge]],
Dict[Hashable, List[ElkExtendedEdge]]
]
"""
visible: EdgeMap = defaultdict(
list
        )  # will index edges by nx.lowest_common_ancestor
hidden: EdgeMap = defaultdict(
list
        )  # will index edges by nx.lowest_common_ancestor
g, tree = self.source
factors = self.extract_factors()
def merge(
update: Dict[Hashable, List], base: Dict[Hashable, List]
) -> Dict[Hashable, List]:
for key, value in update.items():
base[key].extend(value)
return base
try:
while True:
sources, targets = next(factors)
visible = merge(self.process_endpts(sources, targets), visible)
except StopIteration as e:
hidden_factors: List[Tuple[List, List]] = e.value
for sources, targets in hidden_factors:
hidden = merge(self.process_endpts(sources, targets), hidden)
return visible, hidden
def to_dict(self) -> Dict:
"""Transform the NetworkX graphs into Elk json"""
return self.transform().to_dict()
def extract_factors(
self,
) -> Generator[Tuple[List, List], None, List[Tuple[List, List]]]:
g, tree = self.source
attr = self.HIDDEN_ATTR
hidden: List[Tuple[List, List]] = []
for source_vars, target_vars in get_factors(g):
shidden = [is_hidden(tree, var[0], attr) for var in source_vars]
thidden = [is_hidden(tree, var[0], attr) for var in target_vars]
sources = source_vars
targets = target_vars
try:
vis_source = self.closest_common_visible((s for s, sp in source_vars))
vis_target = self.closest_common_visible((t for t, tp in target_vars))
except ValueError:
continue # bail if no possible target or source
if any(shidden) or any(thidden):
if vis_source == vis_target:
# bail if factor is completely internal
continue
# trim hidden...
sources = list(keep(source_vars, invert(shidden)))
targets = list(keep(target_vars, invert(thidden)))
if all(shidden) or all(thidden):
if len(sources) == 0:
sources = [(vis_source, v) for v in source_vars]
if len(targets) == 0:
target_vars.sort()
targets = [(vis_target, v) for v in target_vars]
# [tuple(source_vars), tuple(target_vars)] = (
# vis_source,
# vis_target
# )
hidden.append((sources, targets))
continue
yield sources, targets
return hidden
def process_endpts(self, sources, targets) -> Dict[Hashable, List[Edge]]:
g, tree = self.source
edge_dict: Dict[Hashable, List[Edge]] = defaultdict(list)
for s, sp in sources:
for t, tp in targets:
owner = self.closest_common_visible((s, t))
edge_dict[owner].append(
Edge(source=s, source_port=sp, target=t, target_port=tp)
)
return edge_dict
@lru_cache()
def closest_visible(self, node: Hashable):
"""Crawl through the given NetworkX `tree` looking for an ancestor of
`node` that is not hidden
        :param node: node to identify a visible ancestor for
        :type node: Hashable
        :raises ValueError: if no visible ancestor can be found
        :return: the closest visible ancestor, or None if `node` is not in the tree
        :rtype: Hashable
        """
attr = self.HIDDEN_ATTR
g, tree = self.source
if node not in tree:
return None
if not is_hidden(tree, node, attr):
return node
predecesors = list(tree.predecessors(node))
assert (
len(predecesors) <= 1
), f"Expected only a single parent for `{node}` not {len(predecesors)}"
for pred in tree.predecessors(node):
return self.closest_visible(pred)
raise ValueError(f"Unable to find visible ancestor for `{node}`")
@lru_cache()
def closest_common_visible(self, nodes: Tuple[Hashable]) -> Hashable:
g, tree = self.source
if tree is None:
return None
result = lowest_common_ancestor(tree, [self.closest_visible(n) for n in nodes])
return result
def is_hidden(tree: nx.DiGraph, node: Hashable, attr: str) -> bool:
"""Iterate on the node ancestors and determine if it is hidden along the chain"""
if tree is not None and node in tree:
if tree.nodes[node].get(attr, False):
return True
for ancestor in nx.ancestors(tree, node):
if tree.nodes[ancestor].get(attr, False):
return True
return False
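# Rough usage sketch (untested; assumes a flat NetworkX graph and no hierarchy tree):
#   import networkx as nx
#   g = nx.DiGraph()
#   g.add_node("a", label="A")
#   g.add_node("b", label="B")
#   elk_json = XELK(source=(g, None)).to_dict()  # ElkNode dict ready for ipyelk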
| 34.383459
| 87
| 0.557183
|
deedb64c1e7c05146b7c7fae0bc91b704bbf1952
| 4,338
|
py
|
Python
|
custom_components/edgeos/config_flow.py
|
kcleong/homeassistant-config
|
15b7bc75f5d1055d8620ced87eed9d563475296d
|
[
"MIT"
] | null | null | null |
custom_components/edgeos/config_flow.py
|
kcleong/homeassistant-config
|
15b7bc75f5d1055d8620ced87eed9d563475296d
|
[
"MIT"
] | null | null | null |
custom_components/edgeos/config_flow.py
|
kcleong/homeassistant-config
|
15b7bc75f5d1055d8620ced87eed9d563475296d
|
[
"MIT"
] | null | null | null |
"""Config flow to configure domain."""
import logging
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import callback
from .helpers.const import *
from .managers.config_flow_manager import ConfigFlowManager
from .models import AlreadyExistsError, LoginError
_LOGGER = logging.getLogger(__name__)
@config_entries.HANDLERS.register(DOMAIN)
class DomainFlowHandler(config_entries.ConfigFlow):
"""Handle a domain config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
super().__init__()
self._config_flow = ConfigFlowManager()
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return DomainOptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle a flow start."""
_LOGGER.debug(f"Starting async_step_user of {DEFAULT_NAME}")
errors = None
await self._config_flow.initialize(self.hass)
new_user_input = self._config_flow.clone_items(user_input)
if user_input is not None:
try:
await self._config_flow.update_data(user_input, CONFIG_FLOW_DATA)
title, data = self._config_flow.get_data_user_input()
return self.async_create_entry(title=title, data=data)
except LoginError as lex:
await self._config_flow.clear_credentials(new_user_input)
_LOGGER.warning(f"Cannot complete login")
errors = lex.errors
except AlreadyExistsError as aeex:
_LOGGER.warning(
f"{DEFAULT_NAME} with {ENTRY_PRIMARY_KEY}: {aeex.title} already exists"
)
errors = {"base": "already_configured"}
schema = await self._config_flow.get_default_data(new_user_input)
return self.async_show_form(
step_id="user",
data_schema=schema,
errors=errors,
description_placeholders=new_user_input,
)
async def async_step_import(self, info):
"""Import existing configuration."""
_LOGGER.debug(f"Starting async_step_import of {DEFAULT_NAME}")
title = f"{DEFAULT_NAME} (import from configuration.yaml)"
return self.async_create_entry(title=title, data=info)
class DomainOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle domain options."""
def __init__(self, config_entry: ConfigEntry):
"""Initialize domain options flow."""
super().__init__()
self._config_entry = config_entry
self._config_flow = ConfigFlowManager()
async def async_step_init(self, user_input=None):
"""Manage the domain options."""
return await self.async_step_edge_os_additional_settings(user_input)
async def async_step_edge_os_additional_settings(self, user_input=None):
_LOGGER.info(f"Starting additional settings step: {user_input}")
errors = None
await self._config_flow.initialize(self.hass, self._config_entry)
        if user_input is not None:
            try:
                await self._config_flow.update_options(
                    user_input, CONFIG_FLOW_OPTIONS
                )
                title, data = self._config_flow.get_options_user_input()
                return self.async_create_entry(title=title, data=data)
            except LoginError as lex:
                await self._config_flow.clear_credentials(user_input)
                _LOGGER.warning(f"Cannot complete login")
                errors = lex.errors
            except AlreadyExistsError as aeex:
                _LOGGER.warning(
                    f"{DEFAULT_NAME} with {ENTRY_PRIMARY_KEY}: {aeex.title} already exists"
                )
                errors = {"base": "already_configured"}
schema = self._config_flow.get_default_options()
return self.async_show_form(
step_id="edge_os_additional_settings",
data_schema=schema,
errors=errors,
description_placeholders=user_input,
)
| 32.133333
| 95
| 0.643154
|
a5eab347bc0be08e5379aa3bf43b7caa6416e898
| 19,248
|
py
|
Python
|
test_autoarray/structures/grids/two_d/test_grid_2d_interpolate.py
|
jonathanfrawley/PyAutoArray_copy
|
c21e8859bdb20737352147b9904797ac99985b73
|
[
"MIT"
] | null | null | null |
test_autoarray/structures/grids/two_d/test_grid_2d_interpolate.py
|
jonathanfrawley/PyAutoArray_copy
|
c21e8859bdb20737352147b9904797ac99985b73
|
[
"MIT"
] | null | null | null |
test_autoarray/structures/grids/two_d/test_grid_2d_interpolate.py
|
jonathanfrawley/PyAutoArray_copy
|
c21e8859bdb20737352147b9904797ac99985b73
|
[
"MIT"
] | null | null | null |
import numpy as np
import pytest
import autoarray as aa
from autoarray.mock.mock import ndarray_1d_from_grid, ndarray_2d_from_grid
class TestObj:
def test__blurring_grid_from_mask__compare_to_array_util(self):
mask = np.array(
[
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, False, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
]
)
mask = aa.Mask2D.manual(mask=mask, pixel_scales=(2.0, 2.0), sub_size=2)
blurring_mask_util = aa.util.mask_2d.blurring_mask_2d_from(
mask_2d=mask, kernel_shape_native=(3, 5)
)
blurring_grid_util = aa.util.grid_2d.grid_2d_slim_via_mask_from(
mask_2d=blurring_mask_util, pixel_scales=(2.0, 2.0), sub_size=1
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
blurring_grid = grid.blurring_grid_from_kernel_shape(kernel_shape_native=(3, 5))
assert isinstance(blurring_grid, aa.Grid2DInterpolate)
assert len(blurring_grid.shape) == 2
assert blurring_grid == pytest.approx(blurring_grid_util, 1e-4)
assert blurring_grid.pixel_scales == (2.0, 2.0)
assert blurring_grid.pixel_scales_interp == (0.1, 0.1)
def test__blurring_grid_from_kernel_shape__compare_to_array_util(self):
mask = np.array(
[
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, False, True, True, True, False, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, False, True, True, True, False, True, True],
[True, True, True, True, True, True, True, True, True],
[True, True, True, True, True, True, True, True, True],
]
)
mask = aa.Mask2D.manual(mask=mask, pixel_scales=(2.0, 2.0))
blurring_mask_util = aa.util.mask_2d.blurring_mask_2d_from(
mask_2d=mask, kernel_shape_native=(3, 5)
)
blurring_grid_util = aa.util.grid_2d.grid_2d_slim_via_mask_from(
mask_2d=blurring_mask_util, pixel_scales=(2.0, 2.0), sub_size=1
)
mask = aa.Mask2D.manual(mask=mask, pixel_scales=(2.0, 2.0))
blurring_grid = aa.Grid2DInterpolate.blurring_grid_from_mask_and_kernel_shape(
mask=mask, kernel_shape_native=(3, 5), pixel_scales_interp=0.1
)
assert isinstance(blurring_grid, aa.Grid2DInterpolate)
assert len(blurring_grid.shape) == 2
assert blurring_grid == pytest.approx(blurring_grid_util, 1e-4)
assert blurring_grid.pixel_scales == (2.0, 2.0)
assert blurring_grid.pixel_scales_interp == (0.1, 0.1)
def test__padded_grid_from_kernel_shape(self):
grid = aa.Grid2DInterpolate.uniform(
shape_native=(4, 4), pixel_scales=3.0, pixel_scales_interp=0.1
)
padded_grid = grid.padded_grid_from_kernel_shape(kernel_shape_native=(3, 3))
assert isinstance(padded_grid, aa.Grid2DInterpolate)
assert padded_grid.pixel_scales_interp == (0.1, 0.1)
mask = aa.Mask2D.unmasked(
shape_native=(6, 6), pixel_scales=(3.0, 3.0), sub_size=1
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
assert isinstance(padded_grid, aa.Grid2DInterpolate)
assert padded_grid.pixel_scales_interp == (0.1, 0.1)
assert (padded_grid.vtx == grid.vtx).all()
assert (padded_grid.wts == grid.wts).all()
mask = aa.Mask2D.manual(
mask=np.full((2, 5), False), pixel_scales=(8.0, 8.0), sub_size=4
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.2)
padded_grid = grid.padded_grid_from_kernel_shape(kernel_shape_native=(5, 5))
padded_grid_util = aa.util.grid_2d.grid_2d_slim_via_mask_from(
mask_2d=np.full((6, 9), False), pixel_scales=(8.0, 8.0), sub_size=4
)
assert isinstance(padded_grid, aa.Grid2DInterpolate)
assert padded_grid.pixel_scales_interp == (0.2, 0.2)
assert isinstance(padded_grid, aa.Grid2DInterpolate)
assert padded_grid.shape == (864, 2)
assert (padded_grid.mask == np.full(fill_value=False, shape=(6, 9))).all()
assert padded_grid == pytest.approx(padded_grid_util, 1e-4)
class TestInterpolatedResult:
def test__function_returns_binary_ndarray_1d__returns_interpolated_array(self):
# noinspection PyUnusedLocal
class MockInterpolateClass:
def func(self, profile, grid):
result = np.zeros(grid.shape[0])
result[0] = 1
return result
mask = aa.Mask2D.unmasked(
shape_native=(3, 3), pixel_scales=(1.0, 1.0), sub_size=1
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.5)
cls = MockInterpolateClass()
interp_array = grid.result_from_func(func=cls.func, cls=MockInterpolateClass())
assert isinstance(interp_array, aa.Array2D)
assert interp_array.ndim == 1
assert interp_array.shape == (9,)
assert (interp_array != np.array([[1, 0, 0, 0, 0, 0, 0, 0, 0]])).any()
    def test__function_is_false_in_config__does_not_use_interpolation(self):
# noinspection PyUnusedLocal
class MockInterpolateClass:
def func_off(self, profile, grid):
result = np.zeros(grid.shape[0])
result[0] = 1
return result
mask = aa.Mask2D.unmasked(
shape_native=(3, 3), pixel_scales=(1.0, 1.0), sub_size=1
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.5)
cls = MockInterpolateClass()
arr = grid.result_from_func(func=cls.func_off, cls=MockInterpolateClass())
assert isinstance(arr, aa.Array2D)
assert arr.ndim == 1
assert arr.shape == (9,)
assert (arr == np.array([[1, 0, 0, 0, 0, 0, 0, 0, 0]])).any()
def test__function_returns_binary_ndarray_2d__returns_interpolated_grid(self):
# noinspection PyUnusedLocal
class MockInterpolateClass:
def func(self, profile, grid):
result = np.zeros((grid.shape[0], 2))
result[0, :] = 1
return result
mask = aa.Mask2D.unmasked(
shape_native=(3, 3), pixel_scales=(1.0, 1.0), sub_size=1
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.5)
cls = MockInterpolateClass()
interp_grid = grid.result_from_func(func=cls.func, cls=MockInterpolateClass())
assert isinstance(interp_grid, aa.Grid2D)
assert interp_grid.ndim == 2
assert interp_grid.shape == (9, 2)
assert (
interp_grid
!= np.array(
np.array(
[
[1, 1],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
]
)
)
).any()
def test__function_returns_ndarray_1d__interpolation_used_and_accurate(self):
# noinspection PyUnusedLocal
class MockInterpolateObj:
def ndarray_1d_from_grid(self, profile, grid):
return ndarray_1d_from_grid(profile=profile, grid=grid)
cls = MockInterpolateObj()
mask = aa.Mask2D.circular_annular(
shape_native=(20, 20),
pixel_scales=(1.0, 1.0),
sub_size=1,
inner_radius=3.0,
outer_radius=8.0,
)
grid = aa.Grid2D.from_mask(mask=mask)
true_array = ndarray_1d_from_grid(profile=None, grid=grid)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=1.0)
interpolated_array = grid.result_from_func(
func=cls.ndarray_1d_from_grid, cls=MockInterpolateObj()
)
assert interpolated_array.shape[0] == mask.pixels_in_mask
assert (true_array == interpolated_array).all()
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
interpolated_array = grid.result_from_func(
func=cls.ndarray_1d_from_grid, cls=MockInterpolateObj()
)
assert interpolated_array.shape[0] == mask.pixels_in_mask
assert true_array[0] != interpolated_array[0]
assert np.max(true_array - interpolated_array) < 0.001
mask = aa.Mask2D.circular_annular(
shape_native=(28, 28),
pixel_scales=(1.0, 1.0),
sub_size=1,
inner_radius=3.0,
outer_radius=8.0,
centre=(3.0, 3.0),
)
grid = aa.Grid2D.from_mask(mask=mask)
true_array = ndarray_1d_from_grid(profile=None, grid=grid)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
interpolated_array = grid.result_from_func(
func=cls.ndarray_1d_from_grid, cls=MockInterpolateObj()
)
assert interpolated_array.shape[0] == mask.pixels_in_mask
assert true_array[0] != interpolated_array[0]
assert np.max(true_array - interpolated_array) < 0.001
def test__function_returns_ndarray_2d__interpolation_used_and_accurate(self):
# noinspection PyUnusedLocal
class MockInterpolateObj:
def ndarray_2d_from_grid(self, profile, grid):
return ndarray_2d_from_grid(profile=profile, grid=grid)
cls = MockInterpolateObj()
mask = aa.Mask2D.circular_annular(
shape_native=(20, 20),
pixel_scales=(1.0, 1.0),
sub_size=1,
inner_radius=3.0,
outer_radius=8.0,
)
grid = aa.Grid2D.from_mask(mask=mask)
true_grid = ndarray_2d_from_grid(profile=None, grid=grid)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=1.0)
interpolated_grid = grid.result_from_func(
func=cls.ndarray_2d_from_grid, cls=MockInterpolateObj()
)
assert interpolated_grid.shape[0] == mask.pixels_in_mask
assert interpolated_grid.shape[1] == 2
assert (true_grid == interpolated_grid).all()
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
interpolated_grid = grid.result_from_func(
func=cls.ndarray_2d_from_grid, cls=MockInterpolateObj()
)
assert interpolated_grid.shape[0] == mask.pixels_in_mask
assert interpolated_grid.shape[1] == 2
assert true_grid[0, 0] != interpolated_grid[0, 0]
assert np.max(true_grid[:, 0] - interpolated_grid[:, 0]) < 0.001
assert np.max(true_grid[:, 1] - interpolated_grid[:, 1]) < 0.001
mask = aa.Mask2D.circular_annular(
shape_native=(28, 28),
pixel_scales=(1.0, 1.0),
sub_size=1,
inner_radius=3.0,
outer_radius=8.0,
centre=(3.0, 3.0),
)
grid = aa.Grid2D.from_mask(mask=mask)
true_grid = ndarray_2d_from_grid(profile=None, grid=grid)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
interpolated_grid = grid.result_from_func(
func=cls.ndarray_2d_from_grid, cls=MockInterpolateObj()
)
assert interpolated_grid.shape[0] == mask.pixels_in_mask
assert interpolated_grid.shape[1] == 2
assert true_grid[0, 0] != interpolated_grid[0, 0]
assert np.max(true_grid[:, 0] - interpolated_grid[:, 0]) < 0.01
assert np.max(true_grid[:, 1] - interpolated_grid[:, 1]) < 0.01
class TestAPI:
def test__manual_slim(self):
grid = aa.Grid2DInterpolate.manual_slim(
grid=[[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]],
shape_native=(2, 2),
pixel_scales=1.0,
pixel_scales_interp=0.1,
origin=(0.0, 1.0),
)
assert type(grid) == aa.Grid2DInterpolate
assert type(grid.slim) == aa.Grid2DInterpolate
assert type(grid.native) == aa.Grid2DInterpolate
assert (
grid == np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]])
).all()
assert (
grid.native
== np.array([[[1.0, 2.0], [3.0, 4.0]], [[5.0, 6.0], [7.0, 8.0]]])
).all()
assert (
grid.slim == np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]])
).all()
assert grid.pixel_scales == (1.0, 1.0)
assert grid.pixel_scales_interp == (0.1, 0.1)
assert grid.origin == (0.0, 1.0)
grid = aa.Grid2DInterpolate.manual_slim(
grid=[[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]],
shape_native=(1, 1),
pixel_scales=1.0,
pixel_scales_interp=0.1,
sub_size=2,
origin=(0.0, 1.0),
)
assert type(grid) == aa.Grid2DInterpolate
assert type(grid.slim) == aa.Grid2DInterpolate
assert type(grid.native) == aa.Grid2DInterpolate
assert (
grid == np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]])
).all()
assert (
grid.native
== np.array([[[1.0, 2.0], [3.0, 4.0]], [[5.0, 6.0], [7.0, 8.0]]])
).all()
assert (
grid.slim == np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0], [7.0, 8.0]])
).all()
assert (grid.binned.native == np.array([[[4.0, 5.0]]])).all()
assert (grid.binned == np.array([[4.0, 5.0]])).all()
assert grid.pixel_scales == (1.0, 1.0)
assert grid.pixel_scales_interp == (0.1, 0.1)
assert grid.origin == (0.0, 1.0)
assert grid.sub_size == 2
def test__from_mask(self):
mask = np.array(
[
[True, True, False, False],
[True, False, True, True],
[True, True, False, False],
]
)
mask = aa.Mask2D.manual(mask=mask, pixel_scales=(2.0, 2.0), sub_size=1)
grid_via_util = aa.util.grid_2d.grid_2d_slim_via_mask_from(
mask_2d=mask, sub_size=1, pixel_scales=(2.0, 2.0)
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
assert type(grid) == aa.Grid2DInterpolate
assert type(grid.slim) == aa.Grid2DInterpolate
assert type(grid.native) == aa.Grid2DInterpolate
assert grid == pytest.approx(grid_via_util, 1e-4)
assert grid.pixel_scales_interp == (0.1, 0.1)
assert grid.sub_size == 1
grid_via_util = aa.util.grid_2d.grid_2d_via_mask_from(
mask_2d=mask, sub_size=1, pixel_scales=(2.0, 2.0)
)
mask = np.array(
[
[True, True, False, False],
[True, False, True, True],
[True, True, False, False],
]
)
mask = aa.Mask2D.manual(mask=mask, pixel_scales=(2.0, 2.0), sub_size=2)
grid_via_util = aa.util.grid_2d.grid_2d_slim_via_mask_from(
mask_2d=mask, sub_size=2, pixel_scales=(2.0, 2.0)
)
grid = aa.Grid2DInterpolate.from_mask(mask=mask, pixel_scales_interp=0.1)
assert type(grid) == aa.Grid2DInterpolate
assert type(grid.slim) == aa.Grid2DInterpolate
assert type(grid.native) == aa.Grid2DInterpolate
assert grid == pytest.approx(grid_via_util, 1e-4)
assert grid.pixel_scales_interp == (0.1, 0.1)
assert grid.sub_size == 2
def test__uniform(self):
grid = aa.Grid2DInterpolate.uniform(
shape_native=(2, 2), pixel_scales=2.0, pixel_scales_interp=0.1
)
assert type(grid) == aa.Grid2DInterpolate
assert type(grid.slim) == aa.Grid2DInterpolate
assert type(grid.native) == aa.Grid2DInterpolate
assert (
grid == np.array([[1.0, -1.0], [1.0, 1.0], [-1.0, -1.0], [-1.0, 1.0]])
).all()
assert (
grid.native
== np.array([[[1.0, -1.0], [1.0, 1.0]], [[-1.0, -1.0], [-1.0, 1.0]]])
).all()
assert (
grid.slim == np.array([[1.0, -1.0], [1.0, 1.0], [-1.0, -1.0], [-1.0, 1.0]])
).all()
assert grid.pixel_scales == (2.0, 2.0)
assert grid.pixel_scales_interp == (0.1, 0.1)
assert grid.origin == (0.0, 0.0)
grid = aa.Grid2DInterpolate.uniform(
shape_native=(2, 1), pixel_scales=1.0, pixel_scales_interp=0.2, sub_size=2
)
assert type(grid) == aa.Grid2DInterpolate
assert type(grid.slim) == aa.Grid2DInterpolate
assert type(grid.native) == aa.Grid2DInterpolate
assert (
grid.native
== np.array(
[
[[0.75, -0.25], [0.75, 0.25]],
[[0.25, -0.25], [0.25, 0.25]],
[[-0.25, -0.25], [-0.25, 0.25]],
[[-0.75, -0.25], [-0.75, 0.25]],
]
)
).all()
assert (
grid.slim
== np.array(
[
[0.75, -0.25],
[0.75, 0.25],
[0.25, -0.25],
[0.25, 0.25],
[-0.25, -0.25],
[-0.25, 0.25],
[-0.75, -0.25],
[-0.75, 0.25],
]
)
).all()
assert (grid.binned.native == np.array([[[0.5, 0.0]], [[-0.5, 0.0]]])).all()
assert (grid.binned == np.array([[0.5, 0.0], [-0.5, 0.0]])).all()
assert grid.pixel_scales == (1.0, 1.0)
assert grid.pixel_scales_interp == (0.2, 0.2)
assert grid.origin == (0.0, 0.0)
assert grid.sub_size == 2
| 37.230174
| 89
| 0.548576
|
14de8eea666ea2840e4e454c63203933119a4913
| 4,566
|
py
|
Python
|
examples/BenchmarkingExample.py
|
nolanstr/bingo_multi_stage
|
7a88c4f5c59268d0612664be5864765db2edad51
|
[
"Apache-2.0"
] | null | null | null |
examples/BenchmarkingExample.py
|
nolanstr/bingo_multi_stage
|
7a88c4f5c59268d0612664be5864765db2edad51
|
[
"Apache-2.0"
] | null | null | null |
examples/BenchmarkingExample.py
|
nolanstr/bingo_multi_stage
|
7a88c4f5c59268d0612664be5864765db2edad51
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from bingo.symbolic_regression.benchmarking.benchmark_suite \
import BenchmarkSuite
from bingo.symbolic_regression.benchmarking.benchmark_test \
import BenchmarkTest
from bingo.symbolic_regression import ComponentGenerator, \
AGraphGenerator, \
AGraphCrossover, \
AGraphMutation, \
ExplicitRegression
from bingo.local_optimizers.continuous_local_opt \
import ContinuousLocalOptimization
from bingo.evaluation.evaluation import Evaluation
from bingo.evolutionary_algorithms.age_fitness import AgeFitnessEA
from bingo.evolutionary_algorithms.generalized_crowding \
import GeneralizedCrowdingEA
from bingo.evolutionary_optimizers.island import Island
def training_function(training_data, ea_choice):
component_generator = \
ComponentGenerator(input_x_dimension=training_data.x.shape[1])
component_generator.add_operator("+")
component_generator.add_operator("-")
component_generator.add_operator("*")
agraph_generator = AGraphGenerator(agraph_size=32,
component_generator=component_generator)
crossover = AGraphCrossover()
mutation = AGraphMutation(component_generator)
fitness = ExplicitRegression(training_data=training_data)
local_opt_fitness = ContinuousLocalOptimization(fitness, algorithm='lm')
evaluator = Evaluation(local_opt_fitness)
POPULATION_SIZE = 64
MUTATION_PROBABILITY = 0.1
CROSSOVER_PROBABILITY = 0.7
if ea_choice == "age_fitness":
ea = AgeFitnessEA(evaluator, agraph_generator, crossover, mutation,
MUTATION_PROBABILITY, CROSSOVER_PROBABILITY,
POPULATION_SIZE)
else:
ea = GeneralizedCrowdingEA(evaluator, crossover, mutation,
MUTATION_PROBABILITY,
CROSSOVER_PROBABILITY)
island = Island(ea, agraph_generator, POPULATION_SIZE)
opt_result = island.evolve_until_convergence(max_generations=MAX_GENERATIONS,
fitness_threshold=1e-6)
return island.get_best_individual(), opt_result
def scoring_function(equation, scoring_data, opt_result):
mae_function = ExplicitRegression(training_data=scoring_data)
mae = mae_function(equation)
return mae, opt_result.success
def parse_results(train_results, test_results):
train_array = np.array(train_results)
test_array = np.array(test_results)
mae_train = np.mean(train_array, axis=1)[:, 0]
mae_test = np.mean(test_array, axis=1)[:, 0]
success_rate = np.mean(train_array, axis=1)[:, 1]
return mae_train, mae_test, success_rate
def print_results(title, af_res, dc_res, bench_names):
print("\n----------::", title, "::-------------")
titles = "".join(["{:^10}".format(name) for name in bench_names])
print(" " + titles)
af_scores = "".join(["{:^10.2e}".format(score) for score in af_res])
print("age-fitness " + af_scores)
dc_scores = "".join(["{:^10.2e}".format(score) for score in dc_res])
print("det. crowding " + dc_scores)
def run_benchmark_comparison():
suite = BenchmarkSuite(inclusive_terms=["Nguyen"])
age_fitness_strategy = \
BenchmarkTest(lambda x: training_function(x, "age_fitness"),
scoring_function)
deterministic_crowding_strategy = \
BenchmarkTest(lambda x: training_function(x, "deterministic_crowding"),
scoring_function)
train_scores_af, test_scores_af = \
suite.run_benchmark_test(age_fitness_strategy, repeats=NUM_REPEATS)
train_scores_dc, test_scores_dc = \
suite.run_benchmark_test(deterministic_crowding_strategy,
repeats=NUM_REPEATS)
mae_train_af, mae_test_af, success_rate_af = \
parse_results(train_scores_af, test_scores_af)
mae_train_dc, mae_test_dc, success_rate_dc = \
parse_results(train_scores_dc, test_scores_dc)
benchmark_names = [benchmark.name for benchmark in suite]
print_results("MAE (Train)", mae_train_af, mae_train_dc, benchmark_names)
print_results("MAE (Test)", mae_test_af, mae_test_dc, benchmark_names)
print_results("Success Rate", success_rate_af, success_rate_dc,
benchmark_names)
if __name__ == "__main__":
MAX_GENERATIONS = 200
NUM_REPEATS = 2
run_benchmark_comparison()
| 40.052632
| 81
| 0.680464
|
1d0f2bd744f5e95ec82f44dde28d058d147b667b
| 53
|
py
|
Python
|
python/television/registry.py
|
pztrick/django-television
|
5857200a0871702052827a5d603db7d60bca1406
|
[
"MIT"
] | 1
|
2018-07-16T16:21:44.000Z
|
2018-07-16T16:21:44.000Z
|
python/television/registry.py
|
pztrick/django-television
|
5857200a0871702052827a5d603db7d60bca1406
|
[
"MIT"
] | 4
|
2019-12-22T11:20:56.000Z
|
2021-06-10T19:38:46.000Z
|
python/television/registry.py
|
pztrick/django-television
|
5857200a0871702052827a5d603db7d60bca1406
|
[
"MIT"
] | null | null | null |
LISTENERS = {}
DEMULTIPLEXERS = []
EXTRA_ROUTES = []
| 13.25
| 19
| 0.660377
|