| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable ⌀) |
|---|---|---|---|---|
kklmn/xrt
|
refs/heads/master
|
examples/withRaycing/03_LaueMono/__init__.py
|
1
|
# -*- coding: utf-8 -*-
r"""
Laue Monochromator
------------------
Bending of a single crystal Laue Monochromator
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Files in ``\examples\withRaycing\03_LaueMono``
This example shows the reflectivity of a bent 200-µm-thick Si111 Laue crystal
at various bending radii and energies. Watch how the bandwidth grows and
the flux drops as the bending radius decreases.
+----------------+----------------+----------------+----------------+
| *E* = 9 keV | *E* = 16 keV | *E* = 25 keV | *E* = 36 keV |
+================+================+================+================+
| |E09| | |E16| | |E25| | |E36| |
+----------------+----------------+----------------+----------------+
.. |E09| animation:: _images/BentLaueSCM09keV
.. |E16| animation:: _images/BentLaueSCM16keV
.. |E25| animation:: _images/BentLaueSCM25keV
:loc: upper-right-corner
.. |E36| animation:: _images/BentLaueSCM36keV
:loc: upper-right-corner
Double bent-crystal Laue monochromator (beam cleaner)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This example shows the beam images and rocking curves of a bent 200-µm-thick
Si111 double Laue crystal monochromator (similar to the *beam cleaner*
[Karanfil2004]_) at various bending radii and energies.
.. [Karanfil2004] C. Karanfil, D. Chapman, C. U. Segre and G. Bunker, *A device
for selecting and rejecting X-ray harmonics in synchrotron radiation beams*,
J. Synchrotron Rad. **11** (2004) 393-8.
Beam images at various detuning angles of the second crystal, *R* = 25 m, *E* ~
9 keV. Watch how the energy band splits and the flux drops as the second
crystal is detuned away from the parallel position (*dθ* = 0).
.. animation:: _images/BentLaueDCM09keV
Flux vs. detuning angle of the second crystal (rocking curves)
+----------------+----------------+----------------+----------------+
| *E* = 9 keV | *E* = 16 keV | *E* = 25 keV | *E* = 36 keV |
+================+================+================+================+
| |rc09| | |rc16| | |rc25| | |rc36| |
+----------------+----------------+----------------+----------------+
.. |rc09| animation:: _images/BentLaueDCM_rc09keV
.. |rc16| animation:: _images/BentLaueDCM_rc16keV
.. |rc25| animation:: _images/BentLaueDCM_rc25keV
:loc: upper-right-corner
.. |rc36| animation:: _images/BentLaueDCM_rc36keV
:loc: upper-right-corner
"""
pass
|
nzavagli/UnrealPy
|
refs/heads/master
|
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/beautifulsoup4-4.3.2/bs4/builder/_htmlparser.py
|
412
|
"""Use the HTMLParser library to parse HTML files that aren't too bad."""
__all__ = [
'HTMLParserTreeBuilder',
]
from HTMLParser import (
HTMLParser,
HTMLParseError,
)
import sys
import warnings
# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
# argument, which we'd like to set to False. Unfortunately,
# http://bugs.python.org/issue13273 makes strict=True a better bet
# before Python 3.2.3.
#
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
major, minor, release = sys.version_info[:3]
CONSTRUCTOR_TAKES_STRICT = (
major > 3
or (major == 3 and minor > 2)
or (major == 3 and minor == 2 and release >= 3))
from bs4.element import (
CData,
Comment,
Declaration,
Doctype,
ProcessingInstruction,
)
from bs4.dammit import EntitySubstitution, UnicodeDammit
from bs4.builder import (
HTML,
HTMLTreeBuilder,
STRICT,
)
HTMLPARSER = 'html.parser'
class BeautifulSoupHTMLParser(HTMLParser):
def handle_starttag(self, name, attrs):
# XXX namespace
attr_dict = {}
for key, value in attrs:
# Change None attribute values to the empty string
# for consistency with the other tree builders.
if value is None:
value = ''
attr_dict[key] = value
attrvalue = '""'
self.soup.handle_starttag(name, None, None, attr_dict)
def handle_endtag(self, name):
self.soup.handle_endtag(name)
def handle_data(self, data):
self.soup.handle_data(data)
def handle_charref(self, name):
# XXX workaround for a bug in HTMLParser. Remove this once
# it's fixed.
if name.startswith('x'):
real_name = int(name.lstrip('x'), 16)
elif name.startswith('X'):
real_name = int(name.lstrip('X'), 16)
else:
real_name = int(name)
try:
data = unichr(real_name)
except (ValueError, OverflowError), e:
data = u"\N{REPLACEMENT CHARACTER}"
self.handle_data(data)
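# Informal behavior sketch (hypothetical inputs, Python 2 semantics):
#   handle_charref('x41')  ->  handle_data(u'A')   # hex reference &#x41;
#   handle_charref('65')   ->  handle_data(u'A')   # decimal reference &#65;
# Values that unichr() rejects fall back to U+FFFD REPLACEMENT CHARACTER.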
def handle_entityref(self, name):
character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
if character is not None:
data = character
else:
data = "&%s;" % name
self.handle_data(data)
def handle_comment(self, data):
self.soup.endData()
self.soup.handle_data(data)
self.soup.endData(Comment)
def handle_decl(self, data):
self.soup.endData()
if data.startswith("DOCTYPE "):
data = data[len("DOCTYPE "):]
elif data == 'DOCTYPE':
# i.e. "<!DOCTYPE>"
data = ''
self.soup.handle_data(data)
self.soup.endData(Doctype)
def unknown_decl(self, data):
if data.upper().startswith('CDATA['):
cls = CData
data = data[len('CDATA['):]
else:
cls = Declaration
self.soup.endData()
self.soup.handle_data(data)
self.soup.endData(cls)
def handle_pi(self, data):
self.soup.endData()
if data.endswith("?") and data.lower().startswith("xml"):
# "An XHTML processing instruction using the trailing '?'
# will cause the '?' to be included in data." - HTMLParser
# docs.
#
# Strip the question mark so we don't end up with two
# question marks.
data = data[:-1]
self.soup.handle_data(data)
self.soup.endData(ProcessingInstruction)
class HTMLParserTreeBuilder(HTMLTreeBuilder):
is_xml = False
features = [HTML, STRICT, HTMLPARSER]
def __init__(self, *args, **kwargs):
if CONSTRUCTOR_TAKES_STRICT:
kwargs['strict'] = False
self.parser_args = (args, kwargs)
def prepare_markup(self, markup, user_specified_encoding=None,
document_declared_encoding=None):
"""
:return: A 4-tuple (markup, original encoding, encoding
declared within markup, whether any characters had to be
replaced with REPLACEMENT CHARACTER).
"""
if isinstance(markup, unicode):
yield (markup, None, None, False)
return
try_encodings = [user_specified_encoding, document_declared_encoding]
dammit = UnicodeDammit(markup, try_encodings, is_html=True)
yield (dammit.markup, dammit.original_encoding,
dammit.declared_html_encoding,
dammit.contains_replacement_characters)
def feed(self, markup):
args, kwargs = self.parser_args
parser = BeautifulSoupHTMLParser(*args, **kwargs)
parser.soup = self.soup
try:
parser.feed(markup)
except HTMLParseError, e:
warnings.warn(RuntimeWarning(
"Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
raise e
# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
# string.
#
# XXX This code can be removed once most Python 3 users are on 3.2.3.
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
import re
attrfind_tolerant = re.compile(
r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant
locatestarttagend = re.compile(r"""
<[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
(?:\s+ # whitespace before attribute name
(?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
(?:\s*=\s* # value indicator
(?:'[^']*' # LITA-enclosed value
|\"[^\"]*\" # LIT-enclosed value
|[^'\">\s]+ # bare value
)
)?
)
)*
\s* # trailing whitespace
""", re.VERBOSE)
BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend
from html.parser import tagfind, attrfind
def parse_starttag(self, i):
self.__starttag_text = None
endpos = self.check_for_whole_start_tag(i)
if endpos < 0:
return endpos
rawdata = self.rawdata
self.__starttag_text = rawdata[i:endpos]
# Now parse the data between i+1 and j into a tag and attrs
attrs = []
match = tagfind.match(rawdata, i+1)
assert match, 'unexpected call to parse_starttag()'
k = match.end()
self.lasttag = tag = rawdata[i+1:k].lower()
while k < endpos:
if self.strict:
m = attrfind.match(rawdata, k)
else:
m = attrfind_tolerant.match(rawdata, k)
if not m:
break
attrname, rest, attrvalue = m.group(1, 2, 3)
if not rest:
attrvalue = None
elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
attrvalue[:1] == '"' == attrvalue[-1:]:
attrvalue = attrvalue[1:-1]
if attrvalue:
attrvalue = self.unescape(attrvalue)
attrs.append((attrname.lower(), attrvalue))
k = m.end()
end = rawdata[k:endpos].strip()
if end not in (">", "/>"):
lineno, offset = self.getpos()
if "\n" in self.__starttag_text:
lineno = lineno + self.__starttag_text.count("\n")
offset = len(self.__starttag_text) \
- self.__starttag_text.rfind("\n")
else:
offset = offset + len(self.__starttag_text)
if self.strict:
self.error("junk characters in start tag: %r"
% (rawdata[k:endpos][:20],))
self.handle_data(rawdata[i:endpos])
return endpos
if end.endswith('/>'):
# XHTML-style empty tag: <span attr="value" />
self.handle_startendtag(tag, attrs)
else:
self.handle_starttag(tag, attrs)
if tag in self.CDATA_CONTENT_ELEMENTS:
self.set_cdata_mode(tag)
return endpos
def set_cdata_mode(self, elem):
self.cdata_elem = elem.lower()
self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
BeautifulSoupHTMLParser.parse_starttag = parse_starttag
BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode
CONSTRUCTOR_TAKES_STRICT = True
|
mythmon/kitsune
|
refs/heads/master
|
tests/pages/desktop/login_page.py
|
6
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from pages.desktop.base import Base
from selenium.webdriver.common.by import By
class LoginPage(Base):
"""
Form for login.
"""
URL_TEMPLATE = '{locale}/users/auth'
_page_title = 'Log In | Mozilla Support'
_username_box_locator = (By.ID, 'id_username')
_password_box_locator = (By.ID, 'id_password')
_log_in_button_locator = (By.CSS_SELECTOR, "button[data-name='login']")
_login_error_locator = (By.CSS_SELECTOR, 'ul.errorlist > li')
# if the user is logged in, these elements are visible
_logged_in_as_div_locator = (By.CSS_SELECTOR, 'div#mod-login_box > div')
_logged_in_text = 'Logged in as'
def log_in(self, username, password):
self.selenium.find_element(*self._username_box_locator).send_keys(username)
self.selenium.find_element(*self._password_box_locator).send_keys(password)
self.selenium.find_element(*self._log_in_button_locator).click()
if not self.header.is_user_logged_in:
error = self.selenium.find_element(*self._login_error_locator).text
error = "login failed for %s\n" % username + error
raise AssertionError(error)
|
chongdashu/pidayjam2015
|
refs/heads/master
|
cgi-bin/pypi-roulette.py
|
1
|
import math
import random
from random import shuffle
pi_string = str(math.pi)
pi_numbers = [int(s) for s in pi_string if s.isdigit()]
def roulette_one(feature):
numbers = pi_numbers[:]
n = random.sample(numbers, 1)[0]
print "%s %s" %(n,feature)
def roulette_all(features):
f = random.sample(features, len(features))
# use the loop position directly; .index(n) would return the first
# occurrence of a repeated digit, not the current one
for i, n in enumerate(pi_numbers):
if (i >= len(features)):
break
print "%s %s" %(n, f[i])
def main():
print "Trying to roulette one."
roulette_one("lives")
print "Trying to roulette list."
roulette_all(["lives", "enemies", "rounds", "npcs"])
if __name__ == "__main__":
main()
|
nesterione/scikit-learn
|
refs/heads/master
|
sklearn/decomposition/base.py
|
313
|
"""Principal Component Analysis Base Classes"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis A. Engemann <d.engemann@fz-juelich.de>
# Kyle Kastner <kastnerkyle@gmail.com>
#
# License: BSD 3 clause
import numpy as np
from scipy import linalg
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_array
from ..utils.extmath import fast_dot
from ..utils.validation import check_is_fitted
from ..externals import six
from abc import ABCMeta, abstractmethod
class _BasePCA(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)):
"""Base class for PCA methods.
Warning: This class should not be used directly.
Use derived classes instead.
"""
def get_covariance(self):
"""Compute data covariance with the generative model.
``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
where S**2 contains the explained variances, and sigma2 contains the
noise variances.
Returns
-------
cov : array, shape=(n_features, n_features)
Estimated covariance of data.
"""
components_ = self.components_
exp_var = self.explained_variance_
if self.whiten:
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
cov = np.dot(components_.T * exp_var_diff, components_)
cov.flat[::len(cov) + 1] += self.noise_variance_ # modify diag inplace
return cov
def get_precision(self):
"""Compute data precision matrix with the generative model.
Equals the inverse of the covariance but computed with
the matrix inversion lemma for efficiency.
Returns
-------
precision : array, shape=(n_features, n_features)
Estimated precision of data.
"""
n_features = self.components_.shape[1]
# handle corner cases first
if self.n_components_ == 0:
return np.eye(n_features) / self.noise_variance_
if self.n_components_ == n_features:
return linalg.inv(self.get_covariance())
# Get precision using matrix inversion lemma
components_ = self.components_
exp_var = self.explained_variance_
if self.whiten:
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
precision = np.dot(components_, components_.T) / self.noise_variance_
precision.flat[::len(precision) + 1] += 1. / exp_var_diff
precision = np.dot(components_.T,
np.dot(linalg.inv(precision), components_))
precision /= -(self.noise_variance_ ** 2)
precision.flat[::len(precision) + 1] += 1. / self.noise_variance_
return precision
@abstractmethod
def fit(X, y=None):
"""Placeholder for fit. Subclasses should implement this method!
Fit the model with X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
def transform(self, X, y=None):
"""Apply dimensionality reduction to X.
X is projected on the first principal components previously extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import IncrementalPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> ipca = IncrementalPCA(n_components=2, batch_size=3)
>>> ipca.fit(X)
IncrementalPCA(batch_size=3, copy=True, n_components=2, whiten=False)
>>> ipca.transform(X) # doctest: +SKIP
"""
check_is_fitted(self, ['mean_', 'components_'], all_or_any=all)
X = check_array(X)
if self.mean_ is not None:
X = X - self.mean_
X_transformed = fast_dot(X, self.components_.T)
if self.whiten:
X_transformed /= np.sqrt(self.explained_variance_)
return X_transformed
def inverse_transform(self, X, y=None):
"""Transform data back to its original space.
In other words, return an input X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform will compute the
exact inverse operation, which includes reversing whitening.
"""
if self.whiten:
return fast_dot(X, np.sqrt(self.explained_variance_[:, np.newaxis]) *
self.components_) + self.mean_
else:
return fast_dot(X, self.components_) + self.mean_
|
scalable-networks/ext
|
refs/heads/master
|
gnuradio-3.7.0.1/gr-filter/examples/gr_filtdes_live_upd.py
|
11
|
#!/usr/bin/env python
#
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio.filter import filter_design
from gnuradio import gr, filter
from gnuradio import blocks
import sys
try:
from gnuradio import qtgui
from PyQt4 import QtGui, QtCore
import sip
except ImportError:
sys.stderr.write("Error: Program requires PyQt4 and gr-qtgui.\n")
sys.exit(1)
try:
from gnuradio import analog
except ImportError:
sys.stderr.write("Error: Program requires gr-analog.\n")
sys.exit(1)
try:
from gnuradio import blocks
except ImportError:
sys.stderr.write("Error: Program requires gr-blocks.\n")
sys.exit(1)
try:
from gnuradio import channels
except ImportError:
sys.stderr.write("Error: Program requires gr-channels.\n")
sys.exit(1)
class my_top_block(gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
Rs = 8000
f1 = 1000
f2 = 2000
npts = 2048
self.qapp = QtGui.QApplication(sys.argv)
self.filt_taps = [1,]
src1 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f1, 0.1, 0)
src2 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f2, 0.1, 0)
src = blocks.add_cc()
channel = channels.channel_model(0.01)
self.filt = filter.fft_filter_ccc(1, self.filt_taps)
thr = blocks.throttle(gr.sizeof_gr_complex, 100*npts)
self.snk1 = qtgui.freq_sink_c(npts, filter.firdes.WIN_BLACKMAN_hARRIS,
0, Rs,
"Complex Freq Example", 1)
self.connect(src1, (src,0))
self.connect(src2, (src,1))
self.connect(src, channel, thr, self.filt, (self.snk1, 0))
# Get the reference pointer to the SpectrumDisplayForm QWidget
pyQt = self.snk1.pyqwidget()
# Wrap the pointer as a PyQt SIP object
# This can now be manipulated as a PyQt4.QtGui.QWidget
pyWin = sip.wrapinstance(pyQt, QtGui.QWidget)
pyWin.show()
def update_filter(self, filtobj):
print "Filter type:", filtobj.get_restype()
print "Filter params", filtobj.get_params()
self.filt.set_taps(filtobj.get_taps())
if __name__ == "__main__":
tb = my_top_block();
tb.start()
mw = filter_design.launch(sys.argv, tb.update_filter)
mw.show()
tb.qapp.exec_()
tb.stop()
|
fnp/wolnelektury
|
refs/heads/master
|
src/push/migrations/0001_initial.py
|
1
|
# This file is part of Wolnelektury, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information.
#
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Notification',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('title', models.CharField(max_length=256)),
('body', models.CharField(max_length=2048)),
('image_url', models.URLField()),
('message_id', models.CharField(max_length=2048)),
],
),
]
|
wfxiang08/django185
|
refs/heads/master
|
django/contrib/postgres/operations.py
|
111
|
from django.contrib.postgres.signals import register_hstore_handler
from django.db.migrations.operations.base import Operation
class CreateExtension(Operation):
reversible = True
def __init__(self, name):
self.name = name
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute("CREATE EXTENSION IF NOT EXISTS %s" % self.name)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute("DROP EXTENSION %s" % self.name)
def describe(self):
return "Creates extension %s" % self.name
class HStoreExtension(CreateExtension):
def __init__(self):
self.name = 'hstore'
def database_forwards(self, app_label, schema_editor, from_state, to_state):
super(HStoreExtension, self).database_forwards(app_label, schema_editor, from_state, to_state)
# Register hstore straight away as it cannot be done before the
# extension is installed; a subsequent data migration would use the
# same connection.
register_hstore_handler(schema_editor.connection)
class UnaccentExtension(CreateExtension):
def __init__(self):
self.name = 'unaccent'
|
jkbradley/spark
|
refs/heads/master
|
examples/src/main/python/mllib/latent_dirichlet_allocation_example.py
|
128
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
from pyspark import SparkContext
# $example on$
from pyspark.mllib.clustering import LDA, LDAModel
from pyspark.mllib.linalg import Vectors
# $example off$
if __name__ == "__main__":
sc = SparkContext(appName="LatentDirichletAllocationExample") # SparkContext
# $example on$
# Load and parse the data
data = sc.textFile("data/mllib/sample_lda_data.txt")
parsedData = data.map(lambda line: Vectors.dense([float(x) for x in line.strip().split(' ')]))
# Index documents with unique IDs
corpus = parsedData.zipWithIndex().map(lambda x: [x[1], x[0]]).cache()
# Cluster the documents into three topics using LDA
ldaModel = LDA.train(corpus, k=3)
# Output topics. Each is a distribution over words (matching word count vectors)
print("Learned topics (as distributions over vocab of " + str(ldaModel.vocabSize())
+ " words):")
topics = ldaModel.topicsMatrix()
for topic in range(3):
print("Topic " + str(topic) + ":")
for word in range(0, ldaModel.vocabSize()):
print(" " + str(topics[word][topic]))
# Save and load model
ldaModel.save(sc, "target/org/apache/spark/PythonLatentDirichletAllocationExample/LDAModel")
sameModel = LDAModel\
.load(sc, "target/org/apache/spark/PythonLatentDirichletAllocationExample/LDAModel")
# $example off$
sc.stop()
|
mkmelin/bedrock
|
refs/heads/master
|
tests/pages/firefox/channel/ios.py
|
8
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from pages.firefox.base import FirefoxBasePage
class ChannelIOSPage(FirefoxBasePage):
URL_TEMPLATE = '/{locale}/firefox/channel/ios'
_testflight_button_locator = (By.CLASS_NAME, 'testflight-cta')
@property
def is_testflight_button_displayed(self):
return self.is_element_displayed(*self._testflight_button_locator)
|
hazrpg/calibre
|
refs/heads/master
|
src/calibre/gui2/preferences/template_functions.py
|
14
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import json, traceback
from PyQt5.Qt import QDialogButtonBox
from calibre.gui2 import error_dialog, warning_dialog
from calibre.gui2.preferences import ConfigWidgetBase, test_widget
from calibre.gui2.preferences.template_functions_ui import Ui_Form
from calibre.gui2.widgets import PythonHighlighter
from calibre.utils.formatter_functions import (formatter_functions,
compile_user_function, load_user_template_functions)
class ConfigWidget(ConfigWidgetBase, Ui_Form):
def genesis(self, gui):
self.gui = gui
self.db = gui.library_view.model().db
help_text = _('''
<p>Here you can add and remove functions used in template processing. A
template function is written in python. It takes information from the
book, processes it in some way, then returns a string result. Functions
defined here are usable in templates in the same way that builtin
functions are usable. The function must be named <b>evaluate</b>, and
must have the signature shown below.</p>
<p><code>evaluate(self, formatter, kwargs, mi, locals, your parameters)
→ returning a unicode string</code></p>
<p>The parameters of the evaluate function are:
<ul>
<li><b>formatter</b>: the instance of the formatter being used to
evaluate the current template. You can use this to do recursive
template evaluation.</li>
<li><b>kwargs</b>: a dictionary of metadata. Field values are in this
dictionary.</li>
<li><b>mi</b>: a Metadata instance. Used to get field information.
This parameter can be None in some cases, such as when evaluating
non-book templates.</li>
<li><b>locals</b>: the local variables assigned to by the current
template program.</li>
<li><b>your parameters</b>: You must supply one or more formal
parameters. The number must match the arg count box, unless arg count is
-1 (variable number of arguments), in which case the last argument must
be *args. At least one argument is required, and is usually the value of
the field being operated upon. Note that when writing in basic template
mode, the user does not provide this first argument. Instead it is
supplied by the formatter.</li>
</ul></p>
<p>
The following example function checks the value of the field. If the
field is not empty, the field's value is returned, otherwise the value
EMPTY is returned.
<pre>
name: my_ifempty
arg count: 1
doc: my_ifempty(val) -- return val if it is not empty, otherwise the string 'EMPTY'
program code:
def evaluate(self, formatter, kwargs, mi, locals, val):
if val:
return val
else:
return 'EMPTY'</pre>
This function can be called in any of the three template program modes:
<ul>
<li>single-function mode: {tags:my_ifempty()}</li>
<li>template program mode: {tags:'my_ifempty($)'}</li>
<li>general program mode: program: my_ifempty(field('tags'))</li>
</ul></p>
''')
self.textBrowser.setHtml(help_text)
def initialize(self):
try:
self.builtin_source_dict = json.loads(P('template-functions.json', data=True,
allow_user_override=False).decode('utf-8'))
except:
traceback.print_exc()
self.builtin_source_dict = {}
self.funcs = formatter_functions().get_functions()
self.builtins = formatter_functions().get_builtins_and_aliases()
self.build_function_names_box()
self.function_name.currentIndexChanged[str].connect(self.function_index_changed)
self.function_name.editTextChanged.connect(self.function_name_edited)
self.argument_count.valueChanged.connect(self.enable_replace_button)
self.documentation.textChanged.connect(self.enable_replace_button)
self.program.textChanged.connect(self.enable_replace_button)
self.create_button.clicked.connect(self.create_button_clicked)
self.delete_button.clicked.connect(self.delete_button_clicked)
self.create_button.setEnabled(False)
self.delete_button.setEnabled(False)
self.replace_button.setEnabled(False)
self.clear_button.clicked.connect(self.clear_button_clicked)
self.replace_button.clicked.connect(self.replace_button_clicked)
self.program.setTabStopWidth(20)
self.highlighter = PythonHighlighter(self.program.document())
def enable_replace_button(self):
self.replace_button.setEnabled(self.delete_button.isEnabled())
def clear_button_clicked(self):
self.build_function_names_box()
self.program.clear()
self.documentation.clear()
self.argument_count.clear()
self.create_button.setEnabled(False)
self.delete_button.setEnabled(False)
def build_function_names_box(self, scroll_to='', set_to=''):
self.function_name.blockSignals(True)
func_names = sorted(self.funcs)
self.function_name.clear()
self.function_name.addItem('')
self.function_name.addItems(func_names)
self.function_name.setCurrentIndex(0)
if set_to:
self.function_name.setEditText(set_to)
self.create_button.setEnabled(True)
self.function_name.blockSignals(False)
if scroll_to:
idx = self.function_name.findText(scroll_to)
if idx >= 0:
self.function_name.setCurrentIndex(idx)
if scroll_to not in self.builtins:
self.delete_button.setEnabled(True)
def delete_button_clicked(self):
name = unicode(self.function_name.currentText())
if name in self.builtins:
error_dialog(self.gui, _('Template functions'),
_('You cannot delete a built-in function'), show=True)
if name in self.funcs:
del self.funcs[name]
self.changed_signal.emit()
self.create_button.setEnabled(True)
self.delete_button.setEnabled(False)
self.build_function_names_box(set_to=name)
self.program.setReadOnly(False)
else:
error_dialog(self.gui, _('Template functions'),
_('Function not defined'), show=True)
def create_button_clicked(self):
self.changed_signal.emit()
name = unicode(self.function_name.currentText())
if name in self.funcs:
error_dialog(self.gui, _('Template functions'),
_('Name %s already used')%(name,), show=True)
return
if self.argument_count.value() == 0:
box = warning_dialog(self.gui, _('Template functions'),
_('Argument count should be -1 or greater than zero. '
'Setting it to zero means that this function cannot '
'be used in single function mode.'), det_msg = '',
show=False)
box.bb.setStandardButtons(box.bb.standardButtons() | QDialogButtonBox.Cancel)
box.det_msg_toggle.setVisible(False)
if not box.exec_():
return
try:
prog = unicode(self.program.toPlainText())
cls = compile_user_function(name, unicode(self.documentation.toPlainText()),
self.argument_count.value(), prog)
self.funcs[name] = cls
self.build_function_names_box(scroll_to=name)
except:
error_dialog(self.gui, _('Template functions'),
_('Exception while compiling function'), show=True,
det_msg=traceback.format_exc())
def function_name_edited(self, txt):
self.documentation.setReadOnly(False)
self.argument_count.setReadOnly(False)
self.create_button.setEnabled(True)
self.replace_button.setEnabled(False)
self.program.setReadOnly(False)
def function_index_changed(self, txt):
txt = unicode(txt)
self.create_button.setEnabled(False)
if not txt:
self.argument_count.clear()
self.documentation.clear()
self.documentation.setReadOnly(False)
self.argument_count.setReadOnly(False)
return
func = self.funcs[txt]
self.argument_count.setValue(func.arg_count)
self.documentation.setText(func.doc)
if txt in self.builtins:
if hasattr(func, 'program_text') and func.program_text:
self.program.setPlainText(func.program_text)
elif txt in self.builtin_source_dict:
self.program.setPlainText(self.builtin_source_dict[txt])
else:
self.program.setPlainText(_('function source code not available'))
self.documentation.setReadOnly(True)
self.argument_count.setReadOnly(True)
self.program.setReadOnly(True)
self.delete_button.setEnabled(False)
else:
self.program.setPlainText(func.program_text)
self.delete_button.setEnabled(True)
self.program.setReadOnly(False)
self.replace_button.setEnabled(False)
def replace_button_clicked(self):
self.delete_button_clicked()
self.create_button_clicked()
def refresh_gui(self, gui):
pass
def commit(self):
# formatter_functions().reset_to_builtins()
pref_value = []
for name, cls in self.funcs.iteritems():
if name not in self.builtins:
pref_value.append((cls.name, cls.doc, cls.arg_count, cls.program_text))
self.db.new_api.set_pref('user_template_functions', pref_value)
load_user_template_functions(self.db.library_id, pref_value)
return False
if __name__ == '__main__':
from PyQt5.Qt import QApplication
app = QApplication([])
test_widget('Advanced', 'TemplateFunctions')
|
aidear/zfbui
|
refs/heads/master
|
public/assets/tools/ant/bin/runant.py
|
124
|
#!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
runant.py
This script is a translation of the runant.pl written by Steve Loughran.
It runs ant with or without arguments; it should be quite portable thanks to
the Python os library.
This script has been tested with Python 2.0 on Windows 2000.
created: 2001-04-11
author: Pierre Dittgen pierre.dittgen@criltelecom.com
Assumptions:
- the "java" executable/script is on the command path
"""
import os, os.path, string, sys
# Change it to 1 to get extra debug information
debug = 0
#######################################################################
# If ANT_HOME is not set default to script's parent directory
if os.environ.has_key('ANT_HOME'):
ANT_HOME = os.environ['ANT_HOME']
else:
ANT_HOME = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
# set ANT_LIB location
ANT_LIB = os.path.join(ANT_HOME, 'lib')
# set JAVACMD (check variables JAVACMD and JAVA_HOME)
JAVACMD = None
if not os.environ.has_key('JAVACMD'):
if os.environ.has_key('JAVA_HOME'):
if not os.path.exists(os.environ['JAVA_HOME']):
print "Warning: JAVA_HOME is not defined correctly."
else:
JAVACMD = os.path.join(os.environ['JAVA_HOME'], 'bin', 'java')
else:
print "Warning: JAVA_HOME not set."
else:
JAVACMD = os.environ['JAVACMD']
if not JAVACMD:
JAVACMD = 'java'
launcher_jar = os.path.join(ANT_LIB, 'ant-launcher.jar')
if not os.path.exists(launcher_jar):
print 'Warning: Unable to locate ant-launcher.jar. Expected to find it in %s' % \
ANT_LIB
# Build up standard classpath (LOCALCLASSPATH)
LOCALCLASSPATH = launcher_jar
if os.environ.has_key('LOCALCLASSPATH'):
LOCALCLASSPATH += os.pathsep + os.environ['LOCALCLASSPATH']
ANT_OPTS = ""
if os.environ.has_key('ANT_OPTS'):
ANT_OPTS = os.environ['ANT_OPTS']
OPTS = ""
if os.environ.has_key('JIKESPATH'):
OPTS = '-Djikes.class.path=\"%s\"' % os.environ['JIKESPATH']
ANT_ARGS = ""
if os.environ.has_key('ANT_ARGS'):
ANT_ARGS = os.environ['ANT_ARGS']
CLASSPATH = ""
if os.environ.has_key('CLASSPATH'):
CLASSPATH = "-lib " + os.environ['CLASSPATH']
# Builds the commandline
cmdline = ('%s %s -classpath %s -Dant.home=%s %s ' + \
'org.apache.tools.ant.launch.Launcher %s %s %s') \
% (JAVACMD, ANT_OPTS, LOCALCLASSPATH, ANT_HOME, OPTS, ANT_ARGS, \
CLASSPATH, string.join(sys.argv[1:], ' '))
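# Illustrative only (hypothetical paths and arguments): the assembled command
# resembles
#   java -Xmx256m -classpath /opt/ant/lib/ant-launcher.jar -Dant.home=/opt/ant \
#        org.apache.tools.ant.launch.Launcher -lib /extra/classes compile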
if debug:
print '\n%s\n\n' % (cmdline)
sys.stdout.flush()
# Run the biniou!
os.system(cmdline)
|
andrei4ka/fuel-web-redhat
|
refs/heads/master
|
fuel_agent/fuel_agent/utils/partition_utils.py
|
1
|
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from fuel_agent import errors
from fuel_agent.openstack.common import log as logging
from fuel_agent.utils import utils
LOG = logging.getLogger(__name__)
def parse_partition_info(output):
lines = output.split('\n')
generic_params = lines[1].rstrip(';').split(':')
generic = {
'dev': generic_params[0],
'size': utils.parse_unit(generic_params[1], 'MiB'),
'logical_block': int(generic_params[3]),
'physical_block': int(generic_params[4]),
'table': generic_params[5],
'model': generic_params[6]
}
parts = []
for line in lines[2:]:
line = line.strip().rstrip(';')
if not line:
continue
part_params = line.split(':')
parts.append({
'num': int(part_params[0]),
'begin': utils.parse_unit(part_params[1], 'MiB'),
'end': utils.parse_unit(part_params[2], 'MiB'),
'size': utils.parse_unit(part_params[3], 'MiB'),
'fstype': part_params[4] or None
})
return {'generic': generic, 'parts': parts}
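# Illustrative 'parted -s <dev> -m unit MiB print free' output that this parser
# expects (values are hypothetical):
#   BYT;
#   /dev/sda:476940MiB:scsi:512:4096:gpt:ATA SomeDisk;
#   1:1.00MiB:201MiB:200MiB:fat32::boot;
#   1:201MiB:476940MiB:476739MiB:free;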
def info(dev):
output = utils.execute('parted', '-s', dev, '-m',
'unit', 'MiB',
'print', 'free',
check_exit_code=[0, 1])[0]
LOG.debug('Info output: \n%s' % output)
result = parse_partition_info(output)
LOG.debug('Info result: %s' % result)
return result
def wipe(dev):
# making an empty new table is equivalent to wiping the old one
LOG.debug('Wiping partition table on %s (we assume it is equal '
'to creating a new one)' % dev)
make_label(dev)
def make_label(dev, label='gpt'):
"""Creates partition label on a device.
:param dev: A device file, e.g. /dev/sda.
:param label: Partition label type 'gpt' or 'msdos'. Optional.
:returns: None
"""
LOG.debug('Trying to create %s partition table on device %s' %
(label, dev))
if label not in ('gpt', 'msdos'):
raise errors.WrongPartitionLabelError(
'Wrong partition label type: %s' % label)
out, err = utils.execute('parted', '-s', dev, 'mklabel', label,
check_exit_code=[0, 1])
LOG.debug('Parted output: \n%s' % out)
reread_partitions(dev, out=out)
def set_partition_flag(dev, num, flag, state='on'):
"""Sets flag on a partition
:param dev: A device file, e.g. /dev/sda.
:param num: Partition number
:param flag: Flag name. Must be one of 'bios_grub', 'legacy_boot',
'boot', 'raid', 'lvm'
:param state: Desired flag state: 'on' or 'off'. Default is 'on'.
:returns: None
"""
LOG.debug('Trying to set partition flag: dev=%s num=%s flag=%s state=%s' %
(dev, num, flag, state))
# parted supports more flags but we are interested in
# setting only this subset of them.
# not all of these flags are compatible with one another.
if flag not in ('bios_grub', 'legacy_boot', 'boot', 'raid', 'lvm'):
raise errors.WrongPartitionSchemeError(
'Unsupported partition flag: %s' % flag)
if state not in ('on', 'off'):
raise errors.WrongPartitionSchemeError(
'Wrong partition flag state: %s' % state)
out, err = utils.execute('parted', '-s', dev, 'set', str(num),
flag, state, check_exit_code=[0, 1])
LOG.debug('Parted output: \n%s' % out)
reread_partitions(dev, out=out)
def set_gpt_type(dev, num, type_guid):
"""Sets guid on a partition.
:param dev: A device file, e.g. /dev/sda.
:param num: Partition number
:param type_guid: Partition type guid. Must be one of those listed
on this page http://en.wikipedia.org/wiki/GUID_Partition_Table.
This method does not check whether type_guid is valid or not.
:returns: None
"""
# TODO(kozhukalov): check whether type_guid is valid
LOG.debug('Setting partition GUID: dev=%s num=%s guid=%s' %
(dev, num, type_guid))
utils.execute('sgdisk', '--typecode=%s:%s' % (num, type_guid),
dev, check_exit_code=[0])
def make_partition(dev, begin, end, ptype):
LOG.debug('Trying to create a partition: dev=%s begin=%s end=%s' %
(dev, begin, end))
if ptype not in ('primary', 'logical'):
raise errors.WrongPartitionSchemeError(
'Wrong partition type: %s' % ptype)
# check begin >= end
if begin >= end:
raise errors.WrongPartitionSchemeError(
'Wrong boundaries: begin >= end')
# check if begin and end are inside one of free spaces available
if not any(x['fstype'] == 'free' and begin >= x['begin'] and
end <= x['end'] for x in info(dev)['parts']):
raise errors.WrongPartitionSchemeError(
'Invalid boundaries: begin and end '
'are not inside available free space')
out, err = utils.execute(
'parted', '-a', 'optimal', '-s', dev, 'unit', 'MiB',
'mkpart', ptype, str(begin), str(end), check_exit_code=[0, 1])
LOG.debug('Parted output: \n%s' % out)
reread_partitions(dev, out=out)
def remove_partition(dev, num):
LOG.debug('Trying to remove partition: dev=%s num=%s' % (dev, num))
if not any(x['fstype'] != 'free' and x['num'] == num
for x in info(dev)['parts']):
raise errors.PartitionNotFoundError('Partition %s not found' % num)
out, err = utils.execute('parted', '-s', dev, 'rm',
str(num), check_exit_code=[0])
reread_partitions(dev, out=out)
def reread_partitions(dev, out='Device or resource busy', timeout=30):
# This method exists because old versions of parted use
# ioctl(fd, BLKRRPART, NULL) to tell Linux to re-read partitions, and that
# system call sometimes fails, so we try to re-read the partition table
# several times. Besides, partprobe uses BLKPG instead, which is better than
# BLKRRPART for this case: BLKRRPART tells Linux to re-read partitions, while
# BLKPG tells Linux which partitions are available and is usually used as a
# fallback system call.
begin = time.time()
while 'Device or resource busy' in out:
if time.time() > begin + timeout:
raise errors.BaseError('Unable to re-read partition table on '
'device %s' % dev)
LOG.debug('Last time output contained "Device or resource busy". '
'Trying to re-read partition table on device %s' % dev)
out, err = utils.execute('partprobe', dev, check_exit_code=[0, 1])
LOG.debug('Partprobe output: \n%s' % out)
pout, perr = utils.execute('partx', '-a', dev, check_exit_code=[0, 1])
LOG.debug('Partx output: \n%s' % pout)
time.sleep(1)
|
googleapis/python-assured-workloads
|
refs/heads/master
|
scripts/fixup_assuredworkloads_v1beta1_keywords.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
predicate: Callable[[Any], bool],
iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
"""A stable, out-of-place partition."""
results = ([], [])
for i in iterator:
results[int(predicate(i))].append(i)
# Returns trueList, falseList
return results[1], results[0]
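# For example (hypothetical call): partition(lambda x: x > 0, [1, -2, 3])
# returns ([1, 3], [-2]): elements matching the predicate first.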
class assuredworkloadsCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
'create_workload': ('parent', 'workload', 'external_id', ),
'delete_workload': ('name', 'etag', ),
'get_workload': ('name', ),
'list_workloads': ('parent', 'page_size', 'page_token', 'filter', ),
'update_workload': ('workload', 'update_mask', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
try:
key = original.func.attr.value
kword_params = self.METHOD_TO_PARAMS[key]
except (AttributeError, KeyError):
# Either not a method from the API or too convoluted to be sure.
return updated
# If the existing code is valid, keyword args come after positional args.
# Therefore, all positional args must map to the first parameters.
args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
if any(k.keyword.value == "request" for k in kwargs):
# We've already fixed this file, don't fix it again.
return updated
kwargs, ctrl_kwargs = partition(
lambda a: not a.keyword.value in self.CTRL_PARAMS,
kwargs
)
args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
request_arg = cst.Arg(
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
# those could have been passed positionally or by keyword.
for name, arg in zip(kword_params, args + kwargs)]),
keyword=cst.Name("request")
)
return updated.with_changes(
args=[request_arg] + ctrl_kwargs
)
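# Illustrative rewrite this transformer performs (hypothetical snippet):
#   before: client.get_workload('projects/p/locations/l/workloads/w', retry=retry)
#   after:  client.get_workload(request={'name': 'projects/p/locations/l/workloads/w'},
#                               retry=retry)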
def fix_files(
in_dir: pathlib.Path,
out_dir: pathlib.Path,
*,
transformer=assuredworkloadsCallTransformer(),
):
"""Duplicate the input dir to the output dir, fixing file method calls.
Preconditions:
* in_dir is a real directory
* out_dir is a real, empty directory
"""
pyfile_gen = (
pathlib.Path(os.path.join(root, f))
for root, _, files in os.walk(in_dir)
for f in files if os.path.splitext(f)[1] == ".py"
)
for fpath in pyfile_gen:
with open(fpath, 'r') as f:
src = f.read()
# Parse the code and insert method call fixes.
tree = cst.parse_module(src)
updated = tree.visit(transformer)
# Create the path and directory structure for the new file.
updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
updated_path.parent.mkdir(parents=True, exist_ok=True)
# Generate the updated source file at the corresponding path.
with open(updated_path, 'w') as f:
f.write(updated.code)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""Fix up source that uses the assuredworkloads client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
parser.add_argument(
'-d',
'--input-directory',
required=True,
dest='input_dir',
help='the input directory to walk for python files to fix up',
)
parser.add_argument(
'-o',
'--output-directory',
required=True,
dest='output_dir',
help='the directory to output files fixed via un-flattening',
)
args = parser.parse_args()
input_dir = pathlib.Path(args.input_dir)
output_dir = pathlib.Path(args.output_dir)
if not input_dir.is_dir():
print(
f"input directory '{input_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if not output_dir.is_dir():
print(
f"output directory '{output_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if os.listdir(output_dir):
print(
f"output directory '{output_dir}' is not empty",
file=sys.stderr,
)
sys.exit(-1)
fix_files(input_dir, output_dir)
|
jola5/aptly
|
refs/heads/master
|
system/t04_mirror/show.py
|
11
|
from lib import BaseTest
import re
class ShowMirror1Test(BaseTest):
"""
show mirror: regular mirror
"""
fixtureCmds = ["aptly mirror create --ignore-signatures mirror1 http://mirror.yandex.ru/debian/ wheezy"]
runCmd = "aptly mirror show mirror1"
class ShowMirror2Test(BaseTest):
"""
show mirror: missing mirror
"""
runCmd = "aptly mirror show mirror-xx"
expectedCode = 1
class ShowMirror3Test(BaseTest):
"""
show mirror: regular mirror with packages
"""
fixtureDB = True
runCmd = "aptly mirror show --with-packages wheezy-contrib"
outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:+A-Za-z -]+\n", "", s)
class ShowMirror4Test(BaseTest):
"""
show mirror: mirror with filter
"""
fixtureCmds = [
"aptly mirror create -ignore-signatures -filter='nginx | Priority (required)' -filter-with-deps=true mirror4 http://security.debian.org/ wheezy/updates main"
]
runCmd = "aptly mirror show mirror4"
outputMatchPrepare = lambda _, s: re.sub(r"(Date|Valid-Until): [,0-9:+A-Za-z -]+\n", "", s)
|
lozadaOmr/ansible-admin
|
refs/heads/development
|
src/ansible/migrations/0008_playbook_directory.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-16 14:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ansible', '0007_auto_20170516_1410'),
]
operations = [
migrations.AddField(
model_name='playbook',
name='directory',
field=models.CharField(default=b'dir', editable=False, max_length=200),
),
]
|
rosmo/ansible
|
refs/heads/devel
|
test/units/modules/network/onyx/onyx_module.py
|
52
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestOnyxModule(ModuleTestCase):
def execute_module(self, failed=False, changed=False, commands=None, is_updates=False, sort=True, transport='cli'):
self.load_fixtures(commands, transport=transport)
if failed:
result = self.failed()
self.assertTrue(result['failed'], result)
else:
result = self.changed(changed)
self.assertEqual(result['changed'], changed, result)
if commands is not None:
if is_updates:
commands_res = result.get('updates')
else:
commands_res = result.get('commands')
if sort:
self.assertEqual(sorted(commands), sorted(commands_res), commands_res)
else:
self.assertEqual(commands, commands_res, commands_res)
return result
def failed(self):
with self.assertRaises(AnsibleFailJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'], result)
return result
def changed(self, changed=False):
with self.assertRaises(AnsibleExitJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], changed, result)
return result
def load_fixtures(self, commands=None, transport='cli'):
pass
|
zengenti/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/atomic/atomic_image.py
|
25
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION='''
---
module: atomic_image
short_description: Manage the container images on the atomic host platform
description:
- Manage the container images on the atomic host platform
- Allows executing commands on the container images
version_added: "2.2"
author: "Saravanan KR @krsacme"
notes:
- Host should support the C(atomic) command
requirements:
- atomic
- "python >= 2.6"
options:
name:
description:
- Name of the container image
required: True
default: null
state:
description:
- The state of the container image.
- The state C(latest) will ensure the container image is upgraded to the latest version and will forcefully restart the container, if running.
required: False
choices: ["present", "absent", "latest"]
default: latest
started:
description:
- Start or Stop the container
required: False
choices: ["yes", "no"]
default: yes
'''
EXAMPLES = '''
# Execute the run command on rsyslog container image (atomic run rhel7/rsyslog)
- atomic_image:
name: rhel7/rsyslog
state: latest
'''
RETURN = '''
msg:
description: The command standard output
returned: always
type: string
sample: [u'Using default tag: latest ...']
'''
def do_upgrade(module, image):
args = ['atomic', 'update', '--force', image]
rc, out, err = module.run_command(args, check_rc=False)
if rc != 0: # something went wrong emit the msg
module.fail_json(rc=rc, msg=err)
elif 'Image is up to date' in out:
return False
return True
def core(module):
image = module.params['name']
state = module.params['state']
started = module.params['started']
is_upgraded = False
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
if state == 'present' or state == 'latest':
if state == 'latest':
is_upgraded = do_upgrade(module, image)
if started:
args = ['atomic', 'run', image]
else:
args = ['atomic', 'install', image]
elif state == 'absent':
args = ['atomic', 'uninstall', image]
out = {}
err = {}
rc = 0
rc, out, err = module.run_command(args, check_rc=False)
if rc < 0:
module.fail_json(rc=rc, msg=err)
elif rc == 1 and 'already present' in err:
module.exit_json(result=err, changed=is_upgraded)
elif started and 'Container is running' in out:
module.exit_json(result=out, changed=is_upgraded)
else:
module.exit_json(msg=out, changed=True)
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(default=None, required=True),
state = dict(default='latest', choices=['present', 'absent', 'latest']),
started = dict(default='yes', type='bool'),
),
)
# Verify that the platform supports atomic command
rc, out, err = module.run_command('atomic -v', check_rc=False)
if rc != 0:
module.fail_json(msg="Error in running atomic command", err=err)
try:
core(module)
except Exception as e:
module.fail_json(msg=str(e))
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
GEMScienceTools/rmtk
|
refs/heads/master
|
tests/vulnerability/tests_TO_BE_CHANGED/NSP/fragility_process/test_spo2ida_method.py
|
4
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
"""
Created on Thu May 29 11:29:32 2014
@author: chiaracasotto
"""
# Clear existing variables
def clearall():
all = [var for var in globals() if var[0] != "_"]
for var in all:
del globals()[var]
clearall()
import pandas as pd
import numpy as np
import os
import csv
from rmtk.vulnerability.NSP.spo2ida_based.get_spo2ida_parameters import get_spo2ida_parameters
from rmtk.vulnerability.NSP.spo2ida_based.spo2ida_allTfunction import spo2ida_allT
from rmtk.vulnerability.NSP.spo2ida_method import spo2ida
cd = os.getcwd()
pi = 3.141592653589793
# <codecell>
Gamma = [1.292]
T = [1.612]
Tav = [1.612]
Sa_ratios = 1
SPO=[[0.104435862,0.233993594,0.897709787,0.940278828, 2093.9,2093.9,91.43446494]];
dcroof = [[0.046144456, 0.107312842, 0.212491246, 0.473466774, 0.724976013, 0.940278828, 0.940278828]]
EDPlim = [[0.002, 0.005, 0.01, 0.02, 0.04, 0.06, 0.08],dcroof[0]]
with open(cd+'/inputs/EDPvec-RDvec.csv', 'rb') as f:
reader = csv.reader(f)
newlist = [row for row in reader]
EDPvec = [np.array([float(ele[0]) for ele in newlist]),[0, 1000]]
RDvec = [np.array([float(ele[1]) for ele in newlist]),[0, 1000]]
noBlg = 1
w = 1
dispersion = [np.repeat(0.,len(dcroof[0])), np.repeat(0.25,len(dcroof[0]))]
g=9.81
Tc=0.5
Td=1.8
mc = 2.24
r = 0.04
MC = 25
data = pd.DataFrame(columns=['Sa50-EdpRd0-beta0','bTSa50-EdpRd0-beta0'],index=np.arange(len(dcroof[0])))
# <codecell>
for k in range(0,len(EDPvec)):
for j in range(0,len(dispersion)):
bUthd = [dispersion[j]]*noBlg
[mc,a,ac,r,mf] = get_spo2ida_parameters(SPO[0], T[0], Gamma[0]) # Convert MDoF into SDoF
[idacm, idacr] = spo2ida_allT(mc,a,ac,r,mf,T[0],1,0,3,50,2,14) # apply SPO2IDA procedure
[SaT50,bTSa] = spo2ida(idacm, idacr, mf, T[0], Gamma[0], g, EDPlim[k], dcroof[0], EDPvec[k], RDvec[k], SPO[0], bUthd[0], MC)
data['Sa50-EdpRd'+str(k)+'-beta'+str(j)] = pd.DataFrame(SaT50)
data['bTSa50-EdpRd'+str(k)+'-beta'+str(j)] = pd.DataFrame(bTSa)
# <codecell>
data.to_csv('results/SPO2IDA_fragility.csv',header=True,index=True)
# <codecell>
|
clicksaswat/ns-3-dev
|
refs/heads/master
|
src/sixlowpan/bindings/modulegen__gcc_ILP32.py
|
38
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
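# Note (added): returning True from handle_error() tells pybindgen that the
# error has been handled, so code generation continues and the failing wrapper
# is skipped after the warning above instead of aborting the whole run.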
import sys
def module_init():
root_module = Module('ns.sixlowpan', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
module.add_class('NetDeviceContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration]
module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch [class]
module.add_class('SixLowPanDispatch')
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::Dispatch_e [enumeration]
module.add_enum('Dispatch_e', ['LOWPAN_NALP', 'LOWPAN_NALP_N', 'LOWPAN_IPv6', 'LOWPAN_HC1', 'LOWPAN_BC0', 'LOWPAN_IPHC', 'LOWPAN_IPHC_N', 'LOWPAN_MESH', 'LOWPAN_MESH_N', 'LOWPAN_FRAG1', 'LOWPAN_FRAG1_N', 'LOWPAN_FRAGN', 'LOWPAN_FRAGN_N', 'LOWPAN_UNSUPPORTED'], outer_class=root_module['ns3::SixLowPanDispatch'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::NhcDispatch_e [enumeration]
module.add_enum('NhcDispatch_e', ['LOWPAN_NHC', 'LOWPAN_NHC_N', 'LOWPAN_UDPNHC', 'LOWPAN_UDPNHC_N', 'LOWPAN_NHCUNSUPPORTED'], outer_class=root_module['ns3::SixLowPanDispatch'])
## sixlowpan-helper.h (module 'sixlowpan'): ns3::SixLowPanHelper [class]
module.add_class('SixLowPanHelper')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## nstime.h (module 'core'): ns3::TimeWithUnit [class]
module.add_class('TimeWithUnit', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class]
module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration]
module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class]
module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object'])
## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class]
module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFrag1 [class]
module.add_class('SixLowPanFrag1', parent=root_module['ns3::Header'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFragN [class]
module.add_class('SixLowPanFragN', parent=root_module['ns3::Header'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1 [class]
module.add_class('SixLowPanHc1', parent=root_module['ns3::Header'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1Addr_e [enumeration]
module.add_enum('LowPanHc1Addr_e', ['HC1_PIII', 'HC1_PIIC', 'HC1_PCII', 'HC1_PCIC'], outer_class=root_module['ns3::SixLowPanHc1'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1NextHeader_e [enumeration]
module.add_enum('LowPanHc1NextHeader_e', ['HC1_NC', 'HC1_UDP', 'HC1_ICMP', 'HC1_TCP'], outer_class=root_module['ns3::SixLowPanHc1'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc [class]
module.add_class('SixLowPanIphc', parent=root_module['ns3::Header'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::TrafficClassFlowLabel_e [enumeration]
module.add_enum('TrafficClassFlowLabel_e', ['TF_FULL', 'TF_DSCP_ELIDED', 'TF_FL_ELIDED', 'TF_ELIDED'], outer_class=root_module['ns3::SixLowPanIphc'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::Hlim_e [enumeration]
module.add_enum('Hlim_e', ['HLIM_INLINE', 'HLIM_COMPR_1', 'HLIM_COMPR_64', 'HLIM_COMPR_255'], outer_class=root_module['ns3::SixLowPanIphc'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::HeaderCompression_e [enumeration]
module.add_enum('HeaderCompression_e', ['HC_INLINE', 'HC_COMPR_64', 'HC_COMPR_16', 'HC_COMPR_0'], outer_class=root_module['ns3::SixLowPanIphc'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIpv6 [class]
module.add_class('SixLowPanIpv6', parent=root_module['ns3::Header'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension [class]
module.add_class('SixLowPanNhcExtension', parent=root_module['ns3::Header'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::Eid_e [enumeration]
module.add_enum('Eid_e', ['EID_HOPBYHOP_OPTIONS_H', 'EID_ROUTING_H', 'EID_FRAGMENTATION_H', 'EID_DESTINATION_OPTIONS_H', 'EID_MOBILITY_H', 'EID_IPv6_H'], outer_class=root_module['ns3::SixLowPanNhcExtension'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension [class]
module.add_class('SixLowPanUdpNhcExtension', parent=root_module['ns3::Header'])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::Ports_e [enumeration]
module.add_enum('Ports_e', ['PORTS_INLINE', 'PORTS_ALL_SRC_LAST_DST', 'PORTS_LAST_SRC_ALL_DST', 'PORTS_LAST_SRC_LAST_DST'], outer_class=root_module['ns3::SixLowPanUdpNhcExtension'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class]
module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class]
module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class]
module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class]
module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class]
module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class]
module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class]
module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class]
module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class]
module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class]
module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class]
module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class]
module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class]
module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class]
module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::SixLowPanNetDevice [class]
module.add_class('SixLowPanNetDevice', parent=root_module['ns3::NetDevice'])
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::SixLowPanNetDevice::DropReason [enumeration]
module.add_enum('DropReason', ['DROP_FRAGMENT_TIMEOUT', 'DROP_FRAGMENT_BUFFER_FULL', 'DROP_UNKNOWN_EXTENSION'], outer_class=root_module['ns3::SixLowPanNetDevice'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_Hash(module):
root_module = module.get_root()
## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')
## Register a nested module for the namespace Function
nested_module = module.add_cpp_namespace('Function')
register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
root_module = module.get_root()
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3SixLowPanDispatch_methods(root_module, root_module['ns3::SixLowPanDispatch'])
register_Ns3SixLowPanHelper_methods(root_module, root_module['ns3::SixLowPanHelper'])
register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
register_Ns3Header_methods(root_module, root_module['ns3::Header'])
register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3SixLowPanFrag1_methods(root_module, root_module['ns3::SixLowPanFrag1'])
register_Ns3SixLowPanFragN_methods(root_module, root_module['ns3::SixLowPanFragN'])
register_Ns3SixLowPanHc1_methods(root_module, root_module['ns3::SixLowPanHc1'])
register_Ns3SixLowPanIphc_methods(root_module, root_module['ns3::SixLowPanIphc'])
register_Ns3SixLowPanIpv6_methods(root_module, root_module['ns3::SixLowPanIpv6'])
register_Ns3SixLowPanNhcExtension_methods(root_module, root_module['ns3::SixLowPanNhcExtension'])
register_Ns3SixLowPanUdpNhcExtension_methods(root_module, root_module['ns3::SixLowPanUdpNhcExtension'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
register_Ns3SixLowPanNetDevice_methods(root_module, root_module['ns3::SixLowPanNetDevice'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
return
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3Buffer_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
cls.add_constructor([param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
cls.add_method('AddAtEnd',
'bool',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
cls.add_method('AddAtStart',
'bool',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
cls.add_method('Begin',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Buffer',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
cls.add_method('CreateFullCopy',
'ns3::Buffer',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
cls.add_method('End',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
cls.add_method('GetCurrentEndOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
cls.add_method('GetCurrentStartOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3BufferIterator_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
cls.add_method('GetDistanceFrom',
'uint32_t',
[param('ns3::Buffer::Iterator const &', 'o')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
cls.add_method('IsEnd',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
cls.add_method('IsStart',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
cls.add_method('Next',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
cls.add_method('Next',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::PeekU8() [member function]
cls.add_method('PeekU8',
'uint8_t',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
cls.add_method('Prev',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
cls.add_method('Prev',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(ns3::Buffer::Iterator start, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
cls.add_method('ReadLsbtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
cls.add_method('ReadLsbtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
cls.add_method('ReadLsbtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
cls.add_method('ReadNtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
cls.add_method('ReadNtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
cls.add_method('ReadNtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
cls.add_method('Write',
'void',
[param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
cls.add_method('WriteHtolsbU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
cls.add_method('WriteHtolsbU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
cls.add_method('WriteHtolsbU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
cls.add_method('WriteHtonU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
cls.add_method('WriteHtonU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
cls.add_method('WriteHtonU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data'), param('uint32_t', 'len')])
return
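## Illustrative usage of the ns3::Buffer::Iterator read/write helpers registered
## above (host/network/LSB byte order). Sketch only, not part of the scanned API;
## assumes the modular bindings import as ns.network and that ns3::Buffer itself
## (AddAtStart, Begin, ...) is registered elsewhere in this module:
##
##   buf = ns.network.Buffer()
##   buf.AddAtStart(4)                        # reserve 4 bytes at the front
##   buf.Begin().WriteHtonU32(0xDEADBEEF)     # write in network byte order
##   value = buf.Begin().ReadNtohU32()        # read it back -> 0xDEADBEEF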
def register_Ns3ByteTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagIterator::Item',
[])
return
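## Illustrative iteration over a packet's byte tags using the ByteTagIterator
## registered above (sketch only; assumes ns3::Packet::GetByteTagIterator() is
## registered elsewhere and the bindings import as ns.network):
##
##   it = pkt.GetByteTagIterator()
##   while it.HasNext():
##       item = it.Next()
##       print(item.GetTypeId().GetName(), item.GetStart(), item.GetEnd())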
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
cls.add_method('GetEnd',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
cls.add_method('GetStart',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3ByteTagList_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
cls.add_constructor([])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
cls.add_method('Add',
'ns3::TagBuffer',
[param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
cls.add_method('Add',
'void',
[param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function]
cls.add_method('AddAtEnd',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function]
cls.add_method('AddAtStart',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
cls.add_method('Begin',
'ns3::ByteTagList::Iterator',
[param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
is_const=True)
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
cls.add_method('GetOffsetStart',
'uint32_t',
[],
is_const=True)
## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagList::Iterator::Item',
[])
return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
cls.add_constructor([param('ns3::TagBuffer', 'buf')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
cls.add_instance_attribute('end', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
cls.add_instance_attribute('size', 'uint32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
cls.add_instance_attribute('start', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3EventId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('==')
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventId const &', 'arg0')])
## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
cls.add_constructor([])
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
cls.add_method('GetTs',
'uint64_t',
[],
is_const=True)
## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
cls.add_method('IsExpired',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
cls.add_method('IsRunning',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
cls.add_method('PeekEventImpl',
'ns3::EventImpl *',
[],
is_const=True)
return
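## Illustrative use of ns3::EventId registered above (sketch only; assumes
## ns3::Simulator, ns3::Seconds and the user-supplied some_callback are defined
## elsewhere, with the core bindings importing as ns.core):
##
##   ev = ns.core.Simulator.Schedule(ns.core.Seconds(1.0), some_callback)
##   if ev.IsRunning():          # not yet executed or cancelled
##       ev.Cancel()
##   assert ev.IsExpired()       # cancelled or already executed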
def register_Ns3Hasher_methods(root_module, cls):
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
cls.add_constructor([])
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('std::string const', 's')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('std::string const', 's')])
## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
cls.add_method('clear',
'ns3::Hasher &',
[])
return
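## Illustrative use of ns3::Hasher registered above (sketch only; assumes the
## core bindings import as ns.core):
##
##   h = ns.core.Hasher()
##   h32 = h.GetHash32("hello world")   # 32-bit hash of the string
##   h.clear()                          # reset state before hashing new data
##   h64 = h.GetHash64("hello world")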
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
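## Illustrative use of ns3::Ipv4Address registered above, together with
## ns3::Ipv4Mask (registered next). Sketch only; assumes the network bindings
## import as ns.network:
##
##   addr = ns.network.Ipv4Address("10.1.1.7")
##   mask = ns.network.Ipv4Mask("255.255.255.0")
##   net   = addr.CombineMask(mask)                   # 10.1.1.0
##   bcast = addr.GetSubnetDirectedBroadcast(mask)    # 10.1.1.255
##   addr.IsMulticast()                               # False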
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
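## Illustrative use of ns3::Ipv4Mask registered above (sketch only; assumes the
## network bindings import as ns.network):
##
##   mask = ns.network.Ipv4Mask("255.255.255.0")
##   mask.GetPrefixLength()                            # 24
##   mask.IsMatch(ns.network.Ipv4Address("10.1.1.1"),
##                ns.network.Ipv4Address("10.1.1.42")) # True: same /24 subnet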
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
cls.add_method('GetIpv4MappedAddress',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsDocumentation() const [member function]
cls.add_method('IsDocumentation',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() const [member function]
cls.add_method('IsIpv4MappedAddress',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac16Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac64Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac16Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac16Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac64Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac64Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
cls.add_method('MakeIpv4MappedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv4Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
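## Illustrative use of ns3::Ipv6Address registered above (sketch only; assumes
## ns3::Mac48Address is registered elsewhere and the network bindings import as
## ns.network):
##
##   mac = ns.network.Mac48Address("00:00:00:00:00:01")
##   ll  = ns.network.Ipv6Address.MakeAutoconfiguredLinkLocalAddress(mac)
##   ll.IsLinkLocal()                                  # True
##   mapped = ns.network.Ipv6Address.MakeIpv4MappedAddress(
##       ns.network.Ipv4Address("10.1.1.1"))           # ::ffff:10.1.1.1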
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer() [constructor]
cls.add_constructor([])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::Ptr<ns3::NetDevice> dev) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(std::string devName) [constructor]
cls.add_constructor([param('std::string', 'devName')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & a, ns3::NetDeviceContainer const & b) [constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::NetDeviceContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NetDeviceContainer', 'other')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(std::string deviceName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'deviceName')])
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::NetDeviceContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True)
## net-device-container.h (module 'network'): uint32_t ns3::NetDeviceContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
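## Illustrative use of ns3::NetDeviceContainer registered above (sketch only;
## assumes the network bindings import as ns.network and that other_devices
## was filled elsewhere, e.g. by a device helper):
##
##   devices = ns.network.NetDeviceContainer()
##   devices.Add(other_devices)            # merge another container
##   for i in range(devices.GetN()):
##       dev = devices.Get(i)              # ns3::Ptr<ns3::NetDevice>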
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
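## Illustrative attribute access through ns3::ObjectBase registered above
## (sketch only; the attribute name "Mtu" and ns.core.UintegerValue are
## assumptions -- both are defined/registered elsewhere in the core and
## network modules):
##
##   dev.SetAttributeFailSafe("Mtu", ns.core.UintegerValue(1280))
##   value = ns.core.UintegerValue()
##   dev.GetAttribute("Mtu", value)     # fills 'value' with the current setting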
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
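## Illustrative use of ns3::ObjectFactory registered above (sketch only; the
## TypeId string and attribute name are assumptions, and ns.core.BooleanValue
## is registered elsewhere):
##
##   factory = ns.core.ObjectFactory()
##   factory.SetTypeId("ns3::SixLowPanNetDevice")
##   factory.Set("ForceEtherType", ns.core.BooleanValue(True))
##   obj = factory.Create()             # ns3::Ptr<ns3::Object>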
def register_Ns3PacketMetadata_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor]
cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[param('ns3::Buffer', 'buffer')],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function]
cls.add_method('CreateFragment',
'ns3::PacketMetadata',
[param('uint32_t', 'start'), param('uint32_t', 'end')],
is_const=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function]
cls.add_method('Enable',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('RemoveHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('RemoveTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor]
cls.add_constructor([])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable]
cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable]
cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable]
cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable]
cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable]
cls.add_instance_attribute('isFragment', 'bool', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor]
cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketMetadata::Item',
[])
return
def register_Ns3PacketTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketTagIterator::Item',
[])
return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3PacketTagList_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function]
cls.add_method('Add',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function]
cls.add_method('Head',
'ns3::PacketTagList::TagData const *',
[],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function]
cls.add_method('Peek',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function]
cls.add_method('Remove',
'bool',
[param('ns3::Tag &', 'tag')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Replace(ns3::Tag & tag) [member function]
cls.add_method('Replace',
'bool',
[param('ns3::Tag &', 'tag')])
return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable]
cls.add_instance_attribute('count', 'uint32_t', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable]
cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable]
cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SixLowPanDispatch_methods(root_module, cls):
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::SixLowPanDispatch(ns3::SixLowPanDispatch const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanDispatch const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::SixLowPanDispatch() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): static ns3::SixLowPanDispatch::Dispatch_e ns3::SixLowPanDispatch::GetDispatchType(uint8_t dispatch) [member function]
cls.add_method('GetDispatchType',
'ns3::SixLowPanDispatch::Dispatch_e',
[param('uint8_t', 'dispatch')],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::SixLowPanDispatch::NhcDispatch_e ns3::SixLowPanDispatch::GetNhcDispatchType(uint8_t dispatch) [member function]
cls.add_method('GetNhcDispatchType',
'ns3::SixLowPanDispatch::NhcDispatch_e',
[param('uint8_t', 'dispatch')],
is_static=True)
return
def register_Ns3SixLowPanHelper_methods(root_module, cls):
## sixlowpan-helper.h (module 'sixlowpan'): ns3::SixLowPanHelper::SixLowPanHelper(ns3::SixLowPanHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanHelper const &', 'arg0')])
## sixlowpan-helper.h (module 'sixlowpan'): ns3::SixLowPanHelper::SixLowPanHelper() [constructor]
cls.add_constructor([])
## sixlowpan-helper.h (module 'sixlowpan'): int64_t ns3::SixLowPanHelper::AssignStreams(ns3::NetDeviceContainer c, int64_t stream) [member function]
cls.add_method('AssignStreams',
'int64_t',
[param('ns3::NetDeviceContainer', 'c'), param('int64_t', 'stream')])
## sixlowpan-helper.h (module 'sixlowpan'): ns3::NetDeviceContainer ns3::SixLowPanHelper::Install(ns3::NetDeviceContainer c) [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::NetDeviceContainer', 'c')])
## sixlowpan-helper.h (module 'sixlowpan'): void ns3::SixLowPanHelper::SetDeviceAttribute(std::string n1, ns3::AttributeValue const & v1) [member function]
cls.add_method('SetDeviceAttribute',
'void',
[param('std::string', 'n1'), param('ns3::AttributeValue const &', 'v1')])
return
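## Illustrative use of ns3::SixLowPanHelper registered above (sketch only; the
## attribute name and ns.core.BooleanValue are assumptions, and lrwpan_devices
## would normally come from another device helper):
##
##   helper = ns.sixlowpan.SixLowPanHelper()
##   helper.SetDeviceAttribute("ForceEtherType", ns.core.BooleanValue(True))
##   six_devices = helper.Install(lrwpan_devices)   # stacks 6LoWPAN on each device
##   helper.AssignStreams(six_devices, 0)           # fix RNG streams for reproducibility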
def register_Ns3Tag_methods(root_module, cls):
## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
cls.add_constructor([])
## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Tag const &', 'arg0')])
## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_virtual=True)
## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
def register_Ns3TimeWithUnit_methods(root_module, cls):
cls.add_output_stream_operator()
## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
deprecated=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
cls.add_method('GetHash',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
cls.add_method('GetSize',
'std::size_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
cls.add_method('LookupByHash',
'ns3::TypeId',
[param('uint32_t', 'hash')],
is_static=True)
## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
cls.add_method('LookupByHashFailSafe',
'bool',
[param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
is_static=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
cls.add_method('SetSize',
'ns3::TypeId',
[param('std::size_t', 'size')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
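# The next two functions register the nested helper structs ns3::TypeId::AttributeInformation
# and ns3::TypeId::TraceSourceInformation; only their constructors and public data members
# (exposed via add_instance_attribute) are wrapped.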
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
cls.add_instance_attribute('callback', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
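# Registers ns3::int64x64_t, the high-precision 64.64 fixed-point type used by ns3::Time.
# This build wraps the double-backed implementation (int64x64-double.h): arithmetic and
# comparison operators, the numeric constructors, and the GetHigh/GetLow/GetDouble accessors.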
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long double v) [constructor]
cls.add_constructor([param('long double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable]
cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
return
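# ns3::Chunk and ns3::Header are abstract base classes for packet (de)serialization; their
# Deserialize/Serialize/Print hooks are registered as pure virtual and are implemented by
# the concrete header classes registered later in this file (Ipv6Header, SixLowPan* headers).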
def register_Ns3Chunk_methods(root_module, cls):
## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
cls.add_constructor([])
## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Header_methods(root_module, cls):
cls.add_output_stream_operator()
## header.h (module 'network'): ns3::Header::Header() [constructor]
cls.add_constructor([])
## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Header const &', 'arg0')])
## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
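# Concrete ns3::Ipv6Header binding: accessors for the IPv6 fields (source/destination
# address, traffic class, flow label, payload length, next header, hop limit) plus the
# Header serialization interface.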
def register_Ns3Ipv6Header_methods(root_module, cls):
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header(ns3::Ipv6Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')])
## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header() [constructor]
cls.add_constructor([])
## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetDestinationAddress() const [member function]
cls.add_method('GetDestinationAddress',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetFlowLabel() const [member function]
cls.add_method('GetFlowLabel',
'uint32_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetHopLimit() const [member function]
cls.add_method('GetHopLimit',
'uint8_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): ns3::TypeId ns3::Ipv6Header::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetNextHeader() const [member function]
cls.add_method('GetNextHeader',
'uint8_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint16_t ns3::Ipv6Header::GetPayloadLength() const [member function]
cls.add_method('GetPayloadLength',
'uint16_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetSourceAddress() const [member function]
cls.add_method('GetSourceAddress',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetTrafficClass() const [member function]
cls.add_method('GetTrafficClass',
'uint8_t',
[],
is_const=True)
## ipv6-header.h (module 'internet'): static ns3::TypeId ns3::Ipv6Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetDestinationAddress(ns3::Ipv6Address dst) [member function]
cls.add_method('SetDestinationAddress',
'void',
[param('ns3::Ipv6Address', 'dst')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetFlowLabel(uint32_t flow) [member function]
cls.add_method('SetFlowLabel',
'void',
[param('uint32_t', 'flow')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetHopLimit(uint8_t limit) [member function]
cls.add_method('SetHopLimit',
'void',
[param('uint8_t', 'limit')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetNextHeader(uint8_t next) [member function]
cls.add_method('SetNextHeader',
'void',
[param('uint8_t', 'next')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetPayloadLength(uint16_t len) [member function]
cls.add_method('SetPayloadLength',
'void',
[param('uint16_t', 'len')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetSourceAddress(ns3::Ipv6Address src) [member function]
cls.add_method('SetSourceAddress',
'void',
[param('ns3::Ipv6Address', 'src')])
## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetTrafficClass(uint8_t traffic) [member function]
cls.add_method('SetTrafficClass',
'void',
[param('uint8_t', 'traffic')])
return
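# ns3::Object is the base of the ns-3 object/aggregation system (AggregateObject,
# GetAggregateIterator, Dispose/Initialize). The copy constructor and the
# DoDispose/DoInitialize/NotifyNewAggregate hooks are registered with
# visibility='protected', mirroring their C++ access level.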
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Initialize() [member function]
cls.add_method('Initialize',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
cls.add_method('DoInitialize',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
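# ns3::RandomVariableStream is the abstract base for random number streams
# (GetValue/GetInteger are pure virtual, Peek is protected); SequentialRandomVariable,
# registered next, is one concrete implementation. A rough, unchecked usage sketch of the
# generated binding (assumes the standard `import ns.core` layout):
#   rng = ns.core.SequentialRandomVariable()   # constructor registered below
#   v = rng.GetValue()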
def register_Ns3RandomVariableStream_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::RandomVariableStream::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::RandomVariableStream::RandomVariableStream() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetStream(int64_t stream) [member function]
cls.add_method('SetStream',
'void',
[param('int64_t', 'stream')])
## random-variable-stream.h (module 'core'): int64_t ns3::RandomVariableStream::GetStream() const [member function]
cls.add_method('GetStream',
'int64_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetAntithetic(bool isAntithetic) [member function]
cls.add_method('SetAntithetic',
'void',
[param('bool', 'isAntithetic')])
## random-variable-stream.h (module 'core'): bool ns3::RandomVariableStream::IsAntithetic() const [member function]
cls.add_method('IsAntithetic',
'bool',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::RandomVariableStream::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_pure_virtual=True, is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::RandomVariableStream::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_pure_virtual=True, is_virtual=True)
## random-variable-stream.h (module 'core'): ns3::RngStream * ns3::RandomVariableStream::Peek() const [member function]
cls.add_method('Peek',
'ns3::RngStream *',
[],
is_const=True, visibility='protected')
return
def register_Ns3SequentialRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::SequentialRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable::SequentialRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMin() const [member function]
cls.add_method('GetMin',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMax() const [member function]
cls.add_method('GetMax',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): ns3::Ptr<ns3::RandomVariableStream> ns3::SequentialRandomVariable::GetIncrement() const [member function]
cls.add_method('GetIncrement',
'ns3::Ptr< ns3::RandomVariableStream >',
[],
is_const=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetConsecutive() const [member function]
cls.add_method('GetConsecutive',
'uint32_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
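# The following block of near-identical functions registers one instantiation of the
# ns3::SimpleRefCount<T, ns3::empty, ns3::DefaultDeleter<T>> reference-counting base per
# wrapped type (AttributeAccessor, AttributeChecker, AttributeValue, CallbackImplBase,
# EventImpl, Hash::Implementation, NixVector, Packet, TraceSourceAccessor); each exposes
# only the constructors and the static Cleanup() helper.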
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
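# The remaining registrations cover the sixlowpan module's header classes. SixLowPanFrag1
# and SixLowPanFragN are the FRAG1/FRAGN fragmentation headers (cf. RFC 4944): datagram
# size and tag, plus the datagram offset in the FRAGN case.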
def register_Ns3SixLowPanFrag1_methods(root_module, cls):
cls.add_output_stream_operator()
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFrag1::SixLowPanFrag1(ns3::SixLowPanFrag1 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanFrag1 const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFrag1::SixLowPanFrag1() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFrag1::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFrag1::GetDatagramSize() const [member function]
cls.add_method('GetDatagramSize',
'uint16_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFrag1::GetDatagramTag() const [member function]
cls.add_method('GetDatagramTag',
'uint16_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanFrag1::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFrag1::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanFrag1::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::SetDatagramSize(uint16_t datagramSize) [member function]
cls.add_method('SetDatagramSize',
'void',
[param('uint16_t', 'datagramSize')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::SetDatagramTag(uint16_t datagramTag) [member function]
cls.add_method('SetDatagramTag',
'void',
[param('uint16_t', 'datagramTag')])
return
def register_Ns3SixLowPanFragN_methods(root_module, cls):
cls.add_output_stream_operator()
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFragN::SixLowPanFragN(ns3::SixLowPanFragN const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanFragN const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFragN::SixLowPanFragN() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFragN::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanFragN::GetDatagramOffset() const [member function]
cls.add_method('GetDatagramOffset',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFragN::GetDatagramSize() const [member function]
cls.add_method('GetDatagramSize',
'uint16_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFragN::GetDatagramTag() const [member function]
cls.add_method('GetDatagramTag',
'uint16_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanFragN::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFragN::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanFragN::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::SetDatagramOffset(uint8_t datagramOffset) [member function]
cls.add_method('SetDatagramOffset',
'void',
[param('uint8_t', 'datagramOffset')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::SetDatagramSize(uint16_t datagramSize) [member function]
cls.add_method('SetDatagramSize',
'void',
[param('uint16_t', 'datagramSize')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::SetDatagramTag(uint16_t datagramTag) [member function]
cls.add_method('SetDatagramTag',
'void',
[param('uint16_t', 'datagramTag')])
return
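# SixLowPanHc1: the HC1 compressed IPv6 header (cf. RFC 4944), exposing the compressed
# source/destination prefixes and interface identifiers, traffic class / flow label
# compression flags, and the optional HC2 follow-on header indicator.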
def register_Ns3SixLowPanHc1_methods(root_module, cls):
cls.add_output_stream_operator()
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::SixLowPanHc1(ns3::SixLowPanHc1 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanHc1 const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::SixLowPanHc1() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanHc1::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1Addr_e ns3::SixLowPanHc1::GetDstCompression() const [member function]
cls.add_method('GetDstCompression',
'ns3::SixLowPanHc1::LowPanHc1Addr_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetDstInterface() const [member function]
cls.add_method('GetDstInterface',
'uint8_t const *',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetDstPrefix() const [member function]
cls.add_method('GetDstPrefix',
'uint8_t const *',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanHc1::GetFlowLabel() const [member function]
cls.add_method('GetFlowLabel',
'uint32_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanHc1::GetHopLimit() const [member function]
cls.add_method('GetHopLimit',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanHc1::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanHc1::GetNextHeader() const [member function]
cls.add_method('GetNextHeader',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanHc1::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1Addr_e ns3::SixLowPanHc1::GetSrcCompression() const [member function]
cls.add_method('GetSrcCompression',
'ns3::SixLowPanHc1::LowPanHc1Addr_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetSrcInterface() const [member function]
cls.add_method('GetSrcInterface',
'uint8_t const *',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetSrcPrefix() const [member function]
cls.add_method('GetSrcPrefix',
'uint8_t const *',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanHc1::GetTrafficClass() const [member function]
cls.add_method('GetTrafficClass',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanHc1::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanHc1::IsHc2HeaderPresent() const [member function]
cls.add_method('IsHc2HeaderPresent',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanHc1::IsTcflCompression() const [member function]
cls.add_method('IsTcflCompression',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetDstCompression(ns3::SixLowPanHc1::LowPanHc1Addr_e dstCompression) [member function]
cls.add_method('SetDstCompression',
'void',
[param('ns3::SixLowPanHc1::LowPanHc1Addr_e', 'dstCompression')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetDstInterface(uint8_t const * dstInterface) [member function]
cls.add_method('SetDstInterface',
'void',
[param('uint8_t const *', 'dstInterface')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetDstPrefix(uint8_t const * dstPrefix) [member function]
cls.add_method('SetDstPrefix',
'void',
[param('uint8_t const *', 'dstPrefix')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetFlowLabel(uint32_t flowLabel) [member function]
cls.add_method('SetFlowLabel',
'void',
[param('uint32_t', 'flowLabel')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetHc2HeaderPresent(bool hc2HeaderPresent) [member function]
cls.add_method('SetHc2HeaderPresent',
'void',
[param('bool', 'hc2HeaderPresent')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetHopLimit(uint8_t limit) [member function]
cls.add_method('SetHopLimit',
'void',
[param('uint8_t', 'limit')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetNextHeader(uint8_t nextHeader) [member function]
cls.add_method('SetNextHeader',
'void',
[param('uint8_t', 'nextHeader')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetSrcCompression(ns3::SixLowPanHc1::LowPanHc1Addr_e srcCompression) [member function]
cls.add_method('SetSrcCompression',
'void',
[param('ns3::SixLowPanHc1::LowPanHc1Addr_e', 'srcCompression')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetSrcInterface(uint8_t const * srcInterface) [member function]
cls.add_method('SetSrcInterface',
'void',
[param('uint8_t const *', 'srcInterface')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetSrcPrefix(uint8_t const * srcPrefix) [member function]
cls.add_method('SetSrcPrefix',
'void',
[param('uint8_t const *', 'srcPrefix')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetTcflCompression(bool tcflCompression) [member function]
cls.add_method('SetTcflCompression',
'void',
[param('bool', 'tcflCompression')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetTrafficClass(uint8_t trafficClass) [member function]
cls.add_method('SetTrafficClass',
'void',
[param('uint8_t', 'trafficClass')])
return
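# SixLowPanIphc: the IPHC compressed header (cf. RFC 6282). The getters/setters map to the
# IPHC bit fields (TF, NH, HLIM, CID, SAC/SAM, M, DAC/DAM), the source/destination context
# identifiers, and the carried (possibly elided) IPv6 field values.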
def register_Ns3SixLowPanIphc_methods(root_module, cls):
cls.add_output_stream_operator()
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::SixLowPanIphc(ns3::SixLowPanIphc const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanIphc const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::SixLowPanIphc() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::SixLowPanIphc(uint8_t dispatch) [constructor]
cls.add_constructor([param('uint8_t', 'dispatch')])
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIphc::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetCid() const [member function]
cls.add_method('GetCid',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetDac() const [member function]
cls.add_method('GetDac',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::HeaderCompression_e ns3::SixLowPanIphc::GetDam() const [member function]
cls.add_method('GetDam',
'ns3::SixLowPanIphc::HeaderCompression_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetDscp() const [member function]
cls.add_method('GetDscp',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::Ipv6Address ns3::SixLowPanIphc::GetDstAddress() const [member function]
cls.add_method('GetDstAddress',
'ns3::Ipv6Address',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetDstContextId() const [member function]
cls.add_method('GetDstContextId',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetEcn() const [member function]
cls.add_method('GetEcn',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIphc::GetFlowLabel() const [member function]
cls.add_method('GetFlowLabel',
'uint32_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::Hlim_e ns3::SixLowPanIphc::GetHlim() const [member function]
cls.add_method('GetHlim',
'ns3::SixLowPanIphc::Hlim_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetHopLimit() const [member function]
cls.add_method('GetHopLimit',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanIphc::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetM() const [member function]
cls.add_method('GetM',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetNextHeader() const [member function]
cls.add_method('GetNextHeader',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetNh() const [member function]
cls.add_method('GetNh',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetSac() const [member function]
cls.add_method('GetSac',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::HeaderCompression_e ns3::SixLowPanIphc::GetSam() const [member function]
cls.add_method('GetSam',
'ns3::SixLowPanIphc::HeaderCompression_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIphc::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::Ipv6Address ns3::SixLowPanIphc::GetSrcAddress() const [member function]
cls.add_method('GetSrcAddress',
'ns3::Ipv6Address',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetSrcContextId() const [member function]
cls.add_method('GetSrcContextId',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::TrafficClassFlowLabel_e ns3::SixLowPanIphc::GetTf() const [member function]
cls.add_method('GetTf',
'ns3::SixLowPanIphc::TrafficClassFlowLabel_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanIphc::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetCid(bool cidField) [member function]
cls.add_method('SetCid',
'void',
[param('bool', 'cidField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDac(bool dacField) [member function]
cls.add_method('SetDac',
'void',
[param('bool', 'dacField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDam(ns3::SixLowPanIphc::HeaderCompression_e damField) [member function]
cls.add_method('SetDam',
'void',
[param('ns3::SixLowPanIphc::HeaderCompression_e', 'damField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDscp(uint8_t dscp) [member function]
cls.add_method('SetDscp',
'void',
[param('uint8_t', 'dscp')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDstAddress(ns3::Ipv6Address dstAddress) [member function]
cls.add_method('SetDstAddress',
'void',
[param('ns3::Ipv6Address', 'dstAddress')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDstContextId(uint8_t dstContextId) [member function]
cls.add_method('SetDstContextId',
'void',
[param('uint8_t', 'dstContextId')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetEcn(uint8_t ecn) [member function]
cls.add_method('SetEcn',
'void',
[param('uint8_t', 'ecn')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetFlowLabel(uint32_t flowLabel) [member function]
cls.add_method('SetFlowLabel',
'void',
[param('uint32_t', 'flowLabel')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetHlim(ns3::SixLowPanIphc::Hlim_e hlimField) [member function]
cls.add_method('SetHlim',
'void',
[param('ns3::SixLowPanIphc::Hlim_e', 'hlimField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetHopLimit(uint8_t hopLimit) [member function]
cls.add_method('SetHopLimit',
'void',
[param('uint8_t', 'hopLimit')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetM(bool mField) [member function]
cls.add_method('SetM',
'void',
[param('bool', 'mField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetNextHeader(uint8_t nextHeader) [member function]
cls.add_method('SetNextHeader',
'void',
[param('uint8_t', 'nextHeader')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetNh(bool nhField) [member function]
cls.add_method('SetNh',
'void',
[param('bool', 'nhField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSac(bool sacField) [member function]
cls.add_method('SetSac',
'void',
[param('bool', 'sacField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSam(ns3::SixLowPanIphc::HeaderCompression_e samField) [member function]
cls.add_method('SetSam',
'void',
[param('ns3::SixLowPanIphc::HeaderCompression_e', 'samField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSrcAddress(ns3::Ipv6Address srcAddress) [member function]
cls.add_method('SetSrcAddress',
'void',
[param('ns3::Ipv6Address', 'srcAddress')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSrcContextId(uint8_t srcContextId) [member function]
cls.add_method('SetSrcContextId',
'void',
[param('uint8_t', 'srcContextId')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetTf(ns3::SixLowPanIphc::TrafficClassFlowLabel_e tfField) [member function]
cls.add_method('SetTf',
'void',
[param('ns3::SixLowPanIphc::TrafficClassFlowLabel_e', 'tfField')])
return
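# SixLowPanIpv6: the dispatch header used when a full, uncompressed IPv6 header follows;
# it exposes no fields of its own beyond the Header serialization interface.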
def register_Ns3SixLowPanIpv6_methods(root_module, cls):
cls.add_output_stream_operator()
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIpv6::SixLowPanIpv6(ns3::SixLowPanIpv6 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanIpv6 const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIpv6::SixLowPanIpv6() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIpv6::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanIpv6::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIpv6::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanIpv6::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIpv6::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIpv6::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
return
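# SixLowPanNhcExtension and SixLowPanUdpNhcExtension wrap the NHC-compressed IPv6
# extension header and UDP header encodings (cf. RFC 6282); the extension variant also
# carries the uncompressed remainder as an opaque blob (SetBlob/CopyBlob).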
def register_Ns3SixLowPanNhcExtension_methods(root_module, cls):
cls.add_output_stream_operator()
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::SixLowPanNhcExtension(ns3::SixLowPanNhcExtension const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanNhcExtension const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::SixLowPanNhcExtension() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNhcExtension::CopyBlob(uint8_t * blob, uint32_t size) const [member function]
cls.add_method('CopyBlob',
'uint32_t',
[param('uint8_t *', 'blob'), param('uint32_t', 'size')],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNhcExtension::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::Eid_e ns3::SixLowPanNhcExtension::GetEid() const [member function]
cls.add_method('GetEid',
'ns3::SixLowPanNhcExtension::Eid_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanNhcExtension::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanNhcExtension::GetNextHeader() const [member function]
cls.add_method('GetNextHeader',
'uint8_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanNhcExtension::GetNh() const [member function]
cls.add_method('GetNh',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::NhcDispatch_e ns3::SixLowPanNhcExtension::GetNhcDispatchType() const [member function]
cls.add_method('GetNhcDispatchType',
'ns3::SixLowPanDispatch::NhcDispatch_e',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNhcExtension::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanNhcExtension::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetBlob(uint8_t const * blob, uint32_t size) [member function]
cls.add_method('SetBlob',
'void',
[param('uint8_t const *', 'blob'), param('uint32_t', 'size')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetEid(ns3::SixLowPanNhcExtension::Eid_e extensionHeaderType) [member function]
cls.add_method('SetEid',
'void',
[param('ns3::SixLowPanNhcExtension::Eid_e', 'extensionHeaderType')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetNextHeader(uint8_t nextHeader) [member function]
cls.add_method('SetNextHeader',
'void',
[param('uint8_t', 'nextHeader')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetNh(bool nhField) [member function]
cls.add_method('SetNh',
'void',
[param('bool', 'nhField')])
return
def register_Ns3SixLowPanUdpNhcExtension_methods(root_module, cls):
cls.add_output_stream_operator()
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::SixLowPanUdpNhcExtension(ns3::SixLowPanUdpNhcExtension const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanUdpNhcExtension const &', 'arg0')])
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::SixLowPanUdpNhcExtension() [constructor]
cls.add_constructor([])
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanUdpNhcExtension::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanUdpNhcExtension::GetC() const [member function]
cls.add_method('GetC',
'bool',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanUdpNhcExtension::GetChecksum() const [member function]
cls.add_method('GetChecksum',
'uint16_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanUdpNhcExtension::GetDstPort() const [member function]
cls.add_method('GetDstPort',
'uint16_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanUdpNhcExtension::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::NhcDispatch_e ns3::SixLowPanUdpNhcExtension::GetNhcDispatchType() const [member function]
cls.add_method('GetNhcDispatchType',
'ns3::SixLowPanDispatch::NhcDispatch_e',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::Ports_e ns3::SixLowPanUdpNhcExtension::GetPorts() const [member function]
cls.add_method('GetPorts',
'ns3::SixLowPanUdpNhcExtension::Ports_e',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanUdpNhcExtension::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanUdpNhcExtension::GetSrcPort() const [member function]
cls.add_method('GetSrcPort',
'uint16_t',
[],
is_const=True)
## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanUdpNhcExtension::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetC(bool cField) [member function]
cls.add_method('SetC',
'void',
[param('bool', 'cField')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetChecksum(uint16_t checksum) [member function]
cls.add_method('SetChecksum',
'void',
[param('uint16_t', 'checksum')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetDstPort(uint16_t port) [member function]
cls.add_method('SetDstPort',
'void',
[param('uint16_t', 'port')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetPorts(ns3::SixLowPanUdpNhcExtension::Ports_e port) [member function]
cls.add_method('SetPorts',
'void',
[param('ns3::SixLowPanUdpNhcExtension::Ports_e', 'port')])
## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetSrcPort(uint16_t port) [member function]
cls.add_method('SetSrcPort',
'void',
[param('uint16_t', 'port')])
return
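## Minimal usage sketch for the SixLowPanUdpNhcExtension bindings registered
## above. It assumes the generated module imports as `ns.sixlowpan` (the usual
## ns-3 Python binding layout); the helper is illustrative only and is never
## called by this module.
def _example_sixlowpan_udp_nhc_usage():
    import ns.sixlowpan
    # Build a UDP NHC compression header and use the accessors bound above
    # (SetSrcPort / SetDstPort / SetC / GetSerializedSize).
    hdr = ns.sixlowpan.SixLowPanUdpNhcExtension()
    hdr.SetSrcPort(61617)
    hdr.SetDstPort(61618)
    hdr.SetC(False)                    # C = 0: UDP checksum carried inline, not elided
    return hdr.GetSerializedSize()     # size depends on the selected Ports_e mode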
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
cls.add_method('As',
'ns3::TimeWithUnit',
[param('ns3::Time::Unit const', 'unit')],
is_const=True)
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'unit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
cls.add_method('GetDays',
'double',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
cls.add_method('GetHours',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
cls.add_method('GetMinutes',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
cls.add_method('GetYears',
'double',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
cls.add_method('Max',
'ns3::Time',
[],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
cls.add_method('Min',
'ns3::Time',
[],
is_static=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
cls.add_method('StaticInit',
'bool',
[],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'unit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'unit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'unit')],
is_const=True)
return
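## Minimal usage sketch for the ns3::Time bindings registered above, including
## the std::string constructor and the unit getters. It assumes the core
## bindings import as `ns.core`; the helper is illustrative only and is never
## called by this module.
def _example_time_usage():
    import ns.core
    t = ns.core.Time("1.5ms")          # std::string constructor bound above
    return t.GetMicroSeconds()         # 1.5 ms -> 1500, via the GetMicroSeconds binding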
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
cls.add_constructor([])
## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_pure_virtual=True, is_virtual=True)
## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TriangularRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::TriangularRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable::TriangularRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMin() const [member function]
cls.add_method('GetMin',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMax() const [member function]
cls.add_method('GetMax',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue(double mean, double min, double max) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'min'), param('double', 'max')])
## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger(uint32_t mean, uint32_t min, uint32_t max) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3UniformRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::UniformRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable::UniformRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMin() const [member function]
cls.add_method('GetMin',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMax() const [member function]
cls.add_method('GetMax',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue(double min, double max) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'min'), param('double', 'max')])
## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger(uint32_t min, uint32_t max) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'min'), param('uint32_t', 'max')])
## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
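## Minimal usage sketch for the UniformRandomVariable bindings registered
## above, covering both the bounded and the parameterless overloads. It assumes
## the core bindings import as `ns.core`; illustrative only, never called here.
def _example_uniform_random_variable_usage():
    import ns.core
    rng = ns.core.UniformRandomVariable()
    sample = rng.GetValue(0.0, 10.0)   # bounded double overload registered above
    draw = rng.GetInteger(1, 6)        # bounded uint32_t overload registered above
    return sample, draw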
def register_Ns3WeibullRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::WeibullRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable::WeibullRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetScale() const [member function]
cls.add_method('GetScale',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetShape() const [member function]
cls.add_method('GetShape',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue(double scale, double shape, double bound) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3ZetaRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZetaRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable::ZetaRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetAlpha() const [member function]
cls.add_method('GetAlpha',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue(double alpha) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'alpha')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger(uint32_t alpha) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'alpha')])
## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3ZipfRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZipfRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable::ZipfRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetAlpha() const [member function]
cls.add_method('GetAlpha',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue(uint32_t n, double alpha) [member function]
cls.add_method('GetValue',
'double',
[param('uint32_t', 'n'), param('double', 'alpha')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger(uint32_t n, uint32_t alpha) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'n'), param('uint32_t', 'alpha')])
## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3ConstantRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ConstantRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable::ConstantRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetConstant() const [member function]
cls.add_method('GetConstant',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue(double constant) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'constant')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger(uint32_t constant) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'constant')])
## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::DeterministicRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable::DeterministicRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): void ns3::DeterministicRandomVariable::SetValueArray(double * values, uint64_t length) [member function]
cls.add_method('SetValueArray',
'void',
[param('double *', 'values'), param('uint64_t', 'length')])
## random-variable-stream.h (module 'core'): double ns3::DeterministicRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::DeterministicRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable::EmpiricalRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::CDF(double v, double c) [member function]
cls.add_method('CDF',
'void',
[param('double', 'v'), param('double', 'c')])
## random-variable-stream.h (module 'core'): uint32_t ns3::EmpiricalRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::EmpiricalRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::Interpolate(double arg0, double arg1, double arg2, double arg3, double arg4) [member function]
cls.add_method('Interpolate',
'double',
[param('double', 'arg0'), param('double', 'arg1'), param('double', 'arg2'), param('double', 'arg3'), param('double', 'arg4')],
visibility='private', is_virtual=True)
## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::Validate() [member function]
cls.add_method('Validate',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3ErlangRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ErlangRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable::ErlangRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetK() const [member function]
cls.add_method('GetK',
'uint32_t',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetLambda() const [member function]
cls.add_method('GetLambda',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue(uint32_t k, double lambda) [member function]
cls.add_method('GetValue',
'double',
[param('uint32_t', 'k'), param('double', 'lambda')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger(uint32_t k, uint32_t lambda) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'k'), param('uint32_t', 'lambda')])
## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3EventImpl_methods(root_module, cls):
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
cls.add_constructor([])
## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
cls.add_method('Invoke',
'void',
[])
## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
cls.add_method('IsCancelled',
'bool',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
cls.add_method('Notify',
'void',
[],
is_pure_virtual=True, visibility='protected', is_virtual=True)
return
def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'bound')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3GammaRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function]
cls.add_method('GetAlpha',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function]
cls.add_method('GetBeta',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'alpha'), param('double', 'beta')])
## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function]
cls.add_method('GetMu',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function]
cls.add_method('GetSigma',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mu'), param('double', 'sigma')])
## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger(uint32_t mu, uint32_t sigma) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3NixVector_methods(root_module, cls):
cls.add_output_stream_operator()
## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
cls.add_constructor([])
## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
cls.add_constructor([param('ns3::NixVector const &', 'o')])
## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
cls.add_method('AddNeighborIndex',
'void',
[param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
cls.add_method('BitCount',
'uint32_t',
[param('uint32_t', 'numberOfNeighbors')],
is_const=True)
## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
cls.add_method('ExtractNeighborIndex',
'uint32_t',
[param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
cls.add_method('GetRemainingBits',
'uint32_t',
[])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3NormalRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::INFINITE_VALUE [variable]
cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True)
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::NormalRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::NormalRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetVariance() const [member function]
cls.add_method('GetVariance',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue(double mean, double variance, double bound=ns3::NormalRandomVariable::INFINITE_VALUE) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'variance'), param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')])
## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger(uint32_t mean, uint32_t variance, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'variance'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
cls.add_method('Get',
'ns3::ObjectFactory',
[],
is_const=True)
## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::ObjectFactory const &', 'value')])
return
def register_Ns3Packet_methods(root_module, cls):
cls.add_output_stream_operator()
## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
cls.add_constructor([])
## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
cls.add_constructor([param('ns3::Packet const &', 'o')])
## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
cls.add_constructor([param('uint32_t', 'size')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddByteTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header')])
## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddPacketTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer')])
## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::Packet >',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
cls.add_method('EnablePrinting',
'void',
[],
is_static=True)
## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
cls.add_method('FindFirstMatchingByteTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
cls.add_method('GetByteTagIterator',
'ns3::ByteTagIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
cls.add_method('GetNixVector',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
cls.add_method('GetPacketTagIterator',
'ns3::PacketTagIterator',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
cls.add_method('PeekHeader',
'uint32_t',
[param('ns3::Header &', 'header')],
is_const=True)
## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
cls.add_method('PeekPacketTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('PeekTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
cls.add_method('PrintByteTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
cls.add_method('PrintPacketTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
cls.add_method('RemoveAllByteTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
cls.add_method('RemoveAllPacketTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
cls.add_method('RemoveHeader',
'uint32_t',
[param('ns3::Header &', 'header')])
## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
cls.add_method('RemovePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('RemoveTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
cls.add_method('ReplacePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function]
cls.add_method('SetNixVector',
'void',
[param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function]
cls.add_method('ToString',
'std::string',
[],
is_const=True)
return
def register_Ns3ParetoRandomVariable_methods(root_module, cls):
## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ParetoRandomVariable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable::ParetoRandomVariable() [constructor]
cls.add_constructor([])
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetMean() const [member function]
cls.add_method('GetMean',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetShape() const [member function]
cls.add_method('GetShape',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetBound() const [member function]
cls.add_method('GetBound',
'double',
[],
is_const=True)
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue(double mean, double shape, double bound) [member function]
cls.add_method('GetValue',
'double',
[param('double', 'mean'), param('double', 'shape'), param('double', 'bound')])
## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger(uint32_t mean, uint32_t shape, uint32_t bound) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 'mean'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue() [member function]
cls.add_method('GetValue',
'double',
[],
is_virtual=True)
## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger() [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_virtual=True)
return
def register_Ns3SixLowPanNetDevice_methods(root_module, cls):
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::SixLowPanNetDevice::SixLowPanNetDevice() [constructor]
cls.add_constructor([])
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): int64_t ns3::SixLowPanNetDevice::AssignStreams(int64_t stream) [member function]
cls.add_method('AssignStreams',
'int64_t',
[param('int64_t', 'stream')])
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Ptr<ns3::Channel> ns3::SixLowPanNetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): uint16_t ns3::SixLowPanNetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Ptr<ns3::NetDevice> ns3::SixLowPanNetDevice::GetNetDevice() const [member function]
cls.add_method('GetNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[],
is_const=True)
## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Ptr<ns3::Node> ns3::SixLowPanNetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanNetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetNetDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('SetNetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
def register_Ns3HashImplementation_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
cls.add_constructor([])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_pure_virtual=True, is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
cls.add_method('clear',
'void',
[],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
cls.add_constructor([])
## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
cls.add_constructor([])
## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_functions(root_module):
module = root_module
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_Hash(module, root_module):
register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
return
def register_functions_ns3_Hash_Function(module, root_module):
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
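# Illustrative note (not part of the generated binding script itself): main()
# above builds the root module via module_init(), runs the type, method and
# free-function registrations defined in this file, and then emits the
# generated binding source code to standard output through
# FileCodeSink(sys.stdout). A typical (hypothetical) invocation:
#
#     python <this-binding-script>.py > sixlowpan-module-bindings.cc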
|
np/alot
|
refs/heads/master
|
alot/db/envelope.py
|
1
|
# Copyright (C) 2011-2012 Patrick Totzke <patricktotzke@gmail.com>
# This file is released under the GNU GPL, version 3 or a later revision.
# For further details see the COPYING file
import os
import email
import re
import glob
import email.charset as charset
charset.add_charset('utf-8', charset.QP, charset.QP, 'utf-8')
from email.encoders import encode_7or8bit
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
from alot import __version__
import logging
import alot.helper as helper
import alot.crypto as crypto
import gpgme
from alot.settings import settings
from alot.errors import GPGProblem
from attachment import Attachment
from utils import encode_header
class Envelope(object):
"""a message that is not yet sent and still editable"""
def __init__(self, template=None, bodytext=u'', headers={}, attachments=[],
sign=False, sign_key=None, encrypt=False, tags=[]):
"""
:param template: if not None, the envelope will be initialised by
:meth:`parsing <parse_template>` this string before
setting any other values given to this constructor.
:type template: str
:param bodytext: text used as body part
:type bodytext: str
:param headers: unencoded header values
:type headers: dict (str -> unicode)
:param attachments: file attachments to include
:type attachments: list of :class:`~alot.db.attachment.Attachment`
:param tags: tags to add after successful sendout and saving this message
:type tags: list of str
"""
assert isinstance(bodytext, unicode)
self.headers = {}
self.body = None
self.tmpfile = None
logging.debug('TEMPLATE: %s' % template)
if template:
self.parse_template(template)
logging.debug('PARSED TEMPLATE: %s' % template)
logging.debug('BODY: %s' % self.body)
if self.body is None:
self.body = bodytext
self.headers.update(headers)
self.attachments = list(attachments)
self.sign = sign
self.sign_key = sign_key
self.encrypt = encrypt
self.tags = tags # tags to add after successful sendout
self.sent_time = None
self.modified_since_sent = False
self.sending = False # semaphore to avoid accidental double sendout
def __str__(self):
return "Envelope (%s)\n%s" % (self.headers, self.body)
def __setitem__(self, name, val):
"""setter for header values. this allows adding header like so:
>>> envelope['Subject'] = u'sm\xf8rebr\xf8d'
"""
self.headers[name] = val
if self.sent_time:
self.modified_since_sent = True
def __getitem__(self, name):
"""getter for header values.
:raises: KeyError if undefined
"""
return self.headers[name]
def __delitem__(self, name):
del(self.headers[name])
if self.sent_time:
self.modified_since_sent = True
def __contains__(self, name):
return self.headers.__contains__(name)
def get(self, key, fallback=None):
"""secure getter for header values that allows specifying a `fallback`
return string (defaults to None). This returns the first matching value
and doesn't raise KeyErrors"""
if key in self.headers:
value = self.headers[key][0]
else:
value = fallback
return value
def get_all(self, key, fallback=[]):
"""returns all header values for given key"""
if key in self.headers:
value = self.headers[key]
else:
value = fallback
return value
def add(self, key, value):
"""add header value"""
if key not in self.headers:
self.headers[key] = []
self.headers[key].append(value)
if self.sent_time:
self.modified_since_sent = True
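    # Illustrative sketch (not part of the original alot module): header values
    # are stored as lists of unencoded unicode strings, so add() appends while
    # get() returns only the first match and get_all() the whole list, e.g.
    #
    #     >>> e = Envelope()
    #     >>> e.add('Cc', u'one@example.com')    # addresses are made up
    #     >>> e.add('Cc', u'two@example.com')
    #     >>> e.get('Cc')
    #     u'one@example.com'
    #     >>> e.get_all('Cc')
    #     [u'one@example.com', u'two@example.com']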
def attach(self, attachment, filename=None, ctype=None):
"""
attach a file
:param attachment: File to attach, given as
:class:`~alot.db.attachment.Attachment` object or path to a file.
:type attachment: :class:`~alot.db.attachment.Attachment` or str
:param filename: filename to use in content-disposition.
Will be ignored if `path` matches multiple files
:param ctype: force content-type to be used for this attachment
:type ctype: str
"""
if isinstance(attachment, Attachment):
self.attachments.append(attachment)
elif isinstance(attachment, basestring):
path = os.path.expanduser(attachment)
part = helper.mimewrap(path, filename, ctype)
self.attachments.append(Attachment(part))
else:
raise TypeError('attach accepts an Attachment or str')
if self.sent_time:
self.modified_since_sent = True
def construct_mail(self):
"""
compiles the information contained in this envelope into a
:class:`email.Message`.
"""
# Build body text part. To properly sign/encrypt messages later on, we
# convert the text to its canonical format (as per RFC 2015).
canonical_format = self.body.encode('utf-8')
canonical_format = canonical_format.replace('\\t', ' ' * 4)
textpart = MIMEText(canonical_format, 'plain', 'utf-8')
# wrap it in a multipart container if necessary
if self.attachments:
inner_msg = MIMEMultipart()
inner_msg.attach(textpart)
# add attachments
for a in self.attachments:
inner_msg.attach(a.get_mime_representation())
else:
inner_msg = textpart
if self.sign:
plaintext = crypto.email_as_string(inner_msg)
logging.debug('signing plaintext: ' + plaintext)
try:
signatures, signature_str = crypto.detached_signature_for(
plaintext, self.sign_key)
if len(signatures) != 1:
raise GPGProblem(("Could not sign message "
"(GPGME did not return a signature)"))
except gpgme.GpgmeError as e:
if e.code == gpgme.ERR_BAD_PASSPHRASE:
# If GPG_AGENT_INFO is unset or empty, the user just does
# not have gpg-agent running (properly).
if os.environ.get('GPG_AGENT_INFO', '').strip() == '':
msg = "Got invalid passphrase and GPG_AGENT_INFO\
not set. Please set up gpg-agent."
raise GPGProblem(msg)
else:
raise GPGProblem(("Bad passphrase. Is "
"gpg-agent running?"))
raise GPGProblem(str(e))
micalg = crypto.RFC3156_micalg_from_algo(signatures[0].hash_algo)
outer_msg = MIMEMultipart('signed', micalg=micalg,
protocol='application/pgp-signature')
            # wrap signature in a MIME container
stype = 'pgp-signature; name="signature.asc"'
signature_mime = MIMEApplication(_data=signature_str,
_subtype=stype,
_encoder=encode_7or8bit)
signature_mime['Content-Description'] = 'signature'
signature_mime.set_charset('us-ascii')
# add signed message and signature to outer message
outer_msg.attach(inner_msg)
outer_msg.attach(signature_mime)
outer_msg['Content-Disposition'] = 'inline'
else:
outer_msg = inner_msg
headers = self.headers.copy()
# add Message-ID
if 'Message-ID' not in headers:
headers['Message-ID'] = [email.Utils.make_msgid()]
if 'User-Agent' in headers:
uastring_format = headers['User-Agent'][0]
else:
uastring_format = settings.get('user_agent').strip()
uastring = uastring_format.format(version=__version__)
if uastring:
headers['User-Agent'] = [uastring]
# copy headers from envelope to mail
for k, vlist in headers.items():
for v in vlist:
outer_msg[k] = encode_header(k, v)
return outer_msg
def parse_template(self, tmp, reset=False, only_body=False):
"""parses a template or user edited string to fills this envelope.
:param tmp: the string to parse.
:type tmp: str
:param reset: remove previous envelope content
:type reset: bool
"""
logging.debug('GoT: """\n%s\n"""' % tmp)
if self.sent_time:
self.modified_since_sent = True
if only_body:
self.body = tmp
else:
m = re.match('(?P<h>([a-zA-Z0-9_-]+:.+\n)*)\n?(?P<b>(\s*.*)*)',
tmp)
assert m
d = m.groupdict()
headertext = d['h']
self.body = d['b']
# remove existing content
if reset:
self.headers = {}
# go through multiline, utf-8 encoded headers
# we decode the edited text ourselves here as
# email.message_from_file can't deal with raw utf8 header values
key = value = None
for line in headertext.splitlines():
if re.match('[a-zA-Z0-9_-]+:', line): # new k/v pair
if key and value: # save old one from stack
self.add(key, value) # save
key, value = line.strip().split(':', 1) # parse new pair
# strip spaces, otherwise we end up having " foo" as value
# of "Subject: foo"
value = value.strip()
elif key and value: # append new line without key prefix
value += line
if key and value: # save last one if present
self.add(key, value)
# interpret 'Attach' pseudo header
if 'Attach' in self:
to_attach = []
for line in self['Attach']:
gpath = os.path.expanduser(line.strip())
to_attach += filter(os.path.isfile, glob.glob(gpath))
logging.debug('Attaching: %s' % to_attach)
for path in to_attach:
self.attach(path)
del(self['Attach'])
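# Illustrative end-to-end sketch (not part of the original alot module): how an
# Envelope is typically filled and compiled into a MIME message. Running
# construct_mail() needs a configured alot `settings` object (for the
# User-Agent header) and, if self.sign is set, a working gpg-agent, so the
# sketch is kept as a comment; addresses and paths below are made up:
#
#     envelope = Envelope(bodytext=u'hello world')
#     envelope.add('To', u'someone@example.com')
#     envelope.add('Subject', u'greetings')
#     envelope.attach('~/notes.txt')        # wrapped via helper.mimewrap()
#     mail = envelope.construct_mail()      # returns an email.Message tree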
|
vipulkanade/EventbriteDjango
|
refs/heads/master
|
lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py
|
2374
|
"""Utilities for writing code that runs on Python 2 and 3"""
#Copyright (c) 2010-2011 Benjamin Peterson
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
#the Software, and to permit persons to whom the Software is furnished to do so,
#subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.2.0" # Revision 41c74fef2ded
# True if we are running on Python 3.
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
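# Illustrative sketch (not part of upstream six): the lazy `moves` pseudo-module
# registered above maps 2/3-renamed standard library modules onto one name, so
# code using this vendored copy can do (host name is made up):
#
#     from pip._vendor.requests.packages.urllib3.packages import six
#     conn = six.moves.http_client.HTTPConnection('example.org')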
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_code = "func_code"
_func_defaults = "func_defaults"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
if PY3:
def get_unbound_function(unbound):
return unbound
Iterator = object
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
else:
def get_unbound_function(unbound):
return unbound.im_func
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
def iterkeys(d):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)())
def itervalues(d):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)())
def iteritems(d):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)())
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
def u(s):
return unicode(s, "unicode_escape")
int2byte = chr
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
import builtins
exec_ = getattr(builtins, "exec")
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
print_ = getattr(builtins, "print")
del builtins
else:
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
def print_(*args, **kwargs):
"""The new-style print function."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("NewBase", (base,), {})
|
bigdawg-istc/bigdawg
|
refs/heads/master
|
provisions/cluster_setup/accumulo-data/bdsetup/thrift/server/TServer.py
|
50
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import Queue
import os
import sys
import threading
import traceback
import logging
logger = logging.getLogger(__name__)
from thrift.Thrift import TProcessor
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
class TServer:
"""Base interface for a server, which must have a serve() method.
Three constructors for all servers:
1) (processor, serverTransport)
2) (processor, serverTransport, transportFactory, protocolFactory)
3) (processor, serverTransport,
inputTransportFactory, outputTransportFactory,
inputProtocolFactory, outputProtocolFactory)
"""
def __init__(self, *args):
if (len(args) == 2):
self.__initArgs__(args[0], args[1],
TTransport.TTransportFactoryBase(),
TTransport.TTransportFactoryBase(),
TBinaryProtocol.TBinaryProtocolFactory(),
TBinaryProtocol.TBinaryProtocolFactory())
elif (len(args) == 4):
self.__initArgs__(args[0], args[1], args[2], args[2], args[3], args[3])
elif (len(args) == 6):
self.__initArgs__(args[0], args[1], args[2], args[3], args[4], args[5])
def __initArgs__(self, processor, serverTransport,
inputTransportFactory, outputTransportFactory,
inputProtocolFactory, outputProtocolFactory):
self.processor = processor
self.serverTransport = serverTransport
self.inputTransportFactory = inputTransportFactory
self.outputTransportFactory = outputTransportFactory
self.inputProtocolFactory = inputProtocolFactory
self.outputProtocolFactory = outputProtocolFactory
def serve(self):
pass
class TSimpleServer(TServer):
"""Simple single-threaded server that just pumps around one transport."""
def __init__(self, *args):
TServer.__init__(self, *args)
def serve(self):
self.serverTransport.listen()
while True:
client = self.serverTransport.accept()
if not client:
continue
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException as tx:
pass
except Exception as x:
logger.exception(x)
itrans.close()
otrans.close()
class TThreadedServer(TServer):
"""Threaded server that spawns a new thread per each connection."""
def __init__(self, *args, **kwargs):
TServer.__init__(self, *args)
self.daemon = kwargs.get("daemon", False)
def serve(self):
self.serverTransport.listen()
while True:
try:
client = self.serverTransport.accept()
if not client:
continue
t = threading.Thread(target=self.handle, args=(client,))
t.setDaemon(self.daemon)
t.start()
except KeyboardInterrupt:
raise
except Exception as x:
logger.exception(x)
def handle(self, client):
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException as tx:
pass
except Exception as x:
logger.exception(x)
itrans.close()
otrans.close()
class TThreadPoolServer(TServer):
"""Server with a fixed size pool of threads which service requests."""
def __init__(self, *args, **kwargs):
TServer.__init__(self, *args)
self.clients = Queue.Queue()
self.threads = 10
self.daemon = kwargs.get("daemon", False)
def setNumThreads(self, num):
"""Set the number of worker threads that should be created"""
self.threads = num
def serveThread(self):
"""Loop around getting clients from the shared queue and process them."""
while True:
try:
client = self.clients.get()
self.serveClient(client)
except Exception as x:
logger.exception(x)
def serveClient(self, client):
"""Process input/output from a client for as long as possible"""
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException as tx:
pass
except Exception as x:
logger.exception(x)
itrans.close()
otrans.close()
def serve(self):
"""Start a fixed number of worker threads and put client into a queue"""
for i in range(self.threads):
try:
t = threading.Thread(target=self.serveThread)
t.setDaemon(self.daemon)
t.start()
except Exception as x:
logger.exception(x)
# Pump the socket for clients
self.serverTransport.listen()
while True:
try:
client = self.serverTransport.accept()
if not client:
continue
self.clients.put(client)
except Exception as x:
logger.exception(x)
class TForkingServer(TServer):
"""A Thrift server that forks a new process for each request
This is more scalable than the threaded server as it does not cause
GIL contention.
Note that this has different semantics from the threading server.
Specifically, updates to shared variables will no longer be shared.
  It will also not work on Windows.
This code is heavily inspired by SocketServer.ForkingMixIn in the
Python stdlib.
"""
def __init__(self, *args):
TServer.__init__(self, *args)
self.children = []
def serve(self):
def try_close(file):
try:
file.close()
except IOError as e:
logger.warning(e, exc_info=True)
self.serverTransport.listen()
while True:
client = self.serverTransport.accept()
if not client:
continue
try:
pid = os.fork()
if pid: # parent
# add before collect, otherwise you race w/ waitpid
self.children.append(pid)
self.collect_children()
# Parent must close socket or the connection may not get
# closed promptly
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
try_close(itrans)
try_close(otrans)
else:
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
ecode = 0
try:
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException as tx:
pass
except Exception as e:
logger.exception(e)
ecode = 1
finally:
try_close(itrans)
try_close(otrans)
os._exit(ecode)
except TTransport.TTransportException as tx:
pass
except Exception as x:
logger.exception(x)
def collect_children(self):
while self.children:
try:
pid, status = os.waitpid(0, os.WNOHANG)
except os.error:
pid = None
if pid:
self.children.remove(pid)
else:
break
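# Illustrative usage sketch (not part of the original file): wiring a handler
# into one of the servers above. ``MyService`` and ``MyHandler`` are
# hypothetical names for a Thrift-generated service and its implementation.
#
#     from thrift.transport import TSocket
#
#     handler = MyHandler()
#     processor = MyService.Processor(handler)
#     transport = TSocket.TServerSocket(port=9090)
#     server = TThreadPoolServer(processor, transport)
#     server.setNumThreads(4)
#     server.serve()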
|
cogniteev/pyonedrive
|
refs/heads/master
|
tests/auth_test.py
|
1
|
""" Authentication tests
"""
import unittest
import mock
from pyonedrive import LiveAuth
import requests
class AuthTestCase(unittest.TestCase):
def setUp(self):
self.auth = LiveAuth('id', 'secret', 'scope', 'redirect')
def test_generate_code_url(self):
url = self.auth.generate_oauth_initiation_url('code')
self.assertIsInstance(url, str)
self.assertEquals(
url,
'https://login.live.com/oauth20_authorize.srf?client_id='
'id&scope=scope&response_type=code&redirect_uri=redirect')
def test_generate_token_url(self):
url = self.auth.generate_oauth_initiation_url('token')
self.assertIsInstance(url, str)
self.assertEquals(
url,
'https://login.live.com/oauth20_authorize.srf?client_id='
'id&scope=scope&response_type=token&redirect_uri=redirect')
def test_generate_invalid_url(self):
with self.assertRaises(ValueError) as val:
self.auth.generate_oauth_initiation_url('invalid')
self.assertIsNotNone(val)
def test_exchange_code(self):
res = requests.Response()
res.status_code = 200
with mock.patch('pyonedrive.live_auth.requests') as mock_requests:
mock_requests.post.return_value = res
r = self.auth.exchange_oauth_code_for_token('my_code')
mock_requests.post.assert_called_once_with(
'https://login.live.com/oauth20_token.srf',
data={
'code': 'my_code',
'client_secret': 'secret',
'redirect_uri': 'redirect',
'client_id': 'id',
'grant_type': 'authorization_code'
}
)
self.assertEquals(r.status_code, 200)
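# Illustrative flow sketch (not part of the original tests): the OAuth steps
# exercised above, in the order a client would normally run them. The
# credential values below are placeholders.
#
#     auth = LiveAuth('client_id', 'client_secret', 'scope', 'redirect_uri')
#     url = auth.generate_oauth_initiation_url('code')   # send the user here
#     # ...the user authorizes and comes back with ?code=<my_code>...
#     response = auth.exchange_oauth_code_for_token('<my_code>')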
|
thecut/thecut-stripe
|
refs/heads/master
|
thecut/stripe/querysets.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.db import models
class KnownFieldMixin(object):
def _get_known_field_value(self, field_name):
"""
        If the queryset was called on a concrete model instance (like
        ``account.charges.all()``), this method can be used to get that
        concrete instance.
        Note: it returns ``None`` if it can't find the field value (instead
        of raising ``KeyError``).
        Example:
        if the queryset is called like this:
        ``account123.charges.create(...)``
        then, inside ``.create``, a call to
        ``self._get_known_field_value('account')``
        will return ``account123``.
        But inside ``Charge.objects.create(..)`` it will return ``None``.
        Note 2: if the child model (``charge`` in the example) does not have
        a ``field_name`` field, this method will raise ``FieldDoesNotExist``.
"""
field = self.model._meta.get_field_by_name(field_name)[0]
try:
return self._known_related_objects[field].values()[0]
except KeyError:
return None
def _get_account(self):
return self._get_known_field_value('account')
class ChargeQuerySet(KnownFieldMixin, models.query.QuerySet):
def _get_customer(self):
return self._get_known_field_value('customer')
def create(self, customer=None, account=None, **kwargs):
"""
Charges can be created in various different ways:
1) Charge.objects.create(account=aa, amount=xx, source=card_yy)
- no customer set, charge on a connected account
- account has to be provided
2) account.charges.create(amount=xx, source=card_yy)
3) customer.charges.create(amount=xx[, source=card_yy])
- Resulting charge will have customer set
- account is optional
        This method sanity-checks its parameters and raises ``ValueError`` if
        the combination of customer, account and calling queryset does not
        make sense.
:return: :py:class:`thecut.stripe.models.Charge`
"""
# "parent" as in before .
# If we are called on concrete model like this ``acc.charges.create()``
# parent_account = acc
parent_customer = self._get_customer()
parent_account = self._get_account()
if customer and parent_customer and customer != parent_customer:
# If called ``customer_john.charges.create(customer=customer_dick)``
raise ValueError("Conflicting customer instances provided")
if account and parent_account and account != parent_account:
# If called ``account_a.charges.create(account=account_b)``
raise ValueError("Conflicting account instances provided")
customer = customer or parent_customer
account = account or parent_account
if customer and account and customer.account != account:
raise ValueError("Customer has conflicting account")
if customer:
# Create a charge using the stripe's customer api
stripe_charge = customer.api().charges().create(
customer=customer.stripe_id, **kwargs)
# Create a charge model instance
return super(ChargeQuerySet, self).create(
stripe_id=stripe_charge.id, account=customer.account,
customer=customer)
elif account:
# Create a charge using the stripe API
stripe_charge = self.model._stripe.Charge.create(
api_key=account.secret_key, **kwargs)
            # Create a charge model instance
return super(ChargeQuerySet, self).create(
stripe_id=stripe_charge.id, account=account)
else:
raise ValueError("Account or customer should be provided")
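# Illustrative usage sketch (not part of the original module): the three
# creation paths described in ChargeQuerySet.create. ``account_a`` and
# ``customer_b`` are hypothetical model instances, 'card_xyz' a placeholder
# payment source.
#
#     Charge.objects.create(account=account_a, amount=1000, source='card_xyz')
#     account_a.charges.create(amount=1000, source='card_xyz')
#     customer_b.charges.create(amount=1000)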
class CustomerQuerySet(KnownFieldMixin, models.query.QuerySet):
def create(self, account=None, plan=None, **kwargs):
account = account or self._get_account()
if plan:
kwargs.update({'plan': plan.stripe_id})
# Create a customer using the stripe API
stripe_customer = self.model._stripe.Customer.create(
api_key=account.secret_key, **kwargs)
# Create a customer model instance
customer = super(CustomerQuerySet, self).create(
stripe_id=stripe_customer.id, account=account)
# If a plan was provided, then we'll also want to sync subscriptions
if plan:
customer.subscriptions.sync()
return customer
class PlanQuerySet(KnownFieldMixin, models.query.QuerySet):
def create(self, account=None, **kwargs):
account = account or self._get_account()
# Create a plan using the stripe API
stripe_plan = self.model._stripe.Plan.create(
api_key=account.secret_key, **kwargs)
# Create a plan model instance
return super(PlanQuerySet, self).create(
stripe_id=stripe_plan.id, account=account)
class SubscriptionQuerySet(KnownFieldMixin, models.query.QuerySet):
def _get_customer(self):
return self._get_known_field_value('customer')
def _get_plan(self):
return self._get_known_field_value('plan')
def create(self, customer=None, plan=None, **kwargs):
customer = customer or self._get_customer()
plan = plan or self._get_plan()
# Create a subscription using the stripe API
stripe_subscription = customer.api().subscriptions.create(
plan=plan.stripe_id, **kwargs)
# Create a subscription model instance
return super(SubscriptionQuerySet, self).create(
stripe_id=stripe_subscription.id, account=customer.account,
customer=customer, plan=plan)
|
biomodels/MODEL1172501439
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup, find_packages
setup(name='MODEL1172501439',
version=20140916,
description='MODEL1172501439 from BioModels',
url='http://www.ebi.ac.uk/biomodels-main/MODEL1172501439',
maintainer='Stanley Gu',
      maintainer_email='stanleygu@gmail.com',
packages=find_packages(),
package_data={'': ['*.xml', 'README.md']},
)
|
polyaxon/polyaxon
|
refs/heads/master
|
sdks/python/http_client/v1/polyaxon_sdk/models/v1_spark_replica.py
|
1
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.10.0
Contact: contact@polyaxon.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1SparkReplica(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'replicas': 'int',
'environment': 'V1Environment',
'init': 'list[V1Init]',
'sidecars': 'list[V1Container]',
'container': 'V1Container'
}
attribute_map = {
'replicas': 'replicas',
'environment': 'environment',
'init': 'init',
'sidecars': 'sidecars',
'container': 'container'
}
def __init__(self, replicas=None, environment=None, init=None, sidecars=None, container=None, local_vars_configuration=None): # noqa: E501
"""V1SparkReplica - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._replicas = None
self._environment = None
self._init = None
self._sidecars = None
self._container = None
self.discriminator = None
if replicas is not None:
self.replicas = replicas
if environment is not None:
self.environment = environment
if init is not None:
self.init = init
if sidecars is not None:
self.sidecars = sidecars
if container is not None:
self.container = container
@property
def replicas(self):
"""Gets the replicas of this V1SparkReplica. # noqa: E501
:return: The replicas of this V1SparkReplica. # noqa: E501
:rtype: int
"""
return self._replicas
@replicas.setter
def replicas(self, replicas):
"""Sets the replicas of this V1SparkReplica.
:param replicas: The replicas of this V1SparkReplica. # noqa: E501
:type: int
"""
self._replicas = replicas
@property
def environment(self):
"""Gets the environment of this V1SparkReplica. # noqa: E501
:return: The environment of this V1SparkReplica. # noqa: E501
:rtype: V1Environment
"""
return self._environment
@environment.setter
def environment(self, environment):
"""Sets the environment of this V1SparkReplica.
:param environment: The environment of this V1SparkReplica. # noqa: E501
:type: V1Environment
"""
self._environment = environment
@property
def init(self):
"""Gets the init of this V1SparkReplica. # noqa: E501
:return: The init of this V1SparkReplica. # noqa: E501
:rtype: list[V1Init]
"""
return self._init
@init.setter
def init(self, init):
"""Sets the init of this V1SparkReplica.
:param init: The init of this V1SparkReplica. # noqa: E501
:type: list[V1Init]
"""
self._init = init
@property
def sidecars(self):
"""Gets the sidecars of this V1SparkReplica. # noqa: E501
:return: The sidecars of this V1SparkReplica. # noqa: E501
:rtype: list[V1Container]
"""
return self._sidecars
@sidecars.setter
def sidecars(self, sidecars):
"""Sets the sidecars of this V1SparkReplica.
:param sidecars: The sidecars of this V1SparkReplica. # noqa: E501
:type: list[V1Container]
"""
self._sidecars = sidecars
@property
def container(self):
"""Gets the container of this V1SparkReplica. # noqa: E501
:return: The container of this V1SparkReplica. # noqa: E501
:rtype: V1Container
"""
return self._container
@container.setter
def container(self, container):
"""Sets the container of this V1SparkReplica.
:param container: The container of this V1SparkReplica. # noqa: E501
:type: V1Container
"""
self._container = container
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1SparkReplica):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1SparkReplica):
return True
return self.to_dict() != other.to_dict()
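# Illustrative usage sketch (not part of the generated file): construct a
# replica spec and serialize it; the field values below are hypothetical.
if __name__ == "__main__":
    example = V1SparkReplica(replicas=2, init=[], sidecars=[])
    # to_dict() walks openapi_types and returns plain Python structures
    print(example.to_dict())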
|
LordDamionDevil/Lony
|
refs/heads/master
|
lib/youtube_dl/extractor/snotr.py
|
64
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
parse_filesize,
str_to_int,
)
class SnotrIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?snotr\.com/video/(?P<id>\d+)/([\w]+)'
_TESTS = [{
'url': 'http://www.snotr.com/video/13708/Drone_flying_through_fireworks',
'info_dict': {
'id': '13708',
'ext': 'mp4',
'title': 'Drone flying through fireworks!',
'duration': 248,
'filesize_approx': 40700000,
'description': 'A drone flying through Fourth of July Fireworks',
'thumbnail': r're:^https?://.*\.jpg$',
},
'expected_warnings': ['description'],
}, {
'url': 'http://www.snotr.com/video/530/David_Letteman_-_George_W_Bush_Top_10',
'info_dict': {
'id': '530',
'ext': 'mp4',
'title': 'David Letteman - George W. Bush Top 10',
'duration': 126,
'filesize_approx': 8500000,
'description': 'The top 10 George W. Bush moments, brought to you by David Letterman!',
'thumbnail': r're:^https?://.*\.jpg$',
}
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
title = self._og_search_title(webpage)
description = self._og_search_description(webpage)
info_dict = self._parse_html5_media_entries(
url, webpage, video_id, m3u8_entry_protocol='m3u8_native')[0]
view_count = str_to_int(self._html_search_regex(
r'<p[^>]*>\s*<strong[^>]*>Views:</strong>\s*<span[^>]*>([\d,\.]+)',
webpage, 'view count', fatal=False))
duration = parse_duration(self._html_search_regex(
r'<p[^>]*>\s*<strong[^>]*>Length:</strong>\s*<span[^>]*>([\d:]+)',
webpage, 'duration', fatal=False))
filesize_approx = parse_filesize(self._html_search_regex(
r'<p[^>]*>\s*<strong[^>]*>Filesize:</strong>\s*<span[^>]*>([^<]+)',
webpage, 'filesize', fatal=False))
info_dict.update({
'id': video_id,
'description': description,
'title': title,
'view_count': view_count,
'duration': duration,
'filesize_approx': filesize_approx,
})
return info_dict
|
sdotter/GPE-5.1.0
|
refs/heads/master
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too much: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
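# Illustrative example (not part of the original script): what parseLine
# expects and returns, assuming a function-tracer line of the usual form.
#
#     line = "   bash-1994  [000]  1163.412794: sys_open <-do_sys_open"
#     parseLine(line)  # -> ('1163.412794', 'sys_open', 'do_sys_open')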
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
ponyii/The-favorite-pocket-world-of-Nebuchadnezzar-III-The-Rhino-in-exile
|
refs/heads/master
|
game/player.py
|
1
|
import world_map.cell_and_map_classes as cell_and_map_classes
import local_settings, visualization, helpers
import units_and_buildings.unit_classes as U_classes
from units_and_buildings.function_argument_requirer import function_argument_requirer
import constants as CONST
from globals import G
class player_map_cell:
def __init__(self, link):
# link is key from G.world_map
# ??? units are {'players' : list of indexes from self.units}
self.link = link
self.wm_cell = G.world_map.get_cell(link)
self.visual_info = [{
'ph_template' : self.wm_cell.ph_template,
'icon' : self.wm_cell.icon,
'contour': self.wm_cell.contour
},
False]
# second one is previous visual_info
self.units = self.wm_cell.units
def update(self, another):
# another is new player_map_cell object
self.visual_info = [another.visual_info[0], self.visual_info[0]]
self.units = another.units
def get_changes(self):
self.visual_info[0] = {'ph_template' : G.world_map.get_cell(self.link).ph_template,
'icon' : G.world_map.get_cell(self.link).icon,
'contour': G.world_map.get_cell(self.link).contour}
def get_adj_cells(wm_cell, pl_cell):
# returns new cells on user map
# list of [player cell coords (string), player_map_cell object]
# adds new cells to user map
layer, x, y = pl_cell.split(', ')
layer = int(layer)
x = int(x)
y = int(y)
new_cells = []
for i in wm_cell.adjacent:
        adj_link = ', '.join(str(c) for c in wm_cell.adjacent[i])  # build the "layer, x, y" key string
adj_wm_cell = G.world_map.get_cell(adj_link)
adj_pl_cell = one_step_on_user_map(pl_cell, i)
        new_cells.append([adj_pl_cell, player_map_cell(adj_link)])  # player_map_cell reads the units from the world cell itself
return new_cells
class player:
def __init__(self, info, first_cell):
self.name = info['name']
self.color = info['color']
self.is_died = False
self.is_players_turn = False
        self.queue = {self.change_vision_area: []}  # maps function -> list of argument lists; when the turn starts, each function is called once per stored argument list
self.init_command_list()
self.current_unit = None
self.map = {'0, 0, 0': player_map_cell(first_cell)}
        '''key - stringed coords, element - player_map_cell object'''
self.units = []
'''list of unit instances'''
self.cities = {}
'''key - pl_cell, value - city object'''
self.V = visualization.player_interface(local_settings.screen_size, self)
self.V.update_cells(['0, 0, 0'], self.map)
self.new_unit(U_classes.settler,
'settler',
G.world_map.get_cell(first_cell),
'0, 0, 0',
2)
self.new_unit(U_classes.mage,
'mage',
G.world_map.get_cell(first_cell),
'0, 0, 0',
3)
# self.wait_for_command()
self.V.is_hidden = False
def init_command_list(self):
'''keycode : [function, args]'''
self.list_of_commands = {
# move unit
'move right': [self.move_unit_one_step, ['_R']],
'move left': [self.move_unit_one_step, ['_L']],
'move up-right': [self.move_unit_one_step, ['UR']],
'move down-right': [self.move_unit_one_step, ['DR']],
'move up-left': [self.move_unit_one_step, ['UL']],
'move down-left': [self.move_unit_one_step, ['DL']],
# unit special action
'unit special action': [self.execute_current_unit_special_action, []],
'get cell info': [self.get_cell_info, []],
'next unit': [self.next_unit, []],
'end turn': [self.end_turn, []]
}
def wait_for_command(self):
while self.is_players_turn:
command, c_arg = self.V.wait_for_command()
func, arg = self.list_of_commands[command]
arg = c_arg + arg
func(*arg)
def check_if_died(self):
if len(self.units) == 0:
return True
else:
return False
def start_turn(self):
self.is_players_turn = True
for unit in self.units:
unit.restore_action_points()
if unit.status == CONST.unit_status.moved:
unit.status = CONST.unit_status.should_be_moved
for function in self.queue:
arg_list = self.queue[function]
for arg in arg_list:
function(*arg)
self.queue = {self.change_vision_area: []}
if self.check_if_died():
self.is_died = True
G.delete_player(self)
return
self.V.update_map_on_screen()
if self.current_unit == None:
self.V.update_info_bar(None)
else:
self.V.update_info_bar(self.get_current_user_obj().unit_bar_info)
self.center_camera_on_current_unit()
self.wait_for_command()
def end_turn(self):
for i in range(len(self.units)):
if self.units[i].status == CONST.unit_status.should_be_moved:
self.set_current_unit(i)
return
self.is_players_turn = False
def get_cell(self, stringified_coords):
return self.map[stringified_coords]
def get_world_cell(self, stringified_coords):
return G.world_map.get_cell(self.get_cell(stringified_coords).link)
def get_unit_by_index(self, index):
return self.units[index]
def add_unit(self, pl_cell, unit_inst):
index = len(self.units)
self.units.append(unit_inst)
self.set_current_unit(index)
def remove_unit(self, unit_inst):
index = self.units.index(unit_inst)
self.get_cell(unit_inst.pl_cell).get_changes()
# todo - what is it???
self.V.update_cells([unit_inst.pl_cell], self.map)
self.units.pop(index)
if self.current_unit == index:
self.current_unit = None
self.V.update_info_bar(None)
elif self.current_unit is not None and self.current_unit > index:
self.current_unit -= 1
def update_map(self, new_cells):
        '''new_cells is a list of {'pl_coords': string, 'wm_cell': world_map_cell instance};
        returns the list of keys of updated cells'''
updated_cells = []
for i in new_cells:
updated_cells.append(i['pl_coords'])
pl_cell = player_map_cell(i['wm_cell'].name)
if i['pl_coords'] in self.map:
self.map[i['pl_coords']].update(pl_cell)
else:
self.map[i['pl_coords']] = pl_cell
return updated_cells
def set_current_unit(self, index):
self.current_unit = index
self.V.update_info_bar(self.units[index].unit_bar_info)
self.center_camera_on_current_unit()
def center_camera_on_current_unit(self):
self.V.center_camera_on_cell(self.units[self.current_unit].pl_cell)
def get_current_user_obj(self):
try:
return self.units[self.current_unit]
except:
return None
def require_value(self, type, description):
# todo - such working with types looks ugly
if str(type) == "<class 'world_map.cell_and_map_classes.cell'>":
while True:
got_coords = self.V.choose_cell(description)
# todo - here should be checked if cell reachable
if got_coords in self.map:
return self.get_world_cell(got_coords)
else:
self.V.message('not a cell')
elif str(type) == "<class 'int'>" or str(type) == "<class 'str'>":
got = self.V.require_value(type, description)
else:
raise Exception('Incorrect type of value should be required from player')
return got
def add_city(self, new_city):
# todo - add it
pass
def remove_city(self, city_inst):
# todo - remove it!
pass
def next_unit(self):
if self.current_unit is not None:
self.current_unit += 1
if self.current_unit >= len(self.units):
self.current_unit = 0
self.set_current_unit(self.current_unit)
else:
if len(self.units) > 0:
self.set_current_unit(0)
    def get_pl_coords_by_wm_cell(self, wm_cell):
        '''Return the set of pl_coords matching the given wm_cell; raise an exception if no matching coords are found.'''
result = set()
for pl_coords in self.map:
if wm_cell == self.map[pl_coords].wm_cell:
result.add(pl_coords)
if len(result) > 0:
return result
else:
raise Exception('Tried to get pl_coords by wm_cell - no matching pl_coords')
def change_vision_area(self, unseen_cells, seen_cells, changed_cells):
'''
unseen - cells, which player can't see any more,
list of elements like {'wm_cell' : cell_inst, 'pl_cell': string
seen - which he couldn't see before but see now,
list of elements like {'wm_cell' : cell_inst, 'pl_cell': string
changed - visible cells was changed (by other players, ...)
list of cell_inst
'''
if self.is_players_turn:
# todo - fog for unseen cells
unseen_wm_cells = set()
for element in unseen_cells:
unseen_wm_cells.add(element['wm_cell'])
seen_wm_cells = set()
for element in seen_cells:
if element['wm_cell'] in unseen_wm_cells:
seen_cells.remove(element)
else:
seen_wm_cells.add(element['wm_cell'])
updated_cells = self.update_map(seen_cells)
self.V.update_cells(updated_cells, self.map)
changed_cells_coords = []
for element in changed_cells:
if element not in unseen_wm_cells and \
element not in seen_wm_cells:
for pl_coords in self.get_pl_coords_by_wm_cell(element):
if pl_coords not in changed_cells_coords:
changed_cells_coords.append({'pl_coords': pl_coords, 'wm_cell': element})
updated_cells = self.update_map(changed_cells_coords)
self.V.update_cells(updated_cells, self.map)
else:
queue = self.queue[self.change_vision_area]
arg_list = [unseen_cells, seen_cells, changed_cells]
if len(queue) > 1:
                raise Exception('more than 1 change_vision_area function in player queue')
elif len(queue) == 1:
self.queue[self.change_vision_area][0] = helpers.summ(queue[0], arg_list)
else:
self.queue[self.change_vision_area] = [arg_list]
def new_unit(self, unit_type, name, wm_cell, pl_cell, radius = 1):
new_unit = unit_type(name, self, wm_cell, pl_cell, radius)
self.add_unit(pl_cell, new_unit)
def move_unit_one_step(self, direction):
unit_index = self.current_unit
if unit_index is not None:
unit = self.get_unit_by_index(unit_index)
unit.one_step(direction)
def execute_current_unit_special_action(self):
unit = self.get_current_user_obj()
if unit is not None:
if isinstance(unit.special_action, function_argument_requirer):
result = unit.special_action.execute(unit, self)
if isinstance(result, str):
self.V.message(result)
if self.current_unit is not None and unit.action_points <= 0:
self.units[self.current_unit].status = 'moved'
else: # todo - add types
self.V.message('No special action')
else:
self.V.message('No current unit')
def get_cell_info(self, pl_cell):
if pl_cell in self.map:
wm_coords = self.map[pl_cell].link
wm_cell = G.world_map.get_cell(wm_coords)
self.V.info_window(wm_cell.get_info())
def get_cell_for_new_unit(self, slot_type):
'''slot_type is 'warrior', 'civilian' or 'characters' - cell slot type'''
for cell_coords in self.map:
wm_coords = self.map[cell_coords].link
wm_cell = G.world_map.get_cell(wm_coords)
            if not isinstance(wm_cell, cell_and_map_classes.worlds_end):  # assuming worlds_end is defined in cell_and_map_classes
if not wm_cell.sea and (wm_cell.units[slot_type] is None or slot_type == 'characters'):
return {'pl_cell' : cell_coords, 'wm_cell' : wm_coords}
def message(self, text):
if self.is_players_turn:
self.V.message(text)
else:
if self.message not in self.queue:
self.queue[self.message] = [[text]]
else:
self.queue[self.message].append([text])
def __repr__(self):
return 'player | name ' + self.name
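# Illustrative sketch (not part of the original module): the basic turn flow
# driven by the class above. The info dict and first_cell key below are
# hypothetical values matching what __init__ expects.
#
#     p = player({'name': 'Nebuchadnezzar III', 'color': 'red'}, '1, 4, 2')
#     p.start_turn()   # restores action points, flushes the queue,
#                      # then blocks in wait_for_command()
#     p.end_turn()     # invoked from the command loop when the player is done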
|
westinedu/sovleit
|
refs/heads/master
|
django/contrib/formtools/tests/urls.py
|
245
|
"""
This is a URLconf to be loaded by tests.py. Add any URLs needed for tests only.
"""
from django.conf.urls.defaults import *
from django.contrib.formtools.tests import *
urlpatterns = patterns('',
(r'^test1/', TestFormPreview(TestForm)),
(r'^test2/', UserSecuredFormPreview(TestForm)),
(r'^wizard/$', WizardClass([WizardPageOneForm,
WizardPageTwoForm,
WizardPageThreeForm])),
(r'^wizard2/$', UserSecuredWizardClass([WizardPageOneForm,
WizardPageTwoForm,
WizardPageThreeForm]))
)
|
JGrippo/YACS
|
refs/heads/master
|
courses/south_migrations/0001_initial.py
|
2
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Semester'
db.create_table('courses_semester', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('year', self.gf('django.db.models.fields.IntegerField')()),
('month', self.gf('django.db.models.fields.IntegerField')()),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('ref', self.gf('django.db.models.fields.CharField')(unique=True, max_length=150)),
('date_updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('courses', ['Semester'])
# Adding unique constraint on 'Semester', fields ['year', 'month']
db.create_unique('courses_semester', ['year', 'month'])
# Adding model 'Department'
db.create_table('courses_department', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(default='', max_length=200, blank=True)),
('code', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
))
db.send_create_signal('courses', ['Department'])
# Adding model 'Period'
db.create_table('courses_period', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('start', self.gf('django.db.models.fields.TimeField')(default=None, null=True)),
('end', self.gf('django.db.models.fields.TimeField')(default=None, null=True)),
('days_of_week_flag', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal('courses', ['Period'])
# Adding unique constraint on 'Period', fields ['start', 'end', 'days_of_week_flag']
db.create_unique('courses_period', ['start', 'end', 'days_of_week_flag'])
# Adding model 'SectionCrosslisting'
db.create_table('courses_sectioncrosslisting', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('semester', self.gf('django.db.models.fields.related.ForeignKey')(related_name='section_crosslistings', to=orm['courses.Semester'])),
('ref', self.gf('django.db.models.fields.CharField')(unique=True, max_length=200)),
))
db.send_create_signal('courses', ['SectionCrosslisting'])
# Adding model 'Section'
db.create_table('courses_section', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('number', self.gf('django.db.models.fields.IntegerField')()),
('crn', self.gf('django.db.models.fields.IntegerField')(unique=True)),
('course', self.gf('django.db.models.fields.related.ForeignKey')(related_name='sections', to=orm['courses.Course'])),
('crosslisted', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='sections', null=True, to=orm['courses.SectionCrosslisting'])),
('seats_taken', self.gf('django.db.models.fields.IntegerField')()),
('seats_total', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal('courses', ['Section'])
# Adding model 'Course'
db.create_table('courses_course', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('number', self.gf('django.db.models.fields.IntegerField')()),
('department', self.gf('django.db.models.fields.related.ForeignKey')(related_name='courses', to=orm['courses.Department'])),
('min_credits', self.gf('django.db.models.fields.IntegerField')()),
('max_credits', self.gf('django.db.models.fields.IntegerField')()),
('grade_type', self.gf('django.db.models.fields.CharField')(default='', max_length=150, blank=True)),
))
db.send_create_signal('courses', ['Course'])
# Adding unique constraint on 'Course', fields ['department', 'number']
db.create_unique('courses_course', ['department_id', 'number'])
# Adding model 'OfferedFor'
db.create_table('courses_offeredfor', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('course', self.gf('django.db.models.fields.related.ForeignKey')(related_name='offered_for', to=orm['courses.Course'])),
('semester', self.gf('django.db.models.fields.related.ForeignKey')(related_name='offers', to=orm['courses.Semester'])),
))
db.send_create_signal('courses', ['OfferedFor'])
# Adding unique constraint on 'OfferedFor', fields ['course', 'semester']
db.create_unique('courses_offeredfor', ['course_id', 'semester_id'])
# Adding model 'SectionPeriod'
db.create_table('courses_sectionperiod', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('period', self.gf('django.db.models.fields.related.ForeignKey')(related_name='course_times', to=orm['courses.Period'])),
('section', self.gf('django.db.models.fields.related.ForeignKey')(related_name='course_times', to=orm['courses.Section'])),
('semester', self.gf('django.db.models.fields.related.ForeignKey')(related_name='course_times', to=orm['courses.Semester'])),
('instructor', self.gf('django.db.models.fields.CharField')(max_length=150, blank=True)),
('location', self.gf('django.db.models.fields.CharField')(max_length=150, blank=True)),
('kind', self.gf('django.db.models.fields.CharField')(max_length=75)),
))
db.send_create_signal('courses', ['SectionPeriod'])
# Adding unique constraint on 'SectionPeriod', fields ['period', 'section', 'semester']
db.create_unique('courses_sectionperiod', ['period_id', 'section_id', 'semester_id'])
# Adding model 'SemesterDepartment'
db.create_table('courses_semesterdepartment', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('department', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['courses.Department'])),
('semester', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['courses.Semester'])),
))
db.send_create_signal('courses', ['SemesterDepartment'])
# Adding unique constraint on 'SemesterDepartment', fields ['department', 'semester']
db.create_unique('courses_semesterdepartment', ['department_id', 'semester_id'])
# Adding model 'SemesterSection'
db.create_table('courses_semestersection', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('semester', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['courses.Semester'])),
('section', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['courses.Section'])),
))
db.send_create_signal('courses', ['SemesterSection'])
def backwards(self, orm):
# Removing unique constraint on 'SemesterDepartment', fields ['department', 'semester']
db.delete_unique('courses_semesterdepartment', ['department_id', 'semester_id'])
# Removing unique constraint on 'SectionPeriod', fields ['period', 'section', 'semester']
db.delete_unique('courses_sectionperiod', ['period_id', 'section_id', 'semester_id'])
# Removing unique constraint on 'OfferedFor', fields ['course', 'semester']
db.delete_unique('courses_offeredfor', ['course_id', 'semester_id'])
# Removing unique constraint on 'Course', fields ['department', 'number']
db.delete_unique('courses_course', ['department_id', 'number'])
# Removing unique constraint on 'Period', fields ['start', 'end', 'days_of_week_flag']
db.delete_unique('courses_period', ['start', 'end', 'days_of_week_flag'])
# Removing unique constraint on 'Semester', fields ['year', 'month']
db.delete_unique('courses_semester', ['year', 'month'])
# Deleting model 'Semester'
db.delete_table('courses_semester')
# Deleting model 'Department'
db.delete_table('courses_department')
# Deleting model 'Period'
db.delete_table('courses_period')
# Deleting model 'SectionCrosslisting'
db.delete_table('courses_sectioncrosslisting')
# Deleting model 'Section'
db.delete_table('courses_section')
# Deleting model 'Course'
db.delete_table('courses_course')
# Deleting model 'OfferedFor'
db.delete_table('courses_offeredfor')
# Deleting model 'SectionPeriod'
db.delete_table('courses_sectionperiod')
# Deleting model 'SemesterDepartment'
db.delete_table('courses_semesterdepartment')
# Deleting model 'SemesterSection'
db.delete_table('courses_semestersection')
models = {
'courses.course': {
'Meta': {'unique_together': "(('department', 'number'),)", 'object_name': 'Course'},
'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'courses'", 'to': "orm['courses.Department']"}),
'grade_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '150', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_credits': ('django.db.models.fields.IntegerField', [], {}),
'min_credits': ('django.db.models.fields.IntegerField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'number': ('django.db.models.fields.IntegerField', [], {}),
'semesters': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'courses'", 'symmetrical': 'False', 'through': "orm['courses.OfferedFor']", 'to': "orm['courses.Semester']"})
},
'courses.department': {
'Meta': {'object_name': 'Department'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'semesters': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'departments'", 'symmetrical': 'False', 'through': "orm['courses.SemesterDepartment']", 'to': "orm['courses.Semester']"})
},
'courses.offeredfor': {
'Meta': {'unique_together': "(('course', 'semester'),)", 'object_name': 'OfferedFor'},
'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'offered_for'", 'to': "orm['courses.Course']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'semester': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'offers'", 'to': "orm['courses.Semester']"})
},
'courses.period': {
'Meta': {'unique_together': "(('start', 'end', 'days_of_week_flag'),)", 'object_name': 'Period'},
'days_of_week_flag': ('django.db.models.fields.IntegerField', [], {}),
'end': ('django.db.models.fields.TimeField', [], {'default': 'None', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start': ('django.db.models.fields.TimeField', [], {'default': 'None', 'null': 'True'})
},
'courses.section': {
'Meta': {'object_name': 'Section'},
'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sections'", 'to': "orm['courses.Course']"}),
'crn': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
'crosslisted': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sections'", 'null': 'True', 'to': "orm['courses.SectionCrosslisting']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.IntegerField', [], {}),
'periods': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'courses'", 'symmetrical': 'False', 'through': "orm['courses.SectionPeriod']", 'to': "orm['courses.Period']"}),
'seats_taken': ('django.db.models.fields.IntegerField', [], {}),
'seats_total': ('django.db.models.fields.IntegerField', [], {}),
'semesters': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'sections'", 'symmetrical': 'False', 'through': "orm['courses.SemesterSection']", 'to': "orm['courses.Semester']"})
},
'courses.sectioncrosslisting': {
'Meta': {'object_name': 'SectionCrosslisting'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ref': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'semester': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'section_crosslistings'", 'to': "orm['courses.Semester']"})
},
'courses.sectionperiod': {
'Meta': {'unique_together': "(('period', 'section', 'semester'),)", 'object_name': 'SectionPeriod'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instructor': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'kind': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'period': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'course_times'", 'to': "orm['courses.Period']"}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'course_times'", 'to': "orm['courses.Section']"}),
'semester': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'course_times'", 'to': "orm['courses.Semester']"})
},
'courses.semester': {
'Meta': {'unique_together': "(('year', 'month'),)", 'object_name': 'Semester'},
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'month': ('django.db.models.fields.IntegerField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'ref': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
'courses.semesterdepartment': {
'Meta': {'unique_together': "(('department', 'semester'),)", 'object_name': 'SemesterDepartment'},
'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['courses.Department']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'semester': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['courses.Semester']"})
},
'courses.semestersection': {
'Meta': {'object_name': 'SemesterSection'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['courses.Section']"}),
'semester': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['courses.Semester']"})
}
}
complete_apps = ['courses']
|
dario61081/koalixcrm
|
refs/heads/master
|
koalixcrm/crm/factories/factory_payment_reminder.py
|
2
|
# -*- coding: utf-8 -*-
from koalixcrm.crm.models import PaymentReminder
from koalixcrm.crm.factories.factory_sales_document import StandardSalesDocumentFactory
class StandardPaymentReminderFactory(StandardSalesDocumentFactory):
class Meta:
model = PaymentReminder
payable_until = "2018-05-20"
payment_bank_reference = "This is a bank account reference"
iteration_number = "1"
status = "C"
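# Illustrative usage sketch (not part of the original module), assuming the
# usual factory_boy conventions inherited from StandardSalesDocumentFactory:
#
#     reminder = StandardPaymentReminderFactory()          # instance built with the defaults above
#     draft = StandardPaymentReminderFactory(status="P")   # override a field; "P" is a hypothetical status code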
|
pyecs/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_request.py
|
299
|
import os
import unittest
import urllib2
import json
import time
import wptserve
from base import TestUsingServer, doc_root
class TestInputFile(TestUsingServer):
def test_seek(self):
@wptserve.handlers.handler
def handler(request, response):
rv = []
f = request.raw_input
f.seek(5)
rv.append(f.read(2))
rv.append(f.tell())
f.seek(0)
rv.append(f.readline())
rv.append(f.tell())
rv.append(f.read(-1))
rv.append(f.tell())
f.seek(0)
rv.append(f.read())
f.seek(0)
rv.extend(f.readlines())
return " ".join(str(item) for item in rv)
route = ("POST", "/test/test_seek", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="POST", body="12345ab\ncdef")
self.assertEquals(200, resp.getcode())
self.assertEquals(["ab", "7", "12345ab\n", "8", "cdef", "12",
"12345ab\ncdef", "12345ab\n", "cdef"],
resp.read().split(" "))
def test_iter(self):
@wptserve.handlers.handler
def handler(request, response):
f = request.raw_input
return " ".join(line for line in f)
route = ("POST", "/test/test_iter", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="POST", body="12345\nabcdef\r\nzyxwv")
self.assertEquals(200, resp.getcode())
self.assertEquals(["12345\n", "abcdef\r\n", "zyxwv"], resp.read().split(" "))
class TestRequest(TestUsingServer):
def test_body(self):
@wptserve.handlers.handler
def handler(request, response):
request.raw_input.seek(5)
return request.body
route = ("POST", "/test/test_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="POST", body="12345ab\ncdef")
self.assertEquals("12345ab\ncdef", resp.read())
def test_route_match(self):
@wptserve.handlers.handler
def handler(request, response):
return request.route_match["match"] + " " + request.route_match["*"]
route = ("GET", "/test/{match}_*", handler)
self.server.router.register(*route)
resp = self.request("/test/some_route")
self.assertEquals("some route", resp.read())
class TestAuth(TestUsingServer):
def test_auth(self):
@wptserve.handlers.handler
def handler(request, response):
return " ".join((request.auth.username, request.auth.password))
route = ("GET", "/test/test_auth", handler)
self.server.router.register(*route)
resp = self.request(route[1], auth=("test", "PASS"))
self.assertEquals(200, resp.getcode())
self.assertEquals(["test", "PASS"], resp.read().split(" "))
if __name__ == '__main__':
unittest.main()
|
Pablo126/SSBW
|
refs/heads/master
|
Entrega1/lib/python3.5/site-packages/django/core/servers/basehttp.py
|
44
|
"""
HTTP server that implements the Python WSGI protocol (PEP 333, rev 1.21).
Based on wsgiref.simple_server which is part of the standard library since 2.5.
This is a simple server for use in testing or debugging Django apps. It hasn't
been reviewed for security issues. DON'T USE IT FOR PRODUCTION USE!
"""
from __future__ import unicode_literals
import logging
import socket
import sys
from wsgiref import simple_server
from django.core.exceptions import ImproperlyConfigured
from django.core.wsgi import get_wsgi_application
from django.utils import six
from django.utils.module_loading import import_string
from django.utils.six.moves import socketserver
__all__ = ('WSGIServer', 'WSGIRequestHandler')
logger = logging.getLogger('django.server')
def get_internal_wsgi_application():
"""
Loads and returns the WSGI application as configured by the user in
``settings.WSGI_APPLICATION``. With the default ``startproject`` layout,
this will be the ``application`` object in ``projectname/wsgi.py``.
This function, and the ``WSGI_APPLICATION`` setting itself, are only useful
for Django's internal server (runserver); external WSGI servers should just
be configured to point to the correct application object directly.
If settings.WSGI_APPLICATION is not set (is ``None``), we just return
whatever ``django.core.wsgi.get_wsgi_application`` returns.
"""
from django.conf import settings
app_path = getattr(settings, 'WSGI_APPLICATION')
if app_path is None:
return get_wsgi_application()
try:
return import_string(app_path)
except ImportError as e:
msg = (
"WSGI application '%(app_path)s' could not be loaded; "
"Error importing module: '%(exception)s'" % ({
'app_path': app_path,
'exception': e,
})
)
six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg),
sys.exc_info()[2])
def is_broken_pipe_error():
exc_type, exc_value = sys.exc_info()[:2]
return issubclass(exc_type, socket.error) and exc_value.args[0] == 32
class WSGIServer(simple_server.WSGIServer, object):
"""BaseHTTPServer that implements the Python WSGI protocol"""
request_queue_size = 10
def __init__(self, *args, **kwargs):
if kwargs.pop('ipv6', False):
self.address_family = socket.AF_INET6
self.allow_reuse_address = kwargs.pop('allow_reuse_address', True)
super(WSGIServer, self).__init__(*args, **kwargs)
def handle_error(self, request, client_address):
if is_broken_pipe_error():
logger.info("- Broken pipe from %s\n", client_address)
else:
super(WSGIServer, self).handle_error(request, client_address)
# Inheriting from object required on Python 2.
class ServerHandler(simple_server.ServerHandler, object):
def handle_error(self):
# Ignore broken pipe errors, otherwise pass on
if not is_broken_pipe_error():
super(ServerHandler, self).handle_error()
class WSGIRequestHandler(simple_server.WSGIRequestHandler, object):
def address_string(self):
# Short-circuit parent method to not call socket.getfqdn
return self.client_address[0]
def log_message(self, format, *args):
extra = {
'request': self.request,
'server_time': self.log_date_time_string(),
}
if args[1][0] == '4':
# 0x16 = Handshake, 0x03 = SSL 3.0 or TLS 1.x
if args[0].startswith(str('\x16\x03')):
extra['status_code'] = 500
logger.error(
"You're accessing the development server over HTTPS, but "
"it only supports HTTP.\n", extra=extra,
)
return
if args[1].isdigit() and len(args[1]) == 3:
status_code = int(args[1])
extra['status_code'] = status_code
if status_code >= 500:
level = logger.error
elif status_code >= 400:
level = logger.warning
else:
level = logger.info
else:
level = logger.info
level(format, *args, extra=extra)
def get_environ(self):
# Strip all headers with underscores in the name before constructing
# the WSGI environ. This prevents header-spoofing based on ambiguity
# between underscores and dashes both normalized to underscores in WSGI
# env vars. Nginx and Apache 2.4+ both do this as well.
for k, v in self.headers.items():
if '_' in k:
del self.headers[k]
return super(WSGIRequestHandler, self).get_environ()
def handle(self):
"""Copy of WSGIRequestHandler, but with different ServerHandler"""
self.raw_requestline = self.rfile.readline(65537)
if len(self.raw_requestline) > 65536:
self.requestline = ''
self.request_version = ''
self.command = ''
self.send_error(414)
return
if not self.parse_request(): # An error code has been sent, just exit
return
handler = ServerHandler(
self.rfile, self.wfile, self.get_stderr(), self.get_environ()
)
handler.request_handler = self # backpointer for logging
handler.run(self.server.get_app())
def run(addr, port, wsgi_handler, ipv6=False, threading=False, server_cls=WSGIServer):
server_address = (addr, port)
if threading:
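        # Dynamically build a server class that mixes ThreadingMixIn into
        # server_cls so each request is handled in its own thread.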
httpd_cls = type(str('WSGIServer'), (socketserver.ThreadingMixIn, server_cls), {})
else:
httpd_cls = server_cls
httpd = httpd_cls(server_address, WSGIRequestHandler, ipv6=ipv6)
if threading:
        # ThreadingMixIn.daemon_threads indicates how threads will behave on an
        # abrupt shutdown, such as the user quitting the server or the
        # auto-reloader restarting it. True means the server will not wait for
        # thread termination before it quits, which makes the auto-reloader
        # faster and avoids having to kill the server manually if a thread
        # isn't terminating correctly.
httpd.daemon_threads = True
httpd.set_app(wsgi_handler)
httpd.serve_forever()
|
robinandeer/puzzle
|
refs/heads/master
|
puzzle/plugins/gemini/mixins/variant.py
|
1
|
import logging
from gemini import GeminiQuery
from puzzle.plugins import BaseVariantMixin
from puzzle.plugins.constants import Results
from puzzle.models import (Compound, Variant, Gene, Genotype, Transcript,)
from puzzle.utils import (get_most_severe_consequence, get_omim_number,
get_cytoband_coord)
from . import VariantExtras
logger = logging.getLogger(__name__)
class VariantMixin(BaseVariantMixin, VariantExtras):
"""Class to store variant specific functions for gemini plugin"""
def build_gemini_query(self, query, extra_info):
"""Append sql to a gemini query
Args:
query(str): The gemini query
extra_info(str): The text that should be added
Return:
extended_query(str)
"""
if 'WHERE' in query:
return "{0} AND {1}".format(query, extra_info)
else:
return "{0} WHERE {1}".format(query, extra_info)
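    # Illustrative note (added, not in the original module): chaining calls
    # appends conditions, e.g.
    #   build_gemini_query("SELECT * from variants v", "(v.cadd_scaled > 20)")
    #     -> "SELECT * from variants v WHERE (v.cadd_scaled > 20)"
    # and a second call with another condition appends it with AND.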
def variants(self, case_id, skip=0, count=1000, filters=None):
"""Return count variants for a case.
        This function needs to behave differently based on what is asked
        for. It should always try to return minimal information to improve
        speed. For example, if consequences are not asked for we will not
        build all transcripts, and if sv variants are not requested we will
        not build the sv coordinates.
So the minimal case is to just show what is asked for in the variants
interface.
Args:
case_id (str): A gemini db
skip (int): Skip first variants
count (int): The number of variants to return
filters (dict): A dictionary with filters. Currently this will
look like: {
gene_list: [] (list of hgnc ids),
frequency: None (float),
cadd: None (float),
consequence: [] (list of consequences),
                    impact_severities: [] (list of impact severities),
genetic_models [] (list of genetic models)
}
Returns:
puzzle.constants.Results : Named tuple with variants and
nr_of_variants
"""
filters = filters or {}
logger.debug("Looking for variants in {0}".format(case_id))
limit = count + skip
gemini_query = filters.get('gemini_query') or "SELECT * from variants v"
any_filter = False
if filters.get('frequency'):
frequency = filters['frequency']
extra_info = "(v.max_aaf_all < {0} or v.max_aaf_all is"\
" Null)".format(frequency)
gemini_query = self.build_gemini_query(gemini_query, extra_info)
if filters.get('cadd'):
cadd_score = filters['cadd']
extra_info = "(v.cadd_scaled > {0})".format(cadd_score)
gemini_query = self.build_gemini_query(gemini_query, extra_info)
if filters.get('gene_ids'):
gene_list = [gene_id.strip() for gene_id in filters['gene_ids']]
gene_string = "v.gene in ("
for index, gene_id in enumerate(gene_list):
if index == 0:
gene_string += "'{0}'".format(gene_id)
else:
gene_string += ", '{0}'".format(gene_id)
gene_string += ")"
gemini_query = self.build_gemini_query(gemini_query, gene_string)
if filters.get('range'):
chrom = filters['range']['chromosome']
if not chrom.startswith('chr'):
chrom = "chr{0}".format(chrom)
range_string = "v.chrom = '{0}' AND "\
"((v.start BETWEEN {1} AND {2}) OR "\
"(v.end BETWEEN {1} AND {2}))".format(
chrom,
filters['range']['start'],
filters['range']['end']
)
gemini_query = self.build_gemini_query(gemini_query, range_string)
filtered_variants = self._variants(
case_id=case_id,
gemini_query=gemini_query,
)
if filters.get('consequence'):
consequences = set(filters['consequence'])
filtered_variants = (variant for variant in filtered_variants if
set(variant.consequences).intersection(consequences))
if filters.get('impact_severities'):
severities = set([severity.strip()
for severity in filters['impact_severities']])
filtered_variants = (variant for variant in filtered_variants if
set([variant.impact_severity]).intersection(severities))
if filters.get('sv_len'):
sv_len = int(filters['sv_len'])
filtered_variants = (variant for variant in filtered_variants if
variant.sv_len >= sv_len)
variants = []
for index, variant_obj in enumerate(filtered_variants):
if index >= skip:
if index < limit:
variants.append(variant_obj)
else:
break
return Results(variants, len(variants))
def variant(self, case_id, variant_id):
"""Return a specific variant.
We solve this by building a gemini query and send it to _variants
Args:
case_id (str): Path to a gemini database
variant_id (int): A gemini variant id
Returns:
variant_obj (dict): A puzzle variant
"""
#Use the gemini id for fast lookup
variant_id = int(variant_id)
gemini_query = "SELECT * from variants WHERE variant_id = {0}".format(
variant_id
)
individuals = []
# Get the individuals for the case
case_obj = self.case(case_id)
for individual in case_obj.individuals:
individuals.append(individual)
self.db = case_obj.variant_source
self.variant_type = case_obj.variant_type
gq = GeminiQuery(self.db)
gq.run(gemini_query)
for gemini_variant in gq:
variant = self._format_variant(
case_id=case_id,
gemini_variant=gemini_variant,
individual_objs=individuals,
index=gemini_variant['variant_id'],
add_all_info = True
)
return variant
return None
def _variants(self, case_id, gemini_query):
"""Return variants found in the gemini database
Args:
case_id (str): The case for which we want to see information
gemini_query (str): What variants should be chosen
filters (dict): A dictionary with filters
Yields:
variant_obj (dict): A Variant formatted dictionary
"""
individuals = []
# Get the individuals for the case
case_obj = self.case(case_id)
for individual in case_obj.individuals:
individuals.append(individual)
self.db = case_obj.variant_source
self.variant_type = case_obj.variant_type
gq = GeminiQuery(self.db)
gq.run(gemini_query)
index = 0
for gemini_variant in gq:
variant = None
# Check if variant is non ref in the individuals
is_variant = self._is_variant(gemini_variant, individuals)
if self.variant_type == 'snv' and not is_variant:
variant = None
else:
index += 1
logger.debug("Updating index to: {0}".format(index))
variant = self._format_variant(
case_id=case_id,
gemini_variant=gemini_variant,
individual_objs=individuals,
index=index
)
if variant:
yield variant
def _format_variant(self, case_id, gemini_variant, individual_objs,
index=0, add_all_info=False):
"""Make a puzzle variant from a gemini variant
Args:
case_id (str): related case id
gemini_variant (GeminiQueryRow): The gemini variant
individual_objs (list(dict)): A list of Individuals
index(int): The index of the variant
Returns:
variant (dict): A Variant object
"""
chrom = gemini_variant['chrom']
if chrom.startswith('chr') or chrom.startswith('CHR'):
chrom = chrom[3:]
variant_dict = {
'CHROM':chrom,
'POS':str(gemini_variant['start']),
'ID':gemini_variant['rs_ids'],
'REF':gemini_variant['ref'],
'ALT':gemini_variant['alt'],
'QUAL':gemini_variant['qual'],
'FILTER':gemini_variant['filter']
}
variant = Variant(**variant_dict)
# Use the gemini id for fast search
variant.update_variant_id(gemini_variant['variant_id'])
logger.debug("Creating a variant object of variant {0}".format(
variant.variant_id))
variant['index'] = index
# Add the most severe consequence
self._add_most_severe_consequence(variant, gemini_variant)
#Add the impact severity
self._add_impact_severity(variant, gemini_variant)
        ### POSITION ANNOTATIONS ###
variant.start = int(gemini_variant['start'])
variant.stop = int(gemini_variant['end'])
#Add the sv specific coordinates
if self.variant_type == 'sv':
variant.sv_type = gemini_variant['sub_type']
variant.stop = int(gemini_variant['end'])
self._add_sv_coordinates(variant)
else:
### Consequence and region annotations
#Add the transcript information
self._add_transcripts(variant, gemini_variant)
self._add_thousand_g(variant, gemini_variant)
self._add_exac(variant, gemini_variant)
self._add_gmaf(variant, gemini_variant)
#### Check the impact annotations ####
if gemini_variant['cadd_scaled']:
variant.cadd_score = gemini_variant['cadd_scaled']
# We use the prediction in text
polyphen = gemini_variant['polyphen_pred']
if polyphen:
variant.add_severity('Polyphen', polyphen)
# We use the prediction in text
sift = gemini_variant['sift_pred']
if sift:
variant.add_severity('SIFT', sift)
#Add the genes based on the hgnc symbols
self._add_hgnc_symbols(variant)
if self.variant_type == 'snv':
self._add_genes(variant)
self._add_consequences(variant)
        ### GENOTYPE ANNOTATIONS ###
#Get the genotype info
if add_all_info:
self._add_genotypes(variant, gemini_variant, case_id, individual_objs)
if self.variant_type == 'sv':
self._add_genes(variant)
return variant
def _is_variant(self, gemini_variant, ind_objs):
"""Check if the variant is a variation in any of the individuals
Args:
gemini_variant (GeminiQueryRow): The gemini variant
ind_objs (list(puzzle.models.individual)): A list of individuals to check
Returns:
bool : If any of the individuals has the variant
"""
indexes = (ind.ind_index for ind in ind_objs)
#Check if any individual have a heterozygous or homozygous variant call
for index in indexes:
gt_call = gemini_variant['gt_types'][index]
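            # gemini encodes gt_types as 0=HOM_REF, 1=HET, 2=UNKNOWN,
            # 3=HOM_ALT, so 1 and 3 mean a non-ref genotype call.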
            if gt_call in (1, 3):
return True
return False
|
QuickSander/CouchPotatoServer
|
refs/heads/master
|
libs/pyasn1/__init__.py
|
193
|
import sys
# http://www.python.org/dev/peps/pep-0396/
__version__ = '0.1.7'
if sys.version_info[:2] < (2, 4):
raise RuntimeError('PyASN1 requires Python 2.4 or later')
|
Sri0405/pattern
|
refs/heads/master
|
examples/01-web/08-wiktionary.py
|
21
|
import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from pattern.web import Wiktionary, DOM
from pattern.db import csv, pd
# This example retrieves male and female given names from Wiktionary (http://en.wiktionary.org).
# It then trains a classifier that can predict the gender of unknown names (about 78% correct).
# The classifier is small (80KB) and fast.
w = Wiktionary(language="en")
f = csv() # csv() is a short alias for Datasheet().
# Collect male and female given names from Wiktionary.
# Store the data as (name, gender)-rows in a CSV-file.
# The pd() function returns the parent directory of the current script,
# so pd("given-names.csv") = pattern/examples/01-web/given-names.csv.
for gender in ("male", "female"):
for ch in ("abcdefghijklmnopqrstuvwxyz"):
p = w.search("Appendix:%s_given_names/%s" % (gender.capitalize(), ch.capitalize()), cached=True)
for name in p.links:
if not name.startswith("Appendix:"):
f.append((name, gender[0]))
f.save(pd("given-names.csv"))
print ch, gender
# Create a classifier that predicts gender based on name.
from pattern.vector import SVM, chngrams, count, kfoldcv
class GenderByName(SVM):
def train(self, name, gender=None):
SVM.train(self, self.vector(name), gender)
def classify(self, name):
return SVM.classify(self, self.vector(name))
def vector(self, name):
""" Returns a dictionary with character bigrams and suffix.
For example, "Felix" => {"Fe":1, "el":1, "li":1, "ix":1, "ix$":1, 5:1}
"""
v = chngrams(name, n=2)
v = count(v)
v[name[-2:]+"$"] = 1
v[len(name)] = 1
return v
data = csv(pd("given-names.csv"))
# Test average (accuracy, precision, recall, F-score, standard deviation).
print kfoldcv(GenderByName, data, folds=3) # (0.81, 0.79, 0.77, 0.78, 0.00)
# Train and save the classifier in the current folder.
# With final=True, discards the original training data (= smaller file).
g = GenderByName(train=data)
g.save(pd("gender-by-name.svm"), final=True)
# Next time, we can simply load the trained classifier.
# Keep in mind that the script that loads the classifier
# must include the GenderByName class definition,
# otherwise Python won't know how to load the data.
g = GenderByName.load(pd("gender-by-name.svm"))
for name in (
"Felix",
"Felicia",
"Rover",
"Kitty",
"Legolas",
"Arwen",
"Jabba",
"Leia",
"Flash",
"Barbarella"):
print name, g.classify(name)
# In the example above, Arwen and Jabba are misclassified.
# We can of course improve the classifier by hand:
#g.train("Arwen", gender="f")
#g.train("Jabba", gender="m")
#g.save(pd("gender-by-name.svm"), final=True)
#print g.classify("Arwen")
#print g.classify("Jabba")
|
hainm/numpy
|
refs/heads/master
|
numpy/distutils/setup.py
|
263
|
#!/usr/bin/env python
from __future__ import division, print_function
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('distutils', parent_package, top_path)
config.add_subpackage('command')
config.add_subpackage('fcompiler')
config.add_data_dir('tests')
config.add_data_files('site.cfg')
config.add_data_files('mingw/gfortran_vs2003_hack.c')
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
furbrain/Coherence
|
refs/heads/maintain/0.6.x
|
coherence/backends/banshee_storage.py
|
3
|
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2009, Philippe Normand <phil@base-art.net>
"""
TODO:
- podcasts
"""
from twisted.internet import reactor, defer, task
from coherence.extern import db_row
from coherence.upnp.core import DIDLLite
from coherence.backend import BackendItem, BackendStore
from coherence.log import Loggable
import coherence.extern.louie as louie
from sqlite3 import dbapi2
# fallback on pysqlite2.dbapi2
import re
import os
import time
from datetime import datetime  # used by Track.get_item() / Video.get_item()
from urlparse import urlsplit
import urllib2
import mimetypes
mimetypes.init()
mimetypes.add_type('audio/x-m4a', '.m4a')
mimetypes.add_type('video/mp4', '.mp4')
mimetypes.add_type('video/mpegts', '.ts')
mimetypes.add_type('video/divx', '.divx')
mimetypes.add_type('video/divx', '.avi')
mimetypes.add_type('video/x-matroska', '.mkv')
mimetypes.add_type('audio/x-musepack', '.mpc')
mimetypes.add_type('audio/flac', '.flac')
mimetypes.add_type('audio/x-wavpack', '.wv')
mimetypes.add_type('audio/mp4', '.m4a')
ROOT_CONTAINER_ID = 0
AUDIO_CONTAINER = 200
VIDEO_CONTAINER = 300
AUDIO_ALL_CONTAINER_ID = 201
AUDIO_ARTIST_CONTAINER_ID = 202
AUDIO_ALBUM_CONTAINER_ID = 203
AUDIO_PLAYLIST_CONTAINER_ID = 204
VIDEO_ALL_CONTAINER_ID = 301
VIDEO_PLAYLIST_CONTAINER_ID = 302
def get_cover_path(artist_name, album_title):
def _escape_part(part):
escaped = ""
if part:
if part.find("(") > -1:
part = part[:part.find("(")]
escaped = re.sub("[^A-Za-z0-9]*", "", part).lower()
return escaped
base_dir = os.path.expanduser("~/.cache/album-art")
return os.path.join(base_dir, "%s-%s.jpg" % (_escape_part(artist_name),
_escape_part(album_title)))
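# Illustrative example (added comment): get_cover_path("Philip Glass",
# "Glassworks (Remastered)") resolves to
# ~/.cache/album-art/philipglass-glassworks.jpg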
class SQLiteDB(Loggable):
"""
Python DB API 2.0 backend support.
"""
logCategory = "sqlite"
def __init__(self, database):
""" Connect to a db backend hosting the given database.
"""
Loggable.__init__(self)
self._params = {'database': database, 'check_same_thread': True}
self.connect()
def disconnect(self):
self._db.close()
def connect(self):
"""
Connect to the database, set L{_db} instance variable.
"""
self._db = dbapi2.connect(**self._params)
def reconnect(self):
"""
Disconnect and reconnect to the database.
"""
self.disconnect()
self.connect()
def sql_execute(self, request, *params, **kw):
""" Execute a SQL query in the db backend
"""
t0 = time.time()
debug_msg = request
if params:
debug_msg = u"%s params=%r" % (request, params)
debug_msg = u''.join(debug_msg.splitlines())
if debug_msg:
self.debug('QUERY: %s', debug_msg)
cursor = self._db.cursor()
result = []
cursor.execute(request, params)
if cursor.description:
all_rows = cursor.fetchall()
result = db_row.getdict(all_rows, cursor.description)
cursor.close()
delta = time.time() - t0
self.log("SQL request took %s seconds", delta)
return result
class Container(BackendItem):
get_path = None
def __init__(self, id, parent_id, name, children_callback=None, store=None,
play_container=False):
BackendItem.__init__(self)
self.id = id
self.parent_id = parent_id
self.name = name
self.mimetype = 'directory'
self.store = store
self.play_container = play_container
self.update_id = 0
        if children_callback is not None:
self.children = children_callback
else:
self.children = []
def add_child(self, child):
self.children.append(child)
def get_children(self, start=0, request_count=0):
def got_children(children):
if request_count == 0:
return children[start:]
else:
return children[start:request_count]
if callable(self.children):
dfr = defer.maybeDeferred(self.children)
else:
dfr = defer.succeed(self.children)
dfr.addCallback(got_children)
return dfr
def get_child_count(self):
count = 0
if callable(self.children):
count = defer.maybeDeferred(self.children)
count.addCallback(lambda children: len(children))
else:
count = len(self.children)
return count
def get_item(self):
item = DIDLLite.Container(self.id, self.parent_id, self.name)
def got_count(count):
item.childCount = count
            if self.store and self.play_container:
if item.childCount > 0:
dfr = self.get_children(request_count=1)
dfr.addCallback(got_child, item)
return dfr
return item
def got_child(children, item):
res = DIDLLite.PlayContainerResource(self.store.server.uuid,
cid=self.get_id(),
fid=children[0].get_id())
item.res.append(res)
return item
dfr = defer.maybeDeferred(self.get_child_count)
dfr.addCallback(got_count)
return dfr
def get_name(self):
return self.name
def get_id(self):
return self.id
class Artist(BackendItem):
def __init__(self, *args, **kwargs):
BackendItem.__init__(self, *args, **kwargs)
self._row = args[0]
self._db = args[1]
self._local_music_library_id = args[2]
self.musicbrainz_id = self._row.MusicBrainzID
self.itemID = self._row.ArtistID
self.name = self._row.Name or ''
if self.name:
self.name = self.name.encode("utf-8")
def get_children(self, start=0, end=0):
albums = []
def query_db():
q = "select * from CoreAlbums where ArtistID=? and AlbumID in " \
"(select distinct(AlbumID) from CoreTracks where " \
"PrimarySourceID=?) order by Title"
rows = self._db.sql_execute(q, self.itemID,
self._local_music_library_id)
for row in rows:
album = Album(row, self._db, self)
albums.append(album)
yield album
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: albums)
return dfr
def get_child_count(self):
q = "select count(AlbumID) as c from CoreAlbums where ArtistID=? and " \
"AlbumID in (select distinct(AlbumID) from CoreTracks where " \
"PrimarySourceID=?) "
return self._db.sql_execute(q, self.itemID,
self._local_music_library_id)[0].c
def get_item(self):
item = DIDLLite.MusicArtist(self.get_id(),
AUDIO_ARTIST_CONTAINER_ID, self.name)
item.childCount = self.get_child_count()
return item
def get_id(self):
return "artist.%d" % self.itemID
def __repr__(self):
return '<Artist %d name="%s" musicbrainz="%s">' % (self.itemID,
self.name,
self.musicbrainz_id)
class Album(BackendItem):
""" definition for an album """
mimetype = 'directory'
get_path = None
def __init__(self, *args, **kwargs):
BackendItem.__init__(self, *args, **kwargs)
self._row = args[0]
self._db = args[1]
self.artist = args[2]
self.itemID = self._row.AlbumID
self.title = self._row.Title
self.cover = get_cover_path(self.artist.name, self.title)
if self.title:
self.title = self.title.encode("utf-8")
self.musicbrainz_id = self._row.MusicBrainzID
self.cd_count = 1
def get_children(self, start=0, request_count=0):
tracks = []
def query_db():
q = "select * from CoreTracks where AlbumID=? order by TrackNumber"
if request_count:
q += " limit %d" % request_count
rows = self._db.sql_execute(q, self.itemID)
for row in rows:
track = Track(row, self._db, self)
tracks.append(track)
yield track
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: tracks)
return dfr
def get_child_count(self):
q = "select count(TrackID) as c from CoreTracks where AlbumID=?"
count = self._db.sql_execute(q, self.itemID)[0].c
return count
def get_item(self):
item = DIDLLite.MusicAlbum(self.get_id(), AUDIO_ALBUM_CONTAINER_ID, self.title)
item.artist = self.artist.name
item.childCount = self.get_child_count()
if self.cover:
_, ext = os.path.splitext(self.cover)
item.albumArtURI = ''.join((self._db.urlbase,
self.get_id(), '?cover', ext))
def got_tracks(tracks):
res = DIDLLite.PlayContainerResource(self._db.server.uuid,
cid=self.get_id(),
fid=tracks[0].get_id())
item.res.append(res)
return item
if item.childCount > 0:
dfr = self.get_children(request_count=1)
dfr.addCallback(got_tracks)
else:
dfr = defer.succeed(item)
return dfr
def get_id(self):
return "album.%d" % self.itemID
def get_name(self):
return self.title
def get_cover(self):
return self.cover
def __repr__(self):
return '<Album %d title="%s" artist="%s" #cds %d cover="%s" musicbrainz="%s">' \
% (self.itemID, self.title,
self.artist.name,
self.cd_count,
self.cover,
self.musicbrainz_id)
class BasePlaylist(BackendItem):
""" definition for a playlist """
mimetype = 'directory'
get_path = None
def __init__(self, *args, **kwargs):
BackendItem.__init__(self, *args, **kwargs)
self._row = args[0]
self._store = args[1]
self._db = self._store.db
self.title = self._row.Name
if self.title:
self.title = self.title.encode("utf-8")
def get_tracks(self, request_count):
return []
def db_to_didl(self, row):
album = self._store.get_album_with_id(row.AlbumID)
track = Track(row, self._db, album)
return track
def get_id(self):
return "%s.%d" % (self.id_type, self.db_id)
def __repr__(self):
        return '<%s %d title="%s">' % (self.__class__.__name__,
self.db_id, self.title)
def get_children(self, start=0, request_count=0):
tracks = []
def query_db():
rows = self.get_tracks(request_count)
for row in rows:
track = self.db_to_didl(row)
tracks.append(track)
yield track
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: tracks)
return dfr
def get_child_count(self):
return self._row.CachedCount
def get_item(self):
item = DIDLLite.PlaylistContainer(self.get_id(),
AUDIO_PLAYLIST_CONTAINER_ID,
self.title)
item.childCount = self.get_child_count()
def got_tracks(tracks):
res = DIDLLite.PlayContainerResource(self._db.server.uuid,
cid=self.get_id(),
fid=tracks[0].get_id())
item.res.append(res)
return item
if item.childCount > 0:
dfr = self.get_children(request_count=1)
dfr.addCallback(got_tracks)
else:
dfr = defer.succeed(item)
return dfr
def get_name(self):
return self.title
class MusicPlaylist(BasePlaylist):
id_type = "musicplaylist"
@property
def db_id(self):
return self._row.PlaylistID
def get_tracks(self, request_count):
q = "select * from CoreTracks where TrackID in (select TrackID " \
"from CorePlaylistEntries where PlaylistID=?)"
if request_count:
q += " limit %d" % request_count
return self._db.sql_execute(q, self.db_id)
class MusicSmartPlaylist(BasePlaylist):
id_type = "musicsmartplaylist"
@property
def db_id(self):
return self._row.SmartPlaylistID
def get_tracks(self, request_count):
q = "select * from CoreTracks where TrackID in (select TrackID " \
"from CoreSmartPlaylistEntries where SmartPlaylistID=?)"
if request_count:
q += " limit %d" % request_count
return self._db.sql_execute(q, self.db_id)
class VideoPlaylist(MusicPlaylist):
id_type = "videoplaylist"
def db_to_didl(self, row):
return Video(row, self._db)
class VideoSmartPlaylist(MusicSmartPlaylist):
id_type = "videosmartplaylist"
def db_to_didl(self, row):
return Video(row, self._db)
class BaseTrack(BackendItem):
""" definition for a track """
def __init__(self, *args, **kwargs):
BackendItem.__init__(self, *args, **kwargs)
self._row = args[0]
self._db = args[1]
self.itemID = self._row.TrackID
self.title = self._row.Title
self.track_nr = self._row.TrackNumber
self.location = self._row.Uri
self.playlist = kwargs.get("playlist")
def get_children(self, start=0, request_count=0):
return []
def get_child_count(self):
return 0
def get_resources(self):
resources = []
_, host_port, _, _, _ = urlsplit(self._db.urlbase)
if host_port.find(':') != -1:
host, port = tuple(host_port.split(':'))
else:
host = host_port
_, ext = os.path.splitext(self.location)
ext = ext.lower()
# FIXME: drop this hack when we switch to tagbin
mimetype, dummy = mimetypes.guess_type("dummy%s" % ext)
if not mimetype:
mimetype = 'audio/mpeg'
ext = "mp3"
statinfo = os.stat(self.get_path())
res = DIDLLite.Resource(self.location, 'internal:%s:%s:*' % (host,
mimetype))
try:
res.size = statinfo.st_size
except:
res.size = 0
resources.append(res)
url = "%s%s%s" % (self._db.urlbase, self.get_id(), ext)
res = DIDLLite.Resource(url, 'http-get:*:%s:*' % mimetype)
try:
res.size = statinfo.st_size
except:
res.size = 0
resources.append(res)
return statinfo, resources
def get_path(self):
return urllib2.unquote(self.location[7:].encode('utf-8'))
def get_id(self):
return "track.%d" % self.itemID
def get_name(self):
return self.title
def get_url(self):
return self._db.urlbase + str(self.itemID).encode('utf-8')
def get_cover(self):
return self.album.cover
def __repr__(self):
return '<Track %d title="%s" nr="%d" album="%s" artist="%s" path="%s">' \
% (self.itemID, self.title, self.track_nr, self.album.title,
self.album.artist.name, self.location)
class Track(BaseTrack):
def __init__(self, *args, **kwargs):
BaseTrack.__init__(self, *args, **kwargs)
self.album = args[2]
def get_item(self):
item = DIDLLite.MusicTrack(self.get_id(), self.album.itemID, self.title)
item.artist = self.album.artist.name
item.album = self.album.title
item.playlist = self.playlist
if self.album.cover != '':
            _, ext = os.path.splitext(self.album.cover)
            # Add the cover image extension to help clients that do not
            # react to the mimetype alone.
item.albumArtURI = ''.join((self._db.urlbase, self.get_id(),
'?cover', ext))
item.originalTrackNumber = self.track_nr
item.server_uuid = str(self._db.server.uuid)[5:]
statinfo, resources = self.get_resources()
item.res.extend(resources)
try:
# FIXME: getmtime is deprecated in Twisted 2.6
item.date = datetime.fromtimestamp(statinfo.st_mtime)
except:
item.date = None
return item
class Video(BaseTrack):
def get_item(self):
item = DIDLLite.VideoItem(self.get_id(), VIDEO_ALL_CONTAINER_ID,
self.title)
item.server_uuid = str(self._db.server.uuid)[5:]
statinfo, resources = self.get_resources()
item.res.extend(resources)
try:
# FIXME: getmtime is deprecated in Twisted 2.6
item.date = datetime.fromtimestamp(statinfo.st_mtime)
except:
item.date = None
return item
class BansheeDB(Loggable):
logCategory = "banshee_db"
def __init__(self, path=None):
Loggable.__init__(self)
self._local_music_library_id = None
self._local_video_library_id = None
default_db_path = os.path.expanduser("~/.config/banshee-1/banshee.db")
self._db_path = path or default_db_path
def open_db(self):
self.db = SQLiteDB(self._db_path)
def close(self):
self.db.disconnect()
def get_local_music_library_id(self):
if self._local_music_library_id is None:
q = "select PrimarySourceID from CorePrimarySources where StringID=?"
row = self.db.sql_execute(q, 'MusicLibrarySource-Library')[0]
self._local_music_library_id = row.PrimarySourceID
return self._local_music_library_id
def get_local_video_library_id(self):
if self._local_video_library_id is None:
q = "select PrimarySourceID from CorePrimarySources where StringID=?"
row = self.db.sql_execute(q, 'VideoLibrarySource-VideoLibrary')[0]
self._local_video_library_id = row.PrimarySourceID
return self._local_video_library_id
def get_artists(self):
artists = []
def query_db():
source_id = self.get_local_music_library_id()
q = "select * from CoreArtists where ArtistID in " \
"(select distinct(ArtistID) from CoreTracks where " \
"PrimarySourceID=?) order by Name"
for row in self.db.sql_execute(q, source_id):
artist = Artist(row, self.db, source_id)
artists.append(artist)
yield artist
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: artists)
return dfr
def get_albums(self):
albums = []
artists = {}
def query_db():
q = "select * from CoreAlbums where AlbumID in " \
"(select distinct(AlbumID) from CoreTracks where " \
"PrimarySourceID=?) order by Title"
for row in self.db.sql_execute(q, self.get_local_music_library_id()):
try:
artist = artists[row.ArtistID]
except KeyError:
artist = self.get_artist_with_id(row.ArtistID)
artists[row.ArtistID] = artist
album = Album(row, self.db, artist)
albums.append(album)
yield album
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: albums)
return dfr
def get_music_playlists(self):
return self.get_playlists(self.get_local_music_library_id(),
MusicPlaylist, MusicSmartPlaylist)
def get_playlists(self, source_id, PlaylistClass, SmartPlaylistClass):
playlists = []
def query_db():
q = "select * from CorePlaylists where PrimarySourceID=? order by Name"
for row in self.db.sql_execute(q, source_id):
playlist = PlaylistClass(row, self)
playlists.append(playlist)
yield playlist
q = "select * from CoreSmartPlaylists where PrimarySourceID=? order by Name"
for row in self.db.sql_execute(q, source_id):
playlist = SmartPlaylistClass(row, self)
playlists.append(playlist)
yield playlist
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: playlists)
return dfr
def get_artist_with_id(self, artist_id):
q = "select * from CoreArtists where ArtistID=? limit 1"
row = self.db.sql_execute(q, artist_id)[0]
return Artist(row, self.db, self.get_local_music_library_id())
def get_album_with_id(self, album_id):
q = "select * from CoreAlbums where AlbumID=? limit 1"
row = self.db.sql_execute(q, album_id)[0]
artist = self.get_artist_with_id(row.ArtistID)
return Album(row, self.db, artist)
def get_playlist_with_id(self, playlist_id, PlaylistClass):
q = "select * from CorePlaylists where PlaylistID=? limit 1"
row = self.db.sql_execute(q, playlist_id)[0]
return PlaylistClass(row, self)
def get_smart_playlist_with_id(self, playlist_id, PlaylistClass):
q = "select * from CoreSmartPlaylists where SmartPlaylistID=? limit 1"
row = self.db.sql_execute(q, playlist_id)[0]
return PlaylistClass(row, self)
def get_music_playlist_with_id(self, playlist_id):
return self.get_playlist_with_id(playlist_id, MusicPlaylist)
def get_music_smart_playlist_with_id(self, playlist_id):
return self.get_smart_playlist_with_id(playlist_id, MusicSmartPlaylist)
def get_video_playlist_with_id(self, playlist_id):
return self.get_playlist_with_id(playlist_id, VideoPlaylist)
def get_video_smart_playlist_with_id(self, playlist_id):
return self.get_smart_playlist_with_id(playlist_id, VideoSmartPlaylist)
def get_track_with_id(self, track_id):
q = "select * from CoreTracks where TrackID=? limit 1"
row = self.db.sql_execute(q, track_id)[0]
album = self.get_album_with_id(row.AlbumID)
return Track(row, self.db, album)
def get_track_for_uri(self, track_uri):
q = "select * from CoreTracks where Uri=? limit 1"
try:
row = self.db.sql_execute(q, track_uri)[0]
except IndexError:
# not found
track = None
else:
album = self.get_album_with_id(row.AlbumID)
track = Track(row, self, album)
return track
def get_tracks(self):
tracks = []
albums = {}
def query_db():
q = "select * from CoreTracks where TrackID in " \
"(select distinct(TrackID) from CoreTracks where " \
"PrimarySourceID=?) order by AlbumID,TrackNumber"
for row in self.db.sql_execute(q, self.get_local_music_library_id()):
if row.AlbumID not in albums:
album = self.get_album_with_id(row.AlbumID)
albums[row.AlbumID] = album
else:
album = albums[row.AlbumID]
track = Track(row, self.db, album)
tracks.append(track)
yield track
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: tracks)
return dfr
def get_video_with_id(self, video_id):
q = "select * from CoreTracks where TrackID=? limit 1"
row = self.db.sql_execute(q, video_id)[0]
return Video(row, self.db)
def get_videos(self):
videos = []
def query_db():
source_id = self.get_local_video_library_id()
q = "select * from CoreTracks where TrackID in " \
"(select distinct(TrackID) from CoreTracks where " \
"PrimarySourceID=?)"
for row in self.db.sql_execute(q, source_id):
video = Video(row, self.db, source_id)
videos.append(video)
yield video
dfr = task.coiterate(query_db())
dfr.addCallback(lambda gen: videos)
return dfr
def get_video_playlists(self):
return self.get_playlists(self.get_local_video_library_id(),
VideoPlaylist, VideoSmartPlaylist)
class BansheeStore(BackendStore, BansheeDB):
logCategory = 'banshee_store'
implements = ['MediaServer']
def __init__(self, server, **kwargs):
BackendStore.__init__(self, server, **kwargs)
BansheeDB.__init__(self, kwargs.get("db_path"))
self.update_id = 0
self.name = kwargs.get('name', 'Banshee')
self.containers = {}
self.containers[ROOT_CONTAINER_ID] = Container(ROOT_CONTAINER_ID,
-1, self.name, store=self)
louie.send('Coherence.UPnP.Backend.init_completed', None, backend=self)
def upnp_init(self):
self.open_db()
music = Container(AUDIO_CONTAINER, ROOT_CONTAINER_ID,
'Music', store=self)
self.containers[ROOT_CONTAINER_ID].add_child(music)
self.containers[AUDIO_CONTAINER] = music
artists = Container(AUDIO_ARTIST_CONTAINER_ID, AUDIO_CONTAINER,
'Artists', children_callback=self.get_artists,
store=self)
self.containers[AUDIO_ARTIST_CONTAINER_ID] = artists
self.containers[AUDIO_CONTAINER].add_child(artists)
albums = Container(AUDIO_ALBUM_CONTAINER_ID, AUDIO_CONTAINER,
'Albums', children_callback=self.get_albums,
store=self)
self.containers[AUDIO_ALBUM_CONTAINER_ID] = albums
self.containers[AUDIO_CONTAINER].add_child(albums)
tracks = Container(AUDIO_ALL_CONTAINER_ID, AUDIO_CONTAINER,
'All tracks', children_callback=self.get_tracks,
play_container=True, store=self)
self.containers[AUDIO_ALL_CONTAINER_ID] = tracks
self.containers[AUDIO_CONTAINER].add_child(tracks)
playlists = Container(AUDIO_PLAYLIST_CONTAINER_ID, AUDIO_CONTAINER,
'Playlists', store=self,
children_callback=self.get_music_playlists)
self.containers[AUDIO_PLAYLIST_CONTAINER_ID] = playlists
self.containers[AUDIO_CONTAINER].add_child(playlists)
videos = Container(VIDEO_CONTAINER, ROOT_CONTAINER_ID,
'Videos', store=self)
self.containers[ROOT_CONTAINER_ID].add_child(videos)
self.containers[VIDEO_CONTAINER] = videos
all_videos = Container(VIDEO_ALL_CONTAINER_ID, VIDEO_CONTAINER,
'All Videos', children_callback=self.get_videos,
store=self)
self.containers[VIDEO_ALL_CONTAINER_ID] = all_videos
self.containers[VIDEO_CONTAINER].add_child(all_videos)
playlists = Container(VIDEO_PLAYLIST_CONTAINER_ID, VIDEO_CONTAINER,
'Playlists', store=self,
children_callback=self.get_video_playlists)
self.containers[VIDEO_PLAYLIST_CONTAINER_ID] = playlists
self.containers[VIDEO_CONTAINER].add_child(playlists)
self.db.server = self.server
self.db.urlbase = self.urlbase
self.db.containers = self.containers
self.current_connection_id = None
if self.server:
hostname = self.server.coherence.hostname
source_protocol_info = ['internal:%s:audio/mpeg:*' % hostname,
'http-get:*:audio/mpeg:*',
'internal:%s:application/ogg:*' % hostname,
'http-get:*:application/ogg:*']
self.server.connection_manager_server.set_variable(0,
'SourceProtocolInfo',
source_protocol_info,
default=True)
def release(self):
self.db.disconnect()
def get_by_id(self, item_id):
self.info("get_by_id %s", item_id)
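        # Composite ids look like "<type>.<numeric id>" (e.g. "track.42"),
        # possibly followed by an "@..." suffix which is stripped here;
        # plain integer ids refer to the static containers dict.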
if isinstance(item_id, basestring) and item_id.find('.') > 0:
item_id = item_id.split('@', 1)
item_type, item_id = item_id[0].split('.')[:2]
item_id = int(item_id)
dfr = self._lookup(item_type, item_id)
else:
item_id = int(item_id)
item = self.containers[item_id]
dfr = defer.succeed(item)
return dfr
def _lookup(self, item_type, item_id):
lookup_mapping = dict(artist=self.get_artist_with_id,
album=self.get_album_with_id,
musicplaylist=self.get_music_playlist_with_id,
musicsmartplaylist=self.get_music_smart_playlist_with_id,
videoplaylist=self.get_video_playlist_with_id,
videosmartplaylist=self.get_video_smart_playlist_with_id,
track=self.get_track_with_id,
video=self.get_video_with_id)
item = None
func = lookup_mapping.get(item_type)
if func:
item = func(item_id)
return defer.succeed(item)
|
haveal/googleads-python-lib
|
refs/heads/master
|
examples/dfp/v201502/creative_service/update_creatives.py
|
4
|
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates the destination URL of a single image creative.
To determine which image creatives exist, run get_all_creatives.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import dfp
IMAGE_CREATIVE_ID = 'INSERT_IMAGE_CREATIVE_ID_HERE'
def main(client, image_creative_id):
# Initialize appropriate service.
creative_service = client.GetService('CreativeService', version='v201502')
# Create statement object to get all image creatives.
values = [{
'key': 'type',
'value': {
'xsi_type': 'TextValue',
'value': 'ImageCreative'
}
}, {
'key': 'id',
'value': {
'xsi_type': 'NumberValue',
'value': image_creative_id
}
}]
query = 'WHERE creativeType = :type AND id = :id'
statement = dfp.FilterStatement(query, values, 1)
# Get creatives by statement.
response = creative_service.getCreativesByStatement(
statement.ToStatement())
if 'results' in response:
# Update each local creative object by changing its destination URL.
updated_creatives = []
for creative in response['results']:
creative['destinationUrl'] = 'http://news.google.com'
updated_creatives.append(creative)
# Update creatives remotely.
creatives = creative_service.updateCreatives(updated_creatives)
# Display results.
for creative in creatives:
print ('Image creative with id \'%s\' and destination URL \'%s\' was '
'updated.' % (creative['id'], creative['destinationUrl']))
else:
print 'No creatives found to update.'
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, IMAGE_CREATIVE_ID)
|
soarpenguin/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/ovirt/ovirt_datacenters_facts.py
|
45
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_datacenters_facts
short_description: Retrieve facts about one or more oVirt/RHV datacenters
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve facts about one or more oVirt/RHV datacenters."
notes:
- "This module creates a new top-level C(ovirt_datacenters) fact, which
contains a list of datacenters."
options:
pattern:
description:
- "Search term which is accepted by oVirt/RHV search backend."
- "For example to search datacenter I(X) use following pattern: I(name=X)"
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all data centers which names start with C(production):
- ovirt_datacenters_facts:
pattern: name=production*
- debug:
var: ovirt_datacenters
'''
RETURN = '''
ovirt_datacenters:
    description: "List of dictionaries describing the datacenters. Datacenter attributes are mapped to dictionary keys,
                  all datacenter attributes can be found at the following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/data_center."
returned: On success.
type: list
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
)
def main():
argument_spec = ovirt_facts_full_argument_spec(
pattern=dict(default='', required=False),
)
module = AnsibleModule(argument_spec)
check_sdk(module)
try:
auth = module.params.pop('auth')
connection = create_connection(auth)
datacenters_service = connection.system_service().data_centers_service()
datacenters = datacenters_service.list(search=module.params['pattern'])
module.exit_json(
changed=False,
ansible_facts=dict(
ovirt_datacenters=[
get_dict_of_struct(
struct=d,
connection=connection,
fetch_nested=module.params.get('fetch_nested'),
attributes=module.params.get('nested_attributes'),
) for d in datacenters
],
),
)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=auth.get('token') is None)
if __name__ == '__main__':
main()
|
cloudify-cosmo/cloudify-plugin-installer-plugin
|
refs/heads/master
|
plugin_installer/extract_package_name.py
|
6
|
"""
hack-ish script to extract the name field from a python package
should be called with the directory containing setup.py as the first argument
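e.g. running ``python extract_package_name.py /path/to/package_dir`` prints
the detected package name to stdout (usage sketch based on the code below)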
"""
if __name__ == '__main__':
import setuptools
import sys
from os import path
root_dir = sys.argv[1]
# patch for setuptools.py that prints the package name
# to stdout (also supports pbr packages)
def patch_setup(name=None, pbr=False, *args, **kwargs):
if pbr:
import ConfigParser
config = ConfigParser.ConfigParser()
config.read(path.join(root_dir, 'setup.cfg'))
name = config.get('metadata', 'name')
if name is None:
            sys.stderr.write('Failed extracting the package name for'
' package located at: {0}'.format(root_dir))
sys.exit(1)
sys.stdout.write(name)
# monkey patch setuptools.setup
setuptools.setup = patch_setup
# Make sure our setup.py is first in path
sys.path.insert(0, root_dir)
# The line below is important
import setup # NOQA
|
theodoregoetz/wernher
|
refs/heads/master
|
sandbox/KRPC Test2.py
|
1
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
%run -i '../Common.ipynb'
import krpc
# <codecell>
conn = krpc.connect(name='laptop2', address='192.168.1.2')
ksc = conn.space_center
vessel = ksc.active_vessel
obt = vessel.orbit
ap = vessel.auto_pilot
con = vessel.control
# <codecell>
def torque(vessel):
surfs = vessel.parts.with_module('ModuleControlSurface')
pitch,yaw,roll = 0,0,0
for surf in surfs:
modl = [x.fields for x in surf.modules if x.name == 'ModuleControlSurface'][0]
print(surf.name)
p = surf.position(vessel.reference_frame)
d = surf.direction(vessel.reference_frame)
r = surf.rotation(vessel.reference_frame)
print(' ',p)
print(' ',d)
print(' ',r)
if modl['Pitch']:
pass
if modl['Yaw']:
pass
if modl['Roll']:
pass
torque(vessel)
# <codecell>
surf = vessel.parts.with_module('ModuleControlSurface')[0]
# <codecell>
dir(surf.modules[0])
# <codecell>
vrf = vessel.reference_frame
srfrf = vessel.surface_reference_frame
vobtrf = vessel.orbital_reference_frame
obtrf = obt.body.reference_frame
obtorf = obt.body.orbital_reference_frame
obtnrrf = obt.body.non_rotating_reference_frame
flight = lambda rf: vessel.flight(rf)
# <codecell>
compare(ksc)
# <codecell>
t = ksc.ut
o = KeplerOrbit(obt)
print(o)
f = flight(obtorf)
print(f.longitude, f.speed)
f = flight(obtnrrf)
print(f.longitude, f.speed)
f = flight(obtrf)
print(f.longitude, f.speed)
|
enthought/etsproxy
|
refs/heads/master
|
enthought/traits/ui/wx/themed_text_editor.py
|
1
|
# proxy module
from traitsui.wx.themed_text_editor import *
|
amirgeva/codebrowser
|
refs/heads/master
|
browsestack.py
|
1
|
from PySide import QtCore
from PySide import QtGui
import sys
class BrowseStack(QtGui.QDockWidget):
def __init__(self,parent=None):
super(BrowseStack,self).__init__(parent)
self.stacklist=QtGui.QListView()
self.items=[]
self.setWidget(self.stacklist)
def updateItems(self):
m=self.stacklist.model()
model=QtGui.QStringListModel()
model.setStringList(self.items)
self.stacklist.setModel(model)
del m
def addItem(self,item):
self.items.append(item)
self.updateItems()
def top(self):
if len(self.items)==0:
return ''
return self.items[-1]
def pop(self):
if len(self.items)>0:
del self.items[-1]
self.updateItems()
|
5y/flask
|
refs/heads/master
|
flask/cli.py
|
1
|
# -*- coding: utf-8 -*-
"""
flask.run
~~~~~~~~~
A simple command line application to run flask apps.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from threading import Lock
from contextlib import contextmanager
import click
from ._compat import iteritems
class NoAppException(click.UsageError):
"""Raised if an application cannot be found or loaded."""
def find_best_app(module):
"""Given a module instance this tries to find the best possible
application in the module or raises an exception.
"""
from . import Flask
# Search for the most common names first.
for attr_name in 'app', 'application':
app = getattr(module, attr_name, None)
if app is not None and isinstance(app, Flask):
return app
# Otherwise find the only object that is a Flask instance.
matches = [v for k, v in iteritems(module.__dict__)
if isinstance(v, Flask)]
if matches:
if len(matches) > 1:
raise NoAppException('More than one possible Flask application '
'found in module "%s", none of which are called '
'"app". Be explicit!' % module.__name__)
return matches[0]
raise NoAppException('Failed to find application in module "%s". Are '
'you sure it contains a Flask application? Maybe '
'you wrapped it in a WSGI middleware or you are '
'using a factory function.' % module.__name__)
def prepare_exec_for_file(filename):
"""Given a filename this will try to calculate the python path, add it
to the search path and return the actual module name that is expected.
"""
module = []
# Chop off file extensions or package markers
if filename.endswith('.py'):
filename = filename[:-3]
elif os.path.split(filename)[1] == '__init__.py':
filename = os.path.dirname(filename)
else:
raise NoAppException('The file provided (%s) does exist but is not a '
'valid Python file. This means that it cannot '
'be used as application. Please change the '
'extension to .py' % filename)
filename = os.path.realpath(filename)
dirpath = filename
while 1:
dirpath, extra = os.path.split(dirpath)
module.append(extra)
if not os.path.isfile(os.path.join(dirpath, '__init__.py')):
break
sys.path.insert(0, dirpath)
return '.'.join(module[::-1])
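# Illustrative note (added, not in the original module): for a layout like
# /srv/proj/pkg/app.py where /srv/proj/pkg/__init__.py exists, this walks up
# past the package, inserts /srv/proj into sys.path and returns "pkg.app".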
def locate_app(app_id):
"""Attempts to locate the application."""
if ':' in app_id:
module, app_obj = app_id.split(':', 1)
else:
module = app_id
app_obj = None
__import__(module)
mod = sys.modules[module]
if app_obj is None:
app = find_best_app(mod)
else:
app = getattr(mod, app_obj, None)
if app is None:
raise RuntimeError('Failed to find application in module "%s"'
% module)
return app
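# Illustrative note (added, not in the original module): locate_app("hello")
# imports "hello" and picks the best Flask instance via find_best_app(),
# while locate_app("hello:application") returns that module's "application"
# attribute directly.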
class DispatchingApp(object):
"""Special application that dispatches to a flask application which
is imported by name on first request. This is safer than importing
    the application upfront because it means that we can forward import
    errors to the browser as an error page.
"""
def __init__(self, loader, use_eager_loading=False):
self.loader = loader
self._app = None
self._lock = Lock()
if use_eager_loading:
self._load_unlocked()
def _load_unlocked(self):
self._app = rv = self.loader()
return rv
def __call__(self, environ, start_response):
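        # Double-checked locking: take the fast path without the lock once
        # the application has already been loaded.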
if self._app is not None:
return self._app(environ, start_response)
with self._lock:
if self._app is not None:
rv = self._app
else:
rv = self._load_unlocked()
return rv(environ, start_response)
def _no_such_app():
raise NoAppException('Could not locate Flask application. '
'You did not provide FLASK_APP or the '
'--app parameter.')
class ScriptInfo(object):
"""Help object to deal with Flask applications. This is usually not
necessary to interface with as it's used internally in the dispatching
to click.
"""
def __init__(self, app_import_path=None, debug=None, create_app=None):
#: The application import path
self.app_import_path = app_import_path
#: The debug flag. If this is not None, the application will
        #: automatically have its debug flag overridden with this value.
self.debug = debug
#: Optionally a function that is passed the script info to create
#: the instance of the application.
self.create_app = create_app
#: A dictionary with arbitrary data that can be associated with
#: this script info.
self.data = {}
self._loaded_app = None
def load_app(self):
"""Loads the Flask app (if not yet loaded) and returns it. Calling
this multiple times will just result in the already loaded app to
be returned.
"""
if self._loaded_app is not None:
return self._loaded_app
if self.create_app is not None:
rv = self.create_app(self)
else:
if self.app_import_path is None:
_no_such_app()
rv = locate_app(self.app_import_path)
if self.debug is not None:
rv.debug = self.debug
self._loaded_app = rv
return rv
@contextmanager
def conditional_context(self, with_context=True):
"""Creates an application context or not, depending on the given
        parameter but always works as a context manager. This is just a
shortcut for a common operation.
"""
if with_context:
with self.load_app().app_context() as ctx:
yield ctx
else:
yield None
pass_script_info = click.make_pass_decorator(ScriptInfo)
def without_appcontext(f):
    """Marks a click callback so that it does not get an app context
created. This only works for commands directly registered to
the toplevel system. This really is only useful for very
special commands like the runserver one.
"""
f.__flask_without_appcontext__ = True
return f
def set_debug_value(ctx, value):
ctx.ensure_object(ScriptInfo).debug = value
def set_app_value(ctx, value):
if value is not None:
if os.path.isfile(value):
value = prepare_exec_for_file(value)
elif '.' not in sys.path:
sys.path.insert(0, '.')
ctx.ensure_object(ScriptInfo).app_import_path = value
debug_option = click.Option(['--debug/--no-debug'],
help='Enable or disable debug mode.',
default=None, callback=set_debug_value)
app_option = click.Option(['-a', '--app'],
help='The application to run',
callback=set_app_value, is_eager=True)
class FlaskGroup(click.Group):
    """Special subclass of the regular click group that supports loading
    more commands from the configured Flask app. Normally a developer
    does not have to interface with this class but there are some very
    advanced use cases for which it makes sense to create an instance of
    this.
    For information on why this is useful see :ref:`custom-scripts`.
    :param add_default_commands: if this is True then the default run and
                                 shell commands will be added.
:param add_app_option: adds the default ``--app`` option. This gets
automatically disabled if a `create_app`
callback is defined.
:param add_debug_option: adds the default ``--debug`` option.
:param create_app: an optional callback that is passed the script info
and returns the loaded app.
"""
def __init__(self, add_default_commands=True, add_app_option=None,
add_debug_option=True, create_app=None, **extra):
params = list(extra.pop('params', None) or ())
if add_app_option is None:
add_app_option = create_app is None
if add_app_option:
params.append(app_option)
if add_debug_option:
params.append(debug_option)
click.Group.__init__(self, params=params, **extra)
self.create_app = create_app
if add_default_commands:
self.add_command(run_command)
self.add_command(shell_command)
def get_command(self, ctx, name):
# We load built-in commands first as these should always be the
# same no matter what the app does. If the app does want to
# override this it needs to make a custom instance of this group
# and not attach the default commands.
#
# This also means that the script stays functional in case the
# application completely fails.
rv = click.Group.get_command(self, ctx, name)
if rv is not None:
return rv
info = ctx.ensure_object(ScriptInfo)
try:
rv = info.load_app().cli.get_command(ctx, name)
if rv is not None:
return rv
except NoAppException:
pass
def list_commands(self, ctx):
# The commands available is the list of both the application (if
# available) plus the builtin commands.
rv = set(click.Group.list_commands(self, ctx))
info = ctx.ensure_object(ScriptInfo)
try:
rv.update(info.load_app().cli.list_commands(ctx))
except Exception:
# Here we intentionally swallow all exceptions as we don't
# want the help page to break if the app does not exist.
# If someone attempts to use the command we try to create
# the app again and this will give us the error.
pass
return sorted(rv)
def invoke_subcommand(self, ctx, cmd, cmd_name, args):
with_context = cmd.callback is None or \
not getattr(cmd.callback, '__flask_without_appcontext__', False)
with ctx.find_object(ScriptInfo).conditional_context(with_context):
return click.Group.invoke_subcommand(
self, ctx, cmd, cmd_name, args)
def main(self, *args, **kwargs):
obj = kwargs.get('obj')
if obj is None:
obj = ScriptInfo(create_app=self.create_app)
kwargs['obj'] = obj
kwargs.setdefault('auto_envvar_prefix', 'FLASK')
return click.Group.main(self, *args, **kwargs)
def script_info_option(*args, **kwargs):
"""This decorator works exactly like :func:`click.option` but is eager
by default and stores the value in the :attr:`ScriptInfo.data`. This
is useful to further customize an application factory in very complex
situations.
:param script_info_key: this is a mandatory keyword argument which
defines under which data key the value should
be stored.
"""
try:
key = kwargs.pop('script_info_key')
except LookupError:
raise TypeError('script_info_key not provided.')
real_callback = kwargs.get('callback')
def callback(ctx, value):
if real_callback is not None:
value = real_callback(ctx, value)
ctx.ensure_object(ScriptInfo).data[key] = value
return value
kwargs['callback'] = callback
kwargs.setdefault('is_eager', True)
return click.option(*args, **kwargs)
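# Illustrative usage sketch (hypothetical option and key names):
#   @script_info_option('--config', script_info_key='config')
#   def cli(**params): ...
# stores the parsed --config value under ScriptInfo.data['config'].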
@click.command('run', short_help='Runs a development server.')
@click.option('--host', '-h', default='127.0.0.1',
help='The interface to bind to.')
@click.option('--port', '-p', default=5000,
help='The port to bind to.')
@click.option('--reload/--no-reload', default=None,
help='Enable or disable the reloader. By default the reloader '
'is active if debug is enabled.')
@click.option('--debugger/--no-debugger', default=None,
help='Enable or disable the debugger. By default the debugger '
'is active if debug is enabled.')
@click.option('--eager-loading/--lazy-loader', default=None,
help='Enable or disable eager loading. By default eager '
'loading is enabled if the reloader is disabled.')
@click.option('--with-threads/--without-threads', default=False,
help='Enable or disable multithreading.')
@without_appcontext
@pass_script_info
def run_command(info, host, port, reload, debugger, eager_loading,
with_threads):
"""Runs a local development server for the Flask application.
This local server is recommended for development purposes only but it
can also be used for simple intranet deployments. By default it will
not support any sort of concurrency at all to simplify debugging. This
can be changed with the --with-threads option which will enable basic
multithreading.
The reloader and debugger are by default enabled if the debug flag of
Flask is enabled and disabled otherwise.
"""
from werkzeug.serving import run_simple
if reload is None:
reload = info.debug
if debugger is None:
debugger = info.debug
if eager_loading is None:
eager_loading = not reload
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
    # Extra startup messages. This depends a bit on Werkzeug internals to
# not double execute when the reloader kicks in.
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
# If we have an import path we can print it out now which can help
# people understand what's being served. If we do not have an
# import path because the app was loaded through a callback then
# we won't print anything.
if info.app_import_path is not None:
print(' * Serving Flask app "%s"' % info.app_import_path)
if info.debug is not None:
print(' * Forcing debug %s' % (info.debug and 'on' or 'off'))
run_simple(host, port, app, use_reloader=reload,
use_debugger=debugger, threaded=with_threads)
@click.command('shell', short_help='Runs a shell in the app context.')
def shell_command():
"""Runs an interactive Python shell in the context of a given
Flask application. The application will populate the default
namespace of this shell according to its configuration.
This is useful for executing small snippets of management code
without having to manually configure the application.
"""
import code
from flask.globals import _app_ctx_stack
app = _app_ctx_stack.top.app
banner = 'Python %s on %s\nApp: %s%s\nInstance: %s' % (
sys.version,
sys.platform,
app.import_name,
app.debug and ' [debug]' or '',
app.instance_path,
)
code.interact(banner=banner, local=app.make_shell_context())
def make_default_cli(app):
"""Creates the default click object for the app itself. Currently
there are no default commands registered because all builtin commands
are registered on the actual cmd object here.
"""
return click.Group()
@click.group(cls=FlaskGroup)
def cli(**params):
"""
This shell command acts as a general utility script for Flask applications.
It loads the configured application (either through the FLASK_APP environment
variable or the --app parameter) and then provides the commands supplied
either by the application or by Flask itself.
The most useful commands are the "run" and "shell" commands.
Example usage:
flask --app=hello --debug run
"""
def main(as_module=False):
this_module = __package__ + '.cli'
args = sys.argv[1:]
if as_module:
if sys.version_info >= (2, 7):
name = 'python -m ' + this_module.rsplit('.', 1)[0]
else:
name = 'python -m ' + this_module
# This module is always executed as "python -m flask.run" and as such
# we need to ensure that we restore the actual command line so that
# the reloader can properly operate.
sys.argv = ['-m', this_module] + sys.argv[1:]
else:
name = None
cli.main(args=args, prog_name=name)
if __name__ == '__main__':
main(as_module=True)
|
Elettronik/SickRage
|
refs/heads/master
|
lib/hachoir_core/field/static_field_set.py
|
93
|
from hachoir_core.field import FieldSet, ParserError
class StaticFieldSet(FieldSet):
"""
Static field set: the format class attribute is a tuple describing all
fields, using syntax like:
format = (
(TYPE1, ARG1, ARG2, ...),
(TYPE2, ARG1, ARG2, ..., {KEY1=VALUE1, ...}),
...
)
Types with a dynamic size are forbidden, e.g. CString, PascalString8, etc.
"""
format = None # You have to redefine this class variable
_class = None
def __new__(cls, *args, **kw):
assert cls.format is not None, "Class attribute 'format' is not set"
if cls._class is not cls.__name__:
cls._class = cls.__name__
cls.static_size = cls._computeStaticSize()
return object.__new__(cls, *args, **kw)
@staticmethod
def _computeItemSize(item):
item_class = item[0]
if item_class.static_size is None:
raise ParserError("Unable to get static size of field type: %s"
% item_class.__name__)
if callable(item_class.static_size):
if isinstance(item[-1], dict):
return item_class.static_size(*item[1:-1], **item[-1])
else:
return item_class.static_size(*item[1:])
else:
assert isinstance(item_class.static_size, (int, long))
return item_class.static_size
def createFields(self):
for item in self.format:
if isinstance(item[-1], dict):
yield item[0](self, *item[1:-1], **item[-1])
else:
yield item[0](self, *item[1:])
@classmethod
def _computeStaticSize(cls, *args):
return sum(cls._computeItemSize(item) for item in cls.format)
# Initial value of static_size, it changes when first instance
# is created (see __new__)
static_size = _computeStaticSize
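# Illustrative sketch (not part of the original file): a fixed-layout header
# described with the format tuple syntax documented in the class docstring.
# UInt8/UInt16 are standard fixed-size hachoir field types; the field names
# below are made up.
from hachoir_core.field import UInt8, UInt16

class ExampleHeader(StaticFieldSet):
    format = (
        (UInt8, "version"),
        (UInt8, "flags", "Bit flags"),
        (UInt16, "length", {"description": "Payload length"}),
    )

# Every type above has a fixed size, so when the first instance is created
# __new__ computes ExampleHeader.static_size once as 8 + 8 + 16 = 32 bits.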
|
zartata/ZeroNet
|
refs/heads/master
|
src/util/SocksProxy.py
|
14
|
import socket
from lib.PySocks import socks
def create_connection(address, timeout=None, source_address=None):
sock = socks.socksocket()
sock.connect(address)
return sock
# Fake DNS resolution: return the hostname untouched so lookups are handled by the proxy
def getaddrinfo(*args):
return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
def monkeyPath(proxy_ip, proxy_port):
print proxy_ip, proxy_port
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
socket.socket = socks.socksocket
socket.create_connection = create_connection
socket.getaddrinfo = getaddrinfo
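# Illustrative sketch (not part of the original file): route every new socket
# through a SOCKS5 proxy, then connect as usual.  The proxy and target
# addresses below are examples only; note the function is spelled monkeyPath
# in this module.
if __name__ == "__main__":
    monkeyPath("127.0.0.1", "1080")
    s = socket.create_connection(("example.com", 80))
    s.close()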
|
karthik339/Agni
|
refs/heads/master
|
MainDemo/flask/lib/python2.7/site-packages/sqlalchemy/exc.py
|
17
|
# sqlalchemy/exc.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Exceptions used with SQLAlchemy.
The base exception class is :class:`.SQLAlchemyError`. Exceptions which are raised as a
result of DBAPI exceptions are all subclasses of
:class:`.DBAPIError`.
"""
import traceback
class SQLAlchemyError(Exception):
"""Generic error class."""
class ArgumentError(SQLAlchemyError):
"""Raised when an invalid or conflicting function argument is supplied.
This error generally corresponds to construction time state errors.
"""
class CircularDependencyError(SQLAlchemyError):
"""Raised by topological sorts when a circular dependency is detected.
There are two scenarios where this error occurs:
* In a Session flush operation, if two objects are mutually dependent
on each other, they can not be inserted or deleted via INSERT or
DELETE statements alone; an UPDATE will be needed to post-associate
or pre-deassociate one of the foreign key constrained values.
The ``post_update`` flag described at :ref:`post_update` can resolve
this cycle.
* In a :meth:`.MetaData.create_all`, :meth:`.MetaData.drop_all`,
:attr:`.MetaData.sorted_tables` operation, two :class:`.ForeignKey`
or :class:`.ForeignKeyConstraint` objects mutually refer to each
other. Apply the ``use_alter=True`` flag to one or both,
see :ref:`use_alter`.
"""
def __init__(self, message, cycles, edges, msg=None):
if msg is None:
message += " Cycles: %r all edges: %r" % (cycles, edges)
else:
message = msg
SQLAlchemyError.__init__(self, message)
self.cycles = cycles
self.edges = edges
def __reduce__(self):
return self.__class__, (None, self.cycles,
self.edges, self.args[0])
class CompileError(SQLAlchemyError):
"""Raised when an error occurs during SQL compilation"""
class IdentifierError(SQLAlchemyError):
"""Raised when a schema name is beyond the max character limit"""
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
ConcurrentModificationError = None
class DisconnectionError(SQLAlchemyError):
"""A disconnect is detected on a raw DB-API connection.
This error is raised and consumed internally by a connection pool. It can
be raised by the :meth:`.PoolEvents.checkout` event
so that the host pool forces a retry; the exception will be caught
three times in a row before the pool gives up and raises
:class:`~sqlalchemy.exc.InvalidRequestError` regarding the connection attempt.
"""
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
FlushError = None
class TimeoutError(SQLAlchemyError):
"""Raised when a connection pool times out on getting a connection."""
class InvalidRequestError(SQLAlchemyError):
"""SQLAlchemy was asked to do something it can't do.
This error generally corresponds to runtime state errors.
"""
class ResourceClosedError(InvalidRequestError):
"""An operation was requested from a connection, cursor, or other
object that's in a closed state."""
class NoSuchColumnError(KeyError, InvalidRequestError):
"""A nonexistent column is requested from a ``RowProxy``."""
class NoReferenceError(InvalidRequestError):
"""Raised by ``ForeignKey`` to indicate a reference cannot be resolved."""
class NoReferencedTableError(NoReferenceError):
"""Raised by ``ForeignKey`` when the referred ``Table`` cannot be located."""
def __init__(self, message, tname):
NoReferenceError.__init__(self, message)
self.table_name = tname
def __reduce__(self):
return self.__class__, (self.args[0], self.table_name)
class NoReferencedColumnError(NoReferenceError):
"""Raised by ``ForeignKey`` when the referred ``Column`` cannot be located."""
def __init__(self, message, tname, cname):
NoReferenceError.__init__(self, message)
self.table_name = tname
self.column_name = cname
def __reduce__(self):
return self.__class__, (self.args[0], self.table_name,
self.column_name)
class NoSuchTableError(InvalidRequestError):
"""Table does not exist or is not visible to a connection."""
class UnboundExecutionError(InvalidRequestError):
"""SQL was attempted without a database connection to execute it on."""
class DontWrapMixin(object):
"""A mixin class which, when applied to a user-defined Exception class,
will not be wrapped inside of :class:`.StatementError` if the error is
emitted within the process of executing a statement.
E.g.::
from sqlalchemy.exc import DontWrapMixin
class MyCustomException(Exception, DontWrapMixin):
pass
class MySpecialType(TypeDecorator):
impl = String
def process_bind_param(self, value, dialect):
if value == 'invalid':
raise MyCustomException("invalid!")
"""
import sys
if sys.version_info < (2, 5):
class DontWrapMixin:
pass
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
UnmappedColumnError = None
class StatementError(SQLAlchemyError):
"""An error occurred during execution of a SQL statement.
:class:`StatementError` wraps the exception raised
during execution, and features :attr:`.statement`
and :attr:`.params` attributes which supply context regarding
the specifics of the statement which had an issue.
The wrapped exception object is available in
the :attr:`.orig` attribute.
"""
statement = None
"""The string SQL statement being invoked when this exception occurred."""
params = None
"""The parameter list being used when this exception occurred."""
orig = None
"""The DBAPI exception object."""
def __init__(self, message, statement, params, orig):
SQLAlchemyError.__init__(self, message)
self.statement = statement
self.params = params
self.orig = orig
def __reduce__(self):
return self.__class__, (self.args[0], self.statement,
self.params, self.orig)
def __str__(self):
from sqlalchemy.sql import util
params_repr = util._repr_params(self.params, 10)
return ' '.join((SQLAlchemyError.__str__(self),
repr(self.statement), repr(params_repr)))
class DBAPIError(StatementError):
"""Raised when the execution of a database operation fails.
Wraps exceptions raised by the DB-API underlying the
database operation. Driver-specific implementations of the standard
DB-API exception types are wrapped by matching sub-types of SQLAlchemy's
:class:`DBAPIError` when possible. DB-API's ``Error`` type maps to
:class:`DBAPIError` in SQLAlchemy, otherwise the names are identical. Note
that there is no guarantee that different DB-API implementations will
raise the same exception type for any given error condition.
:class:`DBAPIError` features :attr:`~.StatementError.statement`
and :attr:`~.StatementError.params` attributes which supply context regarding
the specifics of the statement which had an issue, for the
typical case when the error was raised within the context of
emitting a SQL statement.
The wrapped exception object is available in the :attr:`~.StatementError.orig` attribute.
Its type and properties are DB-API implementation specific.
"""
@classmethod
def instance(cls, statement, params,
orig,
dbapi_base_err,
connection_invalidated=False):
# Don't ever wrap these, just return them directly as if
# DBAPIError didn't exist.
if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)):
return orig
if orig is not None:
# not a DBAPI error, statement is present.
# raise a StatementError
if not isinstance(orig, dbapi_base_err) and statement:
return StatementError(
"%s (original cause: %s)" % (
str(orig),
traceback.format_exception_only(orig.__class__, orig)[-1].strip()
), statement, params, orig)
name, glob = orig.__class__.__name__, globals()
if name in glob and issubclass(glob[name], DBAPIError):
cls = glob[name]
return cls(statement, params, orig, connection_invalidated)
def __reduce__(self):
return self.__class__, (self.statement, self.params,
self.orig, self.connection_invalidated)
def __init__(self, statement, params, orig, connection_invalidated=False):
try:
text = str(orig)
except (KeyboardInterrupt, SystemExit):
raise
except Exception, e:
text = 'Error in str() of DB-API-generated exception: ' + str(e)
StatementError.__init__(
self,
'(%s) %s' % (orig.__class__.__name__, text),
statement,
params,
orig
)
self.connection_invalidated = connection_invalidated
class InterfaceError(DBAPIError):
"""Wraps a DB-API InterfaceError."""
class DatabaseError(DBAPIError):
"""Wraps a DB-API DatabaseError."""
class DataError(DatabaseError):
"""Wraps a DB-API DataError."""
class OperationalError(DatabaseError):
"""Wraps a DB-API OperationalError."""
class IntegrityError(DatabaseError):
"""Wraps a DB-API IntegrityError."""
class InternalError(DatabaseError):
"""Wraps a DB-API InternalError."""
class ProgrammingError(DatabaseError):
"""Wraps a DB-API ProgrammingError."""
class NotSupportedError(DatabaseError):
"""Wraps a DB-API NotSupportedError."""
# Warnings
class SADeprecationWarning(DeprecationWarning):
"""Issued once per usage of a deprecated API."""
class SAPendingDeprecationWarning(PendingDeprecationWarning):
"""Issued once per usage of a deprecated API."""
class SAWarning(RuntimeWarning):
"""Issued at runtime."""
|
gboudreau/CouchPotato
|
refs/heads/master
|
library/sqlalchemy/dialects/postgresql/psycopg2.py
|
17
|
"""Support for the PostgreSQL database via the psycopg2 driver.
Driver
------
The psycopg2 driver is supported, available at http://pypi.python.org/pypi/psycopg2/ .
The dialect has several behaviors which are specifically tailored towards compatibility
with this module.
Note that psycopg1 is **not** supported.
Unicode
-------
By default, the Psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
Unicode objects directly - SQLAlchemy passes these values through without
change. Note that this setting requires that the PG client encoding be set to
one which can accommodate the kind of character data being passed - typically
``utf-8``. If the Postgresql database is configured for ``SQL_ASCII``
encoding, which is often the default for PG installations, it may be necessary
for non-ascii strings to be encoded into a specific encoding before being
passed to the DBAPI. If changing the database's client encoding setting is not
an option, specify ``use_native_unicode=False`` as a keyword argument to
``create_engine()``, and take note of the ``encoding`` setting as well, which
also defaults to ``utf-8``. Note that disabling "native unicode" mode has a
slight performance penalty, as SQLAlchemy now must translate unicode strings
to/from an encoding such as utf-8, a task that is handled more efficiently
within the Psycopg2 driver natively.
Connecting
----------
URLs are of the form
``postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]``.
psycopg2-specific keyword arguments which are accepted by
:func:`.create_engine()` are:
* *server_side_cursors* - Enable the usage of "server side cursors" for SQL
statements which support this feature. What this essentially means from a
psycopg2 point of view is that the cursor is created using a name, e.g.
`connection.cursor('some name')`, which has the effect that result rows are
not immediately pre-fetched and buffered after statement execution, but are
instead left on the server and only retrieved as needed. SQLAlchemy's
:class:`~sqlalchemy.engine.base.ResultProxy` uses special row-buffering
behavior when this feature is enabled, such that groups of 100 rows at a
time are fetched over the wire to reduce conversational overhead.
* *use_native_unicode* - Enable the usage of Psycopg2 "native unicode" mode
per connection. True by default.
Transactions
------------
The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
Transaction Isolation Level
---------------------------
The ``isolation_level`` parameter of :func:`.create_engine` here makes use of
psycopg2's ``set_isolation_level()`` connection method, rather than
issuing a ``SET SESSION CHARACTERISTICS`` command. This is because psycopg2
resets the isolation level on each new transaction, and needs to know
at the API level what level should be used.
NOTICE logging
---------------
The psycopg2 dialect will log Postgresql NOTICE messages via the
``sqlalchemy.dialects.postgresql`` logger::
import logging
logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
Per-Statement Execution Options
-------------------------------
The following per-statement execution options are respected:
* *stream_results* - Enable or disable usage of server side cursors for the SELECT-statement.
If *None* or not set, the *server_side_cursors* option of the connection is used. If
auto-commit is enabled, the option is ignored.
"""
import random
import re
import decimal
import logging
from sqlalchemy import util, exc
from sqlalchemy import processors
from sqlalchemy.engine import base, default
from sqlalchemy.sql import expression
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, PGCompiler, \
PGIdentifierPreparer, PGExecutionContext, \
ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
class _PGNumeric(sqltypes.Numeric):
def bind_processor(self, dialect):
return None
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(decimal.Decimal)
elif coltype in _DECIMAL_TYPES:
# pg8000 returns Decimal natively for 1700
return None
else:
raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
else:
if coltype in _FLOAT_TYPES:
# pg8000 returns float natively for 701
return None
elif coltype in _DECIMAL_TYPES:
return processors.to_float
else:
raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
class _PGEnum(ENUM):
def __init__(self, *arg, **kw):
super(_PGEnum, self).__init__(*arg, **kw)
if self.convert_unicode:
self.convert_unicode = "force"
class _PGArray(ARRAY):
def __init__(self, *arg, **kw):
super(_PGArray, self).__init__(*arg, **kw)
# FIXME: this check won't work for setups that
# have convert_unicode only on their create_engine().
if isinstance(self.item_type, sqltypes.String) and \
self.item_type.convert_unicode:
self.item_type.convert_unicode = "force"
# When we're handed literal SQL, ensure it's a SELECT-query. Since
# 8.3, combining cursors and "FOR UPDATE" has been fine.
SERVER_SIDE_CURSOR_RE = re.compile(
r'\s*SELECT',
re.I | re.UNICODE)
class PGExecutionContext_psycopg2(PGExecutionContext):
def create_cursor(self):
# TODO: coverage for server side cursors + select.for_update()
if self.dialect.server_side_cursors:
is_server_side = \
self.execution_options.get('stream_results', True) and (
(self.compiled and isinstance(self.compiled.statement, expression.Selectable) \
or \
(
(not self.compiled or
isinstance(self.compiled.statement, expression._TextClause))
and self.statement and SERVER_SIDE_CURSOR_RE.match(self.statement))
)
)
else:
is_server_side = self.execution_options.get('stream_results', False)
self.__is_server_side = is_server_side
if is_server_side:
# use server-side cursors:
# http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
ident = "c_%s_%s" % (hex(id(self))[2:], hex(random.randint(0, 65535))[2:])
return self._connection.connection.cursor(ident)
else:
return self._connection.connection.cursor()
def get_result_proxy(self):
if logger.isEnabledFor(logging.INFO):
self._log_notices(self.cursor)
if self.__is_server_side:
return base.BufferedRowResultProxy(self)
else:
return base.ResultProxy(self)
def _log_notices(self, cursor):
for notice in cursor.connection.notices:
# NOTICE messages have a
# newline character at the end
logger.info(notice.rstrip())
cursor.connection.notices[:] = []
class PGCompiler_psycopg2(PGCompiler):
def visit_mod(self, binary, **kw):
return self.process(binary.left) + " %% " + self.process(binary.right)
def post_process_text(self, text):
return text.replace('%', '%%')
class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace('%', '%%')
class PGDialect_psycopg2(PGDialect):
driver = 'psycopg2'
supports_unicode_statements = False
default_paramstyle = 'pyformat'
supports_sane_multi_rowcount = False
execution_ctx_cls = PGExecutionContext_psycopg2
statement_compiler = PGCompiler_psycopg2
preparer = PGIdentifierPreparer_psycopg2
colspecs = util.update_copy(
PGDialect.colspecs,
{
sqltypes.Numeric : _PGNumeric,
ENUM : _PGEnum, # needs force_unicode
sqltypes.Enum : _PGEnum, # needs force_unicode
ARRAY : _PGArray, # needs force_unicode
}
)
def __init__(self, server_side_cursors=False, use_native_unicode=True, **kwargs):
PGDialect.__init__(self, **kwargs)
self.server_side_cursors = server_side_cursors
self.use_native_unicode = use_native_unicode
self.supports_unicode_binds = use_native_unicode
@classmethod
def dbapi(cls):
psycopg = __import__('psycopg2')
return psycopg
def on_connect(self):
if self.isolation_level is not None:
extensions = __import__('psycopg2.extensions').extensions
isol = {
'READ_COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED,
'READ_UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
'REPEATABLE_READ':extensions.ISOLATION_LEVEL_REPEATABLE_READ,
'SERIALIZABLE':extensions.ISOLATION_LEVEL_SERIALIZABLE
}
def base_on_connect(conn):
try:
conn.set_isolation_level(isol[self.isolation_level])
except:
raise exc.InvalidRequestError(
"Invalid isolation level: '%s'" %
self.isolation_level)
else:
base_on_connect = None
if self.dbapi and self.use_native_unicode:
extensions = __import__('psycopg2.extensions').extensions
def connect(conn):
extensions.register_type(extensions.UNICODE, conn)
if base_on_connect:
base_on_connect(conn)
return connect
else:
return base_on_connect
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
if 'port' in opts:
opts['port'] = int(opts['port'])
opts.update(url.query)
return ([], opts)
def is_disconnect(self, e):
if isinstance(e, self.dbapi.OperationalError):
return 'closed the connection' in str(e) or 'connection not open' in str(e)
elif isinstance(e, self.dbapi.InterfaceError):
return 'connection already closed' in str(e) or 'cursor already closed' in str(e)
elif isinstance(e, self.dbapi.ProgrammingError):
# yes, it really says "losed", not "closed"
return "losed the connection unexpectedly" in str(e)
else:
return False
dialect = PGDialect_psycopg2
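# Illustrative sketch (not part of the original module): the dialect options
# documented in the module docstring above, wired together.  The DSN is a
# placeholder and the snippet needs a reachable PostgreSQL server to run.
if __name__ == '__main__':
    from sqlalchemy import create_engine

    # Surface NOTICE messages emitted by the server.
    logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)

    engine = create_engine(
        'postgresql+psycopg2://user:password@localhost:5432/dbname',
        server_side_cursors=False,          # dialect-wide default
        use_native_unicode=True,            # let psycopg2 return unicode directly
        isolation_level='READ_COMMITTED',   # handled in on_connect() above
    )
    conn = engine.connect()
    # Request a named (server-side) cursor for this one statement only.
    result = conn.execution_options(stream_results=True).execute("SELECT 1")
    print(result.fetchall())
    conn.close()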
|
shuggiefisher/django-on-google-app-engine-base
|
refs/heads/master
|
django/utils/unittest/util.py
|
751
|
"""Various utility functions."""
__unittest = True
_MAX_LENGTH = 80
def safe_repr(obj, short=False):
try:
result = repr(obj)
except Exception:
result = object.__repr__(obj)
if not short or len(result) < _MAX_LENGTH:
return result
return result[:_MAX_LENGTH] + ' [truncated]...'
def safe_str(obj):
try:
return str(obj)
except Exception:
return object.__str__(obj)
def strclass(cls):
return "%s.%s" % (cls.__module__, cls.__name__)
def sorted_list_difference(expected, actual):
"""Finds elements in only one or the other of two, sorted input lists.
Returns a two-element tuple of lists. The first list contains those
elements in the "expected" list but not in the "actual" list, and the
second contains those elements in the "actual" list but not in the
"expected" list. Duplicate elements in either input list are ignored.
"""
i = j = 0
missing = []
unexpected = []
while True:
try:
e = expected[i]
a = actual[j]
if e < a:
missing.append(e)
i += 1
while expected[i] == e:
i += 1
elif e > a:
unexpected.append(a)
j += 1
while actual[j] == a:
j += 1
else:
i += 1
try:
while expected[i] == e:
i += 1
finally:
j += 1
while actual[j] == a:
j += 1
except IndexError:
missing.extend(expected[i:])
unexpected.extend(actual[j:])
break
return missing, unexpected
def unorderable_list_difference(expected, actual, ignore_duplicate=False):
"""Same behavior as sorted_list_difference but
for lists of unorderable items (like dicts).
As it does a linear search per item (remove) it
has O(n*n) performance.
"""
missing = []
unexpected = []
while expected:
item = expected.pop()
try:
actual.remove(item)
except ValueError:
missing.append(item)
if ignore_duplicate:
for lst in expected, actual:
try:
while True:
lst.remove(item)
except ValueError:
pass
if ignore_duplicate:
while actual:
item = actual.pop()
unexpected.append(item)
try:
while True:
actual.remove(item)
except ValueError:
pass
return missing, unexpected
# anything left in actual is unexpected
return missing, actual
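# Illustrative sketch (not part of the original module): both helpers return a
# (missing, unexpected) pair.  Note that unorderable_list_difference consumes
# (mutates) the lists it is given.
if __name__ == '__main__':
    print(sorted_list_difference([1, 2, 3], [2, 3, 4]))
    # -> ([1], [4])
    print(unorderable_list_difference([{'a': 1}, {'b': 2}], [{'b': 2}]))
    # -> ([{'a': 1}], [])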
|
apuckey/thrift-0.9.1
|
refs/heads/master
|
lib/py/src/transport/TTransport.py
|
105
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from cStringIO import StringIO
from struct import pack, unpack
from thrift.Thrift import TException
class TTransportException(TException):
"""Custom Transport Exception class"""
UNKNOWN = 0
NOT_OPEN = 1
ALREADY_OPEN = 2
TIMED_OUT = 3
END_OF_FILE = 4
def __init__(self, type=UNKNOWN, message=None):
TException.__init__(self, message)
self.type = type
class TTransportBase:
"""Base class for Thrift transport layer."""
def isOpen(self):
pass
def open(self):
pass
def close(self):
pass
def read(self, sz):
pass
def readAll(self, sz):
buff = ''
have = 0
while (have < sz):
chunk = self.read(sz - have)
have += len(chunk)
buff += chunk
if len(chunk) == 0:
raise EOFError()
return buff
def write(self, buf):
pass
def flush(self):
pass
# This class should be thought of as an interface.
class CReadableTransport:
"""base class for transports that are readable from C"""
# TODO(dreiss): Think about changing this interface to allow us to use
# a (Python, not c) StringIO instead, because it allows
# you to write after reading.
# NOTE: This is a classic class, so properties will NOT work
# correctly for setting.
@property
def cstringio_buf(self):
"""A cStringIO buffer that contains the current chunk we are reading."""
pass
def cstringio_refill(self, partialread, reqlen):
"""Refills cstringio_buf.
Returns the currently used buffer (which can but need not be the same as
the old cstringio_buf). partialread is what the C code has read from the
buffer, and should be inserted into the buffer before any more reads. The
return value must be a new, not borrowed reference. Something along the
lines of self._buf should be fine.
If reqlen bytes can't be read, throw EOFError.
"""
pass
class TServerTransportBase:
"""Base class for Thrift server transports."""
def listen(self):
pass
def accept(self):
pass
def close(self):
pass
class TTransportFactoryBase:
"""Base class for a Transport Factory"""
def getTransport(self, trans):
return trans
class TBufferedTransportFactory:
"""Factory transport that builds buffered transports"""
def getTransport(self, trans):
buffered = TBufferedTransport(trans)
return buffered
class TBufferedTransport(TTransportBase, CReadableTransport):
"""Class that wraps another transport and buffers its I/O.
The implementation uses a (configurable) fixed-size read buffer
but buffers all writes until a flush is performed.
"""
DEFAULT_BUFFER = 4096
def __init__(self, trans, rbuf_size=DEFAULT_BUFFER):
self.__trans = trans
self.__wbuf = StringIO()
self.__rbuf = StringIO("")
self.__rbuf_size = rbuf_size
def isOpen(self):
return self.__trans.isOpen()
def open(self):
return self.__trans.open()
def close(self):
return self.__trans.close()
def read(self, sz):
ret = self.__rbuf.read(sz)
if len(ret) != 0:
return ret
self.__rbuf = StringIO(self.__trans.read(max(sz, self.__rbuf_size)))
return self.__rbuf.read(sz)
def write(self, buf):
self.__wbuf.write(buf)
def flush(self):
out = self.__wbuf.getvalue()
# reset wbuf before write/flush to preserve state on underlying failure
self.__wbuf = StringIO()
self.__trans.write(out)
self.__trans.flush()
# Implement the CReadableTransport interface.
@property
def cstringio_buf(self):
return self.__rbuf
def cstringio_refill(self, partialread, reqlen):
retstring = partialread
if reqlen < self.__rbuf_size:
# try to make a read of as much as we can.
retstring += self.__trans.read(self.__rbuf_size)
# but make sure we do read reqlen bytes.
if len(retstring) < reqlen:
retstring += self.__trans.readAll(reqlen - len(retstring))
self.__rbuf = StringIO(retstring)
return self.__rbuf
class TMemoryBuffer(TTransportBase, CReadableTransport):
"""Wraps a cStringIO object as a TTransport.
NOTE: Unlike the C++ version of this class, you cannot write to it
then immediately read from it. If you want to read from a
TMemoryBuffer, you must pass a string to the constructor.
TODO(dreiss): Make this work like the C++ version.
"""
def __init__(self, value=None):
"""value -- a value to read from for stringio
If value is set, this will be a transport for reading,
otherwise, it is for writing"""
if value is not None:
self._buffer = StringIO(value)
else:
self._buffer = StringIO()
def isOpen(self):
return not self._buffer.closed
def open(self):
pass
def close(self):
self._buffer.close()
def read(self, sz):
return self._buffer.read(sz)
def write(self, buf):
self._buffer.write(buf)
def flush(self):
pass
def getvalue(self):
return self._buffer.getvalue()
# Implement the CReadableTransport interface.
@property
def cstringio_buf(self):
return self._buffer
def cstringio_refill(self, partialread, reqlen):
# only one shot at reading...
raise EOFError()
class TFramedTransportFactory:
"""Factory transport that builds framed transports"""
def getTransport(self, trans):
framed = TFramedTransport(trans)
return framed
class TFramedTransport(TTransportBase, CReadableTransport):
"""Class that wraps another transport and frames its I/O when writing."""
def __init__(self, trans,):
self.__trans = trans
self.__rbuf = StringIO()
self.__wbuf = StringIO()
def isOpen(self):
return self.__trans.isOpen()
def open(self):
return self.__trans.open()
def close(self):
return self.__trans.close()
def read(self, sz):
ret = self.__rbuf.read(sz)
if len(ret) != 0:
return ret
self.readFrame()
return self.__rbuf.read(sz)
def readFrame(self):
buff = self.__trans.readAll(4)
sz, = unpack('!i', buff)
self.__rbuf = StringIO(self.__trans.readAll(sz))
def write(self, buf):
self.__wbuf.write(buf)
def flush(self):
wout = self.__wbuf.getvalue()
wsz = len(wout)
# reset wbuf before write/flush to preserve state on underlying failure
self.__wbuf = StringIO()
# N.B.: Doing this string concatenation is WAY cheaper than making
# two separate calls to the underlying socket object. Socket writes in
# Python turn out to be REALLY expensive, but it seems to do a pretty
# good job of managing string buffer operations without excessive copies
buf = pack("!i", wsz) + wout
self.__trans.write(buf)
self.__trans.flush()
# Implement the CReadableTransport interface.
@property
def cstringio_buf(self):
return self.__rbuf
def cstringio_refill(self, prefix, reqlen):
# self.__rbuf will already be empty here because fastbinary doesn't
# ask for a refill until the previous buffer is empty. Therefore,
# we can start reading new frames immediately.
while len(prefix) < reqlen:
self.readFrame()
prefix += self.__rbuf.getvalue()
self.__rbuf = StringIO(prefix)
return self.__rbuf
class TFileObjectTransport(TTransportBase):
"""Wraps a file-like object to make it work as a Thrift transport."""
def __init__(self, fileobj):
self.fileobj = fileobj
def isOpen(self):
return True
def close(self):
self.fileobj.close()
def read(self, sz):
return self.fileobj.read(sz)
def write(self, buf):
self.fileobj.write(buf)
def flush(self):
self.fileobj.flush()
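# Illustrative sketch (not part of the original module): TMemoryBuffer is the
# simplest transport -- write into one instance, then hand its bytes to a
# second instance for reading, as the class docstring above requires.
if __name__ == '__main__':
    out = TMemoryBuffer()
    out.write("hello ")
    out.write("world")
    data = out.getvalue()

    inp = TMemoryBuffer(data)          # reading requires passing the bytes in
    print(inp.readAll(len(data)))      # -> hello world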
|
google/material-design-icons
|
refs/heads/master
|
update/venv/lib/python3.9/site-packages/chardet/langrussianmodel.py
|
13
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from chardet.sbcharsetprober import SingleByteCharSetModel
# 3: Positive
# 2: Likely
# 1: Unlikely
# 0: Negative
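# For example, RUSSIAN_LANG_MODEL[37][44] below scores the bigram 'А' followed
# by 'Б' as 1 (unlikely), while RUSSIAN_LANG_MODEL[3][21] scores 'а' followed
# by 'б' as 3 (positive).  The keys are character-order indices produced by
# the accompanying char-to-order maps, not Unicode code points.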
RUSSIAN_LANG_MODEL = {
37: { # 'А'
37: 0, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 1, # 'Ж'
51: 1, # 'З'
42: 1, # 'И'
60: 1, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 2, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 1, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 1, # 'Ч'
57: 1, # 'Ш'
63: 1, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 1, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 0, # 'е'
24: 1, # 'ж'
20: 1, # 'з'
4: 0, # 'и'
23: 1, # 'й'
11: 2, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 2, # 'н'
1: 0, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 2, # 'у'
39: 2, # 'ф'
26: 2, # 'х'
28: 0, # 'ц'
22: 1, # 'ч'
25: 2, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
44: { # 'Б'
37: 1, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 1, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 2, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 2, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
33: { # 'В'
37: 2, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 1, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 2, # 'а'
21: 1, # 'б'
10: 1, # 'в'
19: 1, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 2, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 2, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 3, # 'с'
6: 2, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 1, # 'ц'
22: 2, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 1, # 'ъ'
18: 3, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 0, # 'ю'
16: 1, # 'я'
},
46: { # 'Г'
37: 1, # 'А'
44: 1, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 1, # 'в'
19: 0, # 'г'
13: 2, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 1, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 1, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
41: { # 'Д'
37: 1, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 2, # 'Е'
56: 1, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 1, # 'Ц'
50: 1, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 3, # 'а'
21: 0, # 'б'
10: 2, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 3, # 'ж'
20: 1, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 1, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
48: { # 'Е'
37: 1, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 1, # 'Ж'
51: 1, # 'З'
42: 1, # 'И'
60: 1, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 2, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 2, # 'Р'
32: 2, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 1, # 'Ч'
57: 1, # 'Ш'
63: 1, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 0, # 'а'
21: 0, # 'б'
10: 2, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 2, # 'е'
24: 1, # 'ж'
20: 1, # 'з'
4: 0, # 'и'
23: 2, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 1, # 'н'
1: 0, # 'о'
15: 1, # 'п'
9: 1, # 'р'
7: 3, # 'с'
6: 0, # 'т'
14: 0, # 'у'
39: 1, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 2, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
56: { # 'Ж'
37: 1, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 1, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 1, # 'б'
10: 0, # 'в'
19: 1, # 'г'
13: 1, # 'д'
2: 2, # 'е'
24: 1, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 1, # 'м'
5: 0, # 'н'
1: 2, # 'о'
15: 0, # 'п'
9: 1, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 2, # 'ю'
16: 0, # 'я'
},
51: { # 'З'
37: 1, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 2, # 'в'
19: 0, # 'г'
13: 2, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 1, # 'л'
12: 1, # 'м'
5: 2, # 'н'
1: 2, # 'о'
15: 0, # 'п'
9: 1, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 1, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 1, # 'я'
},
42: { # 'И'
37: 1, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 2, # 'Е'
56: 1, # 'Ж'
51: 1, # 'З'
42: 1, # 'И'
60: 1, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 2, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 1, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 1, # 'Ч'
57: 0, # 'Ш'
63: 1, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 1, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 2, # 'з'
4: 1, # 'и'
23: 0, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 2, # 'н'
1: 1, # 'о'
15: 1, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 1, # 'у'
39: 1, # 'ф'
26: 2, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
60: { # 'Й'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 1, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 1, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 0, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 2, # 'о'
15: 0, # 'п'
9: 0, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 0, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
36: { # 'К'
37: 2, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 1, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 1, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 2, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 1, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 0, # 'б'
10: 1, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 2, # 'л'
12: 0, # 'м'
5: 1, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
49: { # 'Л'
37: 2, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 1, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 1, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 0, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 0, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 1, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 0, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 1, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 1, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 1, # 'л'
12: 0, # 'м'
5: 1, # 'н'
1: 2, # 'о'
15: 0, # 'п'
9: 0, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 2, # 'ю'
16: 1, # 'я'
},
38: { # 'М'
37: 1, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 1, # 'Ф'
55: 1, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 0, # 'Ь'
47: 1, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 3, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 1, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 1, # 'л'
12: 1, # 'м'
5: 2, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 1, # 'р'
7: 1, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 3, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
31: { # 'Н'
37: 2, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 1, # 'З'
42: 2, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 1, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 1, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 1, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 3, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 1, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 3, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 2, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
34: { # 'О'
37: 0, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 2, # 'Д'
48: 1, # 'Е'
56: 1, # 'Ж'
51: 1, # 'З'
42: 1, # 'И'
60: 1, # 'Й'
36: 1, # 'К'
49: 2, # 'Л'
38: 1, # 'М'
31: 2, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 2, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 1, # 'Ф'
55: 1, # 'Х'
58: 0, # 'Ц'
50: 1, # 'Ч'
57: 1, # 'Ш'
63: 1, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 1, # 'а'
21: 2, # 'б'
10: 1, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 0, # 'е'
24: 1, # 'ж'
20: 1, # 'з'
4: 0, # 'и'
23: 1, # 'й'
11: 2, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 3, # 'н'
1: 0, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 1, # 'у'
39: 1, # 'ф'
26: 2, # 'х'
28: 1, # 'ц'
22: 2, # 'ч'
25: 2, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
35: { # 'П'
37: 1, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 1, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 2, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 1, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 2, # 'л'
12: 0, # 'м'
5: 1, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 3, # 'р'
7: 1, # 'с'
6: 1, # 'т'
14: 2, # 'у'
39: 1, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 2, # 'ь'
30: 1, # 'э'
27: 0, # 'ю'
16: 2, # 'я'
},
45: { # 'Р'
37: 2, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 2, # 'Е'
56: 1, # 'Ж'
51: 0, # 'З'
42: 2, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 2, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 1, # 'Ч'
57: 1, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 1, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 3, # 'а'
21: 0, # 'б'
10: 1, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 1, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 1, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 2, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 2, # 'я'
},
32: { # 'С'
37: 1, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 2, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 1, # 'Ч'
57: 1, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 1, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 2, # 'а'
21: 1, # 'б'
10: 2, # 'в'
19: 1, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 1, # 'ж'
20: 1, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 2, # 'н'
1: 2, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 1, # 'с'
6: 3, # 'т'
14: 2, # 'у'
39: 1, # 'ф'
26: 1, # 'х'
28: 1, # 'ц'
22: 1, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 1, # 'ъ'
18: 1, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
40: { # 'Т'
37: 1, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 2, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 1, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 1, # 'Ь'
47: 1, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 2, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 1, # 'к'
8: 1, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 1, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 3, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
52: { # 'У'
37: 1, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 1, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 1, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 1, # 'Х'
58: 0, # 'Ц'
50: 1, # 'Ч'
57: 1, # 'Ш'
63: 1, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 1, # 'Ю'
43: 0, # 'Я'
3: 1, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 1, # 'г'
13: 2, # 'д'
2: 1, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 2, # 'и'
23: 1, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 1, # 'н'
1: 2, # 'о'
15: 1, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 0, # 'у'
39: 1, # 'ф'
26: 1, # 'х'
28: 1, # 'ц'
22: 2, # 'ч'
25: 1, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
53: { # 'Ф'
37: 1, # 'А'
44: 1, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 1, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 2, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 2, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 0, # 'с'
6: 1, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
55: { # 'Х'
37: 1, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 2, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 0, # 'н'
1: 2, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 1, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 1, # 'ь'
30: 1, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
58: { # 'Ц'
37: 1, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 1, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 1, # 'а'
21: 0, # 'б'
10: 1, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 0, # 'о'
15: 0, # 'п'
9: 0, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 1, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
50: { # 'Ч'
37: 1, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 1, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 1, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 1, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 1, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 1, # 'о'
15: 0, # 'п'
9: 1, # 'р'
7: 0, # 'с'
6: 3, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 1, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
57: { # 'Ш'
37: 1, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 1, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 0, # 'б'
10: 1, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 1, # 'и'
23: 0, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 1, # 'н'
1: 2, # 'о'
15: 2, # 'п'
9: 1, # 'р'
7: 0, # 'с'
6: 2, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
63: { # 'Щ'
37: 1, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 1, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 1, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 1, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 1, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 1, # 'о'
15: 0, # 'п'
9: 0, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 1, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
62: { # 'Ы'
37: 0, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 1, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 1, # 'Х'
58: 1, # 'Ц'
50: 0, # 'Ч'
57: 1, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 0, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 0, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 0, # 'о'
15: 0, # 'п'
9: 0, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 0, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
61: { # 'Ь'
37: 0, # 'А'
44: 1, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 1, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 0, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 1, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 1, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 1, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 1, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 0, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 0, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 0, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 0, # 'о'
15: 0, # 'п'
9: 0, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 0, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
47: { # 'Э'
37: 0, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 0, # 'Г'
41: 1, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 1, # 'Й'
36: 1, # 'К'
49: 1, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 1, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 1, # 'а'
21: 1, # 'б'
10: 2, # 'в'
19: 1, # 'г'
13: 2, # 'д'
2: 0, # 'е'
24: 1, # 'ж'
20: 0, # 'з'
4: 0, # 'и'
23: 2, # 'й'
11: 2, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 2, # 'н'
1: 0, # 'о'
15: 1, # 'п'
9: 2, # 'р'
7: 1, # 'с'
6: 3, # 'т'
14: 1, # 'у'
39: 1, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
59: { # 'Ю'
37: 1, # 'А'
44: 1, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 1, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 0, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 1, # 'Ч'
57: 0, # 'Ш'
63: 1, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 1, # 'б'
10: 0, # 'в'
19: 1, # 'г'
13: 1, # 'д'
2: 0, # 'е'
24: 1, # 'ж'
20: 0, # 'з'
4: 0, # 'и'
23: 0, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 2, # 'н'
1: 0, # 'о'
15: 1, # 'п'
9: 1, # 'р'
7: 1, # 'с'
6: 0, # 'т'
14: 0, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
43: { # 'Я'
37: 0, # 'А'
44: 0, # 'Б'
33: 1, # 'В'
46: 1, # 'Г'
41: 0, # 'Д'
48: 1, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 1, # 'С'
40: 1, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 1, # 'Х'
58: 0, # 'Ц'
50: 1, # 'Ч'
57: 0, # 'Ш'
63: 1, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 1, # 'Ю'
43: 1, # 'Я'
3: 0, # 'а'
21: 1, # 'б'
10: 1, # 'в'
19: 1, # 'г'
13: 1, # 'д'
2: 0, # 'е'
24: 0, # 'ж'
20: 1, # 'з'
4: 0, # 'и'
23: 1, # 'й'
11: 1, # 'к'
8: 1, # 'л'
12: 1, # 'м'
5: 2, # 'н'
1: 0, # 'о'
15: 1, # 'п'
9: 1, # 'р'
7: 1, # 'с'
6: 0, # 'т'
14: 0, # 'у'
39: 0, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
3: { # 'а'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 1, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 3, # 'б'
10: 3, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 3, # 'з'
4: 3, # 'и'
23: 3, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 2, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 3, # 'х'
28: 3, # 'ц'
22: 3, # 'ч'
25: 3, # 'ш'
29: 3, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 2, # 'э'
27: 3, # 'ю'
16: 3, # 'я'
},
21: { # 'б'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 1, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 1, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 1, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 1, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 0, # 'ф'
26: 2, # 'х'
28: 1, # 'ц'
22: 1, # 'ч'
25: 2, # 'ш'
29: 3, # 'щ'
54: 2, # 'ъ'
18: 3, # 'ы'
17: 2, # 'ь'
30: 1, # 'э'
27: 2, # 'ю'
16: 3, # 'я'
},
10: { # 'в'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 2, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 1, # 'ж'
20: 3, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 1, # 'ф'
26: 2, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 3, # 'ш'
29: 2, # 'щ'
54: 2, # 'ъ'
18: 3, # 'ы'
17: 3, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 3, # 'я'
},
19: { # 'г'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 2, # 'в'
19: 1, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 1, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 3, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 1, # 'ф'
26: 1, # 'х'
28: 1, # 'ц'
22: 2, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 1, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
13: { # 'д'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 3, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 1, # 'ф'
26: 2, # 'х'
28: 3, # 'ц'
22: 2, # 'ч'
25: 2, # 'ш'
29: 1, # 'щ'
54: 2, # 'ъ'
18: 3, # 'ы'
17: 3, # 'ь'
30: 1, # 'э'
27: 2, # 'ю'
16: 3, # 'я'
},
2: { # 'е'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 3, # 'б'
10: 3, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 3, # 'з'
4: 2, # 'и'
23: 3, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 2, # 'у'
39: 2, # 'ф'
26: 3, # 'х'
28: 3, # 'ц'
22: 3, # 'ч'
25: 3, # 'ш'
29: 3, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 2, # 'ю'
16: 3, # 'я'
},
24: { # 'ж'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 1, # 'в'
19: 2, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 1, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 3, # 'н'
1: 2, # 'о'
15: 1, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 1, # 'т'
14: 3, # 'у'
39: 1, # 'ф'
26: 0, # 'х'
28: 1, # 'ц'
22: 2, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 2, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
20: { # 'з'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 3, # 'б'
10: 3, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 3, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 1, # 'ц'
22: 2, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 2, # 'ъ'
18: 3, # 'ы'
17: 2, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 3, # 'я'
},
4: { # 'и'
37: 1, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 3, # 'б'
10: 3, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 3, # 'з'
4: 3, # 'и'
23: 3, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 2, # 'у'
39: 2, # 'ф'
26: 3, # 'х'
28: 3, # 'ц'
22: 3, # 'ч'
25: 3, # 'ш'
29: 3, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 2, # 'э'
27: 3, # 'ю'
16: 3, # 'я'
},
23: { # 'й'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 1, # 'а'
21: 1, # 'б'
10: 1, # 'в'
19: 2, # 'г'
13: 3, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 2, # 'з'
4: 1, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 2, # 'о'
15: 1, # 'п'
9: 2, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 1, # 'у'
39: 2, # 'ф'
26: 1, # 'х'
28: 2, # 'ц'
22: 3, # 'ч'
25: 2, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 2, # 'я'
},
11: { # 'к'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 3, # 'в'
19: 1, # 'г'
13: 1, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 3, # 'л'
12: 1, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 1, # 'ф'
26: 2, # 'х'
28: 2, # 'ц'
22: 1, # 'ч'
25: 2, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 1, # 'ы'
17: 1, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
8: { # 'л'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 3, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 2, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 1, # 'р'
7: 3, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 2, # 'х'
28: 1, # 'ц'
22: 3, # 'ч'
25: 2, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 3, # 'ы'
17: 3, # 'ь'
30: 1, # 'э'
27: 3, # 'ю'
16: 3, # 'я'
},
12: { # 'м'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 2, # 'г'
13: 1, # 'д'
2: 3, # 'е'
24: 1, # 'ж'
20: 1, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 3, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 2, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 1, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 3, # 'ы'
17: 2, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 3, # 'я'
},
5: { # 'н'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 1, # 'п'
9: 2, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 2, # 'х'
28: 3, # 'ц'
22: 3, # 'ч'
25: 2, # 'ш'
29: 2, # 'щ'
54: 1, # 'ъ'
18: 3, # 'ы'
17: 3, # 'ь'
30: 1, # 'э'
27: 3, # 'ю'
16: 3, # 'я'
},
1: { # 'о'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 3, # 'б'
10: 3, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 3, # 'з'
4: 3, # 'и'
23: 3, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 2, # 'у'
39: 2, # 'ф'
26: 3, # 'х'
28: 2, # 'ц'
22: 3, # 'ч'
25: 3, # 'ш'
29: 3, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 2, # 'э'
27: 3, # 'ю'
16: 3, # 'я'
},
15: { # 'п'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 3, # 'л'
12: 1, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 3, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 1, # 'ф'
26: 0, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 1, # 'ш'
29: 1, # 'щ'
54: 0, # 'ъ'
18: 3, # 'ы'
17: 2, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 3, # 'я'
},
9: { # 'р'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 3, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 2, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 2, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 3, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 3, # 'ш'
29: 2, # 'щ'
54: 0, # 'ъ'
18: 3, # 'ы'
17: 3, # 'ь'
30: 2, # 'э'
27: 2, # 'ю'
16: 3, # 'я'
},
7: { # 'с'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 1, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 3, # 'в'
19: 2, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 3, # 'х'
28: 2, # 'ц'
22: 3, # 'ч'
25: 2, # 'ш'
29: 1, # 'щ'
54: 2, # 'ъ'
18: 3, # 'ы'
17: 3, # 'ь'
30: 2, # 'э'
27: 3, # 'ю'
16: 3, # 'я'
},
6: { # 'т'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 2, # 'б'
10: 3, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 1, # 'ж'
20: 1, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 2, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 2, # 'ш'
29: 2, # 'щ'
54: 2, # 'ъ'
18: 3, # 'ы'
17: 3, # 'ь'
30: 2, # 'э'
27: 2, # 'ю'
16: 3, # 'я'
},
14: { # 'у'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 3, # 'б'
10: 3, # 'в'
19: 3, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 3, # 'з'
4: 2, # 'и'
23: 2, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 2, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 1, # 'у'
39: 2, # 'ф'
26: 3, # 'х'
28: 2, # 'ц'
22: 3, # 'ч'
25: 3, # 'ш'
29: 3, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 2, # 'э'
27: 3, # 'ю'
16: 2, # 'я'
},
39: { # 'ф'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 0, # 'в'
19: 1, # 'г'
13: 0, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 1, # 'н'
1: 3, # 'о'
15: 1, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 2, # 'у'
39: 2, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 1, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 2, # 'ы'
17: 1, # 'ь'
30: 2, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
26: { # 'х'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 0, # 'б'
10: 3, # 'в'
19: 1, # 'г'
13: 1, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 1, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 1, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 1, # 'п'
9: 3, # 'р'
7: 2, # 'с'
6: 2, # 'т'
14: 2, # 'у'
39: 1, # 'ф'
26: 1, # 'х'
28: 1, # 'ц'
22: 1, # 'ч'
25: 2, # 'ш'
29: 0, # 'щ'
54: 1, # 'ъ'
18: 0, # 'ы'
17: 1, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
28: { # 'ц'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 2, # 'в'
19: 1, # 'г'
13: 1, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 1, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 2, # 'к'
8: 1, # 'л'
12: 1, # 'м'
5: 1, # 'н'
1: 3, # 'о'
15: 0, # 'п'
9: 1, # 'р'
7: 0, # 'с'
6: 1, # 'т'
14: 3, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 1, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 3, # 'ы'
17: 1, # 'ь'
30: 0, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
22: { # 'ч'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 1, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 3, # 'е'
24: 1, # 'ж'
20: 0, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 2, # 'л'
12: 1, # 'м'
5: 3, # 'н'
1: 2, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 1, # 'с'
6: 3, # 'т'
14: 3, # 'у'
39: 1, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 1, # 'ч'
25: 2, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 3, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
25: { # 'ш'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 1, # 'б'
10: 2, # 'в'
19: 1, # 'г'
13: 0, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 2, # 'м'
5: 3, # 'н'
1: 3, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 1, # 'с'
6: 2, # 'т'
14: 3, # 'у'
39: 2, # 'ф'
26: 1, # 'х'
28: 1, # 'ц'
22: 1, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 3, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 0, # 'я'
},
29: { # 'щ'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 3, # 'а'
21: 0, # 'б'
10: 1, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 3, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 3, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 1, # 'м'
5: 2, # 'н'
1: 1, # 'о'
15: 0, # 'п'
9: 2, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 2, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 2, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 0, # 'я'
},
54: { # 'ъ'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 0, # 'б'
10: 0, # 'в'
19: 0, # 'г'
13: 0, # 'д'
2: 2, # 'е'
24: 0, # 'ж'
20: 0, # 'з'
4: 0, # 'и'
23: 0, # 'й'
11: 0, # 'к'
8: 0, # 'л'
12: 0, # 'м'
5: 0, # 'н'
1: 0, # 'о'
15: 0, # 'п'
9: 0, # 'р'
7: 0, # 'с'
6: 0, # 'т'
14: 0, # 'у'
39: 0, # 'ф'
26: 0, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 0, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 1, # 'ю'
16: 2, # 'я'
},
18: { # 'ы'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 3, # 'б'
10: 3, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 2, # 'и'
23: 3, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 1, # 'о'
15: 3, # 'п'
9: 3, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 1, # 'у'
39: 0, # 'ф'
26: 3, # 'х'
28: 2, # 'ц'
22: 3, # 'ч'
25: 3, # 'ш'
29: 2, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 0, # 'ю'
16: 2, # 'я'
},
17: { # 'ь'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 2, # 'б'
10: 2, # 'в'
19: 2, # 'г'
13: 2, # 'д'
2: 3, # 'е'
24: 1, # 'ж'
20: 3, # 'з'
4: 2, # 'и'
23: 0, # 'й'
11: 3, # 'к'
8: 0, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 2, # 'о'
15: 2, # 'п'
9: 1, # 'р'
7: 3, # 'с'
6: 2, # 'т'
14: 0, # 'у'
39: 2, # 'ф'
26: 1, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 3, # 'ш'
29: 2, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 3, # 'ю'
16: 3, # 'я'
},
30: { # 'э'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 1, # 'М'
31: 1, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 1, # 'Р'
32: 1, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 1, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 1, # 'б'
10: 1, # 'в'
19: 1, # 'г'
13: 2, # 'д'
2: 1, # 'е'
24: 0, # 'ж'
20: 1, # 'з'
4: 0, # 'и'
23: 2, # 'й'
11: 2, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 2, # 'н'
1: 0, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 2, # 'с'
6: 3, # 'т'
14: 1, # 'у'
39: 2, # 'ф'
26: 1, # 'х'
28: 0, # 'ц'
22: 0, # 'ч'
25: 1, # 'ш'
29: 0, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 1, # 'ю'
16: 1, # 'я'
},
27: { # 'ю'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 2, # 'а'
21: 3, # 'б'
10: 1, # 'в'
19: 2, # 'г'
13: 3, # 'д'
2: 1, # 'е'
24: 2, # 'ж'
20: 2, # 'з'
4: 1, # 'и'
23: 1, # 'й'
11: 2, # 'к'
8: 2, # 'л'
12: 2, # 'м'
5: 2, # 'н'
1: 1, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 0, # 'у'
39: 1, # 'ф'
26: 2, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 2, # 'ш'
29: 3, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 1, # 'э'
27: 2, # 'ю'
16: 1, # 'я'
},
16: { # 'я'
37: 0, # 'А'
44: 0, # 'Б'
33: 0, # 'В'
46: 0, # 'Г'
41: 0, # 'Д'
48: 0, # 'Е'
56: 0, # 'Ж'
51: 0, # 'З'
42: 0, # 'И'
60: 0, # 'Й'
36: 0, # 'К'
49: 0, # 'Л'
38: 0, # 'М'
31: 0, # 'Н'
34: 0, # 'О'
35: 0, # 'П'
45: 0, # 'Р'
32: 0, # 'С'
40: 0, # 'Т'
52: 0, # 'У'
53: 0, # 'Ф'
55: 0, # 'Х'
58: 0, # 'Ц'
50: 0, # 'Ч'
57: 0, # 'Ш'
63: 0, # 'Щ'
62: 0, # 'Ы'
61: 0, # 'Ь'
47: 0, # 'Э'
59: 0, # 'Ю'
43: 0, # 'Я'
3: 0, # 'а'
21: 2, # 'б'
10: 3, # 'в'
19: 2, # 'г'
13: 3, # 'д'
2: 3, # 'е'
24: 3, # 'ж'
20: 3, # 'з'
4: 2, # 'и'
23: 2, # 'й'
11: 3, # 'к'
8: 3, # 'л'
12: 3, # 'м'
5: 3, # 'н'
1: 0, # 'о'
15: 2, # 'п'
9: 2, # 'р'
7: 3, # 'с'
6: 3, # 'т'
14: 1, # 'у'
39: 1, # 'ф'
26: 3, # 'х'
28: 2, # 'ц'
22: 2, # 'ч'
25: 2, # 'ш'
29: 3, # 'щ'
54: 0, # 'ъ'
18: 0, # 'ы'
17: 0, # 'ь'
30: 0, # 'э'
27: 2, # 'ю'
16: 2, # 'я'
},
}
# 255: Undefined characters that did not exist in training text
# 254: Carriage return / line feed ('\r', '\n')
# 253: symbol (punctuation) that does not belong to a word
# 252: 0 - 9
# 251: Control characters
# Character Mapping Table(s):
IBM866_RUSSIAN_CHAR_TO_ORDER = {
0: 255, # '\x00'
1: 255, # '\x01'
2: 255, # '\x02'
3: 255, # '\x03'
4: 255, # '\x04'
5: 255, # '\x05'
6: 255, # '\x06'
7: 255, # '\x07'
8: 255, # '\x08'
9: 255, # '\t'
10: 254, # '\n'
11: 255, # '\x0b'
12: 255, # '\x0c'
13: 254, # '\r'
14: 255, # '\x0e'
15: 255, # '\x0f'
16: 255, # '\x10'
17: 255, # '\x11'
18: 255, # '\x12'
19: 255, # '\x13'
20: 255, # '\x14'
21: 255, # '\x15'
22: 255, # '\x16'
23: 255, # '\x17'
24: 255, # '\x18'
25: 255, # '\x19'
26: 255, # '\x1a'
27: 255, # '\x1b'
28: 255, # '\x1c'
29: 255, # '\x1d'
30: 255, # '\x1e'
31: 255, # '\x1f'
32: 253, # ' '
33: 253, # '!'
34: 253, # '"'
35: 253, # '#'
36: 253, # '$'
37: 253, # '%'
38: 253, # '&'
39: 253, # "'"
40: 253, # '('
41: 253, # ')'
42: 253, # '*'
43: 253, # '+'
44: 253, # ','
45: 253, # '-'
46: 253, # '.'
47: 253, # '/'
48: 252, # '0'
49: 252, # '1'
50: 252, # '2'
51: 252, # '3'
52: 252, # '4'
53: 252, # '5'
54: 252, # '6'
55: 252, # '7'
56: 252, # '8'
57: 252, # '9'
58: 253, # ':'
59: 253, # ';'
60: 253, # '<'
61: 253, # '='
62: 253, # '>'
63: 253, # '?'
64: 253, # '@'
65: 142, # 'A'
66: 143, # 'B'
67: 144, # 'C'
68: 145, # 'D'
69: 146, # 'E'
70: 147, # 'F'
71: 148, # 'G'
72: 149, # 'H'
73: 150, # 'I'
74: 151, # 'J'
75: 152, # 'K'
76: 74, # 'L'
77: 153, # 'M'
78: 75, # 'N'
79: 154, # 'O'
80: 155, # 'P'
81: 156, # 'Q'
82: 157, # 'R'
83: 158, # 'S'
84: 159, # 'T'
85: 160, # 'U'
86: 161, # 'V'
87: 162, # 'W'
88: 163, # 'X'
89: 164, # 'Y'
90: 165, # 'Z'
91: 253, # '['
92: 253, # '\\'
93: 253, # ']'
94: 253, # '^'
95: 253, # '_'
96: 253, # '`'
97: 71, # 'a'
98: 172, # 'b'
99: 66, # 'c'
100: 173, # 'd'
101: 65, # 'e'
102: 174, # 'f'
103: 76, # 'g'
104: 175, # 'h'
105: 64, # 'i'
106: 176, # 'j'
107: 177, # 'k'
108: 77, # 'l'
109: 72, # 'm'
110: 178, # 'n'
111: 69, # 'o'
112: 67, # 'p'
113: 179, # 'q'
114: 78, # 'r'
115: 73, # 's'
116: 180, # 't'
117: 181, # 'u'
118: 79, # 'v'
119: 182, # 'w'
120: 183, # 'x'
121: 184, # 'y'
122: 185, # 'z'
123: 253, # '{'
124: 253, # '|'
125: 253, # '}'
126: 253, # '~'
127: 253, # '\x7f'
128: 37, # 'А'
129: 44, # 'Б'
130: 33, # 'В'
131: 46, # 'Г'
132: 41, # 'Д'
133: 48, # 'Е'
134: 56, # 'Ж'
135: 51, # 'З'
136: 42, # 'И'
137: 60, # 'Й'
138: 36, # 'К'
139: 49, # 'Л'
140: 38, # 'М'
141: 31, # 'Н'
142: 34, # 'О'
143: 35, # 'П'
144: 45, # 'Р'
145: 32, # 'С'
146: 40, # 'Т'
147: 52, # 'У'
148: 53, # 'Ф'
149: 55, # 'Х'
150: 58, # 'Ц'
151: 50, # 'Ч'
152: 57, # 'Ш'
153: 63, # 'Щ'
154: 70, # 'Ъ'
155: 62, # 'Ы'
156: 61, # 'Ь'
157: 47, # 'Э'
158: 59, # 'Ю'
159: 43, # 'Я'
160: 3, # 'а'
161: 21, # 'б'
162: 10, # 'в'
163: 19, # 'г'
164: 13, # 'д'
165: 2, # 'е'
166: 24, # 'ж'
167: 20, # 'з'
168: 4, # 'и'
169: 23, # 'й'
170: 11, # 'к'
171: 8, # 'л'
172: 12, # 'м'
173: 5, # 'н'
174: 1, # 'о'
175: 15, # 'п'
176: 191, # '░'
177: 192, # '▒'
178: 193, # '▓'
179: 194, # '│'
180: 195, # '┤'
181: 196, # '╡'
182: 197, # '╢'
183: 198, # '╖'
184: 199, # '╕'
185: 200, # '╣'
186: 201, # '║'
187: 202, # '╗'
188: 203, # '╝'
189: 204, # '╜'
190: 205, # '╛'
191: 206, # '┐'
192: 207, # '└'
193: 208, # '┴'
194: 209, # '┬'
195: 210, # '├'
196: 211, # '─'
197: 212, # '┼'
198: 213, # '╞'
199: 214, # '╟'
200: 215, # '╚'
201: 216, # '╔'
202: 217, # '╩'
203: 218, # '╦'
204: 219, # '╠'
205: 220, # '═'
206: 221, # '╬'
207: 222, # '╧'
208: 223, # '╨'
209: 224, # '╤'
210: 225, # '╥'
211: 226, # '╙'
212: 227, # '╘'
213: 228, # '╒'
214: 229, # '╓'
215: 230, # '╫'
216: 231, # '╪'
217: 232, # '┘'
218: 233, # '┌'
219: 234, # '█'
220: 235, # '▄'
221: 236, # '▌'
222: 237, # '▐'
223: 238, # '▀'
224: 9, # 'р'
225: 7, # 'с'
226: 6, # 'т'
227: 14, # 'у'
228: 39, # 'ф'
229: 26, # 'х'
230: 28, # 'ц'
231: 22, # 'ч'
232: 25, # 'ш'
233: 29, # 'щ'
234: 54, # 'ъ'
235: 18, # 'ы'
236: 17, # 'ь'
237: 30, # 'э'
238: 27, # 'ю'
239: 16, # 'я'
240: 239, # 'Ё'
241: 68, # 'ё'
242: 240, # 'Є'
243: 241, # 'є'
244: 242, # 'Ї'
245: 243, # 'ї'
246: 244, # 'Ў'
247: 245, # 'ў'
248: 246, # '°'
249: 247, # '∙'
250: 248, # '·'
251: 249, # '√'
252: 250, # '№'
253: 251, # '¤'
254: 252, # '■'
255: 255, # '\xa0'
}
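# A minimal lookup sketch (an assumed reading of the table, not code from the
# original module): byte values map to frequency orders, with 251-255 reserved
# for the non-letter categories documented above and small orders marking the
# most frequent Cyrillic letters.
def _example_ibm866_order_lookup():
    assert IBM866_RUSSIAN_CHAR_TO_ORDER[0xE0] == 9    # byte 224 is 'р', a very frequent letter
    assert IBM866_RUSSIAN_CHAR_TO_ORDER[0x30] == 252  # the digits 0-9 share one bucket
    assert IBM866_RUSSIAN_CHAR_TO_ORDER[0x20] == 253  # ' ' and other punctuation/symbols
    return IBM866_RUSSIAN_CHAR_TO_ORDER[0xE0]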
IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM866',
language='Russian',
char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER,
language_model=RUSSIAN_LANG_MODEL,
typical_positive_ratio=0.976601,
keep_ascii_letters=False,
alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = {
0: 255, # '\x00'
1: 255, # '\x01'
2: 255, # '\x02'
3: 255, # '\x03'
4: 255, # '\x04'
5: 255, # '\x05'
6: 255, # '\x06'
7: 255, # '\x07'
8: 255, # '\x08'
9: 255, # '\t'
10: 254, # '\n'
11: 255, # '\x0b'
12: 255, # '\x0c'
13: 254, # '\r'
14: 255, # '\x0e'
15: 255, # '\x0f'
16: 255, # '\x10'
17: 255, # '\x11'
18: 255, # '\x12'
19: 255, # '\x13'
20: 255, # '\x14'
21: 255, # '\x15'
22: 255, # '\x16'
23: 255, # '\x17'
24: 255, # '\x18'
25: 255, # '\x19'
26: 255, # '\x1a'
27: 255, # '\x1b'
28: 255, # '\x1c'
29: 255, # '\x1d'
30: 255, # '\x1e'
31: 255, # '\x1f'
32: 253, # ' '
33: 253, # '!'
34: 253, # '"'
35: 253, # '#'
36: 253, # '$'
37: 253, # '%'
38: 253, # '&'
39: 253, # "'"
40: 253, # '('
41: 253, # ')'
42: 253, # '*'
43: 253, # '+'
44: 253, # ','
45: 253, # '-'
46: 253, # '.'
47: 253, # '/'
48: 252, # '0'
49: 252, # '1'
50: 252, # '2'
51: 252, # '3'
52: 252, # '4'
53: 252, # '5'
54: 252, # '6'
55: 252, # '7'
56: 252, # '8'
57: 252, # '9'
58: 253, # ':'
59: 253, # ';'
60: 253, # '<'
61: 253, # '='
62: 253, # '>'
63: 253, # '?'
64: 253, # '@'
65: 142, # 'A'
66: 143, # 'B'
67: 144, # 'C'
68: 145, # 'D'
69: 146, # 'E'
70: 147, # 'F'
71: 148, # 'G'
72: 149, # 'H'
73: 150, # 'I'
74: 151, # 'J'
75: 152, # 'K'
76: 74, # 'L'
77: 153, # 'M'
78: 75, # 'N'
79: 154, # 'O'
80: 155, # 'P'
81: 156, # 'Q'
82: 157, # 'R'
83: 158, # 'S'
84: 159, # 'T'
85: 160, # 'U'
86: 161, # 'V'
87: 162, # 'W'
88: 163, # 'X'
89: 164, # 'Y'
90: 165, # 'Z'
91: 253, # '['
92: 253, # '\\'
93: 253, # ']'
94: 253, # '^'
95: 253, # '_'
96: 253, # '`'
97: 71, # 'a'
98: 172, # 'b'
99: 66, # 'c'
100: 173, # 'd'
101: 65, # 'e'
102: 174, # 'f'
103: 76, # 'g'
104: 175, # 'h'
105: 64, # 'i'
106: 176, # 'j'
107: 177, # 'k'
108: 77, # 'l'
109: 72, # 'm'
110: 178, # 'n'
111: 69, # 'o'
112: 67, # 'p'
113: 179, # 'q'
114: 78, # 'r'
115: 73, # 's'
116: 180, # 't'
117: 181, # 'u'
118: 79, # 'v'
119: 182, # 'w'
120: 183, # 'x'
121: 184, # 'y'
122: 185, # 'z'
123: 253, # '{'
124: 253, # '|'
125: 253, # '}'
126: 253, # '~'
127: 253, # '\x7f'
128: 191, # 'Ђ'
129: 192, # 'Ѓ'
130: 193, # '‚'
131: 194, # 'ѓ'
132: 195, # '„'
133: 196, # '…'
134: 197, # '†'
135: 198, # '‡'
136: 199, # '€'
137: 200, # '‰'
138: 201, # 'Љ'
139: 202, # '‹'
140: 203, # 'Њ'
141: 204, # 'Ќ'
142: 205, # 'Ћ'
143: 206, # 'Џ'
144: 207, # 'ђ'
145: 208, # '‘'
146: 209, # '’'
147: 210, # '“'
148: 211, # '”'
149: 212, # '•'
150: 213, # '–'
151: 214, # '—'
152: 215, # None
153: 216, # '™'
154: 217, # 'љ'
155: 218, # '›'
156: 219, # 'њ'
157: 220, # 'ќ'
158: 221, # 'ћ'
159: 222, # 'џ'
160: 223, # '\xa0'
161: 224, # 'Ў'
162: 225, # 'ў'
163: 226, # 'Ј'
164: 227, # '¤'
165: 228, # 'Ґ'
166: 229, # '¦'
167: 230, # '§'
168: 231, # 'Ё'
169: 232, # '©'
170: 233, # 'Є'
171: 234, # '«'
172: 235, # '¬'
173: 236, # '\xad'
174: 237, # '®'
175: 238, # 'Ї'
176: 239, # '°'
177: 240, # '±'
178: 241, # 'І'
179: 242, # 'і'
180: 243, # 'ґ'
181: 244, # 'µ'
182: 245, # '¶'
183: 246, # '·'
184: 68, # 'ё'
185: 247, # '№'
186: 248, # 'є'
187: 249, # '»'
188: 250, # 'ј'
189: 251, # 'Ѕ'
190: 252, # 'ѕ'
191: 253, # 'ї'
192: 37, # 'А'
193: 44, # 'Б'
194: 33, # 'В'
195: 46, # 'Г'
196: 41, # 'Д'
197: 48, # 'Е'
198: 56, # 'Ж'
199: 51, # 'З'
200: 42, # 'И'
201: 60, # 'Й'
202: 36, # 'К'
203: 49, # 'Л'
204: 38, # 'М'
205: 31, # 'Н'
206: 34, # 'О'
207: 35, # 'П'
208: 45, # 'Р'
209: 32, # 'С'
210: 40, # 'Т'
211: 52, # 'У'
212: 53, # 'Ф'
213: 55, # 'Х'
214: 58, # 'Ц'
215: 50, # 'Ч'
216: 57, # 'Ш'
217: 63, # 'Щ'
218: 70, # 'Ъ'
219: 62, # 'Ы'
220: 61, # 'Ь'
221: 47, # 'Э'
222: 59, # 'Ю'
223: 43, # 'Я'
224: 3, # 'а'
225: 21, # 'б'
226: 10, # 'в'
227: 19, # 'г'
228: 13, # 'д'
229: 2, # 'е'
230: 24, # 'ж'
231: 20, # 'з'
232: 4, # 'и'
233: 23, # 'й'
234: 11, # 'к'
235: 8, # 'л'
236: 12, # 'м'
237: 5, # 'н'
238: 1, # 'о'
239: 15, # 'п'
240: 9, # 'р'
241: 7, # 'с'
242: 6, # 'т'
243: 14, # 'у'
244: 39, # 'ф'
245: 26, # 'х'
246: 28, # 'ц'
247: 22, # 'ч'
248: 25, # 'ш'
249: 29, # 'щ'
250: 54, # 'ъ'
251: 18, # 'ы'
252: 17, # 'ь'
253: 30, # 'э'
254: 27, # 'ю'
255: 16, # 'я'
}
WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251',
language='Russian',
char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER,
language_model=RUSSIAN_LANG_MODEL,
typical_positive_ratio=0.976601,
keep_ascii_letters=False,
alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
IBM855_RUSSIAN_CHAR_TO_ORDER = {
0: 255, # '\x00'
1: 255, # '\x01'
2: 255, # '\x02'
3: 255, # '\x03'
4: 255, # '\x04'
5: 255, # '\x05'
6: 255, # '\x06'
7: 255, # '\x07'
8: 255, # '\x08'
9: 255, # '\t'
10: 254, # '\n'
11: 255, # '\x0b'
12: 255, # '\x0c'
13: 254, # '\r'
14: 255, # '\x0e'
15: 255, # '\x0f'
16: 255, # '\x10'
17: 255, # '\x11'
18: 255, # '\x12'
19: 255, # '\x13'
20: 255, # '\x14'
21: 255, # '\x15'
22: 255, # '\x16'
23: 255, # '\x17'
24: 255, # '\x18'
25: 255, # '\x19'
26: 255, # '\x1a'
27: 255, # '\x1b'
28: 255, # '\x1c'
29: 255, # '\x1d'
30: 255, # '\x1e'
31: 255, # '\x1f'
32: 253, # ' '
33: 253, # '!'
34: 253, # '"'
35: 253, # '#'
36: 253, # '$'
37: 253, # '%'
38: 253, # '&'
39: 253, # "'"
40: 253, # '('
41: 253, # ')'
42: 253, # '*'
43: 253, # '+'
44: 253, # ','
45: 253, # '-'
46: 253, # '.'
47: 253, # '/'
48: 252, # '0'
49: 252, # '1'
50: 252, # '2'
51: 252, # '3'
52: 252, # '4'
53: 252, # '5'
54: 252, # '6'
55: 252, # '7'
56: 252, # '8'
57: 252, # '9'
58: 253, # ':'
59: 253, # ';'
60: 253, # '<'
61: 253, # '='
62: 253, # '>'
63: 253, # '?'
64: 253, # '@'
65: 142, # 'A'
66: 143, # 'B'
67: 144, # 'C'
68: 145, # 'D'
69: 146, # 'E'
70: 147, # 'F'
71: 148, # 'G'
72: 149, # 'H'
73: 150, # 'I'
74: 151, # 'J'
75: 152, # 'K'
76: 74, # 'L'
77: 153, # 'M'
78: 75, # 'N'
79: 154, # 'O'
80: 155, # 'P'
81: 156, # 'Q'
82: 157, # 'R'
83: 158, # 'S'
84: 159, # 'T'
85: 160, # 'U'
86: 161, # 'V'
87: 162, # 'W'
88: 163, # 'X'
89: 164, # 'Y'
90: 165, # 'Z'
91: 253, # '['
92: 253, # '\\'
93: 253, # ']'
94: 253, # '^'
95: 253, # '_'
96: 253, # '`'
97: 71, # 'a'
98: 172, # 'b'
99: 66, # 'c'
100: 173, # 'd'
101: 65, # 'e'
102: 174, # 'f'
103: 76, # 'g'
104: 175, # 'h'
105: 64, # 'i'
106: 176, # 'j'
107: 177, # 'k'
108: 77, # 'l'
109: 72, # 'm'
110: 178, # 'n'
111: 69, # 'o'
112: 67, # 'p'
113: 179, # 'q'
114: 78, # 'r'
115: 73, # 's'
116: 180, # 't'
117: 181, # 'u'
118: 79, # 'v'
119: 182, # 'w'
120: 183, # 'x'
121: 184, # 'y'
122: 185, # 'z'
123: 253, # '{'
124: 253, # '|'
125: 253, # '}'
126: 253, # '~'
127: 253, # '\x7f'
128: 191, # 'ђ'
129: 192, # 'Ђ'
130: 193, # 'ѓ'
131: 194, # 'Ѓ'
132: 68, # 'ё'
133: 195, # 'Ё'
134: 196, # 'є'
135: 197, # 'Є'
136: 198, # 'ѕ'
137: 199, # 'Ѕ'
138: 200, # 'і'
139: 201, # 'І'
140: 202, # 'ї'
141: 203, # 'Ї'
142: 204, # 'ј'
143: 205, # 'Ј'
144: 206, # 'љ'
145: 207, # 'Љ'
146: 208, # 'њ'
147: 209, # 'Њ'
148: 210, # 'ћ'
149: 211, # 'Ћ'
150: 212, # 'ќ'
151: 213, # 'Ќ'
152: 214, # 'ў'
153: 215, # 'Ў'
154: 216, # 'џ'
155: 217, # 'Џ'
156: 27, # 'ю'
157: 59, # 'Ю'
158: 54, # 'ъ'
159: 70, # 'Ъ'
160: 3, # 'а'
161: 37, # 'А'
162: 21, # 'б'
163: 44, # 'Б'
164: 28, # 'ц'
165: 58, # 'Ц'
166: 13, # 'д'
167: 41, # 'Д'
168: 2, # 'е'
169: 48, # 'Е'
170: 39, # 'ф'
171: 53, # 'Ф'
172: 19, # 'г'
173: 46, # 'Г'
174: 218, # '«'
175: 219, # '»'
176: 220, # '░'
177: 221, # '▒'
178: 222, # '▓'
179: 223, # '│'
180: 224, # '┤'
181: 26, # 'х'
182: 55, # 'Х'
183: 4, # 'и'
184: 42, # 'И'
185: 225, # '╣'
186: 226, # '║'
187: 227, # '╗'
188: 228, # '╝'
189: 23, # 'й'
190: 60, # 'Й'
191: 229, # '┐'
192: 230, # '└'
193: 231, # '┴'
194: 232, # '┬'
195: 233, # '├'
196: 234, # '─'
197: 235, # '┼'
198: 11, # 'к'
199: 36, # 'К'
200: 236, # '╚'
201: 237, # '╔'
202: 238, # '╩'
203: 239, # '╦'
204: 240, # '╠'
205: 241, # '═'
206: 242, # '╬'
207: 243, # '¤'
208: 8, # 'л'
209: 49, # 'Л'
210: 12, # 'м'
211: 38, # 'М'
212: 5, # 'н'
213: 31, # 'Н'
214: 1, # 'о'
215: 34, # 'О'
216: 15, # 'п'
217: 244, # '┘'
218: 245, # '┌'
219: 246, # '█'
220: 247, # '▄'
221: 35, # 'П'
222: 16, # 'я'
223: 248, # '▀'
224: 43, # 'Я'
225: 9, # 'р'
226: 45, # 'Р'
227: 7, # 'с'
228: 32, # 'С'
229: 6, # 'т'
230: 40, # 'Т'
231: 14, # 'у'
232: 52, # 'У'
233: 24, # 'ж'
234: 56, # 'Ж'
235: 10, # 'в'
236: 33, # 'В'
237: 17, # 'ь'
238: 61, # 'Ь'
239: 249, # '№'
240: 250, # '\xad'
241: 18, # 'ы'
242: 62, # 'Ы'
243: 20, # 'з'
244: 51, # 'З'
245: 25, # 'ш'
246: 57, # 'Ш'
247: 30, # 'э'
248: 47, # 'Э'
249: 29, # 'щ'
250: 63, # 'Щ'
251: 22, # 'ч'
252: 50, # 'Ч'
253: 251, # '§'
254: 252, # '■'
255: 255, # '\xa0'
}
IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM855',
language='Russian',
char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER,
language_model=RUSSIAN_LANG_MODEL,
typical_positive_ratio=0.976601,
keep_ascii_letters=False,
alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
KOI8_R_RUSSIAN_CHAR_TO_ORDER = {
0: 255, # '\x00'
1: 255, # '\x01'
2: 255, # '\x02'
3: 255, # '\x03'
4: 255, # '\x04'
5: 255, # '\x05'
6: 255, # '\x06'
7: 255, # '\x07'
8: 255, # '\x08'
9: 255, # '\t'
10: 254, # '\n'
11: 255, # '\x0b'
12: 255, # '\x0c'
13: 254, # '\r'
14: 255, # '\x0e'
15: 255, # '\x0f'
16: 255, # '\x10'
17: 255, # '\x11'
18: 255, # '\x12'
19: 255, # '\x13'
20: 255, # '\x14'
21: 255, # '\x15'
22: 255, # '\x16'
23: 255, # '\x17'
24: 255, # '\x18'
25: 255, # '\x19'
26: 255, # '\x1a'
27: 255, # '\x1b'
28: 255, # '\x1c'
29: 255, # '\x1d'
30: 255, # '\x1e'
31: 255, # '\x1f'
32: 253, # ' '
33: 253, # '!'
34: 253, # '"'
35: 253, # '#'
36: 253, # '$'
37: 253, # '%'
38: 253, # '&'
39: 253, # "'"
40: 253, # '('
41: 253, # ')'
42: 253, # '*'
43: 253, # '+'
44: 253, # ','
45: 253, # '-'
46: 253, # '.'
47: 253, # '/'
48: 252, # '0'
49: 252, # '1'
50: 252, # '2'
51: 252, # '3'
52: 252, # '4'
53: 252, # '5'
54: 252, # '6'
55: 252, # '7'
56: 252, # '8'
57: 252, # '9'
58: 253, # ':'
59: 253, # ';'
60: 253, # '<'
61: 253, # '='
62: 253, # '>'
63: 253, # '?'
64: 253, # '@'
65: 142, # 'A'
66: 143, # 'B'
67: 144, # 'C'
68: 145, # 'D'
69: 146, # 'E'
70: 147, # 'F'
71: 148, # 'G'
72: 149, # 'H'
73: 150, # 'I'
74: 151, # 'J'
75: 152, # 'K'
76: 74, # 'L'
77: 153, # 'M'
78: 75, # 'N'
79: 154, # 'O'
80: 155, # 'P'
81: 156, # 'Q'
82: 157, # 'R'
83: 158, # 'S'
84: 159, # 'T'
85: 160, # 'U'
86: 161, # 'V'
87: 162, # 'W'
88: 163, # 'X'
89: 164, # 'Y'
90: 165, # 'Z'
91: 253, # '['
92: 253, # '\\'
93: 253, # ']'
94: 253, # '^'
95: 253, # '_'
96: 253, # '`'
97: 71, # 'a'
98: 172, # 'b'
99: 66, # 'c'
100: 173, # 'd'
101: 65, # 'e'
102: 174, # 'f'
103: 76, # 'g'
104: 175, # 'h'
105: 64, # 'i'
106: 176, # 'j'
107: 177, # 'k'
108: 77, # 'l'
109: 72, # 'm'
110: 178, # 'n'
111: 69, # 'o'
112: 67, # 'p'
113: 179, # 'q'
114: 78, # 'r'
115: 73, # 's'
116: 180, # 't'
117: 181, # 'u'
118: 79, # 'v'
119: 182, # 'w'
120: 183, # 'x'
121: 184, # 'y'
122: 185, # 'z'
123: 253, # '{'
124: 253, # '|'
125: 253, # '}'
126: 253, # '~'
127: 253, # '\x7f'
128: 191, # '─'
129: 192, # '│'
130: 193, # '┌'
131: 194, # '┐'
132: 195, # '└'
133: 196, # '┘'
134: 197, # '├'
135: 198, # '┤'
136: 199, # '┬'
137: 200, # '┴'
138: 201, # '┼'
139: 202, # '▀'
140: 203, # '▄'
141: 204, # '█'
142: 205, # '▌'
143: 206, # '▐'
144: 207, # '░'
145: 208, # '▒'
146: 209, # '▓'
147: 210, # '⌠'
148: 211, # '■'
149: 212, # '∙'
150: 213, # '√'
151: 214, # '≈'
152: 215, # '≤'
153: 216, # '≥'
154: 217, # '\xa0'
155: 218, # '⌡'
156: 219, # '°'
157: 220, # '²'
158: 221, # '·'
159: 222, # '÷'
160: 223, # '═'
161: 224, # '║'
162: 225, # '╒'
163: 68, # 'ё'
164: 226, # '╓'
165: 227, # '╔'
166: 228, # '╕'
167: 229, # '╖'
168: 230, # '╗'
169: 231, # '╘'
170: 232, # '╙'
171: 233, # '╚'
172: 234, # '╛'
173: 235, # '╜'
174: 236, # '╝'
175: 237, # '╞'
176: 238, # '╟'
177: 239, # '╠'
178: 240, # '╡'
179: 241, # 'Ё'
180: 242, # '╢'
181: 243, # '╣'
182: 244, # '╤'
183: 245, # '╥'
184: 246, # '╦'
185: 247, # '╧'
186: 248, # '╨'
187: 249, # '╩'
188: 250, # '╪'
189: 251, # '╫'
190: 252, # '╬'
191: 253, # '©'
192: 27, # 'ю'
193: 3, # 'а'
194: 21, # 'б'
195: 28, # 'ц'
196: 13, # 'д'
197: 2, # 'е'
198: 39, # 'ф'
199: 19, # 'г'
200: 26, # 'х'
201: 4, # 'и'
202: 23, # 'й'
203: 11, # 'к'
204: 8, # 'л'
205: 12, # 'м'
206: 5, # 'н'
207: 1, # 'о'
208: 15, # 'п'
209: 16, # 'я'
210: 9, # 'р'
211: 7, # 'с'
212: 6, # 'т'
213: 14, # 'у'
214: 24, # 'ж'
215: 10, # 'в'
216: 17, # 'ь'
217: 18, # 'ы'
218: 20, # 'з'
219: 25, # 'ш'
220: 30, # 'э'
221: 29, # 'щ'
222: 22, # 'ч'
223: 54, # 'ъ'
224: 59, # 'Ю'
225: 37, # 'А'
226: 44, # 'Б'
227: 58, # 'Ц'
228: 41, # 'Д'
229: 48, # 'Е'
230: 53, # 'Ф'
231: 46, # 'Г'
232: 55, # 'Х'
233: 42, # 'И'
234: 60, # 'Й'
235: 36, # 'К'
236: 49, # 'Л'
237: 38, # 'М'
238: 31, # 'Н'
239: 34, # 'О'
240: 35, # 'П'
241: 43, # 'Я'
242: 45, # 'Р'
243: 32, # 'С'
244: 40, # 'Т'
245: 52, # 'У'
246: 56, # 'Ж'
247: 33, # 'В'
248: 61, # 'Ь'
249: 62, # 'Ы'
250: 51, # 'З'
251: 57, # 'Ш'
252: 47, # 'Э'
253: 63, # 'Щ'
254: 50, # 'Ч'
255: 70, # 'Ъ'
}
KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='KOI8-R',
language='Russian',
char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER,
language_model=RUSSIAN_LANG_MODEL,
typical_positive_ratio=0.976601,
keep_ascii_letters=False,
alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = {
0: 255, # '\x00'
1: 255, # '\x01'
2: 255, # '\x02'
3: 255, # '\x03'
4: 255, # '\x04'
5: 255, # '\x05'
6: 255, # '\x06'
7: 255, # '\x07'
8: 255, # '\x08'
9: 255, # '\t'
10: 254, # '\n'
11: 255, # '\x0b'
12: 255, # '\x0c'
13: 254, # '\r'
14: 255, # '\x0e'
15: 255, # '\x0f'
16: 255, # '\x10'
17: 255, # '\x11'
18: 255, # '\x12'
19: 255, # '\x13'
20: 255, # '\x14'
21: 255, # '\x15'
22: 255, # '\x16'
23: 255, # '\x17'
24: 255, # '\x18'
25: 255, # '\x19'
26: 255, # '\x1a'
27: 255, # '\x1b'
28: 255, # '\x1c'
29: 255, # '\x1d'
30: 255, # '\x1e'
31: 255, # '\x1f'
32: 253, # ' '
33: 253, # '!'
34: 253, # '"'
35: 253, # '#'
36: 253, # '$'
37: 253, # '%'
38: 253, # '&'
39: 253, # "'"
40: 253, # '('
41: 253, # ')'
42: 253, # '*'
43: 253, # '+'
44: 253, # ','
45: 253, # '-'
46: 253, # '.'
47: 253, # '/'
48: 252, # '0'
49: 252, # '1'
50: 252, # '2'
51: 252, # '3'
52: 252, # '4'
53: 252, # '5'
54: 252, # '6'
55: 252, # '7'
56: 252, # '8'
57: 252, # '9'
58: 253, # ':'
59: 253, # ';'
60: 253, # '<'
61: 253, # '='
62: 253, # '>'
63: 253, # '?'
64: 253, # '@'
65: 142, # 'A'
66: 143, # 'B'
67: 144, # 'C'
68: 145, # 'D'
69: 146, # 'E'
70: 147, # 'F'
71: 148, # 'G'
72: 149, # 'H'
73: 150, # 'I'
74: 151, # 'J'
75: 152, # 'K'
76: 74, # 'L'
77: 153, # 'M'
78: 75, # 'N'
79: 154, # 'O'
80: 155, # 'P'
81: 156, # 'Q'
82: 157, # 'R'
83: 158, # 'S'
84: 159, # 'T'
85: 160, # 'U'
86: 161, # 'V'
87: 162, # 'W'
88: 163, # 'X'
89: 164, # 'Y'
90: 165, # 'Z'
91: 253, # '['
92: 253, # '\\'
93: 253, # ']'
94: 253, # '^'
95: 253, # '_'
96: 253, # '`'
97: 71, # 'a'
98: 172, # 'b'
99: 66, # 'c'
100: 173, # 'd'
101: 65, # 'e'
102: 174, # 'f'
103: 76, # 'g'
104: 175, # 'h'
105: 64, # 'i'
106: 176, # 'j'
107: 177, # 'k'
108: 77, # 'l'
109: 72, # 'm'
110: 178, # 'n'
111: 69, # 'o'
112: 67, # 'p'
113: 179, # 'q'
114: 78, # 'r'
115: 73, # 's'
116: 180, # 't'
117: 181, # 'u'
118: 79, # 'v'
119: 182, # 'w'
120: 183, # 'x'
121: 184, # 'y'
122: 185, # 'z'
123: 253, # '{'
124: 253, # '|'
125: 253, # '}'
126: 253, # '~'
127: 253, # '\x7f'
128: 37, # 'А'
129: 44, # 'Б'
130: 33, # 'В'
131: 46, # 'Г'
132: 41, # 'Д'
133: 48, # 'Е'
134: 56, # 'Ж'
135: 51, # 'З'
136: 42, # 'И'
137: 60, # 'Й'
138: 36, # 'К'
139: 49, # 'Л'
140: 38, # 'М'
141: 31, # 'Н'
142: 34, # 'О'
143: 35, # 'П'
144: 45, # 'Р'
145: 32, # 'С'
146: 40, # 'Т'
147: 52, # 'У'
148: 53, # 'Ф'
149: 55, # 'Х'
150: 58, # 'Ц'
151: 50, # 'Ч'
152: 57, # 'Ш'
153: 63, # 'Щ'
154: 70, # 'Ъ'
155: 62, # 'Ы'
156: 61, # 'Ь'
157: 47, # 'Э'
158: 59, # 'Ю'
159: 43, # 'Я'
160: 191, # '†'
161: 192, # '°'
162: 193, # 'Ґ'
163: 194, # '£'
164: 195, # '§'
165: 196, # '•'
166: 197, # '¶'
167: 198, # 'І'
168: 199, # '®'
169: 200, # '©'
170: 201, # '™'
171: 202, # 'Ђ'
172: 203, # 'ђ'
173: 204, # '≠'
174: 205, # 'Ѓ'
175: 206, # 'ѓ'
176: 207, # '∞'
177: 208, # '±'
178: 209, # '≤'
179: 210, # '≥'
180: 211, # 'і'
181: 212, # 'µ'
182: 213, # 'ґ'
183: 214, # 'Ј'
184: 215, # 'Є'
185: 216, # 'є'
186: 217, # 'Ї'
187: 218, # 'ї'
188: 219, # 'Љ'
189: 220, # 'љ'
190: 221, # 'Њ'
191: 222, # 'њ'
192: 223, # 'ј'
193: 224, # 'Ѕ'
194: 225, # '¬'
195: 226, # '√'
196: 227, # 'ƒ'
197: 228, # '≈'
198: 229, # '∆'
199: 230, # '«'
200: 231, # '»'
201: 232, # '…'
202: 233, # '\xa0'
203: 234, # 'Ћ'
204: 235, # 'ћ'
205: 236, # 'Ќ'
206: 237, # 'ќ'
207: 238, # 'ѕ'
208: 239, # '–'
209: 240, # '—'
210: 241, # '“'
211: 242, # '”'
212: 243, # '‘'
213: 244, # '’'
214: 245, # '÷'
215: 246, # '„'
216: 247, # 'Ў'
217: 248, # 'ў'
218: 249, # 'Џ'
219: 250, # 'џ'
220: 251, # '№'
221: 252, # 'Ё'
222: 68, # 'ё'
223: 16, # 'я'
224: 3, # 'а'
225: 21, # 'б'
226: 10, # 'в'
227: 19, # 'г'
228: 13, # 'д'
229: 2, # 'е'
230: 24, # 'ж'
231: 20, # 'з'
232: 4, # 'и'
233: 23, # 'й'
234: 11, # 'к'
235: 8, # 'л'
236: 12, # 'м'
237: 5, # 'н'
238: 1, # 'о'
239: 15, # 'п'
240: 9, # 'р'
241: 7, # 'с'
242: 6, # 'т'
243: 14, # 'у'
244: 39, # 'ф'
245: 26, # 'х'
246: 28, # 'ц'
247: 22, # 'ч'
248: 25, # 'ш'
249: 29, # 'щ'
250: 54, # 'ъ'
251: 18, # 'ы'
252: 17, # 'ь'
253: 30, # 'э'
254: 27, # 'ю'
255: 255, # '€'
}
MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='MacCyrillic',
language='Russian',
char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER,
language_model=RUSSIAN_LANG_MODEL,
typical_positive_ratio=0.976601,
keep_ascii_letters=False,
alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = {
0: 255, # '\x00'
1: 255, # '\x01'
2: 255, # '\x02'
3: 255, # '\x03'
4: 255, # '\x04'
5: 255, # '\x05'
6: 255, # '\x06'
7: 255, # '\x07'
8: 255, # '\x08'
9: 255, # '\t'
10: 254, # '\n'
11: 255, # '\x0b'
12: 255, # '\x0c'
13: 254, # '\r'
14: 255, # '\x0e'
15: 255, # '\x0f'
16: 255, # '\x10'
17: 255, # '\x11'
18: 255, # '\x12'
19: 255, # '\x13'
20: 255, # '\x14'
21: 255, # '\x15'
22: 255, # '\x16'
23: 255, # '\x17'
24: 255, # '\x18'
25: 255, # '\x19'
26: 255, # '\x1a'
27: 255, # '\x1b'
28: 255, # '\x1c'
29: 255, # '\x1d'
30: 255, # '\x1e'
31: 255, # '\x1f'
32: 253, # ' '
33: 253, # '!'
34: 253, # '"'
35: 253, # '#'
36: 253, # '$'
37: 253, # '%'
38: 253, # '&'
39: 253, # "'"
40: 253, # '('
41: 253, # ')'
42: 253, # '*'
43: 253, # '+'
44: 253, # ','
45: 253, # '-'
46: 253, # '.'
47: 253, # '/'
48: 252, # '0'
49: 252, # '1'
50: 252, # '2'
51: 252, # '3'
52: 252, # '4'
53: 252, # '5'
54: 252, # '6'
55: 252, # '7'
56: 252, # '8'
57: 252, # '9'
58: 253, # ':'
59: 253, # ';'
60: 253, # '<'
61: 253, # '='
62: 253, # '>'
63: 253, # '?'
64: 253, # '@'
65: 142, # 'A'
66: 143, # 'B'
67: 144, # 'C'
68: 145, # 'D'
69: 146, # 'E'
70: 147, # 'F'
71: 148, # 'G'
72: 149, # 'H'
73: 150, # 'I'
74: 151, # 'J'
75: 152, # 'K'
76: 74, # 'L'
77: 153, # 'M'
78: 75, # 'N'
79: 154, # 'O'
80: 155, # 'P'
81: 156, # 'Q'
82: 157, # 'R'
83: 158, # 'S'
84: 159, # 'T'
85: 160, # 'U'
86: 161, # 'V'
87: 162, # 'W'
88: 163, # 'X'
89: 164, # 'Y'
90: 165, # 'Z'
91: 253, # '['
92: 253, # '\\'
93: 253, # ']'
94: 253, # '^'
95: 253, # '_'
96: 253, # '`'
97: 71, # 'a'
98: 172, # 'b'
99: 66, # 'c'
100: 173, # 'd'
101: 65, # 'e'
102: 174, # 'f'
103: 76, # 'g'
104: 175, # 'h'
105: 64, # 'i'
106: 176, # 'j'
107: 177, # 'k'
108: 77, # 'l'
109: 72, # 'm'
110: 178, # 'n'
111: 69, # 'o'
112: 67, # 'p'
113: 179, # 'q'
114: 78, # 'r'
115: 73, # 's'
116: 180, # 't'
117: 181, # 'u'
118: 79, # 'v'
119: 182, # 'w'
120: 183, # 'x'
121: 184, # 'y'
122: 185, # 'z'
123: 253, # '{'
124: 253, # '|'
125: 253, # '}'
126: 253, # '~'
127: 253, # '\x7f'
128: 191, # '\x80'
129: 192, # '\x81'
130: 193, # '\x82'
131: 194, # '\x83'
132: 195, # '\x84'
133: 196, # '\x85'
134: 197, # '\x86'
135: 198, # '\x87'
136: 199, # '\x88'
137: 200, # '\x89'
138: 201, # '\x8a'
139: 202, # '\x8b'
140: 203, # '\x8c'
141: 204, # '\x8d'
142: 205, # '\x8e'
143: 206, # '\x8f'
144: 207, # '\x90'
145: 208, # '\x91'
146: 209, # '\x92'
147: 210, # '\x93'
148: 211, # '\x94'
149: 212, # '\x95'
150: 213, # '\x96'
151: 214, # '\x97'
152: 215, # '\x98'
153: 216, # '\x99'
154: 217, # '\x9a'
155: 218, # '\x9b'
156: 219, # '\x9c'
157: 220, # '\x9d'
158: 221, # '\x9e'
159: 222, # '\x9f'
160: 223, # '\xa0'
161: 224, # 'Ё'
162: 225, # 'Ђ'
163: 226, # 'Ѓ'
164: 227, # 'Є'
165: 228, # 'Ѕ'
166: 229, # 'І'
167: 230, # 'Ї'
168: 231, # 'Ј'
169: 232, # 'Љ'
170: 233, # 'Њ'
171: 234, # 'Ћ'
172: 235, # 'Ќ'
173: 236, # '\xad'
174: 237, # 'Ў'
175: 238, # 'Џ'
176: 37, # 'А'
177: 44, # 'Б'
178: 33, # 'В'
179: 46, # 'Г'
180: 41, # 'Д'
181: 48, # 'Е'
182: 56, # 'Ж'
183: 51, # 'З'
184: 42, # 'И'
185: 60, # 'Й'
186: 36, # 'К'
187: 49, # 'Л'
188: 38, # 'М'
189: 31, # 'Н'
190: 34, # 'О'
191: 35, # 'П'
192: 45, # 'Р'
193: 32, # 'С'
194: 40, # 'Т'
195: 52, # 'У'
196: 53, # 'Ф'
197: 55, # 'Х'
198: 58, # 'Ц'
199: 50, # 'Ч'
200: 57, # 'Ш'
201: 63, # 'Щ'
202: 70, # 'Ъ'
203: 62, # 'Ы'
204: 61, # 'Ь'
205: 47, # 'Э'
206: 59, # 'Ю'
207: 43, # 'Я'
208: 3, # 'а'
209: 21, # 'б'
210: 10, # 'в'
211: 19, # 'г'
212: 13, # 'д'
213: 2, # 'е'
214: 24, # 'ж'
215: 20, # 'з'
216: 4, # 'и'
217: 23, # 'й'
218: 11, # 'к'
219: 8, # 'л'
220: 12, # 'м'
221: 5, # 'н'
222: 1, # 'о'
223: 15, # 'п'
224: 9, # 'р'
225: 7, # 'с'
226: 6, # 'т'
227: 14, # 'у'
228: 39, # 'ф'
229: 26, # 'х'
230: 28, # 'ц'
231: 22, # 'ч'
232: 25, # 'ш'
233: 29, # 'щ'
234: 54, # 'ъ'
235: 18, # 'ы'
236: 17, # 'ь'
237: 30, # 'э'
238: 27, # 'ю'
239: 16, # 'я'
240: 239, # '№'
241: 68, # 'ё'
242: 240, # 'ђ'
243: 241, # 'ѓ'
244: 242, # 'є'
245: 243, # 'ѕ'
246: 244, # 'і'
247: 245, # 'ї'
248: 246, # 'ј'
249: 247, # 'љ'
250: 248, # 'њ'
251: 249, # 'ћ'
252: 250, # 'ќ'
253: 251, # '§'
254: 252, # 'ў'
255: 255, # 'џ'
}
ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5',
language='Russian',
char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER,
language_model=RUSSIAN_LANG_MODEL,
typical_positive_ratio=0.976601,
keep_ascii_letters=False,
alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
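# A minimal usage sketch.  These models are consumed internally by chardet's
# single-byte charset probers rather than called directly; the usual entry
# point is the top-level detect() helper.  The result shown in the comment is
# a typical outcome, not a guaranteed value.
def _example_detect_russian():
    import chardet  # imported lazily so the sketch cannot affect module loading
    sample = 'Пример русского текста для определения кодировки.'.encode('windows-1251')
    result = chardet.detect(sample)
    # Typically something like:
    #   {'encoding': 'windows-1251', 'confidence': 0.9..., 'language': 'Russian'}
    return result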
|
brython-dev/brython
|
refs/heads/master
|
www/src/Lib/encodings/cp875.py
|
37
|
""" Python Character Mapping Codec cp875 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP875.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    pass
class StreamReader(Codec,codecs.StreamReader):
    pass
### encodings module API
def getregentry():
    return codecs.CodecInfo(
        name='cp875',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
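# A minimal round-trip sketch, assuming the interpreter's registered 'cp875'
# codec (generated from the same mapping) is available; the byte values used
# here come straight from the decoding table below (0x41-0x43 -> Greek capital
# Alpha, Beta, Gamma).
def _example_cp875_roundtrip():
    raw = 'ΑΒΓ'.encode('cp875')          # Greek capitals encode to bytes 0x41, 0x42, 0x43
    assert raw == b'\x41\x42\x43'
    assert raw.decode('cp875') == 'ΑΒΓ'  # decoding restores the Greek letters
    return raw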
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x9c' # 0x04 -> CONTROL
'\t' # 0x05 -> HORIZONTAL TABULATION
'\x86' # 0x06 -> CONTROL
'\x7f' # 0x07 -> DELETE
'\x97' # 0x08 -> CONTROL
'\x8d' # 0x09 -> CONTROL
'\x8e' # 0x0A -> CONTROL
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x9d' # 0x14 -> CONTROL
'\x85' # 0x15 -> CONTROL
'\x08' # 0x16 -> BACKSPACE
'\x87' # 0x17 -> CONTROL
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x92' # 0x1A -> CONTROL
'\x8f' # 0x1B -> CONTROL
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
'\x80' # 0x20 -> CONTROL
'\x81' # 0x21 -> CONTROL
'\x82' # 0x22 -> CONTROL
'\x83' # 0x23 -> CONTROL
'\x84' # 0x24 -> CONTROL
'\n' # 0x25 -> LINE FEED
'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
'\x1b' # 0x27 -> ESCAPE
'\x88' # 0x28 -> CONTROL
'\x89' # 0x29 -> CONTROL
'\x8a' # 0x2A -> CONTROL
'\x8b' # 0x2B -> CONTROL
'\x8c' # 0x2C -> CONTROL
'\x05' # 0x2D -> ENQUIRY
'\x06' # 0x2E -> ACKNOWLEDGE
'\x07' # 0x2F -> BELL
'\x90' # 0x30 -> CONTROL
'\x91' # 0x31 -> CONTROL
'\x16' # 0x32 -> SYNCHRONOUS IDLE
'\x93' # 0x33 -> CONTROL
'\x94' # 0x34 -> CONTROL
'\x95' # 0x35 -> CONTROL
'\x96' # 0x36 -> CONTROL
'\x04' # 0x37 -> END OF TRANSMISSION
'\x98' # 0x38 -> CONTROL
'\x99' # 0x39 -> CONTROL
'\x9a' # 0x3A -> CONTROL
'\x9b' # 0x3B -> CONTROL
'\x14' # 0x3C -> DEVICE CONTROL FOUR
'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
'\x9e' # 0x3E -> CONTROL
'\x1a' # 0x3F -> SUBSTITUTE
' ' # 0x40 -> SPACE
'\u0391' # 0x41 -> GREEK CAPITAL LETTER ALPHA
'\u0392' # 0x42 -> GREEK CAPITAL LETTER BETA
'\u0393' # 0x43 -> GREEK CAPITAL LETTER GAMMA
'\u0394' # 0x44 -> GREEK CAPITAL LETTER DELTA
'\u0395' # 0x45 -> GREEK CAPITAL LETTER EPSILON
'\u0396' # 0x46 -> GREEK CAPITAL LETTER ZETA
'\u0397' # 0x47 -> GREEK CAPITAL LETTER ETA
'\u0398' # 0x48 -> GREEK CAPITAL LETTER THETA
'\u0399' # 0x49 -> GREEK CAPITAL LETTER IOTA
'[' # 0x4A -> LEFT SQUARE BRACKET
'.' # 0x4B -> FULL STOP
'<' # 0x4C -> LESS-THAN SIGN
'(' # 0x4D -> LEFT PARENTHESIS
'+' # 0x4E -> PLUS SIGN
'!' # 0x4F -> EXCLAMATION MARK
'&' # 0x50 -> AMPERSAND
'\u039a' # 0x51 -> GREEK CAPITAL LETTER KAPPA
'\u039b' # 0x52 -> GREEK CAPITAL LETTER LAMDA
'\u039c' # 0x53 -> GREEK CAPITAL LETTER MU
'\u039d' # 0x54 -> GREEK CAPITAL LETTER NU
'\u039e' # 0x55 -> GREEK CAPITAL LETTER XI
'\u039f' # 0x56 -> GREEK CAPITAL LETTER OMICRON
'\u03a0' # 0x57 -> GREEK CAPITAL LETTER PI
'\u03a1' # 0x58 -> GREEK CAPITAL LETTER RHO
'\u03a3' # 0x59 -> GREEK CAPITAL LETTER SIGMA
']' # 0x5A -> RIGHT SQUARE BRACKET
'$' # 0x5B -> DOLLAR SIGN
'*' # 0x5C -> ASTERISK
')' # 0x5D -> RIGHT PARENTHESIS
';' # 0x5E -> SEMICOLON
'^' # 0x5F -> CIRCUMFLEX ACCENT
'-' # 0x60 -> HYPHEN-MINUS
'/' # 0x61 -> SOLIDUS
'\u03a4' # 0x62 -> GREEK CAPITAL LETTER TAU
'\u03a5' # 0x63 -> GREEK CAPITAL LETTER UPSILON
'\u03a6' # 0x64 -> GREEK CAPITAL LETTER PHI
'\u03a7' # 0x65 -> GREEK CAPITAL LETTER CHI
'\u03a8' # 0x66 -> GREEK CAPITAL LETTER PSI
'\u03a9' # 0x67 -> GREEK CAPITAL LETTER OMEGA
'\u03aa' # 0x68 -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
'\u03ab' # 0x69 -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
'|' # 0x6A -> VERTICAL LINE
',' # 0x6B -> COMMA
'%' # 0x6C -> PERCENT SIGN
'_' # 0x6D -> LOW LINE
'>' # 0x6E -> GREATER-THAN SIGN
'?' # 0x6F -> QUESTION MARK
'\xa8' # 0x70 -> DIAERESIS
'\u0386' # 0x71 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
'\u0388' # 0x72 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
'\u0389' # 0x73 -> GREEK CAPITAL LETTER ETA WITH TONOS
'\xa0' # 0x74 -> NO-BREAK SPACE
'\u038a' # 0x75 -> GREEK CAPITAL LETTER IOTA WITH TONOS
'\u038c' # 0x76 -> GREEK CAPITAL LETTER OMICRON WITH TONOS
'\u038e' # 0x77 -> GREEK CAPITAL LETTER UPSILON WITH TONOS
'\u038f' # 0x78 -> GREEK CAPITAL LETTER OMEGA WITH TONOS
'`' # 0x79 -> GRAVE ACCENT
':' # 0x7A -> COLON
'#' # 0x7B -> NUMBER SIGN
'@' # 0x7C -> COMMERCIAL AT
"'" # 0x7D -> APOSTROPHE
'=' # 0x7E -> EQUALS SIGN
'"' # 0x7F -> QUOTATION MARK
'\u0385' # 0x80 -> GREEK DIALYTIKA TONOS
'a' # 0x81 -> LATIN SMALL LETTER A
'b' # 0x82 -> LATIN SMALL LETTER B
'c' # 0x83 -> LATIN SMALL LETTER C
'd' # 0x84 -> LATIN SMALL LETTER D
'e' # 0x85 -> LATIN SMALL LETTER E
'f' # 0x86 -> LATIN SMALL LETTER F
'g' # 0x87 -> LATIN SMALL LETTER G
'h' # 0x88 -> LATIN SMALL LETTER H
'i' # 0x89 -> LATIN SMALL LETTER I
'\u03b1' # 0x8A -> GREEK SMALL LETTER ALPHA
'\u03b2' # 0x8B -> GREEK SMALL LETTER BETA
'\u03b3' # 0x8C -> GREEK SMALL LETTER GAMMA
'\u03b4' # 0x8D -> GREEK SMALL LETTER DELTA
'\u03b5' # 0x8E -> GREEK SMALL LETTER EPSILON
'\u03b6' # 0x8F -> GREEK SMALL LETTER ZETA
'\xb0' # 0x90 -> DEGREE SIGN
'j' # 0x91 -> LATIN SMALL LETTER J
'k' # 0x92 -> LATIN SMALL LETTER K
'l' # 0x93 -> LATIN SMALL LETTER L
'm' # 0x94 -> LATIN SMALL LETTER M
'n' # 0x95 -> LATIN SMALL LETTER N
'o' # 0x96 -> LATIN SMALL LETTER O
'p' # 0x97 -> LATIN SMALL LETTER P
'q' # 0x98 -> LATIN SMALL LETTER Q
'r' # 0x99 -> LATIN SMALL LETTER R
'\u03b7' # 0x9A -> GREEK SMALL LETTER ETA
'\u03b8' # 0x9B -> GREEK SMALL LETTER THETA
'\u03b9' # 0x9C -> GREEK SMALL LETTER IOTA
'\u03ba' # 0x9D -> GREEK SMALL LETTER KAPPA
'\u03bb' # 0x9E -> GREEK SMALL LETTER LAMDA
'\u03bc' # 0x9F -> GREEK SMALL LETTER MU
'\xb4' # 0xA0 -> ACUTE ACCENT
'~' # 0xA1 -> TILDE
's' # 0xA2 -> LATIN SMALL LETTER S
't' # 0xA3 -> LATIN SMALL LETTER T
'u' # 0xA4 -> LATIN SMALL LETTER U
'v' # 0xA5 -> LATIN SMALL LETTER V
'w' # 0xA6 -> LATIN SMALL LETTER W
'x' # 0xA7 -> LATIN SMALL LETTER X
'y' # 0xA8 -> LATIN SMALL LETTER Y
'z' # 0xA9 -> LATIN SMALL LETTER Z
'\u03bd' # 0xAA -> GREEK SMALL LETTER NU
'\u03be' # 0xAB -> GREEK SMALL LETTER XI
'\u03bf' # 0xAC -> GREEK SMALL LETTER OMICRON
'\u03c0' # 0xAD -> GREEK SMALL LETTER PI
'\u03c1' # 0xAE -> GREEK SMALL LETTER RHO
'\u03c3' # 0xAF -> GREEK SMALL LETTER SIGMA
'\xa3' # 0xB0 -> POUND SIGN
'\u03ac' # 0xB1 -> GREEK SMALL LETTER ALPHA WITH TONOS
'\u03ad' # 0xB2 -> GREEK SMALL LETTER EPSILON WITH TONOS
'\u03ae' # 0xB3 -> GREEK SMALL LETTER ETA WITH TONOS
'\u03ca' # 0xB4 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
'\u03af' # 0xB5 -> GREEK SMALL LETTER IOTA WITH TONOS
'\u03cc' # 0xB6 -> GREEK SMALL LETTER OMICRON WITH TONOS
'\u03cd' # 0xB7 -> GREEK SMALL LETTER UPSILON WITH TONOS
'\u03cb' # 0xB8 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
'\u03ce' # 0xB9 -> GREEK SMALL LETTER OMEGA WITH TONOS
'\u03c2' # 0xBA -> GREEK SMALL LETTER FINAL SIGMA
'\u03c4' # 0xBB -> GREEK SMALL LETTER TAU
'\u03c5' # 0xBC -> GREEK SMALL LETTER UPSILON
'\u03c6' # 0xBD -> GREEK SMALL LETTER PHI
'\u03c7' # 0xBE -> GREEK SMALL LETTER CHI
'\u03c8' # 0xBF -> GREEK SMALL LETTER PSI
'{' # 0xC0 -> LEFT CURLY BRACKET
'A' # 0xC1 -> LATIN CAPITAL LETTER A
'B' # 0xC2 -> LATIN CAPITAL LETTER B
'C' # 0xC3 -> LATIN CAPITAL LETTER C
'D' # 0xC4 -> LATIN CAPITAL LETTER D
'E' # 0xC5 -> LATIN CAPITAL LETTER E
'F' # 0xC6 -> LATIN CAPITAL LETTER F
'G' # 0xC7 -> LATIN CAPITAL LETTER G
'H' # 0xC8 -> LATIN CAPITAL LETTER H
'I' # 0xC9 -> LATIN CAPITAL LETTER I
'\xad' # 0xCA -> SOFT HYPHEN
'\u03c9' # 0xCB -> GREEK SMALL LETTER OMEGA
'\u0390' # 0xCC -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
'\u03b0' # 0xCD -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
'\u2018' # 0xCE -> LEFT SINGLE QUOTATION MARK
'\u2015' # 0xCF -> HORIZONTAL BAR
'}' # 0xD0 -> RIGHT CURLY BRACKET
'J' # 0xD1 -> LATIN CAPITAL LETTER J
'K' # 0xD2 -> LATIN CAPITAL LETTER K
'L' # 0xD3 -> LATIN CAPITAL LETTER L
'M' # 0xD4 -> LATIN CAPITAL LETTER M
'N' # 0xD5 -> LATIN CAPITAL LETTER N
'O' # 0xD6 -> LATIN CAPITAL LETTER O
'P' # 0xD7 -> LATIN CAPITAL LETTER P
'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
'R' # 0xD9 -> LATIN CAPITAL LETTER R
'\xb1' # 0xDA -> PLUS-MINUS SIGN
'\xbd' # 0xDB -> VULGAR FRACTION ONE HALF
'\x1a' # 0xDC -> SUBSTITUTE
'\u0387' # 0xDD -> GREEK ANO TELEIA
'\u2019' # 0xDE -> RIGHT SINGLE QUOTATION MARK
'\xa6' # 0xDF -> BROKEN BAR
'\\' # 0xE0 -> REVERSE SOLIDUS
'\x1a' # 0xE1 -> SUBSTITUTE
'S' # 0xE2 -> LATIN CAPITAL LETTER S
'T' # 0xE3 -> LATIN CAPITAL LETTER T
'U' # 0xE4 -> LATIN CAPITAL LETTER U
'V' # 0xE5 -> LATIN CAPITAL LETTER V
'W' # 0xE6 -> LATIN CAPITAL LETTER W
'X' # 0xE7 -> LATIN CAPITAL LETTER X
'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
'\xb2' # 0xEA -> SUPERSCRIPT TWO
'\xa7' # 0xEB -> SECTION SIGN
'\x1a' # 0xEC -> SUBSTITUTE
'\x1a' # 0xED -> SUBSTITUTE
'\xab' # 0xEE -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xEF -> NOT SIGN
'0' # 0xF0 -> DIGIT ZERO
'1' # 0xF1 -> DIGIT ONE
'2' # 0xF2 -> DIGIT TWO
'3' # 0xF3 -> DIGIT THREE
'4' # 0xF4 -> DIGIT FOUR
'5' # 0xF5 -> DIGIT FIVE
'6' # 0xF6 -> DIGIT SIX
'7' # 0xF7 -> DIGIT SEVEN
'8' # 0xF8 -> DIGIT EIGHT
'9' # 0xF9 -> DIGIT NINE
'\xb3' # 0xFA -> SUPERSCRIPT THREE
'\xa9' # 0xFB -> COPYRIGHT SIGN
'\x1a' # 0xFC -> SUBSTITUTE
'\x1a' # 0xFD -> SUBSTITUTE
'\xbb' # 0xFE -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
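# Illustrative usage (a minimal sketch, not part of the generated codec file):
# per the decoding table above, byte 0xC1 decodes to 'A' and byte 0x41 decodes
# to GREEK CAPITAL LETTER ALPHA; the encoding table inverts that mapping.
if __name__ == '__main__':
    sample = b'\xc1\x41'
    text, consumed = Codec().decode(sample)   # -> ('A\u0391', 2)
    raw, written = Codec().encode(text)       # round-trips to the same bytes
    assert text == 'A\u0391' and raw == sample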
|
xuegang/gpdb
|
refs/heads/master
|
src/test/tinc/tincrepo/mpp/gpdb/tests/storage/filerep/Filerep_Resync/schema/genSchema.py
|
6
|
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tinctest
import unittest2 as unittest
from tinctest.lib import local_path
from mpp.lib.PSQL import PSQL
from gppylib.commands.base import Command
from mpp.gpdb.tests.storage.filerep.Filerep_Resync.test_filerep_resync import FilerepResyncException
'''
Schema definitions required for Filerep Resync
'''
class Schema(Command):
def __init__(self, cmd = None):
Command.__init__(self, 'running schema related command', cmd)
def create_ao_table(self):
'''Creating a table in Append-Only mode'''
tinctest.logger.info('\nCreating the appendonly table')
out = PSQL.run_sql_file(local_path('create.sql'))
return out
def check_duplicate_entries_in_PT(self):
'''Checking duplicate entries in the Persistent table'''
cmd_str = "select count(*) from gp_dist_random('gp_persistent_relation_node') where segment_file_num = 1 and relfilenode_oid = 'ta'::regclass group by relfilenode_oid, gp_segment_id;"
out = PSQL.run_sql_command(cmd_str).split('\n')[3:-3]
if len(out) == 0:
exception_msg = "ERROR!! No values returned for the duplicate entries query. " \
"Check if 'query01.sql' got executed or not"
tinctest.logger.error(exception_msg)
raise FilerepResyncException(exception_msg)
count = out[0].strip()
if int(count) > 1 :
tinctest.logger.error('Duplicate entries found in Persistent table')
return True
else:
tinctest.logger.info('Duplicate entries not found in Persistent table')
return False
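# Illustrative usage (a minimal caller-side sketch, not part of this module):
#
#   schema = Schema()
#   schema.create_ao_table()
#   if schema.check_duplicate_entries_in_PT():
#       tinctest.logger.error('Resync left duplicate persistent table entries')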
|
optiv-labs/talus_client
|
refs/heads/master
|
talus_client/cmds/corpus.py
|
1
|
#!/usr/bin/env python
# encoding: utf-8
import argparse
import arrow
import cmd
import glob
import json
import os
import re  # used for numeric filter values in do_list
import shlex
import sys
from tabulate import tabulate
import textwrap
from talus_client.cmds import TalusCmdBase
import talus_client.api
import talus_client.errors as errors  # referenced below as errors.TalusApiError
from talus_client.models import *
class CorpusCmd(TalusCmdBase):
"""The Talus job command processor
"""
command_name = "corpus"
def do_delete(self, args):
"""Delete the file specified by the provided id
"""
parts = shlex.split(args)
if len(parts) == 0:
raise errors.TalusApiError("You must provide the id of the file to delete")
file_id = parts[0]
res = self._talus_client.corpus_delete(file_id)
if "error" in res:
raise errors.TalusApiError("Could not delete file id {!r}: {}".format(
file_id,
res["error"],
))
print("deleted")
def do_get(self, args):
"""Get the file(s) from the corpus with the provided id(s), saving to the
current directory if no destination path is provided (works like cp and mv)
Example:
To fetch a single file and save it into /tmp:
corpus get 55cdcbaedd18da0008caa793 /tmp
To fetch multiple files and save to /tmp:
corpus get 55cdcbaedd18da0008caa793 55cdcbaedd18da0008caa794 55cdcbaedd18da0008caa795 /tmp
"""
parts = shlex.split(args)
if len(parts) == 0:
raise errors.TalusApiError("At least one id must be provided")
if len(parts) > 1:
dest = parts[-1]
file_ids = parts[:-1]
else:
dest = None
file_ids = [parts[0]]
if dest is None:
dest = "./"
full_dest = os.path.abspath(os.path.expanduser(dest))
# it needs to be a directory for this to work
if len(file_ids) > 1 and (not os.path.exists(full_dest) or not os.path.isdir(full_dest)):
raise errors.TalusApiError("Destination for multiple files must exist _and_ be a directory!")
for file_id in file_ids:
fname,data = self._talus_client.corpus_get(file_id)
if len(file_ids) > 1 or os.path.isdir(full_dest):
unexpanded_dest = os.path.join(dest, fname)
write_dest = os.path.join(full_dest, fname)
else:
unexpanded_dest = dest
write_dest = full_dest
with open(write_dest, "wb") as f:
f.write(data)
print("{} saved to {} ({} bytes)".format(
file_id,
unexpanded_dest,
len(data)
))
def do_list(self, args):
"""List all of the corpus files in talus. All supplied arguments will be used
to filter the results
corpus list [--SEARCH_KEY SEARCH_VAL ...] [-l]
-l,--list List only the ids (defaults to more info)
Examples:
To list all files with md5 ABCD:
corpus list --md5 ABCD
To list all files with size 1337:
corpus list --length 1337
To list all files with extra attribute urmom equal to blah:
corpus list --urmom blah
To list all files of content-type "text/plain":
corpus list --contentType text/plain
"""
parts = shlex.split(args)
filters = {}
ids_only = False
while len(parts) != 0:
key = parts[0]
if key == "-l" or key == "--list":
ids_only = True
parts = parts[1:]
continue
if key.startswith("-"):
key = key.replace("-", "")
if key not in ["md5", "id", "_id", "length", "content_type", "contentType"] and not key.startswith("metadata"):
key = "metadata." + key
value = parts[1]
if re.match(r'^[0-9]+$', value):
value = int(value)
# allow multiple values - gets treated as looking for that key's value
# to be one of the provided values. E.g. id in ["ID1", "ID2", "ID3", ...]
if key in filters:
if not isinstance(filters[key], list):
filters[key] = [filters[key]]
filters[key].append(value)
else:
filters[key] = value
parts = parts[2:]
else:
parts = parts[1:]
res = self._talus_client.corpus_list(**filters)
if ids_only:
for cfile in res:
print(cfile["_id"]["$oid"])
return
headers = ["id", "size (bytes)", "md5", "content-type", "upload date", "other attrs"]
values = []
print("{} corpus files found".format(len(res)))
for cfile in res:
# {
# u'contentType': u'text/plain',
# u'chunkSize': 261120,
# u'metadata': {u'filename': None},
# u'length': 5,
# u'uploadDate': {u'$date': 1439550357245},
# u'_id': {u'$oid': u'55cdcb95dd18da0008caa791'},
# u'md5': u'0d599f0ec05c3bda8c3b8a68c32a1b47'
#}
values.append([
cfile["_id"]["$oid"],
cfile["length"],
cfile["md5"],
cfile["contentType"],
arrow.get(cfile["uploadDate"]["$date"]/1000.0).humanize(),
" ".join("{}={}".format(k,v) for k,v in cfile["metadata"].iteritems())
])
print(tabulate(values, headers=headers))
def do_upload(self, args):
"""Upload a file into the talus corpus
corpus upload FILE_PATHS [--attr1 value1 [--attr2 value2] ...]
Examples:
To upload all files named "*.swf" with extra attribute tag being set to
"cool games", do:
corpus upload *.swf --tag "cool games"
"""
parts = shlex.split(args)
files = []
extra_attrs = {}
while len(parts) != 0:
key = parts[0]
if key.startswith("-"):
key = key.replace("-", "")
value = parts[1]
extra_attrs[key] = value
parts = parts[2:]
else:
files += glob.glob(parts[0])
# it's a file name/glob expression
parts = parts[1:]
uploaded_ids = []
for file_path in files:
file_path = os.path.abspath(file_path)
corpus_id = self._talus_client.corpus_upload(file_path, **extra_attrs)
uploaded_ids.append(corpus_id)
print("{} - {}".format(corpus_id, file_path))
|
taiansu/arguman.org
|
refs/heads/master
|
web/premises/migrations/0009_merge.py
|
7
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('premises', '0008_auto_20141023_0133'),
('premises', '0008_auto_20141022_1951'),
]
operations = [
]
|
hendradarwin/ITK
|
refs/heads/master
|
Wrapping/Generators/Python/Tests/template.py
|
11
|
#==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
import itk
dim = 2
PType = itk.UC
# check the repr string
assert "<itkTemplate itk::Image>" == repr(itk.Image)
# template should work with CType instance and with numbers
IType = itk.Image[PType, dim]
# template should return the same class with a class as parameter
# or with an object of this class, and should also be the same
# with the attribute
# create instances of image for the next tests
im = IType.New()
im2 = IType.New()
readerType = itk.ImageFileReader[IType]
readerType2 = itk.ImageFileReader[im]
readerType3 = itk.ImageFileReader.IUC2
assert readerType == readerType2 == readerType3
# we should be able to get the template and its parameters from the class
(tpl, parameters) = itk.template(IType)
assert tpl == itk.Image
assert parameters == (PType, dim)
# the template must raise a KeyError exception if the template parameter
# is unknown
try:
itk.ImageFileReader['unknown parameter']
raise Exception('no exception sent for unknown parameter')
except KeyError:
pass
# TODO: test the rest of the dict interface
# TODO: test __eq__, __ne__ and __hash__
# something else ?
# now test the New method
# without parameter
reader = readerType.New()
reader2 = readerType.New()
# with an attribute parameter
reader = readerType.New(FileName='test.png')
assert reader.GetFileName() == 'test.png'
# with a wrong attribute name
try:
reader = readerType.New(WrongName='test.png')
raise Exception('no exception sent for wrong attribute name')
except AttributeError:
pass
# with a wrong attribute type
try:
reader = readerType.New(FileName=1)
raise Exception('no exception sent for wrong attribute type')
except:
pass
# pass filter as argument for input
# to a filter with SetInput method
median = itk.MedianImageFilter[IType, IType].New(reader)
assert reader.GetOutput() == median.GetInput()
# to a filter with a SetImage method
calculator = itk.MinimumMaximumImageCalculator[IType].New(reader)
# no GetImage() method here to verify it's the right image
# to a filter with several inputs
sub = itk.SubtractImageFilter[IType, IType, IType].New(reader, reader2)
assert reader.GetOutput() == sub.GetInput(0)
assert reader2.GetOutput() == sub.GetInput(1)
# pass image as argument for input
# to a filter with SetInput method
median = itk.MedianImageFilter[IType, IType].New(im)
assert im == median.GetInput()
# to a filter with a SetImage method
calculator = itk.MinimumMaximumImageCalculator[IType].New(im)
# no GetImage() method here to verify it's the right image
# to a filter with several inputs
sub = itk.SubtractImageFilter[IType, IType, IType].New(im, im2)
assert im == sub.GetInput(0)
assert im2 == sub.GetInput(1)
# pass invalid input
try:
itk.MedianImageFilter[IType, IType].New(1)
raise Exception('no exception sent for wrong input type')
except:
pass
try:
itk.SubtractImageFilter[IType, IType, IType].New(im, "wrong")
raise Exception('no exception sent for wrong 2nd input type')
except TypeError:
pass
# pass both input and attribute
recons = itk.ReconstructionByDilationImageFilter[
IType, IType].New(reader.GetOutput(), im, FullyConnected=True)
assert reader.GetOutput() == recons.GetInput(0)
assert im == recons.GetInput(1)
assert recons.GetFullyConnected()
# pass input to an object which does not take one
try:
IType.New(im)
raise Exception('no exception sent for object without input')
except AttributeError:
pass
# TODO: test auto_progress
# but how ?
# something else ?
|
shinsterneck/pdns
|
refs/heads/feature-geosql-backend
|
regression-tests.recursor-dnssec/test_NTA.py
|
9
|
import dns
from recursortests import RecursorTest
class testSimple(RecursorTest):
_confdir = 'NTA'
_config_template = """dnssec=validate"""
_lua_config_file = """addNTA("bogus.example")
addNTA('secure.optout.example', 'Should be Insecure, even with DS configured')
addTA('secure.optout.example', '64215 13 1 b88284d7a8d8605c398e8942262f97b9a5a31787')"""
def testDirectNTA(self):
"""Ensure a direct query to a bogus name with an NTA is Insecure"""
msg = dns.message.make_query("ted.bogus.example.", dns.rdatatype.A)
msg.flags = dns.flags.from_text('AD RD')
msg.use_edns(edns=0, ednsflags=dns.flags.edns_from_text('DO'))
res = self.sendUDPQuery(msg)
self.assertMessageHasFlags(res, ['QR', 'RA', 'RD'], ['DO'])
self.assertRcodeEqual(res, dns.rcode.NOERROR)
def testCNAMENTA(self):
"""Ensure a CNAME from a secure zone to a bogus one with an NTA is Insecure"""
msg = dns.message.make_query("cname-to-bogus.secure.example.", dns.rdatatype.A)
msg.flags = dns.flags.from_text('AD RD')
msg.use_edns(edns=0, ednsflags=dns.flags.edns_from_text('DO'))
res = self.sendUDPQuery(msg)
self.assertMessageHasFlags(res, ['QR', 'RA', 'RD'], ['DO'])
self.assertRcodeEqual(res, dns.rcode.NOERROR)
def testSecureWithNTAandDS(self):
"""#4391: when there is a TA *and* NTA configured for a name, the result must be insecure"""
msg = dns.message.make_query("node1.secure.optout.example.", dns.rdatatype.A)
msg.flags = dns.flags.from_text('AD RD')
msg.use_edns(edns=0, ednsflags=dns.flags.edns_from_text('DO'))
res = self.sendUDPQuery(msg)
self.assertMessageHasFlags(res, ['QR', 'RA', 'RD'], ['DO'])
self.assertRcodeEqual(res, dns.rcode.NOERROR)
|
persandstrom/home-assistant
|
refs/heads/master
|
homeassistant/components/cover/myq.py
|
1
|
"""
Support for MyQ-Enabled Garage Doors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/cover.myq/
"""
import logging
import voluptuous as vol
from homeassistant.components.cover import (
CoverDevice, SUPPORT_CLOSE, SUPPORT_OPEN)
from homeassistant.const import (
CONF_PASSWORD, CONF_TYPE, CONF_USERNAME, STATE_CLOSED, STATE_OPEN,
STATE_CLOSING, STATE_OPENING)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pymyq==0.0.15']
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'myq'
MYQ_TO_HASS = {
'closed': STATE_CLOSED,
'open': STATE_OPEN,
'closing': STATE_CLOSING,
'opening': STATE_OPENING
}
NOTIFICATION_ID = 'myq_notification'
NOTIFICATION_TITLE = 'MyQ Cover Setup'
COVER_SCHEMA = vol.Schema({
vol.Required(CONF_TYPE): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string
})
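# Example configuration.yaml entry matching the schema above (illustrative;
# values are placeholders, and 'type' is the brand name expected by pymyq,
# e.g. chamberlain or liftmaster -- see the linked documentation):
#
#   cover:
#     - platform: myq
#       type: chamberlain
#       username: YOUR_USERNAME
#       password: YOUR_PASSWORD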
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the MyQ component."""
from pymyq import MyQAPI as pymyq
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
brand = config.get(CONF_TYPE)
myq = pymyq(username, password, brand)
try:
if not myq.is_supported_brand():
raise ValueError("Unsupported type. See documentation")
if not myq.is_login_valid():
raise ValueError("Username or Password is incorrect")
add_entities(MyQDevice(myq, door) for door in myq.get_garage_doors())
return True
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
hass.components.persistent_notification.create(
'Error: {}<br />'
'You will need to restart hass after fixing.'
''.format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID)
return False
class MyQDevice(CoverDevice):
"""Representation of a MyQ cover."""
def __init__(self, myq, device):
"""Initialize with API object, device id."""
self.myq = myq
self.device_id = device['deviceid']
self._name = device['name']
self._status = STATE_CLOSED
@property
def device_class(self):
"""Define this cover as a garage door."""
return 'garage'
@property
def should_poll(self):
"""Poll for state."""
return True
@property
def name(self):
"""Return the name of the garage door if any."""
return self._name if self._name else DEFAULT_NAME
@property
def is_closed(self):
"""Return true if cover is closed, else False."""
return MYQ_TO_HASS[self._status] == STATE_CLOSED
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return MYQ_TO_HASS[self._status] == STATE_CLOSING
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return MYQ_TO_HASS[self._status] == STATE_OPENING
def close_cover(self, **kwargs):
"""Issue close command to cover."""
self.myq.close_device(self.device_id)
def open_cover(self, **kwargs):
"""Issue open command to cover."""
self.myq.open_device(self.device_id)
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_OPEN | SUPPORT_CLOSE
@property
def unique_id(self):
"""Return a unique, HASS-friendly identifier for this entity."""
return self.device_id
def update(self):
"""Update status of cover."""
self._status = self.myq.get_status(self.device_id)
|
ging/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/hypervisors/tests.py
|
3
|
# Copyright 2013 B1 Systems GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class HypervisorViewTest(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('extension_supported',
'hypervisor_list',
'hypervisor_stats',
'service_list')})
def test_index(self):
hypervisors = self.hypervisors.list()
services = self.services.list()
stats = self.hypervisors.stats
api.nova.extension_supported('AdminActions',
IsA(http.HttpRequest)) \
.MultipleTimes().AndReturn(True)
api.nova.hypervisor_list(IsA(http.HttpRequest)).AndReturn(hypervisors)
api.nova.hypervisor_stats(IsA(http.HttpRequest)).AndReturn(stats)
api.nova.service_list(IsA(http.HttpRequest)).AndReturn(services)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:admin:hypervisors:index'))
self.assertTemplateUsed(res, 'admin/hypervisors/index.html')
hypervisors_tab = res.context['tab_group'].get_tab('hypervisor')
self.assertItemsEqual(hypervisors_tab._tables['hypervisors'].data,
hypervisors)
host_tab = res.context['tab_group'].get_tab('compute_host')
host_table = host_tab._tables['compute_host']
compute_services = [service for service in services
if service.binary == 'nova-compute']
self.assertItemsEqual(host_table.data, compute_services)
actions_host_up = host_table.get_row_actions(host_table.data[0])
self.assertEqual(0, len(actions_host_up))
actions_host_down = host_table.get_row_actions(host_table.data[1])
self.assertEqual(1, len(actions_host_down))
self.assertEqual('evacuate', actions_host_down[0].name)
class HypervisorDetailViewTest(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('hypervisor_search',)})
def test_index(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
api.nova.hypervisor_search(
IsA(http.HttpRequest), hypervisor).AndReturn([])
self.mox.ReplayAll()
url = reverse('horizon:admin:hypervisors:detail', args=[hypervisor])
res = self.client.get(url)
self.assertTemplateUsed(res, 'admin/hypervisors/detail.html')
self.assertItemsEqual(res.context['table'].data, [])
|
hudl/Tyr
|
refs/heads/master
|
tyr/servers/iis/__init__.py
|
1
|
from node import IISNode
|
sloww/cntslinkgit
|
refs/heads/master
|
app_printer/migrations/0005_auto_20150531_1544.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('app_printer', '0004_auto_20150531_0104'),
]
operations = [
migrations.AlterField(
model_name='printer',
name='resolution',
field=models.CharField(max_length=3, choices=[('200', '200'), ('300', '300'), ('400', '400'), ('600', '600')]),
),
]
|
dkubiak789/odoo
|
refs/heads/8.0
|
addons/report/tests/__init__.py
|
456
|
import test_reports
|
mgedmin/ansible
|
refs/heads/devel
|
lib/ansible/plugins/callback/default.py
|
9
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.plugins.callback import CallbackBase
from ansible.utils.color import colorize, hostcolor
class CallbackModule(CallbackBase):
'''
This is the default callback interface, which simply prints messages
to stdout when new callback events are received.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'default'
def __init__(self):
self._play = None
self._last_task_banner = None
super(CallbackModule, self).__init__()
def v2_runner_on_failed(self, result, ignore_errors=False):
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if 'exception' in result._result:
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result._result['exception'].strip().split('\n')[-1]
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
self._display.display(msg, color=C.COLOR_ERROR)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
if delegated_vars:
self._display.display("fatal: [%s -> %s]: FAILED! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color=C.COLOR_ERROR)
else:
self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_ERROR)
if result._task.ignore_errors:
self._display.display("...ignoring", color=C.COLOR_SKIP)
def v2_runner_on_ok(self, result):
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
self._clean_results(result._result, result._task.action)
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if result._task.action in ('include', 'include_role'):
return
elif result._result.get('changed', False):
if delegated_vars:
msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "changed: [%s]" % result._host.get_name()
color = C.COLOR_CHANGED
else:
if delegated_vars:
msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "ok: [%s]" % result._host.get_name()
color = C.COLOR_OK
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
msg += " => %s" % (self._dump_results(result._result),)
self._display.display(msg, color=color)
self._handle_warnings(result._result)
def v2_runner_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
msg = "skipping: [%s]" % result._host.get_name()
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
def v2_runner_on_unreachable(self, result):
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if delegated_vars:
self._display.display("fatal: [%s -> %s]: UNREACHABLE! => %s" % (result._host.get_name(), delegated_vars['ansible_host'], self._dump_results(result._result)), color=C.COLOR_UNREACHABLE)
else:
self._display.display("fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_UNREACHABLE)
def v2_playbook_on_no_hosts_matched(self):
self._display.display("skipping: no hosts matched", color=C.COLOR_SKIP)
def v2_playbook_on_no_hosts_remaining(self):
self._display.banner("NO MORE HOSTS LEFT")
def v2_playbook_on_task_start(self, task, is_conditional):
if self._play.strategy != 'free':
self._print_task_banner(task)
def _print_task_banner(self, task):
# args can be specified as no_log in several places: in the task or in
# the argument spec. We can check whether the task is no_log but the
# argument spec can't be because that is only run on the target
        # machine and we haven't run it there yet at this time.
#
# So we give people a config option to affect display of the args so
# that they can secure this if they feel that their stdout is insecure
# (shoulder surfing, logging stdout straight to a file, etc).
args = ''
if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
args = u', '.join(u'%s=%s' % a for a in task.args.items())
args = u' %s' % args
self._display.banner(u"TASK [%s%s]" % (task.get_name().strip(), args))
if self._display.verbosity >= 2:
path = task.get_path()
if path:
self._display.display(u"task path: %s" % path, color=C.COLOR_DEBUG)
self._last_task_banner = task._uuid
def v2_playbook_on_cleanup_task_start(self, task):
self._display.banner("CLEANUP TASK [%s]" % task.get_name().strip())
def v2_playbook_on_handler_task_start(self, task):
self._display.banner("RUNNING HANDLER [%s]" % task.get_name().strip())
def v2_playbook_on_play_start(self, play):
name = play.get_name().strip()
if not name:
msg = u"PLAY"
else:
msg = u"PLAY [%s]" % name
self._play = play
self._display.banner(msg)
def v2_on_file_diff(self, result):
if result._task.loop and 'results' in result._result:
for res in result._result['results']:
if 'diff' in res and res['diff'] and res.get('changed', False):
diff = self._get_diff(res['diff'])
if diff:
self._display.display(diff)
elif 'diff' in result._result and result._result['diff'] and result._result.get('changed', False):
diff = self._get_diff(result._result['diff'])
if diff:
self._display.display(diff)
def v2_runner_item_on_ok(self, result):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if result._task.action in ('include', 'include_role'):
return
elif result._result.get('changed', False):
msg = 'changed'
color = C.COLOR_CHANGED
else:
msg = 'ok'
color = C.COLOR_OK
if delegated_vars:
msg += ": [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg += ": [%s]" % result._host.get_name()
msg += " => (item=%s)" % (self._get_item(result._result),)
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=color)
def v2_runner_item_on_failed(self, result):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if 'exception' in result._result:
if self._display.verbosity < 3:
# extract just the actual error message from the exception text
error = result._result['exception'].strip().split('\n')[-1]
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception']
self._display.display(msg, color=C.COLOR_ERROR)
msg = "failed: "
if delegated_vars:
msg += "[%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg += "[%s]" % (result._host.get_name())
self._display.display(msg + " (item=%s) => %s" % (self._get_item(result._result), self._dump_results(result._result)), color=C.COLOR_ERROR)
self._handle_warnings(result._result)
def v2_runner_item_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), self._get_item(result._result))
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
def v2_playbook_on_include(self, included_file):
msg = 'included: %s for %s' % (included_file._filename, ", ".join([h.name for h in included_file._hosts]))
self._display.display(msg, color=C.COLOR_SKIP)
def v2_playbook_on_stats(self, stats):
self._display.banner("PLAY RECAP")
hosts = sorted(stats.processed.keys())
for h in hosts:
t = stats.summarize(h)
self._display.display(u"%s : %s %s %s %s" % (
hostcolor(h, t),
colorize(u'ok', t['ok'], C.COLOR_OK),
colorize(u'changed', t['changed'], C.COLOR_CHANGED),
colorize(u'unreachable', t['unreachable'], C.COLOR_UNREACHABLE),
colorize(u'failed', t['failures'], C.COLOR_ERROR)),
screen_only=True
)
self._display.display(u"%s : %s %s %s %s" % (
hostcolor(h, t, False),
colorize(u'ok', t['ok'], None),
colorize(u'changed', t['changed'], None),
colorize(u'unreachable', t['unreachable'], None),
colorize(u'failed', t['failures'], None)),
log_only=True
)
self._display.display("", screen_only=True)
def v2_playbook_on_start(self, playbook):
if self._display.verbosity > 1:
from os.path import basename
self._display.banner("PLAYBOOK: %s" % basename(playbook._file_name))
if self._display.verbosity > 3:
if self._options is not None:
for option in dir(self._options):
if option.startswith('_') or option in ['read_file', 'ensure_value', 'read_module']:
continue
val = getattr(self._options,option)
if val:
self._display.vvvv('%s: %s' % (option,val))
def v2_runner_retry(self, result):
msg = "FAILED - RETRYING: %s (%d retries left)." % (result._task, result._result['retries'] - result._result['attempts'])
if (self._display.verbosity > 2 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
msg += "Result was: %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_DEBUG)
|
Andriy963/e-olimp.com
|
refs/heads/master
|
1482.py
|
2
|
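# e-olimp problem 1482: matrix multiplication. Reads matrix A (an x am) and
# matrix B (bn x bm); if the inner dimensions agree (am == bn) it prints the
# dimensions and rows of the product C = A * B, otherwise it prints -1.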
an,am = [int(x) for x in input().split()]
A = [[int(x) for x in input().split()] for i in range(an)]
bn,bm = [int(x) for x in input().split()]
B = [[int(x) for x in input().split()] for i in range(bn)]
if am == bn:
C = [[str(sum(A[i][g]*B[g][j] for g in range(am))) for j in range(bm)] for i in range(an)]
print(an, bm)
for line in C:
print(" ".join(line))
else:
print(-1)
|
kazcw/bitcoin
|
refs/heads/master
|
test/functional/mempool_accept.py
|
11
|
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool acceptance of raw transactions."""
from io import BytesIO
from test_framework.test_framework import BitcoinTestFramework
from test_framework.messages import (
BIP125_SEQUENCE_NUMBER,
COIN,
COutPoint,
CTransaction,
CTxOut,
MAX_BLOCK_BASE_SIZE,
)
from test_framework.script import (
hash160,
CScript,
OP_0,
OP_EQUAL,
OP_HASH160,
OP_RETURN,
)
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
bytes_to_hex_str,
hex_str_to_bytes,
wait_until,
)
class MempoolAcceptanceTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[
'-txindex',
'-reindex', # Need reindex for txindex
'-acceptnonstdtxn=0', # Try to mimic main-net
]] * self.num_nodes
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def check_mempool_result(self, result_expected, *args, **kwargs):
"""Wrapper to check result of testmempoolaccept on node_0's mempool"""
result_test = self.nodes[0].testmempoolaccept(*args, **kwargs)
assert_equal(result_expected, result_test)
assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size) # Must not change mempool state
def run_test(self):
node = self.nodes[0]
self.log.info('Start with empty mempool, and 200 blocks')
self.mempool_size = 0
wait_until(lambda: node.getblockcount() == 200)
assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
self.log.info('Should not accept garbage to testmempoolaccept')
assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22']))
assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))
self.log.info('A transaction already in the blockchain')
coin = node.listunspent()[0] # Pick a random coin(base) to spend
raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{'txid': coin['txid'], 'vout': coin['vout']}],
outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
))['hex']
txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, allowhighfees=True)
node.generate(1)
self.check_mempool_result(
result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': '18: txn-already-known'}],
rawtxs=[raw_tx_in_block],
)
self.log.info('A transaction not in the mempool')
fee = 0.00000700
raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}], # RBF is used later
outputs=[{node.getnewaddress(): 0.3 - fee}],
))['hex']
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
txid_0 = tx.rehash()
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': True}],
rawtxs=[raw_tx_0],
)
self.log.info('A transaction in the mempool')
node.sendrawtransaction(hexstring=raw_tx_0)
self.mempool_size = 1
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': '18: txn-already-in-mempool'}],
rawtxs=[raw_tx_0],
)
self.log.info('A transaction that replaces a mempool transaction')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vout[0].nValue -= int(fee * COIN) # Double the fee
tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1 # Now, opt out of RBF
raw_tx_0 = node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
txid_0 = tx.rehash()
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': True}],
rawtxs=[raw_tx_0],
)
self.log.info('A transaction that conflicts with an unconfirmed tx')
# Send the transaction that replaces the mempool transaction and opts out of replaceability
node.sendrawtransaction(hexstring=bytes_to_hex_str(tx.serialize()), allowhighfees=True)
# take original raw_tx_0
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vout[0].nValue -= int(4 * fee * COIN) # Set more fee
# skip re-signing the tx
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '18: txn-mempool-conflict'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
allowhighfees=True,
)
self.log.info('A transaction with missing inputs, that never existed')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
# skip re-signing the tx
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A transaction with missing inputs, that existed once in the past')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
tx.vin[0].prevout.n = 1 # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend
raw_tx_1 = node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, allowhighfees=True)
# Now spend both to "clearly hide" the outputs, ie. remove the coins from the utxo set by spending them
raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[
{'txid': txid_0, 'vout': 0},
{'txid': txid_1, 'vout': 0},
],
outputs=[{node.getnewaddress(): 0.1}]
))['hex']
txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, allowhighfees=True)
node.generate(1)
self.mempool_size = 0
# Now see if we can add the coins back to the utxo set by sending the exact txs again
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[raw_tx_0],
)
self.check_mempool_result(
result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[raw_tx_1],
)
self.log.info('Create a signed "reference" tx for later use')
raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{'txid': txid_spend_both, 'vout': 0}],
outputs=[{node.getnewaddress(): 0.05}],
))['hex']
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
# Reference tx should be valid on itself
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': True}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A transaction with no outputs')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout = []
# Skip re-signing the transaction for context independent checks from now on
# tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex'])))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-empty'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A really large transaction')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin = [tx.vin[0]] * (MAX_BLOCK_BASE_SIZE // len(tx.vin[0].serialize()))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-oversize'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A transaction with negative output value')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].nValue *= -1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-negative'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A transaction with too large output value')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].nValue = 21000000 * COIN + 1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-toolarge'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A transaction with too large sum of output values')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout = [tx.vout[0]] * 2
tx.vout[0].nValue = 21000000 * COIN
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-txouttotal-toolarge'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A transaction with duplicate inputs')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin = [tx.vin[0]] * 2
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-inputs-duplicate'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A coinbase transaction')
# Pick the input of the first tx we signed, so it has to be a coinbase tx
raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent)))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: coinbase'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('Some nonstandard transactions')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.nVersion = 3 # A version currently non-standard
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: version'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].scriptPubKey = CScript([OP_0]) # Some non-standard script
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptpubkey'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin[0].scriptSig = CScript([OP_HASH160]) # Some not-pushonly scriptSig
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptsig-not-pushonly'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
num_scripts = 100000 // len(output_p2sh_burn.serialize()) # Use enough outputs to make the tx too large for our policy
tx.vout = [output_p2sh_burn] * num_scripts
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: tx-size'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0] = output_p2sh_burn
tx.vout[0].nValue -= 1 # Make output smaller, such that it is dust for our policy
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: dust'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
tx.vout = [tx.vout[0]] * 2
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: multi-op-return'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A timelocked transaction')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin[0].nSequence -= 1 # Should be non-max, so locktime is not ignored
tx.nLockTime = node.getblockcount() + 1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: non-final'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
)
self.log.info('A transaction that is locked by BIP68 sequence logic')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
tx.vin[0].nSequence = 2 # We could include it in the second block mined from now, but not the very next one
# Can skip re-signing the tx because of early rejection
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: non-BIP68-final'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
allowhighfees=True,
)
if __name__ == '__main__':
MempoolAcceptanceTest().main()
|
adit-chandra/tensorflow
|
refs/heads/master
|
tensorflow/python/autograph/pyct/errors.py
|
20
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code transformation exceptions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class PyCTError(Exception):
"""Base class for all exceptions."""
pass
class UnsupportedLanguageElementError(PyCTError, NotImplementedError):
"""Raised for code patterns that AutoGraph does not support."""
pass
|
CollinsIchigo/hdx_2
|
refs/heads/master
|
venv/lib/python2.7/site-packages/requests/packages/chardet/universaldetector.py
|
1775
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
from .escprober import EscCharSetProber  # ISO-2022, etc.
import re
MINIMUM_THRESHOLD = 0.20
ePureAscii = 0
eEscAscii = 1
eHighbyte = 2
class UniversalDetector:
def __init__(self):
self._highBitDetector = re.compile(b'[\x80-\xFF]')
self._escDetector = re.compile(b'(\033|~{)')
self._mEscCharSetProber = None
self._mCharSetProbers = []
self.reset()
def reset(self):
self.result = {'encoding': None, 'confidence': 0.0}
self.done = False
self._mStart = True
self._mGotData = False
self._mInputState = ePureAscii
self._mLastChar = b''
if self._mEscCharSetProber:
self._mEscCharSetProber.reset()
for prober in self._mCharSetProbers:
prober.reset()
def feed(self, aBuf):
if self.done:
return
aLen = len(aBuf)
if not aLen:
return
if not self._mGotData:
# If the data starts with BOM, we know it is UTF
if aBuf[:3] == codecs.BOM_UTF8:
# EF BB BF UTF-8 with BOM
self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
elif aBuf[:4] == codecs.BOM_UTF32_LE:
# FF FE 00 00 UTF-32, little-endian BOM
self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
elif aBuf[:4] == codecs.BOM_UTF32_BE:
# 00 00 FE FF UTF-32, big-endian BOM
self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
elif aBuf[:4] == b'\xFE\xFF\x00\x00':
# FE FF 00 00 UCS-4, unusual octet order BOM (3412)
self.result = {
'encoding': "X-ISO-10646-UCS-4-3412",
'confidence': 1.0
}
elif aBuf[:4] == b'\x00\x00\xFF\xFE':
# 00 00 FF FE UCS-4, unusual octet order BOM (2143)
self.result = {
'encoding': "X-ISO-10646-UCS-4-2143",
'confidence': 1.0
}
elif aBuf[:2] == codecs.BOM_LE:
# FF FE UTF-16, little endian BOM
self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
elif aBuf[:2] == codecs.BOM_BE:
# FE FF UTF-16, big endian BOM
self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
self._mGotData = True
if self.result['encoding'] and (self.result['confidence'] > 0.0):
self.done = True
return
if self._mInputState == ePureAscii:
if self._highBitDetector.search(aBuf):
self._mInputState = eHighbyte
elif ((self._mInputState == ePureAscii) and
self._escDetector.search(self._mLastChar + aBuf)):
self._mInputState = eEscAscii
self._mLastChar = aBuf[-1:]
if self._mInputState == eEscAscii:
if not self._mEscCharSetProber:
self._mEscCharSetProber = EscCharSetProber()
if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
'confidence': self._mEscCharSetProber.get_confidence()}
self.done = True
elif self._mInputState == eHighbyte:
if not self._mCharSetProbers:
self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
Latin1Prober()]
for prober in self._mCharSetProbers:
if prober.feed(aBuf) == constants.eFoundIt:
self.result = {'encoding': prober.get_charset_name(),
'confidence': prober.get_confidence()}
self.done = True
break
def close(self):
if self.done:
return
if not self._mGotData:
if constants._debug:
sys.stderr.write('no data received!\n')
return
self.done = True
if self._mInputState == ePureAscii:
self.result = {'encoding': 'ascii', 'confidence': 1.0}
return self.result
if self._mInputState == eHighbyte:
proberConfidence = None
maxProberConfidence = 0.0
maxProber = None
for prober in self._mCharSetProbers:
if not prober:
continue
proberConfidence = prober.get_confidence()
if proberConfidence > maxProberConfidence:
maxProberConfidence = proberConfidence
maxProber = prober
if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
self.result = {'encoding': maxProber.get_charset_name(),
'confidence': maxProber.get_confidence()}
return self.result
if constants._debug:
            sys.stderr.write('no probers hit minimum threshold\n')
for prober in self._mCharSetProbers[0].mProbers:
if not prober:
continue
sys.stderr.write('%s confidence = %s\n' %
(prober.get_charset_name(),
prober.get_confidence()))
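# Minimal usage sketch: feed the detector raw bytes in chunks, then call
# close() and read the result dict. The input path 'sample.bin' is
# hypothetical and only serves to illustrate the feed/close cycle.
if __name__ == '__main__':
    detector = UniversalDetector()
    with open('sample.bin', 'rb') as fh:  # hypothetical input file
        for chunk in iter(lambda: fh.read(4096), b''):
            detector.feed(chunk)
            if detector.done:
                break
    detector.close()
    # e.g. {'encoding': 'UTF-8-SIG', 'confidence': 1.0}
    print(detector.result)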
|
oceanobservatories/mi-instrument
|
refs/heads/master
|
mi/dataset/driver/dosta_abcdjm/ctdbp_p/dcl/dosta_abcdjm_ctdbp_p_dcl_telemetered_driver.py
|
7
|
#!/usr/bin/env python
"""
@package mi.dataset.driver.dosta_abcdjm.ctdbp_p.dcl
@file mi-dataset/mi/dataset/driver/dosta_abcdjm/ctdbp_p/dcl/dosta_abcdjm_ctdbp_p_dcl_telemetered_driver.py
@author Jeff Roy
@brief Driver for the dosta_abcdjm attached to the ctdbp_p_dcl instrument (Telemetered Data)
Release notes:
Initial Release
"""
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.ctdbp_p_dcl import CtdbpPDclCommonParser
from mi.core.versioning import version
MODULE_NAME = 'mi.dataset.parser.ctdbp_p_dcl'
DOSTA_TELEM_CONFIG = {
DataSetDriverConfigKeys.PARTICLE_MODULE: MODULE_NAME,
DataSetDriverConfigKeys.PARTICLE_CLASS: 'DostaAbcdjmCtdbpPDclTelemeteredDataParticle'
}
@version("15.6.1")
def parse(unused, source_file_path, particle_data_handler):
"""
This is the method called by Uframe
:param unused
:param source_file_path This is the full path and filename of the file to be parsed
:param particle_data_handler Java Object to consume the output of the parser
:return particle_data_handler
"""
with open(source_file_path, 'rU') as stream_handle:
# create an instance of the concrete driver class defined below
driver = DostaAbcdjmCtdbpPDclTelemeteredDriver(unused, stream_handle, particle_data_handler)
driver.processFileStream()
return particle_data_handler
class DostaAbcdjmCtdbpPDclTelemeteredDriver(SimpleDatasetDriver):
"""
Derived ctdbp_p_dcl driver class
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
# The parser inherits from simple parser - other callbacks not needed here
parser = CtdbpPDclCommonParser(DOSTA_TELEM_CONFIG,
stream_handle,
self._exception_callback)
return parser
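# Illustrative call sketch only: outside of uFrame, parse() would be handed a
# particle-data-handler object and a path to a DCL log file. The
# ParticleDataHandler import and the file path below are assumptions, not
# something this module guarantees.
#
#     from mi.dataset.dataset_driver import ParticleDataHandler  # assumed helper
#     handler = parse(None, '/path/to/dosta_ctdbp_p_dcl.log', ParticleDataHandler())
#     # 'handler' now carries whatever particles the parser produced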
|
kantel/processingpy
|
refs/heads/master
|
sketches/shaunsim5/grass.py
|
2
|
# coding=utf-8
from config import Settings
s = Settings()
class Grass():
def __init__(self, x, y, sz):
self.x = x
self.y = y
self.energy = 5
self.eaten = False
self.sz = sz
def update(self):
noStroke()
if self.eaten:
if random(1000) < 5:
self.eaten = False
else:
fill(s.BROWN)
else:
fill(s.GREEN)
rect(self.x, self.y, self.sz, self.sz)
|
johscheuer/calico-docker
|
refs/heads/master
|
calico_containers/calico_ctl/node.py
|
3
|
# Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Usage:
calicoctl node [--ip=<IP>] [--ip6=<IP6>] [--node-image=<DOCKER_IMAGE_NAME>] [--as=<AS_NUM>] [--log-dir=<LOG_DIR>] [--detach=<DETACH>] [--kubernetes] [--rkt] [--libnetwork]
calicoctl node stop [--force]
calicoctl node bgp peer add <PEER_IP> as <AS_NUM>
calicoctl node bgp peer remove <PEER_IP>
calicoctl node bgp peer show [--ipv4 | --ipv6]
Description:
Configure the main calico/node container as well as default BGP information
for this node.
Options:
--force Stop the node process even if it has active endpoints.
--node-image=<DOCKER_IMAGE_NAME> Docker image to use for Calico's per-node
container. Default is calico/node:latest.
Default for Calico with libnetwork is
calico/node-libnetwork:latest.
--detach=<DETACH> Set "true" to run Calico service as detached,
"false" to run in the foreground. [default: true]
--log-dir=<LOG_DIR> The directory for logs [default: /var/log/calico]
--ip=<IP> The local management address to use.
--ip6=<IP6> The local IPv6 management address to use.
--as=<AS_NUM> The default AS number for this node.
--ipv4 Show IPv4 information only.
--ipv6 Show IPv6 information only.
--kubernetes Download and install the kubernetes plugin.
--rkt Download and install the rkt plugin.
--libnetwork Use the libnetwork plugin.
"""
import sys
import os
import stat
import docker
import socket
import urllib
import signal
from pycalico.datastore_datatypes import IPPool
from pycalico.datastore_datatypes import BGPPeer
from pycalico.datastore import (ETCD_AUTHORITY_ENV,
ETCD_AUTHORITY_DEFAULT)
from pycalico.util import get_host_ips
from netaddr import IPAddress
from prettytable import PrettyTable
from connectors import client
from connectors import docker_client
from utils import DOCKER_ORCHESTRATOR_ID
from utils import hostname
from utils import print_paragraph
from utils import get_container_ipv_from_arguments
from utils import validate_ip, validate_asn, convert_asn_to_asplain
from checksystem import check_system
DEFAULT_IPV4_POOL = IPPool("192.168.0.0/16")
DEFAULT_IPV6_POOL = IPPool("fd80:24e2:f998:72d6::/64")
POLICY_ONLY_ENV = "POLICY_ONLY_CALICO"
KUBERNETES_PLUGIN_VERSION = 'v0.1.0'
KUBERNETES_BINARY_URL = 'https://github.com/projectcalico/calico-kubernetes/releases/download/%s/calico_kubernetes' % KUBERNETES_PLUGIN_VERSION
KUBERNETES_PLUGIN_DIR = '/usr/libexec/kubernetes/kubelet-plugins/net/exec/calico/'
KUBERNETES_PLUGIN_DIR_BACKUP = '/etc/kubelet-plugins/calico/'
RKT_PLUGIN_VERSION = 'v0.1.0'
RKT_BINARY_URL = 'https://github.com/projectcalico/calico-rkt/releases/download/%s/calico_rkt' % RKT_PLUGIN_VERSION
RKT_PLUGIN_DIR = '/usr/lib/rkt/plugins/net/'
RKT_PLUGIN_DIR_BACKUP = '/etc/rkt-plugins/calico/'
CALICO_DEFAULT_IMAGE = "calico/node:latest"
LIBNETWORK_IMAGE = 'calico/node-libnetwork:latest'
def validate_arguments(arguments):
"""
Validate argument values:
<IP>
<IP6>
<PEER_IP>
<AS_NUM>
<DETACH>
Arguments not validated:
<DOCKER_IMAGE_NAME>
<LOG_DIR>
:param arguments: Docopt processed arguments
"""
# Validate IPs
ip_ok = arguments.get("--ip") is None or \
validate_ip(arguments.get("--ip"), 4)
ip6_ok = arguments.get("--ip6") is None or \
validate_ip(arguments.get("--ip6"), 6)
container_ip_ok = arguments.get("<IP>") is None or \
validate_ip(arguments["<IP>"], 4) or \
validate_ip(arguments["<IP>"], 6)
peer_ip_ok = arguments.get("<PEER_IP>") is None or \
validate_ip(arguments["<PEER_IP>"], 4) or \
validate_ip(arguments["<PEER_IP>"], 6)
asnum_ok = True
asnum = arguments.get("<AS_NUM>") or arguments.get("--as")
if asnum:
asnum_ok = validate_asn(asnum)
detach_ok = True
if arguments.get("<DETACH>") or arguments.get("--detach"):
detach_ok = arguments.get("--detach") in ["true", "false"]
# Print error message
if not ip_ok:
print "Invalid IPv4 address specified with --ip argument."
if not ip6_ok:
print "Invalid IPv6 address specified with --ip6 argument."
if not container_ip_ok or not peer_ip_ok:
print "Invalid IP address specified."
if not asnum_ok:
print "Invalid AS Number specified."
if not detach_ok:
print "Valid values for --detach are 'true' and 'false'"
# Exit if not valid argument
if not (ip_ok and ip6_ok and container_ip_ok and peer_ip_ok and asnum_ok
and detach_ok):
sys.exit(1)
def node(arguments):
"""
Main dispatcher for node commands. Calls the corresponding helper function.
:param arguments: A dictionary of arguments already processed through
this file's docstring with docopt
:return: None
"""
validate_arguments(arguments)
as_num = convert_asn_to_asplain(arguments.get("<AS_NUM>") or arguments.get("--as"))
if arguments.get("bgp"):
if arguments.get("peer"):
ip_version = get_container_ipv_from_arguments(arguments)
if arguments.get("add"):
node_bgppeer_add(arguments.get("<PEER_IP>"), ip_version,
as_num)
elif arguments.get("remove"):
node_bgppeer_remove(arguments.get("<PEER_IP>"), ip_version)
elif arguments.get("show"):
if not ip_version:
node_bgppeer_show(4)
node_bgppeer_show(6)
else:
node_bgppeer_show(ip_version)
elif arguments.get("stop"):
node_stop(arguments.get("--force"))
else:
assert arguments.get("--detach") in ["true", "false"]
detach = arguments.get("--detach") == "true"
node_start(ip=arguments.get("--ip"),
node_image=arguments.get('--node-image'),
log_dir=arguments.get("--log-dir"),
ip6=arguments.get("--ip6"),
as_num=as_num,
detach=detach,
kubernetes=arguments.get("--kubernetes"),
rkt=arguments.get("--rkt"),
libnetwork=arguments.get("--libnetwork"))
def node_start(node_image, log_dir, ip, ip6, as_num, detach, kubernetes, rkt,
libnetwork):
"""
Create the calico-node container and establish Calico networking on this
host.
:param ip: The IPv4 address of the host.
:param node_image: The calico-node image to use.
:param ip6: The IPv6 address of the host (or None if not configured)
:param as_num: The BGP AS Number to use for this node. If not specified
the global default value will be used.
:param detach: True to run in Docker's "detached" mode, False to run
attached.
:param kubernetes: True to install the kubernetes plugin, False otherwise.
:param rkt: True to install the rkt plugin, False otherwise.
:param libnetwork: True to use the calico/node-libnetwork image as the node
image, False otherwise.
:return: None.
"""
# Print warnings for any known system issues before continuing
check_system(fix=False, quit_if_error=False)
# Ensure log directory exists
if not os.path.exists(log_dir):
os.makedirs(log_dir)
# Get IP address of host, if none was specified
if not ip:
ips = get_host_ips(exclude=["^docker.*", "^cbr.*"])
try:
ip = ips.pop()
except IndexError:
print "Couldn't autodetect a management IP address. Please provide" \
" an IP by rerunning the command with the --ip=<IP_ADDRESS> flag."
sys.exit(1)
else:
print "No IP provided. Using detected IP: %s" % ip
# Verify that the chosen IP exists on the current host
warn_if_unknown_ip(ip, ip6)
# Warn if this hostname conflicts with an existing host
warn_if_hostname_conflict(ip)
# Install kubernetes plugin
if kubernetes:
try:
# Attempt to install to the default kubernetes directory
install_plugin(KUBERNETES_PLUGIN_DIR, KUBERNETES_BINARY_URL)
except OSError:
# Use the backup directory
install_plugin(KUBERNETES_PLUGIN_DIR_BACKUP, KUBERNETES_BINARY_URL)
# Install rkt plugin
if rkt:
try:
# Attempt to install to the default rkt directory
install_plugin(RKT_PLUGIN_DIR, RKT_BINARY_URL)
except OSError:
# Use the backup directory
install_plugin(RKT_PLUGIN_DIR_BACKUP, RKT_BINARY_URL)
# Set up etcd
ipv4_pools = client.get_ip_pools(4)
ipv6_pools = client.get_ip_pools(6)
# Create default pools if required
if not ipv4_pools:
client.add_ip_pool(4, DEFAULT_IPV4_POOL)
if not ipv6_pools:
client.add_ip_pool(6, DEFAULT_IPV6_POOL)
client.ensure_global_config()
client.create_host(hostname, ip, ip6, as_num)
try:
docker_client.remove_container("calico-node", force=True)
except docker.errors.APIError as err:
if err.response.status_code != 404:
raise
    # Always try to convert the address (hostname) to an IP. This is a no-op if
# the address is already an IP address. Note that the format of the authority
# string has already been validated.
etcd_authority = os.getenv(ETCD_AUTHORITY_ENV, ETCD_AUTHORITY_DEFAULT)
etcd_authority_address, etcd_authority_port = etcd_authority.split(':')
etcd_authority = '%s:%s' % (socket.gethostbyname(etcd_authority_address),
etcd_authority_port)
environment = [
"HOSTNAME=%s" % hostname,
"IP=%s" % ip,
"IP6=%s" % (ip6 or ""),
"ETCD_AUTHORITY=%s" % etcd_authority, # etcd host:port
"FELIX_ETCDADDR=%s" % etcd_authority, # etcd host:port
"POLICY_ONLY_CALICO=%s" % os.getenv(POLICY_ONLY_ENV, ""),
]
binds = {
"/proc":
{
"bind": "/proc_host",
"ro": False
},
log_dir:
{
"bind": "/var/log/calico",
"ro": False
},
"/run/docker/plugins":
{
"bind": "/usr/share/docker/plugins",
"ro": False
}
}
host_config = docker.utils.create_host_config(
privileged=True,
restart_policy={"Name": "Always"},
network_mode="host",
binds=binds)
if not node_image:
# Use the calico/node-libnetwork image if the libnetwork flag was
# passed in. Otherwise, use the default calico/node image.
node_image = LIBNETWORK_IMAGE if libnetwork else CALICO_DEFAULT_IMAGE
_find_or_pull_node_image(node_image)
container = docker_client.create_container(
node_image,
name="calico-node",
detach=True,
environment=environment,
host_config=host_config,
volumes=["/proc_host",
"/var/log/calico",
"/usr/share/docker/plugins"])
cid = container["Id"]
docker_client.start(container)
print "Calico node is running with id: %s" % cid
if not detach:
_attach_and_stream(container)
def node_stop(force):
if force or len(client.get_endpoints(hostname=hostname,
orchestrator_id=DOCKER_ORCHESTRATOR_ID)) == 0:
client.remove_host(hostname)
try:
docker_client.stop("calico-node")
except docker.errors.APIError as err:
if err.response.status_code != 404:
raise
print "Node stopped and all configuration removed"
else:
print "Current host has active endpoints so can't be stopped." + \
" Force with --force"
def node_bgppeer_add(ip, version, as_num):
"""
Add a new BGP peer with the supplied IP address and AS Number to this node.
:param ip: The address to add
:param version: 4 or 6
:param as_num: The peer AS Number.
:return: None
"""
address = IPAddress(ip)
peer = BGPPeer(address, as_num)
client.add_bgp_peer(version, peer, hostname=hostname)
def node_bgppeer_remove(ip, version):
"""
Remove a global BGP peer from this node.
:param ip: The address to use.
:param version: 4 or 6
:return: None
"""
address = IPAddress(ip)
try:
client.remove_bgp_peer(version, address, hostname=hostname)
except KeyError:
print "%s is not a configured peer for this node." % address
sys.exit(1)
else:
print "BGP peer removed from node configuration"
def node_bgppeer_show(version):
"""
Print a list of the BGP Peers for this node.
"""
assert version in (4, 6)
peers = client.get_bgp_peers(version, hostname=hostname)
if peers:
heading = "Node specific IPv%s BGP Peer" % version
x = PrettyTable([heading, "AS Num"], sortby=heading)
for peer in peers:
x.add_row([peer.ip, peer.as_num])
x.align = "l"
print x.get_string(sortby=heading)
else:
print "No IPv%s BGP Peers defined for this node.\n" % version
def warn_if_unknown_ip(ip, ip6):
"""
Prints a warning message if the IP addresses are not assigned to interfaces
on the current host.
:param ip: IPv4 address which should be present on the host.
:param ip6: IPv6 address which should be present on the host.
:return: None
"""
if ip and ip not in get_host_ips(version=4, exclude=["docker0"]):
print "WARNING: Could not confirm that the provided IPv4 address is assigned" \
" to this host."
if ip6 and ip6 not in get_host_ips(version=6, exclude=["docker0"]):
print "WARNING: Could not confirm that the provided IPv6 address is assigned" \
" to this host."
def warn_if_hostname_conflict(ip):
"""
Prints a warning message if it seems like an existing host is already running
calico using this hostname.
:param ip: User-provided IP address to start this node with.
:return: Nothing
"""
    # If there's already a calico-node container on this host, the user is
    # probably just re-running node to update one of the IP addresses, so skip.
if len(docker_client.containers(filters={'name': 'calico-node'})) == 0:
# Otherwise, check if another host with the same hostname
# is already configured
try:
current_ipv4, _ = client.get_host_bgp_ips(hostname)
except KeyError:
# No other machine has registered configuration under this hostname.
# This must be a new host with a unique hostname, which is the
# expected behavior.
pass
else:
if current_ipv4 != "" and current_ipv4 != ip:
print_paragraph("WARNING: Hostname '%s' is already in use "
"with IP address %s. Calico requires each compute host to "
"have a unique hostname. If this is your first time "
"running 'calicoctl node' on this host, ensure that "
"another host is not already using the "
"same hostname." % (hostname, ip))
def _find_or_pull_node_image(image_name):
"""
Check if Docker has a cached copy of an image, and if not, attempt to pull
it.
:param image_name: The full name of the image.
:return: None.
"""
try:
_ = docker_client.inspect_image(image_name)
except docker.errors.APIError as err:
if err.response.status_code == 404:
# TODO: Display proper status bar
print_paragraph("Pulling Docker image %s" % image_name)
try:
                # Pull the image and then verify that it was successfully
# pulled (the pull doesn't raise an exception on failure).
docker_client.pull(image_name)
docker_client.inspect_image(image_name)
except docker.errors.APIError:
# Unable to download the Docker image.
print_paragraph("ERROR: Unable to download Docker image.")
print_paragraph("Please verify that you have network "
"connectivity to DockerHub and that, if you "
"explicitly specified which calico/node image "
"to use, the image name is correct.")
sys.exit(1)
def _attach_and_stream(container):
"""
Attach to a container and stream its stdout and stderr output to this
process's stdout, until the container stops. If the user presses Ctrl-C or
the process is killed, also stop the Docker container.
Used to run the calico-node as a foreground attached service.
:param container: Docker container to attach to.
:return: None.
"""
# Register a SIGTERM handler, so we shut down the container if this
# process is kill'd.
def handle_sigterm(sig, frame):
print "Got SIGTERM"
docker_client.stop(container)
sys.exit(0)
signal.signal(signal.SIGTERM, handle_sigterm)
output = docker_client.attach(container, stream=True)
try:
for raw_data in output:
sys.stdout.write(raw_data)
except KeyboardInterrupt:
        # Mainline: someone pressed Ctrl-C.
print "Stopping Calico node..."
finally:
# Could either be this process is being killed, or output generator
# raises an exception.
docker_client.stop(container)
def install_plugin(plugin_dir, binary_url):
"""
Downloads a plugin to the specified directory.
:param plugin_dir: Desired download destination for the plugin.
:param binary_url: Download the plugin from this url.
:return: Nothing
"""
if not os.path.exists(plugin_dir):
os.makedirs(plugin_dir)
binary_path = plugin_dir + 'calico'
try:
urllib.urlretrieve(binary_url, binary_path)
except IOError:
print "ERROR: Couldn't download the plugin from %s" % binary_url
sys.exit(1)
else:
# Download successful - change permissions to allow execution.
try:
st = os.stat(binary_path)
executable_permissions = st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
os.chmod(binary_path, executable_permissions)
except OSError:
print "ERROR: Unable to set permissions on plugin %s" % binary_path
sys.exit(1)
|
tchaari/android_kernel_samsung_crespo
|
refs/heads/kk4.4
|
tools/perf/scripts/python/sctop.py
|
11180
|
# system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
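# Illustrative invocations (the comm name and interval are hypothetical):
#   perf script -s sctop.py              # all comms, refresh every 3 seconds
#   perf script -s sctop.py firefox 5    # only syscalls made by 'firefox', every 5 seconds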
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
brianwoo/django-tutorial
|
refs/heads/master
|
build/Django/build/lib.linux-x86_64-2.7/django/contrib/gis/geos/error.py
|
326
|
"""
This module houses the GEOS exceptions, specifically, GEOSException and
GEOSGeometryIndexError.
"""
class GEOSException(Exception):
"The base GEOS exception, indicates a GEOS-related error."
pass
class GEOSIndexError(GEOSException, KeyError):
"""
This exception is raised when an invalid index is encountered, and has
    the 'silent_variable_failure' attribute set to True. This ensures that
    Django's templates proceed to use the next lookup type gracefully when
an Exception is raised. Fixes ticket #4740.
"""
# "If, during the method lookup, a method raises an exception, the exception
# will be propagated, unless the exception has an attribute
# `silent_variable_failure` whose value is True." -- Django template docs.
silent_variable_failure = True
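# Minimal sketch of why silent_variable_failure matters; the FakeGeometry class
# is hypothetical and not part of this module. A template lookup such as
# "{{ geom.0 }}" that triggers this exception renders as empty output instead
# of propagating the error.
#
#     class FakeGeometry(object):
#         def __getitem__(self, index):
#             raise GEOSIndexError('invalid GEOS index: %s' % index)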
|
pilou-/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/vmware/vmware_dvs_portgroup.py
|
14
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# Copyright: (c) 2017-2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_dvs_portgroup
short_description: Create or remove a Distributed vSwitch portgroup.
description:
- Create or remove a Distributed vSwitch portgroup.
version_added: 2.0
author:
- Joseph Callen (@jcpowermac)
- Philippe Dellaert (@pdellaert) <philippe@dellaert.org>
notes:
- Tested on vSphere 5.5
- Tested on vSphere 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
portgroup_name:
description:
- The name of the portgroup that is to be created or deleted.
required: True
switch_name:
description:
- The name of the distributed vSwitch the port group should be created on.
required: True
vlan_id:
description:
- The VLAN ID that should be configured with the portgroup, use 0 for no VLAN.
- 'If C(vlan_trunk) is configured to be I(true), this can be a combination of multiple ranges and numbers, example: 1-200, 205, 400-4094.'
- The valid C(vlan_id) range is from 0 to 4094. Overlapping ranges are allowed.
required: True
num_ports:
description:
- The number of ports the portgroup should contain.
required: True
portgroup_type:
description:
- See VMware KB 1022312 regarding portgroup types.
required: True
choices:
- 'earlyBinding'
- 'lateBinding'
- 'ephemeral'
state:
description:
- Determines if the portgroup should be present or not.
required: True
type: str
choices:
- 'present'
- 'absent'
version_added: '2.5'
vlan_trunk:
description:
- Indicates whether this is a VLAN trunk or not.
required: False
default: False
type: bool
version_added: '2.5'
network_policy:
description:
- Dictionary which configures the different security values for portgroup.
- 'Valid attributes are:'
- '- C(promiscuous) (bool): indicates whether promiscuous mode is allowed. (default: false)'
- '- C(forged_transmits) (bool): indicates whether forged transmits are allowed. (default: false)'
- '- C(mac_changes) (bool): indicates whether mac changes are allowed. (default: false)'
required: False
version_added: '2.5'
default: {
promiscuous: False,
forged_transmits: False,
mac_changes: False,
}
teaming_policy:
description:
- Dictionary which configures the different teaming values for portgroup.
- 'Valid attributes are:'
- '- C(load_balance_policy) (string): Network adapter teaming policy. (default: loadbalance_srcid)'
- ' - choices: [ loadbalance_ip, loadbalance_srcmac, loadbalance_srcid, loadbalance_loadbased, failover_explicit]'
- ' - "loadbalance_loadbased" is available from version 2.6 and onwards'
- '- C(inbound_policy) (bool): Indicate whether or not the teaming policy is applied to inbound frames as well. (default: False)'
- '- C(notify_switches) (bool): Indicate whether or not to notify the physical switch if a link fails. (default: True)'
- '- C(rolling_order) (bool): Indicate whether or not to use a rolling policy when restoring links. (default: False)'
required: False
version_added: '2.5'
default: {
'notify_switches': True,
'load_balance_policy': 'loadbalance_srcid',
'inbound_policy': False,
'rolling_order': False
}
port_policy:
description:
- Dictionary which configures the advanced policy settings for the portgroup.
- 'Valid attributes are:'
- '- C(block_override) (bool): indicates if the block policy can be changed per port. (default: true)'
- '- C(ipfix_override) (bool): indicates if the ipfix policy can be changed per port. (default: false)'
- '- C(live_port_move) (bool): indicates if a live port can be moved in or out of the portgroup. (default: false)'
- '- C(network_rp_override) (bool): indicates if the network resource pool can be changed per port. (default: false)'
- '- C(port_config_reset_at_disconnect) (bool): indicates if the configuration of a port is reset automatically after disconnect. (default: true)'
- '- C(security_override) (bool): indicates if the security policy can be changed per port. (default: false)'
- '- C(shaping_override) (bool): indicates if the shaping policy can be changed per port. (default: false)'
- '- C(traffic_filter_override) (bool): indicates if the traffic filter can be changed per port. (default: false)'
- '- C(uplink_teaming_override) (bool): indicates if the uplink teaming policy can be changed per port. (default: false)'
- '- C(vendor_config_override) (bool): indicates if the vendor config can be changed per port. (default: false)'
- '- C(vlan_override) (bool): indicates if the vlan can be changed per port. (default: false)'
required: False
version_added: '2.5'
default: {
'traffic_filter_override': False,
'network_rp_override': False,
'live_port_move': False,
'security_override': False,
'vendor_config_override': False,
'port_config_reset_at_disconnect': True,
'uplink_teaming_override': False,
'block_override': True,
'shaping_override': False,
'vlan_override': False,
'ipfix_override': False
}
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Create vlan portgroup
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
    portgroup_name: vlan-123-portgroup
switch_name: dvSwitch
vlan_id: 123
num_ports: 120
portgroup_type: earlyBinding
state: present
delegate_to: localhost
- name: Create vlan trunk portgroup
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
    portgroup_name: vlan-trunk-portgroup
switch_name: dvSwitch
vlan_id: 1-1000, 1005, 1100-1200
vlan_trunk: True
num_ports: 120
portgroup_type: earlyBinding
state: present
delegate_to: localhost
- name: Create no-vlan portgroup
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
    portgroup_name: no-vlan-portgroup
switch_name: dvSwitch
vlan_id: 0
num_ports: 120
portgroup_type: earlyBinding
state: present
delegate_to: localhost
- name: Create vlan portgroup with all security and port policies
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
    portgroup_name: vlan-123-portgroup
switch_name: dvSwitch
vlan_id: 123
num_ports: 120
portgroup_type: earlyBinding
state: present
network_policy:
promiscuous: yes
forged_transmits: yes
mac_changes: yes
port_policy:
block_override: yes
ipfix_override: yes
live_port_move: yes
network_rp_override: yes
port_config_reset_at_disconnect: yes
security_override: yes
shaping_override: yes
traffic_filter_override: yes
uplink_teaming_override: yes
vendor_config_override: yes
vlan_override: yes
delegate_to: localhost
'''
try:
from pyVmomi import vim, vmodl
except ImportError as e:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import (PyVmomi, find_dvs_by_name, find_dvspg_by_name,
vmware_argument_spec, wait_for_task)
class VMwareDvsPortgroup(PyVmomi):
def __init__(self, module):
super(VMwareDvsPortgroup, self).__init__(module)
self.dvs_portgroup = None
self.dv_switch = None
def process_state(self):
dvspg_states = {
'absent': {
'present': self.state_destroy_dvspg,
'absent': self.state_exit_unchanged,
},
'present': {
'update': self.state_update_dvspg,
'present': self.state_exit_unchanged,
'absent': self.state_create_dvspg,
}
}
try:
dvspg_states[self.module.params['state']][self.check_dvspg_state()]()
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
except Exception as e:
self.module.fail_json(msg=str(e))
def create_port_group(self):
config = vim.dvs.DistributedVirtualPortgroup.ConfigSpec()
# Basic config
config.name = self.module.params['portgroup_name']
config.numPorts = self.module.params['num_ports']
# Default port config
config.defaultPortConfig = vim.dvs.VmwareDistributedVirtualSwitch.VmwarePortConfigPolicy()
if self.module.params['vlan_trunk']:
config.defaultPortConfig.vlan = vim.dvs.VmwareDistributedVirtualSwitch.TrunkVlanSpec()
vlan_id_list = []
for vlan_id_splitted in self.module.params['vlan_id'].split(','):
try:
vlan_id_start, vlan_id_end = map(int, vlan_id_splitted.split('-'))
if vlan_id_start not in range(0, 4095) or vlan_id_end not in range(0, 4095):
self.module.fail_json(msg="vlan_id range %s specified is incorrect. The valid vlan_id range is from 0 to 4094." % vlan_id_splitted)
vlan_id_list.append(vim.NumericRange(start=vlan_id_start, end=vlan_id_end))
except ValueError:
vlan_id_list.append(vim.NumericRange(start=int(vlan_id_splitted.strip()), end=int(vlan_id_splitted.strip())))
config.defaultPortConfig.vlan.vlanId = vlan_id_list
else:
config.defaultPortConfig.vlan = vim.dvs.VmwareDistributedVirtualSwitch.VlanIdSpec()
config.defaultPortConfig.vlan.vlanId = int(self.module.params['vlan_id'])
config.defaultPortConfig.vlan.inherited = False
config.defaultPortConfig.securityPolicy = vim.dvs.VmwareDistributedVirtualSwitch.SecurityPolicy()
config.defaultPortConfig.securityPolicy.allowPromiscuous = vim.BoolPolicy(value=self.module.params['network_policy']['promiscuous'])
config.defaultPortConfig.securityPolicy.forgedTransmits = vim.BoolPolicy(value=self.module.params['network_policy']['forged_transmits'])
config.defaultPortConfig.securityPolicy.macChanges = vim.BoolPolicy(value=self.module.params['network_policy']['mac_changes'])
# Teaming Policy
teamingPolicy = vim.dvs.VmwareDistributedVirtualSwitch.UplinkPortTeamingPolicy()
teamingPolicy.policy = vim.StringPolicy(value=self.module.params['teaming_policy']['load_balance_policy'])
teamingPolicy.reversePolicy = vim.BoolPolicy(value=self.module.params['teaming_policy']['inbound_policy'])
teamingPolicy.notifySwitches = vim.BoolPolicy(value=self.module.params['teaming_policy']['notify_switches'])
teamingPolicy.rollingOrder = vim.BoolPolicy(value=self.module.params['teaming_policy']['rolling_order'])
config.defaultPortConfig.uplinkTeamingPolicy = teamingPolicy
# PG policy (advanced_policy)
config.policy = vim.dvs.VmwareDistributedVirtualSwitch.VMwarePortgroupPolicy()
config.policy.blockOverrideAllowed = self.module.params['port_policy']['block_override']
config.policy.ipfixOverrideAllowed = self.module.params['port_policy']['ipfix_override']
config.policy.livePortMovingAllowed = self.module.params['port_policy']['live_port_move']
config.policy.networkResourcePoolOverrideAllowed = self.module.params['port_policy']['network_rp_override']
config.policy.portConfigResetAtDisconnect = self.module.params['port_policy']['port_config_reset_at_disconnect']
config.policy.securityPolicyOverrideAllowed = self.module.params['port_policy']['security_override']
config.policy.shapingOverrideAllowed = self.module.params['port_policy']['shaping_override']
config.policy.trafficFilterOverrideAllowed = self.module.params['port_policy']['traffic_filter_override']
config.policy.uplinkTeamingOverrideAllowed = self.module.params['port_policy']['uplink_teaming_override']
config.policy.vendorConfigOverrideAllowed = self.module.params['port_policy']['vendor_config_override']
config.policy.vlanOverrideAllowed = self.module.params['port_policy']['vlan_override']
# PG Type
config.type = self.module.params['portgroup_type']
task = self.dv_switch.AddDVPortgroup_Task([config])
changed, result = wait_for_task(task)
return changed, result
def state_destroy_dvspg(self):
changed = True
result = None
if not self.module.check_mode:
task = self.dvs_portgroup.Destroy_Task()
changed, result = wait_for_task(task)
self.module.exit_json(changed=changed, result=str(result))
def state_exit_unchanged(self):
self.module.exit_json(changed=False)
def state_update_dvspg(self):
self.module.exit_json(changed=False, msg="Currently not implemented.")
def state_create_dvspg(self):
changed = True
result = None
if not self.module.check_mode:
changed, result = self.create_port_group()
self.module.exit_json(changed=changed, result=str(result))
def check_dvspg_state(self):
self.dv_switch = find_dvs_by_name(self.content, self.module.params['switch_name'])
if self.dv_switch is None:
self.module.fail_json(msg="A distributed virtual switch with name %s does not exist" % self.module.params['switch_name'])
self.dvs_portgroup = find_dvspg_by_name(self.dv_switch, self.module.params['portgroup_name'])
if self.dvs_portgroup is None:
return 'absent'
else:
return 'present'
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
dict(
portgroup_name=dict(required=True, type='str'),
switch_name=dict(required=True, type='str'),
vlan_id=dict(required=True, type='str'),
num_ports=dict(required=True, type='int'),
portgroup_type=dict(required=True, choices=['earlyBinding', 'lateBinding', 'ephemeral'], type='str'),
state=dict(required=True, choices=['present', 'absent'], type='str'),
vlan_trunk=dict(type='bool', default=False),
network_policy=dict(
type='dict',
options=dict(
promiscuous=dict(type='bool', default=False),
forged_transmits=dict(type='bool', default=False),
mac_changes=dict(type='bool', default=False)
),
default=dict(
promiscuous=False,
forged_transmits=False,
mac_changes=False
)
),
teaming_policy=dict(
type='dict',
options=dict(
inbound_policy=dict(type='bool', default=False),
notify_switches=dict(type='bool', default=True),
rolling_order=dict(type='bool', default=False),
load_balance_policy=dict(type='str',
default='loadbalance_srcid',
choices=[
'loadbalance_ip',
'loadbalance_srcmac',
'loadbalance_srcid',
'loadbalance_loadbased',
'failover_explicit',
],
)
),
default=dict(
inbound_policy=False,
notify_switches=True,
rolling_order=False,
load_balance_policy='loadbalance_srcid',
),
),
port_policy=dict(
type='dict',
options=dict(
block_override=dict(type='bool', default=True),
ipfix_override=dict(type='bool', default=False),
live_port_move=dict(type='bool', default=False),
network_rp_override=dict(type='bool', default=False),
port_config_reset_at_disconnect=dict(type='bool', default=True),
security_override=dict(type='bool', default=False),
shaping_override=dict(type='bool', default=False),
traffic_filter_override=dict(type='bool', default=False),
uplink_teaming_override=dict(type='bool', default=False),
vendor_config_override=dict(type='bool', default=False),
vlan_override=dict(type='bool', default=False)
),
default=dict(
block_override=True,
ipfix_override=False,
live_port_move=False,
network_rp_override=False,
port_config_reset_at_disconnect=True,
security_override=False,
shaping_override=False,
traffic_filter_override=False,
uplink_teaming_override=False,
vendor_config_override=False,
vlan_override=False
)
)
)
)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
vmware_dvs_portgroup = VMwareDvsPortgroup(module)
vmware_dvs_portgroup.process_state()
if __name__ == '__main__':
main()
|
m3wolf/xanespy
|
refs/heads/master
|
tests/mpi_spectrum_fit.py
|
1
|
#!/usr/bin/env python
"""A script to launch spectrum fitting for all the pixels of a XANES
Frameset. This script should be launched with `mpiexec
mpi_spectrum_fit.py` to make proper use of parallel processing."""
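# Illustrative launch command; the rank count of 4 is arbitrary:
#   mpiexec -n 4 python mpi_spectrum_fit.py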
import logging
logging.basicConfig(level=logging.DEBUG)
import os
import xanespy as xp
HDF_FILE = '/tmp/txm_data_from_youngsang.h5'
# HDF_FILE = os.path.join(os.path.dirname(__file__), 'imported-ssrl-data.h5')
log = logging.getLogger()
def get_frameset():
"""Retrieve some imported data for testing."""
fs = xp.XanesFrameset(HDF_FILE, edge=xp.k_edges['Ni_NCA'],
groupname="NAT1050_Insitu03_p01_OCV")
return fs
if __name__ == '__main__':
fs = get_frameset()
# The ``fit_spectra()`` method contains all the MPI magic
fs.fit_spectra()
|
wikkiewikkie/elizabeth
|
refs/heads/master
|
tests/test_data/test_datetime.py
|
1
|
# -*- coding: utf-8 -*-
import datetime
import re
from ._patterns import STR_REGEX
def test_str(dt):
assert re.match(STR_REGEX, str(dt))
def test_year(dt):
result = dt.year(minimum=2000, maximum=2016)
assert result >= 2000
assert result <= 2016
def test_day_of_month(dt):
result = dt.day_of_month()
    assert 1 <= result <= 31
def test_date(dt):
result = dt.date(start=1999, end=1999, fmt="%m/%d/%Y")
result = datetime.datetime.strptime(result, "%m/%d/%Y")
assert result.year == 1999 # check range was applied correctly
def test_time(dt):
default = dt.time()
default = datetime.datetime.strptime(default, dt.data['formats']['time'])
assert isinstance(default, datetime.datetime)
result = dt.time(fmt="%H:%M")
result = datetime.datetime.strptime(result, "%H:%M")
assert isinstance(result, datetime.datetime)
def test_century(dt):
result = dt.century()
assert result is not None
assert isinstance(result, str)
def test_day_of_week(generic):
result = generic.datetime.day_of_week()
assert result in generic.datetime.data['day']['name']
result_abbr = generic.datetime.day_of_week(abbr=True)
assert result_abbr in generic.datetime.data['day']['abbr']
def test_month(generic):
result = generic.datetime.month()
assert result is not None
result_abbr = generic.datetime.month(abbr=True)
assert isinstance(result_abbr, str)
def test_periodicity(generic):
result = generic.datetime.periodicity()
assert result in generic.datetime.data['periodicity']
|
alexus37/AugmentedRealityChess
|
refs/heads/master
|
pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GL/AMD/conservative_depth.py
|
9
|
'''OpenGL extension AMD.conservative_depth
This module customises the behaviour of the
OpenGL.raw.GL.AMD.conservative_depth to provide a more
Python-friendly API
Overview (from the spec)
There is a common optimization for hardware accelerated implementation of
OpenGL which relies on an early depth test to be run before the fragment
shader so that the shader evaluation can be skipped if the fragment ends
up being discarded because it is occluded.
This optimization does not affect the final rendering, and is typically
possible when the fragment does not change the depth programmatically.
(i.e.: it does not write to the built-in gl_FragDepth output). There are,
however a class of operations on the depth in the shader which could
still be performed while allowing the early depth test to operate.
This extension allows the application to pass enough information to the
GL implementation to activate such optimizations safely.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/AMD/conservative_depth.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.AMD.conservative_depth import *
from OpenGL.raw.GL.AMD.conservative_depth import _EXTENSION_NAME
def glInitConservativeDepthAMD():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
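# Minimal availability-check sketch; it assumes a current OpenGL context has
# already been created (e.g. with GLUT or GLFW) before the call:
#
#     if glInitConservativeDepthAMD():
#         # The AMD_conservative_depth layout qualifiers can be used in GLSL.
#         pass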
|
vovanbo/django-oscar
|
refs/heads/master
|
tests/unit/voucher/model_tests.py
|
44
|
import datetime
from decimal import Decimal as D
from django.test import TestCase
from django.core import exceptions
from django.utils.timezone import utc
from oscar.apps.voucher.models import Voucher
from oscar.core.compat import get_user_model
from oscar.test.factories import OrderFactory, UserFactory, VoucherFactory
START_DATETIME = datetime.datetime(2011, 1, 1).replace(tzinfo=utc)
END_DATETIME = datetime.datetime(2012, 1, 1).replace(tzinfo=utc)
User = get_user_model()
class TestSavingAVoucher(TestCase):
def test_saves_code_as_uppercase(self):
voucher = VoucherFactory(
code='lower',
start_datetime=START_DATETIME, end_datetime=END_DATETIME)
self.assertEqual('LOWER', voucher.code)
def test_verifies_dates_are_sensible(self):
with self.assertRaises(exceptions.ValidationError):
voucher = Voucher.objects.create(
code='lower', start_datetime=END_DATETIME,
end_datetime=START_DATETIME)
voucher.clean()
class TestAVoucher(TestCase):
def setUp(self):
self.voucher = VoucherFactory(
start_datetime=START_DATETIME, end_datetime=END_DATETIME)
def test_is_active_between_start_and_end_dates(self):
test = datetime.datetime(2011, 6, 10).replace(tzinfo=utc)
self.assertTrue(self.voucher.is_active(test))
def test_is_active_on_end_date(self):
self.assertTrue(self.voucher.is_active(END_DATETIME))
def test_is_active_on_start_date(self):
self.assertTrue(self.voucher.is_active(START_DATETIME))
def test_is_inactive_outside_of_start_and_end_dates(self):
test = datetime.datetime(2012, 3, 10).replace(tzinfo=utc)
self.assertFalse(self.voucher.is_active(test))
def test_increments_total_discount_when_recording_usage(self):
self.voucher.record_discount({'discount': D('10.00')})
self.assertEqual(self.voucher.total_discount, D('10.00'))
self.voucher.record_discount({'discount': D('10.00')})
self.assertEqual(self.voucher.total_discount, D('20.00'))
class TestMultiuseVoucher(TestCase):
def setUp(self):
self.voucher = VoucherFactory(usage=Voucher.MULTI_USE)
def test_is_available_to_same_user_multiple_times(self):
user, order = UserFactory(), OrderFactory()
for i in range(10):
self.voucher.record_usage(order, user)
is_voucher_available_to_user, __ = self.voucher.is_available_to_user(user=user)
self.assertTrue(is_voucher_available_to_user)
class TestOncePerCustomerVoucher(TestCase):
def setUp(self):
self.voucher = VoucherFactory(usage=Voucher.ONCE_PER_CUSTOMER)
def test_is_available_to_a_user_once(self):
user, order = UserFactory(), OrderFactory()
is_voucher_available_to_user, __ = self.voucher.is_available_to_user(user=user)
self.assertTrue(is_voucher_available_to_user)
self.voucher.record_usage(order, user)
is_voucher_available_to_user, __ = self.voucher.is_available_to_user(user=user)
self.assertFalse(is_voucher_available_to_user)
def test_is_available_to_different_users(self):
users, order = [UserFactory(), UserFactory()], OrderFactory()
for user in users:
is_voucher_available_to_user, __ = self.voucher.is_available_to_user(user=user)
self.assertTrue(is_voucher_available_to_user)
self.voucher.record_usage(order, user)
is_voucher_available_to_user, __ = self.voucher.is_available_to_user(user=user)
self.assertFalse(is_voucher_available_to_user)
|
salamer/django
|
refs/heads/master
|
tests/forms_tests/tests/test_formsets.py
|
163
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.forms import (
CharField, DateField, FileField, Form, IntegerField, SplitDateTimeField,
ValidationError, formsets,
)
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.utils import ErrorList
from django.test import SimpleTestCase
from django.utils.encoding import force_text
class Choice(Form):
choice = CharField()
votes = IntegerField()
# FormSet allows us to use multiple instances of the same form on one page. For now,
# the best way to create a FormSet is by using the formset_factory function.
ChoiceFormSet = formset_factory(Choice)
class FavoriteDrinkForm(Form):
name = CharField()
class BaseFavoriteDrinksFormSet(BaseFormSet):
def clean(self):
seen_drinks = []
for drink in self.cleaned_data:
if drink['name'] in seen_drinks:
raise ValidationError('You may only specify a drink once.')
seen_drinks.append(drink['name'])
class EmptyFsetWontValidate(BaseFormSet):
def clean(self):
raise ValidationError("Clean method called")
# Let's define a FormSet that takes a list of favorite drinks, but raises an
# error if there are any duplicates. Used in ``test_clean_hook``,
# ``test_regression_6926`` & ``test_regression_12878``.
FavoriteDrinksFormSet = formset_factory(FavoriteDrinkForm,
formset=BaseFavoriteDrinksFormSet, extra=3)
# Used in ``test_formset_splitdatetimefield``.
class SplitDateTimeForm(Form):
when = SplitDateTimeField(initial=datetime.datetime.now)
SplitDateTimeFormSet = formset_factory(SplitDateTimeForm)
class CustomKwargForm(Form):
def __init__(self, *args, **kwargs):
self.custom_kwarg = kwargs.pop('custom_kwarg')
super(CustomKwargForm, self).__init__(*args, **kwargs)
class FormsFormsetTestCase(SimpleTestCase):
def make_choiceformset(self, formset_data=None, formset_class=ChoiceFormSet,
total_forms=None, initial_forms=0, max_num_forms=0, min_num_forms=0, **kwargs):
"""
Make a ChoiceFormset from the given formset_data.
The data should be given as a list of (choice, votes) tuples.
"""
kwargs.setdefault('prefix', 'choices')
kwargs.setdefault('auto_id', False)
if formset_data is None:
return formset_class(**kwargs)
if total_forms is None:
total_forms = len(formset_data)
def prefixed(*args):
args = (kwargs['prefix'],) + args
return '-'.join(args)
data = {
prefixed('TOTAL_FORMS'): str(total_forms),
prefixed('INITIAL_FORMS'): str(initial_forms),
prefixed('MAX_NUM_FORMS'): str(max_num_forms),
prefixed('MIN_NUM_FORMS'): str(min_num_forms),
}
for i, (choice, votes) in enumerate(formset_data):
data[prefixed(str(i), 'choice')] = choice
data[prefixed(str(i), 'votes')] = votes
return formset_class(data, **kwargs)
def test_basic_formset(self):
# A FormSet constructor takes the same arguments as Form. Let's create a FormSet
# for adding data. By default, it displays 1 blank form. It can display more,
# but we'll look at how to do so later.
formset = self.make_choiceformset()
self.assertHTMLEqual(str(formset), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="1000" />
<tr><th>Choice:</th><td><input type="text" name="choices-0-choice" /></td></tr>
<tr><th>Votes:</th><td><input type="number" name="choices-0-votes" /></td></tr>""")
# We treat FormSet pretty much like we would treat a normal Form. FormSet has an
# is_valid method, and a cleaned_data or errors attribute depending on whether all
# the forms passed validation. However, unlike a Form instance, cleaned_data and
# errors will be a list of dicts rather than just a single dict.
formset = self.make_choiceformset([('Calexico', '100')])
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'choice': 'Calexico'}])
# If a FormSet was not passed any data, its is_valid and has_changed
# methods should return False.
formset = self.make_choiceformset()
self.assertFalse(formset.is_valid())
self.assertFalse(formset.has_changed())
def test_form_kwargs_formset(self):
"""
Test that custom kwargs set on the formset instance are passed to the
underlying forms.
"""
FormSet = formset_factory(CustomKwargForm, extra=2)
formset = FormSet(form_kwargs={'custom_kwarg': 1})
for form in formset:
self.assertTrue(hasattr(form, 'custom_kwarg'))
self.assertEqual(form.custom_kwarg, 1)
def test_form_kwargs_formset_dynamic(self):
"""
Test that form kwargs can be passed dynamically in a formset.
"""
class DynamicBaseFormSet(BaseFormSet):
def get_form_kwargs(self, index):
return {'custom_kwarg': index}
DynamicFormSet = formset_factory(CustomKwargForm, formset=DynamicBaseFormSet, extra=2)
formset = DynamicFormSet(form_kwargs={'custom_kwarg': 'ignored'})
for i, form in enumerate(formset):
self.assertTrue(hasattr(form, 'custom_kwarg'))
self.assertEqual(form.custom_kwarg, i)
def test_form_kwargs_empty_form(self):
FormSet = formset_factory(CustomKwargForm)
formset = FormSet(form_kwargs={'custom_kwarg': 1})
self.assertTrue(hasattr(formset.empty_form, 'custom_kwarg'))
self.assertEqual(formset.empty_form.custom_kwarg, 1)
def test_formset_validation(self):
# FormSet instances can also have an error attribute if validation failed for
# any of the forms.
formset = self.make_choiceformset([('Calexico', '')])
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'votes': ['This field is required.']}])
def test_formset_has_changed(self):
        # A FormSet instance's has_changed() method will be True if any data is
        # passed to its forms, even if the formset didn't validate.
blank_formset = self.make_choiceformset([('', '')])
self.assertFalse(blank_formset.has_changed())
# invalid formset test
invalid_formset = self.make_choiceformset([('Calexico', '')])
self.assertFalse(invalid_formset.is_valid())
self.assertTrue(invalid_formset.has_changed())
# valid formset test
valid_formset = self.make_choiceformset([('Calexico', '100')])
self.assertTrue(valid_formset.is_valid())
self.assertTrue(valid_formset.has_changed())
def test_formset_initial_data(self):
# We can also prefill a FormSet with existing data by providing an ``initial``
# argument to the constructor. ``initial`` should be a list of dicts. By default,
# an extra blank form is included.
initial = [{'choice': 'Calexico', 'votes': 100}]
formset = self.make_choiceformset(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>""")
# Let's simulate what would happen if we submitted this form.
formset = self.make_choiceformset([('Calexico', '100'), ('', '')], initial_forms=1)
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'choice': 'Calexico'}, {}])
def test_second_form_partially_filled(self):
# But the second form was blank! Shouldn't we get some errors? No. If we display
# a form as blank, it's ok for it to be submitted as blank. If we fill out even
# one of the fields of a blank form though, it will be validated. We may want to
        # require that at least x number of forms are completed, but we'll show how to
# handle that later.
formset = self.make_choiceformset([('Calexico', '100'), ('The Decemberists', '')], initial_forms=1)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{}, {'votes': ['This field is required.']}])
def test_delete_prefilled_data(self):
# If we delete data that was pre-filled, we should get an error. Simply removing
# data from form fields isn't the proper way to delete it. We'll see how to
# handle that case later.
formset = self.make_choiceformset([('', ''), ('', '')], initial_forms=1)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'votes': ['This field is required.'], 'choice': ['This field is required.']}, {}])
def test_displaying_more_than_one_blank_form(self):
# Displaying more than 1 blank form ###########################################
        # We can also display more than 1 empty form at a time. To do so, pass an
# extra argument to formset_factory.
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" /></li>
<li>Votes: <input type="number" name="choices-0-votes" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>""")
# Since we displayed every form as blank, we will also accept them back as blank.
# This may seem a little strange, but later we will show how to require a minimum
# number of forms to be completed.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': '',
'choices-0-votes': '',
'choices-1-choice': '',
'choices-1-votes': '',
'choices-2-choice': '',
'choices-2-votes': '',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{}, {}, {}])
def test_min_num_displaying_more_than_one_blank_form(self):
        # We can also display more than 1 empty form by passing the min_num argument
        # to formset_factory. It will (essentially) increment the extra argument.
ChoiceFormSet = formset_factory(Choice, extra=1, min_num=1)
formset = ChoiceFormSet(auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
# Min_num forms are required; extra forms can be empty.
self.assertFalse(formset.forms[0].empty_permitted)
self.assertTrue(formset.forms[1].empty_permitted)
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" /></li>
<li>Votes: <input type="number" name="choices-0-votes" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>""")
def test_min_num_displaying_more_than_one_blank_form_with_zero_extra(self):
        # We can also display more than 1 empty form by passing the min_num argument
ChoiceFormSet = formset_factory(Choice, extra=0, min_num=3)
formset = ChoiceFormSet(auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" /></li>
<li>Votes: <input type="number" name="choices-0-votes" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>""")
def test_single_form_completed(self):
# We can just fill out one of the forms.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-1-choice': '',
'choices-1-votes': '',
'choices-2-choice': '',
'choices-2-votes': '',
}
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'choice': 'Calexico'}, {}, {}])
def test_formset_validate_max_flag(self):
        # If validate_max is set and max_num is less than TOTAL_FORMS in the
        # data, the formset fails validation with a non-form error.
        # MAX_NUM_FORMS in the data is irrelevant here (it's output as a hint
        # for the client, but its value in the returned data is not checked).
data = {
'choices-TOTAL_FORMS': '2', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '2', # max number of forms - should be ignored
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
}
ChoiceFormSet = formset_factory(Choice, extra=1, max_num=1, validate_max=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['Please submit 1 or fewer forms.'])
def test_formset_validate_min_flag(self):
        # If validate_min is set and min_num is more than TOTAL_FORMS in the
        # data, the formset fails validation with a non-form error.
        # MIN_NUM_FORMS in the data is irrelevant here (it's output as a hint
        # for the client, but its value in the returned data is not checked).
data = {
'choices-TOTAL_FORMS': '2', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms - should be ignored
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
}
ChoiceFormSet = formset_factory(Choice, extra=1, min_num=3, validate_min=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['Please submit 3 or more forms.'])
def test_second_form_partially_filled_2(self):
# And once again, if we try to partially complete a form, validation will fail.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-1-choice': 'The Decemberists',
'choices-1-votes': '', # missing value
'choices-2-choice': '',
'choices-2-votes': '',
}
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{}, {'votes': ['This field is required.']}, {}])
def test_more_initial_data(self):
# The extra argument also works when the formset is pre-filled with initial
# data.
initial = [{'choice': 'Calexico', 'votes': 100}]
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>
<li>Choice: <input type="text" name="choices-3-choice" /></li>
<li>Votes: <input type="number" name="choices-3-votes" /></li>""")
        # Make sure retrieving an empty form works; it is rendered with the
        # __prefix__ placeholder and is not included in formset.forms.
self.assertTrue(formset.empty_form.empty_permitted)
self.assertHTMLEqual(formset.empty_form.as_ul(), """<li>Choice: <input type="text" name="choices-__prefix__-choice" /></li>
<li>Votes: <input type="number" name="choices-__prefix__-votes" /></li>""")
def test_formset_with_deletion(self):
# FormSets with deletion ######################################################
# We can easily add deletion ability to a FormSet with an argument to
# formset_factory. This will add a boolean field to each form instance. When
# that boolean field is True, the form will be in formset.deleted_forms
ChoiceFormSet = formset_factory(Choice, can_delete=True)
initial = [{'choice': 'Calexico', 'votes': 100}, {'choice': 'Fergie', 'votes': 900}]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Delete: <input type="checkbox" name="choices-0-DELETE" /></li>
<li>Choice: <input type="text" name="choices-1-choice" value="Fergie" /></li>
<li>Votes: <input type="number" name="choices-1-votes" value="900" /></li>
<li>Delete: <input type="checkbox" name="choices-1-DELETE" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>
<li>Delete: <input type="checkbox" name="choices-2-DELETE" /></li>""")
# To delete something, we just need to set that form's special delete field to
# 'on'. Let's go ahead and delete Fergie.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '2', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-DELETE': '',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-DELETE': 'on',
'choices-2-choice': '',
'choices-2-votes': '',
'choices-2-DELETE': '',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'DELETE': False, 'choice': 'Calexico'}, {'votes': 900, 'DELETE': True, 'choice': 'Fergie'}, {}])
self.assertEqual([form.cleaned_data for form in formset.deleted_forms], [{'votes': 900, 'DELETE': True, 'choice': 'Fergie'}])
# If we fill a form with something and then we check the can_delete checkbox for
# that form, that form's errors should not make the entire formset invalid since
# it's going to be deleted.
class CheckForm(Form):
field = IntegerField(min_value=100)
data = {
'check-TOTAL_FORMS': '3', # the number of forms rendered
'check-INITIAL_FORMS': '2', # the number of forms with initial data
            'check-MIN_NUM_FORMS': '0',  # min number of forms
'check-MAX_NUM_FORMS': '0', # max number of forms
'check-0-field': '200',
'check-0-DELETE': '',
'check-1-field': '50',
'check-1-DELETE': 'on',
'check-2-field': '',
'check-2-DELETE': '',
}
CheckFormSet = formset_factory(CheckForm, can_delete=True)
formset = CheckFormSet(data, prefix='check')
self.assertTrue(formset.is_valid())
# If we remove the deletion flag now we will have our validation back.
data['check-1-DELETE'] = ''
formset = CheckFormSet(data, prefix='check')
self.assertFalse(formset.is_valid())
# Should be able to get deleted_forms from a valid formset even if a
# deleted form would have been invalid.
class Person(Form):
name = CharField()
PeopleForm = formset_factory(
form=Person,
can_delete=True)
p = PeopleForm(
{'form-0-name': '', 'form-0-DELETE': 'on', # no name!
'form-TOTAL_FORMS': 1, 'form-INITIAL_FORMS': 1,
'form-MIN_NUM_FORMS': 0, 'form-MAX_NUM_FORMS': 1})
self.assertTrue(p.is_valid())
self.assertEqual(len(p.deleted_forms), 1)
def test_formsets_with_ordering(self):
# FormSets with ordering ######################################################
# We can also add ordering ability to a FormSet with an argument to
# formset_factory. This will add an integer field to each form instance. When
# form validation succeeds, [form.cleaned_data for form in formset.forms] will have the data in the correct
# order specified by the ordering fields. If a number is duplicated in the set
# of ordering fields, for instance form 0 and form 3 are both marked as 1, then
        # the form index is used as a secondary ordering criterion (see the
        # sketch test added after this one). In order to put something at the
        # front of the list, you'd need to set its order to 0.
ChoiceFormSet = formset_factory(Choice, can_order=True)
initial = [{'choice': 'Calexico', 'votes': 100}, {'choice': 'Fergie', 'votes': 900}]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Order: <input type="number" name="choices-0-ORDER" value="1" /></li>
<li>Choice: <input type="text" name="choices-1-choice" value="Fergie" /></li>
<li>Votes: <input type="number" name="choices-1-votes" value="900" /></li>
<li>Order: <input type="number" name="choices-1-ORDER" value="2" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>
<li>Order: <input type="number" name="choices-2-ORDER" /></li>""")
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '2', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-ORDER': '1',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-ORDER': '2',
'choices-2-choice': 'The Decemberists',
'choices-2-votes': '500',
'choices-2-ORDER': '0',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [
{'votes': 500, 'ORDER': 0, 'choice': 'The Decemberists'},
{'votes': 100, 'ORDER': 1, 'choice': 'Calexico'},
{'votes': 900, 'ORDER': 2, 'choice': 'Fergie'},
])
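    def test_ordering_duplicate_order_values_sketch(self):
        # Editor's sketch (not from the original suite): illustrates the
        # tie-break described in the previous test's comment -- when two forms
        # share the same ORDER value, ordered_forms falls back to the form
        # index, so form 0 still comes before form 1.
        ChoiceFormSet = formset_factory(Choice, can_order=True)
        data = {
            'choices-TOTAL_FORMS': '2',  # the number of forms rendered
            'choices-INITIAL_FORMS': '0',  # the number of forms with initial data
            'choices-MIN_NUM_FORMS': '0',  # min number of forms
            'choices-MAX_NUM_FORMS': '0',  # max number of forms
            'choices-0-choice': 'Calexico',
            'choices-0-votes': '100',
            'choices-0-ORDER': '1',
            'choices-1-choice': 'Fergie',
            'choices-1-votes': '900',
            'choices-1-ORDER': '1',  # same ORDER value as form 0
        }
        formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
        self.assertTrue(formset.is_valid())
        self.assertEqual(
            [form.cleaned_data['choice'] for form in formset.ordered_forms],
            ['Calexico', 'Fergie'])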
def test_empty_ordered_fields(self):
# Ordering fields are allowed to be left blank, and if they *are* left blank,
# they will be sorted below everything else.
data = {
'choices-TOTAL_FORMS': '4', # the number of forms rendered
'choices-INITIAL_FORMS': '3', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-ORDER': '1',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-ORDER': '2',
'choices-2-choice': 'The Decemberists',
'choices-2-votes': '500',
'choices-2-ORDER': '',
'choices-3-choice': 'Basia Bulat',
'choices-3-votes': '50',
'choices-3-ORDER': '',
}
ChoiceFormSet = formset_factory(Choice, can_order=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [
{'votes': 100, 'ORDER': 1, 'choice': 'Calexico'},
{'votes': 900, 'ORDER': 2, 'choice': 'Fergie'},
{'votes': 500, 'ORDER': None, 'choice': 'The Decemberists'},
{'votes': 50, 'ORDER': None, 'choice': 'Basia Bulat'},
])
def test_ordering_blank_fieldsets(self):
# Ordering should work with blank fieldsets.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
}
ChoiceFormSet = formset_factory(Choice, can_order=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [])
def test_formset_with_ordering_and_deletion(self):
# FormSets with ordering + deletion ###########################################
# Let's try throwing ordering and deletion into the same form.
ChoiceFormSet = formset_factory(Choice, can_order=True, can_delete=True)
initial = [
{'choice': 'Calexico', 'votes': 100},
{'choice': 'Fergie', 'votes': 900},
{'choice': 'The Decemberists', 'votes': 500},
]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Order: <input type="number" name="choices-0-ORDER" value="1" /></li>
<li>Delete: <input type="checkbox" name="choices-0-DELETE" /></li>
<li>Choice: <input type="text" name="choices-1-choice" value="Fergie" /></li>
<li>Votes: <input type="number" name="choices-1-votes" value="900" /></li>
<li>Order: <input type="number" name="choices-1-ORDER" value="2" /></li>
<li>Delete: <input type="checkbox" name="choices-1-DELETE" /></li>
<li>Choice: <input type="text" name="choices-2-choice" value="The Decemberists" /></li>
<li>Votes: <input type="number" name="choices-2-votes" value="500" /></li>
<li>Order: <input type="number" name="choices-2-ORDER" value="3" /></li>
<li>Delete: <input type="checkbox" name="choices-2-DELETE" /></li>
<li>Choice: <input type="text" name="choices-3-choice" /></li>
<li>Votes: <input type="number" name="choices-3-votes" /></li>
<li>Order: <input type="number" name="choices-3-ORDER" /></li>
<li>Delete: <input type="checkbox" name="choices-3-DELETE" /></li>""")
# Let's delete Fergie, and put The Decemberists ahead of Calexico.
data = {
'choices-TOTAL_FORMS': '4', # the number of forms rendered
'choices-INITIAL_FORMS': '3', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-ORDER': '1',
'choices-0-DELETE': '',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-ORDER': '2',
'choices-1-DELETE': 'on',
'choices-2-choice': 'The Decemberists',
'choices-2-votes': '500',
'choices-2-ORDER': '0',
'choices-2-DELETE': '',
'choices-3-choice': '',
'choices-3-votes': '',
'choices-3-ORDER': '',
'choices-3-DELETE': '',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [
{'votes': 500, 'DELETE': False, 'ORDER': 0, 'choice': 'The Decemberists'},
{'votes': 100, 'DELETE': False, 'ORDER': 1, 'choice': 'Calexico'},
])
self.assertEqual([form.cleaned_data for form in formset.deleted_forms], [{'votes': 900, 'DELETE': True, 'ORDER': 2, 'choice': 'Fergie'}])
def test_invalid_deleted_form_with_ordering(self):
# Should be able to get ordered forms from a valid formset even if a
# deleted form would have been invalid.
class Person(Form):
name = CharField()
PeopleForm = formset_factory(form=Person, can_delete=True, can_order=True)
p = PeopleForm({
'form-0-name': '',
'form-0-DELETE': 'on', # no name!
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 1,
'form-MIN_NUM_FORMS': 0,
'form-MAX_NUM_FORMS': 1
})
self.assertTrue(p.is_valid())
self.assertEqual(p.ordered_forms, [])
def test_clean_hook(self):
# FormSet clean hook ##########################################################
# FormSets have a hook for doing extra validation that shouldn't be tied to any
# particular form. It follows the same pattern as the clean hook on Forms.
        # We start out with some duplicate data.
data = {
'drinks-TOTAL_FORMS': '2', # the number of forms rendered
'drinks-INITIAL_FORMS': '0', # the number of forms with initial data
'drinks-MIN_NUM_FORMS': '0', # min number of forms
'drinks-MAX_NUM_FORMS': '0', # max number of forms
'drinks-0-name': 'Gin and Tonic',
'drinks-1-name': 'Gin and Tonic',
}
formset = FavoriteDrinksFormSet(data, prefix='drinks')
self.assertFalse(formset.is_valid())
# Any errors raised by formset.clean() are available via the
# formset.non_form_errors() method.
for error in formset.non_form_errors():
self.assertEqual(str(error), 'You may only specify a drink once.')
# Make sure we didn't break the valid case.
data = {
'drinks-TOTAL_FORMS': '2', # the number of forms rendered
'drinks-INITIAL_FORMS': '0', # the number of forms with initial data
'drinks-MIN_NUM_FORMS': '0', # min number of forms
'drinks-MAX_NUM_FORMS': '0', # max number of forms
'drinks-0-name': 'Gin and Tonic',
'drinks-1-name': 'Bloody Mary',
}
formset = FavoriteDrinksFormSet(data, prefix='drinks')
self.assertTrue(formset.is_valid())
self.assertEqual(formset.non_form_errors(), [])
def test_limiting_max_forms(self):
# Limiting the maximum number of forms ########################################
# Base case for max_num.
# When not passed, max_num will take a high default value, leaving the
# number of forms only controlled by the value of the extra parameter.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>
<tr><th><label for="id_form-2-name">Name:</label></th><td><input type="text" name="form-2-name" id="id_form-2-name" /></td></tr>""")
# If max_num is 0 then no form is rendered at all.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3, max_num=0)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertEqual('\n'.join(form_output), "")
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=5, max_num=2)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>""")
# Ensure that max_num has no effect when extra is less than max_num.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=2)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" id="id_form-0-name" /></td></tr>""")
def test_max_num_with_initial_data(self):
# max_num with initial data
# When not passed, max_num will take a high default value, leaving the
# number of forms only controlled by the value of the initial and extra
# parameters.
initial = [
{'name': 'Fernet and Coke'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" value="Fernet and Coke" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>""")
def test_max_num_zero(self):
# If max_num is 0 then no form is rendered at all, regardless of extra,
# unless initial data is present. (This changed in the patch for bug
# 20084 -- previously max_num=0 trumped initial data)
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=0)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertEqual('\n'.join(form_output), "")
# test that initial trumps max_num
initial = [
{'name': 'Fernet and Coke'},
{'name': 'Bloody Mary'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=0)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input id="id_form-0-name" name="form-0-name" type="text" value="Fernet and Coke" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input id="id_form-1-name" name="form-1-name" type="text" value="Bloody Mary" /></td></tr>""")
def test_more_initial_than_max_num(self):
# More initial forms than max_num now results in all initial forms
# being displayed (but no extra forms). This behavior was changed
# from max_num taking precedence in the patch for #20084
initial = [
{'name': 'Gin Tonic'},
{'name': 'Bloody Mary'},
{'name': 'Jack and Coke'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=2)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input id="id_form-0-name" name="form-0-name" type="text" value="Gin Tonic" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input id="id_form-1-name" name="form-1-name" type="text" value="Bloody Mary" /></td></tr>
<tr><th><label for="id_form-2-name">Name:</label></th><td><input id="id_form-2-name" name="form-2-name" type="text" value="Jack and Coke" /></td></tr>""")
# One form from initial and extra=3 with max_num=2 should result in the one
# initial form and one extra.
initial = [
{'name': 'Gin Tonic'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3, max_num=2)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" value="Gin Tonic" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>""")
def test_regression_6926(self):
# Regression test for #6926 ##################################################
# Make sure the management form has the correct prefix.
formset = FavoriteDrinksFormSet()
self.assertEqual(formset.management_form.prefix, 'form')
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MIN_NUM_FORMS': '0',
'form-MAX_NUM_FORMS': '0',
}
formset = FavoriteDrinksFormSet(data=data)
self.assertEqual(formset.management_form.prefix, 'form')
formset = FavoriteDrinksFormSet(initial={})
self.assertEqual(formset.management_form.prefix, 'form')
def test_regression_12878(self):
# Regression test for #12878 #################################################
data = {
'drinks-TOTAL_FORMS': '2', # the number of forms rendered
'drinks-INITIAL_FORMS': '0', # the number of forms with initial data
'drinks-MIN_NUM_FORMS': '0', # min number of forms
'drinks-MAX_NUM_FORMS': '0', # max number of forms
'drinks-0-name': 'Gin and Tonic',
'drinks-1-name': 'Gin and Tonic',
}
formset = FavoriteDrinksFormSet(data, prefix='drinks')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['You may only specify a drink once.'])
def test_formset_iteration(self):
# Regression tests for #16455 -- formset instances are iterable
ChoiceFormset = formset_factory(Choice, extra=3)
formset = ChoiceFormset()
# confirm iterated formset yields formset.forms
forms = list(formset)
self.assertEqual(forms, formset.forms)
self.assertEqual(len(formset), len(forms))
# confirm indexing of formset
self.assertEqual(formset[0], forms[0])
try:
formset[3]
self.fail('Requesting an invalid formset index should raise an exception')
except IndexError:
pass
# Formsets can override the default iteration order
class BaseReverseFormSet(BaseFormSet):
def __iter__(self):
return reversed(self.forms)
def __getitem__(self, idx):
return super(BaseReverseFormSet, self).__getitem__(len(self) - idx - 1)
ReverseChoiceFormset = formset_factory(Choice, BaseReverseFormSet, extra=3)
reverse_formset = ReverseChoiceFormset()
# confirm that __iter__ modifies rendering order
# compare forms from "reverse" formset with forms from original formset
self.assertEqual(str(reverse_formset[0]), str(forms[-1]))
self.assertEqual(str(reverse_formset[1]), str(forms[-2]))
self.assertEqual(len(reverse_formset), len(forms))
def test_formset_nonzero(self):
"""
Formsets with no forms should still evaluate as true.
Regression test for #15722
"""
ChoiceFormset = formset_factory(Choice, extra=0)
formset = ChoiceFormset()
self.assertEqual(len(formset.forms), 0)
self.assertTrue(formset)
def test_formset_splitdatetimefield(self):
"""
Formset should also work with SplitDateTimeField(initial=datetime.datetime.now).
Regression test for #18709.
"""
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-0-when_0': '1904-06-16',
'form-0-when_1': '15:51:33',
}
formset = SplitDateTimeFormSet(data)
self.assertTrue(formset.is_valid())
def test_formset_error_class(self):
        # Regression tests for #16479 -- formset forms use ErrorList instead of the supplied error_class
class CustomErrorList(ErrorList):
pass
formset = FavoriteDrinksFormSet(error_class=CustomErrorList)
self.assertEqual(formset.forms[0].error_class, CustomErrorList)
def test_formset_calls_forms_is_valid(self):
# Regression tests for #18574 -- make sure formsets call
# is_valid() on each form.
class AnotherChoice(Choice):
def is_valid(self):
self.is_valid_called = True
return super(AnotherChoice, self).is_valid()
AnotherChoiceFormSet = formset_factory(AnotherChoice)
data = {
'choices-TOTAL_FORMS': '1', # number of forms rendered
'choices-INITIAL_FORMS': '0', # number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
}
formset = AnotherChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertTrue(all(form.is_valid_called for form in formset.forms))
def test_hard_limit_on_instantiated_forms(self):
"""A formset has a hard limit on the number of forms instantiated."""
# reduce the default limit of 1000 temporarily for testing
_old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
try:
formsets.DEFAULT_MAX_NUM = 2
ChoiceFormSet = formset_factory(Choice, max_num=1)
# someone fiddles with the mgmt form data...
formset = ChoiceFormSet(
{
'choices-TOTAL_FORMS': '4',
'choices-INITIAL_FORMS': '0',
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '4',
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
'choices-2-choice': 'Two',
'choices-2-votes': '2',
'choices-3-choice': 'Three',
'choices-3-votes': '3',
},
prefix='choices',
)
# But we still only instantiate 3 forms
self.assertEqual(len(formset.forms), 3)
# and the formset isn't valid
self.assertFalse(formset.is_valid())
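            # (Editor's note, assumption about the internals) the hard cap
            # appears to be max_num + DEFAULT_MAX_NUM (1 + 2 = 3 here), which
            # is why only 3 forms are built and the oversized TOTAL_FORMS
            # makes the formset invalid.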
finally:
formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
def test_increase_hard_limit(self):
"""Can increase the built-in forms limit via a higher max_num."""
# reduce the default limit of 1000 temporarily for testing
_old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
try:
formsets.DEFAULT_MAX_NUM = 3
# for this form, we want a limit of 4
ChoiceFormSet = formset_factory(Choice, max_num=4)
formset = ChoiceFormSet(
{
'choices-TOTAL_FORMS': '4',
'choices-INITIAL_FORMS': '0',
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '4',
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
'choices-2-choice': 'Two',
'choices-2-votes': '2',
'choices-3-choice': 'Three',
'choices-3-votes': '3',
},
prefix='choices',
)
# Four forms are instantiated and no exception is raised
self.assertEqual(len(formset.forms), 4)
finally:
formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
def test_non_form_errors_run_full_clean(self):
# Regression test for #11160
# If non_form_errors() is called without calling is_valid() first,
# it should ensure that full_clean() is called.
class BaseCustomFormSet(BaseFormSet):
def clean(self):
raise ValidationError("This is a non-form error")
ChoiceFormSet = formset_factory(Choice, formset=BaseCustomFormSet)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertIsInstance(formset.non_form_errors(), ErrorList)
self.assertEqual(list(formset.non_form_errors()),
['This is a non-form error'])
def test_validate_max_ignores_forms_marked_for_deletion(self):
class CheckForm(Form):
field = IntegerField()
data = {
'check-TOTAL_FORMS': '2',
'check-INITIAL_FORMS': '0',
'check-MAX_NUM_FORMS': '1',
'check-0-field': '200',
'check-0-DELETE': '',
'check-1-field': '50',
'check-1-DELETE': 'on',
}
CheckFormSet = formset_factory(CheckForm, max_num=1, validate_max=True,
can_delete=True)
formset = CheckFormSet(data, prefix='check')
self.assertTrue(formset.is_valid())
def test_formset_total_error_count(self):
"""A valid formset should have 0 total errors."""
data = [ # formset_data, expected error count
([('Calexico', '100')], 0),
([('Calexico', '')], 1),
([('', 'invalid')], 2),
([('Calexico', '100'), ('Calexico', '')], 1),
([('Calexico', ''), ('Calexico', '')], 2),
]
for formset_data, expected_error_count in data:
formset = self.make_choiceformset(formset_data)
self.assertEqual(formset.total_error_count(), expected_error_count)
def test_formset_total_error_count_with_non_form_errors(self):
data = {
'choices-TOTAL_FORMS': '2', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MAX_NUM_FORMS': '2', # max number of forms - should be ignored
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
}
ChoiceFormSet = formset_factory(Choice, extra=1, max_num=1, validate_max=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertEqual(formset.total_error_count(), 1)
data['choices-1-votes'] = ''
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertEqual(formset.total_error_count(), 2)
def test_html_safe(self):
formset = self.make_choiceformset()
self.assertTrue(hasattr(formset, '__html__'))
self.assertEqual(force_text(formset), formset.__html__())
data = {
'choices-TOTAL_FORMS': '1', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
}
class Choice(Form):
choice = CharField()
votes = IntegerField()
ChoiceFormSet = formset_factory(Choice)
class FormsetAsFooTests(SimpleTestCase):
def test_as_table(self):
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertHTMLEqual(formset.as_table(), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="0" />
<tr><th>Choice:</th><td><input type="text" name="choices-0-choice" value="Calexico" /></td></tr>
<tr><th>Votes:</th><td><input type="number" name="choices-0-votes" value="100" /></td></tr>""")
def test_as_p(self):
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertHTMLEqual(formset.as_p(), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="0" />
<p>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></p>
<p>Votes: <input type="number" name="choices-0-votes" value="100" /></p>""")
def test_as_ul(self):
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertHTMLEqual(formset.as_ul(), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="0" />
<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>""")
# Regression test for #11418 #################################################
class ArticleForm(Form):
title = CharField()
pub_date = DateField()
ArticleFormSet = formset_factory(ArticleForm)
class TestIsBoundBehavior(SimpleTestCase):
def test_no_data_raises_validation_error(self):
with self.assertRaises(ValidationError):
ArticleFormSet({}).is_valid()
def test_with_management_data_attrs_work_fine(self):
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
}
formset = ArticleFormSet(data)
self.assertEqual(0, formset.initial_form_count())
self.assertEqual(1, formset.total_form_count())
self.assertTrue(formset.is_bound)
self.assertTrue(formset.forms[0].is_bound)
self.assertTrue(formset.is_valid())
self.assertTrue(formset.forms[0].is_valid())
self.assertEqual([{}], formset.cleaned_data)
def test_form_errors_are_caught_by_formset(self):
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-0-title': 'Test',
'form-0-pub_date': '1904-06-16',
'form-1-title': 'Test',
'form-1-pub_date': '', # <-- this date is missing but required
}
formset = ArticleFormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual([{}, {'pub_date': ['This field is required.']}], formset.errors)
def test_empty_forms_are_unbound(self):
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-0-title': 'Test',
'form-0-pub_date': '1904-06-16',
}
unbound_formset = ArticleFormSet()
bound_formset = ArticleFormSet(data)
empty_forms = []
empty_forms.append(unbound_formset.empty_form)
empty_forms.append(bound_formset.empty_form)
# Empty forms should be unbound
self.assertFalse(empty_forms[0].is_bound)
self.assertFalse(empty_forms[1].is_bound)
# The empty forms should be equal.
self.assertHTMLEqual(empty_forms[0].as_p(), empty_forms[1].as_p())
class TestEmptyFormSet(SimpleTestCase):
def test_empty_formset_is_valid(self):
"""Test that an empty formset still calls clean()"""
EmptyFsetWontValidateFormset = formset_factory(FavoriteDrinkForm, extra=0, formset=EmptyFsetWontValidate)
formset = EmptyFsetWontValidateFormset(data={'form-INITIAL_FORMS': '0', 'form-TOTAL_FORMS': '0'}, prefix="form")
formset2 = EmptyFsetWontValidateFormset(data={'form-INITIAL_FORMS': '0', 'form-TOTAL_FORMS': '1', 'form-0-name': 'bah'}, prefix="form")
self.assertFalse(formset.is_valid())
self.assertFalse(formset2.is_valid())
def test_empty_formset_media(self):
"""Make sure media is available on empty formset, refs #19545"""
class MediaForm(Form):
class Media:
js = ('some-file.js',)
self.assertIn('some-file.js', str(formset_factory(MediaForm, extra=0)().media))
def test_empty_formset_is_multipart(self):
"""Make sure `is_multipart()` works with empty formset, refs #19545"""
class FileForm(Form):
file = FileField()
self.assertTrue(formset_factory(FileForm, extra=0)().is_multipart())
|
DecipherOne/Troglodyte
|
refs/heads/master
|
Trog Build Dependencies/Python26/Lib/test/test_imgfile.py
|
150
|
#! /usr/bin/env python
"""Simple test script for imgfile.c
Roger E. Masse
"""
from test.test_support import verbose, unlink, findfile, import_module
imgfile = import_module('imgfile', deprecated=True)
import uu
def testimage(name):
"""Run through the imgfile's battery of possible methods
on the image passed in name.
"""
import sys
import os
outputfile = '/tmp/deleteme'
# try opening the name directly
try:
# This function returns a tuple (x, y, z) where x and y are the size
# of the image in pixels and z is the number of bytes per pixel. Only
# 3 byte RGB pixels and 1 byte greyscale pixels are supported.
sizes = imgfile.getsizes(name)
except imgfile.error:
# get a more qualified path component of the script...
if __name__ == '__main__':
ourname = sys.argv[0]
else: # ...or the full path of the module
ourname = sys.modules[__name__].__file__
parts = ourname.split(os.sep)
parts[-1] = name
name = os.sep.join(parts)
sizes = imgfile.getsizes(name)
if verbose:
print 'Opening test image: %s, sizes: %s' % (name, str(sizes))
# This function reads and decodes the image on the specified file,
# and returns it as a python string. The string has either 1 byte
# greyscale pixels or 4 byte RGBA pixels. The bottom left pixel
# is the first in the string. This format is suitable to pass
# to gl.lrectwrite, for instance.
image = imgfile.read(name)
# This function writes the RGB or greyscale data in data to
# image file file. x and y give the size of the image, z is
# 1 for 1 byte greyscale images or 3 for RGB images (which
# are stored as 4 byte values of which only the lower three
# bytes are used). These are the formats returned by gl.lrectread.
if verbose:
print 'Writing output file'
imgfile.write (outputfile, image, sizes[0], sizes[1], sizes[2])
if verbose:
print 'Opening scaled test image: %s, sizes: %s' % (name, str(sizes))
# This function is identical to read but it returns an image that
# is scaled to the given x and y sizes. If the filter and blur
# parameters are omitted scaling is done by simply dropping
# or duplicating pixels, so the result will be less than perfect,
# especially for computer-generated images. Alternatively,
# you can specify a filter to use to smoothen the image after
# scaling. The filter forms supported are 'impulse', 'box',
# 'triangle', 'quadratic' and 'gaussian'. If a filter is
# specified blur is an optional parameter specifying the
# blurriness of the filter. It defaults to 1.0. readscaled
# makes no attempt to keep the aspect ratio correct, so that
# is the users' responsibility.
if verbose:
print 'Filtering with "impulse"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'impulse', 2.0)
# This function sets a global flag which defines whether the
# scan lines of the image are read or written from bottom to
# top (flag is zero, compatible with SGI GL) or from top to
# bottom(flag is one, compatible with X). The default is zero.
if verbose:
print 'Switching to X compatibility'
imgfile.ttob (1)
if verbose:
print 'Filtering with "triangle"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'triangle', 3.0)
if verbose:
print 'Switching back to SGI compatibility'
imgfile.ttob (0)
if verbose: print 'Filtering with "quadratic"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'quadratic')
if verbose: print 'Filtering with "gaussian"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'gaussian', 1.0)
if verbose:
print 'Writing output file'
imgfile.write (outputfile, simage, sizes[0]/2, sizes[1]/2, sizes[2])
os.unlink(outputfile)
def test_main():
uu.decode(findfile('testrgb.uue'), 'test.rgb')
uu.decode(findfile('greyrgb.uue'), 'greytest.rgb')
# Test a 3 byte color image
testimage('test.rgb')
# Test a 1 byte greyscale image
testimage('greytest.rgb')
unlink('test.rgb')
unlink('greytest.rgb')
if __name__ == '__main__':
test_main()
|
dd00/commandergenius
|
refs/heads/dd00
|
project/jni/python/src/Lib/test/test_imgfile.py
|
150
|
#! /usr/bin/env python
"""Simple test script for imgfile.c
Roger E. Masse
"""
from test.test_support import verbose, unlink, findfile, import_module
imgfile = import_module('imgfile', deprecated=True)
import uu
def testimage(name):
"""Run through the imgfile's battery of possible methods
on the image passed in name.
"""
import sys
import os
outputfile = '/tmp/deleteme'
# try opening the name directly
try:
# This function returns a tuple (x, y, z) where x and y are the size
# of the image in pixels and z is the number of bytes per pixel. Only
# 3 byte RGB pixels and 1 byte greyscale pixels are supported.
sizes = imgfile.getsizes(name)
except imgfile.error:
# get a more qualified path component of the script...
if __name__ == '__main__':
ourname = sys.argv[0]
else: # ...or the full path of the module
ourname = sys.modules[__name__].__file__
parts = ourname.split(os.sep)
parts[-1] = name
name = os.sep.join(parts)
sizes = imgfile.getsizes(name)
if verbose:
print 'Opening test image: %s, sizes: %s' % (name, str(sizes))
# This function reads and decodes the image on the specified file,
# and returns it as a python string. The string has either 1 byte
# greyscale pixels or 4 byte RGBA pixels. The bottom left pixel
# is the first in the string. This format is suitable to pass
# to gl.lrectwrite, for instance.
image = imgfile.read(name)
# This function writes the RGB or greyscale data in data to
# image file file. x and y give the size of the image, z is
# 1 for 1 byte greyscale images or 3 for RGB images (which
# are stored as 4 byte values of which only the lower three
# bytes are used). These are the formats returned by gl.lrectread.
if verbose:
print 'Writing output file'
imgfile.write (outputfile, image, sizes[0], sizes[1], sizes[2])
if verbose:
print 'Opening scaled test image: %s, sizes: %s' % (name, str(sizes))
# This function is identical to read but it returns an image that
# is scaled to the given x and y sizes. If the filter and blur
# parameters are omitted scaling is done by simply dropping
# or duplicating pixels, so the result will be less than perfect,
# especially for computer-generated images. Alternatively,
# you can specify a filter to use to smoothen the image after
# scaling. The filter forms supported are 'impulse', 'box',
# 'triangle', 'quadratic' and 'gaussian'. If a filter is
# specified blur is an optional parameter specifying the
# blurriness of the filter. It defaults to 1.0. readscaled
# makes no attempt to keep the aspect ratio correct, so that
# is the users' responsibility.
if verbose:
print 'Filtering with "impulse"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'impulse', 2.0)
# This function sets a global flag which defines whether the
# scan lines of the image are read or written from bottom to
# top (flag is zero, compatible with SGI GL) or from top to
# bottom(flag is one, compatible with X). The default is zero.
if verbose:
print 'Switching to X compatibility'
imgfile.ttob (1)
if verbose:
print 'Filtering with "triangle"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'triangle', 3.0)
if verbose:
print 'Switching back to SGI compatibility'
imgfile.ttob (0)
if verbose: print 'Filtering with "quadratic"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'quadratic')
if verbose: print 'Filtering with "gaussian"'
simage = imgfile.readscaled (name, sizes[0]/2, sizes[1]/2, 'gaussian', 1.0)
if verbose:
print 'Writing output file'
imgfile.write (outputfile, simage, sizes[0]/2, sizes[1]/2, sizes[2])
os.unlink(outputfile)
def test_main():
uu.decode(findfile('testrgb.uue'), 'test.rgb')
uu.decode(findfile('greyrgb.uue'), 'greytest.rgb')
# Test a 3 byte color image
testimage('test.rgb')
# Test a 1 byte greyscale image
testimage('greytest.rgb')
unlink('test.rgb')
unlink('greytest.rgb')
if __name__ == '__main__':
test_main()
|
izhukov/ansible
|
refs/heads/devel
|
v2/ansible/playbook/vars_file.py
|
7690
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
mgoulish/qpid-dispatch
|
refs/heads/master
|
python/qpid_dispatch_internal/compat/ordereddict.py
|
6
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License
#
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# From http://code.activestate.com/recipes/576693/
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
# Note well: this class is only used for versions of python < 2.7
# Since 2.7 OrderDict is part of the collections module of the standard
# library. It does not need to be python3 compatible and can
# eventually be removed when python versions <= 2.6 are no longer supported.
#
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
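    # Illustration (editor's sketch): after od['a'] = 1 and od['b'] = 2 the
    # circular list looks like
    #     root   = [link_b, link_a, None]
    #     link_a = [root,   link_b, 'a']
    #     link_b = [link_a, root,   'b']
    # so __iter__ walks NEXT pointers from root and __reversed__ walks PREV.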
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
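# Editor's sketch (not part of the original recipe): a quick self-check of the
# documented behaviour when this module is run directly.
if __name__ == '__main__':
    od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
    assert od.keys() == ['a', 'b', 'c']           # insertion order is kept
    assert od.popitem() == ('c', 3)               # LIFO by default
    assert od.popitem(last=False) == ('a', 1)     # FIFO when last is false
    # Comparison to another OrderedDict is order-sensitive.
    assert OrderedDict([('a', 1), ('b', 2)]) != OrderedDict([('b', 2), ('a', 1)])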
|
tylertian/Openstack
|
refs/heads/master
|
openstack F/nova/nova/tests/scheduler/test_host_filters.py
|
5
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Host Filters.
"""
import httplib
import stubout
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova.openstack.common import jsonutils
from nova.scheduler import filters
from nova.scheduler.filters import extra_specs_ops
from nova.scheduler.filters.trusted_filter import AttestationService
from nova import test
from nova.tests.scheduler import fakes
from nova import utils
DATA = ''
def stub_out_https_backend(stubs):
"""
    Stubs out AttestationService._do_request() so that attestation calls
    return httplib.OK together with a fake response object whose read()
    method returns the module-level DATA string instead of contacting a
    real attestation server.
:param stubs: Set of stubout stubs
"""
class FakeHTTPResponse(object):
def read(self):
return DATA
def fake_do_request(self, *args, **kwargs):
return httplib.OK, FakeHTTPResponse()
stubs.Set(AttestationService, '_do_request', fake_do_request)
class TestFilter(filters.BaseHostFilter):
pass
class TestBogusFilter(object):
"""Class that doesn't inherit from BaseHostFilter"""
pass
class ExtraSpecsOpsTestCase(test.TestCase):
def _do_extra_specs_ops_test(self, value, req, matches):
assertion = self.assertTrue if matches else self.assertFalse
assertion(extra_specs_ops.match(value, req))
def test_extra_specs_matches_simple(self):
self._do_extra_specs_ops_test(
value='1',
req='1',
matches=True)
def test_extra_specs_fails_simple(self):
self._do_extra_specs_ops_test(
value='',
req='1',
matches=False)
def test_extra_specs_fails_simple2(self):
self._do_extra_specs_ops_test(
value='3',
req='1',
matches=False)
def test_extra_specs_fails_simple3(self):
self._do_extra_specs_ops_test(
value='222',
req='2',
matches=False)
def test_extra_specs_fails_with_bogus_ops(self):
self._do_extra_specs_ops_test(
value='4',
req='> 2',
matches=False)
def test_extra_specs_matches_with_op_eq(self):
self._do_extra_specs_ops_test(
value='123',
req='= 123',
matches=True)
def test_extra_specs_matches_with_op_eq2(self):
self._do_extra_specs_ops_test(
value='124',
req='= 123',
matches=True)
def test_extra_specs_fails_with_op_eq(self):
self._do_extra_specs_ops_test(
value='34',
req='= 234',
matches=False)
def test_extra_specs_fails_with_op_eq3(self):
self._do_extra_specs_ops_test(
value='34',
req='=',
matches=False)
def test_extra_specs_matches_with_op_seq(self):
self._do_extra_specs_ops_test(
value='123',
req='s== 123',
matches=True)
def test_extra_specs_fails_with_op_seq(self):
self._do_extra_specs_ops_test(
value='1234',
req='s== 123',
matches=False)
def test_extra_specs_matches_with_op_sneq(self):
self._do_extra_specs_ops_test(
value='1234',
req='s!= 123',
matches=True)
def test_extra_specs_fails_with_op_sneq(self):
self._do_extra_specs_ops_test(
value='123',
req='s!= 123',
matches=False)
def test_extra_specs_fails_with_op_sge(self):
self._do_extra_specs_ops_test(
value='1000',
req='s>= 234',
matches=False)
def test_extra_specs_fails_with_op_sle(self):
self._do_extra_specs_ops_test(
value='1234',
req='s<= 1000',
matches=False)
def test_extra_specs_fails_with_op_sl(self):
self._do_extra_specs_ops_test(
value='2',
req='s< 12',
matches=False)
def test_extra_specs_fails_with_op_sg(self):
self._do_extra_specs_ops_test(
value='12',
req='s> 2',
matches=False)
def test_extra_specs_matches_with_op_in(self):
self._do_extra_specs_ops_test(
value='12311321',
req='<in> 11',
matches=True)
def test_extra_specs_matches_with_op_in2(self):
self._do_extra_specs_ops_test(
value='12311321',
req='<in> 12311321',
matches=True)
def test_extra_specs_matches_with_op_in3(self):
self._do_extra_specs_ops_test(
value='12311321',
req='<in> 12311321 <in>',
matches=True)
def test_extra_specs_fails_with_op_in(self):
self._do_extra_specs_ops_test(
value='12310321',
req='<in> 11',
matches=False)
def test_extra_specs_fails_with_op_in2(self):
self._do_extra_specs_ops_test(
value='12310321',
req='<in> 11 <in>',
matches=False)
def test_extra_specs_matches_with_op_or(self):
self._do_extra_specs_ops_test(
value='12',
req='<or> 11 <or> 12',
matches=True)
def test_extra_specs_matches_with_op_or2(self):
self._do_extra_specs_ops_test(
value='12',
req='<or> 11 <or> 12 <or>',
matches=True)
def test_extra_specs_fails_with_op_or(self):
self._do_extra_specs_ops_test(
value='13',
req='<or> 11 <or> 12',
matches=False)
def test_extra_specs_fails_with_op_or2(self):
self._do_extra_specs_ops_test(
value='13',
req='<or> 11 <or> 12 <or>',
matches=False)
def test_extra_specs_matches_with_op_le(self):
self._do_extra_specs_ops_test(
value='2',
req='<= 10',
matches=True)
def test_extra_specs_fails_with_op_le(self):
self._do_extra_specs_ops_test(
value='3',
req='<= 2',
matches=False)
def test_extra_specs_matches_with_op_ge(self):
self._do_extra_specs_ops_test(
value='3',
req='>= 1',
matches=True)
def test_extra_specs_fails_with_op_ge(self):
self._do_extra_specs_ops_test(
value='2',
req='>= 3',
matches=False)
class HostFiltersTestCase(test.TestCase):
"""Test case for host filters."""
def setUp(self):
super(HostFiltersTestCase, self).setUp()
self.stubs = stubout.StubOutForTesting()
stub_out_https_backend(self.stubs)
self.context = context.RequestContext('fake', 'fake')
self.json_query = jsonutils.dumps(
['and', ['>=', '$free_ram_mb', 1024],
['>=', '$free_disk_mb', 200 * 1024]])
# This has a side effect of testing 'get_filter_classes'
# when specifying a method (in this case, our standard filters)
classes = filters.get_filter_classes(
['nova.scheduler.filters.standard_filters'])
self.class_map = {}
for cls in classes:
self.class_map[cls.__name__] = cls
def test_get_filter_classes(self):
classes = filters.get_filter_classes(
['nova.tests.scheduler.test_host_filters.TestFilter'])
self.assertEqual(len(classes), 1)
self.assertEqual(classes[0].__name__, 'TestFilter')
# Test a specific class along with our standard filters
classes = filters.get_filter_classes(
['nova.tests.scheduler.test_host_filters.TestFilter',
'nova.scheduler.filters.standard_filters'])
self.assertEqual(len(classes), 1 + len(self.class_map))
def test_get_filter_classes_raises_on_invalid_classes(self):
self.assertRaises(ImportError,
filters.get_filter_classes,
['nova.tests.scheduler.test_host_filters.NoExist'])
self.assertRaises(exception.ClassNotFound,
filters.get_filter_classes,
['nova.tests.scheduler.test_host_filters.TestBogusFilter'])
def test_all_host_filter(self):
filt_cls = self.class_map['AllHostsFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(filt_cls.host_passes(host, {}))
def _stub_service_is_up(self, ret_value):
def fake_service_is_up(service):
return ret_value
self.stubs.Set(utils, 'service_is_up', fake_service_is_up)
def test_affinity_different_filter_passes(self):
filt_cls = self.class_map['DifferentHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host2'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'different_host': [instance_uuid], }}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_affinity_different_filter_no_list_passes(self):
filt_cls = self.class_map['DifferentHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host2'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'different_host': instance_uuid}}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_affinity_different_filter_fails(self):
filt_cls = self.class_map['DifferentHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host1'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'different_host': [instance_uuid], }}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_affinity_different_filter_handles_none(self):
filt_cls = self.class_map['DifferentHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host2'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': None}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_affinity_same_filter_no_list_passes(self):
filt_cls = self.class_map['SameHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host1'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'same_host': instance_uuid}}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_affinity_same_filter_passes(self):
filt_cls = self.class_map['SameHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host1'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'same_host': [instance_uuid], }}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_affinity_same_filter_fails(self):
filt_cls = self.class_map['SameHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host2'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'same_host': [instance_uuid], }}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_affinity_same_filter_handles_none(self):
filt_cls = self.class_map['SameHostFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
instance = fakes.FakeInstance(context=self.context,
params={'host': 'host2'})
instance_uuid = instance.uuid
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': None}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_affinity_simple_cidr_filter_passes(self):
filt_cls = self.class_map['SimpleCIDRAffinityFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
host.capabilities = {'host_ip': '10.8.1.1'}
affinity_ip = "10.8.1.100"
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'cidr': '/24',
'build_near_host_ip': affinity_ip}}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_affinity_simple_cidr_filter_fails(self):
filt_cls = self.class_map['SimpleCIDRAffinityFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
host.capabilities = {'host_ip': '10.8.1.1'}
affinity_ip = "10.8.1.100"
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': {
'cidr': '/32',
'build_near_host_ip': affinity_ip}}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_affinity_simple_cidr_filter_handles_none(self):
filt_cls = self.class_map['SimpleCIDRAffinityFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
affinity_ip = flags.FLAGS.my_ip.split('.')[0:3]
affinity_ip.append('100')
affinity_ip = str.join('.', affinity_ip)
filter_properties = {'context': self.context.elevated(),
'scheduler_hints': None}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_compute_filter_passes(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ComputeFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024, 'capabilities': capabilities,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_type_filter(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['TypeAffinityFilter']()
filter_properties = {'context': self.context,
'instance_type': {'id': 1}}
filter2_properties = {'context': self.context,
'instance_type': {'id': 2}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('fake_host', 'compute',
{'capabilities': capabilities,
'service': service})
#True since empty
self.assertTrue(filt_cls.host_passes(host, filter_properties))
fakes.FakeInstance(context=self.context,
params={'host': 'fake_host', 'instance_type_id': 1})
#True since same type
self.assertTrue(filt_cls.host_passes(host, filter_properties))
#False since different type
self.assertFalse(filt_cls.host_passes(host, filter2_properties))
#False since node not homogeneous
fakes.FakeInstance(context=self.context,
params={'host': 'fake_host', 'instance_type_id': 2})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_aggregate_type_filter(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['AggregateTypeAffinityFilter']()
filter_properties = {'context': self.context,
'instance_type': {'name': 'fake1'}}
filter2_properties = {'context': self.context,
'instance_type': {'name': 'fake2'}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('fake_host', 'compute',
{'capabilities': capabilities,
'service': service})
#True since no aggregates
self.assertTrue(filt_cls.host_passes(host, filter_properties))
        # True since the type matches the aggregate metadata
self._create_aggregate_with_host(name='fake_aggregate',
hosts=['fake_host'], metadata={'instance_type': 'fake1'})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
        # False since the type does not match the aggregate metadata
self.assertFalse(filt_cls.host_passes(host, filter2_properties))
def test_ram_filter_fails_on_memory(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['RamFilter']()
self.flags(ram_allocation_ratio=1.0)
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1023, 'total_usable_ram_mb': 1024,
'capabilities': capabilities, 'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_ram_filter_passes(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['RamFilter']()
self.flags(ram_allocation_ratio=1.0)
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024, 'total_usable_ram_mb': 1024,
'capabilities': capabilities, 'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_ram_filter_oversubscribe(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['RamFilter']()
self.flags(ram_allocation_ratio=2.0)
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': -1024, 'total_usable_ram_mb': 2048,
'capabilities': capabilities, 'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
self.assertEqual(2048 * 2.0, host.limits['memory_mb'])
def test_disk_filter_passes(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['DiskFilter']()
self.flags(disk_allocation_ratio=1.0)
filter_properties = {'instance_type': {'root_gb': 1,
'ephemeral_gb': 1}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_disk_mb': 11 * 1024, 'total_usable_disk_gb': 13,
'capabilities': capabilities, 'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_disk_filter_fails(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['DiskFilter']()
self.flags(disk_allocation_ratio=1.0)
        filter_properties = {'instance_type': {'root_gb': 11,
'ephemeral_gb': 1}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_disk_mb': 11 * 1024, 'total_usable_disk_gb': 13,
'capabilities': capabilities, 'service': service})
        self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_disk_filter_oversubscribe(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['DiskFilter']()
self.flags(disk_allocation_ratio=10.0)
filter_properties = {'instance_type': {'root_gb': 100,
'ephemeral_gb': 19}}
capabilities = {'enabled': True}
service = {'disabled': False}
# 1GB used... so 119GB allowed...
host = fakes.FakeHostState('host1', 'compute',
{'free_disk_mb': 11 * 1024, 'total_usable_disk_gb': 12,
'capabilities': capabilities, 'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
self.assertEqual(12 * 10.0, host.limits['disk_gb'])
def test_disk_filter_oversubscribe_fail(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['DiskFilter']()
self.flags(disk_allocation_ratio=10.0)
filter_properties = {'instance_type': {'root_gb': 100,
'ephemeral_gb': 20}}
capabilities = {'enabled': True}
service = {'disabled': False}
# 1GB used... so 119GB allowed...
host = fakes.FakeHostState('host1', 'compute',
{'free_disk_mb': 11 * 1024, 'total_usable_disk_gb': 12,
'capabilities': capabilities, 'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_compute_filter_fails_on_service_disabled(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ComputeFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': True}
service = {'disabled': True}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024, 'capabilities': capabilities,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_compute_filter_fails_on_service_down(self):
self._stub_service_is_up(False)
filt_cls = self.class_map['ComputeFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': True}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024, 'capabilities': capabilities,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_compute_filter_fails_on_capability_disabled(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ComputeFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': False}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024, 'capabilities': capabilities,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_compute_filter_passes_on_volume(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ComputeFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024}}
capabilities = {'enabled': False}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'volume',
{'free_ram_mb': 1024, 'capabilities': capabilities,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_compute_filter_passes_on_no_instance_type(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ComputeFilter']()
filter_properties = {}
capabilities = {'enabled': False}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024, 'capabilities': capabilities,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_image_properties_filter_passes_same_inst_props(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ImagePropertiesFilter']()
        img_props = {'properties': {'architecture': 'x86_64',
'hypervisor_type': 'kvm',
'vm_mode': 'hvm'}}
filter_properties = {'request_spec': {'image': img_props}}
capabilities = {'enabled': True,
'supported_instances': [
('x86_64', 'kvm', 'hvm')]}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': capabilities})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_image_properties_filter_fails_different_inst_props(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ImagePropertiesFilter']()
img_props = {'properties': {'architecture': 'arm',
'hypervisor_type': 'qemu',
'vm_mode': 'hvm'}}
filter_properties = {'request_spec': {'image': img_props}}
capabilities = {'enabled': True,
'supported_instances': [
('x86_64', 'kvm', 'hvm')]}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': capabilities})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_image_properties_filter_passes_partial_inst_props(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ImagePropertiesFilter']()
img_props = {'properties': {'architecture': 'x86_64',
'vm_mode': 'hvm'}}
filter_properties = {'request_spec': {'image': img_props}}
capabilities = {'enabled': True,
'supported_instances': [
('x86_64', 'kvm', 'hvm')]}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': capabilities})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_image_properties_filter_fails_partial_inst_props(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ImagePropertiesFilter']()
img_props = {'properties': {'architecture': 'x86_64',
'vm_mode': 'hvm'}}
filter_properties = {'request_spec': {'image': img_props}}
capabilities = {'enabled': True,
'supported_instances': [
('x86_64', 'xen', 'xen')]}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': capabilities})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_image_properties_filter_passes_without_inst_props(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ImagePropertiesFilter']()
filter_properties = {'request_spec': {}}
capabilities = {'enabled': True,
'supported_instances': [
('x86_64', 'kvm', 'hvm')]}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': capabilities})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_image_properties_filter_fails_without_host_props(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['ImagePropertiesFilter']()
img_props = {'properties': {'architecture': 'x86_64',
'hypervisor_type': 'kvm',
'vm_mode': 'hvm'}}
filter_properties = {'request_spec': {'image': img_props}}
capabilities = {'enabled': True}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': capabilities})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def _do_test_compute_filter_extra_specs(self, ecaps, especs, passes):
self._stub_service_is_up(True)
filt_cls = self.class_map['ComputeCapabilitiesFilter']()
capabilities = {'enabled': True}
capabilities.update(ecaps)
service = {'disabled': False}
filter_properties = {'instance_type': {'memory_mb': 1024,
'extra_specs': especs}}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024, 'capabilities': capabilities,
'service': service})
assertion = self.assertTrue if passes else self.assertFalse
assertion(filt_cls.host_passes(host, filter_properties))
def test_compute_filter_passes_extra_specs_simple(self):
self._do_test_compute_filter_extra_specs(
ecaps={'opt1': '1', 'opt2': '2'},
especs={'opt1': '1', 'opt2': '2', 'trust:trusted_host': 'true'},
passes=True)
def test_compute_filter_fails_extra_specs_simple(self):
self._do_test_compute_filter_extra_specs(
ecaps={'opt1': '1', 'opt2': '2'},
especs={'opt1': '1', 'opt2': '222', 'trust:trusted_host': 'true'},
passes=False)
def test_aggregate_filter_passes_no_extra_specs(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['AggregateInstanceExtraSpecsFilter']()
capabilities = {'enabled': True, 'opt1': 1, 'opt2': 2}
filter_properties = {'context': self.context, 'instance_type':
{'memory_mb': 1024}}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': capabilities})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def _create_aggregate_with_host(self, name='fake_aggregate',
metadata=None,
hosts=['host1']):
values = {'name': name,
'availability_zone': 'fake_avail_zone', }
result = db.aggregate_create(self.context.elevated(), values, metadata)
for host in hosts:
db.aggregate_host_add(self.context.elevated(), result.id, host)
return result
def _do_test_aggregate_filter_extra_specs(self, emeta, especs, passes):
self._stub_service_is_up(True)
filt_cls = self.class_map['AggregateInstanceExtraSpecsFilter']()
self._create_aggregate_with_host(name='fake2', metadata=emeta)
filter_properties = {'context': self.context,
'instance_type': {'memory_mb': 1024, 'extra_specs': especs}}
host = fakes.FakeHostState('host1', 'compute', {'free_ram_mb': 1024})
assertion = self.assertTrue if passes else self.assertFalse
assertion(filt_cls.host_passes(host, filter_properties))
def test_aggregate_filter_fails_extra_specs_deleted_host(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['AggregateInstanceExtraSpecsFilter']()
extra_specs = {'opt1': 's== 1', 'opt2': 's== 2',
'trust:trusted_host': 'true'}
self._create_aggregate_with_host(metadata={'opt1': '1'})
agg2 = self._create_aggregate_with_host(name='fake2',
metadata={'opt2': '2'})
filter_properties = {'context': self.context, 'instance_type':
{'memory_mb': 1024, 'extra_specs': extra_specs}}
host = fakes.FakeHostState('host1', 'compute', {'free_ram_mb': 1024})
db.aggregate_host_delete(self.context.elevated(), agg2.id, 'host1')
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_aggregate_filter_passes_extra_specs_simple(self):
self._do_test_aggregate_filter_extra_specs(
emeta={'opt1': '1', 'opt2': '2'},
especs={'opt1': '1', 'opt2': '2',
'trust:trusted_host': 'true'},
passes=True)
def test_aggregate_filter_fails_extra_specs_simple(self):
self._do_test_aggregate_filter_extra_specs(
emeta={'opt1': '1', 'opt2': '2'},
especs={'opt1': '1', 'opt2': '222',
'trust:trusted_host': 'true'},
passes=False)
def test_isolated_hosts_fails_isolated_on_non_isolated(self):
self.flags(isolated_images=['isolated'], isolated_hosts=['isolated'])
filt_cls = self.class_map['IsolatedHostsFilter']()
filter_properties = {
'request_spec': {
'instance_properties': {'image_ref': 'isolated'}
}
}
host = fakes.FakeHostState('non-isolated', 'compute', {})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_isolated_hosts_fails_non_isolated_on_isolated(self):
self.flags(isolated_images=['isolated'], isolated_hosts=['isolated'])
filt_cls = self.class_map['IsolatedHostsFilter']()
filter_properties = {
'request_spec': {
'instance_properties': {'image_ref': 'non-isolated'}
}
}
host = fakes.FakeHostState('isolated', 'compute', {})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_isolated_hosts_passes_isolated_on_isolated(self):
self.flags(isolated_images=['isolated'], isolated_hosts=['isolated'])
filt_cls = self.class_map['IsolatedHostsFilter']()
filter_properties = {
'request_spec': {
'instance_properties': {'image_ref': 'isolated'}
}
}
host = fakes.FakeHostState('isolated', 'compute', {})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_isolated_hosts_passes_non_isolated_on_non_isolated(self):
self.flags(isolated_images=['isolated'], isolated_hosts=['isolated'])
filt_cls = self.class_map['IsolatedHostsFilter']()
filter_properties = {
'request_spec': {
'instance_properties': {'image_ref': 'non-isolated'}
}
}
host = fakes.FakeHostState('non-isolated', 'compute', {})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
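    # The JsonFilter tests below use a small JSON query language, inferred
    # from the cases themselves: a query is a list whose first element is an
    # operator ('=', '<', '>', '<=', '>=', 'in', 'not', 'or', 'and') followed
    # by its arguments; '$'-prefixed strings such as '$free_ram_mb' or
    # '$capabilities.enabled' resolve to host state values, and queries may
    # be nested.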
def test_json_filter_passes(self):
filt_cls = self.class_map['JsonFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024,
'root_gb': 200,
'ephemeral_gb': 0},
'scheduler_hints': {'query': self.json_query}}
capabilities = {'enabled': True}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024,
'free_disk_mb': 200 * 1024,
'capabilities': capabilities})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_json_filter_passes_with_no_query(self):
filt_cls = self.class_map['JsonFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024,
'root_gb': 200,
'ephemeral_gb': 0}}
capabilities = {'enabled': True}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 0,
'free_disk_mb': 0,
'capabilities': capabilities})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_json_filter_fails_on_memory(self):
filt_cls = self.class_map['JsonFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024,
'root_gb': 200,
'ephemeral_gb': 0},
'scheduler_hints': {'query': self.json_query}}
capabilities = {'enabled': True}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1023,
'free_disk_mb': 200 * 1024,
'capabilities': capabilities})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_fails_on_disk(self):
filt_cls = self.class_map['JsonFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024,
'root_gb': 200,
'ephemeral_gb': 0},
'scheduler_hints': {'query': self.json_query}}
capabilities = {'enabled': True}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024,
'free_disk_mb': (200 * 1024) - 1,
'capabilities': capabilities})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_fails_on_caps_disabled(self):
filt_cls = self.class_map['JsonFilter']()
json_query = jsonutils.dumps(
['and', ['>=', '$free_ram_mb', 1024],
['>=', '$free_disk_mb', 200 * 1024],
'$capabilities.enabled'])
filter_properties = {'instance_type': {'memory_mb': 1024,
'root_gb': 200,
'ephemeral_gb': 0},
'scheduler_hints': {'query': json_query}}
capabilities = {'enabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024,
'free_disk_mb': 200 * 1024,
'capabilities': capabilities})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_fails_on_service_disabled(self):
filt_cls = self.class_map['JsonFilter']()
json_query = jsonutils.dumps(
['and', ['>=', '$free_ram_mb', 1024],
['>=', '$free_disk_mb', 200 * 1024],
['not', '$service.disabled']])
filter_properties = {'instance_type': {'memory_mb': 1024,
'local_gb': 200},
'scheduler_hints': {'query': json_query}}
capabilities = {'enabled': True}
service = {'disabled': True}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 1024,
'free_disk_mb': 200 * 1024,
'capabilities': capabilities})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_happy_day(self):
"""Test json filter more thoroughly"""
filt_cls = self.class_map['JsonFilter']()
raw = ['and',
'$capabilities.enabled',
['=', '$capabilities.opt1', 'match'],
['or',
['and',
['<', '$free_ram_mb', 30],
['<', '$free_disk_mb', 300]],
['and',
['>', '$free_ram_mb', 30],
['>', '$free_disk_mb', 300]]]]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
# Passes
capabilities = {'enabled': True, 'opt1': 'match'}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 10,
'free_disk_mb': 200,
'capabilities': capabilities,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
# Passes
capabilities = {'enabled': True, 'opt1': 'match'}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 40,
'free_disk_mb': 400,
'capabilities': capabilities,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
# Fails due to capabilities being disabled
capabilities = {'enabled': False, 'opt1': 'match'}
service = {'disabled': False}
        host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 40,
'free_disk_mb': 400,
'capabilities': capabilities,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
# Fails due to being exact memory/disk we don't want
capabilities = {'enabled': True, 'opt1': 'match'}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 30,
'free_disk_mb': 300,
'capabilities': capabilities,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
# Fails due to memory lower but disk higher
capabilities = {'enabled': True, 'opt1': 'match'}
service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 20,
'free_disk_mb': 400,
'capabilities': capabilities,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
# Fails due to capabilities 'opt1' not equal
capabilities = {'enabled': True, 'opt1': 'no-match'}
        service = {'disabled': False}
host = fakes.FakeHostState('host1', 'compute',
{'free_ram_mb': 20,
'free_disk_mb': 400,
'capabilities': capabilities,
'service': service})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_basic_operators(self):
filt_cls = self.class_map['JsonFilter']()
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': {'enabled': True}})
# (operator, arguments, expected_result)
ops_to_test = [
['=', [1, 1], True],
['=', [1, 2], False],
['<', [1, 2], True],
['<', [1, 1], False],
['<', [2, 1], False],
['>', [2, 1], True],
['>', [2, 2], False],
['>', [2, 3], False],
['<=', [1, 2], True],
['<=', [1, 1], True],
['<=', [2, 1], False],
['>=', [2, 1], True],
['>=', [2, 2], True],
['>=', [2, 3], False],
['in', [1, 1], True],
['in', [1, 1, 2, 3], True],
['in', [4, 1, 2, 3], False],
['not', [True], False],
['not', [False], True],
['or', [True, False], True],
['or', [False, False], False],
['and', [True, True], True],
['and', [False, False], False],
['and', [True, False], False],
# Nested ((True or False) and (2 > 1)) == Passes
['and', [['or', True, False], ['>', 2, 1]], True]]
for (op, args, expected) in ops_to_test:
raw = [op] + args
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertEqual(expected,
filt_cls.host_passes(host, filter_properties))
# This results in [False, True, False, True] and if any are True
# then it passes...
raw = ['not', True, False, True, False]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
# This results in [False, False, False] and if any are True
# then it passes...which this doesn't
raw = ['not', True, True, True]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_unknown_operator_raises(self):
filt_cls = self.class_map['JsonFilter']()
raw = ['!=', 1, 2]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': {'enabled': True}})
self.assertRaises(KeyError,
filt_cls.host_passes, host, filter_properties)
def test_json_filter_empty_filters_pass(self):
filt_cls = self.class_map['JsonFilter']()
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': {'enabled': True}})
raw = []
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
raw = {}
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_json_filter_invalid_num_arguments_fails(self):
filt_cls = self.class_map['JsonFilter']()
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': {'enabled': True}})
raw = ['>', ['and', ['or', ['not', ['<', ['>=', ['<=', ['in', ]]]]]]]]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
raw = ['>', 1]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_json_filter_unknown_variable_ignored(self):
filt_cls = self.class_map['JsonFilter']()
host = fakes.FakeHostState('host1', 'compute',
{'capabilities': {'enabled': True}})
raw = ['=', '$........', 1, 1]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
raw = ['=', '$foo', 2, 2]
filter_properties = {
'scheduler_hints': {
'query': jsonutils.dumps(raw),
},
}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_trusted_filter_default_passes(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['TrustedFilter']()
filter_properties = {'instance_type': {'memory_mb': 1024}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_trusted_filter_trusted_and_trusted_passes(self):
global DATA
DATA = '{"hosts":[{"host_name":"host1","trust_lvl":"trusted"}]}'
self._stub_service_is_up(True)
filt_cls = self.class_map['TrustedFilter']()
extra_specs = {'trust:trusted_host': 'trusted'}
filter_properties = {'instance_type': {'memory_mb': 1024,
'extra_specs': extra_specs}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_trusted_filter_trusted_and_untrusted_fails(self):
global DATA
DATA = '{"hosts":[{"host_name":"host1","trust_lvl":"untrusted"}]}'
self._stub_service_is_up(True)
filt_cls = self.class_map['TrustedFilter']()
extra_specs = {'trust:trusted_host': 'trusted'}
filter_properties = {'instance_type': {'memory_mb': 1024,
'extra_specs': extra_specs}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_trusted_filter_untrusted_and_trusted_fails(self):
global DATA
DATA = '{"hosts":[{"host_name":"host1","trust_lvl":"trusted"}]}'
self._stub_service_is_up(True)
filt_cls = self.class_map['TrustedFilter']()
extra_specs = {'trust:trusted_host': 'untrusted'}
filter_properties = {'instance_type': {'memory_mb': 1024,
'extra_specs': extra_specs}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
def test_trusted_filter_untrusted_and_untrusted_passes(self):
global DATA
DATA = '{"hosts":[{"host_name":"host1","trust_lvl":"untrusted"}]}'
self._stub_service_is_up(True)
filt_cls = self.class_map['TrustedFilter']()
extra_specs = {'trust:trusted_host': 'untrusted'}
filter_properties = {'instance_type': {'memory_mb': 1024,
'extra_specs': extra_specs}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_core_filter_passes(self):
filt_cls = self.class_map['CoreFilter']()
filter_properties = {'instance_type': {'vcpus': 1}}
self.flags(cpu_allocation_ratio=2)
host = fakes.FakeHostState('host1', 'compute',
{'vcpus_total': 4, 'vcpus_used': 7})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_core_filter_fails_safe(self):
filt_cls = self.class_map['CoreFilter']()
filter_properties = {'instance_type': {'vcpus': 1}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_core_filter_fails(self):
filt_cls = self.class_map['CoreFilter']()
filter_properties = {'instance_type': {'vcpus': 1}}
self.flags(cpu_allocation_ratio=2)
host = fakes.FakeHostState('host1', 'compute',
{'vcpus_total': 4, 'vcpus_used': 8})
self.assertFalse(filt_cls.host_passes(host, filter_properties))
@staticmethod
def _make_zone_request(zone, is_admin=False):
ctxt = context.RequestContext('fake', 'fake', is_admin=is_admin)
return {
'context': ctxt,
'request_spec': {
'instance_properties': {
'availability_zone': zone
}
}
}
def test_availability_zone_filter_same(self):
filt_cls = self.class_map['AvailabilityZoneFilter']()
service = {'availability_zone': 'nova'}
request = self._make_zone_request('nova')
host = fakes.FakeHostState('host1', 'compute', {'service': service})
self.assertTrue(filt_cls.host_passes(host, request))
def test_availability_zone_filter_different(self):
filt_cls = self.class_map['AvailabilityZoneFilter']()
service = {'availability_zone': 'nova'}
request = self._make_zone_request('bad')
host = fakes.FakeHostState('host1', 'compute', {'service': service})
self.assertFalse(filt_cls.host_passes(host, request))
def test_retry_filter_disabled(self):
"""Test case where retry/re-scheduling is disabled"""
filt_cls = self.class_map['RetryFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
filter_properties = {}
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_retry_filter_pass(self):
"""Host not previously tried"""
filt_cls = self.class_map['RetryFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
retry = dict(num_attempts=1, hosts=['host2', 'host3'])
filter_properties = dict(retry=retry)
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_retry_filter_fail(self):
"""Host was already tried"""
filt_cls = self.class_map['RetryFilter']()
host = fakes.FakeHostState('host1', 'compute', {})
retry = dict(num_attempts=1, hosts=['host3', 'host1'])
filter_properties = dict(retry=retry)
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
thinkle/gourmet
|
refs/heads/master
|
gourmet/plugins/email_plugin/__init__.py
|
1
|
from . import emailer_plugin
plugins = [emailer_plugin.EmailRecipePlugin]
|
sangwon03/TizenRT
|
refs/heads/master
|
external/iotivity/iotivity_1.2-rel/extlibs/gtest/gtest-1.7.0/test/gtest_shuffle_test.py
|
3023
|
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
return '--gtest_shuffle'
def RandomSeedFlag(n):
return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
"""Runs the test program and returns a list of test lists.
Args:
extra_env: a map from environment variables to their values
args: command line flags to pass to gtest_shuffle_test_
Returns:
A list where the i-th element is the list of tests run in the i-th
test iteration.
"""
test_iterations = []
for line in RunAndReturnOutput(extra_env, args).split('\n'):
if line.startswith('----'):
tests = []
test_iterations.append(tests)
elif line.strip():
tests.append(line.strip()) # 'TestCaseName.TestName'
return test_iterations
def GetTestCases(tests):
"""Returns a list of test cases in the given full test names.
Args:
tests: a list of full test names
Returns:
A list of test cases from 'tests', in their original order.
    Duplicates are removed; only the first occurrence of each test case
    is kept.
"""
test_cases = []
for test in tests:
test_case = test.split('.')[0]
    if test_case not in test_cases:
test_cases.append(test_case)
return test_cases
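# For example (illustrative only):
#   GetTestCases(['A.one', 'A.two', 'B.one']) returns ['A', 'B'].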
def CalculateTestLists():
"""Calculates the list of tests run under different flags."""
if not ALL_TESTS:
ALL_TESTS.extend(
GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[])[0])
if not SHUFFLED_ALL_TESTS:
SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
{}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_ACTIVE_TESTS:
SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_FILTERED_TESTS:
SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
"""Tests test shuffling."""
def setUp(self):
CalculateTestLists()
def testShufflePreservesNumberOfTests(self):
self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
SHUFFLED_FILTERED_TESTS)
self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
SHUFFLED_SHARDED_TESTS)
def testShuffleChangesTestCaseOrder(self):
self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
GetTestCases(SHUFFLED_ALL_TESTS))
self.assert_(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
GetTestCases(SHUFFLED_ACTIVE_TESTS))
self.assert_(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
GetTestCases(SHUFFLED_FILTERED_TESTS))
self.assert_(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
GetTestCases(SHUFFLED_SHARDED_TESTS))
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
'%s appears more than once' % (test,))
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
for test in SHARDED_TESTS:
self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
self.assert_(not non_death_test_found,
'%s appears after a non-death test' % (test,))
else:
non_death_test_found = True
def _VerifyTestCasesDoNotInterleave(self, tests):
test_cases = []
for test in tests:
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
self.assertEqual(1, test_cases.count(test_case),
'Test case %s is not grouped together in %s' %
(test_case, tests))
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
def testShuffleRestoresOrderAfterEachIteration(self):
# Get the test lists in all 3 iterations, using random seed 1, 2,
# and 3 respectively. Google Test picks a different seed in each
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
[tests_with_seed1] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
[tests_with_seed2] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(2)])
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
[tests_with_seed3] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(3)])
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
self.assert_(tests_in_iteration1 != tests_in_iteration2,
tests_in_iteration1)
self.assert_(tests_in_iteration1 != tests_in_iteration3,
tests_in_iteration1)
self.assert_(tests_in_iteration2 != tests_in_iteration3,
tests_in_iteration2)
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
[tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '0'},
[ShuffleFlag(), RandomSeedFlag(1)])
[tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(20)])
[tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '2'},
[ShuffleFlag(), RandomSeedFlag(25)])
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
sorted_active_tests.extend(ACTIVE_TESTS)
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
gtest_test_utils.Main()
|
sosy-lab/benchexec
|
refs/heads/master
|
benchexec/intel_cpu_energy.py
|
3
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import collections
import logging
import os
import subprocess
import signal
import re
from benchexec.util import find_executable2
from decimal import Decimal
DOMAIN_PACKAGE = "package"
DOMAIN_CORE = "core"
DOMAIN_UNCORE = "uncore"
DOMAIN_DRAM = "dram"
class EnergyMeasurement(object):
def __init__(self, executable):
self._executable = executable
self._measurement_process = None
@classmethod
def create_if_supported(cls):
executable = find_executable2("cpu-energy-meter")
if executable is None: # not available on current system
logging.debug(
"Energy measurement not available because cpu-energy-meter binary could not be found."
)
return None
return cls(executable)
def start(self):
"""Starts the external measurement program."""
assert (
not self.is_running()
), "Attempted to start an energy measurement while one was already running."
self._measurement_process = subprocess.Popen(
[self._executable, "-r"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
bufsize=10_000,
preexec_fn=os.setpgrp, # Prevent delivery of Ctrl+C to subprocess
)
def stop(self):
"""Stops the external measurement program and returns the measurement result,
if the measurement was running."""
consumed_energy = collections.defaultdict(dict)
if not self.is_running():
return None
# cpu-energy-meter expects SIGINT to stop and report its result
self._measurement_process.send_signal(signal.SIGINT)
(out, err) = self._measurement_process.communicate()
assert self._measurement_process.returncode is not None
if self._measurement_process.returncode:
logging.debug(
"Energy measurement terminated with return code %s",
self._measurement_process.returncode,
)
self._measurement_process = None
for line in err.splitlines():
logging.debug("energy measurement stderr: %s", line)
for line in out.splitlines():
logging.debug("energy measurement output: %s", line)
match = re.match(r"cpu(\d+)_([a-z]+)_joules=(\d+\.?\d*)", line)
if not match:
continue
cpu, domain, energy = match.groups()
cpu = int(cpu)
energy = Decimal(energy)
consumed_energy[cpu][domain] = energy
return consumed_energy
def is_running(self):
"""Returns True if there is currently an instance of the external measurement program running, False otherwise."""
return self._measurement_process is not None
def format_energy_results(energy):
"""Take the result of an energy measurement and return a flat dictionary that contains all values."""
if not energy:
return {}
result = {}
cpuenergy = Decimal(0)
for pkg, domains in energy.items():
for domain, value in domains.items():
if domain == DOMAIN_PACKAGE:
cpuenergy += value
result[f"cpuenergy-pkg{pkg}-{domain}"] = value
result["cpuenergy"] = cpuenergy
result = collections.OrderedDict(sorted(result.items()))
return result
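# Minimal usage sketch (illustrative only; run_benchmarked_tool is a
# hypothetical placeholder for the measured workload):
#
#     measurement = EnergyMeasurement.create_if_supported()
#     if measurement:
#         measurement.start()
#     run_benchmarked_tool()
#     if measurement:
#         energy = measurement.stop()
#         print(format_energy_results(energy))
#
# create_if_supported() returns None when the cpu-energy-meter binary is not
# available, so callers must guard the start()/stop() calls.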
|
MwanzanFelipe/rockletonfortune
|
refs/heads/master
|
lib/django/core/management/commands/dumpdata.py
|
305
|
from collections import OrderedDict
from django.apps import apps
from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, router
class Command(BaseCommand):
help = ("Output the contents of the database as a fixture of the given "
"format (using each model's default manager unless --all is "
"specified).")
def add_arguments(self, parser):
parser.add_argument('args', metavar='app_label[.ModelName]', nargs='*',
help='Restricts dumped data to the specified app_label or app_label.ModelName.')
parser.add_argument('--format', default='json', dest='format',
help='Specifies the output serialization format for fixtures.')
parser.add_argument('--indent', default=None, dest='indent', type=int,
help='Specifies the indent level to use when pretty-printing output.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Nominates a specific database to dump fixtures from. '
'Defaults to the "default" database.')
parser.add_argument('-e', '--exclude', dest='exclude', action='append', default=[],
help='An app_label or app_label.ModelName to exclude '
'(use multiple --exclude to exclude multiple apps/models).')
parser.add_argument('--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
help='Use natural foreign keys if they are available.')
parser.add_argument('--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
help='Use natural primary keys if they are available.')
parser.add_argument('-a', '--all', action='store_true', dest='use_base_manager', default=False,
help="Use Django's base manager to dump all models stored in the database, "
"including those that would otherwise be filtered or modified by a custom manager.")
parser.add_argument('--pks', dest='primary_keys',
help="Only dump objects with given primary keys. "
"Accepts a comma separated list of keys. "
"This option will only work when you specify one model.")
parser.add_argument('-o', '--output', default=None, dest='output',
help='Specifies file to which the output is written.')
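    # Example invocations (illustrative; 'myapp' and 'MyModel' are
    # placeholder names):
    #   manage.py dumpdata
    #   manage.py dumpdata myapp --format=json --indent=2 --output=fixture.json
    #   manage.py dumpdata myapp.MyModel --pks=1,2,3 --natural-foreign
    #   manage.py dumpdata --exclude=myapp.MyModel --all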
def handle(self, *app_labels, **options):
format = options.get('format')
indent = options.get('indent')
using = options.get('database')
excludes = options.get('exclude')
output = options.get('output')
show_traceback = options.get('traceback')
use_natural_foreign_keys = options.get('use_natural_foreign_keys')
use_natural_primary_keys = options.get('use_natural_primary_keys')
use_base_manager = options.get('use_base_manager')
pks = options.get('primary_keys')
if pks:
primary_keys = pks.split(',')
else:
primary_keys = []
excluded_apps = set()
excluded_models = set()
for exclude in excludes:
if '.' in exclude:
try:
model = apps.get_model(exclude)
except LookupError:
raise CommandError('Unknown model in excludes: %s' % exclude)
excluded_models.add(model)
else:
try:
app_config = apps.get_app_config(exclude)
except LookupError as e:
raise CommandError(str(e))
excluded_apps.add(app_config)
if len(app_labels) == 0:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict((app_config, None)
for app_config in apps.get_app_configs()
if app_config.models_module is not None and app_config not in excluded_apps)
else:
if len(app_labels) > 1 and primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict()
for label in app_labels:
try:
app_label, model_label = label.split('.')
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
try:
model = app_config.get_model(model_label)
except LookupError:
raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
app_list_value = app_list.setdefault(app_config, [])
# We may have previously seen a "all-models" request for
# this app (no model qualifier was given). In this case
# there is no need adding specific models to the list.
if app_list_value is not None:
if model not in app_list_value:
app_list_value.append(model)
except ValueError:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
# This is just an app - no model qualifier
app_label = label
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
app_list[app_config] = None
# Check that the serialization format exists; this is a shortcut to
# avoid collating all the objects and _then_ failing.
if format not in serializers.get_public_serializer_formats():
try:
serializers.get_serializer(format)
except serializers.SerializerDoesNotExist:
pass
raise CommandError("Unknown serialization format: %s" % format)
def get_objects(count_only=False):
"""
Collate the objects to be serialized. If count_only is True, just
count the number of objects to be serialized.
"""
for model in serializers.sort_dependencies(app_list.items()):
if model in excluded_models:
continue
if not model._meta.proxy and router.allow_migrate_model(using, model):
if use_base_manager:
objects = model._base_manager
else:
objects = model._default_manager
queryset = objects.using(using).order_by(model._meta.pk.name)
if primary_keys:
queryset = queryset.filter(pk__in=primary_keys)
if count_only:
yield queryset.order_by().count()
else:
for obj in queryset.iterator():
yield obj
try:
self.stdout.ending = None
progress_output = None
object_count = 0
# If dumpdata is outputting to stdout, there is no way to display progress
if (output and self.stdout.isatty() and options['verbosity'] > 0):
progress_output = self.stdout
object_count = sum(get_objects(count_only=True))
stream = open(output, 'w') if output else None
try:
serializers.serialize(format, get_objects(), indent=indent,
use_natural_foreign_keys=use_natural_foreign_keys,
use_natural_primary_keys=use_natural_primary_keys,
stream=stream or self.stdout, progress_output=progress_output,
object_count=object_count)
finally:
if stream:
stream.close()
except Exception as e:
if show_traceback:
raise
raise CommandError("Unable to serialize database: %s" % e)
|
nguyenfilip/subscription-manager
|
refs/heads/master
|
src/subscription_manager/gui/mysubstab.py
|
1
|
#
# Copyright (c) 2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import gettext
import os
from datetime import datetime
from rhsm.certificate import GMT
from subscription_manager.ga import Gtk as ga_Gtk
from subscription_manager.ga import GObject as ga_GObject
from subscription_manager.ga import GdkPixbuf as ga_GdkPixbuf
from subscription_manager.async import AsyncBind
from subscription_manager.cert_sorter import EntitlementCertStackingGroupSorter
from subscription_manager.entcertlib import EntCertDeleteAction
from subscription_manager import injection as inj
from subscription_manager.gui import messageWindow, progress
from subscription_manager.gui.storage import MappedTreeStore
from subscription_manager.gui import widgets
from subscription_manager.gui.utils import handle_gui_exception
from subscription_manager.utils import is_true_value
_ = gettext.gettext
prefix = os.path.dirname(__file__)
WARNING_IMG = os.path.join(prefix, "data/icons/partial.svg")
EXPIRING_IMG = os.path.join(prefix, "data/icons/expiring.svg")
EXPIRED_IMG = os.path.join(prefix, "data/icons/invalid.svg")
class MySubscriptionsTab(widgets.SubscriptionManagerTab):
widget_names = widgets.SubscriptionManagerTab.widget_names + \
['details_box', 'unsubscribe_button']
gui_file = "mysubs"
def __init__(self, backend, parent_win,
ent_dir, prod_dir):
"""
Create a new 'My Subscriptions' tab.
"""
super(MySubscriptionsTab, self).__init__()
self.backend = backend
self.identity = inj.require(inj.IDENTITY)
self.parent_win = parent_win
self.entitlement_dir = ent_dir
self.product_dir = prod_dir
self.sub_details = widgets.ContractSubDetailsWidget(prod_dir)
self.async_bind = AsyncBind(self.backend.certlib)
self.pooltype_cache = inj.require(inj.POOLTYPE_CACHE)
# Progress bar
self.pb = None
self.timer = 0
# Put the details widget in the middle
details = self.sub_details.get_widget()
self.details_box.pack_start(details, True, True, 0)
# Set up columns on the view
text_renderer = ga_Gtk.CellRendererText()
image_renderer = ga_Gtk.CellRendererPixbuf()
column = ga_Gtk.TreeViewColumn(_('Subscription'))
column.set_expand(True)
column.pack_start(image_renderer, False)
column.pack_start(text_renderer, False)
column.add_attribute(image_renderer, 'pixbuf', self.store['image'])
column.add_attribute(text_renderer, 'text', self.store['subscription'])
column.add_attribute(text_renderer, 'cell-background',
self.store['background'])
column.add_attribute(image_renderer, 'cell-background',
self.store['background'])
column.set_sizing(ga_Gtk.TreeViewColumnSizing.AUTOSIZE)
self.top_view.append_column(column)
cols = []
cols.append((column, 'text', 'subscription'))
progress_renderer = ga_Gtk.CellRendererProgress()
products_column = ga_Gtk.TreeViewColumn(_("Installed Products"),
progress_renderer,
value=self.store['installed_value'],
text=self.store['installed_text'])
products_column.add_attribute(progress_renderer, 'cell-background',
self.store['background'])
self.empty_progress_renderer = ga_Gtk.CellRendererText()
products_column.pack_end(self.empty_progress_renderer, True)
products_column.set_cell_data_func(progress_renderer, self._update_progress_renderer)
self.top_view.append_column(products_column)
column = self.add_date_column(_("End Date"), 'expiration_date')
cols.append((column, 'date', 'expiration_date'))
column = self.add_text_column(_("Quantity"), 'quantity')
cols.append((column, 'text', 'quantity'))
self.set_sorts(self.store, cols)
self.top_view.connect("row_activated",
widgets.expand_collapse_on_row_activated_callback)
# Don't update the icon on the first run; we don't have real compliance data yet
self.update_subscriptions(update_dbus=False)
self.connect_signals({'on_unsubscribe_button_clicked': self.unsubscribe_button_clicked})
def get_store(self):
return MappedTreeStore(self.get_type_map())
def _clear_progress_bar(self):
if self.pb:
self.pb.hide()
ga_GObject.source_remove(self.timer)
self.timer = 0
self.pb = None
def _handle_unbind_exception(self, e, selection):
self._clear_progress_bar()
handle_gui_exception(e, _("There was an error removing %s with serial number %s") %
(selection['subscription'], selection['serial']), self.parent_win, format_msg=False)
def _unsubscribe_callback(self):
self.backend.cs.force_cert_check()
self._clear_progress_bar()
def _on_unsubscribe_prompt_response(self, dialog, response, selection):
if not response:
return
serial = long(selection['serial'])
if self.identity.is_valid():
self.pb = progress.Progress(_("Removing"),
_("Removing subscription. Please wait."))
self.timer = ga_GObject.timeout_add(100, self.pb.pulse)
self.pb.set_transient_for(self.parent_win)
self.async_bind.unbind(serial, selection, self._unsubscribe_callback, self._handle_unbind_exception)
else:
# unregistered, just delete the certs directly
action = EntCertDeleteAction(self.entitlement_dir)
action.perform([serial])
self.update_subscriptions()
def unsubscribe_button_clicked(self, widget):
selection = widgets.SelectionWrapper(self.top_view.get_selection(), self.store)
# nothing selected
if not selection.is_valid():
return
# remove all markup, see rh bz#982286
subscription_text = ga_GObject.markup_escape_text(selection['subscription'])
prompt = messageWindow.YesNoDialog(_("Are you sure you want to remove %s?") % subscription_text,
self.content.get_toplevel())
prompt.connect('response', self._on_unsubscribe_prompt_response, selection)
def update_subscriptions(self, update_dbus=True):
"""
Pulls the entitlement certificates and updates the subscription model.
"""
self.pooltype_cache.update()
sorter = EntitlementCertStackingGroupSorter(self.entitlement_dir.list())
self.store.clear()
# FIXME: mapped list store inits are weird
for group in sorter.groups:
self._add_group(group)
self.top_view.expand_all()
self._stripe_rows(None, self.store)
if update_dbus:
inj.require(inj.DBUS_IFACE).update()
self.unsubscribe_button.set_property('sensitive', False)
# 841396: Select first item in My Subscriptions table by default
selection = self.top_view.get_selection()
selection.select_path(0)
def _add_group(self, group):
tree_iter = None
if group.name and len(group.entitlements) > 1:
unique = self.find_unique_name_count(group.entitlements)
if unique - 1 > 1:
name_string = _("Stack of %s and %s others") % \
(group.name, str(unique - 1))
elif unique - 1 == 1:
name_string = _("Stack of %s and 1 other") % (group.name)
else:
name_string = _("Stack of %s") % (group.name)
tree_iter = self.store.add_map(tree_iter, self._create_stacking_header_entry(name_string))
new_parent_image = None
for i, cert in enumerate(group.entitlements):
image = self._get_entry_image(cert)
self.store.add_map(tree_iter, self._create_entry_map(cert, image))
# Determine if we need to change the parent's image. We
# will match the parent's image with the children if any of
# the children have an image.
if self.image_ranks_higher(new_parent_image, image):
new_parent_image = image
# Update the parent image if required.
if new_parent_image and tree_iter:
self.store.set_value(tree_iter, self.store['image'],
ga_GdkPixbuf.Pixbuf.new_from_file_at_size(new_parent_image, 13, 13))
def find_unique_name_count(self, entitlements):
result = dict()
for ent in entitlements:
result[ent.order.name] = ent.order.name
return len(result)
def image_ranks_higher(self, old_image, new_image):
images = [None, WARNING_IMG, EXPIRING_IMG, EXPIRED_IMG]
return images.index(new_image) > images.index(old_image)
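# Hedged note (addition, not part of the original class): the list order above is
# a severity scale, so for example image_ranks_higher(WARNING_IMG, EXPIRED_IMG)
# is True while image_ranks_higher(EXPIRED_IMG, WARNING_IMG) is False, and any
# icon outranks None (no icon yet).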
def get_label(self):
return _("My Subscriptions")
def get_type_map(self):
return {
'image': ga_GdkPixbuf.Pixbuf,
'subscription': str,
'installed_value': float,
'installed_text': str,
'start_date': ga_GObject.TYPE_PYOBJECT,
'expiration_date': ga_GObject.TYPE_PYOBJECT,
# In the rhsm.certificate models, quantity is an int
# and serial is a long; we could store them in the widget
# store that way
'quantity': str,
'serial': str,
'align': float,
'background': str,
'is_group_row': bool
}
def on_selection(self, selection):
"""
Updates the 'Subscription Details' panel with the currently selected
subscription.
"""
if selection['is_group_row']:
self.sub_details.clear()
self.unsubscribe_button.set_property('sensitive', False)
return
self.unsubscribe_button.set_property('sensitive', True)
# Load the entitlement certificate for the selected row:
serial = selection['serial']
cert = self.entitlement_dir.find(long(serial))
order = cert.order
products = [(product.name, product.id)
for product in cert.products]
reasons = []
if self.backend.cs.are_reasons_supported():
reasons = self.backend.cs.reasons.get_subscription_reasons(cert.subject['CN'])
if not reasons:
if cert in self.backend.cs.valid_entitlement_certs:
reasons.append(_('Subscription is current.'))
else:
if cert.valid_range.end() < datetime.now(GMT()):
reasons.append(_('Subscription is expired.'))
else:
reasons.append(_('Subscription has not begun.'))
else:
reasons.append(_("Subscription management service doesn't support Status Details."))
pool_type = ''
if cert.pool and cert.pool.id:
pool_type = self.pooltype_cache.get(cert.pool.id)
if is_true_value(order.virt_only):
virt_only = _("Virtual")
else:
virt_only = _("Physical")
if is_true_value(order.provides_management):
management = _("Yes")
else:
management = _("No")
self.sub_details.show(order.name,
contract=order.contract or "",
start=cert.valid_range.begin(),
end=cert.valid_range.end(),
account=order.account or "",
management=management,
virt_only=virt_only or "",
support_level=order.service_level or "",
support_type=order.service_type or "",
products=products,
sku=order.sku,
reasons=reasons,
expiring=cert.is_expiring(),
pool_type=pool_type)
def on_no_selection(self):
"""
Clears out the subscription details panel when no subscription is
selected and disables the unsubscribe button.
"""
self.sub_details.clear()
self.unsubscribe_button.set_property('sensitive', False)
def _create_stacking_header_entry(self, title):
entry = {}
entry['subscription'] = title
entry['installed_value'] = 0.0
entry['align'] = 0.5 # Center horizontally
entry['background'] = None
entry['is_group_row'] = True
return entry
def _create_entry_map(self, cert, image):
order = cert.order
products = cert.products
installed = self._get_installed(products)
# Initialize an entry list of the proper length
entry = {}
if image:
entry['image'] = ga_GdkPixbuf.Pixbuf.new_from_file_at_size(image, 13, 13)
entry['subscription'] = order.name
entry['installed_value'] = self._percentage(installed, products)
entry['installed_text'] = '%s / %s' % (len(installed), len(products))
entry['start_date'] = cert.valid_range.begin()
entry['expiration_date'] = cert.valid_range.end()
entry['quantity'] = str(order.quantity_used)
entry['serial'] = str(cert.serial)
entry['align'] = 0.5 # Center horizontally
entry['background'] = None
entry['is_group_row'] = False
return entry
def _get_entry_image(self, cert):
date_range = cert.valid_range
now = datetime.now(GMT())
if date_range.end() < now:
return EXPIRED_IMG
if cert.is_expiring():
return EXPIRING_IMG
if cert.subject and 'CN' in cert.subject and \
self.backend.cs.reasons.get_subscription_reasons(cert.subject['CN']):
return WARNING_IMG
return None
def _percentage(self, subset, full_set):
if (len(full_set) == 0):
return 100
else:
return (float(len(subset)) / len(full_set)) * 100
def _get_installed(self, products):
installed_dir = self.product_dir
installed_products = []
for product in products:
installed = installed_dir.find_by_product(product.id)
if installed:
installed_products.append(installed)
return installed_products
def _update_progress_renderer(self, column, cell_renderer, tree_model, tree_iter, data=None):
hide_progress = tree_model.get_value(tree_iter, self.store['is_group_row'])
background_color = tree_model.get_value(tree_iter, self.store['background'])
cell_renderer.set_property('visible', not hide_progress)
self.empty_progress_renderer.set_property('visible', hide_progress)
self.empty_progress_renderer.set_property('cell-background', background_color)
|
rubienr/network-monitoring
|
refs/heads/master
|
data_vis/views.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import time
from collections import OrderedDict
import speedtest_cli as speedtest
from django.db.models import Max, Min
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.views.generic import TemplateView
from common.models import PingTestResult
from common.models import SpeedtestServer
from common.models import TransferTestResult
from service.probing import getLocalIp
def defaultView(request):
return render_to_response('bootstrap/base.html', context_instance=RequestContext(request))
def transformPingProbes2TimelinechartData(timeFrame):
# filter data
objects = PingTestResult.objects.filter(pingStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]])
# map data
hostToTimestampToValue = {}
for result in objects:
timestamp = time.mktime(result.pingStart.timetuple()) * 1000
# host = result.destinationHost
host = result.probeName
value = result.rttAvg
if host not in hostToTimestampToValue.keys():
hostToTimestampToValue[host] = {}
hostToTimestampToValue[host][timestamp] = value
# let empty periods fall back to zero
relaxedData = []
for host in hostToTimestampToValue.keys():
relaxedData.append(seriesToReturnToZeroSeries(hostToTimestampToValue[host]))
# merge sets to chart data
xValues, chartData = mergeDictionariesToChartData(relaxedData)
# prepare template tag arguments
extra_serie = {"tooltip": {"y_start": "", "y_end": " [ms] avg. delay"}}
chartdata = {
'x': xValues,
}
idx = 1
hostnameLookup = dict(zip(chartData.keys(), hostToTimestampToValue.keys()))
for key, hostData in chartData.items():
chartdata["name%s" % idx] = hostnameLookup["y%s" % idx]
chartdata["y%s" % idx] = hostData.values()
chartdata["extra%s" % idx] = extra_serie
idx += 1
axis_date = "%H:%M %p"
tooltip_date = "%d %b %H:%M"
data = {
'charttype': "lineWithFocusChart",
'chartdata': chartdata,
"chartcontainer": "linewithfocuschart_container",
"title": "Average Ping Duration",
"extra": {
'x_is_date': True,
'x_axis_format': axis_date,
"charttooltip_dateformat": tooltip_date,
'tag_script_js': True,
'jquery_on_ready': False
}
}
return data
def transformTransferProbes2TimelinechartData(direction, timeFrame):
# filter data
objects = None
isBothDirections = False
if "download" in direction and "upload" in direction:
isBothDirections = True
objects = TransferTestResult.objects\
.filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]])
else:
objects = TransferTestResult.objects\
.filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]])\
.filter(direction=direction)
# map data
hostToTimestampToValue = {}
for result in objects:
timestamp = time.mktime(result.transferStart.timetuple()) * 1000.0
if isBothDirections:
# host = "%s %s" % (result.host, result.direction)
host = "%s (%s)" % (result.probeName, result.direction)
else:
# host = result.host
host = result.probeName
throughput = round(result.transferredUnitsPerSecond / (1000.0 * 1000), 2)
if host not in hostToTimestampToValue.keys():
hostToTimestampToValue[host] = {}
hostToTimestampToValue[host][timestamp] = throughput
# let empty periods fall back to zero
relaxedData = []
for host in hostToTimestampToValue.keys():
relaxedData.append(seriesToReturnToZeroSeries(hostToTimestampToValue[host]))
# merge sets to chart data
xValues, chartData = mergeDictionariesToChartData(relaxedData)
# prepare template tag arguments
extra_serie = {"tooltip": {"y_start": "", "y_end": "MBit/s"}}
chartdata = {
'x': xValues,
}
idx = 1
hostnameLookup = dict(zip(chartData.keys(), hostToTimestampToValue.keys()))
for key, hostData in chartData.items():
chartdata["name%s" % idx] = hostnameLookup["y%s" % idx]
chartdata["y%s" % idx] = hostData.values()
chartdata["extra%s" % idx] = extra_serie
idx += 1
title = ""
if "download" in direction and "upload" in direction:
title = "Up-/Download Speed Tests"
elif "download" in direction:
title = "Download Speed Tests"
elif "upload" in direction:
title = "Upload Speed Tests"
axis_date = "%H:%M"
tooltip_date = "%d %b %H:%M"
data = {
'charttype': "lineWithFocusChart",
'chartdata': chartdata,
"chartcontainer": "linewithfocuschart_container",
"title": title,
"extra": {
'x_is_date': True,
'x_axis_format': axis_date,
"charttooltip_dateformat": tooltip_date,
'tag_script_js': True,
'jquery_on_ready': False
}
}
return data
def transformProbes2PreviewTimelinechartData():
timestampToPingProbes = {}
roundSeconds = -2 # factor to bin values
for result in PingTestResult.objects.order_by('pingStart').all():
timestamp = int(round(time.mktime(result.pingStart.timetuple()), roundSeconds))
if timestamp in timestampToPingProbes.keys():
timestampToPingProbes[timestamp] = timestampToPingProbes[timestamp] + 1
else:
timestampToPingProbes[timestamp] = 1
timestampToTransferProbes = {}
for result in TransferTestResult.objects.order_by('transferStart').all():
timestamp = int(round(time.mktime(result.transferStart.timetuple()), roundSeconds))
if timestamp in timestampToTransferProbes.keys():
timestampToTransferProbes[timestamp] = timestampToTransferProbes[timestamp] + 1
else:
timestampToTransferProbes[timestamp] = 1
pingChartData = seriesToReturnToZeroSeries(timestampToPingProbes)
transferChartData = seriesToReturnToZeroSeries(timestampToTransferProbes)
xValues, theData = mergeDictionariesToChartData([pingChartData, transferChartData])
extra_serie = {"tooltip": {"y_start": "", "y_end": " probes"}}
chartdata = {'x': [1000 * s for s in xValues]}
chartdata["name1"] = "ping probes"
chartdata["y1"] = theData["y1"].values()
chartdata["extra1"] = extra_serie
chartdata["name2"] = "transfer probes"
chartdata["y2"] = theData["y2"].values()
chartdata["extra2"] = extra_serie
if len(chartdata["x"]) > 30:
title = "Specify time window to generate charts from (optional):"
elif len(chartdata["x"]) > 0:
title = "Data overview (still less probes):"
else:
title = "Unfortunately no data available. Please configure and start the service."
axis_date = "%d %b"
tooltip_date = "%d %b %H:%M"
data = {
'preview_charttype': "lineWithFocusChart",
'preview_chartdata': chartdata,
"preview_chartcontainer": "linewithfocuschart_container",
"preview_title": title,
"preview_extra": {
'x_is_date': True,
'x_axis_format': axis_date,
"charttooltip_dateformat": tooltip_date,
'tag_script_js': True,
'jquery_on_ready': False
}
}
return data
def getClosestServersView(request):
config = speedtest.getConfig()
closestServers = speedtest.closestServers(config['client'])
# store to db
models = []
localIp = getLocalIp("speedtest.net")
for server in closestServers:
server["serverId"] = server.pop("id")
model = SpeedtestServer().fromDict(**server)
model.interfaceIp = localIp
models.append(model)
SpeedtestServer.objects.bulk_create(models)
# filter/reorder/translate values for view
title = "Speedtest.net - Closest Server"
columnToName = OrderedDict([
("serverId", "ID"),
("name", "City"),
("url", "URL"),
("country", "Country"),
("d", "Distance [km]"),
#("cc", "country code"),
#("host", "host name"),
("sponsor", ""),
#("url2", "url"),
("lat", "Latitude"),
("lon", "Longitude"),
])
columns = columnToName.keys()
servers = []
for c in closestServers:
server = OrderedDict([(columnToName[filteredColumn], c[filteredColumn]) for filteredColumn in columns])
distanceColumn = columnToName["d"]
server[distanceColumn] = round(server[distanceColumn],1)
servers.append(server)
data = {
"title": title,
"tableHeader" : servers[0].keys(),
"servers": servers,
}
return render_to_response('bootstrap/serverlist.html', data, context_instance=RequestContext(request))
def transformPingProbes2PiechartData(timeFrame):
# filter data
objects = PingTestResult.objects.filter(pingStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]])
results = {}
for result in objects:
if result.probeName in results:
results[result.probeName].append(result)
else:
results[result.probeName] = [result]
xdata = results.keys()
ydata = [len(results[x]) for x in results]
extra_serie = {"tooltip": {"y_start": "", "y_end": " probes"}}
chartdata = {'x': xdata, 'y': ydata, "extra": extra_serie}
charttype = "pieChart"
chartcontainer = 'piechart_container'
data = {
"title": "Total Pings",
'charttype': charttype,
'chartdata': chartdata,
'chartcontainer': chartcontainer,
'extra': {
'x_is_date': False,
'tag_script_js': True,
'jquery_on_ready': False,
"donut": True,
"showLabels": True,
}
}
return data
def transformTransferProbes2PiechartData(direction, timeFrame):
"""
Arguments
direction: download, upload, downloadupload"""
# filter data
objects = None
if "download" in direction and "upload" in direction:
objects = TransferTestResult.objects\
.filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]])
else:
objects = TransferTestResult.objects\
.filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]])\
.filter(direction=direction)
results = {}
for result in objects:
if result.probeName in results:
results[result.probeName].append(result)
else:
results[result.probeName] = [result]
xdata = results.keys()
ydata = [len(results[x]) for x in results]
extra_serie = {"tooltip": {"y_start": "", "y_end": " probes"}}
chartdata = {'x': xdata, 'y': ydata, "extra": extra_serie}
charttype = "pieChart"
chartcontainer = 'piechart_container'
title = "Total %s"
if "download" in direction and "upload" in direction:
title = title % "Up-/Downloads"
elif "download" in direction:
title = title % "Downloads"
elif "upload" in direction:
title = title % "Uploads"
data = {
"title": title,
'charttype': charttype,
'chartdata': chartdata,
'chartcontainer': chartcontainer,
'extra': {
'x_is_date': False,
'x_axis_format': '',
'tag_script_js': True,
'jquery_on_ready': False,
"donut": True,
"showLabels": True,
}
}
return data
class DefaultChartView(TemplateView):
"""Piechart and Timlinechart view
Arguments
view -- pie, timeline
dataSource -- ping, transfer
direction: None (defaults to download), upload, download, uploaddownload
"""
template_name = "bootstrap/piechart.html"
dataSource = "ping"
view = "pie"
direction = None
templates = {
"pie": "bootstrap/piechart.html",
"timeline": "bootstrap/timeline.html",
}
renderStrategy = {
"pie":
{
"ping": transformPingProbes2PiechartData,
"transfer": transformTransferProbes2PiechartData,
},
"timeline": {
"ping": transformPingProbes2TimelinechartData,
"transfer": transformTransferProbes2TimelinechartData,
},
}
def __init__(self, dataSource="ping", view="pie", direction=None):
object.__init__(self)
self.dataSource = dataSource
self.view = view
self.direction = direction
self.template_name = self.templates[self.view]
self.relativeDataFrom = 0
self.relativeDataTo = 0
def get(self, request, *args, **kwargs):
self.relativeDataFrom = float(request.GET.get('relFrom', '0'))
self.relativeDataTo = float(request.GET.get('relTo', '0'))
return render(request, self.template_name, self.get_context_data())
def get_context_data(self, **kwargs):
context = super(DefaultChartView, self).get_context_data(**kwargs)
timeFrameArguments = {}
if 0 <= self.relativeDataFrom < self.relativeDataTo <= 1.0:
timeFrameArguments["fromDateTime"] = self.relativeToDbTimestamp(self.relativeDataFrom)
timeFrameArguments["toDateTime"] = self.relativeToDbTimestamp(self.relativeDataTo)
else:
timeFrameArguments["fromDateTime"] = self.relativeToDbTimestamp(0)
timeFrameArguments["toDateTime"] = self.relativeToDbTimestamp(1)
if self.direction is not None:
chartData = self.renderStrategy[self.view][self.dataSource](self.direction, timeFrame=timeFrameArguments)
else:
chartData = self.renderStrategy[self.view][self.dataSource](timeFrame=timeFrameArguments)
for key, value in chartData.items():
context[key] = value
context["timeframe_filter_extras"] = "relFrom=%s&relTo=%s" % (self.relativeDataFrom, self.relativeDataTo)
return context
def relativeToDbTimestamp(self, relativeValue):
try:
pingProbes = PingTestResult.objects
key, latestPingProbe = pingProbes.aggregate(Max('pingStart')).popitem()
key, firstPingProbe = pingProbes.aggregate(Min('pingStart')).popitem()
transferProbes = TransferTestResult.objects
key, latestTransferProbe = transferProbes.aggregate(Max('transferStart')).popitem()
key, firstTransferProbe = transferProbes.aggregate(Min('transferStart')).popitem()
# in case of missing probes
minTime = firstPingProbe
maxTime = latestPingProbe
if minTime is None or maxTime is None:
minTime = firstTransferProbe
maxTime = latestTransferProbe
if minTime is None or maxTime is None:
return datetime.datetime.utcfromtimestamp(0)
try:
if firstPingProbe > firstTransferProbe:
minTime = firstTransferProbe
maxTime = latestPingProbe
if latestPingProbe < latestTransferProbe:
maxTime = latestTransferProbe
except:
pass
moment = time.mktime(minTime.timetuple()) + relativeValue * (time.mktime(maxTime.timetuple()) -
time.mktime(minTime.timetuple()))
return datetime.datetime.utcfromtimestamp(moment)
except:
return datetime.datetime.utcfromtimestamp(0)
class ProbePreviewChartView(TemplateView):
"""Timlinechart view showing number of probes per time line
"""
template_name = "bootstrap/base.html"
def __init__(self):
object.__init__(self)
pass
def get_context_data(self, **kwargs):
context = super(ProbePreviewChartView, self).get_context_data(**kwargs)
chartData = transformProbes2PreviewTimelinechartData()
for key, value in chartData.items():
context[key] = value
return context
def mergeDictionariesToChartData(dictList=None):
if dictList is None:
dictList = []
def uniq(lst):
last = object()
for item in lst:
if item == last:
continue
yield item
last = item
def sort_and_deduplicate(l):
return list(uniq(sorted(l, reverse=True)))
# sort the key set of all dicts
xValues = []
for d in dictList:
xValues.extend(d.keys())
xValues = sort_and_deduplicate(xValues)
# create result dicts
idx = 1
chartDicts = {}
for d in dictList:
chartDicts["y%s" % idx] = OrderedDict()
idx += 1
# for all keys and all dicts in dictList store value or default (0) to result
idx = 1
for d in dictList:
for x in xValues:
if x in d.keys():
chartDicts["y%s" % idx][x] = d[x]
else:
chartDicts["y%s" % idx][x] = 0
idx += 1
return xValues, chartDicts
def seriesToReturnToZeroSeries(series, cutoffSeconds=300):
""" let series fallback to zero:
if no probes available withing cutom cutoffSeconds timespan
before 1st probe
after last probe"""
lastTimestamp = None
withFallback = OrderedDict()
for timestamp in sorted(series):
if lastTimestamp is not None:
if lastTimestamp + cutoffSeconds < timestamp: # return to 0 in between probes
withFallback[lastTimestamp + 1] = 0
withFallback[timestamp - 1] = 0
else: # return to 0 before 1st probe
withFallback[timestamp - 1] = 0
withFallback[timestamp] = series[timestamp]
lastTimestamp = timestamp
if lastTimestamp is not None:
withFallback[lastTimestamp + 1] = 0
return withFallback
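# Hedged illustration (addition, not part of the original views module): the two
# helpers above are pure functions, so their behaviour can be shown with small
# made-up series (keys are timestamps, values are sample counts).
#
#     a = {100: 5, 1000: 7}                    # two probes far apart in time
#     b = seriesToReturnToZeroSeries(a, cutoffSeconds=300)
#     # b == OrderedDict([(99, 0), (100, 5), (101, 0), (999, 0), (1000, 7), (1001, 0)])
#     x, y = mergeDictionariesToChartData([b])
#     # x is the deduplicated key list sorted in descending order and
#     # y['y1'] maps every x value to the series value, defaulting missing keys to 0.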
|
unreal666/outwiker
|
refs/heads/master
|
plugins/source/source/pygments/lexers/varnish.py
|
6
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.varnish
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for Varnish configuration
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, bygroups, using, this, \
inherit, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal
__all__ = ['VCLLexer', 'VCLSnippetLexer']
class VCLLexer(RegexLexer):
"""
For Varnish Configuration Language (VCL).
.. versionadded:: 2.2
"""
name = 'VCL'
aliases = ['vcl']
filenames = ['*.vcl']
mimetypes = ['text/x-vclsrc']
def analyse_text(text):
# If the very first line is 'vcl 4.0;' it's pretty much guaranteed
# that this is VCL
if text.startswith('vcl 4.0;'):
return 1.0
# Skip over comments and blank lines
# This is accurate enough that returning 0.9 is reasonable.
# Almost no VCL files start without some comments.
elif '\nvcl 4.0;' in text[:1000]:
return 0.9
tokens = {
'probe': [
include('whitespace'),
include('comments'),
(r'(\.\w+)(\s*=\s*)([^;]*)(;)',
bygroups(Name.Attribute, Operator, using(this), Punctuation)),
(r'\}', Punctuation, '#pop'),
],
'acl': [
include('whitespace'),
include('comments'),
(r'[!/]+', Operator),
(r';', Punctuation),
(r'\d+', Number),
(r'\}', Punctuation, '#pop'),
],
'backend': [
include('whitespace'),
(r'(\.probe)(\s*=\s*)(\w+)(;)',
bygroups(Name.Attribute, Operator, Name.Variable.Global, Punctuation)),
(r'(\.probe)(\s*=\s*)(\{)',
bygroups(Name.Attribute, Operator, Punctuation), 'probe'),
(r'(\.\w+\b)(\s*=\s*)([^;]*)(\s*;)',
bygroups(Name.Attribute, Operator, using(this), Punctuation)),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
],
'statements': [
(r'(\d\.)?\d+[sdwhmy]', Literal.Date),
(r'(\d\.)?\d+ms', Literal.Date),
(r'(vcl_pass|vcl_hash|vcl_hit|vcl_init|vcl_backend_fetch|vcl_pipe|'
r'vcl_backend_response|vcl_synth|vcl_deliver|vcl_backend_error|'
r'vcl_fini|vcl_recv|vcl_purge|vcl_miss)\b', Name.Function),
(r'(pipe|retry|hash|synth|deliver|purge|abandon|lookup|pass|fail|ok|'
r'miss|fetch|restart)\b', Name.Constant),
(r'(beresp|obj|resp|req|req_top|bereq)\.http\.[a-zA-Z_-]+\b', Name.Variable),
(words((
'obj.status', 'req.hash_always_miss', 'beresp.backend', 'req.esi_level',
'req.can_gzip', 'beresp.ttl', 'obj.uncacheable', 'req.ttl', 'obj.hits',
'client.identity', 'req.hash_ignore_busy', 'obj.reason', 'req.xid',
'req_top.proto', 'beresp.age', 'obj.proto', 'obj.age', 'local.ip',
'beresp.uncacheable', 'req.method', 'beresp.backend.ip', 'now',
'obj.grace', 'req.restarts', 'beresp.keep', 'req.proto', 'resp.proto',
'bereq.xid', 'bereq.between_bytes_timeout', 'req.esi',
'bereq.first_byte_timeout', 'bereq.method', 'bereq.connect_timeout',
'beresp.do_gzip', 'resp.status', 'beresp.do_gunzip',
'beresp.storage_hint', 'resp.is_streaming', 'beresp.do_stream',
'req_top.method', 'bereq.backend', 'beresp.backend.name', 'beresp.status',
'req.url', 'obj.keep', 'obj.ttl', 'beresp.reason', 'bereq.retries',
'resp.reason', 'bereq.url', 'beresp.do_esi', 'beresp.proto', 'client.ip',
'bereq.proto', 'server.hostname', 'remote.ip', 'req.backend_hint',
'server.identity', 'req_top.url', 'beresp.grace', 'beresp.was_304',
'server.ip', 'bereq.uncacheable'), suffix=r'\b'),
Name.Variable),
(r'[!%&+*\-,/<.}{>=|~]+', Operator),
(r'[();]', Punctuation),
(r'[,]+', Punctuation),
(words(('hash_data', 'regsub', 'regsuball', 'if', 'else',
'elsif', 'elif', 'synth', 'synthetic', 'ban',
'return', 'set', 'unset', 'import', 'include', 'new',
'rollback', 'call'), suffix=r'\b'),
Keyword),
(r'storage\.\w+\.\w+\b', Name.Variable),
(words(('true', 'false')), Name.Builtin),
(r'\d+\b', Number),
(r'(backend)(\s+\w+)(\s*\{)',
bygroups(Keyword, Name.Variable.Global, Punctuation), 'backend'),
(r'(probe\s)(\s*\w+\s)(\{)',
bygroups(Keyword, Name.Variable.Global, Punctuation), 'probe'),
(r'(acl\s)(\s*\w+\s)(\{)',
bygroups(Keyword, Name.Variable.Global, Punctuation), 'acl'),
(r'(vcl )(4.0)(;)$',
bygroups(Keyword.Reserved, Name.Constant, Punctuation)),
(r'(sub\s+)([a-zA-Z]\w*)(\s*\{)',
bygroups(Keyword, Name.Function, Punctuation)),
(r'([a-zA-Z_]\w*)'
r'(\.)'
r'([a-zA-Z_]\w*)'
r'(\s*\(.*\))',
bygroups(Name.Function, Punctuation, Name.Function, using(this))),
(r'[a-zA-Z_]\w*', Name),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'comments': [
(r'#.*$', Comment),
(r'/\*', Comment.Multiline, 'comment'),
(r'//.*$', Comment),
],
'string': [
(r'"', String, '#pop'),
(r'[^"\n]+', String), # all other characters
],
'multistring': [
(r'[^"}]', String),
(r'"\}', String, '#pop'),
(r'["}]', String),
],
'whitespace': [
(r'L?"', String, 'string'),
(r'\{"', String, 'multistring'),
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
],
'root': [
include('whitespace'),
include('comments'),
include('statements'),
(r'\s+', Text),
],
}
class VCLSnippetLexer(VCLLexer):
"""
For Varnish Configuration Language snippets.
.. versionadded:: 2.2
"""
name = 'VCLSnippets'
aliases = ['vclsnippets', 'vclsnippet']
mimetypes = ['text/x-vclsnippet']
filenames = []
def analyse_text(text):
# override method inherited from VCLLexer
return 0
tokens = {
'snippetspre': [
(r'\.\.\.+', Comment),
(r'(bereq|req|req_top|resp|beresp|obj|client|server|local|remote|'
r'storage)($|\.\*)', Name.Variable),
],
'snippetspost': [
(r'(backend)\b', Keyword.Reserved),
],
'root': [
include('snippetspre'),
inherit,
include('snippetspost'),
],
}
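# Hedged usage sketch (addition, not part of the original lexer module): both
# lexers plug into the standard pygments pipeline. The VCL snippet below is a
# made-up example.
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     code = 'vcl 4.0;\n\nsub vcl_recv {\n    return (pass);\n}\n'
#     print(highlight(code, VCLLexer(), TerminalFormatter()))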
|
yongshengwang/hue
|
refs/heads/master
|
build/env/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/universaldetector.py
|
1775
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
from .escprober import EscCharSetProber # ISO-2022, etc.
import re
MINIMUM_THRESHOLD = 0.20
ePureAscii = 0
eEscAscii = 1
eHighbyte = 2
class UniversalDetector:
def __init__(self):
self._highBitDetector = re.compile(b'[\x80-\xFF]')
self._escDetector = re.compile(b'(\033|~{)')
self._mEscCharSetProber = None
self._mCharSetProbers = []
self.reset()
def reset(self):
self.result = {'encoding': None, 'confidence': 0.0}
self.done = False
self._mStart = True
self._mGotData = False
self._mInputState = ePureAscii
self._mLastChar = b''
if self._mEscCharSetProber:
self._mEscCharSetProber.reset()
for prober in self._mCharSetProbers:
prober.reset()
def feed(self, aBuf):
if self.done:
return
aLen = len(aBuf)
if not aLen:
return
if not self._mGotData:
# If the data starts with BOM, we know it is UTF
if aBuf[:3] == codecs.BOM_UTF8:
# EF BB BF UTF-8 with BOM
self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
elif aBuf[:4] == codecs.BOM_UTF32_LE:
# FF FE 00 00 UTF-32, little-endian BOM
self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
elif aBuf[:4] == codecs.BOM_UTF32_BE:
# 00 00 FE FF UTF-32, big-endian BOM
self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
elif aBuf[:4] == b'\xFE\xFF\x00\x00':
# FE FF 00 00 UCS-4, unusual octet order BOM (3412)
self.result = {
'encoding': "X-ISO-10646-UCS-4-3412",
'confidence': 1.0
}
elif aBuf[:4] == b'\x00\x00\xFF\xFE':
# 00 00 FF FE UCS-4, unusual octet order BOM (2143)
self.result = {
'encoding': "X-ISO-10646-UCS-4-2143",
'confidence': 1.0
}
elif aBuf[:2] == codecs.BOM_LE:
# FF FE UTF-16, little endian BOM
self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
elif aBuf[:2] == codecs.BOM_BE:
# FE FF UTF-16, big endian BOM
self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
self._mGotData = True
if self.result['encoding'] and (self.result['confidence'] > 0.0):
self.done = True
return
if self._mInputState == ePureAscii:
if self._highBitDetector.search(aBuf):
self._mInputState = eHighbyte
elif ((self._mInputState == ePureAscii) and
self._escDetector.search(self._mLastChar + aBuf)):
self._mInputState = eEscAscii
self._mLastChar = aBuf[-1:]
if self._mInputState == eEscAscii:
if not self._mEscCharSetProber:
self._mEscCharSetProber = EscCharSetProber()
if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
'confidence': self._mEscCharSetProber.get_confidence()}
self.done = True
elif self._mInputState == eHighbyte:
if not self._mCharSetProbers:
self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
Latin1Prober()]
for prober in self._mCharSetProbers:
if prober.feed(aBuf) == constants.eFoundIt:
self.result = {'encoding': prober.get_charset_name(),
'confidence': prober.get_confidence()}
self.done = True
break
def close(self):
if self.done:
return
if not self._mGotData:
if constants._debug:
sys.stderr.write('no data received!\n')
return
self.done = True
if self._mInputState == ePureAscii:
self.result = {'encoding': 'ascii', 'confidence': 1.0}
return self.result
if self._mInputState == eHighbyte:
proberConfidence = None
maxProberConfidence = 0.0
maxProber = None
for prober in self._mCharSetProbers:
if not prober:
continue
proberConfidence = prober.get_confidence()
if proberConfidence > maxProberConfidence:
maxProberConfidence = proberConfidence
maxProber = prober
if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
self.result = {'encoding': maxProber.get_charset_name(),
'confidence': maxProber.get_confidence()}
return self.result
if constants._debug:
sys.stderr.write('no probers hit minimum threshold\n')
for prober in self._mCharSetProbers[0].mProbers:
if not prober:
continue
sys.stderr.write('%s confidence = %s\n' %
(prober.get_charset_name(),
prober.get_confidence()))
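# Hedged usage sketch (addition, not part of the original module): the usual
# chardet-style flow is to feed() one or more byte chunks, call close(), then
# read .result. The sample bytes below are made up.
#
#     detector = UniversalDetector()
#     detector.feed(b'\xef\xbb\xbfhello world')   # UTF-8 BOM followed by ASCII
#     detector.close()
#     detector.result   # -> {'encoding': 'UTF-8-SIG', 'confidence': 1.0}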
|
ActiveState/code
|
refs/heads/master
|
recipes/Python/577262_AreCurve_using_Monte_Carlo/recipe-577262.py
|
1
|
# Calculating area under the curve using Monte Carlo method
# FB - 201006137
import math
import random
# define any function here!
def f(x):
return math.sqrt(1.0 - x * x)
# define any xmin-xmax interval here! (xmin < xmax)
xmin = -1.0
xmax = 1.0
# find ymin-ymax
numSteps = 1000000 # the bigger the better, but slower!
ymin = f(xmin)
ymax = ymin
for i in range(numSteps):
x = xmin + (xmax - xmin) * float(i) / numSteps
y = f(x)
if y < ymin: ymin = y
if y > ymax: ymax = y
# Monte Carlo
rectArea = (xmax - xmin) * (ymax - ymin)
numPoints = 1000000 # the bigger the better, but slower!
ctr = 0
for j in range(numPoints):
x = xmin + (xmax - xmin) * random.random()
y = ymin + (ymax - ymin) * random.random()
if f(x) > 0 and y > 0 and y <= f(x):
ctr += 1
if f(x) < 0 and y < 0 and y >= f(x):
ctr += 1
fnArea = rectArea * float(ctr) / numPoints
print "Area under the curve = " + str(fnArea)
|
cyc805/FTRerouting
|
refs/heads/master
|
src/virtual-net-device/bindings/callbacks_list.py
|
127
|
callback_classes = [
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Packet>', 'ns3::Address const&', 'ns3::Address const&', 'unsigned short', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
yasoob/PythonRSSReader
|
refs/heads/master
|
venv/lib/python2.7/dist-packages/twisted/python/deprecate.py
|
13
|
# -*- test-case-name: twisted.python.test.test_deprecate -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Deprecation framework for Twisted.
To mark a method or function as being deprecated do this::
from twisted.python.versions import Version
from twisted.python.deprecate import deprecated
@deprecated(Version("Twisted", 8, 0, 0))
def badAPI(self, first, second):
'''
Docstring for badAPI.
'''
...
The newly-decorated badAPI will issue a warning when called. It will also have
a deprecation notice appended to its docstring.
To mark module-level attributes as being deprecated you can use::
badAttribute = "someValue"
...
deprecatedModuleAttribute(
Version("Twisted", 8, 0, 0),
"Use goodAttribute instead.",
"your.full.module.name",
"badAttribute")
The deprecated attributes will issue a warning whenever they are accessed. If
the attributes being deprecated are in the same module as the
L{deprecatedModuleAttribute} call is being made from, the C{__name__} global
can be used as the C{moduleName} parameter.
See also L{Version}.
@type DEPRECATION_WARNING_FORMAT: C{str}
@var DEPRECATION_WARNING_FORMAT: The default deprecation warning string format
to use when one is not provided by the user.
"""
from __future__ import division, absolute_import
__all__ = [
'deprecated',
'getDeprecationWarningString',
'getWarningMethod',
'setWarningMethod',
'deprecatedModuleAttribute',
]
import sys, inspect
from warnings import warn, warn_explicit
from dis import findlinestarts
from functools import wraps
from twisted.python.versions import getVersionString
DEPRECATION_WARNING_FORMAT = '%(fqpn)s was deprecated in %(version)s'
# Notionally, part of twisted.python.reflect, but defining it there causes a
# cyclic dependency between this module and that module. Define it here,
# instead, and let reflect import it to re-expose to the public.
def _fullyQualifiedName(obj):
"""
Return the fully qualified name of a module, class, method or function.
Classes and functions need to be module level ones to be correctly
qualified.
@rtype: C{str}.
"""
try:
name = obj.__qualname__
except AttributeError:
name = obj.__name__
if inspect.isclass(obj) or inspect.isfunction(obj):
moduleName = obj.__module__
return "%s.%s" % (moduleName, name)
elif inspect.ismethod(obj):
try:
cls = obj.im_class
except AttributeError:
# Python 3 eliminates im_class, substitutes __module__ and
# __qualname__ to provide similar information.
return "%s.%s" % (obj.__module__, obj.__qualname__)
else:
className = _fullyQualifiedName(cls)
return "%s.%s" % (className, name)
return name
# Try to keep it looking like something in twisted.python.reflect.
_fullyQualifiedName.__module__ = 'twisted.python.reflect'
_fullyQualifiedName.__name__ = 'fullyQualifiedName'
_fullyQualifiedName.__qualname__ = 'fullyQualifiedName'
def _getReplacementString(replacement):
"""
Surround a replacement for a deprecated API with some polite text exhorting
the user to consider it as an alternative.
@type replacement: C{str} or callable
@return: a string like "please use twisted.python.modules.getModule
instead".
"""
if callable(replacement):
replacement = _fullyQualifiedName(replacement)
return "please use %s instead" % (replacement,)
def _getDeprecationDocstring(version, replacement=None):
"""
Generate an addition to a deprecated object's docstring that explains its
deprecation.
@param version: the version it was deprecated.
@type version: L{Version}
@param replacement: The replacement, if specified.
@type replacement: C{str} or callable
@return: a string like "Deprecated in Twisted 27.2.0; please use
twisted.timestream.tachyon.flux instead."
"""
doc = "Deprecated in %s" % (getVersionString(version),)
if replacement:
doc = "%s; %s" % (doc, _getReplacementString(replacement))
return doc + "."
def _getDeprecationWarningString(fqpn, version, format=None, replacement=None):
"""
Return a string indicating that the Python name was deprecated in the given
version.
@param fqpn: Fully qualified Python name of the thing being deprecated
@type fqpn: C{str}
@param version: Version that C{fqpn} was deprecated in.
@type version: L{twisted.python.versions.Version}
@param format: A user-provided format to interpolate warning values into, or
L{DEPRECATION_WARNING_FORMAT
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if C{None} is
given.
@type format: C{str}
@param replacement: what should be used in place of C{fqpn}. Either pass in
a string, which will be inserted into the warning message, or a
callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
@return: A textual description of the deprecation
@rtype: C{str}
"""
if format is None:
format = DEPRECATION_WARNING_FORMAT
warningString = format % {
'fqpn': fqpn,
'version': getVersionString(version)}
if replacement:
warningString = "%s; %s" % (
warningString, _getReplacementString(replacement))
return warningString
def getDeprecationWarningString(callableThing, version, format=None,
replacement=None):
"""
Return a string indicating that the callable was deprecated in the given
version.
@type callableThing: C{callable}
@param callableThing: Callable object to be deprecated
@type version: L{twisted.python.versions.Version}
@param version: Version that C{callableThing} was deprecated in
@type format: C{str}
@param format: A user-provided format to interpolate warning values into,
or L{DEPRECATION_WARNING_FORMAT
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if C{None} is
given
@param replacement: what should be used in place of the callable. Either
pass in a string, which will be inserted into the warning message,
or a callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
@return: A string describing the deprecation.
@rtype: C{str}
"""
return _getDeprecationWarningString(
_fullyQualifiedName(callableThing), version, format, replacement)
def _appendToDocstring(thingWithDoc, textToAppend):
"""
Append the given text to the docstring of C{thingWithDoc}.
If C{thingWithDoc} has no docstring, then the text just replaces the
docstring. If it has a single-line docstring then it appends a blank line
and the message text. If it has a multi-line docstring, then it appends a
blank line and the message text, and also does the indentation correctly.
"""
if thingWithDoc.__doc__:
docstringLines = thingWithDoc.__doc__.splitlines()
else:
docstringLines = []
if len(docstringLines) == 0:
docstringLines.append(textToAppend)
elif len(docstringLines) == 1:
docstringLines.extend(['', textToAppend, ''])
else:
spaces = docstringLines.pop()
docstringLines.extend(['',
spaces + textToAppend,
spaces])
thingWithDoc.__doc__ = '\n'.join(docstringLines)
def deprecated(version, replacement=None):
"""
Return a decorator that marks callables as deprecated.
@type version: L{twisted.python.versions.Version}
@param version: The version in which the callable will be marked as
having been deprecated. The decorated function will be annotated
with this version, having it set as its C{deprecatedVersion}
attribute.
@param replacement: what should be used in place of the callable. Either
pass in a string, which will be inserted into the warning message,
or a callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
"""
def deprecationDecorator(function):
"""
Decorator that marks C{function} as deprecated.
"""
warningString = getDeprecationWarningString(
function, version, None, replacement)
@wraps(function)
def deprecatedFunction(*args, **kwargs):
warn(
warningString,
DeprecationWarning,
stacklevel=2)
return function(*args, **kwargs)
_appendToDocstring(deprecatedFunction,
_getDeprecationDocstring(version, replacement))
deprecatedFunction.deprecatedVersion = version
return deprecatedFunction
return deprecationDecorator
def getWarningMethod():
"""
Return the warning method currently used to record deprecation warnings.
"""
return warn
def setWarningMethod(newMethod):
"""
Set the warning method to use to record deprecation warnings.
The callable should take message, category and stacklevel. The return
value is ignored.
"""
global warn
warn = newMethod
class _InternalState(object):
"""
An L{_InternalState} is a helper object for a L{_ModuleProxy}, so that it
can easily access its own attributes, bypassing its logic for delegating to
another object that it's proxying for.
@ivar proxy: a L{ModuleProxy}
"""
def __init__(self, proxy):
object.__setattr__(self, 'proxy', proxy)
def __getattribute__(self, name):
return object.__getattribute__(object.__getattribute__(self, 'proxy'),
name)
def __setattr__(self, name, value):
return object.__setattr__(object.__getattribute__(self, 'proxy'),
name, value)
class _ModuleProxy(object):
"""
Python module wrapper to hook module-level attribute access.
Access to deprecated attributes first checks
L{_ModuleProxy._deprecatedAttributes}, if the attribute does not appear
there then access falls through to L{_ModuleProxy._module}, the wrapped
module object.
@ivar _module: Module on which to hook attribute access.
@type _module: C{module}
@ivar _deprecatedAttributes: Mapping of attribute names to objects that
retrieve the module attribute's original value.
@type _deprecatedAttributes: C{dict} mapping C{str} to
L{_DeprecatedAttribute}
@ivar _lastWasPath: Heuristic guess as to whether warnings about this
package should be ignored for the next call. If the last attribute
access of this module was a C{getattr} of C{__path__}, we will assume
that it was the import system doing it and we won't emit a warning for
the next access, even if it is to a deprecated attribute. The CPython
import system always tries to access C{__path__}, then the attribute
itself, then the attribute itself again, in both successful and failed
cases.
@type _lastWasPath: C{bool}
"""
def __init__(self, module):
state = _InternalState(self)
state._module = module
state._deprecatedAttributes = {}
state._lastWasPath = False
def __repr__(self):
"""
Get a string containing the type of the module proxy and a
representation of the wrapped module object.
"""
state = _InternalState(self)
return '<%s module=%r>' % (type(self).__name__, state._module)
def __setattr__(self, name, value):
"""
Set an attribute on the wrapped module object.
"""
state = _InternalState(self)
state._lastWasPath = False
setattr(state._module, name, value)
def __getattribute__(self, name):
"""
Get an attribute from the module object, possibly emitting a warning.
If the specified name has been deprecated, then a warning is issued.
(Unless certain obscure conditions are met; see
L{_ModuleProxy._lastWasPath} for more information about what might quash
such a warning.)
"""
state = _InternalState(self)
if state._lastWasPath:
deprecatedAttribute = None
else:
deprecatedAttribute = state._deprecatedAttributes.get(name)
if deprecatedAttribute is not None:
# If we have a _DeprecatedAttribute object from the earlier lookup,
# allow it to issue the warning.
value = deprecatedAttribute.get()
else:
# Otherwise, just retrieve the underlying value directly; it's not
# deprecated, there's no warning to issue.
value = getattr(state._module, name)
if name == '__path__':
state._lastWasPath = True
else:
state._lastWasPath = False
return value
class _DeprecatedAttribute(object):
"""
Wrapper for deprecated attributes.
This is intended to be used by L{_ModuleProxy}. Calling
L{_DeprecatedAttribute.get} will issue a warning and retrieve the
underlying attribute's value.
@type module: C{module}
@ivar module: The original module instance containing this attribute
@type fqpn: C{str}
@ivar fqpn: Fully qualified Python name for the deprecated attribute
@type version: L{twisted.python.versions.Version}
@ivar version: Version that the attribute was deprecated in
@type message: C{str}
@ivar message: Deprecation message
"""
def __init__(self, module, name, version, message):
"""
Initialise a deprecated name wrapper.
"""
self.module = module
self.__name__ = name
self.fqpn = module.__name__ + '.' + name
self.version = version
self.message = message
def get(self):
"""
Get the underlying attribute value and issue a deprecation warning.
"""
# This might fail if the deprecated thing is a module inside a package.
# In that case, don't emit the warning this time. The import system
# will come back again when it's not an AttributeError and we can emit
# the warning then.
result = getattr(self.module, self.__name__)
message = _getDeprecationWarningString(self.fqpn, self.version,
DEPRECATION_WARNING_FORMAT + ': ' + self.message)
warn(message, DeprecationWarning, stacklevel=3)
return result
def _deprecateAttribute(proxy, name, version, message):
"""
Mark a module-level attribute as being deprecated.
@type proxy: L{_ModuleProxy}
@param proxy: The module proxy instance proxying the deprecated attributes
@type name: C{str}
@param name: Attribute name
@type version: L{twisted.python.versions.Version}
@param version: Version that the attribute was deprecated in
@type message: C{str}
@param message: Deprecation message
"""
_module = object.__getattribute__(proxy, '_module')
attr = _DeprecatedAttribute(_module, name, version, message)
# Add a deprecated attribute marker for this module's attribute. When this
# attribute is accessed via _ModuleProxy a warning is emitted.
_deprecatedAttributes = object.__getattribute__(
proxy, '_deprecatedAttributes')
_deprecatedAttributes[name] = attr
def deprecatedModuleAttribute(version, message, moduleName, name):
"""
Declare a module-level attribute as being deprecated.
@type version: L{twisted.python.versions.Version}
@param version: Version that the attribute was deprecated in
@type message: C{str}
@param message: Deprecation message
@type moduleName: C{str}
@param moduleName: Fully-qualified Python name of the module containing
the deprecated attribute; if called from the same module as the
attributes are being deprecated in, using the C{__name__} global can
be helpful
@type name: C{str}
@param name: Attribute name to deprecate
"""
module = sys.modules[moduleName]
if not isinstance(module, _ModuleProxy):
module = _ModuleProxy(module)
sys.modules[moduleName] = module
_deprecateAttribute(module, name, version, message)
def warnAboutFunction(offender, warningString):
"""
Issue a warning string, identifying C{offender} as the responsible code.
This function is used to deprecate some behavior of a function. It differs
from L{warnings.warn} in that it is not limited to deprecating the behavior
of a function currently on the call stack.
    @param offender: The function that is being deprecated.
@param warningString: The string that should be emitted by this warning.
@type warningString: C{str}
@since: 11.0
"""
# inspect.getmodule() is attractive, but somewhat
# broken in Python < 2.6. See Python bug 4845.
offenderModule = sys.modules[offender.__module__]
filename = inspect.getabsfile(offenderModule)
lineStarts = list(findlinestarts(offender.__code__))
lastLineNo = lineStarts[-1][1]
globals = offender.__globals__
kwargs = dict(
category=DeprecationWarning,
filename=filename,
lineno=lastLineNo,
module=offenderModule.__name__,
registry=globals.setdefault("__warningregistry__", {}),
module_globals=None)
warn_explicit(warningString, **kwargs)
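# Illustrative usage sketch (not part of the original module):
# warnAboutFunction attributes the warning to the offending function's own
# code rather than to the caller's frame; "callback" and the message below
# are hypothetical.
#
#     from twisted.python.deprecate import warnAboutFunction
#
#     def callback(result):
#         return result.upper()
#
#     warnAboutFunction(
#         callback,
#         "Returning an upper-cased result from callback is deprecated.")
#
# The DeprecationWarning is reported against callback's source file and the
# last line of its body (taken from findlinestarts on its code object), not
# against whatever code happened to call warnAboutFunction.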
|
joel-airspring/Diamond
|
refs/heads/master
|
src/diamond/handler/queue.py
|
21
|
# coding=utf-8
"""
This is a meta handler that acts as a shim for the new threading model. Please
do not try to use it as a normal handler.
"""
from Handler import Handler
class QueueHandler(Handler):
def __init__(self, config=None, queue=None, log=None):
# Initialize Handler
Handler.__init__(self, config=config, log=log)
self.metrics = []
self.queue = queue
def __del__(self):
"""
        Ensure as many of the metrics as possible are sent to the handlers
        on shutdown
"""
self._flush()
def process(self, metric):
return self._process(metric)
def _process(self, metric):
"""
We skip any locking code due to the fact that this is now a single
process per collector
"""
self.metrics.append(metric)
def flush(self):
return self._flush()
def _flush(self):
"""
We skip any locking code due to the fact that this is now a single
process per collector
"""
if len(self.metrics) > 0:
self.queue.put(self.metrics, block=False)
self.metrics = []
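# Illustrative usage sketch (not part of the original handler): the collector
# process side might wire a QueueHandler to a multiprocessing queue roughly
# like this; the empty config and the "metric" objects are hypothetical.
#
#     from multiprocessing import Queue
#
#     queue = Queue()
#     handler = QueueHandler(config={}, queue=queue)
#
#     handler.process(metric)   # buffers the metric in memory
#     handler.flush()           # puts the buffered batch onto the queue
#
# A separate handler process can then queue.get() each batch and fan it out
# to the real, user-configured handlers.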
|
danilito19/django
|
refs/heads/master
|
tests/admin_ordering/models.py
|
147
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.db import models
class Band(models.Model):
name = models.CharField(max_length=100)
bio = models.TextField()
rank = models.IntegerField()
class Meta:
ordering = ('name',)
class Song(models.Model):
band = models.ForeignKey(Band)
name = models.CharField(max_length=100)
duration = models.IntegerField()
other_interpreters = models.ManyToManyField(Band, related_name='covers')
class Meta:
ordering = ('name',)
class SongInlineDefaultOrdering(admin.StackedInline):
model = Song
class SongInlineNewOrdering(admin.StackedInline):
model = Song
ordering = ('duration', )
class DynOrderingBandAdmin(admin.ModelAdmin):
def get_ordering(self, request):
if request.user.is_superuser:
return ['rank']
else:
return ['name']
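# Illustrative sketch (not part of the original test models): the admin_ordering
# tests exercise these classes directly, but wiring them into an admin site
# would look roughly like this; BandAdmin is a hypothetical name.
#
#     class BandAdmin(admin.ModelAdmin):
#         inlines = [SongInlineNewOrdering]
#
#     admin.site.register(Band, DynOrderingBandAdmin)
#
# With DynOrderingBandAdmin, a superuser sees the changelist ordered by rank,
# while any other user falls back to ordering by name.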
|
vinhqdang/MOOC
|
refs/heads/master
|
projecteuler/17 number letter counts/solve.py
|
1
|
# http://stackoverflow.com/questions/8982163/how-do-i-tell-python-to-convert-integers-into-words
import sys
delimiter = "" #use for this problem
def handel_upto_99(number):
predef={0:"zero",1:"one",2:"two",3:"three",4:"four",5:"five",6:"six",7:"seven",8:"eight",9:"nine",10:"ten",11:"eleven",12:"twelve",13:"thirteen",14:"fourteen",15:"fifteen",16:"sixteen",17:"seventeen",18:"eighteen",19:"nineteen",20:"twenty",30:"thirty",40:"forty",50:"fifty",60:"sixty",70:"seventy",80:"eighty",90:"ninety",100:"hundred",100000:"lakh",10000000:"crore",1000000:"million",1000000000:"billion"}
if number in predef.keys():
return predef[number]
else:
return predef[(number/10)*10]+ delimiter +predef[number%10]
def return_bigdigit(number,devideby):
predef={0:"zero",1:"one",2:"two",3:"three",4:"four",5:"five",6:"six",7:"seven",8:"eight",9:"nine",10:"ten",11:"eleven",12:"twelve",13:"thirteen",14:"fourteen",15:"fifteen",16:"sixteen",17:"seventeen",18:"eighteen",19:"nineteen",20:"twenty",30:"thirty",40:"forty",50:"fifty",60:"sixty",70:"seventy",80:"eighty",90:"ninety",100:"hundred",1000:"thousand",100000:"lakh",10000000:"crore",1000000:"million",1000000000:"billion"}
if devideby in predef.keys():
return predef[number/devideby]+ delimiter +predef[devideby]
else:
devideby/=10
return handel_upto_99(number/devideby)+ delimiter +predef[devideby]
def mainfunction(number):
dev={100:"hundred",1000:"thousand",100000:"lakh",10000000:"crore",1000000000:"billion"}
    if number == 0:
        return "zero"
if number<100:
result=handel_upto_99(number)
else:
result=""
while number>=100:
devideby=1
length=len(str(number))
for i in range(length-1):
devideby*=10
if number%devideby==0:
if devideby in dev:
return handel_upto_99(number/devideby)+delimiter+ dev[devideby]
else:
return handel_upto_99(number/(devideby/10))+delimiter+ dev[devideby/10]
res=return_bigdigit(number,devideby)
result=result+delimiter+res
if devideby not in dev:
number=number-((devideby/10)*(number/(devideby/10)))
number=number-devideby*(number/devideby)
if number <100:
result = result + delimiter + "and" + delimiter + handel_upto_99(number)
return result
def test ():
assert len (mainfunction (342)) == 23
assert len (mainfunction (115)) == 20
def solve (n):
sum = 0
for i in range (1, n + 1):
sum += len (mainfunction (i))
return sum
print solve (int(sys.argv[1]))
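# Example runs (hypothetical shell session):
#
#     $ python solve.py 5
#     19    # len("one" + "two" + "three" + "four" + "five") = 3+3+5+4+4
#     $ python solve.py 1000
#
# For n = 1000 the published Project Euler 17 answer is 21124; the empty
# delimiter above means spaces and hyphens are never counted.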
|