| repo_name (stringlengths 5-100) | ref (stringlengths 12-67) | path (stringlengths 4-244) | copies (stringlengths 1-8) | content (stringlengths 0-1.05M, ⌀) |
|---|---|---|---|---|
| rsvip/Django | refs/heads/master | tests/migrations/test_migrations_conflict/0001_initial.py | 2995 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
operations = [
migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=255)),
("slug", models.SlugField(null=True)),
("age", models.IntegerField(default=0)),
("silly_field", models.BooleanField(default=False)),
],
),
migrations.CreateModel(
"Tribble",
[
("id", models.AutoField(primary_key=True)),
("fluffy", models.BooleanField(default=True)),
],
)
]
|
| bl4ckdu5t/registron | refs/heads/master | PyInstaller/hooks/hook-wx.lib.activex.py | 10 |
#-----------------------------------------------------------------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from PyInstaller.hooks.hookutils import exec_statement
# This is needed because importing wx.lib.activex makes comtypes generate some stuff.
exec_statement("import wx.lib.activex")
|
| xiaolihope/PerfKitBenchmarker-1.7.0 | refs/heads/master | perfkitbenchmarker/linux_packages/hbase.py | 2 |
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing HBase installation and cleanup functions.
HBase is a scalable NoSQL database built on Hadoop.
https://hbase.apache.org/
"""
import functools
import os
import posixpath
from perfkitbenchmarker import data
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import hadoop
HBASE_VERSION = '1.1.5'
HBASE_URL = ('http://www.us.apache.org/dist/hbase/{0}/'
'hbase-{0}-bin.tar.gz').format(HBASE_VERSION)
DATA_FILES = ['hbase/hbase-site.xml.j2', 'hbase/regionservers.j2',
'hbase/hbase-env.sh.j2']
HBASE_DIR = posixpath.join(vm_util.VM_TMP_DIR, 'hbase')
HBASE_BIN = posixpath.join(HBASE_DIR, 'bin')
HBASE_CONF_DIR = posixpath.join(HBASE_DIR, 'conf')
def CheckPrerequisites():
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
for resource in DATA_FILES:
data.ResourcePath(resource)
def _Install(vm):
vm.Install('hadoop')
vm.Install('curl')
vm.RemoteCommand(('mkdir {0} && curl -L {1} | '
'tar -C {0} --strip-components=1 -xzf -').format(
HBASE_DIR, HBASE_URL))
def YumInstall(vm):
"""Installs HBase on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs HBase on the VM."""
_Install(vm)
def _RenderConfig(vm, master_ip, zk_ips, regionserver_ips):
# Use the same heap configuration as Cassandra
memory_mb = vm.total_memory_kb // 1024
hbase_memory_mb = max(min(memory_mb // 2, 1024),
min(memory_mb // 4, 8192))
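# Illustrative worked example (added comment, not in the original source): on a
# VM with 16 GiB of RAM, memory_mb = 16384, so min(16384 // 2, 1024) = 1024 and
# min(16384 // 4, 8192) = 4096, giving hbase_memory_mb = max(1024, 4096) = 4096.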
context = {
'master_ip': master_ip,
'worker_ips': regionserver_ips,
'zk_quorum_ips': zk_ips,
'hadoop_private_key': hadoop.HADOOP_PRIVATE_KEY,
'hbase_memory_mb': hbase_memory_mb,
'scratch_dir': vm.GetScratchDir(),
}
for file_name in DATA_FILES:
file_path = data.ResourcePath(file_name)
remote_path = posixpath.join(HBASE_CONF_DIR,
os.path.basename(file_name))
if file_name.endswith('.j2'):
vm.RenderTemplate(file_path, os.path.splitext(remote_path)[0], context)
else:
vm.RemoteCopy(file_path, remote_path)
def ConfigureAndStart(master, regionservers, zk_nodes):
"""Configure HBase on a cluster.
Args:
master: VM. Master VM.
regionservers: List of VMs. RegionServer VMs.
zk_nodes: List of VMs. ZooKeeper quorum VMs.
"""
vms = [master] + regionservers
def LinkNativeLibraries(vm):
vm.RemoteCommand(('mkdir {0}/lib/native && '
'ln -s {1} {0}/lib/native/Linux-amd64-64').format(
HBASE_DIR,
posixpath.join(hadoop.HADOOP_DIR, 'lib', 'native')))
vm_util.RunThreaded(LinkNativeLibraries, vms)
fn = functools.partial(_RenderConfig, master_ip=master.internal_ip,
zk_ips=[vm.internal_ip for vm in zk_nodes],
regionserver_ips=[regionserver.internal_ip
for regionserver in regionservers])
vm_util.RunThreaded(fn, vms)
master.RemoteCommand('{0} dfs -mkdir /hbase'.format(
posixpath.join(hadoop.HADOOP_BIN, 'hdfs')))
master.RemoteCommand(posixpath.join(HBASE_BIN, 'start-hbase.sh'))
def Stop(master):
"""Stop HBase.
Args:
master: VM. Master VM.
"""
master.RemoteCommand(posixpath.join(HBASE_BIN, 'stop-hbase.sh'))
|
| Shrhawk/edx-platform | refs/heads/master | lms/djangoapps/mobile_api/social_facebook/preferences/urls.py | 128 |
"""
URLs for users sharing preferences
"""
from django.conf.urls import patterns, url
from .views import UserSharing
urlpatterns = patterns(
'mobile_api.social_facebook.preferences.views',
url(
r'^preferences/$',
UserSharing.as_view(),
name='preferences'
),
)
|
| Lab603/PicEncyclopedias | refs/heads/master | jni-build/jni-build/jni/include/tensorflow/contrib/learn/python/learn/tests/io_test.py | 5 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""tf.learn IO operation tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import tensorflow as tf
from tensorflow.contrib.learn.python import learn
from tensorflow.contrib.learn.python.learn import datasets
from tensorflow.contrib.learn.python.learn.estimators._sklearn import accuracy_score
# pylint: disable=wildcard-import
from tensorflow.contrib.learn.python.learn.learn_io import *
# pylint: enable=wildcard-import
class IOTest(tf.test.TestCase):
# pylint: disable=undefined-variable
"""tf.learn IO operation tests."""
def test_pandas_dataframe(self):
if HAS_PANDAS:
import pandas as pd # pylint: disable=g-import-not-at-top
random.seed(42)
iris = datasets.load_iris()
data = pd.DataFrame(iris.data)
labels = pd.DataFrame(iris.target)
classifier = learn.TensorFlowLinearClassifier(
feature_columns=learn.infer_real_valued_columns_from_input(data),
n_classes=3)
classifier.fit(data, labels)
score = accuracy_score(labels[0], classifier.predict(data))
self.assertGreater(score, 0.5, "Failed with score = {0}".format(score))
else:
print("No pandas installed. pandas-related tests are skipped.")
def test_pandas_series(self):
if HAS_PANDAS:
import pandas as pd # pylint: disable=g-import-not-at-top
random.seed(42)
iris = datasets.load_iris()
data = pd.DataFrame(iris.data)
labels = pd.Series(iris.target)
classifier = learn.TensorFlowLinearClassifier(
feature_columns=learn.infer_real_valued_columns_from_input(data),
n_classes=3)
classifier.fit(data, labels)
score = accuracy_score(labels, classifier.predict(data))
self.assertGreater(score, 0.5, "Failed with score = {0}".format(score))
def test_string_data_formats(self):
if HAS_PANDAS:
import pandas as pd # pylint: disable=g-import-not-at-top
with self.assertRaises(ValueError):
learn.io.extract_pandas_data(pd.DataFrame({"Test": ["A", "B"]}))
with self.assertRaises(ValueError):
learn.io.extract_pandas_labels(pd.DataFrame({"Test": ["A", "B"]}))
def test_dask_io(self):
if HAS_DASK and HAS_PANDAS:
import pandas as pd # pylint: disable=g-import-not-at-top
import dask.dataframe as dd # pylint: disable=g-import-not-at-top
# test dask.dataframe
df = pd.DataFrame(
dict(a=list("aabbcc"), b=list(range(6))),
index=pd.date_range(start="20100101", periods=6))
ddf = dd.from_pandas(df, npartitions=3)
extracted_ddf = extract_dask_data(ddf)
self.assertEqual(
extracted_ddf.divisions, (0, 2, 4, 6),
"Failed with divisions = {0}".format(extracted_ddf.divisions))
self.assertEqual(
extracted_ddf.columns.tolist(), ["a", "b"],
"Failed with columns = {0}".format(extracted_ddf.columns))
# test dask.series
labels = ddf["a"]
extracted_labels = extract_dask_labels(labels)
self.assertEqual(
extracted_labels.divisions, (0, 2, 4, 6),
"Failed with divisions = {0}".format(extracted_labels.divisions))
# labels should only have one column
with self.assertRaises(ValueError):
extract_dask_labels(ddf)
else:
print("No dask installed. dask-related tests are skipped.")
def test_dask_iris_classification(self):
if HAS_DASK and HAS_PANDAS:
import pandas as pd # pylint: disable=g-import-not-at-top
import dask.dataframe as dd # pylint: disable=g-import-not-at-top
random.seed(42)
iris = datasets.load_iris()
data = pd.DataFrame(iris.data)
data = dd.from_pandas(data, npartitions=2)
labels = pd.DataFrame(iris.target)
labels = dd.from_pandas(labels, npartitions=2)
classifier = learn.TensorFlowLinearClassifier(
feature_columns=learn.infer_real_valued_columns_from_input(data),
n_classes=3)
classifier.fit(data, labels)
predictions = data.map_partitions(classifier.predict).compute()
score = accuracy_score(labels.compute(), predictions)
self.assertGreater(score, 0.5, "Failed with score = {0}".format(score))
if __name__ == "__main__":
tf.test.main()
|
| landlab/landlab | refs/heads/master | tests/grid/test_radial_grid/test_nodes.py | 3 |
from numpy.testing import assert_array_equal
from pytest import approx
from landlab import RadialModelGrid
def test_radius_at_node():
grid = RadialModelGrid(2, 8)
assert grid.radius_at_node == approx(
[
2.0,
2.0,
2.0,
2.0,
2.0,
1.0,
2.0,
2.0,
1.0,
1.0,
2.0,
1.0,
0.0,
1.0,
2.0,
1.0,
1.0,
2.0,
2.0,
1.0,
2.0,
2.0,
2.0,
2.0,
2.0,
]
)
def test_spacing_of_rings():
grid = RadialModelGrid(2, spacing=1.0)
assert grid.spacing_of_rings == approx(1.0)
assert isinstance(grid.spacing_of_rings, float)
grid = RadialModelGrid(2, spacing=11.0)
assert grid.spacing_of_rings == approx(11.0)
assert isinstance(grid.spacing_of_rings, float)
def test_number_of_rings():
grid = RadialModelGrid(2)
assert grid.number_of_rings == 2
assert isinstance(grid.number_of_rings, int)
grid = RadialModelGrid(4)
assert grid.number_of_rings == 4
def test_nodes_per_ring():
grid = RadialModelGrid(3, 8)
assert_array_equal(grid.number_of_nodes_in_ring, [8, 16, 32])
|
| amoldeshpande/googletest-vs2013 | refs/heads/master | test/gtest_env_var_test.py | 2408 |
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
if expected != actual:
print 'Expected: %s' % (expected,)
print ' Actual: %s' % (actual,)
raise AssertionError
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def GetFlag(flag):
"""Runs gtest_env_var_test_ and returns its output."""
args = [COMMAND]
if flag is not None:
args += [flag]
return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
"""Verifies that the given flag is affected by the corresponding env var."""
env_var = 'GTEST_' + flag.upper()
SetEnvVar(env_var, test_val)
AssertEq(test_val, GetFlag(flag))
SetEnvVar(env_var, None)
AssertEq(default_val, GetFlag(flag))
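# Descriptive note (added for clarity, not in the original test): for example,
# TestFlag('color', 'yes', 'auto') sets GTEST_COLOR=yes, runs the helper binary,
# and expects it to report color=yes; it then unsets GTEST_COLOR and expects
# the built-in default 'auto'.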
class GTestEnvVarTest(gtest_test_utils.TestCase):
def testEnvVarAffectsFlag(self):
"""Tests that environment variable should affect the corresponding flag."""
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
TestFlag('filter', 'FooTest.Bar', '*')
TestFlag('output', 'xml:tmp/foo.xml', '')
TestFlag('print_time', '0', '1')
TestFlag('repeat', '999', '1')
TestFlag('throw_on_failure', '1', '0')
TestFlag('death_test_style', 'threadsafe', 'fast')
TestFlag('catch_exceptions', '0', '1')
if IS_LINUX:
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
if __name__ == '__main__':
gtest_test_utils.Main()
|
| dsiddharth/access-keys | refs/heads/master | keystone/openstack/common/db/sqlalchemy/utils.py | 2 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010-2011 OpenStack Foundation.
# Copyright 2012 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from migrate.changeset import UniqueConstraint
import sqlalchemy
from sqlalchemy import Boolean
from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy.engine import reflection
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import func
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy.sql.expression import literal_column
from sqlalchemy.sql.expression import UpdateBase
from sqlalchemy.sql import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.types import NullType
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common import log as logging
from keystone.openstack.common import timeutils
LOG = logging.getLogger(__name__)
_DBURL_REGEX = re.compile(r"[^:]+://([^:]+):([^@]+)@.+")
def sanitize_db_url(url):
match = _DBURL_REGEX.match(url)
if match:
return '%s****:****%s' % (url[:match.start(1)], url[match.end(2):])
return url
class InvalidSortKey(Exception):
message = _("Sort key supplied was not valid.")
# copy from glance/db/sqlalchemy/api.py
def paginate_query(query, model, limit, sort_keys, marker=None,
sort_dir=None, sort_dirs=None):
"""Returns a query with sorting / pagination criteria added.
Pagination works by requiring a unique sort_key, specified by sort_keys.
(If sort_keys is not unique, then we risk looping through values.)
We use the last row in the previous page as the 'marker' for pagination.
So we must return values that follow the passed marker in the sort order.
With a single-valued sort_key, this would be easy: sort_key > X.
With a compound sort_key (k1, k2, k3), we must do this to repeat
the lexicographical ordering:
(k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3)
We also have to cope with different sort_directions.
Typically, the id of the last row is used as the client-facing pagination
marker, then the actual marker object must be fetched from the db and
passed in to us as marker.
:param query: the query object to which we should add paging/sorting
:param model: the ORM model class
:param limit: maximum number of items to return
:param sort_keys: array of attributes by which results should be sorted
:param marker: the last item of the previous page; we return the next
results after this value.
:param sort_dir: direction in which results should be sorted (asc, desc)
:param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys
:rtype: sqlalchemy.orm.query.Query
:return: The query with sorting/pagination added.
"""
if 'id' not in sort_keys:
# TODO(justinsb): If this ever gives a false-positive, check
# the actual primary key, rather than assuming it is 'id'
LOG.warning(_('Id not in sort_keys; is sort_keys unique?'))
assert(not (sort_dir and sort_dirs))
# Default the sort direction to ascending
if sort_dirs is None and sort_dir is None:
sort_dir = 'asc'
# Ensure a per-column sort direction
if sort_dirs is None:
sort_dirs = [sort_dir for _sort_key in sort_keys]
assert(len(sort_dirs) == len(sort_keys))
# Add sorting
for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
try:
sort_dir_func = {
'asc': sqlalchemy.asc,
'desc': sqlalchemy.desc,
}[current_sort_dir]
except KeyError:
raise ValueError(_("Unknown sort direction, "
"must be 'desc' or 'asc'"))
try:
sort_key_attr = getattr(model, current_sort_key)
except AttributeError:
raise InvalidSortKey()
query = query.order_by(sort_dir_func(sort_key_attr))
# Add pagination
if marker is not None:
marker_values = []
for sort_key in sort_keys:
v = getattr(marker, sort_key)
marker_values.append(v)
# Build up an array of sort criteria as in the docstring
criteria_list = []
for i in range(len(sort_keys)):
crit_attrs = []
for j in range(i):
model_attr = getattr(model, sort_keys[j])
crit_attrs.append((model_attr == marker_values[j]))
model_attr = getattr(model, sort_keys[i])
if sort_dirs[i] == 'desc':
crit_attrs.append((model_attr < marker_values[i]))
else:
crit_attrs.append((model_attr > marker_values[i]))
criteria = sqlalchemy.sql.and_(*crit_attrs)
criteria_list.append(criteria)
f = sqlalchemy.sql.or_(*criteria_list)
query = query.filter(f)
if limit is not None:
query = query.limit(limit)
return query
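# Hypothetical usage sketch (the `Instance` model and `session` below are not
# part of this module). With sort_keys=['created_at', 'id'] and a marker row,
# the filter built above is equivalent to:
#     created_at > marker.created_at
#     OR (created_at = marker.created_at AND id > marker.id)
#
#     query = session.query(Instance)
#     page = paginate_query(query, Instance, limit=50,
#                           sort_keys=['created_at', 'id'],
#                           marker=last_row_of_previous_page,
#                           sort_dir='asc').all()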
def get_table(engine, name):
"""Returns an sqlalchemy table dynamically from db.
Needed because the models don't work for us in migrations
as models will be far out of sync with the current data.
"""
metadata = MetaData()
metadata.bind = engine
return Table(name, metadata, autoload=True)
class InsertFromSelect(UpdateBase):
"""Form the base for `INSERT INTO table (SELECT ... )` statement."""
def __init__(self, table, select):
self.table = table
self.select = select
@compiles(InsertFromSelect)
def visit_insert_from_select(element, compiler, **kw):
"""Form the `INSERT INTO table (SELECT ... )` statement."""
return "INSERT INTO %s %s" % (
compiler.process(element.table, asfrom=True),
compiler.process(element.select))
class ColumnError(Exception):
"""Error raised when no column or an invalid column is found."""
def _get_not_supported_column(col_name_col_instance, column_name):
try:
column = col_name_col_instance[column_name]
except KeyError:
msg = _("Please specify column %s in col_name_col_instance "
"param. It is required because column has unsupported "
"type by sqlite).")
raise ColumnError(msg % column_name)
if not isinstance(column, Column):
msg = _("col_name_col_instance param has wrong type of "
"column instance for column %s It should be instance "
"of sqlalchemy.Column.")
raise ColumnError(msg % column_name)
return column
def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns,
**col_name_col_instance):
"""Drop unique constraint from table.
This method drops UC from table and works for mysql, postgresql and sqlite.
In mysql and postgresql we are able to use "alter table" construction.
Sqlalchemy doesn't support some sqlite column types and replaces their
type with NullType in metadata. We process these columns and replace
NullType with the correct column type.
:param migrate_engine: sqlalchemy engine
:param table_name: name of table that contains uniq constraint.
:param uc_name: name of uniq constraint that will be dropped.
:param columns: columns that are in uniq constraint.
:param col_name_col_instance: contains pair column_name=column_instance.
column_instance is instance of Column. These params
are required only for columns that have unsupported
types by sqlite. For example BigInteger.
"""
meta = MetaData()
meta.bind = migrate_engine
t = Table(table_name, meta, autoload=True)
if migrate_engine.name == "sqlite":
override_cols = [
_get_not_supported_column(col_name_col_instance, col.name)
for col in t.columns
if isinstance(col.type, NullType)
]
for col in override_cols:
t.columns.replace(col)
uc = UniqueConstraint(*columns, table=t, name=uc_name)
uc.drop()
def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
use_soft_delete, *uc_column_names):
"""Drop all old rows having the same values for columns in uc_columns.
This method drops (or marks as `deleted` if use_soft_delete is True) old
duplicate rows from the table with name `table_name`.
:param migrate_engine: Sqlalchemy engine
:param table_name: Table with duplicates
:param use_soft_delete: If True - values will be marked as `deleted`,
if False - values will be removed from table
:param uc_column_names: Unique constraint columns
"""
meta = MetaData()
meta.bind = migrate_engine
table = Table(table_name, meta, autoload=True)
columns_for_group_by = [table.c[name] for name in uc_column_names]
columns_for_select = [func.max(table.c.id)]
columns_for_select.extend(columns_for_group_by)
duplicated_rows_select = select(columns_for_select,
group_by=columns_for_group_by,
having=func.count(table.c.id) > 1)
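# Illustrative (added comment): for uc_column_names = ('name',) the select
# above is roughly
#     SELECT max(id), name FROM <table_name> GROUP BY name HAVING count(id) > 1
# i.e. one row per group of duplicates, carrying the largest id, which is kept.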
for row in migrate_engine.execute(duplicated_rows_select):
# NOTE(boris-42): Do not remove row that has the biggest ID.
delete_condition = table.c.id != row[0]
is_none = None # workaround for pyflakes
delete_condition &= table.c.deleted_at == is_none
for name in uc_column_names:
delete_condition &= table.c[name] == row[name]
rows_to_delete_select = select([table.c.id]).where(delete_condition)
for row in migrate_engine.execute(rows_to_delete_select).fetchall():
LOG.info(_("Deleting duplicated row with id: %(id)s from table: "
"%(table)s") % dict(id=row[0], table=table_name))
if use_soft_delete:
delete_statement = table.update().\
where(delete_condition).\
values({
'deleted': literal_column('id'),
'updated_at': literal_column('updated_at'),
'deleted_at': timeutils.utcnow()
})
else:
delete_statement = table.delete().where(delete_condition)
migrate_engine.execute(delete_statement)
def _get_default_deleted_value(table):
if isinstance(table.c.id.type, Integer):
return 0
if isinstance(table.c.id.type, String):
return ""
raise ColumnError(_("Unsupported id columns type"))
def _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes):
table = get_table(migrate_engine, table_name)
insp = reflection.Inspector.from_engine(migrate_engine)
real_indexes = insp.get_indexes(table_name)
existing_index_names = dict(
[(index['name'], index['column_names']) for index in real_indexes])
# NOTE(boris-42): Restore indexes on `deleted` column
for index in indexes:
if 'deleted' not in index['column_names']:
continue
name = index['name']
if name in existing_index_names:
column_names = [table.c[c] for c in existing_index_names[name]]
old_index = Index(name, *column_names, unique=index["unique"])
old_index.drop(migrate_engine)
column_names = [table.c[c] for c in index['column_names']]
new_index = Index(index["name"], *column_names, unique=index["unique"])
new_index.create(migrate_engine)
def change_deleted_column_type_to_boolean(migrate_engine, table_name,
**col_name_col_instance):
if migrate_engine.name == "sqlite":
return _change_deleted_column_type_to_boolean_sqlite(
migrate_engine, table_name, **col_name_col_instance)
insp = reflection.Inspector.from_engine(migrate_engine)
indexes = insp.get_indexes(table_name)
table = get_table(migrate_engine, table_name)
old_deleted = Column('old_deleted', Boolean, default=False)
old_deleted.create(table, populate_default=False)
table.update().\
where(table.c.deleted == table.c.id).\
values(old_deleted=True).\
execute()
table.c.deleted.drop()
table.c.old_deleted.alter(name="deleted")
_restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes)
def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name,
**col_name_col_instance):
insp = reflection.Inspector.from_engine(migrate_engine)
table = get_table(migrate_engine, table_name)
columns = []
for column in table.columns:
column_copy = None
if column.name != "deleted":
if isinstance(column.type, NullType):
column_copy = _get_not_supported_column(col_name_col_instance,
column.name)
else:
column_copy = column.copy()
else:
column_copy = Column('deleted', Boolean, default=0)
columns.append(column_copy)
constraints = [constraint.copy() for constraint in table.constraints]
meta = table.metadata
new_table = Table(table_name + "__tmp__", meta,
*(columns + constraints))
new_table.create()
indexes = []
for index in insp.get_indexes(table_name):
column_names = [new_table.c[c] for c in index['column_names']]
indexes.append(Index(index["name"], *column_names,
unique=index["unique"]))
c_select = []
for c in table.c:
if c.name != "deleted":
c_select.append(c)
else:
c_select.append(table.c.deleted == table.c.id)
ins = InsertFromSelect(new_table, select(c_select))
migrate_engine.execute(ins)
table.drop()
[index.create(migrate_engine) for index in indexes]
new_table.rename(table_name)
new_table.update().\
where(new_table.c.deleted == new_table.c.id).\
values(deleted=True).\
execute()
def change_deleted_column_type_to_id_type(migrate_engine, table_name,
**col_name_col_instance):
if migrate_engine.name == "sqlite":
return _change_deleted_column_type_to_id_type_sqlite(
migrate_engine, table_name, **col_name_col_instance)
insp = reflection.Inspector.from_engine(migrate_engine)
indexes = insp.get_indexes(table_name)
table = get_table(migrate_engine, table_name)
new_deleted = Column('new_deleted', table.c.id.type,
default=_get_default_deleted_value(table))
new_deleted.create(table, populate_default=True)
deleted = True # workaround for pyflakes
table.update().\
where(table.c.deleted == deleted).\
values(new_deleted=table.c.id).\
execute()
table.c.deleted.drop()
table.c.new_deleted.alter(name="deleted")
_restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes)
def _change_deleted_column_type_to_id_type_sqlite(migrate_engine, table_name,
**col_name_col_instance):
# NOTE(boris-42): sqlalchemy-migrate can't drop a column with check
# constraints in sqlite DB and our `deleted` column has
# 2 check constraints. So there is only one way to remove
# these constraints:
# 1) Create new table with the same columns, constraints
# and indexes. (except deleted column).
# 2) Copy all data from old to new table.
# 3) Drop old table.
# 4) Rename new table to old table name.
insp = reflection.Inspector.from_engine(migrate_engine)
meta = MetaData(bind=migrate_engine)
table = Table(table_name, meta, autoload=True)
default_deleted_value = _get_default_deleted_value(table)
columns = []
for column in table.columns:
column_copy = None
if column.name != "deleted":
if isinstance(column.type, NullType):
column_copy = _get_not_supported_column(col_name_col_instance,
column.name)
else:
column_copy = column.copy()
else:
column_copy = Column('deleted', table.c.id.type,
default=default_deleted_value)
columns.append(column_copy)
def is_deleted_column_constraint(constraint):
# NOTE(boris-42): There is no other way to check whether a CheckConstraint
# is associated with the deleted column.
if not isinstance(constraint, CheckConstraint):
return False
sqltext = str(constraint.sqltext)
return (sqltext.endswith("deleted in (0, 1)") or
sqltext.endswith("deleted IN (:deleted_1, :deleted_2)"))
constraints = []
for constraint in table.constraints:
if not is_deleted_column_constraint(constraint):
constraints.append(constraint.copy())
new_table = Table(table_name + "__tmp__", meta,
*(columns + constraints))
new_table.create()
indexes = []
for index in insp.get_indexes(table_name):
column_names = [new_table.c[c] for c in index['column_names']]
indexes.append(Index(index["name"], *column_names,
unique=index["unique"]))
ins = InsertFromSelect(new_table, table.select())
migrate_engine.execute(ins)
table.drop()
[index.create(migrate_engine) for index in indexes]
new_table.rename(table_name)
deleted = True # workaround for pyflakes
new_table.update().\
where(new_table.c.deleted == deleted).\
values(deleted=new_table.c.id).\
execute()
# NOTE(boris-42): Fix value of deleted column: False -> "" or 0.
deleted = False # workaround for pyflakes
new_table.update().\
where(new_table.c.deleted == deleted).\
values(deleted=default_deleted_value).\
execute()
|
| sam-m888/gprime | refs/heads/master | gprime/datehandler/_date_de.py | 1 |
# -*- coding: utf-8 -*-
#
# gPrime - A web-based genealogy program
#
# Copyright (C) 2004-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
German-specific classes for parsing and displaying dates.
"""
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import re
#-------------------------------------------------------------------------
#
# Gprime modules
#
#-------------------------------------------------------------------------
from ..lib.date import Date
from ._dateparser import DateParser
from ._datedisplay import DateDisplay
from ._datehandler import register_datehandler
#-------------------------------------------------------------------------
#
# German parser
#
#-------------------------------------------------------------------------
class DateParserDE(DateParser):
month_to_int = DateParser.month_to_int
# Always add German and Austrian name variants no matter what the current
# locale is
month_to_int["januar"] = 1
month_to_int["jan"] = 1
month_to_int["jänner"] = 1
month_to_int["jän"] = 1
# Add other common Latin variants
month_to_int["januaris"] = 1
month_to_int["januarii"] = 1
month_to_int["januarius"] = 1
month_to_int["februaris"] = 2
month_to_int["februarii"] = 2
month_to_int["februarius"] = 2
month_to_int["martii"] = 3
month_to_int["martius"] = 3
month_to_int["aprilis"] = 4
month_to_int["maius"] = 5
month_to_int["maii"] = 5
month_to_int["junius"] = 6
month_to_int["junii"] = 6
month_to_int["julius"] = 7
month_to_int["julii"] = 7
month_to_int["augustus"] = 8
month_to_int["augusti"] = 8
month_to_int["septembris"] = 9
month_to_int["7bris"] = 9
month_to_int["september"] = 9
month_to_int["october"] = 10
month_to_int["octobris"] = 10
month_to_int["8bris"] = 10
month_to_int["novembris"] = 11
month_to_int["9bris"] = 11
month_to_int["november"] = 11
month_to_int["decembris"] = 12
month_to_int["10bris"] = 12
month_to_int["xbris"] = 12
month_to_int["december"] = 12
# local and historical variants
month_to_int["jenner"] = 1
month_to_int["feber"] = 2
month_to_int["merz"] = 3
month_to_int["augst"] = 8
month_to_int["7ber"] = 9
month_to_int["8ber"] = 10
month_to_int["9ber"] = 11
month_to_int["10ber"] = 12
month_to_int["xber"] = 12
# old german names
month_to_int["hartung"] = 1
month_to_int["eismond"] = 1
month_to_int["hornung"] = 2
month_to_int["schmelzmond"] = 2
month_to_int["taumond"] = 2
month_to_int["narrenmond"] = 2
month_to_int["rebmond"] = 2
month_to_int["hintester"] = 2
month_to_int["lenzing"] = 3
month_to_int["lenzmond"] = 3
month_to_int["launing"] = 4
month_to_int["ostermond"] = 4
month_to_int["winnemond"] = 5
month_to_int["weidenmonat"] = 5
month_to_int["blumenmond"] = 5
month_to_int["brachet"] = 6
month_to_int["brachmond"] = 6
month_to_int["heuert"] = 7
month_to_int["heumond"] = 7
month_to_int["ernting"] = 8
month_to_int["erntemond"] = 8
month_to_int["bisemond"] = 8
month_to_int["scheiding"] = 9
month_to_int["herbstmond"] = 9
month_to_int["gilbhard"] = 10
month_to_int["gilbhart"] = 10
month_to_int["windmond"] = 11
month_to_int["nebelung"] = 11
month_to_int["wintermond"] = 11
month_to_int["julmond"] = 12
month_to_int["heilmond"] = 12
month_to_int["christmond"] = 12
month_to_int["dustermond"] = 12
modifier_to_int = {
'vor' : Date.MOD_BEFORE,
'nach' : Date.MOD_AFTER,
'gegen' : Date.MOD_ABOUT,
'um' : Date.MOD_ABOUT,
'etwa' : Date.MOD_ABOUT,
'circa' : Date.MOD_ABOUT,
'ca.' : Date.MOD_ABOUT,
}
calendar_to_int = {
'gregorianisch' : Date.CAL_GREGORIAN,
'greg.' : Date.CAL_GREGORIAN,
'julianisch' : Date.CAL_JULIAN,
'jul.' : Date.CAL_JULIAN,
'hebräisch' : Date.CAL_HEBREW,
'hebr.' : Date.CAL_HEBREW,
'islamisch' : Date.CAL_ISLAMIC,
'isl.' : Date.CAL_ISLAMIC,
'französisch republikanisch': Date.CAL_FRENCH,
'franz.' : Date.CAL_FRENCH,
'persisch' : Date.CAL_PERSIAN,
'schwedisch' : Date.CAL_SWEDISH,
's' : Date.CAL_SWEDISH,
}
quality_to_int = {
'geschätzt' : Date.QUAL_ESTIMATED,
'gesch.' : Date.QUAL_ESTIMATED,
'errechnet' : Date.QUAL_CALCULATED,
'berechnet' : Date.QUAL_CALCULATED,
'ber.' : Date.QUAL_CALCULATED,
}
bce = ["vor unserer Zeitrechnung", "vor unserer Zeit",
"vor der Zeitrechnung", "vor der Zeit",
"v. u. Z.", "v. d. Z.", "v.u.Z.", "v.d.Z.",
"vor Christi Geburt", "vor Christus", "v. Chr."] + DateParser.bce
def init_strings(self):
DateParser.init_strings(self)
self._span = re.compile("(von|vom)\s+(?P<start>.+)\s+(bis)\s+(?P<stop>.+)",
re.IGNORECASE)
self._range = re.compile("zwischen\s+(?P<start>.+)\s+und\s+(?P<stop>.+)",
re.IGNORECASE)
self._text2 = re.compile('(\d+)?.?\s+?%s\s*((\d+)(/\d+)?)?' % self._mon_str,
re.IGNORECASE)
self._jtext2 = re.compile('(\d+)?.?\s+?%s\s*((\d+)(/\d+)?)?' % self._jmon_str,
re.IGNORECASE)
#-------------------------------------------------------------------------
#
# German display
#
#-------------------------------------------------------------------------
class DateDisplayDE(DateDisplay):
"""
German language date display class.
"""
long_months = ( "", "Januar", "Februar", "März", "April", "Mai",
"Juni", "Juli", "August", "September", "Oktober",
"November", "Dezember" )
short_months = ( "", "Jan", "Feb", "Mär", "Apr", "Mai", "Jun",
"Jul", "Aug", "Sep", "Okt", "Nov", "Dez" )
calendar = (
"", "julianisch", "hebräisch",
"französisch republikanisch", "persisch", "islamisch",
"schwedisch"
)
_mod_str = ("", "vor ", "nach ", "etwa ", "", "", "")
_qual_str = ("", "geschätzt ", "errechnet ")
_bce_str = "%s v. u. Z."
formats = (
"JJJJ-MM-DD (ISO)", "Numerisch", "Monat Tag Jahr",
"MONAT Tag Jahr", "Tag. Monat Jahr", "Tag. MONAT Jahr"
)
# this definition must agree with its "_display_gregorian" method
def _display_gregorian(self, date_val):
"""
display gregorian calendar date in different format
"""
# this must agree with its locale-specific "formats" definition
year = self._slash_year(date_val[2], date_val[3])
if self.format == 0:
return self.display_iso(date_val)
elif self.format == 1:
# day.month_number.year
if date_val[3]:
return self.display_iso(date_val)
else:
if date_val[0] == date_val[1] == 0:
value = str(date_val[2])
else:
value = self._tformat.replace('%m', str(date_val[1]))
value = value.replace('%d', str(date_val[0]))
value = value.replace('%Y', str(date_val[2]))
elif self.format == 2:
# month_name day, year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.long_months[date_val[1]], year)
else:
value = "%s %d, %s" % (self.long_months[date_val[1]],
date_val[0], year)
elif self.format == 3:
# month_abbreviation day, year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.short_months[date_val[1]], year)
else:
value = "%s %d, %s" % (self.short_months[date_val[1]],
date_val[0], year)
elif self.format == 4:
# day. month_name year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.long_months[date_val[1]], year)
else:
value = "%d. %s %s" % (date_val[0],
self.long_months[date_val[1]], year)
else:
# day. month_abbreviation year
if date_val[0] == 0:
if date_val[1] == 0:
value = year
else:
value = "%s %s" % (self.short_months[date_val[1]], year)
else:
value = "%d. %s %s" % (date_val[0],
self.short_months[date_val[1]], year)
if date_val[2] < 0:
return self._bce_str % value
else:
return value
def display(self, date):
"""
Return a text string representing the date.
"""
mod = date.get_modifier()
cal = date.get_calendar()
qual = date.get_quality()
start = date.get_start_date()
newyear = date.get_new_year()
qual_str = self._qual_str[qual]
if mod == Date.MOD_TEXTONLY:
return date.get_text()
elif start == Date.EMPTY:
return ""
elif mod == Date.MOD_SPAN:
d1 = self.display_cal[cal](start)
d2 = self.display_cal[cal](date.get_stop_date())
scal = self.format_extras(cal, newyear)
return "%s%s %s %s %s%s" % (qual_str, 'von', d1, 'bis', d2, scal)
elif mod == Date.MOD_RANGE:
d1 = self.display_cal[cal](start)
d2 = self.display_cal[cal](date.get_stop_date())
scal = self.format_extras(cal, newyear)
return "%szwischen %s und %s%s" % (qual_str, d1, d2, scal)
else:
text = self.display_cal[date.get_calendar()](start)
scal = self.format_extras(cal, newyear)
return "%s%s%s%s" % (qual_str, self._mod_str[mod], text, scal)
#-------------------------------------------------------------------------
#
# Register classes
#
#-------------------------------------------------------------------------
register_datehandler(('de_DE', 'german', 'German', 'de_AT', 'de_CH',
'de_LI', 'de_LU', 'de_BE', 'de'),
DateParserDE, DateDisplayDE)
|
| orekyuu/intellij-community | refs/heads/master | python/testData/psi/BadDecoratorNotMethod.py | 80 |
class Foo(object):
@staticmethod
def bad_method(): #test
pass
|
| sinkpoint/research-mri-db | refs/heads/master | patientdb/patientdb/settings-default.py | 1 |
"""
Django settings for patientdb project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
PROJECT_DIR = os.path.dirname(__file__)
BASE_DIR = os.path.dirname(PROJECT_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'i$61&b%if(v5do!x2r@4tqui*byg$&(epl_unw&hgkptu#^zcs'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.sites',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'patientdb',
'django_tables2',
'bootstrap3',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.linkedin_oauth2',
'rest_framework',
'rest_framework_swagger',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware'
)
TEMPLATE_CONTEXT_PROCESSORS = [
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.i18n',
'django.core.context_processors.request',
'allauth.account.context_processors.account',
'allauth.socialaccount.context_processors.socialaccount'
]
TEMPLATE_LOADERS = ('django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
'django.contrib.auth.backends.ModelBackend',
# `allauth` specific authentication methods, such as login by e-mail
'allauth.account.auth_backends.AuthenticationBackend'
)
ROOT_URLCONF = 'patientdb.urls'
WSGI_APPLICATION = 'patientdb.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME' : 'patientdb',
'USER': '',
'PASSWORD': ''
}
}
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
if DEBUG:
MEDIA_URL = '/media/'
STATIC_ROOT = ''
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
else:
STATIC_ROOT = os.path.join(BASE_DIR, "static")
#Template location
TEMPLATE_DIRS = (
os.path.join(os.path.dirname(BASE_DIR), "templates"),
)
LOGIN_URL = '/accounts/login/'
SITE_ID = 1
|
| mdanielwork/intellij-community | refs/heads/master | plugins/hg4idea/testData/bin/mercurial/lsprofcalltree.py | 100 |
"""
lsprofcalltree.py - lsprof output which is readable by kcachegrind
Authors:
* David Allouche <david <at> allouche.net>
* Jp Calderone & Itamar Shtull-Trauring
* Johan Dahlin
This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.
"""
def label(code):
if isinstance(code, str):
return '~' + code # built-in functions ('~' sorts at the end)
else:
return '%s %s:%d' % (code.co_name,
code.co_filename,
code.co_firstlineno)
class KCacheGrind(object):
def __init__(self, profiler):
self.data = profiler.getstats()
self.out_file = None
def output(self, out_file):
self.out_file = out_file
print >> out_file, 'events: Ticks'
self._print_summary()
for entry in self.data:
self._entry(entry)
def _print_summary(self):
max_cost = 0
for entry in self.data:
totaltime = int(entry.totaltime * 1000)
max_cost = max(max_cost, totaltime)
print >> self.out_file, 'summary: %d' % (max_cost,)
def _entry(self, entry):
out_file = self.out_file
code = entry.code
#print >> out_file, 'ob=%s' % (code.co_filename,)
if isinstance(code, str):
print >> out_file, 'fi=~'
else:
print >> out_file, 'fi=%s' % (code.co_filename,)
print >> out_file, 'fn=%s' % (label(code),)
inlinetime = int(entry.inlinetime * 1000)
if isinstance(code, str):
print >> out_file, '0 ', inlinetime
else:
print >> out_file, '%d %d' % (code.co_firstlineno, inlinetime)
# recursive calls are counted in entry.calls
if entry.calls:
calls = entry.calls
else:
calls = []
if isinstance(code, str):
lineno = 0
else:
lineno = code.co_firstlineno
for subentry in calls:
self._subentry(lineno, subentry)
print >> out_file
def _subentry(self, lineno, subentry):
out_file = self.out_file
code = subentry.code
#print >> out_file, 'cob=%s' % (code.co_filename,)
print >> out_file, 'cfn=%s' % (label(code),)
if isinstance(code, str):
print >> out_file, 'cfi=~'
print >> out_file, 'calls=%d 0' % (subentry.callcount,)
else:
print >> out_file, 'cfi=%s' % (code.co_filename,)
print >> out_file, 'calls=%d %d' % (
subentry.callcount, code.co_firstlineno)
totaltime = int(subentry.totaltime * 1000)
print >> out_file, '%d %d' % (lineno, totaltime)
|
| skycucumber/Messaging-Gateway | refs/heads/master | src/Command/SetProfile.py | 1 |
'''
Created on 2013-9-5
@author: E525649
'''
from BaseCommand import CBaseCommand
from sqlalchemy.exc import SQLAlchemyError
from DB import SBDB,SBDB_ORM
from Command import BaseCommand
import logging
class CSetProfile(CBaseCommand):
'''
classdocs
'''
command_id=0x00050001
def __init__(self,data=None,protocol=None):
'''
Constructor
'''
CBaseCommand.__init__(self, data, protocol)
def Run(self):
with self.protocol.lockCmd:
if not self.Authorized():
self.SendUnauthorizedResp()
return
CBaseCommand.Run(self)
with SBDB.session_scope() as session :
password=self.body.get(BaseCommand.PN_PASSWORD)
email=self.body.get(BaseCommand.PN_EMAIL)
language_name=self.body.get(BaseCommand.PN_LANGUAGENAME)
mobile_phone=self.body.get(BaseCommand.PN_MOBLEPHONE)
respond=self.GetResp()
try:
account=session.query(SBDB_ORM.Account).filter(SBDB_ORM.Account.id==self.protocol.account_id).one()
if password is not None: account.password=password
if email is not None: account.email=email
if language_name is not None:
for language in session.query(SBDB_ORM.Language).all():
if language.language==language_name:
account.language_id=language.id
if mobile_phone is not None: account.mobile_phone=mobile_phone
session.commit()
except SQLAlchemyError,e:
respond.SetErrorCode(BaseCommand.CS_DBEXCEPTION)
logging.error("transport %d:%s",id(self.protocol.transport),e)
session.rollback()
respond.Send()
|
| ujjvala-addsol/addsol_hr | refs/heads/master | openerp/addons/test_documentation_examples/delegation.py | 366 |
# -*- coding: utf-8 -*-
from openerp import models, fields
class Child0(models.Model):
_name = 'delegation.child0'
field_0 = fields.Integer()
class Child1(models.Model):
_name = 'delegation.child1'
field_1 = fields.Integer()
class Delegating(models.Model):
_name = 'delegation.parent'
_inherits = {
'delegation.child0': 'child0_id',
'delegation.child1': 'child1_id',
}
child0_id = fields.Many2one('delegation.child0', required=True, ondelete='cascade')
child1_id = fields.Many2one('delegation.child1', required=True, ondelete='cascade')
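# Illustrative note (not part of the original example): with _inherits,
# creating a delegation.parent record also creates the linked child0/child1
# records, and field_0 / field_1 are read and written through them, e.g.
#     rec = env['delegation.parent'].create({'field_0': 1, 'field_1': 2})
#     rec.field_0   # delegated to the record behind child0_id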
|
| pathway27/servo | refs/heads/master | tests/wpt/css-tests/tools/html5lib/html5lib/filters/optionaltags.py | 1727 |
from __future__ import absolute_import, division, unicode_literals
from . import _base
class Filter(_base.Filter):
def slider(self):
previous1 = previous2 = None
for token in self.source:
if previous1 is not None:
yield previous2, previous1, token
previous2 = previous1
previous1 = token
yield previous2, previous1, None
def __iter__(self):
for previous, token, next in self.slider():
type = token["type"]
if type == "StartTag":
if (token["data"] or
not self.is_optional_start(token["name"], previous, next)):
yield token
elif type == "EndTag":
if not self.is_optional_end(token["name"], next):
yield token
else:
yield token
def is_optional_start(self, tagname, previous, next):
type = next and next["type"] or None
if tagname in 'html':
# An html element's start tag may be omitted if the first thing
# inside the html element is not a space character or a comment.
return type not in ("Comment", "SpaceCharacters")
elif tagname == 'head':
# A head element's start tag may be omitted if the first thing
# inside the head element is an element.
# XXX: we also omit the start tag if the head element is empty
if type in ("StartTag", "EmptyTag"):
return True
elif type == "EndTag":
return next["name"] == "head"
elif tagname == 'body':
# A body element's start tag may be omitted if the first thing
# inside the body element is not a space character or a comment,
# except if the first thing inside the body element is a script
# or style element and the node immediately preceding the body
# element is a head element whose end tag has been omitted.
if type in ("Comment", "SpaceCharacters"):
return False
elif type == "StartTag":
# XXX: we do not look at the preceding event, so we never omit
# the body element's start tag if it's followed by a script or
# a style element.
return next["name"] not in ('script', 'style')
else:
return True
elif tagname == 'colgroup':
# A colgroup element's start tag may be omitted if the first thing
# inside the colgroup element is a col element, and if the element
# is not immediately preceded by another colgroup element whose
# end tag has been omitted.
if type in ("StartTag", "EmptyTag"):
# XXX: we do not look at the preceding event, so instead we never
# omit the colgroup element's end tag when it is immediately
# followed by another colgroup element. See is_optional_end.
return next["name"] == "col"
else:
return False
elif tagname == 'tbody':
# A tbody element's start tag may be omitted if the first thing
# inside the tbody element is a tr element, and if the element is
# not immediately preceded by a tbody, thead, or tfoot element
# whose end tag has been omitted.
if type == "StartTag":
# omit the thead and tfoot elements' end tag when they are
# immediately followed by a tbody element. See is_optional_end.
if previous and previous['type'] == 'EndTag' and \
previous['name'] in ('tbody', 'thead', 'tfoot'):
return False
return next["name"] == 'tr'
else:
return False
return False
def is_optional_end(self, tagname, next):
type = next and next["type"] or None
if tagname in ('html', 'head', 'body'):
# An html element's end tag may be omitted if the html element
# is not immediately followed by a space character or a comment.
return type not in ("Comment", "SpaceCharacters")
elif tagname in ('li', 'optgroup', 'tr'):
# A li element's end tag may be omitted if the li element is
# immediately followed by another li element or if there is
# no more content in the parent element.
# An optgroup element's end tag may be omitted if the optgroup
# element is immediately followed by another optgroup element,
# or if there is no more content in the parent element.
# A tr element's end tag may be omitted if the tr element is
# immediately followed by another tr element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] == tagname
else:
return type == "EndTag" or type is None
elif tagname in ('dt', 'dd'):
# A dt element's end tag may be omitted if the dt element is
# immediately followed by another dt element or a dd element.
# A dd element's end tag may be omitted if the dd element is
# immediately followed by another dd element or a dt element,
# or if there is no more content in the parent element.
if type == "StartTag":
return next["name"] in ('dt', 'dd')
elif tagname == 'dd':
return type == "EndTag" or type is None
else:
return False
elif tagname == 'p':
# A p element's end tag may be omitted if the p element is
# immediately followed by an address, article, aside,
# blockquote, datagrid, dialog, dir, div, dl, fieldset,
# footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
# nav, ol, p, pre, section, table, or ul, element, or if
# there is no more content in the parent element.
if type in ("StartTag", "EmptyTag"):
return next["name"] in ('address', 'article', 'aside',
'blockquote', 'datagrid', 'dialog',
'dir', 'div', 'dl', 'fieldset', 'footer',
'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
'header', 'hr', 'menu', 'nav', 'ol',
'p', 'pre', 'section', 'table', 'ul')
else:
return type == "EndTag" or type is None
elif tagname == 'option':
# An option element's end tag may be omitted if the option
# element is immediately followed by another option element,
# or if it is immediately followed by an <code>optgroup</code>
# element, or if there is no more content in the parent
# element.
if type == "StartTag":
return next["name"] in ('option', 'optgroup')
else:
return type == "EndTag" or type is None
elif tagname in ('rt', 'rp'):
# An rt element's end tag may be omitted if the rt element is
# immediately followed by an rt or rp element, or if there is
# no more content in the parent element.
# An rp element's end tag may be omitted if the rp element is
# immediately followed by an rt or rp element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] in ('rt', 'rp')
else:
return type == "EndTag" or type is None
elif tagname == 'colgroup':
# A colgroup element's end tag may be omitted if the colgroup
# element is not immediately followed by a space character or
# a comment.
if type in ("Comment", "SpaceCharacters"):
return False
elif type == "StartTag":
# XXX: we also look for an immediately following colgroup
# element. See is_optional_start.
return next["name"] != 'colgroup'
else:
return True
elif tagname in ('thead', 'tbody'):
# A thead element's end tag may be omitted if the thead element
# is immediately followed by a tbody or tfoot element.
# A tbody element's end tag may be omitted if the tbody element
# is immediately followed by a tbody or tfoot element, or if
# there is no more content in the parent element.
# A tfoot element's end tag may be omitted if the tfoot element
# is immediately followed by a tbody element, or if there is no
# more content in the parent element.
# XXX: we never omit the end tag when the following element is
# a tbody. See is_optional_start.
if type == "StartTag":
return next["name"] in ['tbody', 'tfoot']
elif tagname == 'tbody':
return type == "EndTag" or type is None
else:
return False
elif tagname == 'tfoot':
# A tfoot element's end tag may be omitted if the tfoot element
# is immediately followed by a tbody element, or if there is no
# more content in the parent element.
# XXX: we never omit the end tag when the following element is
# a tbody. See is_optional_start.
if type == "StartTag":
return next["name"] == 'tbody'
else:
return type == "EndTag" or type is None
elif tagname in ('td', 'th'):
# A td element's end tag may be omitted if the td element is
# immediately followed by a td or th element, or if there is
# no more content in the parent element.
# A th element's end tag may be omitted if the th element is
# immediately followed by a td or th element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] in ('td', 'th')
else:
return type == "EndTag" or type is None
return False
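# Hypothetical usage sketch (html5lib applies this filter when a serializer is
# created with omit_optional_tags=True; exact import paths may vary by version):
#     import html5lib
#     from html5lib.serializer import HTMLSerializer
#     dom = html5lib.parse("<html><body><p>a<p>b</body></html>")
#     walker = html5lib.getTreeWalker("etree")
#     out = HTMLSerializer(omit_optional_tags=True).render(walker(dom))
#     # optional tags such as <head>, <body> and </p> are omitted from `out`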
|
| ptitjes/quodlibet | refs/heads/master | dev-utils/win_installer/misc/gen_supported_types.py | 5 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2016 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from quodlibet.formats import init, loaders
if __name__ == "__main__":
init()
# these are for showing up in the openwith dialog
for ext in sorted(loaders.keys()):
print('WriteRegStr HKLM "${QL_ASSOC_KEY}" '
'"%s" "${QL_ID}.assoc.ANY"' % ext)
|
| pintubigfoot/pinturun | refs/heads/master | printrun/GCodeAnalyzer.py | 1 |
# This file is part of the Printrun suite.
#
# Copyright 2013 Francesco Santini francesco.santini@gmail.com
#
# Printrun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Printrun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Printrun. If not, see <http://www.gnu.org/licenses/>.
#
# This code is imported from RepetierHost - Original copyright and license:
# Copyright 2011 repetier repetierdev@gmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gcoder
class GCodeAnalyzer():
def __init__(self):
self.x = 0
self.y = 0
self.z = 0
self.e = 0
self.emax = 0
self.f = 1000
self.lastX = 0
self.lastY = 0
self.lastZ = 0
self.lastE = 0
self.xOffset = 0
self.yOffset = 0
self.zOffset = 0
self.eOffset = 0
self.lastZPrint = 0
self.layerZ = 0
self.imperial = False
self.relative = False
self.eRelative = False
self.homeX = 0
self.homeY = 0
self.homeZ = 0
self.maxX = 150
self.maxY = 150
self.maxZ = 150
self.minX = 0
self.minY = 0
self.minZ = 0
self.hasHomeX = False
self.hasHomeY = False
self.hasHomeZ = False
def Analyze(self, gcode):
gline = gcoder.Line(gcode)
split_raw = gcoder.split(gline)
if gline.command.startswith(";@"): return # code is a host command
gcoder.parse_coordinates(gline, split_raw, self.imperial)
code_g = int(gline.command[1:]) if gline.command.startswith("G") else None
code_m = int(gline.command[1:]) if gline.command.startswith("M") else None
#get movement codes
if gline.is_move:
self.lastX = self.x
self.lastY = self.y
self.lastZ = self.z
self.lastE = self.e
code_f = gline.f
if code_f is not None:
self.f = code_f
code_x = gline.x
code_y = gline.y
code_z = gline.z
code_e = gline.e
if self.relative:
if code_x is not None: self.x += code_x
if code_y is not None: self.y += code_y
if code_z is not None: self.z += code_z
if code_e is not None:
if code_e != 0:
self.e += code_e
else:
# absolute coordinates
if code_x is not None: self.x = self.xOffset + code_x
if code_y is not None: self.y = self.yOffset + code_y
if code_z is not None: self.z = self.zOffset + code_z
if code_e is not None:
if self.eRelative:
if code_e != 0:
self.e += code_e
else:
# e is absolute. Is it changed?
if self.e != self.eOffset + code_e:
self.e = self.eOffset + code_e
#limit checking
"""
if self.x < self.minX: self.x = self.minX
if self.y < self.minY: self.y = self.minY
if self.z < self.minZ: self.z = self.minZ
if self.x > self.maxX: self.x = self.maxX
if self.y > self.maxY: self.y = self.maxY
if self.z > self.maxZ: self.z = self.maxZ
"""
#Repetier has a bunch of limit-checking code here and time calculations: we are leaving them for now
elif code_g == 20: self.imperial = True
elif code_g == 21: self.imperial = False
elif code_g == 28 or code_g == 161:
self.lastX = self.x
self.lastY = self.y
self.lastZ = self.z
self.lastE = self.e
code_x = gline.x
code_y = gline.y
code_z = gline.z
code_e = gline.e
homeAll = False
if code_x is None and code_y is None and code_z is None: homeAll = True
if code_x is not None or homeAll:
self.hasHomeX = True
self.xOffset = 0
self.x = self.homeX
if code_y is not None or homeAll:
self.hasHomeY = True
self.yOffset = 0
self.y = self.homeY
if code_z is not None or homeAll:
self.hasHomeZ = True
self.zOffset = 0
self.z = self.homeZ
if code_e is not None:
self.eOffset = 0
self.e = 0
elif code_g == 162:
self.lastX = self.x
self.lastY = self.y
self.lastZ = self.z
self.lastE = self.e
code_x = gline.x
code_y = gline.y
code_z = gline.z
homeAll = False
if code_x is None and code_y is None and code_z is None: homeAll = True
if code_x is not None or homeAll:
self.hasHomeX = True
self.xOffset = 0
self.x = self.maxX
if code_y is not None or homeAll:
self.hasHomeY = True
self.yOffset = 0
self.y = self.maxY
if code_z is not None or homeAll:
self.hasHomeZ = True
self.zOffset = 0
self.z = self.maxZ
elif code_g == 90: self.relative = False
elif code_g == 91: self.relative = True
elif code_g == 92:
code_x = gline.x
code_y = gline.y
code_z = gline.z
code_e = gline.e
if code_x is not None:
self.xOffset = self.x - float(code_x)
self.x = self.xOffset
if code_y is not None:
self.yOffset = self.y - float(code_y)
self.y = self.yOffset
if code_z is not None:
self.zOffset = self.z - float(code_z)
self.z = self.zOffset
if code_e is not None:
                self.eOffset = self.e - float(code_e)
self.e = self.eOffset
#End code_g is not None
if code_m is not None:
if code_m == 82: self.eRelative = False
elif code_m == 83: self.eRelative = True
def print_status(self):
attrs = vars(self)
print '\n'.join("%s: %s" % item for item in attrs.items())
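# Illustrative usage sketch (not part of the original file). Assuming the
# gcoder module imported above is available, the analyzer is fed raw G-code
# lines one at a time and then queried for the tracked state:
#
#   analyzer = GCodeAnalyzer()
#   for raw in ("G28", "G90", "G1 X10 Y20 E0.5 F1800", "G92 E0"):
#       analyzer.Analyze(raw)
#   analyzer.print_status()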
|
ryankurte/mbed-os
|
refs/heads/master
|
tools/misc/find_c_includes.py
|
64
|
#!/usr/bin/env python
import os
import re
def main(path='.', pattern=r'#include\s+"([^"]*\.(?:c|cpp))"'):
pattern = re.compile(pattern)
for root, dirs, files in os.walk(path, followlinks=True):
for file in files:
with open(os.path.join(root, file)) as f:
for line in f.read().splitlines():
m = re.search(pattern, line)
if m:
print os.path.relpath(os.path.join(root, m.group(1)))
if __name__ == "__main__":
import sys
main(*sys.argv[1:])
|
Cinntax/home-assistant
|
refs/heads/dev
|
homeassistant/components/zha/device_tracker.py
|
5
|
"""Support for the ZHA platform."""
import logging
import time
from homeassistant.components.device_tracker import DOMAIN, SOURCE_TYPE_ROUTER
from homeassistant.components.device_tracker.config_entry import ScannerEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core.const import (
CHANNEL_POWER_CONFIGURATION,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
SIGNAL_ATTR_UPDATED,
ZHA_DISCOVERY_NEW,
)
from .entity import ZhaEntity
from .sensor import battery_percentage_remaining_formatter
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation device tracker from config entry."""
async def async_discover(discovery_info):
await _async_setup_entities(
hass, config_entry, async_add_entities, [discovery_info]
)
unsub = async_dispatcher_connect(
hass, ZHA_DISCOVERY_NEW.format(DOMAIN), async_discover
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
device_trackers = hass.data.get(DATA_ZHA, {}).get(DOMAIN)
if device_trackers is not None:
await _async_setup_entities(
hass, config_entry, async_add_entities, device_trackers.values()
)
del hass.data[DATA_ZHA][DOMAIN]
async def _async_setup_entities(
hass, config_entry, async_add_entities, discovery_infos
):
"""Set up the ZHA device trackers."""
entities = []
for discovery_info in discovery_infos:
entities.append(ZHADeviceScannerEntity(**discovery_info))
async_add_entities(entities, update_before_add=True)
class ZHADeviceScannerEntity(ScannerEntity, ZhaEntity):
"""Represent a tracked device."""
def __init__(self, **kwargs):
"""Initialize the ZHA device tracker."""
super().__init__(**kwargs)
self._battery_channel = self.cluster_channels.get(CHANNEL_POWER_CONFIGURATION)
self._connected = False
self._keepalive_interval = 60
self._should_poll = True
self._battery_level = None
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
if self._battery_channel:
await self.async_accept_signal(
self._battery_channel,
SIGNAL_ATTR_UPDATED,
self.async_battery_percentage_remaining_updated,
)
async def async_update(self):
"""Handle polling."""
if self.zha_device.last_seen is None:
self._connected = False
else:
difference = time.time() - self.zha_device.last_seen
if difference > self._keepalive_interval:
self._connected = False
else:
self._connected = True
@property
def is_connected(self):
"""Return true if the device is connected to the network."""
return self._connected
@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
return SOURCE_TYPE_ROUTER
@callback
def async_battery_percentage_remaining_updated(self, value):
"""Handle tracking."""
self.debug("battery_percentage_remaining updated: %s", value)
self._connected = True
self._battery_level = battery_percentage_remaining_formatter(value)
self.async_schedule_update_ha_state()
@property
def battery_level(self):
"""Return the battery level of the device.
Percentage from 0-100.
"""
return self._battery_level
|
ThomasDoylend/Zen
|
refs/heads/master
|
sys/boot/config/config2.py
|
1
|
#Zen
#
#Config manager 2
import pickle
class Config2:
name = 'Config Manager v2'
config_path = '{home}/sys/boot/config/config.pkl'
def __init__(self,kernel):
self.kernel = kernel
self.fs = self.kernel.get_service('fs')
self.data = pickle.load(self.fs.open(self.config_path,'rb'))
def get(self,option):
return self.data[option]
def set(self,option,value):
self.data[option] = value
def save(self):
f = self.fs.open(self.config_path,'wb')
pickle.dump(self.data,f)
APPLICATION = []
SERVICE = {'config':Config2}
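# Usage sketch (illustrative; how the kernel hands out registered services is
# an assumption based on the get_service() calls above):
#
#   cfg = kernel.get_service('config')    # a Config2 instance
#   cfg.set('hostname', 'zen-box')
#   cfg.save()                            # pickles self.data back to config_path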
|
RabadanLab/MITKats
|
refs/heads/master
|
Modules/Biophotonics/python/iMC/mc/test/test_sim.py
|
6
|
'''
Created on Sep 8, 2015
@author: wirkert
'''
import unittest
import filecmp
import os
from mc.sim import MciWrapper, SimWrapper, \
get_total_reflectance, get_diffuse_reflectance
path_to_gpumcml = "/home/wirkert/workspace/monteCarlo/gpumcml/" + \
"fast-gpumcml/gpumcml.sm_20"
skip_gpu_tests = not os.path.exists(path_to_gpumcml)
class Test(unittest.TestCase):
def setUp(self):
self.mci_filename = "temp.mci"
self.mco_filename = "temp.mco"
# create a mci_wrapper which shall create a mci file
self.mci_wrapper = MciWrapper()
self.mci_wrapper.set_mci_filename(self.mci_filename)
self.mci_wrapper.set_mco_filename(self.mco_filename)
self.mci_wrapper.set_nr_photons(10 ** 6)
self.mci_wrapper.add_layer(1.0, 2.1, 3.2, 4.3, 5.4)
self.mci_wrapper.add_layer(6.5, 7.8, 8.9, 9.10, 10.11)
self.mci_wrapper.add_layer(100.1001, 101.10001, 102.100001,
103.1000001, 104.10000001)
self.mci_wrapper.set_layer(1, 1, 1, 1, 1, 1)
# expected mci file
self.correct_mci_filename = "./mc/data/correct.mci"
        # path to the externally installed mcml simulation. This is machine
# dependent. Thus tests depending on the execution of mcml will only
# be performed if this file exists.
# Should the file be located somewhere else on your computer,
# change this path to your actual location.
def tearDown(self):
os.remove(self.mci_filename)
mcml_path, mcml_file = os.path.split(path_to_gpumcml)
created_mco_file = mcml_path + "/" + self.mco_filename
if os.path.isfile(created_mco_file):
os.remove(created_mco_file)
def test_mci_wrapper(self):
self.mci_wrapper.create_mci_file()
self.assertTrue(os.path.isfile(self.mci_filename),
"mci file was created")
self.assertTrue(filecmp.cmp(self.mci_filename,
self.correct_mci_filename, shallow=False),
"the written mci file is the same as the stored " +
"reference file")
@unittest.skipIf(skip_gpu_tests, "skip if gpumcml not installed")
def test_sim_wrapper(self):
mcml_path, mcml_file = os.path.split(path_to_gpumcml)
if os.path.isfile(path_to_gpumcml):
self.mci_wrapper.create_mci_file()
sim_wrapper = SimWrapper()
sim_wrapper.set_mci_filename(self.mci_filename)
sim_wrapper.set_mcml_executable(path_to_gpumcml)
sim_wrapper.run_simulation()
self.assertTrue(os.path.isfile(os.path.join(mcml_path,
self.mco_filename)),
"mco file was created")
@unittest.skipIf(skip_gpu_tests, "skip if gpumcml not installed")
def test_mci_wrapper_book_example(self):
"""see if our result matches the one from
Biomedical Optics
Principles and Imaging
page 55 (Table 3.1)"""
# create a book_p55_mci which shall create a mci file
book_p55_mci = MciWrapper()
book_p55_mci.set_mci_filename(self.mci_filename)
book_p55_mci.set_mco_filename(self.mco_filename)
book_p55_mci.set_nr_photons(10**6)
book_p55_mci.add_layer(1, 1000, 9000, 0.75, 0.0002)
mcml_path, mcml_file = os.path.split(path_to_gpumcml)
if os.path.isfile(path_to_gpumcml):
book_p55_mci.create_mci_file()
sim_wrapper = SimWrapper()
sim_wrapper.set_mci_filename(self.mci_filename)
sim_wrapper.set_mcml_executable(path_to_gpumcml)
sim_wrapper.run_simulation()
self.assertTrue(os.path.isfile(mcml_path + "/" + self.mco_filename),
"mco file was created")
refl = get_diffuse_reflectance(os.path.join(mcml_path,
self.mco_filename))
self.assertAlmostEqual(refl, 0.09734, 3,
"correct reflectance determined " +
"according to book table 3.1")
@unittest.skipIf(skip_gpu_tests, "skip if gpumcml not installed")
def test_mci_wrapper_book_example_2(self):
"""see if our result matches the one from
Biomedical Optics
Principles and Imaging
page 56 (Table 3.2)"""
# create a book_p56_mci which shall create a mci file
book_p56_mci = MciWrapper()
book_p56_mci.set_mci_filename(self.mci_filename)
book_p56_mci.set_mco_filename(self.mco_filename)
book_p56_mci.set_nr_photons(10**6)
book_p56_mci.add_layer(1.5, 1000, 9000, 0., 1)
mcml_path, mcml_file = os.path.split(path_to_gpumcml)
if os.path.isfile(path_to_gpumcml):
book_p56_mci.create_mci_file()
sim_wrapper = SimWrapper()
sim_wrapper.set_mci_filename(self.mci_filename)
sim_wrapper.set_mcml_executable(path_to_gpumcml)
sim_wrapper.run_simulation()
self.assertTrue(os.path.isfile(mcml_path + "/" + self.mco_filename),
"mco file was created")
refl = get_total_reflectance(os.path.join(mcml_path,
self.mco_filename))
self.assertAlmostEqual(refl, 0.26, delta=0.01,
msg="correct reflectance determined " +
"according to book table 3.2")
|
piotrpawlaczek/suds-jurko
|
refs/heads/master
|
suds/xsd/schema.py
|
7
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{schema} module provides an intelligent representation of
an XSD schema. The I{raw} model is the XML tree and the I{model}
is the denormalized, objectified and intelligent view of the schema.
Most of the I{value-add} provided by the model is centered around
transparent referenced type resolution and targeted denormalization.
"""
from suds import *
from suds.xsd import *
from suds.xsd.sxbuiltin import *
from suds.xsd.sxbasic import Factory as BasicFactory
from suds.xsd.sxbuiltin import Factory as BuiltinFactory
from suds.xsd.sxbase import SchemaObject
from suds.xsd.deplist import DepList
from suds.sax.element import Element
from suds.sax import splitPrefix, Namespace
from logging import getLogger
log = getLogger(__name__)
class SchemaCollection(UnicodeMixin):
"""
A collection of schema objects. This class is needed because WSDLs
    may contain more than one <schema/> node.
@ivar wsdl: A wsdl object.
@type wsdl: L{suds.wsdl.Definitions}
@ivar children: A list contained schemas.
@type children: [L{Schema},...]
@ivar namespaces: A dictionary of contained schemas by namespace.
@type namespaces: {str:L{Schema}}
"""
def __init__(self, wsdl):
"""
@param wsdl: A wsdl object.
@type wsdl: L{suds.wsdl.Definitions}
"""
self.wsdl = wsdl
self.children = []
self.namespaces = {}
def add(self, schema):
"""
Add a schema node to the collection. Schema(s) within the same target
namespace are consolidated.
@param schema: A schema object.
@type schema: (L{Schema})
"""
key = schema.tns[1]
existing = self.namespaces.get(key)
if existing is None:
self.children.append(schema)
self.namespaces[key] = schema
else:
existing.root.children += schema.root.children
existing.root.nsprefixes.update(schema.root.nsprefixes)
def load(self, options):
"""
Load the schema objects for the root nodes.
- de-references schemas
- merge schemas
@param options: An options dictionary.
@type options: L{options.Options}
@return: The merged schema.
@rtype: L{Schema}
"""
if options.autoblend:
self.autoblend()
for child in self.children:
child.build()
for child in self.children:
child.open_imports(options)
for child in self.children:
child.dereference()
log.debug('loaded:\n%s', self)
merged = self.merge()
log.debug('MERGED:\n%s', merged)
return merged
def autoblend(self):
"""
Ensure that all schemas within the collection
import each other which has a blending effect.
@return: self
@rtype: L{SchemaCollection}
"""
namespaces = self.namespaces.keys()
for s in self.children:
for ns in namespaces:
tns = s.root.get('targetNamespace')
if tns == ns:
continue
for imp in s.root.getChildren('import'):
if imp.get('namespace') == ns:
continue
imp = Element('import', ns=Namespace.xsdns)
imp.set('namespace', ns)
s.root.append(imp)
return self
def locate(self, ns):
"""
Find a schema by namespace. Only the URI portion of
the namespace is compared to each schema's I{targetNamespace}
@param ns: A namespace.
@type ns: (prefix,URI)
@return: The schema matching the namespace, else None.
@rtype: L{Schema}
"""
return self.namespaces.get(ns[1])
def merge(self):
"""
Merge the contained schemas into one.
@return: The merged schema.
@rtype: L{Schema}
"""
if len(self):
schema = self.children[0]
for s in self.children[1:]:
schema.merge(s)
return schema
else:
return None
def __len__(self):
return len(self.children)
def __unicode__(self):
result = ['\nschema collection']
for s in self.children:
result.append(s.str(1))
return '\n'.join(result)
class Schema(UnicodeMixin):
"""
The schema is an objectification of a <schema/> (xsd) definition.
It provides inspection, lookup and type resolution.
@ivar root: The root node.
@type root: L{sax.element.Element}
@ivar baseurl: The I{base} URL for this schema.
@type baseurl: str
@ivar container: A schema collection containing this schema.
@type container: L{SchemaCollection}
@ivar children: A list of direct top level children.
@type children: [L{SchemaObject},...]
@ivar all: A list of all (includes imported) top level children.
@type all: [L{SchemaObject},...]
@ivar types: A schema types cache.
@type types: {name:L{SchemaObject}}
@ivar imports: A list of import objects.
@type imports: [L{SchemaObject},...]
@ivar elements: A list of <element/> objects.
@type elements: [L{SchemaObject},...]
@ivar attributes: A list of <attribute/> objects.
@type attributes: [L{SchemaObject},...]
@ivar groups: A list of group objects.
@type groups: [L{SchemaObject},...]
@ivar agrps: A list of attribute group objects.
@type agrps: [L{SchemaObject},...]
@ivar form_qualified: The flag indicating:
(@elementFormDefault).
@type form_qualified: bool
"""
Tag = 'schema'
def __init__(self, root, baseurl, options, container=None):
"""
@param root: The xml root.
@type root: L{sax.element.Element}
@param baseurl: The base url used for importing.
@type baseurl: basestring
@param options: An options dictionary.
@type options: L{options.Options}
@param container: An optional container.
@type container: L{SchemaCollection}
"""
self.root = root
self.id = objid(self)
self.tns = self.mktns()
self.baseurl = baseurl
self.container = container
self.children = []
self.all = []
self.types = {}
self.imports = []
self.elements = {}
self.attributes = {}
self.groups = {}
self.agrps = {}
if options.doctor is not None:
options.doctor.examine(root)
form = self.root.get('elementFormDefault')
if form is None:
self.form_qualified = False
else:
self.form_qualified = ( form == 'qualified' )
if container is None:
self.build()
self.open_imports(options)
log.debug('built:\n%s', self)
self.dereference()
log.debug('dereferenced:\n%s', self)
def mktns(self):
"""
Make the schema's target namespace.
@return: The namespace representation of the schema's
targetNamespace value.
@rtype: (prefix, uri)
"""
tns = [None, self.root.get('targetNamespace')]
if tns[1] is not None:
tns[0] = self.root.findPrefix(tns[1])
return tuple(tns)
def build(self):
"""
Build the schema (object graph) using the root node
using the factory.
- Build the graph.
- Collate the children.
"""
self.children = BasicFactory.build(self.root, self)
collated = BasicFactory.collate(self.children)
self.children = collated[0]
self.attributes = collated[2]
self.imports = collated[1]
self.elements = collated[3]
self.types = collated[4]
self.groups = collated[5]
self.agrps = collated[6]
def merge(self, schema):
"""
Merge the contents from the schema. Only objects not already contained
in this schema's collections are merged. This is to provide for bidirectional
        imports which produce cyclic includes.
@returns: self
@rtype: L{Schema}
"""
for item in schema.attributes.items():
if item[0] in self.attributes:
continue
self.all.append(item[1])
self.attributes[item[0]] = item[1]
for item in schema.elements.items():
if item[0] in self.elements:
continue
self.all.append(item[1])
self.elements[item[0]] = item[1]
for item in schema.types.items():
if item[0] in self.types:
continue
self.all.append(item[1])
self.types[item[0]] = item[1]
for item in schema.groups.items():
if item[0] in self.groups:
continue
self.all.append(item[1])
self.groups[item[0]] = item[1]
for item in schema.agrps.items():
if item[0] in self.agrps:
continue
self.all.append(item[1])
self.agrps[item[0]] = item[1]
schema.merged = True
return self
def open_imports(self, options):
"""
Instruct all contained L{sxbasic.Import} children to import
        the schemas which they reference. The contents of the
imported schema are I{merged} in.
@param options: An options dictionary.
@type options: L{options.Options}
"""
for imp in self.imports:
imported = imp.open(options)
if imported is None:
continue
imported.open_imports(options)
log.debug('imported:\n%s', imported)
self.merge(imported)
def dereference(self):
"""
Instruct all children to perform dereferencing.
"""
all = []
indexes = {}
for child in self.children:
child.content(all)
deplist = DepList()
for x in all:
x.qualify()
midx, deps = x.dependencies()
item = (x, tuple(deps))
deplist.add(item)
indexes[x] = midx
for x, deps in deplist.sort():
midx = indexes.get(x)
if midx is None: continue
d = deps[midx]
log.debug('(%s) merging %s <== %s', self.tns[1], Repr(x), Repr(d))
x.merge(d)
def locate(self, ns):
"""
Find a schema by namespace. Only the URI portion of
the namespace is compared to each schema's I{targetNamespace}.
The request is passed to the container.
@param ns: A namespace.
@type ns: (prefix,URI)
@return: The schema matching the namespace, else None.
@rtype: L{Schema}
"""
if self.container is not None:
return self.container.locate(ns)
else:
return None
def custom(self, ref, context=None):
"""
Get whether the specified reference is B{not} an (xs) builtin.
@param ref: A str or qref.
@type ref: (str|qref)
@return: True if B{not} a builtin, else False.
@rtype: bool
"""
if ref is None:
return True
else:
return ( not self.builtin(ref, context) )
def builtin(self, ref, context=None):
"""
Get whether the specified reference is an (xs) builtin.
@param ref: A str or qref.
@type ref: (str|qref)
@return: True if builtin, else False.
@rtype: bool
"""
w3 = 'http://www.w3.org'
try:
if isqref(ref):
ns = ref[1]
return ( ref[0] in Factory.tags and ns.startswith(w3) )
if context is None:
context = self.root
prefix = splitPrefix(ref)[0]
prefixes = context.findPrefixes(w3, 'startswith')
return ( prefix in prefixes and ref[0] in Factory.tags )
except:
return False
def instance(self, root, baseurl, options):
"""
        Create and return a new schema object using the
specified I{root} and I{url}.
@param root: A schema root node.
@type root: L{sax.element.Element}
@param baseurl: A base URL.
@type baseurl: str
@param options: An options dictionary.
@type options: L{options.Options}
@return: The newly created schema object.
@rtype: L{Schema}
@note: This is only used by Import children.
"""
return Schema(root, baseurl, options)
def str(self, indent=0):
tab = '%*s'%(indent*3, '')
result = []
result.append('%s%s' % (tab, self.id))
result.append('%s(raw)' % tab)
result.append(self.root.str(indent+1))
result.append('%s(model)' % tab)
for c in self.children:
result.append(c.str(indent+1))
result.append('')
return '\n'.join(result)
def __repr__(self):
return '<%s tns="%s"/>' % (self.id, self.tns[1])
def __unicode__(self):
return self.str()
|
def-/commandergenius
|
refs/heads/sdl_android
|
project/jni/python/src/Demo/scripts/ftpstats.py
|
32
|
#! /usr/bin/env python
# Extract statistics from ftp daemon log.
# Usage:
# ftpstats [-m maxitems] [-s search] [file]
# -m maxitems: restrict number of items in "top-N" lists, default 25.
# -s string: restrict statistics to lines containing this string.
# Default file is /usr/adm/ftpd; a "-" means read standard input.
# The script must be run on the host where the ftp daemon runs.
# (At CWI this is currently buizerd.)
import os
import sys
import re
import string
import getopt
pat = '^([a-zA-Z0-9 :]*)!(.*)!(.*)!([<>].*)!([0-9]+)!([0-9]+)$'
prog = re.compile(pat)
def main():
maxitems = 25
search = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'm:s:')
except getopt.error, msg:
print msg
        print 'usage: ftpstats [-m maxitems] [-s string] [file]'
sys.exit(2)
for o, a in opts:
if o == '-m':
maxitems = string.atoi(a)
if o == '-s':
search = a
file = '/usr/adm/ftpd'
if args: file = args[0]
if file == '-':
f = sys.stdin
else:
try:
f = open(file, 'r')
except IOError, msg:
print file, ':', msg
sys.exit(1)
bydate = {}
bytime = {}
byfile = {}
bydir = {}
byhost = {}
byuser = {}
bytype = {}
lineno = 0
try:
while 1:
line = f.readline()
if not line: break
lineno = lineno + 1
if search and string.find(line, search) < 0:
continue
            m = prog.match(line)
            if not m:
                print 'Bad line', lineno, ':', repr(line)
                continue
            items = m.group(1, 2, 3, 4, 5, 6)
(logtime, loguser, loghost, logfile, logbytes,
logxxx2) = items
## print logtime
## print '-->', loguser
## print '--> -->', loghost
## print '--> --> -->', logfile
## print '--> --> --> -->', logbytes
## print '--> --> --> --> -->', logxxx2
## for i in logtime, loghost, logbytes, logxxx2:
## if '!' in i: print '???', i
add(bydate, logtime[-4:] + ' ' + logtime[:6], items)
add(bytime, logtime[7:9] + ':00-59', items)
direction, logfile = logfile[0], logfile[1:]
# The real path probably starts at the last //...
while 1:
i = string.find(logfile, '//')
if i < 0: break
logfile = logfile[i+1:]
add(byfile, logfile + ' ' + direction, items)
logdir = os.path.dirname(logfile)
## logdir = os.path.normpath(logdir) + '/.'
while 1:
add(bydir, logdir + ' ' + direction, items)
dirhead = os.path.dirname(logdir)
if dirhead == logdir: break
logdir = dirhead
add(byhost, loghost, items)
add(byuser, loguser, items)
add(bytype, direction, items)
except KeyboardInterrupt:
print 'Interrupted at line', lineno
show(bytype, 'by transfer direction', maxitems)
show(bydir, 'by directory', maxitems)
show(byfile, 'by file', maxitems)
show(byhost, 'by host', maxitems)
show(byuser, 'by user', maxitems)
showbar(bydate, 'by date')
showbar(bytime, 'by time of day')
def showbar(dict, title):
n = len(title)
print '='*((70-n)//2), title, '='*((71-n)//2)
list = []
keys = dict.keys()
keys.sort()
for key in keys:
n = len(str(key))
list.append((len(dict[key]), key))
maxkeylength = 0
maxcount = 0
for count, key in list:
maxkeylength = max(maxkeylength, len(key))
maxcount = max(maxcount, count)
maxbarlength = 72 - maxkeylength - 7
for count, key in list:
barlength = int(round(maxbarlength*float(count)/maxcount))
bar = '*'*barlength
print '%5d %-*s %s' % (count, maxkeylength, key, bar)
def show(dict, title, maxitems):
if len(dict) > maxitems:
title = title + ' (first %d)'%maxitems
n = len(title)
print '='*((70-n)//2), title, '='*((71-n)//2)
list = []
keys = dict.keys()
for key in keys:
list.append((-len(dict[key]), key))
list.sort()
for count, key in list[:maxitems]:
print '%5d %s' % (-count, key)
def add(dict, key, item):
if dict.has_key(key):
dict[key].append(item)
else:
dict[key] = [item]
if __name__ == "__main__":
main()
|
reevejd/Teksavvy-Internet-Monitor
|
refs/heads/master
|
node_modules/utf8/tests/generate-test-data.py
|
1788
|
#!/usr/bin/env python
import re
import json
# https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
# http://stackoverflow.com/a/13436167/96656
def unisymbol(codePoint):
if codePoint >= 0x0000 and codePoint <= 0xFFFF:
return unichr(codePoint)
elif codePoint >= 0x010000 and codePoint <= 0x10FFFF:
highSurrogate = int((codePoint - 0x10000) / 0x400) + 0xD800
lowSurrogate = int((codePoint - 0x10000) % 0x400) + 0xDC00
return unichr(highSurrogate) + unichr(lowSurrogate)
else:
return 'Error'
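# Worked example of the surrogate formulae above (added for illustration):
# for U+1F4A9, codePoint - 0x10000 = 0xF4A9, so
#   highSurrogate = 0xF4A9 // 0x400 + 0xD800 = 0xD83D
#   lowSurrogate  = 0xF4A9 %  0x400 + 0xDC00 = 0xDCA9
# and unisymbol(0x1F4A9) returns the pair u'\ud83d\udca9'.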
def hexify(codePoint):
return 'U+' + hex(codePoint)[2:].upper().zfill(6)
def writeFile(filename, contents):
print filename
with open(filename, 'w') as f:
f.write(contents.strip() + '\n')
data = []
for codePoint in range(0x000000, 0x10FFFF + 1):
# Skip non-scalar values.
if codePoint >= 0xD800 and codePoint <= 0xDFFF:
continue
symbol = unisymbol(codePoint)
# http://stackoverflow.com/a/17199950/96656
bytes = symbol.encode('utf8').decode('latin1')
data.append({
'codePoint': codePoint,
'decoded': symbol,
'encoded': bytes
});
jsonData = json.dumps(data, sort_keys=False, indent=2, separators=(',', ': '))
# Use tabs instead of double spaces for indentation
jsonData = jsonData.replace(' ', '\t')
# Escape hexadecimal digits in escape sequences
jsonData = re.sub(
r'\\u([a-fA-F0-9]{4})',
lambda match: r'\u{}'.format(match.group(1).upper()),
jsonData
)
writeFile('data.json', jsonData)
|
DaneTheory/MozDef
|
refs/heads/master
|
alerts/lib/alerttask.py
|
3
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Anthony Verez averez@mozilla.com
# Jeff Bryner jbryner@mozilla.com
import json
import kombu
import pytz
import pyes
from datetime import datetime
from datetime import timedelta
from dateutil.parser import parse
from collections import Counter
from celery import Task
from celery.utils.log import get_task_logger
from config import RABBITMQ, ES, OPTIONS
def toUTC(suspectedDate, localTimeZone=None):
'''make a UTC date out of almost anything'''
utc = pytz.UTC
objDate = None
if localTimeZone is None:
localTimeZone= OPTIONS['defaulttimezone']
if type(suspectedDate) in (str, unicode):
objDate = parse(suspectedDate, fuzzy=True)
elif type(suspectedDate) == datetime:
objDate = suspectedDate
if objDate.tzinfo is None:
objDate = pytz.timezone(localTimeZone).localize(objDate)
objDate = utc.normalize(objDate)
else:
objDate = utc.normalize(objDate)
if objDate is not None:
objDate = utc.normalize(objDate)
return objDate
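# Usage sketch for toUTC (illustrative only): a fuzzy string or naive datetime
# is localized to the given (or configured default) timezone and normalized
# to UTC, e.g.
#   toUTC('2014-05-01 08:00:00', 'US/Pacific')  -> 2014-05-01 15:00:00+00:00
#   toUTC(datetime.now())                       -> timezone-aware datetime in UTC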
class AlertTask(Task):
def __init__(self):
self.alert_name = self.__class__.__name__
self.filter = None
# List of events
self.events = None
# List of aggregations
# e.g. when aggregField is email: [{value:'evil@evil.com',count:1337,events:[...]}, ...]
self.aggregations = None
self.log.debug('starting {0}'.format(self.alert_name))
self.log.debug(RABBITMQ)
self.log.debug(ES)
self._configureKombu()
self._configureES()
@property
def log(self):
return get_task_logger('%s.%s' % (__name__, self.alert_name))
def _configureKombu(self):
"""
Configure kombu for rabbitmq
"""
try:
connString = 'amqp://{0}:{1}@{2}:{3}//'.format(
RABBITMQ['mquser'],
RABBITMQ['mqpassword'],
RABBITMQ['mqserver'],
RABBITMQ['mqport'])
self.mqConn = kombu.Connection(connString)
self.alertExchange = kombu.Exchange(
name=RABBITMQ['alertexchange'],
type='topic',
durable=True)
self.alertExchange(self.mqConn).declare()
alertQueue = kombu.Queue(RABBITMQ['alertqueue'],
exchange=self.alertExchange)
alertQueue(self.mqConn).declare()
self.mqproducer = self.mqConn.Producer(serializer='json')
self.log.debug('Kombu configured')
except Exception as e:
self.log.error('Exception while configuring kombu for alerts: {0}'.format(e))
def _configureES(self):
"""
Configure pyes for elasticsearch
"""
try:
self.es = pyes.ES(ES['servers'])
self.log.debug('ES configured')
except Exception as e:
self.log.error('Exception while configuring ES for alerts: {0}'.format(e))
def alertToMessageQueue(self, alertDict):
"""
Send alert to the rabbit message queue
"""
try:
# cherry pick items from the alertDict to send to the alerts messageQueue
mqAlert = dict(severity='INFO', category='')
if 'severity' in alertDict.keys():
mqAlert['severity'] = alertDict['severity']
if 'category' in alertDict.keys():
mqAlert['category'] = alertDict['category']
if 'utctimestamp' in alertDict.keys():
mqAlert['utctimestamp'] = alertDict['utctimestamp']
if 'eventtimestamp' in alertDict.keys():
mqAlert['eventtimestamp'] = alertDict['eventtimestamp']
mqAlert['summary'] = alertDict['summary']
self.log.debug(mqAlert)
ensurePublish = self.mqConn.ensure(
self.mqproducer,
self.mqproducer.publish,
max_retries=10)
ensurePublish(alertDict,
exchange=self.alertExchange,
routing_key=RABBITMQ['alertexchange'])
self.log.debug('alert sent to the alert queue')
except Exception as e:
self.log.error('Exception while sending alert to message queue: {0}'.format(e))
def alertToES(self, alertDict):
"""
Send alert to elasticsearch
"""
try:
res = self.es.index(index='alerts', doc_type='alert', doc=alertDict)
self.log.debug('alert sent to ES')
self.log.debug(res)
return res
except Exception as e:
self.log.error('Exception while pushing alert to ES: {0}'.format(e))
def filtersManual(self, date_timedelta, must=[], should=[], must_not=[]):
"""
Configure filters manually
date_timedelta is a dict in timedelta format
see https://docs.python.org/2/library/datetime.html#timedelta-objects
must, should and must_not are pyes filter objects lists
see http://pyes.readthedocs.org/en/latest/references/pyes.filters.html
"""
begindateUTC = toUTC(datetime.now() - timedelta(**date_timedelta))
enddateUTC = toUTC(datetime.now())
qDate = pyes.RangeQuery(qrange=pyes.ESRange('utctimestamp',
from_value=begindateUTC, to_value=enddateUTC))
q = pyes.ConstantScoreQuery(pyes.MatchAllQuery())
must_not.append(pyes.ExistsFilter('alerttimestamp'))
q.filters.append(pyes.BoolFilter(
must=must,
should=should,
must_not=must_not))
self.filter = q
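    # Example (illustrative; the filter values are hypothetical): a child
    # alert class would typically call this from its main() as
    #   self.filtersManual({'minutes': 15},
    #                      must=[pyes.TermFilter('category', 'syslog')])
    # to match not-yet-alerted events from the last 15 minutes.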
def filtersFromKibanaDash(self, fp, date_timedelta):
"""
Import filters from a kibana dashboard
fp is the file path of the json file
date_timedelta is a dict in timedelta format
see https://docs.python.org/2/library/datetime.html#timedelta-objects
"""
f = open(fp)
data = json.load(f)
must = []
should = []
must_not = []
for filtid in data['services']['filter']['list'].keys():
filt = data['services']['filter']['list'][filtid]
if filt['active'] and 'query' in filt.keys():
value = filt['query'].split(':')[-1]
fieldname = filt['query'].split(':')[0].split('.')[-1]
# self.log.info(fieldname)
# self.log.info(value)
# field: fieldname
# query: value
if 'field' in filt.keys():
fieldname = filt['field']
value = filt['query']
if '\"' in value:
value = value.split('\"')[1]
pyesfilt = pyes.QueryFilter(pyes.MatchQuery(fieldname, value, 'phrase'))
else:
pyesfilt = pyes.TermFilter(fieldname, value)
else:
# _exists_:field
if filt['query'].startswith('_exists_:'):
pyesfilt = pyes.ExistsFilter(value.split('.')[-1])
# self.log.info('exists %s' % value.split('.')[-1])
# _missing_:field
elif filt['query'].startswith('_missing_:'):
pyesfilt = pyes.filters.MissingFilter(value.split('.')[-1])
# self.log.info('missing %s' % value.split('.')[-1])
# field:"value"
elif '\"' in value:
value = value.split('\"')[1]
pyesfilt = pyes.QueryFilter(pyes.MatchQuery(fieldname, value, 'phrase'))
# self.log.info("phrase %s %s" % (fieldname, value))
# field:(value1 value2 value3)
elif '(' in value and ')' in value:
value = value.split('(')[1]
                        value = value.split(')')[0]
pyesfilt = pyes.QueryFilter(pyes.MatchQuery(fieldname, value, "boolean"))
# field:value
else:
pyesfilt = pyes.TermFilter(fieldname, value)
# self.log.info("terms %s %s" % (fieldname, value))
if filt['mandate'] == 'must':
must.append(pyesfilt)
elif filt['mandate'] == 'either':
should.append(pyesfilt)
elif filt['mandate'] == 'mustNot':
must_not.append(pyesfilt)
# self.log.info(must)
f.close()
self.filtersManual(date_timedelta, must=must, should=should, must_not=must_not)
def searchEventsSimple(self):
"""
Search events matching filters, store events in self.events
"""
try:
pyesresults = self.es.search(
self.filter,
size=1000,
indices='events,events-previous')
self.events = pyesresults._search_raw()['hits']['hits']
self.log.debug(self.events)
except Exception as e:
self.log.error('Error while searching events in ES: {0}'.format(e))
def searchEventsAggreg(self, aggregField, samplesLimit=5):
"""
Search aggregations matching filters by aggregField, store them in self.aggregations
"""
try:
pyesresults = self.es.search(
self.filter,
size=1000,
indices='events,events-previous')
results = pyesresults._search_raw()['hits']['hits']
# List of aggregation values that can be counted/summarized by Counter
# Example: ['evil@evil.com','haxoor@noob.com', 'evil@evil.com'] for an email aggregField
aggregValues = []
for r in results:
aggregValues.append(r['_source']['details'][aggregField])
# [{value:'evil@evil.com',count:1337,events:[...]}, ...]
aggregList = []
for i in Counter(aggregValues).most_common():
idict = {
'value': i[0],
'count': i[1],
'events': []
}
for r in results:
if r['_source']['details'][aggregField].encode('ascii', 'ignore') == i[0]:
# copy events detail into this aggregation up to our samples limit
if len(idict['events']) < samplesLimit:
idict['events'].append(r)
aggregList.append(idict)
self.aggregations = aggregList
self.log.debug(self.aggregations)
except Exception as e:
self.log.error('Error while searching events in ES: {0}'.format(e))
def walkEvents(self):
"""
Walk through events, provide some methods to hook in alerts
"""
if len(self.events) > 0:
for i in self.events:
alert = self.onEvent(i)
if alert:
self.log.debug(alert)
alertResultES = self.alertToES(alert)
self.tagEventsAlert([i], alertResultES)
self.alertToMessageQueue(alert)
self.hookAfterInsertion(alert)
def walkAggregations(self, threshold):
"""
Walk through aggregations, provide some methods to hook in alerts
"""
if len(self.aggregations) > 0:
for aggreg in self.aggregations:
if aggreg['count'] >= threshold:
alert = self.onAggreg(aggreg)
self.log.debug(alert)
if alert:
alertResultES = self.alertToES(alert)
self.tagEventsAlert(aggreg['events'], alertResultES)
self.alertToMessageQueue(alert)
def createAlertDict(self, summary, category, tags, events, severity='NOTICE'):
"""
Create an alert dict
"""
alert = {
'utctimestamp': toUTC(datetime.now()).isoformat(),
'severity': severity,
'summary': summary,
'category': category,
'tags': tags,
'events': []
}
for e in events:
alert['events'].append({
'documentindex': e['_index'],
'documenttype': e['_type'],
'documentsource': e['_source'],
'documentid': e['_id']})
self.log.debug(alert)
return alert
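    # Example of the resulting structure (illustrative values):
    #   createAlertDict('5 failed logins', 'bruteforce', ['ssh'], events)
    #   -> {'utctimestamp': '...', 'severity': 'NOTICE',
    #       'summary': '5 failed logins', 'category': 'bruteforce',
    #       'tags': ['ssh'], 'events': [{'documentindex': ..., ...}]}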
def onEvent(self, event):
"""
To be overriden by children to run their code
to be used when creating an alert using an event
must return an alert dict or None
"""
pass
def onAggreg(self, aggreg):
"""
To be overriden by children to run their code
to be used when creating an alert using an aggregation
must return an alert dict or None
"""
pass
def hookAfterInsertion(self, alert):
"""
To be overriden by children to run their code
to be used when creating an alert using an aggregation
"""
pass
def tagEventsAlert(self, events, alertResultES):
"""
Update the event with the alertid/index
and update the alerttimestamp on the event itself so it's
not re-alerted
"""
try:
for event in events:
if 'alerts' not in event['_source'].keys():
event['_source']['alerts'] = []
event['_source']['alerts'].append({
'index': alertResultES['_index'],
'type': alertResultES['_type'],
'id': alertResultES['_id']})
event['_source']['alerttimestamp'] = toUTC(datetime.now()).isoformat()
self.es.update(event['_index'], event['_type'],
event['_id'], document=event['_source'])
except Exception as e:
self.log.error('Error while updating events in ES: {0}'.format(e))
def main(self):
"""
To be overriden by children to run their code
"""
pass
def run(self):
"""
Main method launched by celery periodically
"""
try:
self.main()
self.log.debug('finished')
except Exception as e:
self.log.error('Exception in main() method: {0}'.format(e))
|
felixfontein/ansible
|
refs/heads/devel
|
test/support/windows-integration/plugins/modules/win_certificate_store.py
|
99
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_certificate_store
version_added: '2.5'
short_description: Manages the certificate store
description:
- Used to import/export and remove certificates and keys from the local
certificate store.
- This module is not used to create certificates and will only manage existing
certs as a file or in the store.
- It can be used to import PEM, DER, P7B, PKCS12 (PFX) certificates and export
PEM, DER and PKCS12 certificates.
options:
state:
description:
- If C(present), will ensure that the certificate at I(path) is imported
into the certificate store specified.
- If C(absent), will ensure that the certificate specified by I(thumbprint)
or the thumbprint of the cert at I(path) is removed from the store
specified.
- If C(exported), will ensure the file at I(path) is a certificate
specified by I(thumbprint).
- When exporting a certificate, if I(path) is a directory then the module
will fail, otherwise the file will be replaced if needed.
type: str
choices: [ absent, exported, present ]
default: present
path:
description:
- The path to a certificate file.
- This is required when I(state) is C(present) or C(exported).
- When I(state) is C(absent) and I(thumbprint) is not specified, the
thumbprint is derived from the certificate at this path.
type: path
thumbprint:
description:
- The thumbprint as a hex string to either export or remove.
- See the examples for how to specify the thumbprint.
type: str
store_name:
description:
- The store name to use when importing a certificate or searching for a
certificate.
- "C(AddressBook): The X.509 certificate store for other users"
- "C(AuthRoot): The X.509 certificate store for third-party certificate authorities (CAs)"
- "C(CertificateAuthority): The X.509 certificate store for intermediate certificate authorities (CAs)"
- "C(Disallowed): The X.509 certificate store for revoked certificates"
- "C(My): The X.509 certificate store for personal certificates"
- "C(Root): The X.509 certificate store for trusted root certificate authorities (CAs)"
- "C(TrustedPeople): The X.509 certificate store for directly trusted people and resources"
- "C(TrustedPublisher): The X.509 certificate store for directly trusted publishers"
type: str
choices:
- AddressBook
- AuthRoot
- CertificateAuthority
- Disallowed
- My
- Root
- TrustedPeople
- TrustedPublisher
default: My
store_location:
description:
- The store location to use when importing a certificate or searching for a
certificate.
choices: [ CurrentUser, LocalMachine ]
default: LocalMachine
password:
description:
- The password of the pkcs12 certificate key.
- This is used when reading a pkcs12 certificate file or the password to
set when C(state=exported) and C(file_type=pkcs12).
- If the pkcs12 file has no password set or no password should be set on
the exported file, do not set this option.
type: str
key_exportable:
description:
- Whether to allow the private key to be exported.
- If C(no), then this module and other process will only be able to export
the certificate and the private key cannot be exported.
- Used when C(state=present) only.
type: bool
default: yes
key_storage:
description:
- Specifies where Windows will store the private key when it is imported.
- When set to C(default), the default option as set by Windows is used, typically C(user).
- When set to C(machine), the key is stored in a path accessible by various
users.
- When set to C(user), the key is stored in a path only accessible by the
current user.
- Used when C(state=present) only and cannot be changed once imported.
- See U(https://msdn.microsoft.com/en-us/library/system.security.cryptography.x509certificates.x509keystorageflags.aspx)
for more details.
type: str
choices: [ default, machine, user ]
default: default
file_type:
description:
- The file type to export the certificate as when C(state=exported).
- C(der) is a binary ASN.1 encoded file.
- C(pem) is a base64 encoded file of a der file in the OpenSSL form.
- C(pkcs12) (also known as pfx) is a binary container that contains both
the certificate and private key unlike the other options.
- When C(pkcs12) is set and the private key is not exportable or accessible
by the current user, it will throw an exception.
type: str
choices: [ der, pem, pkcs12 ]
default: der
notes:
- Some actions on PKCS12 certificates and keys may fail with the error
C(the specified network password is not correct), either use CredSSP or
Kerberos with credential delegation, or use C(become) to bypass these
restrictions.
- The certificates must be located on the Windows host to be set with I(path).
- When importing a certificate for usage in IIS, it is generally required
to use the C(machine) key_storage option, as both C(default) and C(user)
will make the private key unreadable to IIS APPPOOL identities and prevent
binding the certificate to the https endpoint.
author:
- Jordan Borean (@jborean93)
'''
EXAMPLES = r'''
- name: Import a certificate
win_certificate_store:
path: C:\Temp\cert.pem
state: present
- name: Import pfx certificate that is password protected
win_certificate_store:
path: C:\Temp\cert.pfx
state: present
password: VeryStrongPasswordHere!
become: yes
become_method: runas
- name: Import pfx certificate without password and set private key as un-exportable
win_certificate_store:
path: C:\Temp\cert.pfx
state: present
key_exportable: no
# usually you don't set this here but it is for illustrative purposes
vars:
ansible_winrm_transport: credssp
- name: Remove a certificate based on file thumbprint
win_certificate_store:
path: C:\Temp\cert.pem
state: absent
- name: Remove a certificate based on thumbprint
win_certificate_store:
thumbprint: BD7AF104CF1872BDB518D95C9534EA941665FD27
state: absent
- name: Remove certificate based on thumbprint is CurrentUser/TrustedPublishers store
win_certificate_store:
thumbprint: BD7AF104CF1872BDB518D95C9534EA941665FD27
state: absent
store_location: CurrentUser
store_name: TrustedPublisher
- name: Export certificate as der encoded file
win_certificate_store:
path: C:\Temp\cert.cer
state: exported
file_type: der
- name: Export certificate and key as pfx encoded file
win_certificate_store:
path: C:\Temp\cert.pfx
state: exported
file_type: pkcs12
password: AnotherStrongPass!
become: yes
become_method: runas
become_user: SYSTEM
- name: Import certificate be used by IIS
win_certificate_store:
path: C:\Temp\cert.pfx
file_type: pkcs12
password: StrongPassword!
store_location: LocalMachine
key_storage: machine
state: present
'''
RETURN = r'''
thumbprints:
description: A list of certificate thumbprints that were touched by the
module.
returned: success
type: list
sample: ["BC05633694E675449136679A658281F17A191087"]
'''
|
Jusedawg/SickRage
|
refs/heads/develop
|
lib/hachoir_parser/video/mov.py
|
56
|
"""
Apple Quicktime Movie (file extension ".mov") parser.
Documents:
- Parsing and Writing QuickTime Files in Java (by Chris Adamson, 02/19/2003)
http://www.onjava.com/pub/a/onjava/2003/02/19/qt_file_format.html
- QuickTime File Format (official technical reference)
http://developer.apple.com/documentation/QuickTime/QTFF/qtff.pdf
- Apple QuickTime:
http://wiki.multimedia.cx/index.php?title=Apple_QuickTime
- File type (ftyp):
http://www.ftyps.com/
- MPEG4 standard
http://neuron2.net/library/avc/c041828_ISO_IEC_14496-12_2005%28E%29.pdf
Author: Victor Stinner, Robert Xiao
Creation: 2 august 2006
"""
from hachoir_parser import Parser
from hachoir_parser.common.win32 import GUID
from hachoir_core.field import (ParserError, FieldSet, MissingField,
Enum,
Bit, NullBits, Bits, UInt8, Int16, UInt16, Int32, UInt32, Int64, UInt64, TimestampMac32,
String, PascalString8, PascalString16, CString,
RawBytes, NullBytes, PaddingBytes)
from hachoir_core.endian import BIG_ENDIAN
from hachoir_core.text_handler import textHandler, hexadecimal
from hachoir_core.tools import MAC_TIMESTAMP_T0, timedelta
def timestampMac64(value):
if not isinstance(value, (float, int, long)):
raise TypeError("an integer or float is required")
return MAC_TIMESTAMP_T0 + timedelta(seconds=value)
from hachoir_core.field.timestamp import timestampFactory
TimestampMac64 = timestampFactory("TimestampMac64", timestampMac64, 64)
def fixedFloatFactory(name, int_bits, float_bits, doc):
size = int_bits + float_bits
class Float(FieldSet):
static_size = size
__doc__ = doc
def createFields(self):
yield Bits(self, "int_part", int_bits)
yield Bits(self, "float_part", float_bits)
def createValue(self):
return self["int_part"].value + float(self["float_part"].value) / (1<<float_bits)
klass = Float
klass.__name__ = name
return klass
QTFloat16 = fixedFloatFactory("QTFloat16", 8, 8, "8.8 fixed point number")
QTFloat32 = fixedFloatFactory("QTFloat32", 16, 16, "16.16 fixed point number")
QTFloat2_30 = fixedFloatFactory("QTFloat2_30", 2, 30, "2.30 fixed point number")
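# Worked example of the fixed-point layout above (illustrative): a QTFloat32
# (16.16) field whose raw 32-bit value is 0x00018000 parses as int_part = 1
# and float_part = 0x8000, so createValue() returns 1 + 32768/65536.0 = 1.5.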
class AtomList(FieldSet):
def createFields(self):
while not self.eof:
yield Atom(self, "atom[]")
class TrackHeader(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version (0 or 1)")
yield NullBits(self, "flags", 20)
yield Bit(self, "is_in_poster")
yield Bit(self, "is_in_preview", "Is this track used when previewing the presentation?")
yield Bit(self, "is_in_movie", "Is this track used in the presentation?")
yield Bit(self, "is_enabled", "Is this track enabled?")
if self['version'].value == 0:
# 32-bit version
yield TimestampMac32(self, "creation_date", "Creation time of this track")
yield TimestampMac32(self, "lastmod_date", "Last modification time of this track")
yield UInt32(self, "track_id", "Unique nonzero identifier of this track within the presentation")
yield NullBytes(self, "reserved[]", 4)
yield UInt32(self, "duration", "Length of track, in movie time-units")
elif self['version'].value == 1:
# 64-bit version
yield TimestampMac64(self, "creation_date", "Creation time of this track")
yield TimestampMac64(self, "lastmod_date", "Last modification time of this track")
yield UInt32(self, "track_id", "Unique nonzero identifier of this track within the presentation")
yield NullBytes(self, "reserved[]", 4)
yield UInt64(self, "duration", "Length of track, in movie time-units")
yield NullBytes(self, "reserved[]", 8)
yield Int16(self, "video_layer", "Middle layer is 0; lower numbers are closer to the viewer")
yield Int16(self, "alternate_group", "Group ID that this track belongs to (0=no group)")
yield QTFloat16(self, "volume", "Track relative audio volume (1.0 = full)")
yield NullBytes(self, "reserved[]", 2)
yield QTFloat32(self, "geom_a", "Width scale")
yield QTFloat32(self, "geom_b", "Width rotate")
yield QTFloat2_30(self, "geom_u", "Width angle")
yield QTFloat32(self, "geom_c", "Height rotate")
yield QTFloat32(self, "geom_d", "Height scale")
yield QTFloat2_30(self, "geom_v", "Height angle")
yield QTFloat32(self, "geom_x", "Position X")
yield QTFloat32(self, "geom_y", "Position Y")
yield QTFloat2_30(self, "geom_w", "Divider scale")
yield QTFloat32(self, "frame_size_width")
yield QTFloat32(self, "frame_size_height")
class TrackReferenceType(FieldSet):
def createFields(self):
while not self.eof:
yield UInt32(self, "track_id[]", "Referenced track ID")
class Handler(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 24)
yield String(self, "creator", 4)
yield String(self, "subtype", 4)
yield String(self, "manufacturer", 4)
yield UInt32(self, "res_flags")
yield UInt32(self, "res_flags_mask")
if self.root.is_mpeg4:
yield CString(self, "name", charset="UTF-8")
else:
yield PascalString8(self, "name")
class LanguageCode(FieldSet):
static_size = 16
MAC_LANG = {
0: 'English',
1: 'French',
2: 'German',
3: 'Italian',
4: 'Dutch',
5: 'Swedish',
6: 'Spanish',
7: 'Danish',
8: 'Portuguese',
9: 'Norwegian',
10: 'Hebrew',
11: 'Japanese',
12: 'Arabic',
13: 'Finnish',
14: 'Greek',
15: 'Icelandic',
16: 'Maltese',
17: 'Turkish',
18: 'Croatian',
19: 'Traditional Chinese',
20: 'Urdu',
21: 'Hindi',
22: 'Thai',
23: 'Korean',
24: 'Lithuanian',
25: 'Polish',
26: 'Hungarian',
27: 'Estonian',
        28: 'Latvian',   # also known as Lettish
        29: 'Lappish',   # also known as Saamisk
30: 'Faeroese',
31: 'Farsi',
32: 'Russian',
33: 'Simplified Chinese',
34: 'Flemish',
35: 'Irish',
36: 'Albanian',
37: 'Romanian',
38: 'Czech',
39: 'Slovak',
40: 'Slovenian',
41: 'Yiddish',
42: 'Serbian',
43: 'Macedonian',
44: 'Bulgarian',
45: 'Ukrainian',
46: 'Byelorussian',
47: 'Uzbek',
48: 'Kazakh',
49: 'Azerbaijani',
50: 'AzerbaijanAr',
51: 'Armenian',
52: 'Georgian',
53: 'Moldavian',
54: 'Kirghiz',
55: 'Tajiki',
56: 'Turkmen',
57: 'Mongolian',
58: 'MongolianCyr',
59: 'Pashto',
60: 'Kurdish',
61: 'Kashmiri',
62: 'Sindhi',
63: 'Tibetan',
64: 'Nepali',
65: 'Sanskrit',
66: 'Marathi',
67: 'Bengali',
68: 'Assamese',
69: 'Gujarati',
70: 'Punjabi',
71: 'Oriya',
72: 'Malayalam',
73: 'Kannada',
74: 'Tamil',
75: 'Telugu',
76: 'Sinhalese',
77: 'Burmese',
78: 'Khmer',
79: 'Lao',
80: 'Vietnamese',
81: 'Indonesian',
82: 'Tagalog',
83: 'MalayRoman',
84: 'MalayArabic',
85: 'Amharic',
86: 'Tigrinya',
88: 'Somali',
89: 'Swahili',
90: 'Ruanda',
91: 'Rundi',
92: 'Chewa',
93: 'Malagasy',
94: 'Esperanto',
128: 'Welsh',
129: 'Basque',
130: 'Catalan',
131: 'Latin',
132: 'Quechua',
133: 'Guarani',
134: 'Aymara',
135: 'Tatar',
136: 'Uighur',
137: 'Dzongkha',
138: 'JavaneseRom',
}
def fieldHandler(self, field):
if field.value == 0:
return ' '
return chr(field.value + 0x60)
def createFields(self):
value = self.stream.readBits(self.absolute_address, 16, self.endian)
if value < 1024:
yield Enum(UInt16(self, "lang"), self.MAC_LANG)
else:
yield NullBits(self, "padding[]", 1)
yield textHandler(Bits(self, "lang[0]", 5), self.fieldHandler)
yield textHandler(Bits(self, "lang[1]", 5), self.fieldHandler)
yield textHandler(Bits(self, "lang[2]", 5), self.fieldHandler)
def createValue(self):
if 'lang' in self:
return self['lang'].display
return self['lang[0]'].display + self['lang[1]'].display + self['lang[2]'].display
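# Worked example of the packed language code above (illustrative): the
# ISO-639-2/T code "und" is stored as three 5-bit fields ('u'-0x60 = 0x15,
# 'n'-0x60 = 0x0E, 'd'-0x60 = 0x04), giving the 16-bit value
# (0x15 << 10) | (0x0E << 5) | 0x04 = 0x55C4. Since 0x55C4 >= 1024 it is
# decoded by the three lang[i] fields rather than the legacy Mac enum.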
class MediaHeader(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version (0 or 1)")
yield NullBits(self, "flags", 24)
if self['version'].value == 0:
# 32-bit version
yield TimestampMac32(self, "creation_date", "Creation time of this media")
yield TimestampMac32(self, "lastmod_date", "Last modification time of this media")
yield UInt32(self, "time_scale", "Number of time-units per second")
yield UInt32(self, "duration", "Length of media, in time-units")
elif self['version'].value == 1:
# 64-bit version
yield TimestampMac64(self, "creation_date", "Creation time of this media")
yield TimestampMac64(self, "lastmod_date", "Last modification time of this media")
yield UInt32(self, "time_scale", "Number of time-units per second")
yield UInt64(self, "duration", "Length of media, in time-units")
yield LanguageCode(self, "language")
yield Int16(self, "quality")
class VideoMediaHeader(FieldSet):
GRAPHICSMODE = {
0: ('Copy', "Copy the source image over the destination"),
0x20: ('Blend', "Blend of source and destination; blending factor is controlled by op color"),
0x24: ('Transparent', "Replace destination pixel with source pixel if the source pixel is not the op color"),
0x40: ('Dither copy', "Dither image if necessary, else copy"),
0x100: ('Straight alpha', "Blend of source and destination; blending factor is controlled by alpha channel"),
0x101: ('Premul white alpha', "Remove white from each pixel and blend"),
0x102: ('Premul black alpha', "Remove black from each pixel and blend"),
0x103: ('Composition', "Track drawn offscreen and dither copied onto screen"),
0x104: ('Straight alpha blend', "Blend of source and destination; blending factor is controlled by combining alpha channel and op color")
}
def graphicsDisplay(self, field):
if field.value in self.GRAPHICSMODE:
return self.GRAPHICSMODE[field.value][0]
return hex(field.value)
def graphicsDescription(self, field):
if field.value in self.GRAPHICSMODE:
return self.GRAPHICSMODE[field.value][1]
return ""
def createFields(self):
yield UInt8(self, "version", "Version")
yield Bits(self, "flags", 24, "Flags (=1)")
graphics = UInt16(self, "graphicsmode")
graphics.createDisplay = lambda:self.graphicsDisplay(graphics)
graphics.createDescription = lambda:self.graphicsDescription(graphics)
yield graphics
yield UInt16(self, "op_red", "Red value for graphics mode")
yield UInt16(self, "op_green", "Green value for graphics mode")
yield UInt16(self, "op_blue", "Blue value for graphics mode")
class SoundMediaHeader(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 24)
yield QTFloat16(self, "balance")
yield UInt16(self, "reserved[]")
class HintMediaHeader(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 24)
yield UInt16(self, "max_pdu_size")
yield UInt16(self, "avg_pdu_size")
yield UInt32(self, "max_bit_rate")
yield UInt32(self, "avg_bit_rate")
yield UInt32(self, "reserved[]")
class DataEntryUrl(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 23)
yield Bit(self, "is_same_file", "Is the reference to this file?")
if not self['is_same_file'].value:
yield CString(self, "location")
class DataEntryUrn(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 23)
yield Bit(self, "is_same_file", "Is the reference to this file?")
if not self['is_same_file'].value:
yield CString(self, "name")
yield CString(self, "location")
class DataReference(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count")
for i in xrange(self['count'].value):
yield Atom(self, "atom[]")
class EditList(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version (0 or 1)")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count")
version = self['version'].value
if version == 0:
UInt, Int = UInt32, Int32
elif version == 1:
UInt, Int = UInt64, Int64
else:
raise ParserError("elst version %d not supported"%version)
for i in xrange(self['count'].value):
yield UInt(self, "duration[]", "Duration of this edit segment")
yield Int(self, "time[]", "Starting time of this edit segment within the media (-1 = empty edit)")
yield QTFloat32(self, "play_speed[]", "Playback rate (0 = dwell edit, 1 = normal playback)")
class Load(FieldSet):
def createFields(self):
yield UInt32(self, "start")
yield UInt32(self, "length")
yield UInt32(self, "flags") # PreloadAlways = 1 or TrackEnabledPreload = 2
yield UInt32(self, "hints") # KeepInBuffer = 0x00000004; HighQuality = 0x00000100; SingleFieldVideo = 0x00100000
class MovieHeader(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version (0 or 1)")
yield NullBits(self, "flags", 24)
if self['version'].value == 0:
# 32-bit version
yield TimestampMac32(self, "creation_date", "Creation time of this presentation")
yield TimestampMac32(self, "lastmod_date", "Last modification time of this presentation")
yield UInt32(self, "time_scale", "Number of time-units per second")
yield UInt32(self, "duration", "Length of presentation, in time-units")
elif self['version'].value == 1:
# 64-bit version
yield TimestampMac64(self, "creation_date", "Creation time of this presentation")
yield TimestampMac64(self, "lastmod_date", "Last modification time of this presentation")
yield UInt32(self, "time_scale", "Number of time-units per second")
yield UInt64(self, "duration", "Length of presentation, in time-units")
yield QTFloat32(self, "play_speed", "Preferred playback speed (1.0 = normal)")
yield QTFloat16(self, "volume", "Preferred playback volume (1.0 = full)")
yield NullBytes(self, "reserved[]", 10)
yield QTFloat32(self, "geom_a", "Width scale")
yield QTFloat32(self, "geom_b", "Width rotate")
yield QTFloat2_30(self, "geom_u", "Width angle")
yield QTFloat32(self, "geom_c", "Height rotate")
yield QTFloat32(self, "geom_d", "Height scale")
yield QTFloat2_30(self, "geom_v", "Height angle")
yield QTFloat32(self, "geom_x", "Position X")
yield QTFloat32(self, "geom_y", "Position Y")
yield QTFloat2_30(self, "geom_w", "Divider scale")
yield UInt32(self, "preview_start")
yield UInt32(self, "preview_length")
yield UInt32(self, "still_poster")
yield UInt32(self, "sel_start")
yield UInt32(self, "sel_length")
yield UInt32(self, "current_time")
yield UInt32(self, "next_track_ID", "Value to use as the track ID for the next track added")
class FileType(FieldSet):
def createFields(self):
yield String(self, "brand", 4, "Major brand")
yield UInt32(self, "version", "Version")
while not self.eof:
yield String(self, "compat_brand[]", 4, "Compatible brand")
class MovieFragmentHeader(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "sequence_number")
class TrackFragmentRandomAccess(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "track_id")
yield NullBits(self, "reserved", 26)
yield Bits(self, "length_size_of_traf_num", 2)
yield Bits(self, "length_size_of_trun_num", 2)
yield Bits(self, "length_size_of_sample_num", 2)
yield UInt32(self, "number_of_entry")
for i in xrange(self['number_of_entry'].value):
if self['version'].value == 1:
yield UInt64(self, "time[%i]" % i)
yield UInt64(self, "moof_offset[%i]" %i)
else:
yield UInt32(self, "time[%i]" %i)
yield UInt32(self, "moof_offset[%i]" %i)
if self['length_size_of_traf_num'].value == 3:
yield UInt64(self, "traf_number[%i]" %i)
elif self['length_size_of_traf_num'].value == 2:
yield UInt32(self, "traf_number[%i]" %i)
elif self['length_size_of_traf_num'].value == 1:
yield UInt16(self, "traf_number[%i]" %i)
else:
yield UInt8(self, "traf_number[%i]" %i)
if self['length_size_of_trun_num'].value == 3:
yield UInt64(self, "trun_number[%i]" %i)
elif self['length_size_of_trun_num'].value == 2:
yield UInt32(self, "trun_number[%i]" %i)
elif self['length_size_of_trun_num'].value == 1:
yield UInt16(self, "trun_number[%i]" %i)
else:
yield UInt8(self, "trun_number[%i]" %i)
if self['length_size_of_sample_num'].value == 3:
yield UInt64(self, "sample_number[%i]" %i)
elif self['length_size_of_sample_num'].value == 2:
yield UInt32(self, "sample_number[%i]" %i)
elif self['length_size_of_sample_num'].value == 1:
yield UInt16(self, "sample_number[%i]" %i)
else:
yield UInt8(self, "sample_number[%i]" %i)
class MovieFragmentRandomAccessOffset(FieldSet):
def createFields(self):
yield UInt8(self, "version", "Version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "size")
def findHandler(self):
''' find the handler corresponding to this fieldset '''
while self:
if self.name in ('media', 'tags'):
break
self = self.parent
else:
return None
for atom in self:
if atom['tag'].value == 'hdlr':
return atom['hdlr']
return None
class METATAG(FieldSet):
def createFields(self):
yield UInt8(self, "unk[]", "0x80 or 0x00")
yield PascalString16(self, "tag_name", charset='UTF-8')
yield UInt16(self, "unk[]", "0x0001")
yield UInt16(self, "unk[]", "0x0000")
yield PascalString16(self, "tag_value", charset='UTF-8')
class META(FieldSet):
def createFields(self):
# This tag has too many variant forms.
if '/tags/' in self.path:
yield UInt32(self, "count")
for i in xrange(self['count'].value):
yield METATAG(self, "tag[]")
elif self.stream.readBits(self.absolute_address, 32, self.endian) == 0:
yield UInt8(self, "version")
yield Bits(self, "flags", 24)
yield AtomList(self, "tags")
else:
yield AtomList(self, "tags")
class Item(FieldSet):
def createFields(self):
yield UInt32(self, "size")
yield UInt32(self, "index")
yield Atom(self, "value")
class KeyList(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count")
for i in xrange(self['count'].value):
yield Atom(self, "key[]")
class ItemList(FieldSet):
def createFields(self):
handler = findHandler(self)
if handler is None:
raise ParserError("ilst couldn't find metadata handler")
if handler['subtype'].value == 'mdir':
while not self.eof:
yield Atom(self, "atom[]")
elif handler['subtype'].value == 'mdta':
while not self.eof:
yield Item(self, "item[]")
class NeroChapters(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "unknown")
yield UInt8(self, "count", description="Number of chapters")
for i in xrange(self['count'].value):
yield UInt64(self, "chapter_start[]")
yield PascalString8(self, "chapter_name[]", charset='UTF-8')
class SampleDecodeTimeTable(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count", description="Total entries in sample time table")
for i in xrange(self['count'].value):
yield UInt32(self, "sample_count[]", "Number of consecutive samples with this delta")
yield UInt32(self, "sample_delta[]", "Decode time delta since last sample, in time-units")
class SampleCompositionTimeTable(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count", description="Total entries in sample time table")
for i in xrange(self['count'].value):
yield UInt32(self, "sample_count[]", "Number of consecutive samples with this offset")
yield UInt32(self, "sample_offset[]", "Difference between decode time and composition time of this sample, in time-units")
class ChunkOffsetTable(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count", description="Total entries in offset table")
for i in xrange(self['count'].value):
yield UInt32(self, "chunk_offset[]")
class ChunkOffsetTable64(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count", description="Total entries in offset table")
for i in xrange(self['count'].value):
yield UInt64(self, "chunk_offset[]")
class SampleEntry(FieldSet):
def createFields(self):
yield UInt32(self, "size")
yield RawBytes(self, "format", 4, "Data Format (codec)")
yield NullBytes(self, "reserved[]", 6, "Reserved")
yield UInt16(self, "data_reference_index")
handler = findHandler(self)
if not handler:
raise ParserError("stsd couldn't find track handler")
if handler['subtype'].value == 'soun':
# Audio sample entry
yield NullBytes(self, "reserved[]", 8)
yield UInt16(self, "channels", "Number of audio channels")
yield UInt16(self, "samplesize", "Sample size in bits")
yield UInt16(self, "unknown")
yield NullBytes(self, "reserved[]", 2)
yield QTFloat32(self, "samplerate", "Sample rate in Hz")
elif handler['subtype'].value == 'vide':
# Video sample entry
yield UInt16(self, "version")
yield UInt16(self, "revision_level")
yield RawBytes(self, "vendor_id", 4)
yield UInt32(self, "temporal_quality")
yield UInt32(self, "spatial_quality")
yield UInt16(self, "width", "Width (pixels)")
yield UInt16(self, "height", "Height (pixels)")
yield QTFloat32(self, "horizontal_resolution", "Horizontal resolution in DPI")
yield QTFloat32(self, "vertical resolution", "Vertical resolution in DPI")
yield UInt32(self, "data_size")
yield UInt16(self, "frame_count")
yield UInt8(self, "compressor_name_length")
yield String(self, "compressor_name", 31, strip='\0')
yield UInt16(self, "depth", "Bit depth of image")
yield Int16(self, "unknown")
elif handler['subtype'].value == 'hint':
# Hint sample entry
pass
size = self['size'].value - self.current_size//8
if size > 0:
yield RawBytes(self, "extra_data", size)
class SampleDescription(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count", description="Total entries in table")
for i in xrange(self['count'].value):
yield SampleEntry(self, "sample_entry[]")
class SyncSampleTable(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count", description="Number of sync samples")
for i in xrange(self['count'].value):
yield UInt32(self, "sample_number[]")
class SampleSizeTable(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "uniform_size", description="Uniform size of each sample (0 if non-uniform)")
yield UInt32(self, "count", description="Number of samples")
if self['uniform_size'].value == 0:
for i in xrange(self['count'].value):
yield UInt32(self, "sample_size[]")
class CompactSampleSizeTable(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield NullBits(self, "reserved[]", 24)
yield UInt8(self, "field_size", "Size of each entry in this table, in bits")
yield UInt32(self, "count", description="Number of samples")
bitsize = self['field_size'].value
for i in xrange(self['count'].value):
yield Bits(self, "sample_size[]", bitsize)
if self.current_size % 8 != 0:
yield NullBits(self, "padding[]", 8 - (self.current_size % 8))
class SampleToChunkTable(FieldSet):
def createFields(self):
yield UInt8(self, "version")
yield NullBits(self, "flags", 24)
yield UInt32(self, "count", description="Number of samples")
for i in xrange(self['count'].value):
yield UInt32(self, "first_chunk[]")
yield UInt32(self, "samples_per_chunk[]")
yield UInt32(self, "sample_description_index[]")
class Atom(FieldSet):
tag_info = {
"ftyp": (FileType, "file_type", "File type and compatibility"),
# pdin: progressive download information
# pnot: movie preview (old QT spec)
"moov": (AtomList, "movie", "Container for all metadata"),
"mvhd": (MovieHeader, "movie_hdr", "Movie header, overall declarations"),
# clip: movie clipping (old QT spec)
# crgn: movie clipping region (old QT spec)
"trak": (AtomList, "track", "Container for an individual track or stream"),
"tkhd": (TrackHeader, "track_hdr", "Track header, overall information about the track"),
# matt: track matte (old QT spec)
# kmat: compressed matte (old QT spec)
"tref": (AtomList, "tref", "Track reference container"),
"hint": (TrackReferenceType, "hint", "Original media track(s) for this hint track"),
"cdsc": (TrackReferenceType, "cdsc", "Reference to track described by this track"),
"edts": (AtomList, "edts", "Edit list container"),
"elst": (EditList, "elst", "Edit list"),
"load": (Load, "load", "Track loading settings (old QT spec)"),
# imap: Track input map (old QT spec)
"mdia": (AtomList, "media", "Container for the media information in a track"),
"mdhd": (MediaHeader, "media_hdr", "Media header, overall information about the media"),
"hdlr": (Handler, "hdlr", "Handler, declares the media or metadata (handler) type"),
"minf": (AtomList, "minf", "Media information container"),
"vmhd": (VideoMediaHeader, "vmhd", "Video media header, overall information (video track only)"),
"smhd": (SoundMediaHeader, "smhd", "Sound media header, overall information (sound track only)"),
"hmhd": (HintMediaHeader, "hmhd", "Hint media header, overall information (hint track only)"),
# nmhd: Null media header, overall information (some tracks only) (unparsed)
"dinf": (AtomList, "dinf", "Data information, container"),
"dref": (DataReference, "dref", "Data reference, declares source(s) of media data in track"),
"url ": (DataEntryUrl, "url", "URL data reference"),
"urn ": (DataEntryUrn, "urn", "URN data reference"),
"stbl": (AtomList, "stbl", "Sample table, container for the time/space map"),
"stsd": (SampleDescription, "stsd", "Sample descriptions (codec types, initialization etc.)"),
"stts": (SampleDecodeTimeTable, "stts", "decoding time-to-sample delta table"),
"ctts": (SampleCompositionTimeTable, "ctts", "composition time-to-sample offset table"),
"stsc": (SampleToChunkTable, "stsc", "sample-to-chunk, partial data-offset information"),
"stsz": (SampleSizeTable, "stsz", "Sample size table (framing)"),
"stz2": (CompactSampleSizeTable, "stz2", "Compact sample size table (framing)"),
"stco": (ChunkOffsetTable, "stco", "Chunk offset, partial data-offset information"),
"co64": (ChunkOffsetTable64, "co64", "64-bit chunk offset"),
"stss": (SyncSampleTable, "stss", "Sync sample table (random access points)"),
# stsh: shadow sync sample table
# padb: sample padding bits
# stdp: sample degradation priority
# sdtp: independent and disposable samples
# sbgp: sample-to-group
# sgpd: sample group description
# subs: sub-sample information
# ctab color table (old QT spec)
# mvex: movie extends
# mehd: movie extends header
# trex: track extends defaults
# ipmc: IPMP control
"moof": (AtomList, "moof", "movie fragment"),
"mfhd": (MovieFragmentHeader, "mfhd", "movie fragment header"),
# traf: track fragment
# tfhd: track fragment header
# trun: track fragment run
# sdtp: independent and disposable samples
# sbgp: sample-to-group
# subs: sub-sample information
"mfra": (AtomList, "mfra", "movie fragment random access"),
"tfra": (TrackFragmentRandomAccess, "tfra", "track fragment random access"),
"mfro": (MovieFragmentRandomAccessOffset, "mfro", "movie fragment random access offset"),
# mdat: media data container
# free: free space (unparsed)
# skip: free space (unparsed)
"udta": (AtomList, "udta", "User data"),
"meta": (META, "meta", "File metadata"),
"keys": (KeyList, "keys", "Metadata keys"),
## hdlr
## dinf
## dref: data reference, declares source(s) of metadata items
## ipmc: IPMP control
# iloc: item location
# ipro: item protection
# sinf: protection scheme information
# frma: original format
# imif: IPMP information
# schm: scheme type
# schi: scheme information
# iinf: item information
# xml : XML container
# bxml: binary XML container
# pitm: primary item reference
## other tags
"ilst": (ItemList, "ilst", "Item list"),
"trkn": (AtomList, "trkn", "Metadata: Track number"),
"disk": (AtomList, "disk", "Metadata: Disk number"),
"tmpo": (AtomList, "tempo", "Metadata: Tempo"),
"cpil": (AtomList, "cpil", "Metadata: Compilation"),
"gnre": (AtomList, "gnre", "Metadata: Genre"),
"\xa9cpy": (AtomList, "copyright", "Metadata: Copyright statement"),
"\xa9day": (AtomList, "date", "Metadata: Date of content creation"),
"\xa9dir": (AtomList, "director", "Metadata: Movie director"),
"\xa9ed1": (AtomList, "edit1", "Metadata: Edit date and description (1)"),
"\xa9ed2": (AtomList, "edit2", "Metadata: Edit date and description (2)"),
"\xa9ed3": (AtomList, "edit3", "Metadata: Edit date and description (3)"),
"\xa9ed4": (AtomList, "edit4", "Metadata: Edit date and description (4)"),
"\xa9ed5": (AtomList, "edit5", "Metadata: Edit date and description (5)"),
"\xa9ed6": (AtomList, "edit6", "Metadata: Edit date and description (6)"),
"\xa9ed7": (AtomList, "edit7", "Metadata: Edit date and description (7)"),
"\xa9ed8": (AtomList, "edit8", "Metadata: Edit date and description (8)"),
"\xa9ed9": (AtomList, "edit9", "Metadata: Edit date and description (9)"),
"\xa9fmt": (AtomList, "format", "Metadata: Movie format (CGI, digitized, etc.)"),
"\xa9inf": (AtomList, "info", "Metadata: Information about the movie"),
"\xa9prd": (AtomList, "producer", "Metadata: Movie producer"),
"\xa9prf": (AtomList, "performers", "Metadata: Performer names"),
"\xa9req": (AtomList, "requirements", "Metadata: Special hardware and software requirements"),
"\xa9src": (AtomList, "source", "Metadata: Credits for those who provided movie source content"),
"\xa9nam": (AtomList, "name", "Metadata: Name of song or video"),
"\xa9des": (AtomList, "description", "Metadata: File description"),
"\xa9cmt": (AtomList, "comment", "Metadata: General comment"),
"\xa9alb": (AtomList, "album", "Metadata: Album name"),
"\xa9gen": (AtomList, "genre", "Metadata: Custom genre"),
"\xa9ART": (AtomList, "artist", "Metadata: Artist name"),
"\xa9too": (AtomList, "encoder", "Metadata: Encoder"),
"\xa9wrt": (AtomList, "writer", "Metadata: Writer"),
"covr": (AtomList, "cover", "Metadata: Cover art"),
"----": (AtomList, "misc", "Metadata: Miscellaneous"),
"tags": (AtomList, "tags", "File tags"),
"tseg": (AtomList, "tseg", "tseg"),
"chpl": (NeroChapters, "chpl", "Nero chapter data"),
}
tag_handler = [ item[0] for item in tag_info.values() ]
tag_desc = [ item[1] for item in tag_info.values() ]
def createFields(self):
yield UInt32(self, "size")
yield RawBytes(self, "tag", 4)
size = self["size"].value
if size == 1:
# 64-bit size
yield UInt64(self, "size64")
size = self["size64"].value - 16
elif size == 0:
# Unbounded atom
if self._size is None:
size = (self.parent.size - self.parent.current_size) / 8 - 8
else:
size = (self.size - self.current_size) / 8
else:
size = size - 8
if self['tag'].value == 'uuid':
yield GUID(self, "usertag")
tag = self["usertag"].value
size -= 16
else:
tag = self["tag"].value
if size > 0:
if tag in self.tag_info:
handler, name, desc = self.tag_info[tag]
yield handler(self, name, desc, size=size*8)
else:
yield RawBytes(self, "data", size)
def createDescription(self):
if self["tag"].value == "uuid":
return "Atom: uuid: "+self["usertag"].value
return "Atom: %s" % self["tag"].value
class MovFile(Parser):
PARSER_TAGS = {
"id": "mov",
"category": "video",
"file_ext": ("mov", "qt", "mp4", "m4v", "m4a", "m4p", "m4b"),
"mime": (u"video/quicktime", u'video/mp4'),
"min_size": 8*8,
"magic": (("moov", 4*8),),
"description": "Apple QuickTime movie"
}
BRANDS = {
# File type brand => MIME type
'mp41': u'video/mp4',
'mp42': u'video/mp4',
'avc1': u'video/mp4',
'isom': u'video/mp4',
'iso2': u'video/mp4',
}
endian = BIG_ENDIAN
def __init__(self, *args, **kw):
Parser.__init__(self, *args, **kw)
is_mpeg4 = property(lambda self:self.mime_type==u'video/mp4')
def validate(self):
# TODO: Write better code, erk!
size = self.stream.readBits(0, 32, self.endian)
if size < 8:
return "Invalid first atom size"
tag = self.stream.readBytes(4*8, 4)
return tag in ("ftyp", "moov", "free")
def createFields(self):
while not self.eof:
yield Atom(self, "atom[]")
def createMimeType(self):
first = self[0]
try:
# Read brands in the file type
if first['tag'].value != "ftyp":
return None
file_type = first["file_type"]
brand = file_type["brand"].value
if brand in self.BRANDS:
return self.BRANDS[brand]
for field in file_type.array("compat_brand"):
brand = field.value
if brand in self.BRANDS:
return self.BRANDS[brand]
except MissingField:
pass
return u'video/quicktime'
|
hMatoba/Piexif
|
refs/heads/master
|
piexif/_load.py
|
1
|
import struct
import sys
from ._common import *
from ._exceptions import InvalidImageDataError
from ._exif import *
from piexif import _webp
LITTLE_ENDIAN = b"\x49\x49"
def load(input_data, key_is_name=False):
"""
py:function:: piexif.load(input_data, key_is_name=False)
Return Exif data as a dict. The keys (IFD names) are "0th", "Exif", "GPS", "Interop", "1st", and "thumbnail". Except for "thumbnail", each value is a dict of tag/value pairs; the "thumbnail" value is the thumbnail JPEG as bytes.
:param input_data: path to a JPEG, TIFF or WebP file, or the image data itself as bytes
:param bool key_is_name: if True, use tag names instead of numeric tag IDs as dict keys
:return: Exif data({"0th":dict, "Exif":dict, "GPS":dict, "Interop":dict, "1st":dict, "thumbnail":bytes})
:rtype: dict
"""
exif_dict = {"0th":{},
"Exif":{},
"GPS":{},
"Interop":{},
"1st":{},
"thumbnail":None}
exifReader = _ExifReader(input_data)
if exifReader.tiftag is None:
return exif_dict
if exifReader.tiftag[0:2] == LITTLE_ENDIAN:
exifReader.endian_mark = "<"
else:
exifReader.endian_mark = ">"
pointer = struct.unpack(exifReader.endian_mark + "L",
exifReader.tiftag[4:8])[0]
exif_dict["0th"] = exifReader.get_ifd_dict(pointer, "0th")
first_ifd_pointer = exif_dict["0th"].pop("first_ifd_pointer")
if ImageIFD.ExifTag in exif_dict["0th"]:
pointer = exif_dict["0th"][ImageIFD.ExifTag]
exif_dict["Exif"] = exifReader.get_ifd_dict(pointer, "Exif")
if ImageIFD.GPSTag in exif_dict["0th"]:
pointer = exif_dict["0th"][ImageIFD.GPSTag]
exif_dict["GPS"] = exifReader.get_ifd_dict(pointer, "GPS")
if ExifIFD.InteroperabilityTag in exif_dict["Exif"]:
pointer = exif_dict["Exif"][ExifIFD.InteroperabilityTag]
exif_dict["Interop"] = exifReader.get_ifd_dict(pointer, "Interop")
if first_ifd_pointer != b"\x00\x00\x00\x00":
pointer = struct.unpack(exifReader.endian_mark + "L",
first_ifd_pointer)[0]
exif_dict["1st"] = exifReader.get_ifd_dict(pointer, "1st")
if (ImageIFD.JPEGInterchangeFormat in exif_dict["1st"] and
ImageIFD.JPEGInterchangeFormatLength in exif_dict["1st"]):
end = (exif_dict["1st"][ImageIFD.JPEGInterchangeFormat] +
exif_dict["1st"][ImageIFD.JPEGInterchangeFormatLength])
thumb = exifReader.tiftag[exif_dict["1st"][ImageIFD.JPEGInterchangeFormat]:end]
exif_dict["thumbnail"] = thumb
if key_is_name:
exif_dict = _get_key_name_dict(exif_dict)
return exif_dict
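# Editor's sketch, not part of the library: a minimal way to use load() from
# application code. It assumes "sample.jpg" is a JPEG carrying Exif data; TAGS
# comes from the "from ._exif import *" import at the top of this module.
def _example_dump_0th_ifd(filename="sample.jpg"):
    exif_dict = load(filename)
    for tag, value in exif_dict["0th"].items():
        print("%s: %r" % (TAGS["Image"][tag]["name"], value))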
class _ExifReader(object):
def __init__(self, data):
# Prevents "UnicodeWarning: Unicode equal comparison failed" warnings on Python 2
maybe_image = sys.version_info >= (3,0,0) or isinstance(data, str)
if maybe_image and data[0:2] == b"\xff\xd8": # JPEG
segments = split_into_segments(data)
app1 = get_exif_seg(segments)
if app1:
self.tiftag = app1[10:]
else:
self.tiftag = None
elif maybe_image and data[0:2] in (b"\x49\x49", b"\x4d\x4d"): # TIFF
self.tiftag = data
elif maybe_image and data[0:4] == b"RIFF" and data[8:12] == b"WEBP":
self.tiftag = _webp.get_exif(data)
elif maybe_image and data[0:4] == b"Exif": # Exif
self.tiftag = data[6:]
else:
with open(data, 'rb') as f:
magic_number = f.read(2)
if magic_number == b"\xff\xd8": # JPEG
app1 = read_exif_from_file(data)
if app1:
self.tiftag = app1[10:]
else:
self.tiftag = None
elif magic_number in (b"\x49\x49", b"\x4d\x4d"): # TIFF
with open(data, 'rb') as f:
self.tiftag = f.read()
else:
with open(data, 'rb') as f:
header = f.read(12)
if header[0:4] == b"RIFF"and header[8:12] == b"WEBP":
with open(data, 'rb') as f:
file_data = f.read()
self.tiftag = _webp.get_exif(file_data)
else:
raise InvalidImageDataError("Given file is neither JPEG nor TIFF.")
def get_ifd_dict(self, pointer, ifd_name, read_unknown=False):
ifd_dict = {}
tag_count = struct.unpack(self.endian_mark + "H",
self.tiftag[pointer: pointer+2])[0]
offset = pointer + 2
if ifd_name in ["0th", "1st"]:
t = "Image"
else:
t = ifd_name
p_and_value = []
for x in range(tag_count):
pointer = offset + 12 * x
tag = struct.unpack(self.endian_mark + "H",
self.tiftag[pointer: pointer+2])[0]
value_type = struct.unpack(self.endian_mark + "H",
self.tiftag[pointer + 2: pointer + 4])[0]
value_num = struct.unpack(self.endian_mark + "L",
self.tiftag[pointer + 4: pointer + 8]
)[0]
value = self.tiftag[pointer+8: pointer+12]
p_and_value.append((pointer, value_type, value_num, value))
v_set = (value_type, value_num, value, tag)
if tag in TAGS[t]:
ifd_dict[tag] = self.convert_value(v_set)
elif read_unknown:
ifd_dict[tag] = (v_set[0], v_set[1], v_set[2], self.tiftag)
#else:
# pass
if ifd_name == "0th":
pointer = offset + 12 * tag_count
ifd_dict["first_ifd_pointer"] = self.tiftag[pointer:pointer + 4]
return ifd_dict
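# Editor's note (illustration only): each IFD entry read above is a fixed
# 12-byte record -- tag (2 bytes), value type (2 bytes), value count (4 bytes)
# and a 4-byte value-or-offset field -- which is why the loop steps through the
# TIFF block in strides of 12, and why convert_value() below may need to follow
# the offset when the data does not fit into those last 4 bytes.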
def convert_value(self, val):
data = None
t = val[0]
length = val[1]
value = val[2]
if t == TYPES.Byte: # BYTE
if length > 4:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack("B" * length,
self.tiftag[pointer: pointer + length])
else:
data = struct.unpack("B" * length, value[0:length])
elif t == TYPES.Ascii: # ASCII
if length > 4:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = self.tiftag[pointer: pointer+length - 1]
else:
data = value[0: length - 1]
elif t == TYPES.Short: # SHORT
if length > 2:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack(self.endian_mark + "H" * length,
self.tiftag[pointer: pointer+length*2])
else:
data = struct.unpack(self.endian_mark + "H" * length,
value[0:length * 2])
elif t == TYPES.Long: # LONG
if length > 1:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack(self.endian_mark + "L" * length,
self.tiftag[pointer: pointer+length*4])
else:
data = struct.unpack(self.endian_mark + "L" * length,
value)
elif t == TYPES.Rational: # RATIONAL
pointer = struct.unpack(self.endian_mark + "L", value)[0]
if length > 1:
data = tuple(
(struct.unpack(self.endian_mark + "L",
self.tiftag[pointer + x * 8:
pointer + 4 + x * 8])[0],
struct.unpack(self.endian_mark + "L",
self.tiftag[pointer + 4 + x * 8:
pointer + 8 + x * 8])[0])
for x in range(length)
)
else:
data = (struct.unpack(self.endian_mark + "L",
self.tiftag[pointer: pointer + 4])[0],
struct.unpack(self.endian_mark + "L",
self.tiftag[pointer + 4: pointer + 8]
)[0])
elif t == TYPES.SByte: # SIGNED BYTES
if length > 4:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack("b" * length,
self.tiftag[pointer: pointer + length])
else:
data = struct.unpack("b" * length, value[0:length])
elif t == TYPES.Undefined: # UNDEFINED BYTES
if length > 4:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = self.tiftag[pointer: pointer+length]
else:
data = value[0:length]
elif t == TYPES.SShort: # SIGNED SHORT
if length > 2:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack(self.endian_mark + "h" * length,
self.tiftag[pointer: pointer+length*2])
else:
data = struct.unpack(self.endian_mark + "h" * length,
value[0:length * 2])
elif t == TYPES.SLong: # SLONG
if length > 1:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack(self.endian_mark + "l" * length,
self.tiftag[pointer: pointer+length*4])
else:
data = struct.unpack(self.endian_mark + "l" * length,
value)
elif t == TYPES.SRational: # SRATIONAL
pointer = struct.unpack(self.endian_mark + "L", value)[0]
if length > 1:
data = tuple(
(struct.unpack(self.endian_mark + "l",
self.tiftag[pointer + x * 8: pointer + 4 + x * 8])[0],
struct.unpack(self.endian_mark + "l",
self.tiftag[pointer + 4 + x * 8: pointer + 8 + x * 8])[0])
for x in range(length)
)
else:
data = (struct.unpack(self.endian_mark + "l",
self.tiftag[pointer: pointer + 4])[0],
struct.unpack(self.endian_mark + "l",
self.tiftag[pointer + 4: pointer + 8]
)[0])
elif t == TYPES.Float: # FLOAT
if length > 1:
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack(self.endian_mark + "f" * length,
self.tiftag[pointer: pointer+length*4])
else:
data = struct.unpack(self.endian_mark + "f" * length,
value)
elif t == TYPES.DFloat: # DOUBLE
pointer = struct.unpack(self.endian_mark + "L", value)[0]
data = struct.unpack(self.endian_mark + "d" * length,
self.tiftag[pointer: pointer+length*8])
else:
raise ValueError("Exif might be wrong. Got incorrect value " +
"type to decode.\n" +
"tag: " + str(val[3]) + "\ntype: " + str(t))
if isinstance(data, tuple) and (len(data) == 1):
return data[0]
else:
return data
def _get_key_name_dict(exif_dict):
new_dict = {
"0th":{TAGS["Image"][n]["name"]:value for n, value in exif_dict["0th"].items()},
"Exif":{TAGS["Exif"][n]["name"]:value for n, value in exif_dict["Exif"].items()},
"1st":{TAGS["Image"][n]["name"]:value for n, value in exif_dict["1st"].items()},
"GPS":{TAGS["GPS"][n]["name"]:value for n, value in exif_dict["GPS"].items()},
"Interop":{TAGS["Interop"][n]["name"]:value for n, value in exif_dict["Interop"].items()},
"thumbnail":exif_dict["thumbnail"],
}
return new_dict
|
ruansteve/neutron-dynamic-routing
|
refs/heads/master
|
neutron_dynamic_routing/services/bgp/agent/entry.py
|
1
|
# Copyright 2016 Huawei Technologies India Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from oslo_config import cfg
from oslo_service import service
from neutron.agent.common import config
from neutron.agent.linux import external_process
from neutron.common import config as common_config
from neutron import service as neutron_service
from neutron_dynamic_routing.services.bgp.agent import config as bgp_dragent_config
from neutron_dynamic_routing.services.bgp.common import constants as bgp_consts
def register_options():
config.register_agent_state_opts_helper(cfg.CONF)
config.register_root_helper(cfg.CONF)
cfg.CONF.register_opts(bgp_dragent_config.BGP_DRIVER_OPTS, 'BGP')
cfg.CONF.register_opts(bgp_dragent_config.BGP_PROTO_CONFIG_OPTS, 'BGP')
cfg.CONF.register_opts(external_process.OPTS)
def main():
register_options()
common_config.init(sys.argv[1:])
config.setup_logging()
server = neutron_service.Service.create(
binary='neutron-bgp-dragent',
topic=bgp_consts.BGP_DRAGENT,
report_interval=cfg.CONF.AGENT.report_interval,
manager='neutron_dynamic_routing.services.bgp.agent.bgp_dragent.'
'BgpDrAgentWithStateReport')
service.launch(cfg.CONF, server).wait()
|
wweiradio/zulip
|
refs/heads/master
|
zilencer/models.py
|
126
|
from django.db import models
import zerver.models
def get_deployment_by_domain(domain):
return Deployment.objects.get(realms__domain=domain)
class Deployment(models.Model):
realms = models.ManyToManyField(zerver.models.Realm, related_name="_deployments")
is_active = models.BooleanField(default=True)
# TODO: This should really become the public portion of a keypair, and
# it should be settable only with an initial bearer "activation key"
api_key = models.CharField(max_length=32, null=True)
base_api_url = models.CharField(max_length=128)
base_site_url = models.CharField(max_length=128)
@property
def endpoints(self):
return {'base_api_url': self.base_api_url, 'base_site_url': self.base_site_url}
@property
def name(self):
# TODO: This only does the right thing for prod because prod authenticates to
# staging with the zulip.com deployment key, while staging is technically the
# deployment for the zulip.com realm.
# This also doesn't necessarily handle other multi-realm deployments correctly.
return self.realms.order_by('pk')[0].domain
|
zhakui/enjarify
|
refs/heads/master
|
enjarify/jvm/constants/genlookup.py
|
30
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct, itertools
from ..jvmops import *
from ...util import s32
# Create a precomputed lookup table giving the bytecode sequence to generate
# any primitive constant of 3 bytes or less, plus special float values (negative
# infinity requires 4 bytes but is included anyway to simplify things elsewhere)
#
# For example
# 128 -> sipush 128
# -65535 -> iconst_m1 i2c ineg
# 2147483647 -> iconst_m1 iconst_1 iushr
# 1L -> lconst_1
# 127L -> bipush 127 i2l
# 42.0f -> bipush 42 i2f
# -Inf -> dconst_1 dneg dconst_0 ddiv
#
# Lookup table keys are s32/s64 for ints/longs and u32/u64 for floats/doubles
# There are multiple NaN representations, so we normalize NaNs to the
# representation of all 1s (e.g. float NaN = 0xFFFFFFFF)
def u32(x): return x % (1<<32)
def u64(x): return x % (1<<64)
FLOAT_SIGN = 1<<31
FLOAT_NAN = u32(-1)
FLOAT_INF = 0xFF << 23
FLOAT_NINF = FLOAT_INF ^ FLOAT_SIGN
def i2f(x):
if x == 0:
return 0
if x < 0:
return i2f(-x) ^ FLOAT_SIGN
shift = 24 - x.bit_length()
# Don't bother implementing rounding since we'll only convert small ints
# that can be exactly represented anyway
assert(shift >= 0)
mantissa = x << shift
exponent = shift + 127
return (exponent << 23) | mantissa
DOUBLE_SIGN = 1<<63
DOUBLE_NAN = u64(-1)
DOUBLE_INF = 0x7FF << 52
DOUBLE_NINF = DOUBLE_INF ^ DOUBLE_SIGN
def i2d(x):
if x == 0:
return 0
if x < 0:
return i2d(-x) ^ DOUBLE_SIGN
shift = 53 - x.bit_length()
assert(shift >= 0)
mantissa = x << shift
exponent = shift + 1023
return (exponent << 52) | mantissa
# add only if the value is shorter than the current best
def add(d, k, v):
if k not in d or len(v) < len(d[k]):
d[k] = v
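# Editor's illustration, not used by the generator below: add() only replaces an
# entry when the new bytecode sequence is strictly shorter, so the table always
# keeps the cheapest known encoding for each constant key.
def _add_example():
    table = {}
    add(table, 5, bytes([ICONST_0 + 5]))           # 1-byte iconst_5 encoding
    add(table, 5, struct.pack('>Bb', BIPUSH, 5))   # 2-byte bipush 5, ignored
    return table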
if __name__ == "__main__":
# int constants
all_ints = {}
# 1 byte ints
for i in range(-1, 6):
add(all_ints, i, bytes([ICONST_0 + i]))
# Sort for determinism. Otherwise -0x80000000 could be either
# 1 << -1 or -1 << -1, for example
int_1s = sorted({k for k,v in all_ints.items() if len(v) == 1})
# 2 byte ints
for i in range(-128, 128):
add(all_ints, i, struct.pack('>Bb', BIPUSH, i))
for i in int_1s:
add(all_ints, i % 65536, all_ints[i] + bytes([I2C]))
int_2s = sorted({k for k,v in all_ints.items() if len(v) == 2})
# 3 byte ints
for i in range(-32768, 32768):
add(all_ints, i, struct.pack('>Bh', SIPUSH, i))
for i in int_2s:
add(all_ints, i % 65536, all_ints[i] + bytes([I2C]))
add(all_ints, s32(-i), all_ints[i] + bytes([INEG]))
for x, y in itertools.product(int_1s, int_1s):
add(all_ints, s32(x << (y % 32)), all_ints[x] + all_ints[y] + bytes([ISHL]))
add(all_ints, s32(x >> (y % 32)), all_ints[x] + all_ints[y] + bytes([ISHR]))
add(all_ints, s32(u32(x) >> (y % 32)), all_ints[x] + all_ints[y] + bytes([IUSHR]))
# long constants
all_longs = {}
for i in range(0, 2):
add(all_longs, i, bytes([LCONST_0 + i]))
for i in int_1s + int_2s:
add(all_longs, i, all_ints[i] + bytes([I2L]))
# float constants
all_floats = {}
for i in range(0, 2):
add(all_floats, i2f(i), bytes([FCONST_0 + i]))
for i in int_1s + int_2s:
add(all_floats, i2f(i), all_ints[i] + bytes([I2F]))
# hardcode unusual float values for simplicity
add(all_floats, FLOAT_SIGN, bytes([FCONST_0, FNEG])) # -0.0
add(all_floats, FLOAT_NAN, bytes([FCONST_0, FCONST_0, FDIV])) # NaN
add(all_floats, FLOAT_INF, bytes([FCONST_1, FCONST_0, FDIV])) # Inf
add(all_floats, FLOAT_NINF, bytes([FCONST_1, FNEG, FCONST_0, FDIV])) # -Inf
# double constants
all_doubles = {}
for i in range(0, 2):
add(all_doubles, i2d(i), bytes([DCONST_0 + i]))
for i in int_1s + int_2s:
add(all_doubles, i2d(i), all_ints[i] + bytes([I2D]))
add(all_doubles, DOUBLE_SIGN, bytes([DCONST_0, DNEG])) # -0.0
add(all_doubles, DOUBLE_NAN, bytes([DCONST_0, DCONST_0, DDIV])) # NaN
add(all_doubles, DOUBLE_INF, bytes([DCONST_1, DCONST_0, DDIV])) # Inf
add(all_doubles, DOUBLE_NINF, bytes([DCONST_1, DNEG, DCONST_0, DDIV])) # -Inf
print('''
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Autogenerated by genlookup.py - do not edit''')
for name, d in zip('INTS LONGS FLOATS DOUBLES'.split(), [all_ints, all_longs, all_floats, all_doubles]):
print(name + ' = {')
for k, v in sorted(d.items()):
print(' {}: {},'.format(hex(k), v))
print('}')
|
ken-muturi/pombola
|
refs/heads/master
|
pombola/tasks/models.py
|
5
|
import datetime
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db.models import signals
from django.dispatch import receiver
class TaskCategory(models.Model):
slug = models.SlugField(max_length=100, unique=True)
priority = models.PositiveIntegerField(default=0)
def __unicode__(self):
return self.slug
class Meta:
ordering = ["-priority", "slug" ]
verbose_name_plural = "Task categories"
class Task(models.Model):
# link to other objects using the ContentType system
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
category = models.ForeignKey(TaskCategory)
created = models.DateTimeField(auto_now_add=True)
defer_until = models.DateTimeField(auto_now_add=True)
priority = models.PositiveIntegerField() # defaulted in overloaded .save() method
attempt_count = models.PositiveIntegerField(default=0)
log = models.TextField(blank=True)
note = models.TextField(blank=True)
def clean(self):
"""If needed get the priority from the category"""
if self.priority is None:
self.priority = self.category.priority
def __unicode__(self):
return "%s for %s" % ( self.category.slug, self.content_object )
@classmethod
def objects_for(cls, obj):
"""Return qs for all tasks for the given object"""
# not all primary keys are ints. Check that we can represent them as such
raw_id = obj.pk
if str(raw_id).isdigit():
id = int(raw_id)
else:
return cls.objects.none()
return cls.objects.filter(
content_type = ContentType.objects.get_for_model(obj),
object_id = id,
)
@classmethod
def objects_to_do(cls):
"""Return qs for all tasks that need to be done"""
return (
cls
.objects
.filter( defer_until__lte=datetime.datetime.now() )
)
@classmethod
def call_generate_tasks_on_if_possible(cls, obj):
"""call generate_tasks on the given object and process the results"""
if hasattr( obj, 'generate_tasks' ):
return cls.call_generate_tasks_on( obj )
return False
@classmethod
def call_generate_tasks_on(cls, obj):
"""call generate_tasks on the given object and process the results"""
slug_list = obj.generate_tasks()
cls.update_for_object( obj, slug_list )
return True
@classmethod
def update_for_object(cls, obj, slug_list):
"""Create specified tasks for this objects, delete ones that are missing"""
# get the details needed to create a generic
content_type = ContentType.objects.get_for_model(obj)
object_id = obj.pk
# note all tasks seen so we can delete redundant existing ones
seen_tasks = []
# check that we have tasks for all codes requested
for slug in slug_list:
category, created = TaskCategory.objects.get_or_create(slug=slug)
task, created = Task.objects.get_or_create(
content_type = content_type,
object_id = object_id,
category = category,
defaults = {
'priority': category.priority,
},
)
seen_tasks.append( slug )
# go through all tasks in db and delete redundant ones
for task in cls.objects_for(obj):
if task.category.slug in seen_tasks: continue
task.delete()
pass
def add_to_log(self, msg):
"""append msg to the log entry"""
current_log = self.log
if current_log:
self.log = current_log + "\n" + msg
else:
self.log = msg
return True
def defer_by_days(self, days):
"""Change the defer_until to now + days"""
new_defer_until = datetime.datetime.now() + datetime.timedelta( days=days )
self.defer_until = new_defer_until
return True
def defer_briefly_if_needed(self):
"""If task's defer_until to now + 20 minutes (if needed)"""
new_defer_until = datetime.datetime.now() + datetime.timedelta( minutes=20 )
if self.defer_until < new_defer_until:
self.defer_until = new_defer_until
self.save()
return True
class Meta:
ordering = ["-priority", "attempt_count", "defer_until" ]
# FIXME - add http://docs.djangoproject.com/en/dev/ref/models/options/#unique-together
# NOTE - these two signal catchers may prove to be performance bottlenecks in
# future. If so the check to see if there is a generate_tasks method might be
# better replaced with something else...
@receiver( signals.post_delete )
def delete_related_tasks(sender, instance, **kwargs):
Task.objects_for(instance).delete()
@receiver( signals.post_save )
def post_save_call_generate_tasks(sender, instance, **kwargs):
return Task.call_generate_tasks_on_if_possible( instance )
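# Editor's sketch, not part of this app: thanks to the signal handlers above, any
# model gains task tracking simply by defining generate_tasks() and returning the
# category slugs that currently apply. Hypothetical model and slug names:
#
#   class Person(models.Model):
#       email = models.EmailField(blank=True)
#
#       def generate_tasks(self):
#           slugs = []
#           if not self.email:
#               slugs.append('find-missing-email')
#           return slugs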
|
mhbu50/erpnext
|
refs/heads/develop
|
erpnext/erpnext_integrations/doctype/gocardless_settings/test_gocardless_settings.py
|
19
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
class TestGoCardlessSettings(unittest.TestCase):
pass
|
blossomica/airmozilla
|
refs/heads/master
|
airmozilla/manage/event_hit_stats.py
|
15
|
import logging
import datetime
from django.utils import timezone
from airmozilla.main.models import Event, EventHitStats
from . import vidly
# this is what the cron job fires every X minutes
def update(cap=10, swallow_errors=False):
count = 0
# first do those that have never been updated
_stats_ids_qs = (
EventHitStats.objects.all()
.values_list('event_id', flat=True)
)
qs = (
Event.objects
.archived()
.filter(template__name__contains='Vid.ly',
template_environment__contains='"tag"')
.exclude(id__in=_stats_ids_qs)
)
for event in qs.order_by('created')[:cap]: # oldest first
environment = event.template_environment or {}
tag = environment.get('tag')
if not tag or tag == 'None':
logging.warn("Event %r does not have a Vid.ly tag", event.title)
continue
try:
hits = vidly.statistics(tag)['total_hits']
count += 1
except:
if not swallow_errors:
raise
logging.error(
"Unable to download statistics for %r (tag: %s)",
event.title, tag
)
hits = 0
EventHitStats.objects.create(
event=event,
total_hits=hits,
shortcode=tag
)
def update_qs(qs):
count = 0
# oldest first
for stat in qs.order_by('modified')[:cap]:
total_hits_before = stat.total_hits
# if the event was modified more recently than the EventHitStats,
# re-read the tag in case it has changed
if stat.event.modified > stat.modified:
environment = stat.event.template_environment or {}
tag = environment.get('tag')
if not tag:
logging.warn(
"Event %r does not have a Vid.ly tag",
stat.event.title
)
stat.delete()
continue
stat.shortcode = tag
shortcode = stat.shortcode
try:
hits = vidly.statistics(shortcode)['total_hits']
count += 1
except:
if not swallow_errors:
raise
logging.error(
"Unable to download statistics for %r (tag: %s)",
stat.event.title, shortcode
)
# we'll come back some other time
hits = total_hits_before
if hits >= total_hits_before:
stat.total_hits = hits
stat.save()
return count
# Old ones only get updated once a week
now = timezone.now()
week_ago = now - datetime.timedelta(days=7)
qs = (
EventHitStats.objects
.filter(event__modified__lt=week_ago)
.filter(modified__lt=week_ago)
)
count += update_qs(qs)
# Less old ones only get updated once a day
day_ago = now - datetime.timedelta(days=1)
qs = (
EventHitStats.objects
.filter(event__modified__lt=day_ago,
event__modified__gt=week_ago)
.filter(modified__lt=day_ago)
)
count += update_qs(qs)
# Recent ones get updated every hour
hour_ago = now - datetime.timedelta(hours=1)
qs = (
EventHitStats.objects
.filter(event__modified__lt=hour_ago,
event__modified__gt=day_ago)
.filter(modified__lt=hour_ago)
)
count += update_qs(qs)
return count
|
Manouchehri/metagoofil
|
refs/heads/master
|
hachoir_core/field/__init__.py
|
94
|
# Field classes
from hachoir_core.field.field import Field, FieldError, MissingField, joinPath
from hachoir_core.field.bit_field import Bit, Bits, RawBits
from hachoir_core.field.byte_field import Bytes, RawBytes
from hachoir_core.field.sub_file import SubFile, CompressedField
from hachoir_core.field.character import Character
from hachoir_core.field.integer import (
Int8, Int16, Int24, Int32, Int64,
UInt8, UInt16, UInt24, UInt32, UInt64,
GenericInteger)
from hachoir_core.field.enum import Enum
from hachoir_core.field.string_field import (GenericString,
String, CString, UnixLine,
PascalString8, PascalString16, PascalString32)
from hachoir_core.field.padding import (PaddingBits, PaddingBytes,
NullBits, NullBytes)
# Functions
from hachoir_core.field.helper import (isString, isInteger,
createPaddingField, createNullField, createRawField,
writeIntoFile, createOrphanField)
# FieldSet classes
from hachoir_core.field.fake_array import FakeArray
from hachoir_core.field.basic_field_set import (BasicFieldSet,
ParserError, MatchError)
from hachoir_core.field.generic_field_set import GenericFieldSet
from hachoir_core.field.seekable_field_set import SeekableFieldSet, RootSeekableFieldSet
from hachoir_core.field.field_set import FieldSet
from hachoir_core.field.static_field_set import StaticFieldSet
from hachoir_core.field.parser import Parser
from hachoir_core.field.vector import GenericVector, UserVector
# Complex types
from hachoir_core.field.float import Float32, Float64, Float80
from hachoir_core.field.timestamp import (GenericTimestamp,
TimestampUnix32, TimestampUnix64, TimestampMac32, TimestampUUID60, TimestampWin64,
DateTimeMSDOS32, TimeDateMSDOS32, TimedeltaWin64)
# Special Field classes
from hachoir_core.field.link import Link, Fragment
available_types = (
Bit, Bits, RawBits,
Bytes, RawBytes,
SubFile,
Character,
Int8, Int16, Int24, Int32, Int64,
UInt8, UInt16, UInt24, UInt32, UInt64,
String, CString, UnixLine,
PascalString8, PascalString16, PascalString32,
Float32, Float64,
PaddingBits, PaddingBytes,
NullBits, NullBytes,
TimestampUnix32, TimestampMac32, TimestampWin64,
DateTimeMSDOS32, TimeDateMSDOS32,
# GenericInteger, GenericString,
)
|
thaim/ansible
|
refs/heads/fix-broken-link
|
lib/ansible/plugins/terminal/asa.py
|
42
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_text, to_bytes
from ansible.plugins.terminal import TerminalBase
class TerminalModule(TerminalBase):
terminal_stdout_re = [
re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$")
]
terminal_stderr_re = [
re.compile(br"error:", re.I),
re.compile(br"Removing.* not allowed, it is being used")
]
def on_open_shell(self):
if self._get_prompt().strip().endswith(b'#'):
self.disable_pager()
def disable_pager(self):
cmd = {u'command': u'no terminal pager'}
try:
self._exec_cli_command(u'no terminal pager')
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to disable terminal pager')
def on_become(self, passwd=None):
if self._get_prompt().strip().endswith(b'#'):
return
cmd = {u'command': u'enable'}
if passwd:
# Note: python-3.5 cannot combine u"" and r"" together. Thus make
# an r string and use to_text to ensure it's text on both py2 and py3.
cmd[u'prompt'] = to_text(r"[\r\n]?[Pp]assword: $", errors='surrogate_or_strict')
cmd[u'answer'] = passwd
try:
self._exec_cli_command(to_bytes(json.dumps(cmd), errors='surrogate_or_strict'))
except AnsibleConnectionFailure:
raise AnsibleConnectionFailure('unable to elevate privilege to enable mode')
self.disable_pager()
|
meshulam/chicago-justice
|
refs/heads/master
|
cjp/crimedata/views.py
|
1
|
from crimedata.models import CrimeReport, LookupCRCrimeDateMonth, LookupCRCode, LookupCRCrimeType, LookupCRSecondary, LookupCRBeat, LookupCRWard, LookupCRNibrs
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django import forms
from django.http import HttpResponse
from django.db.models import Q, Max, Min
from django.shortcuts import render_to_response, redirect
from django.shortcuts import render
from django.template import RequestContext
from django.db import connection
import csv
import datetime
import time
class CrimeReportForm(forms.Form):
caseNum = forms.CharField(label='case num', required=False, max_length=20)
codes = LookupCRCode.objects.all()
codeList = tuple((c.web_code, c.web_code) for c in codes)
code = forms.ChoiceField(label='code',
choices=(('', ''), ) + codeList,
required=False)
crimeTypes = LookupCRCrimeType.objects.all()
crimeTypeList = tuple((c.web_crime_type, c.web_crime_type) for c in crimeTypes)
crimeType = forms.ChoiceField(label='crime type',
choices=(('', ''), ) + crimeTypeList,
required=False)
secondaries = LookupCRSecondary.objects.all()
secondaryList = tuple((c.web_secondary, c.web_secondary) for c in secondaries)
secondary = forms.ChoiceField(label='secondary',
choices=(('', ''), ) + secondaryList,
required=False)
location = forms.CharField(label='location', required=False, max_length=100)
block = forms.CharField(label='block', required=False, max_length=200)
startDate = forms.DateField(label='start date',
widget=forms.DateInput(format="%m/%d/%Y"),
required=False)
endDate = forms.DateField(label='end date',
widget=forms.DateInput(format="%m/%d/%Y"),
required=False)
arrest = forms.ChoiceField(label='arrest',
choices=(('', ''), ('Y', 'Yes'), ('N', 'No')),
required=False)
domestic = forms.ChoiceField(label='domestic',
choices=(('', ''), ('Y', 'Yes'), ('N', 'No')),
required=False)
beats = LookupCRBeat.objects.all()
beatList = tuple((c.web_beat, c.web_beat) for c in beats)
beat = forms.ChoiceField(label='beat',
choices=(('', ''), ) + beatList,
required=False)
wards = LookupCRWard.objects.all()
wardList = tuple((c.web_ward, c.web_ward) for c in wards)
ward = forms.ChoiceField(label='ward',
choices=(('', ''), ) + wardList,
required=False)
nibrss = LookupCRNibrs.objects.all()
nibrsList = tuple((c.web_nibrs, c.web_nibrs) for c in nibrss)
nibrs = forms.ChoiceField(label='nibrs',
choices=(('', ''), ) + nibrsList,
required=False)
class CrimeReportExportForm(forms.Form):
months = ["%d-%02d" % (m.year, m.month) for m in LookupCRCrimeDateMonth.objects.all()]
months = tuple((m, m) for m in months)
dateGroup = forms.ChoiceField(label='Month',
choices=((('', ''), ) + months),
required=False)
def crimeReportList(request):
form = CrimeReportForm(request.POST)
clearSearch = request.POST.get('clearSearch', "False") == "False"
newSearch = request.POST.get('newSearch', "False") == "True"
if clearSearch and newSearch and form.is_valid():
caseNum = form.cleaned_data['caseNum'].strip()
block = form.cleaned_data['block'].strip().upper()
location = form.cleaned_data['location'].strip().upper()
arrest = form.cleaned_data['arrest'].strip().upper()
if arrest not in ('Y', 'N'):
arrest = None
code = form.cleaned_data['code']
crimeType = form.cleaned_data['crimeType']
secondary = form.cleaned_data['secondary']
domestic = form.cleaned_data['domestic']
if domestic not in ('Y', 'N'):
domestic = None
beat = form.cleaned_data['beat']
ward = form.cleaned_data['ward']
nibrs = form.cleaned_data['nibrs']
startDate = form.cleaned_data['startDate']
endDate = form.cleaned_data['endDate']
page = 1
request.session['crimeData_hasSearch'] = True
elif clearSearch and request.session.get('crimeData_hasSearch', False):
request.session['crimeData_hasSearch'] = True
caseNum = request.session['crimeData_caseNum']
block = request.session['crimeData_block']
location = request.session['crimeData_location']
arrest = request.session['crimeData_arrest']
code = request.session['crimeData_code']
crimeType = request.session['crimeData_crimeType']
secondary = request.session['crimeData_secondary']
domestic = request.session['crimeData_domestic']
beat = request.session['crimeData_beat']
ward = request.session['crimeData_ward']
nibrs = request.session['crimeData_nibrs']
startDate = request.session['crimeData_startDate']
endDate = request.session['crimeData_endDate']
form = CrimeReportForm({
'caseNum' : caseNum,
'block' : block,
'location' : location,
'arrest' : arrest,
'code' : code,
'crimeType' : crimeType,
'secondary' : secondary,
'domestic' : domestic,
'beat' : beat,
'ward' : ward,
'nibrs' : nibrs,
'startDate' : startDate,
'endDate' : endDate,
})
try:
page = int(request.POST.get('page', 'invalid'))
except ValueError:
page = request.session.get('crimeData_page', 1)
else:
form = CrimeReportForm()
caseNum = None
block = None
location = None
arrest = None
code = None
crimeType = None
secondary = None
domestic = None
beat = None
ward = None
nibrs = None
startDate = None
endDate = None
# 3/2/2012 John Nicholson
# added so paging works even when there is no search
try:
page = int(request.POST.get('page', 'invalid'))
except ValueError:
page = request.session.get('crimeData_page', 1)
request.session['crimeData_hasSearch'] = False
request.session['crimeData_caseNum'] = caseNum
request.session['crimeData_block'] = block
request.session['crimeData_location'] = location
request.session['crimeData_arrest'] = arrest
request.session['crimeData_code'] = code
request.session['crimeData_crimeType'] = crimeType
request.session['crimeData_secondary'] = secondary
request.session['crimeData_domestic'] = domestic
request.session['crimeData_beat'] = beat
request.session['crimeData_ward'] = ward
request.session['crimeData_nibrs'] = nibrs
request.session['crimeData_startDate'] = startDate
request.session['crimeData_endDate'] = endDate
request.session['crimeData_page'] = page
crimeReport_list = CrimeReport.objects.all().order_by('-web_date')
if caseNum:
crimeReport_list = crimeReport_list.filter(web_case_num__contains=caseNum)
if block:
crimeReport_list = crimeReport_list.filter(web_block__contains=block)
if location:
crimeReport_list = crimeReport_list.filter(web_location__contains=location)
if arrest:
crimeReport_list = crimeReport_list.filter(web_arrest=arrest)
if code:
crimeReport_list = crimeReport_list.filter(web_code=code)
if crimeType:
crimeReport_list = crimeReport_list.filter(web_crime_type=crimeType)
if secondary:
crimeReport_list = crimeReport_list.filter(web_secondary=secondary)
if domestic:
crimeReport_list = crimeReport_list.filter(web_domestic=domestic)
if beat:
crimeReport_list = crimeReport_list.filter(web_beat=beat)
if ward:
crimeReport_list = crimeReport_list.filter(web_ward=ward)
if nibrs:
crimeReport_list = crimeReport_list.filter(web_nibrs=nibrs)
if startDate:
startDate = datetime.datetime.strptime("%s 00:00:00" % startDate, "%Y-%m-%d %H:%M:%S")
crimeReport_list = crimeReport_list.filter(web_date__gte=startDate)
if endDate:
endDate = datetime.datetime.strptime("%s 23:59:59" % endDate, "%Y-%m-%d %H:%M:%S")
crimeReport_list = crimeReport_list.filter(web_date__lte=endDate)
    paginator = Paginator(crimeReport_list, 20)  # Show 20 crime reports per page
try:
crimeReports = paginator.page(page)
except (EmptyPage, InvalidPage):
crimeReports = paginator.page(paginator.num_pages)
dateRange = CrimeReport.objects.all().aggregate(minDate = Min('web_date'),
maxDate = Max('web_date'))
data = {'crimeReports' : crimeReports,
'form' : form,
'exportForm' : CrimeReportExportForm(),
'dateRange' : dateRange,
}
#return render_to_response('crimedata/crimeReportList.html', data,
# context_instance=RequestContext(request))
return render(request, 'crimedata/crimeReportList.html', data)
def crimeReportView(request, crimeReportId):
try:
crimeReport = CrimeReport.objects.get(id = crimeReportId)
    except (CrimeReport.DoesNotExist, ValueError):
crimeReport = None
template = 'crimedata/crimeReport.html'
data = {'crimeReport' : crimeReport}
return render_to_response(template, data,
context_instance=RequestContext(request))
def crimeReportExport(request):
'''
    Export one month of crime report data as a CSV attachment.
'''
dateGroup = request.GET.get('dateGroup', '').strip()
if not dateGroup:
return HttpResponse()
crimeReport_list = CrimeReport.objects.all()
try:
result = time.strptime(dateGroup, '%Y-%m')
startDay = datetime.date(*result[:3])
if startDay.month != 12:
endDay = datetime.date(startDay.year, startDay.month + 1, 1)
else:
endDay = datetime.date(startDay.year + 1, 1, 1)
crimeReport_list = crimeReport_list.filter(crime_date__gte = startDay).filter(crime_date__lt = endDay)
except Exception, e:
# not a valid month
return HttpResponse()
crimeReport_list = crimeReport_list.order_by('web_date')
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename=crimeReport.csv'
fields = [
'web_case_num',
'web_date',
'web_block',
'web_code',
'web_crime_type',
'web_secondary',
'web_arrest',
'web_location',
'web_domestic',
'web_beat',
'web_ward',
'web_nibrs',
]
if request.user.is_authenticated():
fields += [
'orig_rd',
'orig_dateocc',
'orig_stnum',
'orig_stdir',
'orig_street',
'orig_curr_iucr',
'orig_fbi_descr',
'orig_fbi_cd',
'orig_description',
'orig_status',
'orig_location_descr',
'orig_domestic_i',
'orig_ward',
'orig_beat_num',
'crime_date',
'crime_time',
'geocode_longitude',
'geocode_latitude',
]
    writer = csv.writer(response, quoting=csv.QUOTE_ALL)
writer.writerow(fields)
cnt = 0
for cr in crimeReport_list.iterator():
data = [str(getattr(cr, fname)) for fname in fields]
writer.writerow(data)
#print connection.queries[0]['sql']
return response
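# A minimal sketch (not part of the original views) of exercising the export
# view with Django's test client. The URL path below is an assumption; use
# whatever this project's urls.py actually maps to crimeReportExport.
#
#     from django.test.client import Client
#     response = Client().get('/crimedata/export/', {'dateGroup': '2012-03'})
#     open('crimeReport.csv', 'wb').write(response.content)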
|
mdanielwork/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyMethodMayBeStaticInspection/notImplemented.py
|
83
|
__author__ = 'ktisha'
class A:
def pop(self):
raise NotImplementedError()
|
ceph/autotest
|
refs/heads/master
|
client/common_lib/test_utils/mock_demo_MUT.py
|
12
|
from mock_demo import E
def do_create_stuff():
obj = E(val=7)
print obj.method1()
|
MasterFacilityList/mfl_api
|
refs/heads/develop
|
users/tasks.py
|
1
|
import pydoc
import logging
from django.core.mail import mail_admins
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from common.models import ErrorQueue
from users.models import send_email_on_signup, MflUser
LOGGER = logging.getLogger(__name__)
@periodic_task(
run_every=(crontab(minute='*/1')),
name="try_sending_failed_emails",
ignore_result=True)
def resend_user_signup_emails():
"""
Resends emails that failed to be sent during user registration
"""
objects_with_errors = ErrorQueue.objects.filter(
error_type='SEND_EMAIL_ERROR')
for obj in objects_with_errors:
obj_path = "{0}.models.{1}".format(obj.app_label, obj.model_name)
model = pydoc.locate(obj_path)
try:
instance = model.objects.get(id=obj.object_pk)
user_id = instance.id
email = instance.email
first_name = instance.first_name
employee_number = instance.employee_number
mail_sent = send_email_on_signup(
user_id, email, first_name, employee_number)
if mail_sent:
obj.delete()
            else:
                obj.retries = obj.retries + 1
                obj.save()
                if obj.retries > 2:
                    mail_admins(
                        subject="Send User Email Error",
                        message="Sending emails to users on registration is "
                                "failing. Check the email settings in the "
                                "environment"
                    )
except MflUser.DoesNotExist:
LOGGER.info("The user has been deleted")
|
deroneriksson/incubator-systemml
|
refs/heads/master
|
scripts/perftest/python/datagen.py
|
13
|
#!/usr/bin/env python3
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
import itertools
from os.path import join
from utils_misc import split_rowcol, config_writer, mat_type_check
# This file contains configuration settings for data generation
DATA_FORMAT = 'binary'
MATRIX_TYPE_DICT = {'dense': '0.9',
'sparse': '0.01'}
FAMILY_NO_MATRIX_TYPE = ['clustering', 'stats1', 'stats2']
def multinomial_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['multinomial', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
numSamples = row
numFeatures = col
sparsity = MATRIX_TYPE_DICT[matrix_type]
num_categories = '150'
intercept = '0'
X = join(datagen_write, 'X.data')
Y = join(datagen_write, 'Y.data')
fmt = DATA_FORMAT
config = [numSamples, numFeatures, sparsity, num_categories, intercept,
X, Y, fmt, '1']
config_writer(save_path + '.json', config)
return save_path
def binomial_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['binomial', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
numSamples = row
numFeatures = col
maxFeatureValue = '5'
maxWeight = '5'
loc_weights = join(datagen_write, 'weight.data')
loc_data = join(datagen_write, 'X.data')
loc_labels = join(datagen_write, 'Y.data')
noise = '1'
intercept = '0'
sparsity = MATRIX_TYPE_DICT[matrix_type]
tranform_labels = '1'
fmt = DATA_FORMAT
config = [numSamples, numFeatures, maxFeatureValue, maxWeight, loc_weights, loc_data,
loc_labels, noise, intercept, sparsity, fmt, tranform_labels]
config_writer(save_path + '.json', config)
return save_path
def regression1_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['regression1', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
numSamples = row
numFeatures = col
maxFeatureValue = '5'
maxWeight = '5'
loc_weights = join(datagen_write, 'weight.data')
loc_data = join(datagen_write, 'X.data')
loc_labels = join(datagen_write, 'Y.data')
noise = '1'
intercept = '0'
sparsity = MATRIX_TYPE_DICT[matrix_type]
tranform_labels = '1'
fmt = DATA_FORMAT
config = [numSamples, numFeatures, maxFeatureValue, maxWeight, loc_weights, loc_data,
loc_labels, noise, intercept, sparsity, fmt, tranform_labels]
config_writer(save_path + '.json', config)
return save_path
def regression2_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['regression2', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
numSamples = row
numFeatures = col
maxFeatureValue = '5'
maxWeight = '5'
loc_weights = join(datagen_write, 'weight.data')
loc_data = join(datagen_write, 'X.data')
loc_labels = join(datagen_write, 'Y.data')
noise = '1'
intercept = '0'
sparsity = MATRIX_TYPE_DICT[matrix_type]
tranform_labels = '1'
fmt = DATA_FORMAT
config = [numSamples, numFeatures, maxFeatureValue, maxWeight, loc_weights, loc_data,
loc_labels, noise, intercept, sparsity, fmt, tranform_labels]
config_writer(save_path + '.json', config)
return save_path
def clustering_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['clustering', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
X = join(datagen_write, 'X.data')
Y = join(datagen_write, 'Y.data')
YbyC = join(datagen_write, 'YbyC.data')
C = join(datagen_write, 'C.data')
nc = '50'
dc = '10.0'
dr = '1.0'
fbf = '100.0'
cbf = '100.0'
config = dict(nr=row, nf=col, nc=nc, dc=dc, dr=dr, fbf=fbf, cbf=cbf, X=X, C=C, Y=Y,
YbyC=YbyC, fmt=DATA_FORMAT)
config_writer(save_path + '.json', config)
return save_path
def stats1_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['stats1', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
DATA = join(datagen_write, 'X.data')
TYPES = join(datagen_write, 'types')
TYPES1 = join(datagen_write, 'set1.types')
TYPES2 = join(datagen_write, 'set2.types')
INDEX1 = join(datagen_write, 'set1.indices')
INDEX2 = join(datagen_write, 'set2.indices')
MAXDOMAIN = '1100'
SETSIZE = '20'
LABELSETSIZE = '10'
# NC should be less than C and more than num0
# NC = 10 (old value)
# num0 = NC/2
# num0 < NC < C
# NC = C/2
NC = int(int(col)/2)
config = dict(R=row, C=col, NC=NC, MAXDOMAIN=MAXDOMAIN, DATA=DATA, TYPES=TYPES, SETSIZE=SETSIZE,
LABELSETSIZE=LABELSETSIZE, TYPES1=TYPES1, TYPES2=TYPES2, INDEX1=INDEX1,
INDEX2=INDEX2, fmt=DATA_FORMAT)
config_writer(save_path + '.json', config)
return save_path
def stats2_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['stats2', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
D = join(datagen_write, 'X.data')
Xcid = join(datagen_write, 'Xcid.data')
Ycid = join(datagen_write, 'Ycid.data')
A = join(datagen_write, 'A.data')
config = dict(nr=row, nf=col, D=D, Xcid=Xcid, Ycid=Ycid,
A=A, fmt=DATA_FORMAT)
config_writer(save_path + '.json', config)
return save_path
def dimreduction_datagen(matrix_dim, matrix_type, datagen_dir, config_dir):
path_name = '.'.join(['dimreduction', matrix_type, str(matrix_dim)])
datagen_write = join(datagen_dir, path_name)
save_path = join(config_dir, path_name)
row, col = split_rowcol(matrix_dim)
R = row
C = col
OUT = join(datagen_write, 'X.data')
config = dict(R=R, C=C, OUT=OUT, FMT=DATA_FORMAT)
config_writer(save_path + '.json', config)
return save_path
def config_packets_datagen(algo_payload, matrix_type, matrix_shape, datagen_dir, dense_algos, config_dir):
"""
This function has two responsibilities. Generate the configuration files for
datagen algorithms and return a dictionary that will be used for execution.
algo_payload : List of tuples
The first tuple index contains algorithm name and the second index contains
family type.
    matrix_type: String
    Type of matrix to generate e.g. dense, sparse, all
    matrix_shape: String
    Shape of matrix to generate e.g. 100k_10
    datagen_dir: String
    Path of the data generation directory
    dense_algos: List
    Algorithms that support only dense matrix type
    config_dir: String
    Location to store the generated configuration files
    return: Dictionary {string: list}
    This dictionary contains the algorithm families to be executed as keys and
    lists of paths to the configuration json files to execute as values.
"""
config_bundle = {}
distinct_families = set(map(lambda x: x[1], algo_payload))
# Cross Product of all configurations
for current_family in distinct_families:
current_matrix_type = mat_type_check(current_family, matrix_type, dense_algos)
config = list(itertools.product(matrix_shape, current_matrix_type))
# clustering : [[10k_1, dense], [10k_2, dense], ...]
config_bundle[current_family] = config
config_packets = {}
for current_family, configs in config_bundle.items():
config_packets[current_family] = []
for size, type in configs:
family_func = current_family.lower() + '_datagen'
conf_path = globals()[family_func](size, type, datagen_dir, config_dir)
config_packets[current_family].append(conf_path)
return config_packets
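# A minimal usage sketch (not part of the original script): write a single
# clustering datagen configuration. The directory paths are placeholders; the
# '10k_1' shape string follows the convention shown in the comments above.
if __name__ == '__main__':
    conf_path = clustering_datagen('10k_1', 'dense', '/tmp/datagen', '/tmp/config')
    print('Wrote %s.json' % conf_path)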
|
Tchanders/socorro
|
refs/heads/master
|
socorro/lib/task_manager.py
|
9
|
import time
import threading
import os
from configman import RequiredConfig, Namespace
from configman.converters import class_converter
#------------------------------------------------------------------------------
def default_task_func(a_param):
"""This default consumer function just doesn't do anything. It is a
placeholder just to demonstrate the api and not really for any other
purpose"""
pass
#------------------------------------------------------------------------------
def default_iterator():
"""This default producer's iterator yields the integers 0 through 9 and
    then yields None forever thereafter. It is a placeholder to demonstrate
the api and not used for anything in a real system."""
for x in range(10):
yield ((x,), {})
while True:
yield None
#------------------------------------------------------------------------------
def respond_to_SIGTERM(signal_number, frame, logger=None):
""" these classes are instrumented to respond to a KeyboardInterrupt by
    cleanly shutting down. This function, when given as a handler for
a SIGTERM event, will make the program respond to a SIGTERM as neatly
as it responds to ^C.
This function is used in registering a signal handler from the signal
module. It should be registered for any signal for which the desired
behavior is to kill the application:
        signal.signal(signal.SIGTERM, respond_to_SIGTERM)
        signal.signal(signal.SIGHUP, respond_to_SIGTERM)
parameters:
signal_number - unused in this function but required by the api.
frame - unused in this function but required by the api.
"""
if logger:
logger.info('detected SIGTERM')
raise KeyboardInterrupt
#==============================================================================
class TaskManager(RequiredConfig):
required_config = Namespace()
required_config.add_option(
'idle_delay',
default=7,
doc='the delay in seconds if no job is found'
)
required_config.add_option(
'quit_on_empty_queue',
default=False,
doc='stop if the queue is empty'
)
#--------------------------------------------------------------------------
def __init__(self, config,
job_source_iterator=default_iterator,
task_func=default_task_func):
"""
parameters:
job_source_iterator - an iterator to serve as the source of data.
it can be of the form of a generator or
iterator; a function that returns an
                                  iterator; an instance of an iterable object;
or a class that when instantiated with a
config object can be iterated. The iterator
must yield a tuple consisting of a
function's tuple of args and, optionally, a
mapping of kwargs.
Ex: (('a', 17), {'x': 23})
task_func - a function that will accept the args and kwargs yielded
by the job_source_iterator"""
super(TaskManager, self).__init__()
self.config = config
self._pid = os.getpid()
self.logger = config.logger
self.job_param_source_iter = job_source_iterator
self.task_func = task_func
self.quit = False
self.logger.debug('TaskManager finished init')
#--------------------------------------------------------------------------
def quit_check(self):
"""this is the polling function that the threads periodically look at.
If they detect that the quit flag is True, then a KeyboardInterrupt
is raised which will result in the threads dying peacefully"""
if self.quit:
raise KeyboardInterrupt
#--------------------------------------------------------------------------
def _get_iterator(self):
"""The iterator passed in can take several forms: a class that can be
instantiated and then iterated over; a function that when called
returns an iterator; an actual iterator/generator or an iterable
collection. This function sorts all that out and returns an iterator
that can be used"""
try:
return self.job_param_source_iter(self.config)
except TypeError:
try:
return self.job_param_source_iter()
except TypeError:
return self.job_param_source_iter
#--------------------------------------------------------------------------
def _responsive_sleep(self, seconds, wait_log_interval=0, wait_reason=''):
"""When there is litte work to do, the queuing thread sleeps a lot.
It can't sleep for too long without checking for the quit flag and/or
logging about why it is sleeping.
parameters:
seconds - the number of seconds to sleep
wait_log_interval - while sleeping, it is helpful if the thread
periodically announces itself so that we
know that it is still alive. This number is
the time in seconds between log entries.
            wait_reason - this is the explanation of why the thread is
sleeping. This is likely to be a message like:
'there is no work to do'.
        This was also partially motivated by older versions of Python being
        unable to KeyboardInterrupt out of a long sleep()."""
for x in xrange(int(seconds)):
self.quit_check()
if wait_log_interval and not x % wait_log_interval:
self.logger.info('%s: %dsec of %dsec',
wait_reason,
x,
seconds)
self.quit_check()
time.sleep(1.0)
#--------------------------------------------------------------------------
def blocking_start(self, waiting_func=None):
"""this function starts the task manager running to do tasks. The
waiting_func is normally used to do something while other threads
are running, but here we don't have other threads. So the waiting
func will never get called. I can see wanting this function to be
called at least once after the end of the task loop."""
self.logger.debug('threadless start')
try:
while True:
for job_params in self._get_iterator(): # may never raise
# StopIteration
self.quit_check()
if job_params is None:
if self.config.quit_on_empty_queue:
raise KeyboardInterrupt
self.logger.info("there is nothing to do. Sleeping "
"for %d seconds" %
self.config.idle_delay)
self._responsive_sleep(self.config.idle_delay)
continue
self.quit_check()
try:
args, kwargs = job_params
except ValueError:
args = job_params
kwargs = {}
try:
self.task_func(*args, **kwargs)
except Exception:
self.config.logger.error("Error in processing a job",
exc_info=True)
except KeyboardInterrupt:
self.logger.debug('queuingThread gets quit request')
finally:
self.quit = True
self.logger.debug("ThreadlessTaskManager dies quietly")
#--------------------------------------------------------------------------
def executor_identity(self):
"""this function is likely to be called via the configuration parameter
'executor_identity' at the root of the self.config attribute of the
application. It is most frequently used in the Pooled
ConnectionContext classes to ensure that connections aren't shared
between threads, greenlets, or whatever the unit of execution is.
This is useful for maintaining transactional integrity on a resource
connection."""
return "%s-%s" % (self._pid, threading.currentThread().getName())
|
pythonprobr/pythonbirds-en
|
refs/heads/training
|
fases/escudo_espartano.py
|
1
|
# -*- coding: utf-8 -*-
from os import path
import sys
import math
project_dir = path.dirname(__file__)
project_dir = path.join(project_dir, '..')
sys.path.append(project_dir)
from actors import YellowBird, Obstacle, Pig
from phase import Phase
from graphics_tk import run_phase
from random import randint
if __name__ == '__main__':
fase = Phase(intervalo_de_colisao=32)
    # Add the yellow birds
for i in range(80):
fase.add_birds(YellowBird(30, 30))
    # Obstacles
theta = 270
h = 12
k = 7
step = 32
r = 50
while theta < 480:
x = 600 + (h + r * math.cos(theta))
y = (k + r * math.sin(theta))
fase.add_obstacles(Obstacle(x, y))
        theta += step
    # Pigs
for i in range(30, 300, 32):
x = randint(590, 631)
y = randint(0, 21)
fase.add_pigs(Pig(x, y))
run_phase(fase)
|
maxvogel/NetworKit-mirror2
|
refs/heads/master
|
Doc/docs/python/source/ext/numpydoc/tests/test_plot_directive.py
|
31
|
from __future__ import division, absolute_import, print_function
import sys
from nose import SkipTest
def test_import():
if sys.version_info[0] >= 3:
raise SkipTest("plot_directive not ported to Python 3 (use the one from Matplotlib instead)")
import numpydoc.plot_directive
# No tests at the moment...
|
kennedyshead/home-assistant
|
refs/heads/dev
|
homeassistant/components/kira/__init__.py
|
7
|
"""KIRA interface to receive UDP packets from an IR-IP bridge."""
import logging
import os
import pykira
import voluptuous as vol
from voluptuous.error import Error as VoluptuousError
import yaml
from homeassistant.const import (
CONF_CODE,
CONF_DEVICE,
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_REPEAT,
CONF_SENSORS,
CONF_TYPE,
EVENT_HOMEASSISTANT_STOP,
STATE_UNKNOWN,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
DOMAIN = "kira"
_LOGGER = logging.getLogger(__name__)
DEFAULT_HOST = "0.0.0.0"
DEFAULT_PORT = 65432
CONF_REMOTES = "remotes"
CONF_SENSOR = "sensor"
CONF_REMOTE = "remote"
CODES_YAML = f"{DOMAIN}_codes.yaml"
CODE_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_CODE): cv.string,
vol.Optional(CONF_TYPE): cv.string,
vol.Optional(CONF_DEVICE): cv.string,
vol.Optional(CONF_REPEAT): cv.positive_int,
}
)
SENSOR_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DOMAIN): vol.Exclusive(cv.string, "sensors"),
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
REMOTE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DOMAIN): vol.Exclusive(cv.string, "remotes"),
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_SENSORS): [SENSOR_SCHEMA],
vol.Optional(CONF_REMOTES): [REMOTE_SCHEMA],
}
)
},
extra=vol.ALLOW_EXTRA,
)
def load_codes(path):
"""Load KIRA codes from specified file."""
codes = []
if os.path.exists(path):
with open(path) as code_file:
data = yaml.safe_load(code_file) or []
for code in data:
try:
codes.append(CODE_SCHEMA(code))
except VoluptuousError as exception:
# keep going
_LOGGER.warning("KIRA code invalid data: %s", exception)
else:
with open(path, "w") as code_file:
code_file.write("")
return codes
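# An illustrative entry for the generated codes file (the values are made up;
# only the keys mirror CODE_SCHEMA above):
#
#   - name: "TV Power"
#     code: "K 2322 228B 2422 2322 ..."
#     type: "media_player"   # optional
#     device: "tv"           # optional
#     repeat: 2              # optional, positive int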
def setup(hass, config):
"""Set up the KIRA component."""
sensors = config.get(DOMAIN, {}).get(CONF_SENSORS, [])
remotes = config.get(DOMAIN, {}).get(CONF_REMOTES, [])
# If no sensors or remotes were specified, add a sensor
if not (sensors or remotes):
sensors.append({})
codes = load_codes(hass.config.path(CODES_YAML))
hass.data[DOMAIN] = {CONF_SENSOR: {}, CONF_REMOTE: {}}
def load_module(platform, idx, module_conf):
"""Set up the KIRA module and load platform."""
# note: module_name is not the HA device name. it's just a unique name
# to ensure the component and platform can share information
module_name = ("%s_%d" % (DOMAIN, idx)) if idx else DOMAIN
device_name = module_conf.get(CONF_NAME, DOMAIN)
port = module_conf.get(CONF_PORT, DEFAULT_PORT)
host = module_conf.get(CONF_HOST, DEFAULT_HOST)
if platform == CONF_SENSOR:
module = pykira.KiraReceiver(host, port)
module.start()
else:
module = pykira.KiraModule(host, port)
hass.data[DOMAIN][platform][module_name] = module
for code in codes:
code_tuple = (code.get(CONF_NAME), code.get(CONF_DEVICE, STATE_UNKNOWN))
module.registerCode(code_tuple, code.get(CONF_CODE))
discovery.load_platform(
hass, platform, DOMAIN, {"name": module_name, "device": device_name}, config
)
for idx, module_conf in enumerate(sensors):
load_module(CONF_SENSOR, idx, module_conf)
for idx, module_conf in enumerate(remotes):
load_module(CONF_REMOTE, idx, module_conf)
def _stop_kira(_event):
"""Stop the KIRA receiver."""
for receiver in hass.data[DOMAIN][CONF_SENSOR].values():
receiver.stop()
_LOGGER.info("Terminated receivers")
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_kira)
return True
|
da1z/intellij-community
|
refs/heads/master
|
plugins/hg4idea/testData/bin/hgext/convert/cvs.py
|
94
|
# cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import os, re, socket, errno
from cStringIO import StringIO
from mercurial import encoding, util
from mercurial.i18n import _
from common import NoRepo, commit, converter_source, checktool
from common import makedatetimestamp
import cvsps
class convert_cvs(converter_source):
def __init__(self, ui, path, rev=None):
super(convert_cvs, self).__init__(ui, path, rev=rev)
cvs = os.path.join(path, "CVS")
if not os.path.exists(cvs):
raise NoRepo(_("%s does not look like a CVS checkout") % path)
checktool('cvs')
self.changeset = None
self.files = {}
self.tags = {}
self.lastbranch = {}
self.socket = None
self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1]
self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1]
self.encoding = encoding.encoding
self._connect()
def _parse(self):
if self.changeset is not None:
return
self.changeset = {}
maxrev = 0
if self.rev:
# TODO: handle tags
try:
# patchset number?
maxrev = int(self.rev)
except ValueError:
raise util.Abort(_('revision %s is not a patchset number')
% self.rev)
d = os.getcwd()
try:
os.chdir(self.path)
id = None
cache = 'update'
if not self.ui.configbool('convert', 'cvsps.cache', True):
cache = None
db = cvsps.createlog(self.ui, cache=cache)
db = cvsps.createchangeset(self.ui, db,
fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
for cs in db:
if maxrev and cs.id > maxrev:
break
id = str(cs.id)
cs.author = self.recode(cs.author)
self.lastbranch[cs.branch] = id
cs.comment = self.recode(cs.comment)
if self.ui.configbool('convert', 'localtimezone'):
cs.date = makedatetimestamp(cs.date[0])
date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
self.tags.update(dict.fromkeys(cs.tags, id))
files = {}
for f in cs.entries:
files[f.file] = "%s%s" % ('.'.join([str(x)
for x in f.revision]),
['', '(DEAD)'][f.dead])
# add current commit to set
c = commit(author=cs.author, date=date,
parents=[str(p.id) for p in cs.parents],
desc=cs.comment, branch=cs.branch or '')
self.changeset[id] = c
self.files[id] = files
self.heads = self.lastbranch.values()
finally:
os.chdir(d)
def _connect(self):
root = self.cvsroot
conntype = None
user, host = None, None
cmd = ['cvs', 'server']
self.ui.status(_("connecting to %s\n") % root)
if root.startswith(":pserver:"):
root = root[9:]
m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
root)
if m:
conntype = "pserver"
user, passw, serv, port, root = m.groups()
if not user:
user = "anonymous"
if not port:
port = 2401
else:
port = int(port)
format0 = ":pserver:%s@%s:%s" % (user, serv, root)
format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
if not passw:
passw = "A"
cvspass = os.path.expanduser("~/.cvspass")
try:
pf = open(cvspass)
for line in pf.read().splitlines():
part1, part2 = line.split(' ', 1)
# /1 :pserver:user@example.com:2401/cvsroot/foo
# Ah<Z
if part1 == '/1':
part1, part2 = part2.split(' ', 1)
format = format1
# :pserver:user@example.com:/cvsroot/foo Ah<Z
else:
format = format0
if part1 == format:
passw = part2
break
pf.close()
except IOError, inst:
if inst.errno != errno.ENOENT:
if not getattr(inst, 'filename', None):
inst.filename = cvspass
raise
sck = socket.socket()
sck.connect((serv, port))
sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
"END AUTH REQUEST", ""]))
if sck.recv(128) != "I LOVE YOU\n":
raise util.Abort(_("CVS pserver authentication failed"))
self.writep = self.readp = sck.makefile('r+')
if not conntype and root.startswith(":local:"):
conntype = "local"
root = root[7:]
if not conntype:
# :ext:user@host/home/user/path/to/cvsroot
if root.startswith(":ext:"):
root = root[5:]
m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
            # Do not take Windows path "c:\foo\bar" for a connection string
if os.path.isdir(root) or not m:
conntype = "local"
else:
conntype = "rsh"
user, host, root = m.group(1), m.group(2), m.group(3)
if conntype != "pserver":
if conntype == "rsh":
rsh = os.environ.get("CVS_RSH") or "ssh"
if user:
cmd = [rsh, '-l', user, host] + cmd
else:
cmd = [rsh, host] + cmd
# popen2 does not support argument lists under Windows
cmd = [util.shellquote(arg) for arg in cmd]
cmd = util.quotecommand(' '.join(cmd))
self.writep, self.readp = util.popen2(cmd)
self.realroot = root
self.writep.write("Root %s\n" % root)
self.writep.write("Valid-responses ok error Valid-requests Mode"
" M Mbinary E Checked-in Created Updated"
" Merged Removed\n")
self.writep.write("valid-requests\n")
self.writep.flush()
r = self.readp.readline()
if not r.startswith("Valid-requests"):
raise util.Abort(_('unexpected response from CVS server '
'(expected "Valid-requests", but got %r)')
% r)
if "UseUnchanged" in r:
self.writep.write("UseUnchanged\n")
self.writep.flush()
r = self.readp.readline()
def getheads(self):
self._parse()
return self.heads
def getfile(self, name, rev):
def chunkedread(fp, count):
# file-objects returned by socket.makefile() do not handle
# large read() requests very well.
chunksize = 65536
output = StringIO()
while count > 0:
data = fp.read(min(count, chunksize))
if not data:
raise util.Abort(_("%d bytes missing from remote file")
% count)
count -= len(data)
output.write(data)
return output.getvalue()
self._parse()
if rev.endswith("(DEAD)"):
raise IOError
args = ("-N -P -kk -r %s --" % rev).split()
args.append(self.cvsrepo + '/' + name)
for x in args:
self.writep.write("Argument %s\n" % x)
self.writep.write("Directory .\n%s\nco\n" % self.realroot)
self.writep.flush()
data = ""
mode = None
while True:
line = self.readp.readline()
if line.startswith("Created ") or line.startswith("Updated "):
self.readp.readline() # path
self.readp.readline() # entries
mode = self.readp.readline()[:-1]
count = int(self.readp.readline()[:-1])
data = chunkedread(self.readp, count)
elif line.startswith(" "):
data += line[1:]
elif line.startswith("M "):
pass
elif line.startswith("Mbinary "):
count = int(self.readp.readline()[:-1])
data = chunkedread(self.readp, count)
else:
if line == "ok\n":
if mode is None:
raise util.Abort(_('malformed response from CVS'))
return (data, "x" in mode and "x" or "")
elif line.startswith("E "):
self.ui.warn(_("cvs server: %s\n") % line[2:])
elif line.startswith("Remove"):
self.readp.readline()
else:
raise util.Abort(_("unknown CVS response: %s") % line)
def getchanges(self, rev):
self._parse()
return sorted(self.files[rev].iteritems()), {}
def getcommit(self, rev):
self._parse()
return self.changeset[rev]
def gettags(self):
self._parse()
return self.tags
def getchangedfiles(self, rev, i):
self._parse()
return sorted(self.files[rev])
|
throwable-one/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/django/contrib/gis/utils/ogrinspect.py
|
321
|
"""
This module is for inspecting OGR data sources and generating either
models for GeoDjango and/or mapping dictionaries for use with the
`LayerMapping` utility.
Author: Travis Pinney, Dane Springmeyer, & Justin Bronn
"""
from itertools import izip
# Requires GDAL to use.
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal.field import OFTDate, OFTDateTime, OFTInteger, OFTReal, OFTString, OFTTime
def mapping(data_source, geom_name='geom', layer_key=0, multi_geom=False):
"""
Given a DataSource, generates a dictionary that may be used
for invoking the LayerMapping utility.
Keyword Arguments:
`geom_name` => The name of the geometry field to use for the model.
`layer_key` => The key for specifying which layer in the DataSource to use;
defaults to 0 (the first layer). May be an integer index or a string
identifier for the layer.
`multi_geom` => Boolean (default: False) - specify as multigeometry.
"""
if isinstance(data_source, basestring):
# Instantiating the DataSource from the string.
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise TypeError('Data source parameter must be a string or a DataSource object.')
# Creating the dictionary.
_mapping = {}
# Generating the field name for each field in the layer.
for field in data_source[layer_key].fields:
mfield = field.lower()
if mfield[-1:] == '_': mfield += 'field'
_mapping[mfield] = field
gtype = data_source[layer_key].geom_type
if multi_geom and gtype.num in (1, 2, 3): prefix = 'MULTI'
else: prefix = ''
_mapping[geom_name] = prefix + str(gtype).upper()
return _mapping
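# A minimal sketch (not part of the original module) of feeding the generated
# dictionary into LayerMapping; the shapefile path and the WorldBorders model
# are hypothetical.
#
#     from django.contrib.gis.utils import LayerMapping
#     shp = '/path/to/world_borders.shp'
#     lm = LayerMapping(WorldBorders, shp, mapping(shp, geom_name='geom'))
#     lm.save(verbose=True)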
def ogrinspect(*args, **kwargs):
"""
Given a data source (either a string or a DataSource object) and a string
model name this function will generate a GeoDjango model.
Usage:
>>> from django.contrib.gis.utils import ogrinspect
>>> ogrinspect('/path/to/shapefile.shp','NewModel')
    ...will print the model definition to stdout
or put this in a python script and use to redirect the output to a new
model like:
$ python generate_model.py > myapp/models.py
# generate_model.py
from django.contrib.gis.utils import ogrinspect
shp_file = 'data/mapping_hacks/world_borders.shp'
model_name = 'WorldBorders'
print ogrinspect(shp_file, model_name, multi_geom=True, srid=4326,
geom_name='shapes', blank=True)
Required Arguments
     `datasource` => string path to the data source, or a DataSource object
     `model name` => string name of the new model class to create
Optional Keyword Arguments
`geom_name` => For specifying the model name for the Geometry Field.
Otherwise will default to `geom`
`layer_key` => The key for specifying which layer in the DataSource to use;
defaults to 0 (the first layer). May be an integer index or a string
identifier for the layer.
`srid` => The SRID to use for the Geometry Field. If it can be determined,
the SRID of the datasource is used.
`multi_geom` => Boolean (default: False) - specify as multigeometry.
`name_field` => String - specifies a field name to return for the
`__unicode__` function (which will be generated if specified).
`imports` => Boolean (default: True) - set to False to omit the
`from django.contrib.gis.db import models` code from the
autogenerated models thus avoiding duplicated imports when building
more than one model by batching ogrinspect()
`decimal` => Boolean or sequence (default: False). When set to True
all generated model fields corresponding to the `OFTReal` type will
be `DecimalField` instead of `FloatField`. A sequence of specific
field names to generate as `DecimalField` may also be used.
`blank` => Boolean or sequence (default: False). When set to True all
generated model fields will have `blank=True`. If the user wants to
give specific fields to have blank, then a list/tuple of OGR field
names may be used.
`null` => Boolean (default: False) - When set to True all generated
       model fields will have `null=True`. If the user wants to give
       specific fields `null=True`, then a list/tuple of OGR field
names may be used.
Note: This routine calls the _ogrinspect() helper to do the heavy lifting.
"""
return '\n'.join(s for s in _ogrinspect(*args, **kwargs))
def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=None,
multi_geom=False, name_field=None, imports=True,
decimal=False, blank=False, null=False):
"""
Helper routine for `ogrinspect` that generates GeoDjango models corresponding
to the given data source. See the `ogrinspect` docstring for more details.
"""
# Getting the DataSource
    if isinstance(data_source, basestring):
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise TypeError('Data source parameter must be a string or a DataSource object.')
# Getting the layer corresponding to the layer key and getting
# a string listing of all OGR fields in the Layer.
layer = data_source[layer_key]
ogr_fields = layer.fields
# Creating lists from the `null`, `blank`, and `decimal`
# keyword arguments.
def process_kwarg(kwarg):
if isinstance(kwarg, (list, tuple)):
return [s.lower() for s in kwarg]
elif kwarg:
return [s.lower() for s in ogr_fields]
else:
return []
null_fields = process_kwarg(null)
blank_fields = process_kwarg(blank)
decimal_fields = process_kwarg(decimal)
# Gets the `null` and `blank` keywords for the given field name.
def get_kwargs_str(field_name):
kwlist = []
if field_name.lower() in null_fields: kwlist.append('null=True')
if field_name.lower() in blank_fields: kwlist.append('blank=True')
if kwlist: return ', ' + ', '.join(kwlist)
else: return ''
# For those wishing to disable the imports.
if imports:
yield '# This is an auto-generated Django model module created by ogrinspect.'
yield 'from django.contrib.gis.db import models'
yield ''
yield 'class %s(models.Model):' % model_name
for field_name, width, precision, field_type in izip(ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types):
# The model field name.
mfield = field_name.lower()
if mfield[-1:] == '_': mfield += 'field'
# Getting the keyword args string.
kwargs_str = get_kwargs_str(field_name)
if field_type is OFTReal:
# By default OFTReals are mapped to `FloatField`, however, they
# may also be mapped to `DecimalField` if specified in the
# `decimal` keyword.
if field_name.lower() in decimal_fields:
yield ' %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)' % (mfield, width, precision, kwargs_str)
else:
yield ' %s = models.FloatField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTInteger:
yield ' %s = models.IntegerField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTString:
yield ' %s = models.CharField(max_length=%s%s)' % (mfield, width, kwargs_str)
elif field_type is OFTDate:
yield ' %s = models.DateField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTDateTime:
yield ' %s = models.DateTimeField(%s)' % (mfield, kwargs_str[2:])
        elif field_type is OFTTime:
yield ' %s = models.TimeField(%s)' % (mfield, kwargs_str[2:])
else:
raise TypeError('Unknown field type %s in %s' % (field_type, mfield))
# TODO: Autodetection of multigeometry types (see #7218).
gtype = layer.geom_type
if multi_geom and gtype.num in (1, 2, 3):
geom_field = 'Multi%s' % gtype.django
else:
geom_field = gtype.django
# Setting up the SRID keyword string.
if srid is None:
if layer.srs is None:
srid_str = 'srid=-1'
else:
srid = layer.srs.srid
if srid is None:
srid_str = 'srid=-1'
elif srid == 4326:
# WGS84 is already the default.
srid_str = ''
else:
srid_str = 'srid=%s' % srid
else:
srid_str = 'srid=%s' % srid
yield ' %s = models.%s(%s)' % (geom_name, geom_field, srid_str)
yield ' objects = models.GeoManager()'
if name_field:
yield ''
yield ' def __unicode__(self): return self.%s' % name_field
|
lanfker/vPRKS
|
refs/heads/master
|
src/mobility/bindings/modulegen__gcc_LP64.py
|
28
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.mobility', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## box.h (module 'mobility'): ns3::Box [class]
module.add_class('Box')
## box.h (module 'mobility'): ns3::Box::Side [enumeration]
module.add_enum('Side', ['RIGHT', 'LEFT', 'TOP', 'BOTTOM', 'UP', 'DOWN'], outer_class=root_module['ns3::Box'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## constant-velocity-helper.h (module 'mobility'): ns3::ConstantVelocityHelper [class]
module.add_class('ConstantVelocityHelper')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mobility-helper.h (module 'mobility'): ns3::MobilityHelper [class]
module.add_class('MobilityHelper')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
## ns2-mobility-helper.h (module 'mobility'): ns3::Ns2MobilityHelper [class]
module.add_class('Ns2MobilityHelper')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## random-variable.h (module 'core'): ns3::RandomVariable [class]
module.add_class('RandomVariable', import_from_module='ns.core')
## rectangle.h (module 'mobility'): ns3::Rectangle [class]
module.add_class('Rectangle')
## rectangle.h (module 'mobility'): ns3::Rectangle::Side [enumeration]
module.add_enum('Side', ['RIGHT', 'LEFT', 'TOP', 'BOTTOM'], outer_class=root_module['ns3::Rectangle'])
## random-variable.h (module 'core'): ns3::SeedManager [class]
module.add_class('SeedManager', import_from_module='ns.core')
## random-variable.h (module 'core'): ns3::SequentialVariable [class]
module.add_class('SequentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## random-variable.h (module 'core'): ns3::TriangularVariable [class]
module.add_class('TriangularVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## random-variable.h (module 'core'): ns3::UniformVariable [class]
module.add_class('UniformVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## vector.h (module 'core'): ns3::Vector2D [class]
module.add_class('Vector2D', import_from_module='ns.core')
## vector.h (module 'core'): ns3::Vector3D [class]
module.add_class('Vector3D', import_from_module='ns.core')
## waypoint.h (module 'mobility'): ns3::Waypoint [class]
module.add_class('Waypoint')
## random-variable.h (module 'core'): ns3::WeibullVariable [class]
module.add_class('WeibullVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ZetaVariable [class]
module.add_class('ZetaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ZipfVariable [class]
module.add_class('ZipfVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## random-variable.h (module 'core'): ns3::ConstantVariable [class]
module.add_class('ConstantVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::DeterministicVariable [class]
module.add_class('DeterministicVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::EmpiricalVariable [class]
module.add_class('EmpiricalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ErlangVariable [class]
module.add_class('ErlangVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ExponentialVariable [class]
module.add_class('ExponentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::GammaVariable [class]
module.add_class('GammaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable [class]
module.add_class('IntEmpiricalVariable', import_from_module='ns.core', parent=root_module['ns3::EmpiricalVariable'])
## random-variable.h (module 'core'): ns3::LogNormalVariable [class]
module.add_class('LogNormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::NormalVariable [class]
module.add_class('NormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## random-variable.h (module 'core'): ns3::ParetoVariable [class]
module.add_class('ParetoVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## position-allocator.h (module 'mobility'): ns3::PositionAllocator [class]
module.add_class('PositionAllocator', parent=root_module['ns3::Object'])
## position-allocator.h (module 'mobility'): ns3::RandomBoxPositionAllocator [class]
module.add_class('RandomBoxPositionAllocator', parent=root_module['ns3::PositionAllocator'])
## position-allocator.h (module 'mobility'): ns3::RandomDiscPositionAllocator [class]
module.add_class('RandomDiscPositionAllocator', parent=root_module['ns3::PositionAllocator'])
## position-allocator.h (module 'mobility'): ns3::RandomRectanglePositionAllocator [class]
module.add_class('RandomRectanglePositionAllocator', parent=root_module['ns3::PositionAllocator'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## position-allocator.h (module 'mobility'): ns3::UniformDiscPositionAllocator [class]
module.add_class('UniformDiscPositionAllocator', parent=root_module['ns3::PositionAllocator'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## box.h (module 'mobility'): ns3::BoxChecker [class]
module.add_class('BoxChecker', parent=root_module['ns3::AttributeChecker'])
## box.h (module 'mobility'): ns3::BoxValue [class]
module.add_class('BoxValue', parent=root_module['ns3::AttributeValue'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## position-allocator.h (module 'mobility'): ns3::GridPositionAllocator [class]
module.add_class('GridPositionAllocator', parent=root_module['ns3::PositionAllocator'])
## position-allocator.h (module 'mobility'): ns3::GridPositionAllocator::LayoutType [enumeration]
module.add_enum('LayoutType', ['ROW_FIRST', 'COLUMN_FIRST'], outer_class=root_module['ns3::GridPositionAllocator'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## position-allocator.h (module 'mobility'): ns3::ListPositionAllocator [class]
module.add_class('ListPositionAllocator', parent=root_module['ns3::PositionAllocator'])
## mobility-model.h (module 'mobility'): ns3::MobilityModel [class]
module.add_class('MobilityModel', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## random-direction-2d-mobility-model.h (module 'mobility'): ns3::RandomDirection2dMobilityModel [class]
module.add_class('RandomDirection2dMobilityModel', parent=root_module['ns3::MobilityModel'])
## random-variable.h (module 'core'): ns3::RandomVariableChecker [class]
module.add_class('RandomVariableChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## random-variable.h (module 'core'): ns3::RandomVariableValue [class]
module.add_class('RandomVariableValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## random-walk-2d-mobility-model.h (module 'mobility'): ns3::RandomWalk2dMobilityModel [class]
module.add_class('RandomWalk2dMobilityModel', parent=root_module['ns3::MobilityModel'])
## random-walk-2d-mobility-model.h (module 'mobility'): ns3::RandomWalk2dMobilityModel::Mode [enumeration]
module.add_enum('Mode', ['MODE_DISTANCE', 'MODE_TIME'], outer_class=root_module['ns3::RandomWalk2dMobilityModel'])
## random-waypoint-mobility-model.h (module 'mobility'): ns3::RandomWaypointMobilityModel [class]
module.add_class('RandomWaypointMobilityModel', parent=root_module['ns3::MobilityModel'])
## rectangle.h (module 'mobility'): ns3::RectangleChecker [class]
module.add_class('RectangleChecker', parent=root_module['ns3::AttributeChecker'])
## rectangle.h (module 'mobility'): ns3::RectangleValue [class]
module.add_class('RectangleValue', parent=root_module['ns3::AttributeValue'])
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): ns3::SteadyStateRandomWaypointMobilityModel [class]
module.add_class('SteadyStateRandomWaypointMobilityModel', parent=root_module['ns3::MobilityModel'])
## nstime.h (module 'core'): ns3::TimeChecker [class]
module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector2DChecker [class]
module.add_class('Vector2DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector2DValue [class]
module.add_class('Vector2DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector3DChecker [class]
module.add_class('Vector3DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector3DValue [class]
module.add_class('Vector3DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## waypoint.h (module 'mobility'): ns3::WaypointChecker [class]
module.add_class('WaypointChecker', parent=root_module['ns3::AttributeChecker'])
## waypoint-mobility-model.h (module 'mobility'): ns3::WaypointMobilityModel [class]
module.add_class('WaypointMobilityModel', parent=root_module['ns3::MobilityModel'])
## waypoint.h (module 'mobility'): ns3::WaypointValue [class]
module.add_class('WaypointValue', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## constant-acceleration-mobility-model.h (module 'mobility'): ns3::ConstantAccelerationMobilityModel [class]
module.add_class('ConstantAccelerationMobilityModel', parent=root_module['ns3::MobilityModel'])
## constant-position-mobility-model.h (module 'mobility'): ns3::ConstantPositionMobilityModel [class]
module.add_class('ConstantPositionMobilityModel', parent=root_module['ns3::MobilityModel'])
## constant-velocity-mobility-model.h (module 'mobility'): ns3::ConstantVelocityMobilityModel [class]
module.add_class('ConstantVelocityMobilityModel', parent=root_module['ns3::MobilityModel'])
## gauss-markov-mobility-model.h (module 'mobility'): ns3::GaussMarkovMobilityModel [class]
module.add_class('GaussMarkovMobilityModel', parent=root_module['ns3::MobilityModel'])
## hierarchical-mobility-model.h (module 'mobility'): ns3::HierarchicalMobilityModel [class]
module.add_class('HierarchicalMobilityModel', parent=root_module['ns3::MobilityModel'])
typehandlers.add_type_alias('ns3::Vector3DValue', 'ns3::VectorValue')
typehandlers.add_type_alias('ns3::Vector3DValue*', 'ns3::VectorValue*')
typehandlers.add_type_alias('ns3::Vector3DValue&', 'ns3::VectorValue&')
module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue')
typehandlers.add_type_alias('ns3::Vector3D', 'ns3::Vector')
typehandlers.add_type_alias('ns3::Vector3D*', 'ns3::Vector*')
typehandlers.add_type_alias('ns3::Vector3D&', 'ns3::Vector&')
module.add_typedef(root_module['ns3::Vector3D'], 'Vector')
typehandlers.add_type_alias('ns3::Vector3DChecker', 'ns3::VectorChecker')
typehandlers.add_type_alias('ns3::Vector3DChecker*', 'ns3::VectorChecker*')
typehandlers.add_type_alias('ns3::Vector3DChecker&', 'ns3::VectorChecker&')
module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker')
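    ## The aliases above expose ns3::Vector3D and its attribute wrappers under the
    ## shorter names used throughout ns-3 (ns3::Vector, ns3::VectorValue,
    ## ns3::VectorChecker), so wrapped code can refer to either spelling of the type.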
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
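## register_types_ns3_FatalImpl() above only fetches the root module, because the
## FatalImpl namespace contributes no additional types in this scan.
## register_methods() below simply dispatches to one helper per wrapped class; each
## helper re-declares the constructors, member functions and attributes of the
## corresponding C++ class so that pybindgen can emit the wrapper code.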
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Box_methods(root_module, root_module['ns3::Box'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3ConstantVelocityHelper_methods(root_module, root_module['ns3::ConstantVelocityHelper'])
register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3MobilityHelper_methods(root_module, root_module['ns3::MobilityHelper'])
register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
register_Ns3Ns2MobilityHelper_methods(root_module, root_module['ns3::Ns2MobilityHelper'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3RandomVariable_methods(root_module, root_module['ns3::RandomVariable'])
register_Ns3Rectangle_methods(root_module, root_module['ns3::Rectangle'])
register_Ns3SeedManager_methods(root_module, root_module['ns3::SeedManager'])
register_Ns3SequentialVariable_methods(root_module, root_module['ns3::SequentialVariable'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TriangularVariable_methods(root_module, root_module['ns3::TriangularVariable'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3UniformVariable_methods(root_module, root_module['ns3::UniformVariable'])
register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D'])
register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D'])
register_Ns3Waypoint_methods(root_module, root_module['ns3::Waypoint'])
register_Ns3WeibullVariable_methods(root_module, root_module['ns3::WeibullVariable'])
register_Ns3ZetaVariable_methods(root_module, root_module['ns3::ZetaVariable'])
register_Ns3ZipfVariable_methods(root_module, root_module['ns3::ZipfVariable'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3ConstantVariable_methods(root_module, root_module['ns3::ConstantVariable'])
register_Ns3DeterministicVariable_methods(root_module, root_module['ns3::DeterministicVariable'])
register_Ns3EmpiricalVariable_methods(root_module, root_module['ns3::EmpiricalVariable'])
register_Ns3ErlangVariable_methods(root_module, root_module['ns3::ErlangVariable'])
register_Ns3ExponentialVariable_methods(root_module, root_module['ns3::ExponentialVariable'])
register_Ns3GammaVariable_methods(root_module, root_module['ns3::GammaVariable'])
register_Ns3IntEmpiricalVariable_methods(root_module, root_module['ns3::IntEmpiricalVariable'])
register_Ns3LogNormalVariable_methods(root_module, root_module['ns3::LogNormalVariable'])
register_Ns3NormalVariable_methods(root_module, root_module['ns3::NormalVariable'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3ParetoVariable_methods(root_module, root_module['ns3::ParetoVariable'])
register_Ns3PositionAllocator_methods(root_module, root_module['ns3::PositionAllocator'])
register_Ns3RandomBoxPositionAllocator_methods(root_module, root_module['ns3::RandomBoxPositionAllocator'])
register_Ns3RandomDiscPositionAllocator_methods(root_module, root_module['ns3::RandomDiscPositionAllocator'])
register_Ns3RandomRectanglePositionAllocator_methods(root_module, root_module['ns3::RandomRectanglePositionAllocator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3UniformDiscPositionAllocator_methods(root_module, root_module['ns3::UniformDiscPositionAllocator'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3BoxChecker_methods(root_module, root_module['ns3::BoxChecker'])
register_Ns3BoxValue_methods(root_module, root_module['ns3::BoxValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
register_Ns3GridPositionAllocator_methods(root_module, root_module['ns3::GridPositionAllocator'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3ListPositionAllocator_methods(root_module, root_module['ns3::ListPositionAllocator'])
register_Ns3MobilityModel_methods(root_module, root_module['ns3::MobilityModel'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3Node_methods(root_module, root_module['ns3::Node'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3RandomDirection2dMobilityModel_methods(root_module, root_module['ns3::RandomDirection2dMobilityModel'])
register_Ns3RandomVariableChecker_methods(root_module, root_module['ns3::RandomVariableChecker'])
register_Ns3RandomVariableValue_methods(root_module, root_module['ns3::RandomVariableValue'])
register_Ns3RandomWalk2dMobilityModel_methods(root_module, root_module['ns3::RandomWalk2dMobilityModel'])
register_Ns3RandomWaypointMobilityModel_methods(root_module, root_module['ns3::RandomWaypointMobilityModel'])
register_Ns3RectangleChecker_methods(root_module, root_module['ns3::RectangleChecker'])
register_Ns3RectangleValue_methods(root_module, root_module['ns3::RectangleValue'])
register_Ns3SteadyStateRandomWaypointMobilityModel_methods(root_module, root_module['ns3::SteadyStateRandomWaypointMobilityModel'])
register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker'])
register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue'])
register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker'])
register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue'])
register_Ns3WaypointChecker_methods(root_module, root_module['ns3::WaypointChecker'])
register_Ns3WaypointMobilityModel_methods(root_module, root_module['ns3::WaypointMobilityModel'])
register_Ns3WaypointValue_methods(root_module, root_module['ns3::WaypointValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3ConstantAccelerationMobilityModel_methods(root_module, root_module['ns3::ConstantAccelerationMobilityModel'])
register_Ns3ConstantPositionMobilityModel_methods(root_module, root_module['ns3::ConstantPositionMobilityModel'])
register_Ns3ConstantVelocityMobilityModel_methods(root_module, root_module['ns3::ConstantVelocityMobilityModel'])
register_Ns3GaussMarkovMobilityModel_methods(root_module, root_module['ns3::GaussMarkovMobilityModel'])
register_Ns3HierarchicalMobilityModel_methods(root_module, root_module['ns3::HierarchicalMobilityModel'])
return
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
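## ns3::Address is the generic, type-tagged byte-buffer address used across the
## network module; concrete addresses such as ns3::Ipv4Address and ns3::Ipv6Address
## (registered further below) convert to and from it via their ConvertFrom() and
## IsMatchingType() static methods.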
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3Box_methods(root_module, cls):
cls.add_output_stream_operator()
## box.h (module 'mobility'): ns3::Box::Box(ns3::Box const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Box const &', 'arg0')])
## box.h (module 'mobility'): ns3::Box::Box(double _xMin, double _xMax, double _yMin, double _yMax, double _zMin, double _zMax) [constructor]
cls.add_constructor([param('double', '_xMin'), param('double', '_xMax'), param('double', '_yMin'), param('double', '_yMax'), param('double', '_zMin'), param('double', '_zMax')])
## box.h (module 'mobility'): ns3::Box::Box() [constructor]
cls.add_constructor([])
## box.h (module 'mobility'): ns3::Vector ns3::Box::CalculateIntersection(ns3::Vector const & current, ns3::Vector const & speed) const [member function]
cls.add_method('CalculateIntersection',
'ns3::Vector',
[param('ns3::Vector const &', 'current'), param('ns3::Vector const &', 'speed')],
is_const=True)
## box.h (module 'mobility'): ns3::Box::Side ns3::Box::GetClosestSide(ns3::Vector const & position) const [member function]
cls.add_method('GetClosestSide',
'ns3::Box::Side',
[param('ns3::Vector const &', 'position')],
is_const=True)
## box.h (module 'mobility'): bool ns3::Box::IsInside(ns3::Vector const & position) const [member function]
cls.add_method('IsInside',
'bool',
[param('ns3::Vector const &', 'position')],
is_const=True)
## box.h (module 'mobility'): ns3::Box::xMax [variable]
cls.add_instance_attribute('xMax', 'double', is_const=False)
## box.h (module 'mobility'): ns3::Box::xMin [variable]
cls.add_instance_attribute('xMin', 'double', is_const=False)
## box.h (module 'mobility'): ns3::Box::yMax [variable]
cls.add_instance_attribute('yMax', 'double', is_const=False)
## box.h (module 'mobility'): ns3::Box::yMin [variable]
cls.add_instance_attribute('yMin', 'double', is_const=False)
## box.h (module 'mobility'): ns3::Box::zMax [variable]
cls.add_instance_attribute('zMax', 'double', is_const=False)
## box.h (module 'mobility'): ns3::Box::zMin [variable]
cls.add_instance_attribute('zMin', 'double', is_const=False)
return
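## Rough usage sketch of the ns3::Box bindings registered above (assuming the
## generated module is importable as ns.mobility and ns.core provides Vector;
## these import paths are assumptions, not part of this file):
##
##   box = ns.mobility.Box(0.0, 10.0, 0.0, 10.0, 0.0, 10.0)
##   box.IsInside(ns.core.Vector(5.0, 5.0, 5.0))      # -> True
##   box.GetClosestSide(ns.core.Vector(1.0, 5.0, 5.0))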
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3ConstantVelocityHelper_methods(root_module, cls):
## constant-velocity-helper.h (module 'mobility'): ns3::ConstantVelocityHelper::ConstantVelocityHelper(ns3::ConstantVelocityHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantVelocityHelper const &', 'arg0')])
## constant-velocity-helper.h (module 'mobility'): ns3::ConstantVelocityHelper::ConstantVelocityHelper() [constructor]
cls.add_constructor([])
## constant-velocity-helper.h (module 'mobility'): ns3::ConstantVelocityHelper::ConstantVelocityHelper(ns3::Vector const & position) [constructor]
cls.add_constructor([param('ns3::Vector const &', 'position')])
## constant-velocity-helper.h (module 'mobility'): ns3::ConstantVelocityHelper::ConstantVelocityHelper(ns3::Vector const & position, ns3::Vector const & vel) [constructor]
cls.add_constructor([param('ns3::Vector const &', 'position'), param('ns3::Vector const &', 'vel')])
## constant-velocity-helper.h (module 'mobility'): ns3::Vector ns3::ConstantVelocityHelper::GetCurrentPosition() const [member function]
cls.add_method('GetCurrentPosition',
'ns3::Vector',
[],
is_const=True)
## constant-velocity-helper.h (module 'mobility'): ns3::Vector ns3::ConstantVelocityHelper::GetVelocity() const [member function]
cls.add_method('GetVelocity',
'ns3::Vector',
[],
is_const=True)
## constant-velocity-helper.h (module 'mobility'): void ns3::ConstantVelocityHelper::Pause() [member function]
cls.add_method('Pause',
'void',
[])
## constant-velocity-helper.h (module 'mobility'): void ns3::ConstantVelocityHelper::SetPosition(ns3::Vector const & position) [member function]
cls.add_method('SetPosition',
'void',
[param('ns3::Vector const &', 'position')])
## constant-velocity-helper.h (module 'mobility'): void ns3::ConstantVelocityHelper::SetVelocity(ns3::Vector const & vel) [member function]
cls.add_method('SetVelocity',
'void',
[param('ns3::Vector const &', 'vel')])
## constant-velocity-helper.h (module 'mobility'): void ns3::ConstantVelocityHelper::Unpause() [member function]
cls.add_method('Unpause',
'void',
[])
## constant-velocity-helper.h (module 'mobility'): void ns3::ConstantVelocityHelper::Update() const [member function]
cls.add_method('Update',
'void',
[],
is_const=True)
## constant-velocity-helper.h (module 'mobility'): void ns3::ConstantVelocityHelper::UpdateWithBounds(ns3::Rectangle const & rectangle) const [member function]
cls.add_method('UpdateWithBounds',
'void',
[param('ns3::Rectangle const &', 'rectangle')],
is_const=True)
## constant-velocity-helper.h (module 'mobility'): void ns3::ConstantVelocityHelper::UpdateWithBounds(ns3::Box const & bounds) const [member function]
cls.add_method('UpdateWithBounds',
'void',
[param('ns3::Box const &', 'bounds')],
is_const=True)
return
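## Hedged sketch of how the ConstantVelocityHelper bindings above could be used
## from Python (module/import names are assumptions):
##
##   helper = ns.mobility.ConstantVelocityHelper(ns.core.Vector(0.0, 0.0, 0.0),
##                                               ns.core.Vector(1.0, 0.0, 0.0))
##   helper.Update()                       # advance the cached position to "now"
##   pos = helper.GetCurrentPosition()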
def register_Ns3EventId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('==')
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventId const &', 'arg0')])
## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
cls.add_constructor([])
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
cls.add_method('GetTs',
'uint64_t',
[],
is_const=True)
## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
cls.add_method('IsExpired',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
cls.add_method('IsRunning',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
cls.add_method('PeekEventImpl',
'ns3::EventImpl *',
[],
is_const=True)
return
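## ns3::EventId is the lightweight handle returned by the simulator's Schedule
## calls; the methods above let wrapped code cancel an event or query whether it is
## still pending.  Hedged sketch (ns.core.Simulator.Schedule and the `callback`
## callable are assumptions outside this file):
##
##   eid = ns.core.Simulator.Schedule(ns.core.Seconds(1.0), callback)
##   if eid.IsRunning():
##       eid.Cancel()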
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
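## Hedged sketch of the Ipv4Address bindings above together with the Ipv4Mask
## bindings registered just below (the ns.network import path is an assumption):
##
##   addr = ns.network.Ipv4Address("10.1.1.7")
##   mask = ns.network.Ipv4Mask("255.255.255.0")
##   net   = addr.CombineMask(mask)                   # 10.1.1.0
##   bcast = addr.GetSubnetDirectedBroadcast(mask)    # 10.1.1.255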
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
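## Hedged sketch for the Ipv6Address bindings above (import path assumed):
##
##   a = ns.network.Ipv6Address("2001:db8::1")
##   a.IsMulticast()                                  # -> False
##   ns.network.Ipv6Address.MakeSolicitedAddress(a)   # ff02::1:ff00:1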
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3MobilityHelper_methods(root_module, cls):
## mobility-helper.h (module 'mobility'): ns3::MobilityHelper::MobilityHelper(ns3::MobilityHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MobilityHelper const &', 'arg0')])
## mobility-helper.h (module 'mobility'): ns3::MobilityHelper::MobilityHelper() [constructor]
cls.add_constructor([])
## mobility-helper.h (module 'mobility'): static void ns3::MobilityHelper::EnableAscii(std::ostream & os, uint32_t nodeid) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::ostream &', 'os'), param('uint32_t', 'nodeid')],
is_static=True)
## mobility-helper.h (module 'mobility'): static void ns3::MobilityHelper::EnableAscii(std::ostream & os, ns3::NodeContainer n) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::ostream &', 'os'), param('ns3::NodeContainer', 'n')],
is_static=True)
## mobility-helper.h (module 'mobility'): static void ns3::MobilityHelper::EnableAsciiAll(std::ostream & os) [member function]
cls.add_method('EnableAsciiAll',
'void',
[param('std::ostream &', 'os')],
is_static=True)
## mobility-helper.h (module 'mobility'): std::string ns3::MobilityHelper::GetMobilityModelType() const [member function]
cls.add_method('GetMobilityModelType',
'std::string',
[],
is_const=True)
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::Install(ns3::Ptr<ns3::Node> node) const [member function]
cls.add_method('Install',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_const=True)
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::Install(std::string nodeName) const [member function]
cls.add_method('Install',
'void',
[param('std::string', 'nodeName')],
is_const=True)
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::Install(ns3::NodeContainer container) const [member function]
cls.add_method('Install',
'void',
[param('ns3::NodeContainer', 'container')],
is_const=True)
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::InstallAll() [member function]
cls.add_method('InstallAll',
'void',
[])
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::PopReferenceMobilityModel() [member function]
cls.add_method('PopReferenceMobilityModel',
'void',
[])
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::PushReferenceMobilityModel(ns3::Ptr<ns3::Object> reference) [member function]
cls.add_method('PushReferenceMobilityModel',
'void',
[param('ns3::Ptr< ns3::Object >', 'reference')])
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::PushReferenceMobilityModel(std::string referenceName) [member function]
cls.add_method('PushReferenceMobilityModel',
'void',
[param('std::string', 'referenceName')])
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::SetMobilityModel(std::string type, std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue(), std::string n8="", ns3::AttributeValue const & v8=ns3::EmptyAttributeValue(), std::string n9="", ns3::AttributeValue const & v9=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetMobilityModel',
'void',
[param('std::string', 'type'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n8', default_value='""'), param('ns3::AttributeValue const &', 'v8', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n9', default_value='""'), param('ns3::AttributeValue const &', 'v9', default_value='ns3::EmptyAttributeValue()')])
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::SetPositionAllocator(ns3::Ptr<ns3::PositionAllocator> allocator) [member function]
cls.add_method('SetPositionAllocator',
'void',
[param('ns3::Ptr< ns3::PositionAllocator >', 'allocator')])
## mobility-helper.h (module 'mobility'): void ns3::MobilityHelper::SetPositionAllocator(std::string type, std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue(), std::string n8="", ns3::AttributeValue const & v8=ns3::EmptyAttributeValue(), std::string n9="", ns3::AttributeValue const & v9=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetPositionAllocator',
'void',
[param('std::string', 'type'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n8', default_value='""'), param('ns3::AttributeValue const &', 'v8', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n9', default_value='""'), param('ns3::AttributeValue const &', 'v9', default_value='ns3::EmptyAttributeValue()')])
return
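## Hedged sketch of driving the MobilityHelper bindings above from Python.  The
## attribute names ("MinX", "DeltaX", "Bounds", ...) belong to the allocator and
## model TypeIds, not to this file, so treat them as assumptions:
##
##   mobility = ns.mobility.MobilityHelper()
##   mobility.SetPositionAllocator("ns3::GridPositionAllocator",
##                                 "MinX", ns.core.DoubleValue(0.0),
##                                 "DeltaX", ns.core.DoubleValue(5.0))
##   mobility.SetMobilityModel("ns3::RandomWalk2dMobilityModel",
##                             "Bounds", ns.mobility.RectangleValue(
##                                 ns.mobility.Rectangle(-50.0, 50.0, -50.0, 50.0)))
##   mobility.Install(nodes)               # nodes: an ns3::NodeContainer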
def register_Ns3NodeContainer_methods(root_module, cls):
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer() [constructor]
cls.add_constructor([])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::Ptr<ns3::Node> node) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(std::string nodeName) [constructor]
cls.add_constructor([param('std::string', 'nodeName')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d, ns3::NodeContainer const & e) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::NodeContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NodeContainer', 'other')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(std::string nodeName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'nodeName')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n')])
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n, uint32_t systemId) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n'), param('uint32_t', 'systemId')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NodeContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::Node >',
[param('uint32_t', 'i')],
is_const=True)
## node-container.h (module 'network'): static ns3::NodeContainer ns3::NodeContainer::GetGlobal() [member function]
cls.add_method('GetGlobal',
'ns3::NodeContainer',
[],
is_static=True)
## node-container.h (module 'network'): uint32_t ns3::NodeContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
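# Illustrative sketch (not part of the generated bindings): the NodeContainer
# methods registered above (Create/Add/Get/GetN/Begin/End) map directly onto
# the Python API; module path ns.network is an assumption about the build
# layout.
#
#   import ns.network
#   c = ns.network.NodeContainer()
#   c.Create(3)                      # three new Nodes
#   first = c.Get(0)                 # ns3::Ptr<ns3::Node>
#   assert c.GetN() == 3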
def register_Ns3Ns2MobilityHelper_methods(root_module, cls):
## ns2-mobility-helper.h (module 'mobility'): ns3::Ns2MobilityHelper::Ns2MobilityHelper(ns3::Ns2MobilityHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ns2MobilityHelper const &', 'arg0')])
## ns2-mobility-helper.h (module 'mobility'): ns3::Ns2MobilityHelper::Ns2MobilityHelper(std::string filename) [constructor]
cls.add_constructor([param('std::string', 'filename')])
## ns2-mobility-helper.h (module 'mobility'): void ns3::Ns2MobilityHelper::Install() const [member function]
cls.add_method('Install',
'void',
[],
is_const=True)
return
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
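# Illustrative sketch (not part of the generated bindings): ObjectFactory as
# registered above lets Python code pick a TypeId, set attributes, and create
# instances; the concrete TypeId string below is only an example.
#
#   import ns.core
#   factory = ns.core.ObjectFactory()
#   factory.SetTypeId("ns3::ConstantPositionMobilityModel")
#   factory.Set("Position", ns.core.StringValue("1.0:2.0:0.0"))
#   obj = factory.Create()           # returns ns3::Ptr<ns3::Object>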
def register_Ns3RandomVariable_methods(root_module, cls):
cls.add_output_stream_operator()
## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable(ns3::RandomVariable const & o) [copy constructor]
cls.add_constructor([param('ns3::RandomVariable const &', 'o')])
## random-variable.h (module 'core'): uint32_t ns3::RandomVariable::GetInteger() const [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::RandomVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
return
def register_Ns3Rectangle_methods(root_module, cls):
cls.add_output_stream_operator()
## rectangle.h (module 'mobility'): ns3::Rectangle::Rectangle(ns3::Rectangle const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Rectangle const &', 'arg0')])
## rectangle.h (module 'mobility'): ns3::Rectangle::Rectangle(double _xMin, double _xMax, double _yMin, double _yMax) [constructor]
cls.add_constructor([param('double', '_xMin'), param('double', '_xMax'), param('double', '_yMin'), param('double', '_yMax')])
## rectangle.h (module 'mobility'): ns3::Rectangle::Rectangle() [constructor]
cls.add_constructor([])
## rectangle.h (module 'mobility'): ns3::Vector ns3::Rectangle::CalculateIntersection(ns3::Vector const & current, ns3::Vector const & speed) const [member function]
cls.add_method('CalculateIntersection',
'ns3::Vector',
[param('ns3::Vector const &', 'current'), param('ns3::Vector const &', 'speed')],
is_const=True)
## rectangle.h (module 'mobility'): ns3::Rectangle::Side ns3::Rectangle::GetClosestSide(ns3::Vector const & position) const [member function]
cls.add_method('GetClosestSide',
'ns3::Rectangle::Side',
[param('ns3::Vector const &', 'position')],
is_const=True)
## rectangle.h (module 'mobility'): bool ns3::Rectangle::IsInside(ns3::Vector const & position) const [member function]
cls.add_method('IsInside',
'bool',
[param('ns3::Vector const &', 'position')],
is_const=True)
## rectangle.h (module 'mobility'): ns3::Rectangle::xMax [variable]
cls.add_instance_attribute('xMax', 'double', is_const=False)
## rectangle.h (module 'mobility'): ns3::Rectangle::xMin [variable]
cls.add_instance_attribute('xMin', 'double', is_const=False)
## rectangle.h (module 'mobility'): ns3::Rectangle::yMax [variable]
cls.add_instance_attribute('yMax', 'double', is_const=False)
## rectangle.h (module 'mobility'): ns3::Rectangle::yMin [variable]
cls.add_instance_attribute('yMin', 'double', is_const=False)
return
def register_Ns3SeedManager_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::SeedManager::SeedManager() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::SeedManager::SeedManager(ns3::SeedManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SeedManager const &', 'arg0')])
## random-variable.h (module 'core'): static bool ns3::SeedManager::CheckSeed(uint32_t seed) [member function]
cls.add_method('CheckSeed',
'bool',
[param('uint32_t', 'seed')],
is_static=True)
## random-variable.h (module 'core'): static uint32_t ns3::SeedManager::GetRun() [member function]
cls.add_method('GetRun',
'uint32_t',
[],
is_static=True)
## random-variable.h (module 'core'): static uint32_t ns3::SeedManager::GetSeed() [member function]
cls.add_method('GetSeed',
'uint32_t',
[],
is_static=True)
## random-variable.h (module 'core'): static void ns3::SeedManager::SetRun(uint32_t run) [member function]
cls.add_method('SetRun',
'void',
[param('uint32_t', 'run')],
is_static=True)
## random-variable.h (module 'core'): static void ns3::SeedManager::SetSeed(uint32_t seed) [member function]
cls.add_method('SetSeed',
'void',
[param('uint32_t', 'seed')],
is_static=True)
return
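# Illustrative sketch (not part of the generated bindings): SeedManager's
# static setters registered above control the global RNG seed and run number,
# which is the usual way to get independent replications from a script.
#
#   import ns.core
#   ns.core.SeedManager.SetSeed(1)
#   ns.core.SeedManager.SetRun(7)    # select an independent substream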
def register_Ns3SequentialVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(ns3::SequentialVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SequentialVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, double i=1, uint32_t c=1) [constructor]
cls.add_constructor([param('double', 'f'), param('double', 'l'), param('double', 'i', default_value='1'), param('uint32_t', 'c', default_value='1')])
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, ns3::RandomVariable const & i, uint32_t c=1) [constructor]
cls.add_constructor([param('double', 'f'), param('double', 'l'), param('ns3::RandomVariable const &', 'i'), param('uint32_t', 'c', default_value='1')])
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
def register_Ns3TriangularVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable(ns3::TriangularVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TriangularVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable(double s, double l, double mean) [constructor]
cls.add_constructor([param('double', 's'), param('double', 'l'), param('double', 'mean')])
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info')],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
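# Illustrative sketch (not part of the generated bindings): the TypeId methods
# registered above support run-time introspection of a class's attributes;
# the TypeId name used below is an example from the mobility module.
#
#   import ns.core
#   tid = ns.core.TypeId.LookupByName("ns3::RandomWalk2dMobilityModel")
#   for i in range(tid.GetAttributeN()):
#       info = tid.GetAttribute(i)
#       print(info.name, info.help)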
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3UniformVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(ns3::UniformVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UniformVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(double s, double l) [constructor]
cls.add_constructor([param('double', 's'), param('double', 'l')])
## random-variable.h (module 'core'): uint32_t ns3::UniformVariable::GetInteger(uint32_t s, uint32_t l) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 's'), param('uint32_t', 'l')])
## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue(double s, double l) [member function]
cls.add_method('GetValue',
'double',
[param('double', 's'), param('double', 'l')])
return
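# Illustrative sketch (not part of the generated bindings): the legacy
# RandomVariable classes, of which UniformVariable is registered above, expose
# GetValue/GetInteger directly to Python.
#
#   import ns.core
#   u = ns.core.UniformVariable(0.0, 10.0)
#   sample = u.GetValue()            # double drawn from [0, 10)
#   die = u.GetInteger(1, 6)         # uint32_t drawn from [1, 6]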
def register_Ns3Vector2D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector2D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
return
def register_Ns3Vector3D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::z [variable]
cls.add_instance_attribute('z', 'double', is_const=False)
return
def register_Ns3Waypoint_methods(root_module, cls):
cls.add_output_stream_operator()
## waypoint.h (module 'mobility'): ns3::Waypoint::Waypoint(ns3::Waypoint const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Waypoint const &', 'arg0')])
## waypoint.h (module 'mobility'): ns3::Waypoint::Waypoint(ns3::Time const & waypointTime, ns3::Vector const & waypointPosition) [constructor]
cls.add_constructor([param('ns3::Time const &', 'waypointTime'), param('ns3::Vector const &', 'waypointPosition')])
## waypoint.h (module 'mobility'): ns3::Waypoint::Waypoint() [constructor]
cls.add_constructor([])
## waypoint.h (module 'mobility'): ns3::Waypoint::position [variable]
cls.add_instance_attribute('position', 'ns3::Vector', is_const=False)
## waypoint.h (module 'mobility'): ns3::Waypoint::time [variable]
cls.add_instance_attribute('time', 'ns3::Time', is_const=False)
return
def register_Ns3WeibullVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(ns3::WeibullVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WeibullVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')])
return
def register_Ns3ZetaVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(ns3::ZetaVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ZetaVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(double alpha) [constructor]
cls.add_constructor([param('double', 'alpha')])
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3ZipfVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(ns3::ZipfVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ZipfVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(long int N, double alpha) [constructor]
cls.add_constructor([param('long int', 'N'), param('double', 'alpha')])
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
return
def register_Ns3ConstantVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(ns3::ConstantVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(double c) [constructor]
cls.add_constructor([param('double', 'c')])
## random-variable.h (module 'core'): void ns3::ConstantVariable::SetConstant(double c) [member function]
cls.add_method('SetConstant',
'void',
[param('double', 'c')])
return
def register_Ns3DeterministicVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(ns3::DeterministicVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DeterministicVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(double * d, uint32_t c) [constructor]
cls.add_constructor([param('double *', 'd'), param('uint32_t', 'c')])
return
def register_Ns3EmpiricalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable(ns3::EmpiricalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmpiricalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): void ns3::EmpiricalVariable::CDF(double v, double c) [member function]
cls.add_method('CDF',
'void',
[param('double', 'v'), param('double', 'c')])
return
def register_Ns3ErlangVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(ns3::ErlangVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ErlangVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(unsigned int k, double lambda) [constructor]
cls.add_constructor([param('unsigned int', 'k'), param('double', 'lambda')])
## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue(unsigned int k, double lambda) const [member function]
cls.add_method('GetValue',
'double',
[param('unsigned int', 'k'), param('double', 'lambda')],
is_const=True)
return
def register_Ns3ExponentialVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(ns3::ExponentialVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ExponentialVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'b')])
return
def register_Ns3GammaVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(ns3::GammaVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::GammaVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(double alpha, double beta) [constructor]
cls.add_constructor([param('double', 'alpha'), param('double', 'beta')])
## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue(double alpha, double beta) const [member function]
cls.add_method('GetValue',
'double',
[param('double', 'alpha'), param('double', 'beta')],
is_const=True)
return
def register_Ns3IntEmpiricalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable(ns3::IntEmpiricalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntEmpiricalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3LogNormalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(ns3::LogNormalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::LogNormalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(double mu, double sigma) [constructor]
cls.add_constructor([param('double', 'mu'), param('double', 'sigma')])
return
def register_Ns3NormalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(ns3::NormalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NormalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'v')])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'v'), param('double', 'b')])
return
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Start() [member function]
cls.add_method('Start',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
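# Illustrative sketch (not part of the generated bindings): Object's
# AggregateObject/GetAggregateIterator pair registered above is how ns-3
# attaches models to nodes and walks the aggregate; the exact Python module
# paths below are assumptions.
#
#   import ns.network, ns.mobility
#   node = ns.network.Node()
#   model = ns.mobility.ConstantPositionMobilityModel()
#   node.AggregateObject(model)      # attach the mobility model to the node
#   it = node.GetAggregateIterator()
#   while it.HasNext():
#       print(it.Next().GetInstanceTypeId().GetName())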
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
def register_Ns3ParetoVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(ns3::ParetoVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ParetoVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params) [constructor]
cls.add_constructor([param('std::pair< double, double >', 'params')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params, double b) [constructor]
cls.add_constructor([param('std::pair< double, double >', 'params'), param('double', 'b')])
return
def register_Ns3PositionAllocator_methods(root_module, cls):
## position-allocator.h (module 'mobility'): ns3::PositionAllocator::PositionAllocator(ns3::PositionAllocator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PositionAllocator const &', 'arg0')])
## position-allocator.h (module 'mobility'): ns3::PositionAllocator::PositionAllocator() [constructor]
cls.add_constructor([])
## position-allocator.h (module 'mobility'): ns3::Vector ns3::PositionAllocator::GetNext() const [member function]
cls.add_method('GetNext',
'ns3::Vector',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## position-allocator.h (module 'mobility'): static ns3::TypeId ns3::PositionAllocator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3RandomBoxPositionAllocator_methods(root_module, cls):
## position-allocator.h (module 'mobility'): ns3::RandomBoxPositionAllocator::RandomBoxPositionAllocator(ns3::RandomBoxPositionAllocator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomBoxPositionAllocator const &', 'arg0')])
## position-allocator.h (module 'mobility'): ns3::RandomBoxPositionAllocator::RandomBoxPositionAllocator() [constructor]
cls.add_constructor([])
## position-allocator.h (module 'mobility'): ns3::Vector ns3::RandomBoxPositionAllocator::GetNext() const [member function]
cls.add_method('GetNext',
'ns3::Vector',
[],
is_const=True, is_virtual=True)
## position-allocator.h (module 'mobility'): static ns3::TypeId ns3::RandomBoxPositionAllocator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## position-allocator.h (module 'mobility'): void ns3::RandomBoxPositionAllocator::SetX(ns3::RandomVariable x) [member function]
cls.add_method('SetX',
'void',
[param('ns3::RandomVariable', 'x')])
## position-allocator.h (module 'mobility'): void ns3::RandomBoxPositionAllocator::SetY(ns3::RandomVariable y) [member function]
cls.add_method('SetY',
'void',
[param('ns3::RandomVariable', 'y')])
## position-allocator.h (module 'mobility'): void ns3::RandomBoxPositionAllocator::SetZ(ns3::RandomVariable z) [member function]
cls.add_method('SetZ',
'void',
[param('ns3::RandomVariable', 'z')])
return
def register_Ns3RandomDiscPositionAllocator_methods(root_module, cls):
## position-allocator.h (module 'mobility'): ns3::RandomDiscPositionAllocator::RandomDiscPositionAllocator(ns3::RandomDiscPositionAllocator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomDiscPositionAllocator const &', 'arg0')])
## position-allocator.h (module 'mobility'): ns3::RandomDiscPositionAllocator::RandomDiscPositionAllocator() [constructor]
cls.add_constructor([])
## position-allocator.h (module 'mobility'): ns3::Vector ns3::RandomDiscPositionAllocator::GetNext() const [member function]
cls.add_method('GetNext',
'ns3::Vector',
[],
is_const=True, is_virtual=True)
## position-allocator.h (module 'mobility'): static ns3::TypeId ns3::RandomDiscPositionAllocator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## position-allocator.h (module 'mobility'): void ns3::RandomDiscPositionAllocator::SetRho(ns3::RandomVariable rho) [member function]
cls.add_method('SetRho',
'void',
[param('ns3::RandomVariable', 'rho')])
## position-allocator.h (module 'mobility'): void ns3::RandomDiscPositionAllocator::SetTheta(ns3::RandomVariable theta) [member function]
cls.add_method('SetTheta',
'void',
[param('ns3::RandomVariable', 'theta')])
## position-allocator.h (module 'mobility'): void ns3::RandomDiscPositionAllocator::SetX(double x) [member function]
cls.add_method('SetX',
'void',
[param('double', 'x')])
## position-allocator.h (module 'mobility'): void ns3::RandomDiscPositionAllocator::SetY(double y) [member function]
cls.add_method('SetY',
'void',
[param('double', 'y')])
return
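## Editor's note -- a minimal, hedged usage sketch for the RandomDiscPositionAllocator
## bindings registered above, kept as comments so the generated module is unchanged.
## It assumes the ns.core / ns.mobility Python packages produced by these bindings and
## the old ns3::RandomVariable API (e.g. UniformVariable); treat names as illustrative:
##
##   import ns.core
##   import ns.mobility
##   alloc = ns.mobility.RandomDiscPositionAllocator()
##   alloc.SetX(50.0)                                   # disc centre (x)
##   alloc.SetY(50.0)                                   # disc centre (y)
##   alloc.SetRho(ns.core.UniformVariable(0.0, 30.0))   # radius distribution
##   alloc.SetTheta(ns.core.UniformVariable(0.0, 6.283))  # angle distribution
##   pos = alloc.GetNext()                              # returns an ns3::Vector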
def register_Ns3RandomRectanglePositionAllocator_methods(root_module, cls):
## position-allocator.h (module 'mobility'): ns3::RandomRectanglePositionAllocator::RandomRectanglePositionAllocator(ns3::RandomRectanglePositionAllocator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomRectanglePositionAllocator const &', 'arg0')])
## position-allocator.h (module 'mobility'): ns3::RandomRectanglePositionAllocator::RandomRectanglePositionAllocator() [constructor]
cls.add_constructor([])
## position-allocator.h (module 'mobility'): ns3::Vector ns3::RandomRectanglePositionAllocator::GetNext() const [member function]
cls.add_method('GetNext',
'ns3::Vector',
[],
is_const=True, is_virtual=True)
## position-allocator.h (module 'mobility'): static ns3::TypeId ns3::RandomRectanglePositionAllocator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## position-allocator.h (module 'mobility'): void ns3::RandomRectanglePositionAllocator::SetX(ns3::RandomVariable x) [member function]
cls.add_method('SetX',
'void',
[param('ns3::RandomVariable', 'x')])
## position-allocator.h (module 'mobility'): void ns3::RandomRectanglePositionAllocator::SetY(ns3::RandomVariable y) [member function]
cls.add_method('SetY',
'void',
[param('ns3::RandomVariable', 'y')])
return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
return
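## Editor's note -- a hedged sketch (comments only) of how the ns3::Time bindings above
## are typically exercised from Python; it assumes the ns.core module generated here and
## the usual Seconds/MilliSeconds helper functions from nstime.h:
##
##   import ns.core
##   t1 = ns.core.Seconds(1.5)
##   t2 = ns.core.MilliSeconds(200)
##   total = t1 + t2                       # binary '+' registered above
##   if t1 > t2:                           # comparison operators registered above
##       print(total.GetSeconds())         # 1.7
##   print(total.GetMilliSeconds())        # 1700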
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3UniformDiscPositionAllocator_methods(root_module, cls):
## position-allocator.h (module 'mobility'): ns3::UniformDiscPositionAllocator::UniformDiscPositionAllocator(ns3::UniformDiscPositionAllocator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UniformDiscPositionAllocator const &', 'arg0')])
## position-allocator.h (module 'mobility'): ns3::UniformDiscPositionAllocator::UniformDiscPositionAllocator() [constructor]
cls.add_constructor([])
## position-allocator.h (module 'mobility'): ns3::Vector ns3::UniformDiscPositionAllocator::GetNext() const [member function]
cls.add_method('GetNext',
'ns3::Vector',
[],
is_const=True, is_virtual=True)
## position-allocator.h (module 'mobility'): static ns3::TypeId ns3::UniformDiscPositionAllocator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## position-allocator.h (module 'mobility'): void ns3::UniformDiscPositionAllocator::SetRho(double rho) [member function]
cls.add_method('SetRho',
'void',
[param('double', 'rho')])
## position-allocator.h (module 'mobility'): void ns3::UniformDiscPositionAllocator::SetX(double x) [member function]
cls.add_method('SetX',
'void',
[param('double', 'x')])
## position-allocator.h (module 'mobility'): void ns3::UniformDiscPositionAllocator::SetY(double y) [member function]
cls.add_method('SetY',
'void',
[param('double', 'y')])
return
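## Editor's note -- a hedged usage sketch for UniformDiscPositionAllocator (comments
## only; assumes the ns.mobility module generated by these bindings):
##
##   alloc = ns.mobility.UniformDiscPositionAllocator()
##   alloc.SetX(0.0)
##   alloc.SetY(0.0)
##   alloc.SetRho(25.0)        # positions drawn uniformly inside a disc of radius 25 m
##   p = alloc.GetNext()       # ns3::Vector with z = 0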
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3BoxChecker_methods(root_module, cls):
## box.h (module 'mobility'): ns3::BoxChecker::BoxChecker() [constructor]
cls.add_constructor([])
## box.h (module 'mobility'): ns3::BoxChecker::BoxChecker(ns3::BoxChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BoxChecker const &', 'arg0')])
return
def register_Ns3BoxValue_methods(root_module, cls):
## box.h (module 'mobility'): ns3::BoxValue::BoxValue() [constructor]
cls.add_constructor([])
## box.h (module 'mobility'): ns3::BoxValue::BoxValue(ns3::BoxValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BoxValue const &', 'arg0')])
## box.h (module 'mobility'): ns3::BoxValue::BoxValue(ns3::Box const & value) [constructor]
cls.add_constructor([param('ns3::Box const &', 'value')])
## box.h (module 'mobility'): ns3::Ptr<ns3::AttributeValue> ns3::BoxValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## box.h (module 'mobility'): bool ns3::BoxValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## box.h (module 'mobility'): ns3::Box ns3::BoxValue::Get() const [member function]
cls.add_method('Get',
'ns3::Box',
[],
is_const=True)
## box.h (module 'mobility'): std::string ns3::BoxValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## box.h (module 'mobility'): void ns3::BoxValue::Set(ns3::Box const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Box const &', 'value')])
return
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3EventImpl_methods(root_module, cls):
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
cls.add_constructor([])
## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
cls.add_method('Invoke',
'void',
[])
## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
cls.add_method('IsCancelled',
'bool',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
cls.add_method('Notify',
'void',
[],
is_pure_virtual=True, visibility='protected', is_virtual=True)
return
def register_Ns3GridPositionAllocator_methods(root_module, cls):
## position-allocator.h (module 'mobility'): ns3::GridPositionAllocator::GridPositionAllocator(ns3::GridPositionAllocator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::GridPositionAllocator const &', 'arg0')])
## position-allocator.h (module 'mobility'): ns3::GridPositionAllocator::GridPositionAllocator() [constructor]
cls.add_constructor([])
## position-allocator.h (module 'mobility'): double ns3::GridPositionAllocator::GetDeltaX() const [member function]
cls.add_method('GetDeltaX',
'double',
[],
is_const=True)
## position-allocator.h (module 'mobility'): double ns3::GridPositionAllocator::GetDeltaY() const [member function]
cls.add_method('GetDeltaY',
'double',
[],
is_const=True)
## position-allocator.h (module 'mobility'): ns3::GridPositionAllocator::LayoutType ns3::GridPositionAllocator::GetLayoutType() const [member function]
cls.add_method('GetLayoutType',
'ns3::GridPositionAllocator::LayoutType',
[],
is_const=True)
## position-allocator.h (module 'mobility'): double ns3::GridPositionAllocator::GetMinX() const [member function]
cls.add_method('GetMinX',
'double',
[],
is_const=True)
## position-allocator.h (module 'mobility'): double ns3::GridPositionAllocator::GetMinY() const [member function]
cls.add_method('GetMinY',
'double',
[],
is_const=True)
## position-allocator.h (module 'mobility'): uint32_t ns3::GridPositionAllocator::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
## position-allocator.h (module 'mobility'): ns3::Vector ns3::GridPositionAllocator::GetNext() const [member function]
cls.add_method('GetNext',
'ns3::Vector',
[],
is_const=True, is_virtual=True)
## position-allocator.h (module 'mobility'): static ns3::TypeId ns3::GridPositionAllocator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## position-allocator.h (module 'mobility'): void ns3::GridPositionAllocator::SetDeltaX(double deltaX) [member function]
cls.add_method('SetDeltaX',
'void',
[param('double', 'deltaX')])
## position-allocator.h (module 'mobility'): void ns3::GridPositionAllocator::SetDeltaY(double deltaY) [member function]
cls.add_method('SetDeltaY',
'void',
[param('double', 'deltaY')])
## position-allocator.h (module 'mobility'): void ns3::GridPositionAllocator::SetLayoutType(ns3::GridPositionAllocator::LayoutType layoutType) [member function]
cls.add_method('SetLayoutType',
'void',
[param('ns3::GridPositionAllocator::LayoutType', 'layoutType')])
## position-allocator.h (module 'mobility'): void ns3::GridPositionAllocator::SetMinX(double xMin) [member function]
cls.add_method('SetMinX',
'void',
[param('double', 'xMin')])
## position-allocator.h (module 'mobility'): void ns3::GridPositionAllocator::SetMinY(double yMin) [member function]
cls.add_method('SetMinY',
'void',
[param('double', 'yMin')])
## position-allocator.h (module 'mobility'): void ns3::GridPositionAllocator::SetN(uint32_t n) [member function]
cls.add_method('SetN',
'void',
[param('uint32_t', 'n')])
return
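## Editor's note -- a hedged sketch for the GridPositionAllocator API registered above
## (comments only). The ROW_FIRST enum access path is an assumption about how pybindgen
## exposes GridPositionAllocator::LayoutType:
##
##   grid = ns.mobility.GridPositionAllocator()
##   grid.SetMinX(0.0)
##   grid.SetMinY(0.0)
##   grid.SetDeltaX(5.0)
##   grid.SetDeltaY(10.0)
##   grid.SetN(4)                                               # items per row (or column)
##   grid.SetLayoutType(ns.mobility.GridPositionAllocator.ROW_FIRST)
##   first = grid.GetNext()                                     # (0, 0, 0), then (5, 0, 0), ...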
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3ListPositionAllocator_methods(root_module, cls):
## position-allocator.h (module 'mobility'): ns3::ListPositionAllocator::ListPositionAllocator(ns3::ListPositionAllocator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ListPositionAllocator const &', 'arg0')])
## position-allocator.h (module 'mobility'): ns3::ListPositionAllocator::ListPositionAllocator() [constructor]
cls.add_constructor([])
## position-allocator.h (module 'mobility'): void ns3::ListPositionAllocator::Add(ns3::Vector v) [member function]
cls.add_method('Add',
'void',
[param('ns3::Vector', 'v')])
## position-allocator.h (module 'mobility'): ns3::Vector ns3::ListPositionAllocator::GetNext() const [member function]
cls.add_method('GetNext',
'ns3::Vector',
[],
is_const=True, is_virtual=True)
## position-allocator.h (module 'mobility'): static ns3::TypeId ns3::ListPositionAllocator::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
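## Editor's note -- a hedged sketch for ListPositionAllocator (comments only; assumes
## ns3::Vector is exposed as ns.core.Vector by the core bindings):
##
##   positions = ns.mobility.ListPositionAllocator()
##   positions.Add(ns.core.Vector(0.0, 0.0, 0.0))
##   positions.Add(ns.core.Vector(5.0, 0.0, 0.0))
##   p0 = positions.GetNext()   # vectors are returned in the order they were added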
def register_Ns3MobilityModel_methods(root_module, cls):
## mobility-model.h (module 'mobility'): ns3::MobilityModel::MobilityModel(ns3::MobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MobilityModel const &', 'arg0')])
## mobility-model.h (module 'mobility'): ns3::MobilityModel::MobilityModel() [constructor]
cls.add_constructor([])
## mobility-model.h (module 'mobility'): double ns3::MobilityModel::GetDistanceFrom(ns3::Ptr<const ns3::MobilityModel> position) const [member function]
cls.add_method('GetDistanceFrom',
'double',
[param('ns3::Ptr< ns3::MobilityModel const >', 'position')],
is_const=True)
## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::GetPosition() const [member function]
cls.add_method('GetPosition',
'ns3::Vector',
[],
is_const=True)
## mobility-model.h (module 'mobility'): double ns3::MobilityModel::GetRelativeSpeed(ns3::Ptr<const ns3::MobilityModel> other) const [member function]
cls.add_method('GetRelativeSpeed',
'double',
[param('ns3::Ptr< ns3::MobilityModel const >', 'other')],
is_const=True)
## mobility-model.h (module 'mobility'): static ns3::TypeId ns3::MobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::GetVelocity() const [member function]
cls.add_method('GetVelocity',
'ns3::Vector',
[],
is_const=True)
## mobility-model.h (module 'mobility'): void ns3::MobilityModel::SetPosition(ns3::Vector const & position) [member function]
cls.add_method('SetPosition',
'void',
[param('ns3::Vector const &', 'position')])
## mobility-model.h (module 'mobility'): void ns3::MobilityModel::NotifyCourseChange() const [member function]
cls.add_method('NotifyCourseChange',
'void',
[],
is_const=True, visibility='protected')
## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## mobility-model.h (module 'mobility'): ns3::Vector ns3::MobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## mobility-model.h (module 'mobility'): void ns3::MobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
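## Editor's note -- a hedged sketch of querying the MobilityModel bindings above from a
## node (comments only). The GetObject/GetTypeId aggregation lookup mirrors the pattern
## used in ns-3 Python examples; 'node' and 'other_mob' are placeholders:
##
##   mob = node.GetObject(ns.mobility.MobilityModel.GetTypeId())
##   print(mob.GetPosition().x, mob.GetVelocity().x)
##   mob.SetPosition(ns.core.Vector(10.0, 0.0, 0.0))
##   d = mob.GetDistanceFrom(other_mob)   # other_mob: another MobilityModel instance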
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Node_methods(root_module, cls):
## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Node const &', 'arg0')])
## node.h (module 'network'): ns3::Node::Node() [constructor]
cls.add_constructor([])
## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
cls.add_constructor([param('uint32_t', 'systemId')])
## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
cls.add_method('AddApplication',
'uint32_t',
[param('ns3::Ptr< ns3::Application >', 'application')])
## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddDevice',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
cls.add_method('ChecksumEnabled',
'bool',
[],
is_static=True)
## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
cls.add_method('GetApplication',
'ns3::Ptr< ns3::Application >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
cls.add_method('GetNApplications',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('RegisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
cls.add_method('RegisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('UnregisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
cls.add_method('UnregisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## node.h (module 'network'): void ns3::Node::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
return
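## Editor's note -- a hedged sketch for the Node bindings above (comments only; assumes
## the ns.network module generated by these bindings, and 'dev' is some NetDevice):
##
##   node = ns.network.Node()
##   ifindex = node.AddDevice(dev)          # returns the new device's interface index
##   print(node.GetId(), node.GetNDevices())
##   first_dev = node.GetDevice(0)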
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
cls.add_method('Get',
'ns3::ObjectFactory',
[],
is_const=True)
## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::ObjectFactory const &', 'value')])
return
def register_Ns3RandomDirection2dMobilityModel_methods(root_module, cls):
## random-direction-2d-mobility-model.h (module 'mobility'): ns3::RandomDirection2dMobilityModel::RandomDirection2dMobilityModel() [constructor]
cls.add_constructor([])
## random-direction-2d-mobility-model.h (module 'mobility'): ns3::RandomDirection2dMobilityModel::RandomDirection2dMobilityModel(ns3::RandomDirection2dMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomDirection2dMobilityModel const &', 'arg0')])
## random-direction-2d-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::RandomDirection2dMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-direction-2d-mobility-model.h (module 'mobility'): void ns3::RandomDirection2dMobilityModel::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## random-direction-2d-mobility-model.h (module 'mobility'): ns3::Vector ns3::RandomDirection2dMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## random-direction-2d-mobility-model.h (module 'mobility'): ns3::Vector ns3::RandomDirection2dMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## random-direction-2d-mobility-model.h (module 'mobility'): void ns3::RandomDirection2dMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
## random-direction-2d-mobility-model.h (module 'mobility'): void ns3::RandomDirection2dMobilityModel::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3RandomVariableChecker_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker(ns3::RandomVariableChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomVariableChecker const &', 'arg0')])
return
def register_Ns3RandomVariableValue_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariableValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomVariableValue const &', 'arg0')])
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariable const & value) [constructor]
cls.add_constructor([param('ns3::RandomVariable const &', 'value')])
## random-variable.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::RandomVariableValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## random-variable.h (module 'core'): bool ns3::RandomVariableValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## random-variable.h (module 'core'): ns3::RandomVariable ns3::RandomVariableValue::Get() const [member function]
cls.add_method('Get',
'ns3::RandomVariable',
[],
is_const=True)
## random-variable.h (module 'core'): std::string ns3::RandomVariableValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## random-variable.h (module 'core'): void ns3::RandomVariableValue::Set(ns3::RandomVariable const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::RandomVariable const &', 'value')])
return
def register_Ns3RandomWalk2dMobilityModel_methods(root_module, cls):
## random-walk-2d-mobility-model.h (module 'mobility'): ns3::RandomWalk2dMobilityModel::RandomWalk2dMobilityModel() [constructor]
cls.add_constructor([])
## random-walk-2d-mobility-model.h (module 'mobility'): ns3::RandomWalk2dMobilityModel::RandomWalk2dMobilityModel(ns3::RandomWalk2dMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomWalk2dMobilityModel const &', 'arg0')])
## random-walk-2d-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::RandomWalk2dMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-walk-2d-mobility-model.h (module 'mobility'): void ns3::RandomWalk2dMobilityModel::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## random-walk-2d-mobility-model.h (module 'mobility'): ns3::Vector ns3::RandomWalk2dMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## random-walk-2d-mobility-model.h (module 'mobility'): ns3::Vector ns3::RandomWalk2dMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## random-walk-2d-mobility-model.h (module 'mobility'): void ns3::RandomWalk2dMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
## random-walk-2d-mobility-model.h (module 'mobility'): void ns3::RandomWalk2dMobilityModel::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3RandomWaypointMobilityModel_methods(root_module, cls):
## random-waypoint-mobility-model.h (module 'mobility'): ns3::RandomWaypointMobilityModel::RandomWaypointMobilityModel() [constructor]
cls.add_constructor([])
## random-waypoint-mobility-model.h (module 'mobility'): ns3::RandomWaypointMobilityModel::RandomWaypointMobilityModel(ns3::RandomWaypointMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomWaypointMobilityModel const &', 'arg0')])
## random-waypoint-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::RandomWaypointMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## random-waypoint-mobility-model.h (module 'mobility'): void ns3::RandomWaypointMobilityModel::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## random-waypoint-mobility-model.h (module 'mobility'): ns3::Vector ns3::RandomWaypointMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## random-waypoint-mobility-model.h (module 'mobility'): ns3::Vector ns3::RandomWaypointMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## random-waypoint-mobility-model.h (module 'mobility'): void ns3::RandomWaypointMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_Ns3RectangleChecker_methods(root_module, cls):
## rectangle.h (module 'mobility'): ns3::RectangleChecker::RectangleChecker() [constructor]
cls.add_constructor([])
## rectangle.h (module 'mobility'): ns3::RectangleChecker::RectangleChecker(ns3::RectangleChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RectangleChecker const &', 'arg0')])
return
def register_Ns3RectangleValue_methods(root_module, cls):
## rectangle.h (module 'mobility'): ns3::RectangleValue::RectangleValue() [constructor]
cls.add_constructor([])
## rectangle.h (module 'mobility'): ns3::RectangleValue::RectangleValue(ns3::RectangleValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RectangleValue const &', 'arg0')])
## rectangle.h (module 'mobility'): ns3::RectangleValue::RectangleValue(ns3::Rectangle const & value) [constructor]
cls.add_constructor([param('ns3::Rectangle const &', 'value')])
## rectangle.h (module 'mobility'): ns3::Ptr<ns3::AttributeValue> ns3::RectangleValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## rectangle.h (module 'mobility'): bool ns3::RectangleValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## rectangle.h (module 'mobility'): ns3::Rectangle ns3::RectangleValue::Get() const [member function]
cls.add_method('Get',
'ns3::Rectangle',
[],
is_const=True)
## rectangle.h (module 'mobility'): std::string ns3::RectangleValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## rectangle.h (module 'mobility'): void ns3::RectangleValue::Set(ns3::Rectangle const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Rectangle const &', 'value')])
return
def register_Ns3SteadyStateRandomWaypointMobilityModel_methods(root_module, cls):
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): ns3::SteadyStateRandomWaypointMobilityModel::SteadyStateRandomWaypointMobilityModel(ns3::SteadyStateRandomWaypointMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SteadyStateRandomWaypointMobilityModel const &', 'arg0')])
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): ns3::SteadyStateRandomWaypointMobilityModel::SteadyStateRandomWaypointMobilityModel() [constructor]
cls.add_constructor([])
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::SteadyStateRandomWaypointMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): void ns3::SteadyStateRandomWaypointMobilityModel::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): ns3::Vector ns3::SteadyStateRandomWaypointMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): ns3::Vector ns3::SteadyStateRandomWaypointMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## steady-state-random-waypoint-mobility-model.h (module 'mobility'): void ns3::SteadyStateRandomWaypointMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_Ns3TimeChecker_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker(ns3::TimeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')])
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3Vector2DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')])
return
def register_Ns3Vector2DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector2D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector2D const &', 'value')])
return
def register_Ns3Vector3DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')])
return
def register_Ns3Vector3DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D const & value) [constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector3D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector3D const &', 'value')])
return
def register_Ns3WaypointChecker_methods(root_module, cls):
## waypoint.h (module 'mobility'): ns3::WaypointChecker::WaypointChecker() [constructor]
cls.add_constructor([])
## waypoint.h (module 'mobility'): ns3::WaypointChecker::WaypointChecker(ns3::WaypointChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WaypointChecker const &', 'arg0')])
return
def register_Ns3WaypointMobilityModel_methods(root_module, cls):
## waypoint-mobility-model.h (module 'mobility'): ns3::WaypointMobilityModel::WaypointMobilityModel(ns3::WaypointMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WaypointMobilityModel const &', 'arg0')])
## waypoint-mobility-model.h (module 'mobility'): ns3::WaypointMobilityModel::WaypointMobilityModel() [constructor]
cls.add_constructor([])
## waypoint-mobility-model.h (module 'mobility'): void ns3::WaypointMobilityModel::AddWaypoint(ns3::Waypoint const & waypoint) [member function]
cls.add_method('AddWaypoint',
'void',
[param('ns3::Waypoint const &', 'waypoint')])
## waypoint-mobility-model.h (module 'mobility'): void ns3::WaypointMobilityModel::EndMobility() [member function]
cls.add_method('EndMobility',
'void',
[])
## waypoint-mobility-model.h (module 'mobility'): ns3::Waypoint ns3::WaypointMobilityModel::GetNextWaypoint() const [member function]
cls.add_method('GetNextWaypoint',
'ns3::Waypoint',
[],
is_const=True)
## waypoint-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::WaypointMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## waypoint-mobility-model.h (module 'mobility'): uint32_t ns3::WaypointMobilityModel::WaypointsLeft() const [member function]
cls.add_method('WaypointsLeft',
'uint32_t',
[],
is_const=True)
## waypoint-mobility-model.h (module 'mobility'): void ns3::WaypointMobilityModel::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## waypoint-mobility-model.h (module 'mobility'): ns3::Vector ns3::WaypointMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## waypoint-mobility-model.h (module 'mobility'): ns3::Vector ns3::WaypointMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## waypoint-mobility-model.h (module 'mobility'): void ns3::WaypointMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_Ns3WaypointValue_methods(root_module, cls):
## waypoint.h (module 'mobility'): ns3::WaypointValue::WaypointValue() [constructor]
cls.add_constructor([])
## waypoint.h (module 'mobility'): ns3::WaypointValue::WaypointValue(ns3::WaypointValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WaypointValue const &', 'arg0')])
## waypoint.h (module 'mobility'): ns3::WaypointValue::WaypointValue(ns3::Waypoint const & value) [constructor]
cls.add_constructor([param('ns3::Waypoint const &', 'value')])
## waypoint.h (module 'mobility'): ns3::Ptr<ns3::AttributeValue> ns3::WaypointValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## waypoint.h (module 'mobility'): bool ns3::WaypointValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## waypoint.h (module 'mobility'): ns3::Waypoint ns3::WaypointValue::Get() const [member function]
cls.add_method('Get',
'ns3::Waypoint',
[],
is_const=True)
## waypoint.h (module 'mobility'): std::string ns3::WaypointValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## waypoint.h (module 'mobility'): void ns3::WaypointValue::Set(ns3::Waypoint const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Waypoint const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
def register_Ns3ConstantAccelerationMobilityModel_methods(root_module, cls):
## constant-acceleration-mobility-model.h (module 'mobility'): ns3::ConstantAccelerationMobilityModel::ConstantAccelerationMobilityModel(ns3::ConstantAccelerationMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantAccelerationMobilityModel const &', 'arg0')])
## constant-acceleration-mobility-model.h (module 'mobility'): ns3::ConstantAccelerationMobilityModel::ConstantAccelerationMobilityModel() [constructor]
cls.add_constructor([])
## constant-acceleration-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::ConstantAccelerationMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## constant-acceleration-mobility-model.h (module 'mobility'): void ns3::ConstantAccelerationMobilityModel::SetVelocityAndAcceleration(ns3::Vector const & velocity, ns3::Vector const & acceleration) [member function]
cls.add_method('SetVelocityAndAcceleration',
'void',
[param('ns3::Vector const &', 'velocity'), param('ns3::Vector const &', 'acceleration')])
## constant-acceleration-mobility-model.h (module 'mobility'): ns3::Vector ns3::ConstantAccelerationMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## constant-acceleration-mobility-model.h (module 'mobility'): ns3::Vector ns3::ConstantAccelerationMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## constant-acceleration-mobility-model.h (module 'mobility'): void ns3::ConstantAccelerationMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_Ns3ConstantPositionMobilityModel_methods(root_module, cls):
## constant-position-mobility-model.h (module 'mobility'): ns3::ConstantPositionMobilityModel::ConstantPositionMobilityModel(ns3::ConstantPositionMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantPositionMobilityModel const &', 'arg0')])
## constant-position-mobility-model.h (module 'mobility'): ns3::ConstantPositionMobilityModel::ConstantPositionMobilityModel() [constructor]
cls.add_constructor([])
## constant-position-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::ConstantPositionMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## constant-position-mobility-model.h (module 'mobility'): ns3::Vector ns3::ConstantPositionMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## constant-position-mobility-model.h (module 'mobility'): ns3::Vector ns3::ConstantPositionMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## constant-position-mobility-model.h (module 'mobility'): void ns3::ConstantPositionMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_Ns3ConstantVelocityMobilityModel_methods(root_module, cls):
## constant-velocity-mobility-model.h (module 'mobility'): ns3::ConstantVelocityMobilityModel::ConstantVelocityMobilityModel(ns3::ConstantVelocityMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantVelocityMobilityModel const &', 'arg0')])
## constant-velocity-mobility-model.h (module 'mobility'): ns3::ConstantVelocityMobilityModel::ConstantVelocityMobilityModel() [constructor]
cls.add_constructor([])
## constant-velocity-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::ConstantVelocityMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## constant-velocity-mobility-model.h (module 'mobility'): void ns3::ConstantVelocityMobilityModel::SetVelocity(ns3::Vector const & speed) [member function]
cls.add_method('SetVelocity',
'void',
[param('ns3::Vector const &', 'speed')])
## constant-velocity-mobility-model.h (module 'mobility'): ns3::Vector ns3::ConstantVelocityMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## constant-velocity-mobility-model.h (module 'mobility'): ns3::Vector ns3::ConstantVelocityMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## constant-velocity-mobility-model.h (module 'mobility'): void ns3::ConstantVelocityMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_Ns3GaussMarkovMobilityModel_methods(root_module, cls):
## gauss-markov-mobility-model.h (module 'mobility'): ns3::GaussMarkovMobilityModel::GaussMarkovMobilityModel(ns3::GaussMarkovMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::GaussMarkovMobilityModel const &', 'arg0')])
## gauss-markov-mobility-model.h (module 'mobility'): ns3::GaussMarkovMobilityModel::GaussMarkovMobilityModel() [constructor]
cls.add_constructor([])
## gauss-markov-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::GaussMarkovMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## gauss-markov-mobility-model.h (module 'mobility'): void ns3::GaussMarkovMobilityModel::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## gauss-markov-mobility-model.h (module 'mobility'): ns3::Vector ns3::GaussMarkovMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## gauss-markov-mobility-model.h (module 'mobility'): ns3::Vector ns3::GaussMarkovMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## gauss-markov-mobility-model.h (module 'mobility'): void ns3::GaussMarkovMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_Ns3HierarchicalMobilityModel_methods(root_module, cls):
## hierarchical-mobility-model.h (module 'mobility'): ns3::HierarchicalMobilityModel::HierarchicalMobilityModel(ns3::HierarchicalMobilityModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::HierarchicalMobilityModel const &', 'arg0')])
## hierarchical-mobility-model.h (module 'mobility'): ns3::HierarchicalMobilityModel::HierarchicalMobilityModel() [constructor]
cls.add_constructor([])
## hierarchical-mobility-model.h (module 'mobility'): ns3::Ptr<ns3::MobilityModel> ns3::HierarchicalMobilityModel::GetChild() const [member function]
cls.add_method('GetChild',
'ns3::Ptr< ns3::MobilityModel >',
[],
is_const=True)
## hierarchical-mobility-model.h (module 'mobility'): ns3::Ptr<ns3::MobilityModel> ns3::HierarchicalMobilityModel::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::Ptr< ns3::MobilityModel >',
[],
is_const=True)
## hierarchical-mobility-model.h (module 'mobility'): static ns3::TypeId ns3::HierarchicalMobilityModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## hierarchical-mobility-model.h (module 'mobility'): void ns3::HierarchicalMobilityModel::SetChild(ns3::Ptr<ns3::MobilityModel> model) [member function]
cls.add_method('SetChild',
'void',
[param('ns3::Ptr< ns3::MobilityModel >', 'model')])
## hierarchical-mobility-model.h (module 'mobility'): void ns3::HierarchicalMobilityModel::SetParent(ns3::Ptr<ns3::MobilityModel> model) [member function]
cls.add_method('SetParent',
'void',
[param('ns3::Ptr< ns3::MobilityModel >', 'model')])
## hierarchical-mobility-model.h (module 'mobility'): ns3::Vector ns3::HierarchicalMobilityModel::DoGetPosition() const [member function]
cls.add_method('DoGetPosition',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## hierarchical-mobility-model.h (module 'mobility'): ns3::Vector ns3::HierarchicalMobilityModel::DoGetVelocity() const [member function]
cls.add_method('DoGetVelocity',
'ns3::Vector',
[],
is_const=True, visibility='private', is_virtual=True)
## hierarchical-mobility-model.h (module 'mobility'): void ns3::HierarchicalMobilityModel::DoSetPosition(ns3::Vector const & position) [member function]
cls.add_method('DoSetPosition',
'void',
[param('ns3::Vector const &', 'position')],
visibility='private', is_virtual=True)
return
def register_functions(root_module):
module = root_module
## box.h (module 'mobility'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeBoxChecker() [free function]
module.add_function('MakeBoxChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## rectangle.h (module 'mobility'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeRectangleChecker() [free function]
module.add_function('MakeRectangleChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## waypoint.h (module 'mobility'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeWaypointChecker() [free function]
module.add_function('MakeWaypointChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
|
fnordahl/nova
|
refs/heads/master
|
nova/scheduler/manager.py
|
8
|
# Copyright (c) 2010 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler Service
"""
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from oslo_service import periodic_task
from oslo_utils import importutils
from nova import exception
from nova import manager
from nova import quota
LOG = logging.getLogger(__name__)
scheduler_driver_opts = [
cfg.StrOpt('scheduler_driver',
default='nova.scheduler.filter_scheduler.FilterScheduler',
help='Default driver to use for the scheduler'),
cfg.IntOpt('scheduler_driver_task_period',
default=60,
help='How often (in seconds) to run periodic tasks in '
'the scheduler driver of your choice. '
'Please note this is likely to interact with the value '
'of service_down_time, but exactly how they interact '
'will depend on your choice of scheduler driver.'),
]
CONF = cfg.CONF
CONF.register_opts(scheduler_driver_opts)
QUOTAS = quota.QUOTAS
class SchedulerManager(manager.Manager):
"""Chooses a host to run instances on."""
target = messaging.Target(version='4.2')
def __init__(self, scheduler_driver=None, *args, **kwargs):
if not scheduler_driver:
scheduler_driver = CONF.scheduler_driver
self.driver = importutils.import_object(scheduler_driver)
super(SchedulerManager, self).__init__(service_name='scheduler',
*args, **kwargs)
@periodic_task.periodic_task
def _expire_reservations(self, context):
QUOTAS.expire(context)
@periodic_task.periodic_task(spacing=CONF.scheduler_driver_task_period,
run_immediately=True)
def _run_periodic_tasks(self, context):
self.driver.run_periodic_tasks(context)
@messaging.expected_exceptions(exception.NoValidHost)
def select_destinations(self, context, request_spec, filter_properties):
"""Returns destinations(s) best suited for this request_spec and
filter_properties.
The result should be a list of dicts with 'host', 'nodename' and
'limits' as keys.
"""
dests = self.driver.select_destinations(context, request_spec,
filter_properties)
return jsonutils.to_primitive(dests)
def update_aggregates(self, ctxt, aggregates):
"""Updates HostManager internal aggregates information.
:param aggregates: Aggregate(s) to update
:type aggregates: :class:`nova.objects.Aggregate`
or :class:`nova.objects.AggregateList`
"""
# NOTE(sbauza): We're dropping the user context now as we don't need it
self.driver.host_manager.update_aggregates(aggregates)
def delete_aggregate(self, ctxt, aggregate):
"""Deletes HostManager internal information about a specific aggregate.
:param aggregate: Aggregate to delete
:type aggregate: :class:`nova.objects.Aggregate`
"""
# NOTE(sbauza): We're dropping the user context now as we don't need it
self.driver.host_manager.delete_aggregate(aggregate)
def update_instance_info(self, context, host_name, instance_info):
"""Receives information about changes to a host's instances, and
updates the driver's HostManager with that information.
"""
self.driver.host_manager.update_instance_info(context, host_name,
instance_info)
def delete_instance_info(self, context, host_name, instance_uuid):
"""Receives information about the deletion of one of a host's
instances, and updates the driver's HostManager with that information.
"""
self.driver.host_manager.delete_instance_info(context, host_name,
instance_uuid)
def sync_instance_info(self, context, host_name, instance_uuids):
"""Receives a sync request from a host, and passes it on to the
driver's HostManager.
"""
self.driver.host_manager.sync_instance_info(context, host_name,
instance_uuids)
|
neiudemo1/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_complex_multi_apps/app1/1_auto.py
|
1155
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
operations = [
migrations.RunPython(migrations.RunPython.noop)
]
|
mujiansu/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_ctypes.py
|
50
|
import unittest
from test.test_support import run_unittest
import ctypes.test
def test_main():
skipped, testcases = ctypes.test.get_tests(ctypes.test, "test_*.py", verbosity=0)
suites = [unittest.makeSuite(t) for t in testcases]
run_unittest(unittest.TestSuite(suites))
if __name__ == "__main__":
test_main()
|
rv816/wagtail
|
refs/heads/master
|
wagtail/wagtailadmin/views/generic.py
|
14
|
from django.core.urlresolvers import reverse
from django.shortcuts import render, redirect, get_object_or_404
from django.utils.translation import ugettext as _, ugettext_lazy as __
from django.views.generic.base import View
from wagtail.wagtailadmin import messages
from wagtail.wagtailadmin.utils import permission_denied
class PermissionCheckedMixin(object):
"""
Mixin for class-based views to enforce permission checks.
Subclasses should set either of the following class properties:
* permission_required (a single permission string)
* any_permission_required (a list of permission strings - the user must have
one or more of those permissions)
"""
permission_required = None
any_permission_required = None
def dispatch(self, request, *args, **kwargs):
if self.permission_required is not None:
if not request.user.has_perm(self.permission_required):
return permission_denied(request)
if self.any_permission_required is not None:
has_permission = False
for perm in self.any_permission_required:
if request.user.has_perm(perm):
has_permission = True
break
if not has_permission:
return permission_denied(request)
return super(PermissionCheckedMixin, self).dispatch(request, *args, **kwargs)
class IndexView(PermissionCheckedMixin, View):
context_object_name = None
def get_queryset(self):
return self.model.objects.all()
def get(self, request):
object_list = self.get_queryset()
context = {
'view': self,
'object_list': object_list,
'can_add': self.request.user.has_perm(self.add_permission_name),
}
if self.context_object_name:
context[self.context_object_name] = object_list
return render(request, self.template_name, context)
class CreateView(PermissionCheckedMixin, View):
def get_add_url(self):
return reverse(self.add_url_name)
def get(self, request):
self.form = self.form_class()
return self.render_to_response()
def post(self, request):
self.form = self.form_class(request.POST)
if self.form.is_valid():
instance = self.form.save()
messages.success(request, self.success_message.format(instance), buttons=[
messages.button(reverse(self.edit_url_name, args=(instance.id,)), _('Edit'))
])
return redirect(self.index_url_name)
else:
return self.render_to_response()
def render_to_response(self):
return render(self.request, self.template_name, {
'view': self,
'form': self.form,
})
class EditView(PermissionCheckedMixin, View):
page_title = __("Editing")
context_object_name = None
def get_page_subtitle(self):
return str(self.instance)
def get_edit_url(self):
return reverse(self.edit_url_name, args=(self.instance.id,))
def get_delete_url(self):
return reverse(self.delete_url_name, args=(self.instance.id,))
def get(self, request, instance_id):
self.instance = get_object_or_404(self.model, id=instance_id)
self.form = self.form_class(instance=self.instance)
return self.render_to_response()
def post(self, request, instance_id):
self.instance = get_object_or_404(self.model, id=instance_id)
self.form = self.form_class(request.POST, instance=self.instance)
if self.form.is_valid():
self.form.save()
messages.success(request, self.success_message.format(self.instance), buttons=[
messages.button(reverse(self.edit_url_name, args=(self.instance.id,)), _('Edit'))
])
return redirect(self.index_url_name)
else:
messages.error(request, self.error_message)
return self.render_to_response()
def render_to_response(self):
context = {
'view': self,
'object': self.instance,
'form': self.form,
'can_delete': self.request.user.has_perm(self.delete_permission_name),
}
if self.context_object_name:
context[self.context_object_name] = self.instance
return render(self.request, self.template_name, context)
class DeleteView(PermissionCheckedMixin, View):
template_name = 'wagtailadmin/generic/confirm_delete.html'
context_object_name = None
def get_page_subtitle(self):
return str(self.instance)
def get_delete_url(self):
return reverse(self.delete_url_name, args=(self.instance.id,))
def get(self, request, instance_id):
self.instance = get_object_or_404(self.model, id=instance_id)
context = {
'view': self,
'object': self.instance,
}
if self.context_object_name:
context[self.context_object_name] = self.instance
return render(request, self.template_name, context)
def post(self, request, instance_id):
self.instance = get_object_or_404(self.model, id=instance_id)
self.instance.delete()
messages.success(request, self.success_message.format(self.instance))
return redirect(self.index_url_name)
|
yan12125/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/roxwel.py
|
73
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import unified_strdate, determine_ext
class RoxwelIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?roxwel\.com/player/(?P<filename>.+?)(\.|\?|$)'
_TEST = {
'url': 'http://www.roxwel.com/player/passionpittakeawalklive.html',
'info_dict': {
'id': 'passionpittakeawalklive',
'ext': 'flv',
'title': 'Take A Walk (live)',
'uploader': 'Passion Pit',
'uploader_id': 'passionpit',
'upload_date': '20120928',
'description': 'Passion Pit performs "Take A Walk\" live at The Backyard in Austin, Texas. ',
},
'params': {
# rtmp download
'skip_download': True,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
filename = mobj.group('filename')
info_url = 'http://www.roxwel.com/api/videos/%s' % filename
info = self._download_json(info_url, filename)
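        # info['media_rates'] lists rate identifiers prefixed with 'flv_'
        # (e.g. a hypothetical 'flv_560'); keep only those entries and pick
        # the highest numeric bitrate.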
rtmp_rates = sorted([int(r.replace('flv_', '')) for r in info['media_rates'] if r.startswith('flv_')])
best_rate = rtmp_rates[-1]
url_page_url = 'http://roxwel.com/pl_one_time.php?filename=%s&quality=%s' % (filename, best_rate)
rtmp_url = self._download_webpage(url_page_url, filename, 'Downloading video url')
ext = determine_ext(rtmp_url)
if ext == 'f4v':
rtmp_url = rtmp_url.replace(filename, 'mp4:%s' % filename)
return {
'id': filename,
'title': info['title'],
'url': rtmp_url,
'ext': 'flv',
'description': info['description'],
'thumbnail': info.get('player_image_url') or info.get('image_url_large'),
'uploader': info['artist'],
'uploader_id': info['artistname'],
'upload_date': unified_strdate(info['dbdate']),
}
|
FlowFX/reggae-cdmx
|
refs/heads/master
|
config/urls.py
|
1
|
"""Reggae CDMX URL Configuration."""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponseNotFound
from django.urls import include, path
from app.events.views import HomePage
from django.views.generic import RedirectView, TemplateView
urlpatterns = [
path('admin/', admin.site.urls),
# path(r'^', include('django.contrib.auth.urls')),
path('accounts/signup/', HttpResponseNotFound),
path('accounts/', include('allauth.urls')),
path(
'accounts/profile/',
TemplateView.as_view(template_name='account/profile.html'),
name="account_profile",
),
path('', HomePage.as_view(), name='index'),
# Events
path('events/', include('app.events.urls', namespace='events')),
# Venues
path('venues/', include('app.venues.urls', namespace='venues')),
]
# Permanent redirects
urlpatterns += [
path('login/', RedirectView.as_view(url='/accounts/login/', permanent=True)),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [path('__debug__/', include(debug_toolbar.urls))] \
+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) \
+ urlpatterns
|
TshepangRas/tshilo-dikotla
|
refs/heads/develop
|
td_infant/apps.py
|
2
|
from django.apps import AppConfig
class TdInfantConfig(AppConfig):
name = 'td_infant'
verbose_name = 'td_infant'
|
houzhenggang/TcpRoute
|
refs/heads/master
|
win_inet_pton.py
|
2
|
# https://github.com/hickeroar/win_inet_pton/blob/master/win_inet_pton.py
# This software is released into the public domain. Anyone is free to copy,
# modify, publish, use, compile, sell, or distribute this software,
# either in source code form or as a compiled binary, for any purpose,
# commercial or non-commercial, and by any means.
#http://stackoverflow.com/questions/5619685/conversion-from-ip-string-to-integer-and-backward-in-python
import socket
import ctypes
import os
class sockaddr(ctypes.Structure):
_fields_ = [("sa_family", ctypes.c_short),
("__pad1", ctypes.c_ushort),
("ipv4_addr", ctypes.c_byte * 4),
("ipv6_addr", ctypes.c_byte * 16),
("__pad2", ctypes.c_ulong)]
if hasattr(ctypes, 'windll'):
WSAStringToAddressA = ctypes.windll.ws2_32.WSAStringToAddressA
WSAAddressToStringA = ctypes.windll.ws2_32.WSAAddressToStringA
else:
def not_windows():
raise SystemError(
"Invalid platform. ctypes.windll must be available."
)
WSAStringToAddressA = not_windows
WSAAddressToStringA = not_windows
def inet_pton(address_family, ip_string):
addr = sockaddr()
addr.sa_family = address_family
addr_size = ctypes.c_int(ctypes.sizeof(addr))
if WSAStringToAddressA(
ip_string,
address_family,
None,
ctypes.byref(addr),
ctypes.byref(addr_size)
) != 0:
raise socket.error(ctypes.FormatError())
if address_family == socket.AF_INET:
return ctypes.string_at(addr.ipv4_addr, 4)
if address_family == socket.AF_INET6:
return ctypes.string_at(addr.ipv6_addr, 16)
raise socket.error('unknown address family')
def inet_ntop(address_family, packed_ip):
addr = sockaddr()
addr.sa_family = address_family
addr_size = ctypes.c_int(ctypes.sizeof(addr))
ip_string = ctypes.create_string_buffer(128)
ip_string_size = ctypes.c_int(ctypes.sizeof(ip_string))
if address_family == socket.AF_INET:
if len(packed_ip) != ctypes.sizeof(addr.ipv4_addr):
raise socket.error('packed IP wrong length for inet_ntoa')
ctypes.memmove(addr.ipv4_addr, packed_ip, 4)
elif address_family == socket.AF_INET6:
if len(packed_ip) != ctypes.sizeof(addr.ipv6_addr):
raise socket.error('packed IP wrong length for inet_ntoa')
ctypes.memmove(addr.ipv6_addr, packed_ip, 16)
else:
raise socket.error('unknown address family')
if WSAAddressToStringA(
ctypes.byref(addr),
addr_size,
None,
ip_string,
ctypes.byref(ip_string_size)
) != 0:
raise socket.error(ctypes.FormatError())
return ip_string[:ip_string_size.value - 1]
# Adding our two functions to the socket library
if os.name == 'nt':
socket.inet_pton = inet_pton
socket.inet_ntop = inet_ntop
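# Illustrative usage sketch (not part of the original module): once this module
# has been imported on Windows, the patched functions behave like their POSIX
# counterparts, e.g.
#     import socket
#     import win_inet_pton
#     packed = socket.inet_pton(socket.AF_INET, '127.0.0.1')
#     assert socket.inet_ntop(socket.AF_INET, packed) == '127.0.0.1'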
|
BlueBrain/Poppler
|
refs/heads/bbp
|
regtest/commands/run-tests.py
|
15
|
# run-tests.py
#
# Copyright (C) 2011 Carlos Garcia Campos <carlosgc@gnome.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from commands import Command, register_command
from TestRun import TestRun
from Timer import Timer
from Config import Config
from Printer import get_printer
import os
import tempfile
class RunTests(Command):
name = 'run-tests'
usage_args = '[ options ... ] tests '
description = 'Run tests for documents'
def __init__(self):
Command.__init__(self)
parser = self._get_args_parser()
parser.add_argument('--refs-dir',
action = 'store', dest = 'refs_dir', default = os.path.join(tempfile.gettempdir(), 'refs'),
help = 'Directory containing the references')
parser.add_argument('-o', '--out-dir',
action = 'store', dest = 'out_dir', default = os.path.join(tempfile.gettempdir(), 'out'),
help = 'Directory where test results will be created')
parser.add_argument('--docs-dir',
action = 'store', dest = 'docs_dir',
help = 'Base documents directory')
parser.add_argument('--keep-results',
action = 'store_true', dest = 'keep_results', default = False,
help = 'Do not remove result files for passing tests')
parser.add_argument('--create-diffs',
action = 'store_true', dest = 'create_diffs', default = False,
help = 'Create diff files for failed tests')
parser.add_argument('--update-refs',
action = 'store_true', dest = 'update_refs', default = False,
help = 'Update references for failed tests')
parser.add_argument('tests', metavar = 'TEST', nargs = '+',
help = 'Tests directory or individual test to run')
def run(self, options):
config = Config()
config.keep_results = options['keep_results']
config.create_diffs = options['create_diffs']
config.update_refs = options['update_refs']
t = Timer()
docs = options['tests']
docs_dir = options['docs_dir']
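        # Resolve the base documents directory: when a single directory is given
        # and --docs-dir was not passed, that directory becomes the base and every
        # test inside it is run; otherwise fall back to the dirname of a single
        # test file, or to the common path prefix of multiple tests.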
if len(docs) == 1:
if os.path.isdir(docs[0]):
if docs_dir is None:
docs_dir = docs[0]
if docs_dir == docs[0]:
docs = []
else:
if docs_dir is None:
docs_dir = os.path.dirname(docs[0])
else:
if docs_dir is None:
docs_dir = os.path.commonprefix(docs).rpartition(os.path.sep)[0]
tests = TestRun(docs_dir, options['refs_dir'], options['out_dir'])
status = tests.run_tests(docs)
tests.summary()
get_printer().printout_ln("Tests run in %s" % (t.elapsed_str()))
return status
register_command('run-tests', RunTests)
|
ryanahall/django
|
refs/heads/master
|
django/views/decorators/cache.py
|
586
|
from functools import wraps
from django.middleware.cache import CacheMiddleware
from django.utils.cache import add_never_cache_headers, patch_cache_control
from django.utils.decorators import (
available_attrs, decorator_from_middleware_with_args,
)
def cache_page(*args, **kwargs):
"""
    Decorator for views that tries to get the page from the cache and
    populates the cache if the page isn't in the cache yet.
The cache is keyed by the URL and some data from the headers.
Additionally there is the key prefix that is used to distinguish different
cache areas in a multi-site setup. You could use the
get_current_site().domain, for example, as that is unique across a Django
project.
Additionally, all headers from the response's Vary header will be taken
into account on caching -- just like the middleware does.
"""
# We also add some asserts to give better error messages in case people are
# using other ways to call cache_page that no longer work.
if len(args) != 1 or callable(args[0]):
raise TypeError("cache_page has a single mandatory positional argument: timeout")
cache_timeout = args[0]
cache_alias = kwargs.pop('cache', None)
key_prefix = kwargs.pop('key_prefix', None)
if kwargs:
raise TypeError("cache_page has two optional keyword arguments: cache and key_prefix")
return decorator_from_middleware_with_args(CacheMiddleware)(
cache_timeout=cache_timeout, cache_alias=cache_alias, key_prefix=key_prefix
)
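# Editorial example (not part of Django): a minimal sketch of how cache_page
# is typically applied. The view name, the 15-minute timeout and the "site1"
# key prefix are assumptions used only for illustration.
def _example_cache_page_usage():
    from django.http import HttpResponse

    @cache_page(60 * 15, key_prefix="site1")
    def my_view(request):
        return HttpResponse("cached for fifteen minutes")

    return my_view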
def cache_control(**kwargs):
def _cache_controller(viewfunc):
@wraps(viewfunc, assigned=available_attrs(viewfunc))
def _cache_controlled(request, *args, **kw):
response = viewfunc(request, *args, **kw)
patch_cache_control(response, **kwargs)
return response
return _cache_controlled
return _cache_controller
def never_cache(view_func):
"""
Decorator that adds headers to a response so that it will
never be cached.
"""
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view_func(request, *args, **kwargs):
response = view_func(request, *args, **kwargs)
add_never_cache_headers(response)
return response
return _wrapped_view_func
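# Editorial example (not part of Django): cache_control() forwards its keyword
# arguments to patch_cache_control(), while never_cache() marks a response as
# uncacheable. The view names below are assumptions for illustration.
def _example_cache_header_decorators():
    from django.http import HttpResponse

    @cache_control(max_age=3600, public=True)
    def hourly_view(request):
        return HttpResponse("clients may cache this for an hour")

    @never_cache
    def volatile_view(request):
        return HttpResponse("sent with no-cache headers")

    return hourly_view, volatile_view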
|
runekaagaard/django-contrib-locking
|
refs/heads/master
|
django/contrib/auth/backends.py
|
42
|
from __future__ import unicode_literals
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
class ModelBackend(object):
"""
Authenticates against settings.AUTH_USER_MODEL.
"""
def authenticate(self, username=None, password=None, **kwargs):
UserModel = get_user_model()
if username is None:
username = kwargs.get(UserModel.USERNAME_FIELD)
try:
user = UserModel._default_manager.get_by_natural_key(username)
if user.check_password(password):
return user
except UserModel.DoesNotExist:
# Run the default password hasher once to reduce the timing
# difference between an existing and a non-existing user (#20760).
UserModel().set_password(password)
def _get_user_permissions(self, user_obj):
return user_obj.user_permissions.all()
def _get_group_permissions(self, user_obj):
user_groups_field = get_user_model()._meta.get_field('groups')
user_groups_query = 'group__%s' % user_groups_field.related_query_name()
return Permission.objects.filter(**{user_groups_query: user_obj})
def _get_permissions(self, user_obj, obj, from_name):
"""
Returns the permissions of `user_obj` from `from_name`. `from_name` can
be either "group" or "user" to return permissions from
`_get_group_permissions` or `_get_user_permissions` respectively.
"""
if not user_obj.is_active or user_obj.is_anonymous() or obj is not None:
return set()
perm_cache_name = '_%s_perm_cache' % from_name
if not hasattr(user_obj, perm_cache_name):
if user_obj.is_superuser:
perms = Permission.objects.all()
else:
perms = getattr(self, '_get_%s_permissions' % from_name)(user_obj)
perms = perms.values_list('content_type__app_label', 'codename').order_by()
setattr(user_obj, perm_cache_name, set("%s.%s" % (ct, name) for ct, name in perms))
return getattr(user_obj, perm_cache_name)
def get_user_permissions(self, user_obj, obj=None):
"""
Returns a set of permission strings the user `user_obj` has from their
`user_permissions`.
"""
return self._get_permissions(user_obj, obj, 'user')
def get_group_permissions(self, user_obj, obj=None):
"""
Returns a set of permission strings the user `user_obj` has from the
groups they belong.
"""
return self._get_permissions(user_obj, obj, 'group')
def get_all_permissions(self, user_obj, obj=None):
if not user_obj.is_active or user_obj.is_anonymous() or obj is not None:
return set()
if not hasattr(user_obj, '_perm_cache'):
user_obj._perm_cache = self.get_user_permissions(user_obj)
user_obj._perm_cache.update(self.get_group_permissions(user_obj))
return user_obj._perm_cache
def has_perm(self, user_obj, perm, obj=None):
if not user_obj.is_active:
return False
return perm in self.get_all_permissions(user_obj, obj)
def has_module_perms(self, user_obj, app_label):
"""
Returns True if user_obj has any permissions in the given app_label.
"""
if not user_obj.is_active:
return False
for perm in self.get_all_permissions(user_obj):
if perm[:perm.index('.')] == app_label:
return True
return False
def get_user(self, user_id):
UserModel = get_user_model()
try:
return UserModel._default_manager.get(pk=user_id)
except UserModel.DoesNotExist:
return None
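# Editorial example (not part of Django): permission strings produced by
# get_all_permissions() have the form "<app_label>.<codename>", so a typical
# check looks like the sketch below. The "polls.add_question" codename is an
# assumption for illustration.
def _example_permission_check(user):
    backend = ModelBackend()
    return backend.has_perm(user, "polls.add_question")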
class RemoteUserBackend(ModelBackend):
"""
This backend is to be used in conjunction with the ``RemoteUserMiddleware``
found in the middleware module of this package, and is used when the server
is handling authentication outside of Django.
By default, the ``authenticate`` method creates ``User`` objects for
usernames that don't already exist in the database. Subclasses can disable
this behavior by setting the ``create_unknown_user`` attribute to
``False``.
"""
# Create a User object if not already in the database?
create_unknown_user = True
def authenticate(self, remote_user):
"""
The username passed as ``remote_user`` is considered trusted. This
method simply returns the ``User`` object with the given username,
creating a new ``User`` object if ``create_unknown_user`` is ``True``.
Returns None if ``create_unknown_user`` is ``False`` and a ``User``
object with the given username is not found in the database.
"""
if not remote_user:
return
user = None
username = self.clean_username(remote_user)
UserModel = get_user_model()
# Note that this could be accomplished in one try-except clause, but
# instead we use get_or_create when creating unknown users since it has
# built-in safeguards for multiple threads.
if self.create_unknown_user:
user, created = UserModel._default_manager.get_or_create(**{
UserModel.USERNAME_FIELD: username
})
if created:
user = self.configure_user(user)
else:
try:
user = UserModel._default_manager.get_by_natural_key(username)
except UserModel.DoesNotExist:
pass
return user
def clean_username(self, username):
"""
Performs any cleaning on the "username" prior to using it to get or
create the user object. Returns the cleaned username.
By default, returns the username unchanged.
"""
return username
def configure_user(self, user):
"""
Configures a user after creation and returns the updated user.
By default, returns the user unmodified.
"""
return user
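# Editorial example (not part of Django): a common customization is to strip a
# realm suffix from the remote username and to disable automatic user creation.
# The "@"-separated realm format is an assumption for illustration.
class _ExampleRemoteUserBackend(RemoteUserBackend):
    create_unknown_user = False

    def clean_username(self, username):
        # "alice@EXAMPLE.COM" -> "alice"
        return username.partition('@')[0]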
|
skyduy/zfverify
|
refs/heads/master
|
Verify-Manual-python/train/split_imgs.py
|
2
|
# coding: utf-8
import os
from PIL import Image
def img2single(samples_folder, single_folder):
imgs = os.listdir(samples_folder)
for img in imgs:
image = Image.open('%s/%s' % (samples_folder, img)).convert("L")
x_size, y_size = image.size
y_size -= 5
# y from 1 to y_size-5
# x from 4 to x_size-18
piece = (x_size-22) / 8
centers = [4+piece*(2*i+1) for i in range(4)]
pre = img.split('.')[0]
for i, center in enumerate(centers):
image.crop((center-(piece+2), 1, center+(piece+2), y_size)).save('%s/%s-%s.png' % (single_folder, pre, i))
img2single('samples', 'sample_single')
|
jazzband/silk
|
refs/heads/master
|
silk/auth.py
|
1
|
from functools import wraps, WRAPPER_ASSIGNMENTS
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from silk.config import SilkyConfig
def login_possibly_required(function=None, **kwargs):
if SilkyConfig().SILKY_AUTHENTICATION:
return login_required(function, **kwargs)
return function
def permissions_possibly_required(function=None):
if SilkyConfig().SILKY_AUTHORISATION:
actual_decorator = user_passes_test(
SilkyConfig().SILKY_PERMISSIONS
)
if function:
return actual_decorator(function)
return actual_decorator
return function
def user_passes_test(test_func):
def decorator(view_func):
@wraps(view_func, assigned=WRAPPER_ASSIGNMENTS)
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
else:
raise PermissionDenied
return _wrapped_view
return decorator
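# Editorial example (not part of silk): user_passes_test() mirrors Django's
# decorator of the same name but raises PermissionDenied instead of
# redirecting. The staff-only predicate below is an assumption.
def _example_staff_only_view():
    @user_passes_test(lambda user: user.is_staff)
    def staff_view(request):
        return "only staff members reach this"

    return staff_view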
|
gsmartway/odoo
|
refs/heads/8.0
|
addons/l10n_uk/__openerp__.py
|
260
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2011 Smartmode LTD (<http://www.smartmode.co.uk>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'UK - Accounting',
'version': '1.0',
'category': 'Localization/Account Charts',
'description': """
This is the latest UK OpenERP localisation necessary to run OpenERP accounting for UK SME's with:
=================================================================================================
- a CT600-ready chart of accounts
- VAT100-ready tax structure
- InfoLogic UK counties listing
- a few other adaptations""",
'author': 'SmartMode LTD',
'website': 'http://www.smartmode.co.uk',
'depends': ['base_iban', 'base_vat', 'account_chart', 'account_anglo_saxon'],
'data': [
'data/account.account.type.csv',
'data/account.account.template.csv',
'data/account.tax.code.template.csv',
'data/account.chart.template.csv',
'data/account.tax.template.csv',
'data/res.country.state.csv',
'l10n_uk_wizard.xml',
],
'demo' : ['demo/demo.xml'],
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
keedio/hue
|
refs/heads/master
|
desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/Random/Fortuna/SHAd256.py
|
127
|
# -*- coding: ascii -*-
#
# Random/Fortuna/SHAd256.py : SHA_d-256 hash function implementation
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""\
SHA_d-256 hash function implementation.
This module should comply with PEP 247.
"""
__revision__ = "$Id$"
__all__ = ['new', 'digest_size']
import sys
if sys.version_info[0] == 2 and sys.version_info[1] == 1:
from Crypto.Util.py21compat import *
from Crypto.Util.py3compat import *
from binascii import b2a_hex
from Crypto.Hash import SHA256
assert SHA256.digest_size == 32
class _SHAd256(object):
"""SHA-256, doubled.
Returns SHA-256(SHA-256(data)).
"""
digest_size = SHA256.digest_size
_internal = object()
def __init__(self, internal_api_check, sha256_hash_obj):
if internal_api_check is not self._internal:
raise AssertionError("Do not instantiate this class directly. Use %s.new()" % (__name__,))
self._h = sha256_hash_obj
# PEP 247 "copy" method
def copy(self):
"""Return a copy of this hashing object"""
        return _SHAd256(_SHAd256._internal, self._h.copy())
# PEP 247 "digest" method
def digest(self):
"""Return the hash value of this object as a binary string"""
retval = SHA256.new(self._h.digest()).digest()
assert len(retval) == 32
return retval
# PEP 247 "hexdigest" method
def hexdigest(self):
"""Return the hash value of this object as a (lowercase) hexadecimal string"""
retval = b2a_hex(self.digest())
assert len(retval) == 64
if sys.version_info[0] == 2:
return retval
else:
return retval.decode()
# PEP 247 "update" method
def update(self, data):
self._h.update(data)
# PEP 247 module-level "digest_size" variable
digest_size = _SHAd256.digest_size
# PEP 247 module-level "new" function
def new(data=None):
"""Return a new SHAd256 hashing object"""
if not data:
data=b("")
sha = _SHAd256(_SHAd256._internal, SHA256.new(data))
sha.new = globals()['new']
return sha
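# Editorial example (not part of PyCrypto): the module follows PEP 247, so it
# is used like the other Crypto.Hash modules. The message text is an
# assumption for illustration.
def _example_shad256_usage():
    h = new(b("message part one"))
    h.update(b("message part two"))
    assert len(h.digest()) == digest_size  # 32 bytes
    return h.hexdigest()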
# vim:set ts=4 sw=4 sts=4 expandtab:
|
MortimerGoro/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/fetch/nosniff/resources/worker.py
|
219
|
def main(request, response):
type = request.GET.first("type", None)
content = "// nothing to see here"
content += "\n"
content += "this.postMessage('hi')"
response.add_required_headers = False
response.writer.write_status(200)
response.writer.write_header("x-content-type-options", "nosniff")
response.writer.write_header("content-length", len(content))
    if type is not None:
response.writer.write_header("content-type", type)
response.writer.end_headers()
response.writer.write(content)
|
Reagankm/KnockKnock
|
refs/heads/master
|
venv/lib/python3.4/site-packages/nltk/chunk/regexp.py
|
10
|
# Natural Language Toolkit: Regular Expression Chunkers
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# Steven Bird <stevenbird1@gmail.com> (minor additions)
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function, unicode_literals
from __future__ import division
import re
from nltk.tree import Tree
from nltk.chunk.api import ChunkParserI
from nltk.compat import python_2_unicode_compatible, string_types, unicode_repr
##//////////////////////////////////////////////////////
## ChunkString
##//////////////////////////////////////////////////////
@python_2_unicode_compatible
class ChunkString(object):
"""
A string-based encoding of a particular chunking of a text.
Internally, the ``ChunkString`` class uses a single string to
encode the chunking of the input text. This string contains a
sequence of angle-bracket delimited tags, with chunking indicated
by braces. An example of this encoding is::
{<DT><JJ><NN>}<VBN><IN>{<DT><NN>}<.>{<DT><NN>}<VBD><.>
    ``ChunkString`` objects are created from tagged texts (i.e., lists of
``tokens`` whose type is ``TaggedType``). Initially, nothing is
chunked.
The chunking of a ``ChunkString`` can be modified with the ``xform()``
method, which uses a regular expression to transform the string
representation. These transformations should only add and remove
braces; they should *not* modify the sequence of angle-bracket
delimited tags.
:type _str: str
:ivar _str: The internal string representation of the text's
encoding. This string representation contains a sequence of
angle-bracket delimited tags, with chunking indicated by
braces. An example of this encoding is::
{<DT><JJ><NN>}<VBN><IN>{<DT><NN>}<.>{<DT><NN>}<VBD><.>
:type _pieces: list(tagged tokens and chunks)
:ivar _pieces: The tagged tokens and chunks encoded by this ``ChunkString``.
:ivar _debug: The debug level. See the constructor docs.
:cvar IN_CHUNK_PATTERN: A zero-width regexp pattern string that
will only match positions that are in chunks.
:cvar IN_CHINK_PATTERN: A zero-width regexp pattern string that
will only match positions that are in chinks.
"""
CHUNK_TAG_CHAR = r'[^\{\}<>]'
CHUNK_TAG = r'(<%s+?>)' % CHUNK_TAG_CHAR
IN_CHUNK_PATTERN = r'(?=[^\{]*\})'
IN_CHINK_PATTERN = r'(?=[^\}]*(\{|$))'
# These are used by _verify
_CHUNK = r'(\{%s+?\})+?' % CHUNK_TAG
_CHINK = r'(%s+?)+?' % CHUNK_TAG
_VALID = re.compile(r'^(\{?%s\}?)*?$' % CHUNK_TAG)
_BRACKETS = re.compile('[^\{\}]+')
_BALANCED_BRACKETS = re.compile(r'(\{\})*$')
def __init__(self, chunk_struct, debug_level=1):
"""
Construct a new ``ChunkString`` that encodes the chunking of
the text ``tagged_tokens``.
:type chunk_struct: Tree
:param chunk_struct: The chunk structure to be further chunked.
:type debug_level: int
:param debug_level: The level of debugging which should be
applied to transformations on the ``ChunkString``. The
valid levels are:
- 0: no checks
- 1: full check on to_chunkstruct
- 2: full check on to_chunkstruct and cursory check after
each transformation.
- 3: full check on to_chunkstruct and full check after
each transformation.
We recommend you use at least level 1. You should
probably use level 3 if you use any non-standard
subclasses of ``RegexpChunkRule``.
"""
self._root_label = chunk_struct.label()
self._pieces = chunk_struct[:]
tags = [self._tag(tok) for tok in self._pieces]
self._str = '<' + '><'.join(tags) + '>'
self._debug = debug_level
def _tag(self, tok):
if isinstance(tok, tuple):
return tok[1]
elif isinstance(tok, Tree):
return tok.label()
else:
raise ValueError('chunk structures must contain tagged '
'tokens or trees')
def _verify(self, s, verify_tags):
"""
Check to make sure that ``s`` still corresponds to some chunked
version of ``_pieces``.
:type verify_tags: bool
:param verify_tags: Whether the individual tags should be
checked. If this is false, ``_verify`` will check to make
sure that ``_str`` encodes a chunked version of *some*
list of tokens. If this is true, then ``_verify`` will
check to make sure that the tags in ``_str`` match those in
``_pieces``.
:raise ValueError: if the internal string representation of
this ``ChunkString`` is invalid or not consistent with _pieces.
"""
# Check overall form
if not ChunkString._VALID.match(s):
raise ValueError('Transformation generated invalid '
'chunkstring:\n %s' % s)
# Check that parens are balanced. If the string is long, we
# have to do this in pieces, to avoid a maximum recursion
# depth limit for regular expressions.
brackets = ChunkString._BRACKETS.sub('', s)
for i in range(1 + len(brackets) // 5000):
substr = brackets[i*5000:i*5000+5000]
if not ChunkString._BALANCED_BRACKETS.match(substr):
raise ValueError('Transformation generated invalid '
'chunkstring:\n %s' % s)
if verify_tags<=0: return
tags1 = (re.split(r'[\{\}<>]+', s))[1:-1]
tags2 = [self._tag(piece) for piece in self._pieces]
if tags1 != tags2:
raise ValueError('Transformation generated invalid '
'chunkstring: tag changed')
def to_chunkstruct(self, chunk_label='CHUNK'):
"""
Return the chunk structure encoded by this ``ChunkString``.
:rtype: Tree
:raise ValueError: If a transformation has generated an
invalid chunkstring.
"""
if self._debug > 0: self._verify(self._str, 1)
# Use this alternating list to create the chunkstruct.
pieces = []
index = 0
piece_in_chunk = 0
for piece in re.split('[{}]', self._str):
# Find the list of tokens contained in this piece.
length = piece.count('<')
subsequence = self._pieces[index:index+length]
# Add this list of tokens to our pieces.
if piece_in_chunk:
pieces.append(Tree(chunk_label, subsequence))
else:
pieces += subsequence
# Update index, piece_in_chunk
index += length
piece_in_chunk = not piece_in_chunk
return Tree(self._root_label, pieces)
def xform(self, regexp, repl):
"""
Apply the given transformation to the string encoding of this
``ChunkString``. In particular, find all occurrences that match
``regexp``, and replace them using ``repl`` (as done by
``re.sub``).
This transformation should only add and remove braces; it
should *not* modify the sequence of angle-bracket delimited
tags. Furthermore, this transformation may not result in
improper bracketing. Note, in particular, that bracketing may
not be nested.
:type regexp: str or regexp
:param regexp: A regular expression matching the substring
that should be replaced. This will typically include a
named group, which can be used by ``repl``.
:type repl: str
:param repl: An expression specifying what should replace the
matched substring. Typically, this will include a named
replacement group, specified by ``regexp``.
:rtype: None
:raise ValueError: If this transformation generated an
invalid chunkstring.
"""
# Do the actual substitution
s = re.sub(regexp, repl, self._str)
# The substitution might have generated "empty chunks"
# (substrings of the form "{}"). Remove them, so they don't
# interfere with other transformations.
s = re.sub('\{\}', '', s)
# Make sure that the transformation was legal.
if self._debug > 1: self._verify(s, self._debug-2)
# Commit the transformation.
self._str = s
def __repr__(self):
"""
Return a string representation of this ``ChunkString``.
It has the form::
<ChunkString: '{<DT><JJ><NN>}<VBN><IN>{<DT><NN>}'>
:rtype: str
"""
return '<ChunkString: %s>' % unicode_repr(self._str)
def __str__(self):
"""
Return a formatted representation of this ``ChunkString``.
This representation will include extra spaces to ensure that
tags will line up with the representation of other
``ChunkStrings`` for the same text, regardless of the chunking.
:rtype: str
"""
# Add spaces to make everything line up.
str = re.sub(r'>(?!\})', r'> ', self._str)
str = re.sub(r'([^\{])<', r'\1 <', str)
if str[0] == '<': str = ' ' + str
return str
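# Editorial example (not part of NLTK): building a ChunkString from a tagged
# sentence and inspecting its bracket encoding. The tagged tokens are an
# assumption for illustration.
def _example_chunkstring():
    tagged = Tree('S', [("the", "DT"), ("little", "JJ"),
                        ("cat", "NN"), ("sat", "VBD")])
    cs = ChunkString(tagged)
    # Nothing is chunked yet, so the internal encoding is '<DT><JJ><NN><VBD>'.
    return str(cs)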
##//////////////////////////////////////////////////////
## Chunking Rules
##//////////////////////////////////////////////////////
@python_2_unicode_compatible
class RegexpChunkRule(object):
"""
A rule specifying how to modify the chunking in a ``ChunkString``,
using a transformational regular expression. The
``RegexpChunkRule`` class itself can be used to implement any
transformational rule based on regular expressions. There are
also a number of subclasses, which can be used to implement
simpler types of rules, based on matching regular expressions.
Each ``RegexpChunkRule`` has a regular expression and a
replacement expression. When a ``RegexpChunkRule`` is "applied"
to a ``ChunkString``, it searches the ``ChunkString`` for any
substring that matches the regular expression, and replaces it
using the replacement expression. This search/replace operation
has the same semantics as ``re.sub``.
Each ``RegexpChunkRule`` also has a description string, which
gives a short (typically less than 75 characters) description of
the purpose of the rule.
This transformation defined by this ``RegexpChunkRule`` should
only add and remove braces; it should *not* modify the sequence
of angle-bracket delimited tags. Furthermore, this transformation
may not result in nested or mismatched bracketing.
"""
def __init__(self, regexp, repl, descr):
"""
Construct a new RegexpChunkRule.
:type regexp: regexp or str
:param regexp: The regular expression for this ``RegexpChunkRule``.
When this rule is applied to a ``ChunkString``, any
substring that matches ``regexp`` will be replaced using
the replacement string ``repl``. Note that this must be a
normal regular expression, not a tag pattern.
:type repl: str
:param repl: The replacement expression for this ``RegexpChunkRule``.
When this rule is applied to a ``ChunkString``, any substring
that matches ``regexp`` will be replaced using ``repl``.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
if isinstance(regexp, string_types):
regexp = re.compile(regexp)
self._repl = repl
self._descr = descr
self._regexp = regexp
def apply(self, chunkstr):
# Keep docstring generic so we can inherit it.
"""
Apply this rule to the given ``ChunkString``. See the
class reference documentation for a description of what it
means to apply a rule.
:type chunkstr: ChunkString
:param chunkstr: The chunkstring to which this rule is applied.
:rtype: None
:raise ValueError: If this transformation generated an
invalid chunkstring.
"""
chunkstr.xform(self._regexp, self._repl)
def descr(self):
"""
Return a short description of the purpose and/or effect of
this rule.
:rtype: str
"""
return self._descr
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<RegexpChunkRule: '{<IN|VB.*>}'->'<IN>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return ('<RegexpChunkRule: '+unicode_repr(self._regexp.pattern)+
'->'+unicode_repr(self._repl)+'>')
@staticmethod
def fromstring(s):
"""
Create a RegexpChunkRule from a string description.
Currently, the following formats are supported::
{regexp} # chunk rule
}regexp{ # chink rule
regexp}{regexp # split rule
regexp{}regexp # merge rule
Where ``regexp`` is a regular expression for the rule. Any
text following the comment marker (``#``) will be used as
the rule's description:
>>> from nltk.chunk.regexp import RegexpChunkRule
>>> RegexpChunkRule.fromstring('{<DT>?<NN.*>+}')
<ChunkRule: '<DT>?<NN.*>+'>
"""
# Split off the comment (but don't split on '\#')
m = re.match(r'(?P<rule>(\\.|[^#])*)(?P<comment>#.*)?', s)
rule = m.group('rule').strip()
comment = (m.group('comment') or '')[1:].strip()
# Pattern bodies: chunk, chink, split, merge
try:
if not rule:
raise ValueError('Empty chunk pattern')
if rule[0] == '{' and rule[-1] == '}':
return ChunkRule(rule[1:-1], comment)
elif rule[0] == '}' and rule[-1] == '{':
return ChinkRule(rule[1:-1], comment)
elif '}{' in rule:
left, right = rule.split('}{')
return SplitRule(left, right, comment)
elif '{}' in rule:
left, right = rule.split('{}')
return MergeRule(left, right, comment)
elif re.match('[^{}]*{[^{}]*}[^{}]*', rule):
left, chunk, right = re.split('[{}]', rule)
return ChunkRuleWithContext(left, chunk, right, comment)
else:
raise ValueError('Illegal chunk pattern: %s' % rule)
except (ValueError, re.error):
raise ValueError('Illegal chunk pattern: %s' % rule)
@python_2_unicode_compatible
class ChunkRule(RegexpChunkRule):
"""
A rule specifying how to add chunks to a ``ChunkString``, using a
matching tag pattern. When applied to a ``ChunkString``, it will
find any substring that matches this tag pattern and that is not
already part of a chunk, and create a new chunk containing that
substring.
"""
def __init__(self, tag_pattern, descr):
"""
Construct a new ``ChunkRule``.
:type tag_pattern: str
:param tag_pattern: This rule's tag pattern. When
applied to a ``ChunkString``, this rule will
chunk any substring that matches this tag pattern and that
is not already part of a chunk.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
self._pattern = tag_pattern
regexp = re.compile('(?P<chunk>%s)%s' %
(tag_pattern2re_pattern(tag_pattern),
ChunkString.IN_CHINK_PATTERN))
RegexpChunkRule.__init__(self, regexp, '{\g<chunk>}', descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<ChunkRule: '<IN|VB.*>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return '<ChunkRule: '+unicode_repr(self._pattern)+'>'
@python_2_unicode_compatible
class ChinkRule(RegexpChunkRule):
"""
    A rule specifying how to remove chinks from a ``ChunkString``,
using a matching tag pattern. When applied to a
``ChunkString``, it will find any substring that matches this
tag pattern and that is contained in a chunk, and remove it
from that chunk, thus creating two new chunks.
"""
def __init__(self, tag_pattern, descr):
"""
Construct a new ``ChinkRule``.
:type tag_pattern: str
:param tag_pattern: This rule's tag pattern. When
applied to a ``ChunkString``, this rule will
find any substring that matches this tag pattern and that
is contained in a chunk, and remove it from that chunk,
thus creating two new chunks.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
self._pattern = tag_pattern
regexp = re.compile('(?P<chink>%s)%s' %
(tag_pattern2re_pattern(tag_pattern),
ChunkString.IN_CHUNK_PATTERN))
RegexpChunkRule.__init__(self, regexp, '}\g<chink>{', descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<ChinkRule: '<IN|VB.*>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return '<ChinkRule: '+unicode_repr(self._pattern)+'>'
@python_2_unicode_compatible
class UnChunkRule(RegexpChunkRule):
"""
    A rule specifying how to remove chunks from a ``ChunkString``,
using a matching tag pattern. When applied to a
``ChunkString``, it will find any complete chunk that matches this
tag pattern, and un-chunk it.
"""
def __init__(self, tag_pattern, descr):
"""
Construct a new ``UnChunkRule``.
:type tag_pattern: str
:param tag_pattern: This rule's tag pattern. When
applied to a ``ChunkString``, this rule will
find any complete chunk that matches this tag pattern,
and un-chunk it.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
self._pattern = tag_pattern
regexp = re.compile('\{(?P<chunk>%s)\}' %
tag_pattern2re_pattern(tag_pattern))
RegexpChunkRule.__init__(self, regexp, '\g<chunk>', descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<UnChunkRule: '<IN|VB.*>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return '<UnChunkRule: '+unicode_repr(self._pattern)+'>'
@python_2_unicode_compatible
class MergeRule(RegexpChunkRule):
"""
A rule specifying how to merge chunks in a ``ChunkString``, using
two matching tag patterns: a left pattern, and a right pattern.
When applied to a ``ChunkString``, it will find any chunk whose end
matches left pattern, and immediately followed by a chunk whose
beginning matches right pattern. It will then merge those two
chunks into a single chunk.
"""
def __init__(self, left_tag_pattern, right_tag_pattern, descr):
"""
Construct a new ``MergeRule``.
:type right_tag_pattern: str
:param right_tag_pattern: This rule's right tag
pattern. When applied to a ``ChunkString``, this
rule will find any chunk whose end matches
``left_tag_pattern``, and immediately followed by a chunk
whose beginning matches this pattern. It will
then merge those two chunks into a single chunk.
:type left_tag_pattern: str
:param left_tag_pattern: This rule's left tag
pattern. When applied to a ``ChunkString``, this
rule will find any chunk whose end matches
this pattern, and immediately followed by a chunk
whose beginning matches ``right_tag_pattern``. It will
then merge those two chunks into a single chunk.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
# Ensure that the individual patterns are coherent. E.g., if
# left='(' and right=')', then this will raise an exception:
re.compile(tag_pattern2re_pattern(left_tag_pattern))
re.compile(tag_pattern2re_pattern(right_tag_pattern))
self._left_tag_pattern = left_tag_pattern
self._right_tag_pattern = right_tag_pattern
regexp = re.compile('(?P<left>%s)}{(?=%s)' %
(tag_pattern2re_pattern(left_tag_pattern),
tag_pattern2re_pattern(right_tag_pattern)))
RegexpChunkRule.__init__(self, regexp, '\g<left>', descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<MergeRule: '<NN|DT|JJ>', '<NN|JJ>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return ('<MergeRule: '+unicode_repr(self._left_tag_pattern)+', '+
unicode_repr(self._right_tag_pattern)+'>')
@python_2_unicode_compatible
class SplitRule(RegexpChunkRule):
"""
A rule specifying how to split chunks in a ``ChunkString``, using
two matching tag patterns: a left pattern, and a right pattern.
When applied to a ``ChunkString``, it will find any chunk that
matches the left pattern followed by the right pattern. It will
then split the chunk into two new chunks, at the point between the
two pattern matches.
"""
def __init__(self, left_tag_pattern, right_tag_pattern, descr):
"""
Construct a new ``SplitRule``.
:type right_tag_pattern: str
:param right_tag_pattern: This rule's right tag
pattern. When applied to a ``ChunkString``, this rule will
find any chunk containing a substring that matches
``left_tag_pattern`` followed by this pattern. It will
then split the chunk into two new chunks at the point
between these two matching patterns.
:type left_tag_pattern: str
:param left_tag_pattern: This rule's left tag
pattern. When applied to a ``ChunkString``, this rule will
find any chunk containing a substring that matches this
pattern followed by ``right_tag_pattern``. It will then
split the chunk into two new chunks at the point between
these two matching patterns.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
# Ensure that the individual patterns are coherent. E.g., if
# left='(' and right=')', then this will raise an exception:
re.compile(tag_pattern2re_pattern(left_tag_pattern))
re.compile(tag_pattern2re_pattern(right_tag_pattern))
self._left_tag_pattern = left_tag_pattern
self._right_tag_pattern = right_tag_pattern
regexp = re.compile('(?P<left>%s)(?=%s)' %
(tag_pattern2re_pattern(left_tag_pattern),
tag_pattern2re_pattern(right_tag_pattern)))
RegexpChunkRule.__init__(self, regexp, r'\g<left>}{', descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<SplitRule: '<NN>', '<DT>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return ('<SplitRule: '+unicode_repr(self._left_tag_pattern)+', '+
unicode_repr(self._right_tag_pattern)+'>')
@python_2_unicode_compatible
class ExpandLeftRule(RegexpChunkRule):
"""
A rule specifying how to expand chunks in a ``ChunkString`` to the left,
using two matching tag patterns: a left pattern, and a right pattern.
When applied to a ``ChunkString``, it will find any chunk whose beginning
matches right pattern, and immediately preceded by a chink whose
end matches left pattern. It will then expand the chunk to incorporate
the new material on the left.
"""
def __init__(self, left_tag_pattern, right_tag_pattern, descr):
"""
        Construct a new ``ExpandLeftRule``.
        :type right_tag_pattern: str
        :param right_tag_pattern: This rule's right tag
            pattern. When applied to a ``ChunkString``, this
            rule will find any chunk whose beginning matches
            this pattern and that is immediately preceded by a chink
            whose end matches ``left_tag_pattern``. It will
            then expand the chunk to incorporate the new material on the left.
        :type left_tag_pattern: str
        :param left_tag_pattern: This rule's left tag
            pattern. When applied to a ``ChunkString``, this
            rule will find any chink whose end matches this pattern
            and that is immediately followed by a chunk whose beginning
            matches ``right_tag_pattern``. It will then expand that
            chunk to incorporate the new material on the left.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
# Ensure that the individual patterns are coherent. E.g., if
# left='(' and right=')', then this will raise an exception:
re.compile(tag_pattern2re_pattern(left_tag_pattern))
re.compile(tag_pattern2re_pattern(right_tag_pattern))
self._left_tag_pattern = left_tag_pattern
self._right_tag_pattern = right_tag_pattern
regexp = re.compile('(?P<left>%s)\{(?P<right>%s)' %
(tag_pattern2re_pattern(left_tag_pattern),
tag_pattern2re_pattern(right_tag_pattern)))
RegexpChunkRule.__init__(self, regexp, '{\g<left>\g<right>', descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<ExpandLeftRule: '<NN|DT|JJ>', '<NN|JJ>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return ('<ExpandLeftRule: '+unicode_repr(self._left_tag_pattern)+', '+
unicode_repr(self._right_tag_pattern)+'>')
@python_2_unicode_compatible
class ExpandRightRule(RegexpChunkRule):
"""
A rule specifying how to expand chunks in a ``ChunkString`` to the
right, using two matching tag patterns: a left pattern, and a
right pattern. When applied to a ``ChunkString``, it will find any
chunk whose end matches left pattern, and immediately followed by
a chink whose beginning matches right pattern. It will then
expand the chunk to incorporate the new material on the right.
"""
def __init__(self, left_tag_pattern, right_tag_pattern, descr):
"""
Construct a new ``ExpandRightRule``.
:type right_tag_pattern: str
:param right_tag_pattern: This rule's right tag
pattern. When applied to a ``ChunkString``, this
rule will find any chunk whose end matches
``left_tag_pattern``, and immediately followed by a chink
whose beginning matches this pattern. It will
            then expand the chunk to incorporate the new material on the right.
:type left_tag_pattern: str
:param left_tag_pattern: This rule's left tag
pattern. When applied to a ``ChunkString``, this
rule will find any chunk whose end matches
this pattern, and immediately followed by a chink
whose beginning matches ``right_tag_pattern``. It will
then expand the chunk to incorporate the new material on the right.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
# Ensure that the individual patterns are coherent. E.g., if
# left='(' and right=')', then this will raise an exception:
re.compile(tag_pattern2re_pattern(left_tag_pattern))
re.compile(tag_pattern2re_pattern(right_tag_pattern))
self._left_tag_pattern = left_tag_pattern
self._right_tag_pattern = right_tag_pattern
regexp = re.compile('(?P<left>%s)\}(?P<right>%s)' %
(tag_pattern2re_pattern(left_tag_pattern),
tag_pattern2re_pattern(right_tag_pattern)))
RegexpChunkRule.__init__(self, regexp, '\g<left>\g<right>}', descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<ExpandRightRule: '<NN|DT|JJ>', '<NN|JJ>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return ('<ExpandRightRule: '+unicode_repr(self._left_tag_pattern)+', '+
unicode_repr(self._right_tag_pattern)+'>')
@python_2_unicode_compatible
class ChunkRuleWithContext(RegexpChunkRule):
"""
A rule specifying how to add chunks to a ``ChunkString``, using
three matching tag patterns: one for the left context, one for the
chunk, and one for the right context. When applied to a
``ChunkString``, it will find any substring that matches the chunk
tag pattern, is surrounded by substrings that match the two
context patterns, and is not already part of a chunk; and create a
new chunk containing the substring that matched the chunk tag
pattern.
Caveat: Both the left and right context are consumed when this
rule matches; therefore, if you need to find overlapping matches,
you will need to apply your rule more than once.
"""
def __init__(self, left_context_tag_pattern, chunk_tag_pattern,
right_context_tag_pattern, descr):
"""
Construct a new ``ChunkRuleWithContext``.
:type left_context_tag_pattern: str
:param left_context_tag_pattern: A tag pattern that must match
the left context of ``chunk_tag_pattern`` for this rule to
apply.
:type chunk_tag_pattern: str
:param chunk_tag_pattern: A tag pattern that must match for this
rule to apply. If the rule does apply, then this pattern
also identifies the substring that will be made into a chunk.
:type right_context_tag_pattern: str
:param right_context_tag_pattern: A tag pattern that must match
the right context of ``chunk_tag_pattern`` for this rule to
apply.
:type descr: str
:param descr: A short description of the purpose and/or effect
of this rule.
"""
# Ensure that the individual patterns are coherent. E.g., if
# left='(' and right=')', then this will raise an exception:
re.compile(tag_pattern2re_pattern(left_context_tag_pattern))
re.compile(tag_pattern2re_pattern(chunk_tag_pattern))
re.compile(tag_pattern2re_pattern(right_context_tag_pattern))
self._left_context_tag_pattern = left_context_tag_pattern
self._chunk_tag_pattern = chunk_tag_pattern
self._right_context_tag_pattern = right_context_tag_pattern
regexp = re.compile('(?P<left>%s)(?P<chunk>%s)(?P<right>%s)%s' %
(tag_pattern2re_pattern(left_context_tag_pattern),
tag_pattern2re_pattern(chunk_tag_pattern),
tag_pattern2re_pattern(right_context_tag_pattern),
ChunkString.IN_CHINK_PATTERN))
replacement = r'\g<left>{\g<chunk>}\g<right>'
RegexpChunkRule.__init__(self, regexp, replacement, descr)
def __repr__(self):
"""
Return a string representation of this rule. It has the form::
<ChunkRuleWithContext: '<IN>', '<NN>', '<DT>'>
Note that this representation does not include the
description string; that string can be accessed
separately with the ``descr()`` method.
:rtype: str
"""
return '<ChunkRuleWithContext: %r, %r, %r>' % (
self._left_context_tag_pattern, self._chunk_tag_pattern,
self._right_context_tag_pattern)
##//////////////////////////////////////////////////////
## Tag Pattern Format Conversion
##//////////////////////////////////////////////////////
# this should probably be made more strict than it is -- e.g., it
# currently accepts 'foo'.
CHUNK_TAG_PATTERN = re.compile(r'^((%s|<%s>)*)$' %
('[^\{\}<>]+',
'[^\{\}<>]+'))
def tag_pattern2re_pattern(tag_pattern):
"""
Convert a tag pattern to a regular expression pattern. A "tag
pattern" is a modified version of a regular expression, designed
for matching sequences of tags. The differences between regular
expression patterns and tag patterns are:
- In tag patterns, ``'<'`` and ``'>'`` act as parentheses; so
``'<NN>+'`` matches one or more repetitions of ``'<NN>'``, not
``'<NN'`` followed by one or more repetitions of ``'>'``.
- Whitespace in tag patterns is ignored. So
      ``'<DT> | <NN>'`` is equivalent to ``'<DT>|<NN>'``
    - In tag patterns, ``'.'`` is equivalent to ``'[^{}<>]'``; so
``'<NN.*>'`` matches any single tag starting with ``'NN'``.
In particular, ``tag_pattern2re_pattern`` performs the following
transformations on the given pattern:
- Replace '.' with '[^<>{}]'
- Remove any whitespace
- Add extra parens around '<' and '>', to make '<' and '>' act
like parentheses. E.g., so that in '<NN>+', the '+' has scope
over the entire '<NN>'; and so that in '<NN|IN>', the '|' has
scope over 'NN' and 'IN', but not '<' or '>'.
- Check to make sure the resulting pattern is valid.
:type tag_pattern: str
:param tag_pattern: The tag pattern to convert to a regular
expression pattern.
:raise ValueError: If ``tag_pattern`` is not a valid tag pattern.
In particular, ``tag_pattern`` should not include braces; and it
should not contain nested or mismatched angle-brackets.
:rtype: str
:return: A regular expression pattern corresponding to
``tag_pattern``.
"""
# Clean up the regular expression
tag_pattern = re.sub(r'\s', '', tag_pattern)
tag_pattern = re.sub(r'<', '(<(', tag_pattern)
tag_pattern = re.sub(r'>', ')>)', tag_pattern)
# Check the regular expression
if not CHUNK_TAG_PATTERN.match(tag_pattern):
raise ValueError('Bad tag pattern: %r' % tag_pattern)
# Replace "." with CHUNK_TAG_CHAR.
# We have to do this after, since it adds {}[]<>s, which would
# confuse CHUNK_TAG_PATTERN.
# PRE doesn't have lookback assertions, so reverse twice, and do
# the pattern backwards (with lookahead assertions). This can be
# made much cleaner once we can switch back to SRE.
def reverse_str(str):
lst = list(str)
lst.reverse()
return ''.join(lst)
tc_rev = reverse_str(ChunkString.CHUNK_TAG_CHAR)
reversed = reverse_str(tag_pattern)
reversed = re.sub(r'\.(?!\\(\\\\)*($|[^\\]))', tc_rev, reversed)
tag_pattern = reverse_str(reversed)
return tag_pattern
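# Editorial example (not part of NLTK): a quick illustration of the
# conversion. The exact parenthesisation of the result is an implementation
# detail, so the sketch just returns it rather than asserting a literal value.
def _example_tag_pattern_conversion():
    # '<DT>?<JJ>*<NN>' is turned into an ordinary regexp in which each <...>
    # group is parenthesised and '.' no longer matches '<', '>', '{' or '}'.
    return tag_pattern2re_pattern('<DT>?<JJ>*<NN>')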
##//////////////////////////////////////////////////////
## RegexpChunkParser
##//////////////////////////////////////////////////////
@python_2_unicode_compatible
class RegexpChunkParser(ChunkParserI):
"""
A regular expression based chunk parser. ``RegexpChunkParser`` uses a
sequence of "rules" to find chunks of a single type within a
text. The chunking of the text is encoded using a ``ChunkString``,
and each rule acts by modifying the chunking in the
``ChunkString``. The rules are all implemented using regular
expression matching and substitution.
The ``RegexpChunkRule`` class and its subclasses (``ChunkRule``,
``ChinkRule``, ``UnChunkRule``, ``MergeRule``, and ``SplitRule``)
define the rules that are used by ``RegexpChunkParser``. Each rule
defines an ``apply()`` method, which modifies the chunking encoded
by a given ``ChunkString``.
:type _rules: list(RegexpChunkRule)
:ivar _rules: The list of rules that should be applied to a text.
:type _trace: int
:ivar _trace: The default level of tracing.
"""
def __init__(self, rules, chunk_label='NP', root_label='S', trace=0):
"""
Construct a new ``RegexpChunkParser``.
:type rules: list(RegexpChunkRule)
:param rules: The sequence of rules that should be used to
generate the chunking for a tagged text.
:type chunk_label: str
:param chunk_label: The node value that should be used for
chunk subtrees. This is typically a short string
describing the type of information contained by the chunk,
such as ``"NP"`` for base noun phrases.
:type root_label: str
:param root_label: The node value that should be used for the
top node of the chunk structure.
:type trace: int
:param trace: The level of tracing that should be used when
parsing a text. ``0`` will generate no tracing output;
``1`` will generate normal tracing output; and ``2`` or
higher will generate verbose tracing output.
"""
self._rules = rules
self._trace = trace
self._chunk_label = chunk_label
self._root_label = root_label
def _trace_apply(self, chunkstr, verbose):
"""
Apply each rule of this ``RegexpChunkParser`` to ``chunkstr``, in
turn. Generate trace output between each rule. If ``verbose``
is true, then generate verbose output.
:type chunkstr: ChunkString
:param chunkstr: The chunk string to which each rule should be
applied.
:type verbose: bool
:param verbose: Whether output should be verbose.
:rtype: None
"""
print('# Input:')
print(chunkstr)
for rule in self._rules:
rule.apply(chunkstr)
if verbose:
print('#', rule.descr()+' ('+unicode_repr(rule)+'):')
else:
print('#', rule.descr()+':')
print(chunkstr)
def _notrace_apply(self, chunkstr):
"""
Apply each rule of this ``RegexpChunkParser`` to ``chunkstr``, in
turn.
:param chunkstr: The chunk string to which each rule should be
applied.
:type chunkstr: ChunkString
:rtype: None
"""
for rule in self._rules:
rule.apply(chunkstr)
def parse(self, chunk_struct, trace=None):
"""
:type chunk_struct: Tree
:param chunk_struct: the chunk structure to be (further) chunked
:type trace: int
:param trace: The level of tracing that should be used when
parsing a text. ``0`` will generate no tracing output;
``1`` will generate normal tracing output; and ``2`` or
            higher will generate verbose tracing output. This value
overrides the trace level value that was given to the
constructor.
:rtype: Tree
:return: a chunk structure that encodes the chunks in a given
tagged sentence. A chunk is a non-overlapping linguistic
group, such as a noun phrase. The set of chunks
identified in the chunk structure depends on the rules
used to define this ``RegexpChunkParser``.
"""
if len(chunk_struct) == 0:
print('Warning: parsing empty text')
return Tree(self._root_label, [])
try:
chunk_struct.label()
except AttributeError:
chunk_struct = Tree(self._root_label, chunk_struct)
# Use the default trace value?
if trace is None: trace = self._trace
chunkstr = ChunkString(chunk_struct)
# Apply the sequence of rules to the chunkstring.
if trace:
verbose = (trace>1)
self._trace_apply(chunkstr, verbose)
else:
self._notrace_apply(chunkstr)
# Use the chunkstring to create a chunk structure.
return chunkstr.to_chunkstruct(self._chunk_label)
def rules(self):
"""
:return: the sequence of rules used by ``RegexpChunkParser``.
:rtype: list(RegexpChunkRule)
"""
return self._rules
def __repr__(self):
"""
:return: a concise string representation of this
``RegexpChunkParser``.
:rtype: str
"""
return "<RegexpChunkParser with %d rules>" % len(self._rules)
def __str__(self):
"""
:return: a verbose string representation of this ``RegexpChunkParser``.
:rtype: str
"""
s = "RegexpChunkParser with %d rules:\n" % len(self._rules)
margin = 0
for rule in self._rules:
margin = max(margin, len(rule.descr()))
if margin < 35:
format = " %" + repr(-(margin+3)) + "s%s\n"
else:
format = " %s\n %s\n"
for rule in self._rules:
s += format % (rule.descr(), unicode_repr(rule))
return s[:-1]
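# Editorial example (not part of NLTK): wiring a single ChunkRule into a
# RegexpChunkParser by hand. The tagged sentence is an assumption for
# illustration.
def _example_regexp_chunk_parser():
    rule = ChunkRule('<DT>?<JJ>*<NN>',
                     'chunk optional determiner, adjectives and noun')
    parser = RegexpChunkParser([rule], chunk_label='NP', root_label='S')
    sentence = [("the", "DT"), ("little", "JJ"), ("cat", "NN"),
                ("sat", "VBD"), ("on", "IN"), ("the", "DT"), ("mat", "NN")]
    return parser.parse(sentence)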
##//////////////////////////////////////////////////////
## Chunk Grammar
##//////////////////////////////////////////////////////
@python_2_unicode_compatible
class RegexpParser(ChunkParserI):
"""
A grammar based chunk parser. ``chunk.RegexpParser`` uses a set of
regular expression patterns to specify the behavior of the parser.
The chunking of the text is encoded using a ``ChunkString``, and
each rule acts by modifying the chunking in the ``ChunkString``.
The rules are all implemented using regular expression matching
and substitution.
A grammar contains one or more clauses in the following form::
NP:
{<DT|JJ>} # chunk determiners and adjectives
}<[\.VI].*>+{ # chink any tag beginning with V, I, or .
<.*>}{<DT> # split a chunk at a determiner
<DT|JJ>{}<NN.*> # merge chunk ending with det/adj
# with one starting with a noun
The patterns of a clause are executed in order. An earlier
pattern may introduce a chunk boundary that prevents a later
pattern from executing. Sometimes an individual pattern will
match on multiple, overlapping extents of the input. As with
regular expression substitution more generally, the chunker will
identify the first match possible, then continue looking for matches
after this one has ended.
The clauses of a grammar are also executed in order. A cascaded
chunk parser is one having more than one clause. The maximum depth
of a parse tree created by this chunk parser is the same as the
number of clauses in the grammar.
When tracing is turned on, the comment portion of a line is displayed
each time the corresponding pattern is applied.
:type _start: str
:ivar _start: The start symbol of the grammar (the root node of
resulting trees)
:type _stages: int
:ivar _stages: The list of parsing stages corresponding to the grammar
"""
def __init__(self, grammar, root_label='S', loop=1, trace=0):
"""
Create a new chunk parser, from the given start state
and set of chunk patterns.
:param grammar: The grammar, or a list of RegexpChunkParser objects
:type grammar: str or list(RegexpChunkParser)
:param root_label: The top node of the tree being created
:type root_label: str or Nonterminal
:param loop: The number of times to run through the patterns
:type loop: int
:type trace: int
:param trace: The level of tracing that should be used when
parsing a text. ``0`` will generate no tracing output;
``1`` will generate normal tracing output; and ``2`` or
higher will generate verbose tracing output.
"""
self._trace = trace
self._stages = []
self._grammar = grammar
self._loop = loop
if isinstance(grammar, string_types):
self._read_grammar(grammar, root_label, trace)
else:
            # Make sure the grammar looks like it has the right type:
type_err = ('Expected string or list of RegexpChunkParsers '
'for the grammar.')
try: grammar = list(grammar)
except: raise TypeError(type_err)
for elt in grammar:
if not isinstance(elt, RegexpChunkParser):
raise TypeError(type_err)
self._stages = grammar
def _read_grammar(self, grammar, root_label, trace):
"""
Helper function for __init__: read the grammar if it is a
string.
"""
rules = []
lhs = None
for line in grammar.split('\n'):
line = line.strip()
# New stage begins if there's an unescaped ':'
m = re.match('(?P<nonterminal>(\\.|[^:])*)(:(?P<rule>.*))', line)
if m:
# Record the stage that we just completed.
self._add_stage(rules, lhs, root_label, trace)
# Start a new stage.
lhs = m.group('nonterminal').strip()
rules = []
line = m.group('rule').strip()
# Skip blank & comment-only lines
if line=='' or line.startswith('#'): continue
# Add the rule
rules.append(RegexpChunkRule.fromstring(line))
# Record the final stage
self._add_stage(rules, lhs, root_label, trace)
def _add_stage(self, rules, lhs, root_label, trace):
"""
Helper function for __init__: add a new stage to the parser.
"""
if rules != []:
if not lhs:
raise ValueError('Expected stage marker (eg NP:)')
parser = RegexpChunkParser(rules, chunk_label=lhs,
root_label=root_label, trace=trace)
self._stages.append(parser)
def parse(self, chunk_struct, trace=None):
"""
Apply the chunk parser to this input.
:type chunk_struct: Tree
:param chunk_struct: the chunk structure to be (further) chunked
(this tree is modified, and is also returned)
:type trace: int
:param trace: The level of tracing that should be used when
parsing a text. ``0`` will generate no tracing output;
``1`` will generate normal tracing output; and ``2`` or
            higher will generate verbose tracing output. This value
overrides the trace level value that was given to the
constructor.
:return: the chunked output.
:rtype: Tree
"""
if trace is None: trace = self._trace
for i in range(self._loop):
for parser in self._stages:
chunk_struct = parser.parse(chunk_struct, trace=trace)
return chunk_struct
def __repr__(self):
"""
:return: a concise string representation of this ``chunk.RegexpParser``.
:rtype: str
"""
return "<chunk.RegexpParser with %d stages>" % len(self._stages)
def __str__(self):
"""
:return: a verbose string representation of this
``RegexpParser``.
:rtype: str
"""
s = "chunk.RegexpParser with %d stages:\n" % len(self._stages)
margin = 0
for parser in self._stages:
s += "%s\n" % parser
return s[:-1]
##//////////////////////////////////////////////////////
## Demonstration code
##//////////////////////////////////////////////////////
def demo_eval(chunkparser, text):
"""
Demonstration code for evaluating a chunk parser, using a
``ChunkScore``. This function assumes that ``text`` contains one
sentence per line, and that each sentence has the form expected by
``tree.chunk``. It runs the given chunk parser on each sentence in
the text, and scores the result. It prints the final score
(precision, recall, and f-measure); and reports the set of chunks
that were missed and the set of chunks that were incorrect. (At
most 10 missing chunks and 10 incorrect chunks are reported).
:param chunkparser: The chunkparser to be tested
:type chunkparser: ChunkParserI
:param text: The chunked tagged text that should be used for
evaluation.
:type text: str
"""
from nltk import chunk
from nltk.tree import Tree
# Evaluate our chunk parser.
chunkscore = chunk.ChunkScore()
for sentence in text.split('\n'):
print(sentence)
sentence = sentence.strip()
if not sentence: continue
gold = chunk.tagstr2tree(sentence)
tokens = gold.leaves()
test = chunkparser.parse(Tree('S', tokens), trace=1)
chunkscore.score(gold, test)
print()
print('/'+('='*75)+'\\')
print('Scoring', chunkparser)
print(('-'*77))
print('Precision: %5.1f%%' % (chunkscore.precision()*100), ' '*4, end=' ')
print('Recall: %5.1f%%' % (chunkscore.recall()*100), ' '*6, end=' ')
print('F-Measure: %5.1f%%' % (chunkscore.f_measure()*100))
# Missed chunks.
if chunkscore.missed():
print('Missed:')
missed = chunkscore.missed()
for chunk in missed[:10]:
print(' ', ' '.join(map(str,chunk)))
if len(chunkscore.missed()) > 10:
print(' ...')
# Incorrect chunks.
if chunkscore.incorrect():
print('Incorrect:')
incorrect = chunkscore.incorrect()
for chunk in incorrect[:10]:
print(' ', ' '.join(map(str,chunk)))
if len(chunkscore.incorrect()) > 10:
print(' ...')
print('\\'+('='*75)+'/')
print()
def demo():
"""
A demonstration for the ``RegexpChunkParser`` class. A single text is
parsed with four different chunk parsers, using a variety of rules
and strategies.
"""
from nltk import chunk, Tree
text = """\
[ the/DT little/JJ cat/NN ] sat/VBD on/IN [ the/DT mat/NN ] ./.
[ John/NNP ] saw/VBD [the/DT cats/NNS] [the/DT dog/NN] chased/VBD ./.
[ John/NNP ] thinks/VBZ [ Mary/NN ] saw/VBD [ the/DT cat/NN ] sit/VB on/IN [ the/DT mat/NN ]./.
"""
print('*'*75)
print('Evaluation text:')
print(text)
print('*'*75)
print()
grammar = r"""
NP: # NP stage
{<DT>?<JJ>*<NN>} # chunk determiners, adjectives and nouns
{<NNP>+} # chunk proper nouns
"""
cp = chunk.RegexpParser(grammar)
demo_eval(cp, text)
grammar = r"""
NP:
{<.*>} # start by chunking each tag
}<[\.VI].*>+{ # unchunk any verbs, prepositions or periods
<DT|JJ>{}<NN.*> # merge det/adj with nouns
"""
cp = chunk.RegexpParser(grammar)
demo_eval(cp, text)
grammar = r"""
NP: {<DT>?<JJ>*<NN>} # chunk determiners, adjectives and nouns
VP: {<TO>?<VB.*>} # VP = verb words
"""
cp = chunk.RegexpParser(grammar)
demo_eval(cp, text)
grammar = r"""
NP: {<.*>*} # start by chunking everything
}<[\.VI].*>+{ # chink any verbs, prepositions or periods
<.*>}{<DT> # separate on determiners
PP: {<IN><NP>} # PP = preposition + noun phrase
VP: {<VB.*><NP|PP>*} # VP = verb words + NPs and PPs
"""
cp = chunk.RegexpParser(grammar)
demo_eval(cp, text)
# Evaluation
from nltk.corpus import conll2000
print()
print("Demonstration of empty grammar:")
cp = chunk.RegexpParser("")
print(chunk.accuracy(cp, conll2000.chunked_sents('test.txt',
chunk_types=('NP',))))
print()
print("Demonstration of accuracy evaluation using CoNLL tags:")
grammar = r"""
NP:
{<.*>} # start by chunking each tag
}<[\.VI].*>+{ # unchunk any verbs, prepositions or periods
<DT|JJ>{}<NN.*> # merge det/adj with nouns
"""
cp = chunk.RegexpParser(grammar)
print(chunk.accuracy(cp, conll2000.chunked_sents('test.txt')[:5]))
print()
print("Demonstration of tagged token input")
grammar = r"""
NP: {<.*>*} # start by chunking everything
}<[\.VI].*>+{ # chink any verbs, prepositions or periods
<.*>}{<DT> # separate on determiners
PP: {<IN><NP>} # PP = preposition + noun phrase
VP: {<VB.*><NP|PP>*} # VP = verb words + NPs and PPs
"""
cp = chunk.RegexpParser(grammar)
print(cp.parse([("the","DT"), ("little","JJ"), ("cat", "NN"),
("sat", "VBD"), ("on", "IN"), ("the", "DT"),
("mat", "NN"), (".", ".")]))
if __name__ == '__main__':
demo()
|
PyDev777/studentsdb
|
refs/heads/master
|
students/views/events_log.py
|
1
|
from django.views.generic import TemplateView
from django.core.urlresolvers import reverse
from ..models import LogEntry
from ..util import paginate
class EventLogView(TemplateView):
template_name = 'students/events_log.html'
def get_context_data(self, **kwargs):
context = super(EventLogView, self).get_context_data(**kwargs)
context['events_log_url'] = reverse('events_log')
events_log = LogEntry.objects.all().order_by('-timestamp')[:100]
context['events_log'] = events_log
# apply pagination, 10 events per page
context.update(paginate(events_log, 10, self.request, {}, var_name='events_log'))
return context
|
gromez/Sick-Beard
|
refs/heads/development
|
sickbeard/searchBacklog.py
|
47
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import datetime
import threading
import sickbeard
from sickbeard import db, scheduler
from sickbeard import search_queue
from sickbeard import logger
from sickbeard import ui
#from sickbeard.common import *
class BacklogSearchScheduler(scheduler.Scheduler):
def forceSearch(self):
self.action._set_lastBacklog(1)
self.lastRun = datetime.datetime.fromordinal(1)
def nextRun(self):
if self.action._lastBacklog <= 1:
return datetime.date.today()
else:
return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)
class BacklogSearcher:
def __init__(self):
self._lastBacklog = self._get_lastBacklog()
self.cycleTime = 7
self.lock = threading.Lock()
self.amActive = False
self.amPaused = False
self.amWaiting = False
self._resetPI()
def _resetPI(self):
self.percentDone = 0
self.currentSearchInfo = {'title': 'Initializing'}
def getProgressIndicator(self):
if self.amActive:
return ui.ProgressIndicator(self.percentDone, self.currentSearchInfo)
else:
return None
def am_running(self):
logger.log(u"amWaiting: "+str(self.amWaiting)+", amActive: "+str(self.amActive), logger.DEBUG)
return (not self.amWaiting) and self.amActive
def searchBacklog(self, which_shows=None):
if which_shows:
show_list = which_shows
else:
show_list = sickbeard.showList
if self.amActive == True:
logger.log(u"Backlog is still running, not starting it again", logger.DEBUG)
return
self._get_lastBacklog()
curDate = datetime.date.today().toordinal()
fromDate = datetime.date.fromordinal(1)
if not which_shows and not curDate - self._lastBacklog >= self.cycleTime:
logger.log(u"Running limited backlog on recently missed episodes only")
fromDate = datetime.date.today() - datetime.timedelta(days=7)
self.amActive = True
self.amPaused = False
#myDB = db.DBConnection()
#numSeasonResults = myDB.select("SELECT DISTINCT(season), showid FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.tvdb_id AND show.paused = 0 AND ep.airdate > ?", [fromDate.toordinal()])
# get separate lists of the season/date shows
#season_shows = [x for x in show_list if not x.air_by_date]
air_by_date_shows = [x for x in show_list if x.air_by_date]
# figure out how many segments of air by date shows we're going to do
air_by_date_segments = []
for cur_id in [x.tvdbid for x in air_by_date_shows]:
air_by_date_segments += self._get_air_by_date_segments(cur_id, fromDate)
logger.log(u"Air-by-date segments: "+str(air_by_date_segments), logger.DEBUG)
#totalSeasons = float(len(numSeasonResults) + len(air_by_date_segments))
#numSeasonsDone = 0.0
# go through non air-by-date shows and see if they need any episodes
for curShow in show_list:
if curShow.paused:
continue
if curShow.air_by_date:
segments = [x[1] for x in self._get_air_by_date_segments(curShow.tvdbid, fromDate)]
else:
segments = self._get_season_segments(curShow.tvdbid, fromDate)
for cur_segment in segments:
self.currentSearchInfo = {'title': curShow.name + " Season "+str(cur_segment)}
backlog_queue_item = search_queue.BacklogQueueItem(curShow, cur_segment)
if not backlog_queue_item.wantSeason:
logger.log(u"Nothing in season "+str(cur_segment)+" needs to be downloaded, skipping this season", logger.DEBUG)
else:
sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable
# don't consider this an actual backlog search if we only did recent eps
# or if we only did certain shows
if fromDate == datetime.date.fromordinal(1) and not which_shows:
self._set_lastBacklog(curDate)
self.amActive = False
self._resetPI()
def _get_lastBacklog(self):
logger.log(u"Retrieving the last check time from the DB", logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:
lastBacklog = 1
elif sqlResults[0]["last_backlog"] == None or sqlResults[0]["last_backlog"] == "":
lastBacklog = 1
else:
lastBacklog = int(sqlResults[0]["last_backlog"])
self._lastBacklog = lastBacklog
return self._lastBacklog
def _get_season_segments(self, tvdb_id, fromDate):
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM tv_episodes WHERE showid = ? AND season > 0 and airdate > ?", [tvdb_id, fromDate.toordinal()])
return [int(x["season"]) for x in sqlResults]
def _get_air_by_date_segments(self, tvdb_id, fromDate):
# query the DB for all dates for this show
myDB = db.DBConnection()
num_air_by_date_results = myDB.select("SELECT airdate, showid FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.tvdb_id AND show.paused = 0 AND ep.airdate > ? AND ep.showid = ?",
[fromDate.toordinal(), tvdb_id])
# break them apart into month/year strings
air_by_date_segments = []
for cur_result in num_air_by_date_results:
cur_date = datetime.date.fromordinal(int(cur_result["airdate"]))
cur_date_str = str(cur_date)[:7]
cur_tvdb_id = int(cur_result["showid"])
cur_result_tuple = (cur_tvdb_id, cur_date_str)
if cur_result_tuple not in air_by_date_segments:
air_by_date_segments.append(cur_result_tuple)
return air_by_date_segments
def _set_lastBacklog(self, when):
logger.log(u"Setting the last backlog in the DB to " + str(when), logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:
myDB.action("INSERT INTO info (last_backlog, last_TVDB) VALUES (?,?)", [str(when), 0])
else:
myDB.action("UPDATE info SET last_backlog=" + str(when))
def run(self):
try:
self.searchBacklog()
except:
self.amActive = False
raise
|
bdaroz/the-blue-alliance
|
refs/heads/master
|
tests/test_team_manipulator.py
|
8
|
import unittest2
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from helpers.team_manipulator import TeamManipulator
from models.team import Team
class TestTeamManipulator(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
ndb.get_context().clear_cache() # Prevent data from leaking between tests
self.testbed.init_taskqueue_stub(root_path=".")
self.old_team = Team(
id="frc177",
team_number=177,
rookie_year=1996,
first_tpid=61771,
first_tpid_year=2012,
)
self.new_team = Team(
id="frc177",
team_number=177,
rookie_year=1995,
website="http://www.bobcatrobotics.org",
)
def tearDown(self):
self.testbed.deactivate()
def assertMergedTeam(self, team):
self.assertOldTeam(team)
self.assertEqual(team.website, "http://www.bobcatrobotics.org")
self.assertEqual(team.rookie_year, 1995)
def assertOldTeam(self, team):
self.assertEqual(team.first_tpid, 61771)
self.assertEqual(team.first_tpid_year, 2012)
self.assertEqual(team.key_name, "frc177")
self.assertEqual(team.team_number, 177)
def test_createOrUpdate(self):
TeamManipulator.createOrUpdate(self.old_team)
self.assertOldTeam(Team.get_by_id("frc177"))
TeamManipulator.createOrUpdate(self.new_team)
self.assertMergedTeam(Team.get_by_id("frc177"))
def test_findOrSpawn(self):
self.old_team.put()
self.assertMergedTeam(TeamManipulator.findOrSpawn(self.new_team))
def test_updateMerge(self):
self.assertMergedTeam(TeamManipulator.updateMerge(self.new_team, self.old_team))
def test_create_lots_of_teams(self):
number = 500
teams = [Team(
id="frc%s" % team_number,
team_number=team_number)
for team_number in range(number)]
TeamManipulator.createOrUpdate(teams)
team = Team.get_by_id("frc177")
self.assertEqual(team.key_name, "frc177")
self.assertEqual(team.team_number, 177)
team = Team.get_by_id("frc%s" % (number - 1))
self.assertEqual(team.key_name, "frc%s" % (number - 1))
self.assertEqual(team.team_number, number - 1)
|
mcsosa121/cafa
|
refs/heads/master
|
cafaenv/lib/python2.7/site-packages/django/contrib/postgres/signals.py
|
548
|
from psycopg2 import ProgrammingError
from psycopg2.extras import register_hstore
from django.utils import six
def register_hstore_handler(connection, **kwargs):
if connection.vendor != 'postgresql':
return
try:
if six.PY2:
register_hstore(connection.connection, globally=True, unicode=True)
else:
register_hstore(connection.connection, globally=True)
except ProgrammingError:
# Hstore is not available on the database.
#
# If someone tries to create an hstore field it will error there.
# This is necessary as someone may be using PSQL without extensions
# installed but be using other features of contrib.postgres.
#
# This is also needed in order to create the connection in order to
# install the hstore extension.
pass
|
xfournet/intellij-community
|
refs/heads/master
|
python/testData/completion/await.py
|
54
|
async def foo():
awa<caret> # comment
|
Skytim/nccuTEG
|
refs/heads/master
|
test/test_uploader/test_generic_uploader.py
|
9
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""This module tests the Uploader class."""
from default import Test, with_context
from pybossa.uploader import Uploader
from werkzeug.datastructures import FileStorage
from mock import patch
from PIL import Image
import tempfile
import os
from nose.tools import assert_raises
class TestUploader(Test):
"""Test PyBossa Uploader module."""
def setUp(self):
"""SetUp method."""
super(TestUploader, self).setUp()
with self.flask_app.app_context():
self.create()
@with_context
def test_uploader_init(self):
"""Test UPLOADER init method works."""
u = Uploader()
new_extensions = ['pdf', 'doe']
new_uploader = Uploader()
with patch.dict(self.flask_app.config,
{'ALLOWED_EXTENSIONS': new_extensions}):
new_uploader.init_app(self.flask_app)
expected_extensions = set.union(u.allowed_extensions, new_extensions)
err_msg = "The new uploader should support two extra extensions"
assert expected_extensions == new_uploader.allowed_extensions, err_msg
@with_context
def test_allowed_file(self):
"""Test UPLOADER allowed_file method works."""
u = Uploader()
for ext in u.allowed_extensions:
# Change extension to uppercase to check that it works too
filename = 'test.%s' % ext.upper()
err_msg = ("This file: %s should be allowed, but it failed"
% filename)
assert u.allowed_file(filename) is True, err_msg
err_msg = "Non allowed extensions should return false"
assert u.allowed_file('wrong.pdf') is False, err_msg
@with_context
def test_get_filename_extension(self):
"""Test UPLOADER get_filename_extension works."""
u = Uploader()
filename = "image.png"
err_msg = "The extension should be PNG"
assert u.get_filename_extension(filename) == 'png', err_msg
filename = "image.jpg"
err_msg = "The extension should be JPEG"
assert u.get_filename_extension(filename) == 'jpeg', err_msg
filename = "imagenoextension"
err_msg = "The extension should be None"
assert u.get_filename_extension(filename) == None, err_msg
@with_context
def test_crop(self):
"""Test UPLOADER crop works."""
u = Uploader()
size = (100, 100)
im = Image.new('RGB', size)
folder = tempfile.mkdtemp()
u.upload_folder = folder
im.save(os.path.join(folder, 'image.png'))
coordinates = (0, 0, 50, 50)
file = FileStorage(filename=os.path.join(folder, 'image.png'))
with patch('pybossa.uploader.Image', return_value=True):
err_msg = "It should crop the image"
assert u.crop(file, coordinates) is True, err_msg
with patch('pybossa.uploader.Image.open', side_effect=IOError):
err_msg = "It should return false"
assert u.crop(file, coordinates) is False, err_msg
@with_context
def test_external_url_handler(self):
"""Test UPLOADER external_url_handler works."""
u = Uploader()
with patch.object(u, '_lookup_url', return_value='url'):
assert u.external_url_handler(BaseException, 'endpoint', 'values') == 'url'
@with_context
def test_external_url_handler_fails(self):
"""Test UPLOADER external_url_handler fails works."""
u = Uploader()
with patch.object(u, '_lookup_url', return_value=None):
with patch('pybossa.uploader.sys') as mysys:
mysys.exc_info.return_value=(BaseException, BaseException, None)
assert_raises(BaseException,
u.external_url_handler,
BaseException,
'endpoint',
'values')
@with_context
def test_external_url_handler_fails_2(self):
"""Test UPLOADER external_url_handler fails works."""
u = Uploader()
with patch.object(u, '_lookup_url', return_value=None):
with patch('pybossa.uploader.sys') as mysys:
mysys.exc_info.return_value=(BaseException, BaseException, None)
assert_raises(IOError,
u.external_url_handler,
IOError,
'endpoint',
'values')
|
ya7lelkom/googleads-python-lib
|
refs/heads/master
|
examples/dfp/v201505/placement_service/get_placements_by_statement.py
|
3
|
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all active placements by using a statement.
To create a placement, run create_placements.py.
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
# Initialize appropriate service.
placement_service = client.GetService('PlacementService', version='v201505')
# Create a statement to only select active placements.
values = [{
'key': 'status',
'value': {
'xsi_type': 'TextValue',
'value': 'ACTIVE'
}
}]
query = 'WHERE status = :status'
statement = dfp.FilterStatement(query, values)
# Get placements by statement.
while True:
response = placement_service.getPlacementsByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for placement in response['results']:
print ('Placement with id \'%s\' and name \'%s\' was found.'
% (placement['id'], placement['name']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)
|
alexanian/uwaterloo-igem-2015
|
refs/heads/master
|
models/targeting/genome_classes.py
|
4
|
from probabilistic import prob_cut, nt_rand, indel
# Script to store the principal simulation classes and their interactions
# Gene/genome de-activation defined by:
# - frameshift mutation in an ORF
# - deletion of a promoter
# - deletion of a significant portion of the gene
class Target(object):
def __init__(self, label, grna, sequence, start, complex_concentration, sense, domain):
self.label = label # string
self.grna = grna # string
if sense == 1:
self.sequence = sequence # string, exclude PAM, should be ~ 20 chars
else:
self.sequence = self.convert_sense(sequence)
self.original_start = start # int, location of first target nucleotide adjacent to PAM, shouldn't change
self.current_start = start # int, location of first target nucleotide adjacent to PAM, changes with indels
self.total_cuts = 0 # int, total time this target has been cut
self.cut_position = None # absolute genome location of cut
self.repair_position = None # formerly cut position after repair
self.repaired = True # defined by open/closed
self.targetable = True # defined by targetable or not (PAM broken or indel size > 5)
self.complex_concentration = complex_concentration # conc of gRNA-cas9 complex inside nucleus
self.shift = 0 # defined by sum of net indel sizes, used to compute frameshift if orf region
assert sense in [1, -1]
self.sense = sense # 1 or -1, referring to top (explicit) or bottom (implicit) dna strand
self.domain = domain
domain.add_target(self)
self.cut_probability = None
def convert_sense(self, sequence):
# flips characters and reverses string
pairs = [['a', 't'], ['g', 'c']]
converted_sequence = ""
for nt in sequence:
for pair in pairs:
if nt in pair:
for item in pair:
if nt != item:
converted_sequence += item
return converted_sequence[::-1]
def is_repaired(self):
return self.repaired
def is_targetable(self):
return self.targetable
def get_shift(self):
return self.shift
def compute_and_assign_cut_probability(self, dt):
grna = self.grna
if self.sense == -1:
grna = self.convert_sense(grna)
self.cut_probability = prob_cut(grna, self.sequence, self.complex_concentration, dt)
return self.cut_probability
def cut(self):
self.total_cuts += 1
self.set_cut_position()
self.repaired = False
def set_cut_position(self):
# position of the nt immediately right of the cut, usually 3-4 nt from the PAM
# hardcoded for now
if self.sense == 1:
self.cut_position = self.current_start + 17
else:
self.cut_position = self.current_start + 3
def repair(self, dt):
# call Genome.target_repair through Domain
net_indel_size = self.domain.genome.repair_target(self)
# assess targetability and cut probability
if abs(net_indel_size) > 5: # net indel too large (insertion or deletion)
self.targetable = False
self.cut_probability = 0.0
else:
self.compute_and_assign_cut_probability(dt)
# update state properties
self.repair_position = self.cut_position
self.cut_position = None
self.repaired = True
self.shift += net_indel_size
# check domain functionality
self.domain.update_functionality()
class Domain(object):
# Each Domain may contain targets and belongs to a Genome
def __init__(self, label, domain_start, domain_end, domain_type, genome, promoter=None):
assert domain_type in ["orf", "promoter", "untracked"] # note untracked isn't affected by cas9
self.label = label # string
self.domain_type = domain_type # 'orf' or 'promoter' or 'untracked'
self.domain_start = domain_start # int
self.domain_end = domain_end # int
self.promoter = promoter
if domain_type == 'orf':
# assert promoter is not None
# for now to test
if self.promoter is not None:
assert type(promoter) is Domain
assert promoter.domain_type == 'promoter'
self.promoter = promoter # promoter is a domain too
self.sequence = None # to be implemented
self.functional = True # bool
self.targets = {} # dict of Target objects and locations with labels as keys
self.genome = genome
genome.add_domain(self)
def add_target(self, target):
assert type(target) is Target
assert target.domain is self
self.targets[target.label] = target
def remove_target(self, target):
assert type(target) is Target
assert target.domain is self
del self.targets[target.label]
def update_functionality(self):
if self.domain_type == "orf":
if (not self.promoter.functional) or (sum(target.get_shift() for target in self.targets.values()) % 3 != 0):
self.functional = False
else:
self.functional = True
elif self.domain_type == "promoter": # TODO how to define functional promoter
self.functional = True
else: # untracked domains always functional
self.functional = True
def target_location(self, target_label):
target = self.targets[target_label]
return target.current_start
def set_location(self, target_label, location):
self.targets[target_label].current_start = location
class Genome(object):
# Each Genome has >=1 Domains
def __init__(self, sequence):
self.current_length = len(sequence) # int
self.initial_genome = sequence # string
self.current_genome = sequence # string
self.repaired = True # bool
self.domains = {} # dict of all domains (ORFs, promoters, untracked sections)
def add_domain(self, domain):
assert type(domain) is Domain
self.domains[domain.label] = domain
def remove_domain(self, domain):
assert type(domain) is Domain
del self.domains[domain.label]
def repair_target(self, target):
# sample from indel distribution to get left/right deletion sizes and insertion nucleotides
if target.sense == 1:
del_left, del_right, insert = indel() # e.g. 0, 0, 2
else:
del_right, del_left, insert = indel() # e.g. 0, 0, 2
insert_nt = nt_rand(insert) # fill in random sequence
net_indel_size = insert - del_left - del_right
left_genome = self.current_genome[0: target.cut_position - del_left] # genome to left of sequence
right_genome = self.current_genome[target.cut_position + del_right:] # to right of sequence
new_genome = left_genome + insert_nt + right_genome
# target.current_start = self.find_pam(target.current_start, target.sense)
self.make_new_genome(len(left_genome), net_indel_size, new_genome)
if target.sense == 1:
target.sequence = self.current_genome[target.current_start - net_indel_size: target.current_start + 20 - net_indel_size]
else:
target.sequence = self.current_genome[target.current_start: target.current_start + 20]
return net_indel_size
def find_pam(self, location, sense):
shift = 0
# expands to left and right looking for nearest working PAM
if sense == 1:
while self.current_genome[location+shift+20: location+shift+23] != "gg" and self.current_genome[location-shift: location-shift+2] != "gg":
shift += 1
if self.current_genome[location-shift+20: location-shift+23] == "gg": # if nearest PAM is on left
location -= shift # shift location to the left
else: # if nearest PAM is on right
location += shift # shift location to the right
else:
while self.current_genome[location+shift-3: location+shift] != "cc" and self.current_genome[location-shift: location-shift+2] != "gg":
shift += 1
if self.current_genome[location-shift-3: location-shift] == "cc": # if nearest PAM is on left
location -= shift # shift location to the left
else: # if nearest PAM is on right
location += shift # shift location to the right
return location
def get_targets_from_genome(self):
"""Get a layered dictionary of all the targets in each domain of the genome
Notes:
- structure is {domain_label: dict_of_domain_targets, ...}
"""
return {key: self.domains[key].targets for key in self.domains.keys()}
def get_open_targets_from_genome(self):
"""Get list of unrepaired/open targets
Notes:
- list with format [(key_domain, key_target), ...]
"""
open_targets = []
target_dict = self.get_targets_from_genome()
for key_domain in target_dict.keys():
for key_target in target_dict[key_domain].keys():
if not target_dict[key_domain][key_target].repaired:
open_targets.append((key_domain, key_target))
return open_targets
def get_closed_targets_from_genome(self):
"""Get list of repaired/closed targets
Notes:
- list with format [(key_domain, key_target), ...]
"""
closed_targets = []
target_dict = self.get_targets_from_genome()
for key_domain in target_dict.keys():
for key_target in target_dict[key_domain].keys():
if target_dict[key_domain][key_target].repaired:
closed_targets.append((key_domain, key_target))
return closed_targets
def initialize_target_cut_probabilities(self, dt):
"""Fill in all the target cut probabilities based on dt
Notes:
- target cut probability initializes to None because the class doesn't naturally have access to dt
"""
target_dict = self.get_targets_from_genome()
for key_domain in target_dict.keys():
for key_target in target_dict[key_domain].keys():
target = target_dict[key_domain][key_target]
target.compute_and_assign_cut_probability(dt)
def make_new_genome(self, indel_location, indel_size, new_genome):
"""Re-index all domains and targets after single indel
and set current_genome to new_genome
This is a bit headache inducing
"""
deleted_domains = []
broken_targets = []
target_dict = self.get_targets_from_genome()
for key_domain in target_dict.keys():
domain = self.domains[key_domain]
# if domain starts past indel location
if domain.domain_start > indel_location:
# re-index it
self.domains[key_domain].domain_start += indel_size
# if this pulls start past indel location
if self.domains[key_domain].domain_start < indel_location:
# set it to indel location
self.domains[key_domain].domain_start = indel_location
# if domain ends past indel location
if domain.domain_end > indel_location:
# re-index it
self.domains[key_domain].domain_end += indel_size
# if this pulls end past indel location
if self.domains[key_domain].domain_end < indel_location:
# set it to indel location
self.domains[key_domain].domain_end = indel_location
# if start and end are the same (both location)
if self.domains[key_domain].domain_start == self.domains[key_domain].domain_end:
# it has been deleted
deleted_domains.append(domain)
else:
# otherwise check if it has targets to be re-indexed
for key_target in target_dict[key_domain].keys():
target = target_dict[key_domain][key_target]
if target.current_start > indel_location:
self.domains[key_domain].targets[key_target].current_start += indel_size
# if the target has been damaged or deleted
if self.domains[key_domain].targets[key_target].current_start < indel_location:
# deal with that later
broken_targets.append(self.domains[key_domain].targets[key_target])
# remove all deleted domains
for domain in deleted_domains:
self.remove_domain(domain)
# set new genome
self.current_genome = new_genome
self.current_length = len(new_genome)
# self.repaired = True
# delete or fix all broken targets
for target in broken_targets:
domain_label = target.domain.label
# if whole target is deleted
if target.current_start + 20 < indel_location:
self.domains[domain_label].remove_target(target)
# else it is just broken
else:
continue
# find new pam, set new location, set new sequence
# new_start = self.find_pam(target.current_start, target.sense)
# self.domains[domain_label].targets[target.label].current_start = new_start
# self.domains[domain_label].targets[target.label].sequence = self.current_genome[new_start:new_start+20]
def large_deletion(self, target1, target2, dt):
"""Delete section between two open targets
"""
assert not (target1.repaired or target2.repaired)
target1.set_cut_position() # make sure cut_positions of targets are up to date
target2.set_cut_position() # make sure cut_positions of targets are up to date
location = min(target1.cut_position, target2.cut_position)
middle = abs(target1.cut_position - target2.cut_position)
if middle < self.current_length / 2: # if middle is smaller, should delete
new_genome = self.current_genome[0:location] + self.current_genome[location+middle:]
self.make_new_genome(location, -middle, new_genome)
else: # otherwise, should keep (delete beginning and end)
# first delete beginning
new_genome = self.current_genome[location:]
self.make_new_genome(0, -location, new_genome)
# then delete end
new_genome = self.current_genome[0:middle]
self.make_new_genome(middle, -(self.current_length - middle), new_genome)
# keep the target from the deleted portion if both targets have the same sense
if target1.sense == target2.sense:
if location <= target1.current_start <= location + middle:
target_keep = target1
target_discard = target2
else:
target_keep = target2
target_discard = target1
target_keep.sequence = target_keep.sequence[3:]
target_keep.sequence = target_discard.sequence[0:3] + target_keep.sequence
target_discard.domain.remove_target(target_discard)
target_keep.cut_position = None
target_keep.repaired = True
target_keep.compute_and_assign_cut_probability(dt)
# check domain functionality
target_keep.domain.update_functionality()
# targets have opposite sense and so they're both likely to be broken
else:
target1.domain.remove_target(target1)
target2.domain.remove_target(target2)
|
egbertbouman/tribler-g
|
refs/heads/master
|
Tribler/Main/Dialogs/GUITaskQueue.py
|
1
|
# Written by Arno Bakker
# see LICENSE.txt for license information
#
# GUITaskQueue is a server that executes tasks on behalf of the GUI that are too
# time consuming to be run by the actual GUI Thread (MainThread). Note that
# you still need to delegate the actual updating of the GUI to the MainThread via
# wx.CallAfter
#
from Tribler.Utilities.TimedTaskQueue import TimedTaskQueue
DEBUG = False
class GUITaskQueue(TimedTaskQueue):
__single = None
def __init__(self):
if GUITaskQueue.__single:
raise RuntimeError, "GUITaskQueue is singleton"
GUITaskQueue.__single = self
TimedTaskQueue.__init__(self)
def getInstance(*args, **kw):
if GUITaskQueue.__single is None:
GUITaskQueue(*args, **kw)
return GUITaskQueue.__single
getInstance = staticmethod(getInstance)
def resetSingleton(self):
""" For testing purposes """
GUITaskQueue.__single = None
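# ---------------------------------------------------------------------------
# Minimal usage sketch (illustration only, not wired into Tribler). It shows
# the pattern described in the header comment: run the slow work on the
# GUITaskQueue thread, then hand the resulting GUI update back to the
# MainThread via wx.CallAfter. The add_task(callable, delay) call below is an
# assumption about the inherited TimedTaskQueue API; adapt it if the real
# signature differs.
def _example_offload(slow_callable, on_done):
    """Run slow_callable() off the GUI thread and deliver its result to
    on_done(result) on the MainThread."""
    import wx  # imported lazily so this sketch adds no hard wx dependency
    def task():
        result = slow_callable()       # long-running work, safe off-thread
        wx.CallAfter(on_done, result)  # GUI updates must happen on MainThread
    GUITaskQueue.getInstance().add_task(task, 0)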
|
Daniel-CA/odoo
|
refs/heads/8.0
|
openerp/report/render/makohtml2html/makohtml2html.py
|
443
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import mako
from lxml import etree
from mako.template import Template
from mako.lookup import TemplateLookup
import os
_logger = logging.getLogger(__name__)
class makohtml2html(object):
def __init__(self, html, localcontext):
self.localcontext = localcontext
self.html = html
def format_header(self, html):
head = html.findall('head')
header = ''
for node in head:
header += etree.tostring(node)
return header
def format_footer(self, footer):
html_footer = ''
for node in footer[0].getchildren():
html_footer += etree.tostring(node)
return html_footer
def format_body(self, html):
body = html.findall('body')
body_list = []
footer = self.format_footer(body[-1].getchildren())
for b in body[:-1]:
body_list.append(etree.tostring(b).replace('\t', '').replace('\n',''))
html_body ='''
<script type="text/javascript">
var indexer = 0;
var aryTest = %s ;
function nextData()
{
if(indexer < aryTest.length -1)
{
indexer += 1;
document.forms[0].prev.disabled = false;
document.getElementById("openerp_data").innerHTML=aryTest[indexer];
document.getElementById("counter").innerHTML= indexer + 1 + ' / ' + aryTest.length;
}
else
{
document.forms[0].next.disabled = true;
}
}
function prevData()
{
if (indexer > 0)
{
indexer -= 1;
document.forms[0].next.disabled = false;
document.getElementById("openerp_data").innerHTML=aryTest[indexer];
document.getElementById("counter").innerHTML= indexer + 1 + ' / ' + aryTest.length;
}
else
{
document.forms[0].prev.disabled = true;
}
}
</script>
</head>
<body>
<div id="openerp_data">
%s
</div>
<div>
%s
</div>
<br>
<form>
<table>
<tr>
<td td align="left">
<input name = "prev" type="button" value="Previous" onclick="prevData();">
</td>
<td>
<div id = "counter">%s / %s</div>
</td>
<td align="right">
<input name = "next" type="button" value="Next" onclick="nextData();">
</td>
</tr>
</table>
</form>
</body></html>'''%(body_list,body_list[0],footer,'1',len(body_list))
return html_body
def render(self):
path = os.path.realpath('addons/base/report')
temp_lookup = TemplateLookup(directories=[path],output_encoding='utf-8', encoding_errors='replace')
template = Template(self.html, lookup=temp_lookup)
self.localcontext.update({'css_path':path})
final_html ='''<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>'''
try:
html = template.render_unicode(**self.localcontext)
etree_obj = etree.HTML(html)
final_html += self.format_header(etree_obj)
final_html += self.format_body(etree_obj)
return final_html
except Exception:
_logger.exception('report :')
def parseNode(html, localcontext = {}):
r = makohtml2html(html, localcontext)
return r.render()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
zstackio/zstack-woodpecker
|
refs/heads/master
|
integrationtest/vm/multihosts/snapshots/paths/path15.py
|
2
|
import zstackwoodpecker.test_state as ts_header
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template1", \
path_list=[[TestAction.stop_vm, "vm1"], \
[TestAction.reinit_vm, "vm1"], \
[TestAction.start_vm, "vm1"], \
[TestAction.create_volume_snapshot, "vm1-root", "snapshot1"], \
[TestAction.stop_vm, "vm1"], \
[TestAction.reinit_vm, "vm1"], \
[TestAction.start_vm, "vm1"], \
[TestAction.create_volume_snapshot, "vm1-root", "snapshot2"], \
[TestAction.stop_vm, "vm1"], \
[TestAction.reinit_vm, "vm1"], \
[TestAction.start_vm, "vm1"], \
[TestAction.batch_delete_volume_snapshot, ["snapshot2"]]
])
|
rdo-management/tuskar
|
refs/heads/mgt-master
|
tuskar/db/sqlalchemy/migrate_repo/manage.py
|
1
|
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.versioning.shell import main
if __name__ == '__main__':
main(debug='False', repository='.')
|
rowhit/jasper-client
|
refs/heads/master
|
tests/test_modules.py
|
32
|
#!/usr/bin/env python2
# -*- coding: utf-8-*-
import unittest
from client import test_mic, diagnose, jasperpath
from client.modules import Life, Joke, Time, Gmail, HN, News, Weather
DEFAULT_PROFILE = {
'prefers_email': False,
'location': 'Cape Town',
'timezone': 'US/Eastern',
'phone_number': '012344321'
}
class TestModules(unittest.TestCase):
def setUp(self):
self.profile = DEFAULT_PROFILE
self.send = False
def runConversation(self, query, inputs, module):
"""Generic method for spoofing conversation.
Arguments:
query -- The initial input to the server.
inputs -- Additional input, if conversation is extended.
Returns:
The server's responses, in a list.
"""
self.assertTrue(module.isValid(query))
mic = test_mic.Mic(inputs)
module.handle(query, mic, self.profile)
return mic.outputs
def testLife(self):
query = "What is the meaning of life?"
inputs = []
outputs = self.runConversation(query, inputs, Life)
self.assertEqual(len(outputs), 1)
self.assertTrue("42" in outputs[0])
def testJoke(self):
query = "Tell me a joke."
inputs = ["Who's there?", "Random response"]
outputs = self.runConversation(query, inputs, Joke)
self.assertEqual(len(outputs), 3)
allJokes = open(jasperpath.data('text', 'JOKES.txt'), 'r').read()
self.assertTrue(outputs[2] in allJokes)
def testTime(self):
query = "What time is it?"
inputs = []
self.runConversation(query, inputs, Time)
@unittest.skipIf(not diagnose.check_network_connection(),
"No internet connection")
def testGmail(self):
key = 'gmail_password'
if key not in self.profile or not self.profile[key]:
return
query = "Check my email"
inputs = []
self.runConversation(query, inputs, Gmail)
@unittest.skipIf(not diagnose.check_network_connection(),
"No internet connection")
def testHN(self):
query = "find me some of the top hacker news stories"
if self.send:
inputs = ["the first and third"]
else:
inputs = ["no"]
outputs = self.runConversation(query, inputs, HN)
self.assertTrue("front-page articles" in outputs[1])
@unittest.skipIf(not diagnose.check_network_connection(),
"No internet connection")
def testNews(self):
query = "find me some of the top news stories"
if self.send:
inputs = ["the first"]
else:
inputs = ["no"]
outputs = self.runConversation(query, inputs, News)
self.assertTrue("top headlines" in outputs[1])
@unittest.skipIf(not diagnose.check_network_connection(),
"No internet connection")
def testWeather(self):
query = "what's the weather like tomorrow"
inputs = []
outputs = self.runConversation(query, inputs, Weather)
self.assertTrue("can't see that far ahead"
in outputs[0] or "Tomorrow" in outputs[0])
|
linjoahow/w17g
|
refs/heads/master
|
static/Brython3.1.3-20150514-095342/Lib/xml/etree/__init__.py
|
1200
|
# $Id: __init__.py 3375 2008-02-13 08:05:08Z fredrik $
# elementtree package
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2008 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
|
leekchan/django_test
|
refs/heads/master
|
django/contrib/messages/storage/session.py
|
288
|
import json
from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import MessageEncoder, MessageDecoder
from django.utils import six
class SessionStorage(BaseStorage):
"""
Stores messages in the session (that is, django.contrib.sessions).
"""
session_key = '_messages'
def __init__(self, request, *args, **kwargs):
assert hasattr(request, 'session'), "The session-based temporary "\
"message storage requires session middleware to be installed, "\
"and come before the message middleware in the "\
"MIDDLEWARE_CLASSES list."
super(SessionStorage, self).__init__(request, *args, **kwargs)
def _get(self, *args, **kwargs):
"""
Retrieves a list of messages from the request's session. This storage
always stores everything it is given, so return True for the
all_retrieved flag.
"""
return self.deserialize_messages(self.request.session.get(self.session_key)), True
def _store(self, messages, response, *args, **kwargs):
"""
Stores a list of messages to the request's session.
"""
if messages:
self.request.session[self.session_key] = self.serialize_messages(messages)
else:
self.request.session.pop(self.session_key, None)
return []
def serialize_messages(self, messages):
encoder = MessageEncoder(separators=(',', ':'))
return encoder.encode(messages)
def deserialize_messages(self, data):
if data and isinstance(data, six.string_types):
return json.loads(data, cls=MessageDecoder)
return data
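# ---------------------------------------------------------------------------
# Minimal settings sketch (illustration only) showing the configuration this
# backend expects: MESSAGE_STORAGE pointing at this class, and the session
# middleware listed before the message middleware, which is the ordering the
# assertion in __init__ above checks for. These are the stock Django dotted
# paths; the lines belong in a project's settings module, not in this file.
#
#     MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
#     MIDDLEWARE_CLASSES = (
#         'django.contrib.sessions.middleware.SessionMiddleware',
#         'django.contrib.messages.middleware.MessageMiddleware',
#     )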
|
d40223223/2015cdbg6team0622
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/importlib/basehook.py
|
608
|
from javascript import JSObject
from browser import window
import urllib.request
class TempMod:
def __init__(self, name):
self.name=name
#define my custom import hook (just to see if it get called etc).
class BaseHook:
def __init__(self, fullname=None, path=None):
self._fullname=fullname
self._path=path # we don't care about this...
self._modpath=''
self._module=''
def find_module(self, name=None, path=None):
if name is None:
name=self._fullname
for _i in ('libs/%s.js' % name, 'Lib/%s.py' % name,
'Lib/%s/__init__.py' % name):
_path="%s%s" % (__BRYTHON__.brython_path, _i)
try:
_fp,_,_headers=urllib.request.urlopen(_path)
if _headers['status'] != 200:
continue
self._module=_fp.read()
self._modpath=_path
return self
except urllib.error.HTTPError as e:
print(str(e))
self._modpath=''
self._module=''
raise ImportError
def is_package(self):
return '.' in self._fullname
def load_module(self, name):
if name is None:
name=self._fullname
window.eval('__BRYTHON__.imported["%s"]={}' % name)
return JSObject(__BRYTHON__.run_py)(TempMod(name),
self._modpath, self._module)
|
yajnab/android_kernel_sony_taoshan
|
refs/heads/jellybean
|
tools/perf/scripts/python/sctop.py
|
11180
|
# system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
laperry1/android_external_chromium_org
|
refs/heads/cm-12.1
|
build/android/pylib/perf/perf_control_unittest.py
|
37
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0212
import os
import sys
import unittest
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
from pylib import android_commands
from pylib.device import device_utils
from pylib.perf import perf_control
class TestPerfControl(unittest.TestCase):
def setUp(self):
if not os.getenv('BUILDTYPE'):
os.environ['BUILDTYPE'] = 'Debug'
devices = android_commands.GetAttachedDevices()
self.assertGreater(len(devices), 0, 'No device attached!')
self._device = device_utils.DeviceUtils(
android_commands.AndroidCommands(device=devices[0]))
def testHighPerfMode(self):
perf = perf_control.PerfControl(self._device)
try:
perf.SetPerfProfilingMode()
for cpu in range(perf._num_cpu_cores):
path = perf_control.PerfControl._CPU_ONLINE_FMT % cpu
self.assertEquals('1',
self._device.ReadFile(path)[0])
path = perf_control.PerfControl._SCALING_GOVERNOR_FMT % cpu
self.assertEquals('performance',
self._device.ReadFile(path)[0])
finally:
perf.SetDefaultPerfMode()
if __name__ == '__main__':
unittest.main()
|
ElOceanografo/EchoMetrics
|
refs/heads/master
|
build/lib/echometrics/__init__.py
|
2
|
# __init__.py
from echometrics import *
|
HenrikSolver/micropython
|
refs/heads/master
|
drivers/onewire/ds18x20.py
|
33
|
# DS18x20 temperature sensor driver for MicroPython.
# MIT license; Copyright (c) 2016 Damien P. George
from micropython import const
_CONVERT = const(0x44)
_RD_SCRATCH = const(0xbe)
_WR_SCRATCH = const(0x4e)
class DS18X20:
def __init__(self, onewire):
self.ow = onewire
self.buf = bytearray(9)
def scan(self):
return [rom for rom in self.ow.scan() if rom[0] == 0x10 or rom[0] == 0x28]
def convert_temp(self):
self.ow.reset(True)
self.ow.writebyte(self.ow.SKIP_ROM)
self.ow.writebyte(_CONVERT)
def read_scratch(self, rom):
self.ow.reset(True)
self.ow.select_rom(rom)
self.ow.writebyte(_RD_SCRATCH)
self.ow.readinto(self.buf)
if self.ow.crc8(self.buf):
raise Exception('CRC error')
return self.buf
def write_scratch(self, rom, buf):
self.ow.reset(True)
self.ow.select_rom(rom)
self.ow.writebyte(_WR_SCRATCH)
self.ow.write(buf)
def read_temp(self, rom):
buf = self.read_scratch(rom)
if rom[0] == 0x10:
if buf[1]:
t = buf[0] >> 1 | 0x80
t = -((~t + 1) & 0xff)
else:
t = buf[0] >> 1
return t - 0.25 + (buf[7] - buf[6]) / buf[7]
else:
t = buf[1] << 8 | buf[0]
if t & 0x8000: # sign bit set
t = -((t ^ 0xffff) + 1)
return t / 16
|
phisiart/tvm
|
refs/heads/opengl
|
topi/tests/python_cpp/test_topi_broadcast.py
|
1
|
"""Test code for broadcasting operators."""
import os
import numpy as np
import tvm
import topi
def verify_broadcast_to_ele(in_shape, out_shape):
# Build the logic and compile the function
A = tvm.placeholder(shape=in_shape, name="A")
B = topi.cpp.broadcast_to(A, out_shape)
def check_device(device):
if not tvm.module.enabled(device):
print("Skip because %s is not enabled" % device)
return
print("Running on target: %s" % device)
target = topi.cpp.TEST_create_target(device)
s = topi.cpp.cuda.schedule_injective(target, [B])
ctx = tvm.context(device, 0)
foo = tvm.build(s, [A, B], device, name="broadcast_to")
data_npy = np.random.uniform(size=in_shape).astype(A.dtype)
out_npy = np.broadcast_to(data_npy, out_shape)
data_nd = tvm.nd.array(data_npy, ctx)
out_nd = tvm.nd.array(np.empty(out_shape).astype(B.dtype), ctx)
for _ in range(1):
foo(data_nd, out_nd)
np.testing.assert_allclose(out_nd.asnumpy(), out_npy)
check_device("opencl")
check_device("cuda")
#check_device("metal")
#check_device("rocm")
def verify_broadcast_binary_ele(lhs_shape, rhs_shape, typ="add"):
# Build the logic and compile the function
A = tvm.placeholder(shape=lhs_shape, name="A")
B = tvm.placeholder(shape=rhs_shape, name="B")
if typ == "add":
C = topi.cpp.broadcast_add(A, B)
elif typ == "sub":
C = topi.cpp.broadcast_sub(A, B)
elif typ == "div":
C = topi.cpp.broadcast_div(A, B)
elif typ == "mul":
C = topi.cpp.broadcast_mul(A, B)
elif typ == "maximum":
C = topi.cpp.broadcast_maximum(A, B)
elif typ == "minimum":
C = topi.cpp.broadcast_minimum(A, B)
elif typ == "pow":
C = topi.cpp.broadcast_pow(A, B)
else:
raise NotImplementedError
def check_device(device):
if not tvm.module.enabled(device):
print("Skip because %s is not enabled" % device)
return
print("Running on target: %s" % device)
target = topi.cpp.TEST_create_target(device)
s = topi.cpp.cuda.schedule_injective(target, [C])
ctx = tvm.context(device, 0)
foo = tvm.build(s, [A, B, C], device, name="broadcast_binary" + "_" + typ)
lhs_npy = np.random.uniform(size=lhs_shape).astype(A.dtype)
rhs_npy = np.random.uniform(size=rhs_shape).astype(A.dtype)
if typ == "add":
out_npy = lhs_npy + rhs_npy
elif typ == "sub":
out_npy = lhs_npy - rhs_npy
elif typ == "div":
rhs_npy = np.abs(rhs_npy) + 0.001
out_npy = lhs_npy / rhs_npy
elif typ == "mul":
out_npy = lhs_npy * rhs_npy
elif typ == "maximum":
out_npy = np.maximum(lhs_npy, rhs_npy)
elif typ == "minimum":
out_npy = np.minimum(lhs_npy, rhs_npy)
elif typ == "pow":
out_npy = lhs_npy ** rhs_npy
else:
raise NotImplementedError
lhs_nd = tvm.nd.array(lhs_npy, ctx)
rhs_nd = tvm.nd.array(rhs_npy, ctx)
out_nd = tvm.nd.array(np.empty(out_npy.shape).astype(B.dtype), ctx)
for _ in range(1):
foo(lhs_nd, rhs_nd, out_nd)
np.testing.assert_allclose(out_nd.asnumpy(), out_npy, rtol=1E-4, atol=1E-4)
check_device("opencl")
check_device("cuda")
#check_device("metal")
#check_device("rocm")
def test_broadcast_to():
verify_broadcast_to_ele((1,), (10,))
verify_broadcast_to_ele((), (10,))
verify_broadcast_to_ele((1, 1, 5, 4), (3, 4, 4, 4, 5, 4))
verify_broadcast_to_ele((1, 128, 1, 32), (64, 128, 64, 32))
def test_broadcast_binary():
verify_broadcast_binary_ele((5, 2, 3), (2, 1), typ="add")
verify_broadcast_binary_ele((5, 2, 3), (), typ="add")
verify_broadcast_binary_ele((5, 64, 128), (2, 5, 64, 1), typ="mul")
verify_broadcast_binary_ele((2, 3, 1, 32), (64, 32), typ="div")
verify_broadcast_binary_ele((1, 32), (64, 32), typ="sub")
verify_broadcast_binary_ele((32,), (64, 32), typ="maximum")
verify_broadcast_binary_ele((1, 2, 2, 1, 32), (64, 32), typ="minimum")
verify_broadcast_binary_ele((1, 32), (64, 32), typ="pow")
if __name__ == "__main__":
test_broadcast_to()
test_broadcast_binary()
|
kfoss/thrift
|
refs/heads/0.1.x
|
tutorial/php/runserver.py
|
117
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import BaseHTTPServer
import CGIHTTPServer
# chdir(2) into the tutorial directory.
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
class Handler(CGIHTTPServer.CGIHTTPRequestHandler):
cgi_directories = ['/php']
BaseHTTPServer.HTTPServer(('', 8080), Handler).serve_forever()
|
jimt/wikieducator-package
|
refs/heads/master
|
wikieducator_package_config_sample.py
|
1
|
# rename to wikieducator_package_config.py and edit to suit
# directory for ZIP files, needs to be web accessible
zips_directory = '/var/www/html/wikieducator'
# base download URL corresponding to that directory
# (no trailing slash)
download_url = 'http://example.com/wikieducator'
# directory containing templates for export formats
templates_directory = '/var/www/html/wikieducator/templates'
# form action URL
# (depends on webserver CGI invocation configuration)
form_action = 'http://example.com/wikieducator/package/'
|
Jenselme/OWSLib
|
refs/heads/master
|
owslib/swe/sensor/__init__.py
|
144
|
from __future__ import (absolute_import, division, print_function)
|
eLBati/odoo
|
refs/heads/master
|
addons/mail/static/scripts/__init__.py
|
58
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009-2010 OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp_mailgate
|
jledbetter/openhatch
|
refs/heads/master
|
mysite/profile/migrations/0079_smaller_thumbnail.py
|
17
|
# This file is part of OpenHatch.
# Copyright (C) 2010 Parker Phinney
# Copyright (C) 2010 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.profile.models import *
class Migration:
def forwards(self, orm):
# Adding field 'Person.photo_thumbnail_20px_wide'
db.add_column('profile_person', 'photo_thumbnail_20px_wide', orm['profile.person:photo_thumbnail_20px_wide'])
# Changing field 'DataImportAttempt.date_created'
# (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2010, 4, 7, 17, 36, 3, 812587)))
db.alter_column('profile_dataimportattempt', 'date_created', orm['profile.dataimportattempt:date_created'])
# Changing field 'PortfolioEntry.date_created'
# (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2010, 4, 7, 17, 36, 4, 540996)))
db.alter_column('profile_portfolioentry', 'date_created', orm['profile.portfolioentry:date_created'])
# Changing field 'Citation.date_created'
# (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2010, 4, 7, 17, 36, 4, 627808)))
db.alter_column('profile_citation', 'date_created', orm['profile.citation:date_created'])
def backwards(self, orm):
# Deleting field 'Person.photo_thumbnail_20px_wide'
db.delete_column('profile_person', 'photo_thumbnail_20px_wide')
# Changing field 'DataImportAttempt.date_created'
# (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2010, 3, 27, 21, 8, 58, 554916)))
db.alter_column('profile_dataimportattempt', 'date_created', orm['profile.dataimportattempt:date_created'])
# Changing field 'PortfolioEntry.date_created'
# (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2010, 3, 27, 21, 8, 59, 201995)))
db.alter_column('profile_portfolioentry', 'date_created', orm['profile.portfolioentry:date_created'])
# Changing field 'Citation.date_created'
# (to signature: django.db.models.fields.DateTimeField(default=datetime.datetime(2010, 3, 27, 21, 8, 58, 167309)))
db.alter_column('profile_citation', 'date_created', orm['profile.citation:date_created'])
models = {
'auth.group': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customs.webresponse': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'response_headers': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'text': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {})
},
'profile.citation': {
'contributor_role': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'data_import_attempt': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.DataImportAttempt']", 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 4, 7, 17, 36, 6, 389742)'}),
'distinct_months': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'first_commit_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_due_to_duplicate': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'languages': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'old_summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'portfolio_entry': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.PortfolioEntry']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'profile.dataimportattempt': {
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 4, 7, 17, 36, 5, 338940)'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}),
'query': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'web_response': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customs.WebResponse']", 'null': 'True'})
},
'profile.forwarder': {
'address': ('django.db.models.fields.TextField', [], {}),
'expires_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1970, 1, 1, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stops_being_listed_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1970, 1, 1, 0, 0)'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'profile.link_person_tag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"})
},
'profile.link_project_tag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Tag']"})
},
'profile.link_sf_proj_dude_fm': {
'Meta': {'unique_together': "[('person', 'project')]"},
'date_collected': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.SourceForgePerson']"}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.SourceForgeProject']"})
},
'profile.person': {
'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'blacklisted_repository_committers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profile.RepositoryCommitter']"}),
'contact_blurb': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'dont_guess_my_location': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'expand_next_steps': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'gotten_name_from_ohloh': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'homepage_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interested_in_working_on': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024'}),
'last_polled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1970, 1, 1, 0, 0)'}),
'location_confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'location_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100'}),
'photo_thumbnail': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'photo_thumbnail_20px_wide': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'photo_thumbnail_30px_wide': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'show_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'profile.portfolioentry': {
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 4, 7, 17, 36, 6, 229336)'}),
'experience_description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'project_description': ('django.db.models.fields.TextField', [], {})
},
'profile.repositorycommitter': {
'Meta': {'unique_together': "(('project', 'data_import_attempt'),)"},
'data_import_attempt': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.DataImportAttempt']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"})
},
'profile.sourceforgeperson': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'profile.sourceforgeproject': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'unixname': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'profile.tag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.TagType']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'profile.tagtype': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'search.project': {
'cached_contributor_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'icon_for_profile': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_raw': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'logo_contains_name': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'people_who_wanna_help': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profile.Person']"})
}
}
complete_apps = ['profile']
|
megaumi/django
|
refs/heads/master
|
tests/utils_tests/test_encoding.py
|
288
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import unittest
from django.utils import six
from django.utils.encoding import (
escape_uri_path, filepath_to_uri, force_bytes, force_text, iri_to_uri,
smart_text, uri_to_iri,
)
from django.utils.functional import SimpleLazyObject
from django.utils.http import urlquote_plus
class TestEncodingUtils(unittest.TestCase):
def test_force_text_exception(self):
"""
Check that broken __unicode__/__str__ actually raises an error.
"""
class MyString(object):
def __str__(self):
return b'\xc3\xb6\xc3\xa4\xc3\xbc'
__unicode__ = __str__
# str(s) raises a TypeError on python 3 if the result is not a text type.
# python 2 fails when it tries converting from str to unicode (via ASCII).
exception = TypeError if six.PY3 else UnicodeError
self.assertRaises(exception, force_text, MyString())
def test_force_text_lazy(self):
s = SimpleLazyObject(lambda: 'x')
self.assertTrue(issubclass(type(force_text(s)), six.text_type))
def test_force_bytes_exception(self):
"""
Test that force_bytes knows how to convert to bytes an exception
containing non-ASCII characters in its args.
"""
error_msg = "This is an exception, voilà"
exc = ValueError(error_msg)
result = force_bytes(exc)
self.assertEqual(result, error_msg.encode('utf-8'))
def test_force_bytes_strings_only(self):
today = datetime.date.today()
self.assertEqual(force_bytes(today, strings_only=True), today)
def test_smart_text(self):
class Test:
if six.PY3:
def __str__(self):
return 'ŠĐĆŽćžšđ'
else:
def __str__(self):
return 'ŠĐĆŽćžšđ'.encode('utf-8')
class TestU:
if six.PY3:
def __str__(self):
return 'ŠĐĆŽćžšđ'
def __bytes__(self):
return b'Foo'
else:
def __str__(self):
return b'Foo'
def __unicode__(self):
return '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111'
self.assertEqual(smart_text(Test()), '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
self.assertEqual(smart_text(TestU()), '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
self.assertEqual(smart_text(1), '1')
self.assertEqual(smart_text('foo'), 'foo')
class TestRFC3987IEncodingUtils(unittest.TestCase):
def test_filepath_to_uri(self):
self.assertEqual(filepath_to_uri('upload\\чубака.mp4'),
'upload/%D1%87%D1%83%D0%B1%D0%B0%D0%BA%D0%B0.mp4')
self.assertEqual(filepath_to_uri('upload\\чубака.mp4'.encode('utf-8')),
'upload/%D1%87%D1%83%D0%B1%D0%B0%D0%BA%D0%B0.mp4')
def test_iri_to_uri(self):
cases = [
# Valid UTF-8 sequences are encoded.
('red%09rosé#red', 'red%09ros%C3%A9#red'),
('/blog/for/Jürgen Münster/', '/blog/for/J%C3%BCrgen%20M%C3%BCnster/'),
('locations/%s' % urlquote_plus('Paris & Orléans'), 'locations/Paris+%26+Orl%C3%A9ans'),
# Reserved chars remain unescaped.
('%&', '%&'),
('red&♥ros%#red', 'red&%E2%99%A5ros%#red'),
]
for iri, uri in cases:
self.assertEqual(iri_to_uri(iri), uri)
# Test idempotency.
self.assertEqual(iri_to_uri(iri_to_uri(iri)), uri)
def test_uri_to_iri(self):
cases = [
# Valid UTF-8 sequences are decoded.
('/%E2%99%A5%E2%99%A5/', '/♥♥/'),
('/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93', '/♥♥/?utf8=✓'),
# Broken UTF-8 sequences remain escaped.
('/%AAd%AAj%AAa%AAn%AAg%AAo%AA/', '/%AAd%AAj%AAa%AAn%AAg%AAo%AA/'),
('/%E2%99%A5%E2%E2%99%A5/', '/♥%E2♥/'),
('/%E2%99%A5%E2%99%E2%99%A5/', '/♥%E2%99♥/'),
('/%E2%E2%99%A5%E2%99%A5%99/', '/%E2♥♥%99/'),
('/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93', '/♥♥/?utf8=%9C%93✓%9C%93'),
]
for uri, iri in cases:
self.assertEqual(uri_to_iri(uri), iri)
# Test idempotency.
self.assertEqual(uri_to_iri(uri_to_iri(uri)), iri)
def test_complementarity(self):
cases = [
('/blog/for/J%C3%BCrgen%20M%C3%BCnster/', '/blog/for/J\xfcrgen M\xfcnster/'),
('%&', '%&'),
('red&%E2%99%A5ros%#red', 'red&♥ros%#red'),
('/%E2%99%A5%E2%99%A5/', '/♥♥/'),
('/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93', '/♥♥/?utf8=✓'),
('/%AAd%AAj%AAa%AAn%AAg%AAo%AA/', '/%AAd%AAj%AAa%AAn%AAg%AAo%AA/'),
('/%E2%99%A5%E2%E2%99%A5/', '/♥%E2♥/'),
('/%E2%99%A5%E2%99%E2%99%A5/', '/♥%E2%99♥/'),
('/%E2%E2%99%A5%E2%99%A5%99/', '/%E2♥♥%99/'),
('/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93', '/♥♥/?utf8=%9C%93✓%9C%93'),
]
for uri, iri in cases:
self.assertEqual(iri_to_uri(uri_to_iri(uri)), uri)
self.assertEqual(uri_to_iri(iri_to_uri(iri)), iri)
def test_escape_uri_path(self):
self.assertEqual(
escape_uri_path('/;some/=awful/?path/:with/@lots/&of/+awful/chars'),
'/%3Bsome/%3Dawful/%3Fpath/:with/@lots/&of/+awful/chars'
)
self.assertEqual(escape_uri_path('/foo#bar'), '/foo%23bar')
self.assertEqual(escape_uri_path('/foo?bar'), '/foo%3Fbar')
|
atiqueahmedziad/addons-server
|
refs/heads/master
|
src/olympia/ratings/tests/test_helpers.py
|
5
|
from django.template import engines
from pyquery import PyQuery as pq
from olympia.addons.models import Addon
from olympia.amo.tests import TestCase, addon_factory
from olympia.amo.urlresolvers import reverse
from olympia.ratings.forms import RatingForm
from olympia.ratings.models import RatingFlag
class HelpersTest(TestCase):
def render(self, s, context=None):
if context is None:
context = {}
return engines['jinja2'].from_string(s).render(context)
def test_stars(self):
s = self.render('{{ num|stars }}', {'num': None})
assert s == 'Not yet rated'
doc = pq(self.render('{{ num|stars }}', {'num': 1}))
msg = 'Rated 1 out of 5 stars'
assert doc.attr('class') == 'stars stars-1'
assert doc.attr('title') == msg
assert doc.text() == msg
def test_stars_details_page(self):
doc = pq(self.render('{{ num|stars(large=True) }}', {'num': 2}))
assert doc('.stars').attr('class') == 'stars large stars-2'
def test_stars_max(self):
doc = pq(self.render('{{ num|stars }}', {'num': 5.3}))
assert doc.attr('class') == 'stars stars-5'
def test_reviews_link(self):
a = addon_factory(average_rating=4, total_ratings=37, id=1, slug='xx')
s = self.render('{{ reviews_link(myaddon) }}', {'myaddon': a})
assert pq(s)('strong').text() == '37 reviews'
# without collection uuid
assert pq(s)('a').attr('href') == '/en-US/firefox/addon/xx/reviews/'
# with collection uuid
myuuid = 'f19a8822-1ee3-4145-9440-0a3640201fe6'
s = self.render('{{ reviews_link(myaddon, myuuid) }}',
{'myaddon': a, 'myuuid': myuuid})
assert pq(s)('a').attr('href') == (
'/en-US/firefox/addon/xx/reviews/?collection_uuid=%s' % myuuid)
z = Addon(average_rating=0, total_ratings=0, id=1, type=1, slug='xx')
s = self.render('{{ reviews_link(myaddon) }}', {'myaddon': z})
assert pq(s)('strong').text() == 'Not yet rated'
# with link
u = reverse('addons.ratings.list', args=['xx'])
s = self.render('{{ reviews_link(myaddon) }}',
{'myaddon': a})
assert pq(s)('a').attr('href') == u
def test_impala_reviews_link(self):
a = addon_factory(average_rating=4, total_ratings=37, id=1, slug='xx')
s = self.render('{{ impala_reviews_link(myaddon) }}', {'myaddon': a})
assert pq(s)('a').text() == '(37)'
# without collection uuid
assert pq(s)('a').attr('href') == '/en-US/firefox/addon/xx/reviews/'
# with collection uuid
myuuid = 'f19a8822-1ee3-4145-9440-0a3640201fe6'
s = self.render('{{ impala_reviews_link(myaddon, myuuid) }}',
{'myaddon': a, 'myuuid': myuuid})
assert pq(s)('a').attr('href') == (
'/en-US/firefox/addon/xx/reviews/?collection_uuid=%s' % myuuid)
z = Addon(average_rating=0, total_ratings=0, id=1, type=1, slug='xx')
s = self.render('{{ impala_reviews_link(myaddon) }}', {'myaddon': z})
assert pq(s)('b').text() == 'Not yet rated'
# with link
u = reverse('addons.ratings.list', args=['xx'])
s = self.render(
'{{ impala_reviews_link(myaddon) }}',
{'myaddon': a})
assert pq(s)('a').attr('href') == u
def test_report_review_popup(self):
doc = pq(self.render('{{ report_review_popup() }}'))
assert doc('.popup.review-reason').length == 1
for flag, text in RatingFlag.FLAGS:
assert doc('li a[href$=%s]' % flag).text() == text
assert doc('form input[name=note]').length == 1
def test_edit_review_form(self):
doc = pq(self.render('{{ edit_review_form() }}'))
assert doc('#review-edit-form').length == 1
assert doc('p.req').length == 1
for name in RatingForm().fields.keys():
assert doc('[name=%s]' % name).length == 1
|
saraivaufc/PySpy
|
refs/heads/master
|
pysocket/__init__.py
|
1
|
from .pysocket import *
|
pedro2d10/SickRage-FR
|
refs/heads/develop
|
lib/html5lib/treeadapters/sax.py
|
1835
|
from __future__ import absolute_import, division, unicode_literals
from xml.sax.xmlreader import AttributesNSImpl
from ..constants import adjustForeignAttributes, unadjustForeignAttributes
prefix_mapping = {}
for prefix, localName, namespace in adjustForeignAttributes.values():
if prefix is not None:
prefix_mapping[prefix] = namespace
def to_sax(walker, handler):
"""Call SAX-like content handler based on treewalker walker"""
handler.startDocument()
for prefix, namespace in prefix_mapping.items():
handler.startPrefixMapping(prefix, namespace)
for token in walker:
type = token["type"]
if type == "Doctype":
continue
elif type in ("StartTag", "EmptyTag"):
attrs = AttributesNSImpl(token["data"],
unadjustForeignAttributes)
handler.startElementNS((token["namespace"], token["name"]),
token["name"],
attrs)
if type == "EmptyTag":
handler.endElementNS((token["namespace"], token["name"]),
token["name"])
elif type == "EndTag":
handler.endElementNS((token["namespace"], token["name"]),
token["name"])
elif type in ("Characters", "SpaceCharacters"):
handler.characters(token["data"])
elif type == "Comment":
pass
else:
assert False, "Unknown token type"
for prefix, namespace in prefix_mapping.items():
handler.endPrefixMapping(prefix)
handler.endDocument()
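# --- Illustrative usage sketch (not part of the original html5lib module) ---
# to_sax() streams treewalker tokens into any SAX-style ContentHandler. The
# handler below is a hypothetical minimal example that only prints local
# element names; html5lib.parse and html5lib.getTreeWalker are the usual
# entry points for producing the tree and the walker.
#
#   import html5lib
#   from xml.sax.handler import ContentHandler
#
#   class NamePrinter(ContentHandler):
#       def startElementNS(self, name, qname, attrs):
#           print(name[1])
#
#   tree = html5lib.parse("<p>Hello <b>world</b></p>")
#   walker = html5lib.getTreeWalker("etree")
#   to_sax(walker(tree), NamePrinter())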
|
liurenqiu520/AutobahnPython
|
refs/heads/master
|
examples/twisted/websocket/echo_tls/server.py
|
18
|
###############################################################################
##
## Copyright (C) 2011-2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor, ssl
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
class EchoServerProtocol(WebSocketServerProtocol):
def onMessage(self, payload, isBinary):
self.sendMessage(payload, isBinary)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'debug':
log.startLogging(sys.stdout)
debug = True
else:
debug = False
## SSL server context: load server key and certificate
## We use this for both WS and Web!
##
contextFactory = ssl.DefaultOpenSSLContextFactory('keys/server.key',
'keys/server.crt')
factory = WebSocketServerFactory("wss://localhost:9000",
debug = debug,
debugCodePaths = debug)
factory.protocol = EchoServerProtocol
factory.setProtocolOptions(allowHixie76 = True)
listenWS(factory, contextFactory)
webdir = File(".")
webdir.contentTypes['.crt'] = 'application/x-x509-ca-cert'
web = Site(webdir)
#reactor.listenSSL(8080, web, contextFactory)
reactor.listenTCP(8080, web)
reactor.run()
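# --- Hypothetical companion client (not part of the original example) ---
# A minimal sketch of how this echo server could be exercised with the same
# autobahn/Twisted APIs of that era; the class name and message payload are
# illustrative only.
#
#   from twisted.internet import reactor, ssl
#   from autobahn.twisted.websocket import WebSocketClientFactory, \
#                                           WebSocketClientProtocol, \
#                                           connectWS
#
#   class EchoClientProtocol(WebSocketClientProtocol):
#       def onOpen(self):
#           self.sendMessage("Hello, world!")
#       def onMessage(self, payload, isBinary):
#           print "Echo received:", payload
#
#   factory = WebSocketClientFactory("wss://localhost:9000")
#   factory.protocol = EchoClientProtocol
#   connectWS(factory, ssl.ClientContextFactory())
#   reactor.run()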
|
nickrobinson/VoicemailStress
|
refs/heads/master
|
call.py
|
1
|
import sys
import pjsua as pj
import time
# Logging callback
def log_cb(level, str, len):
print str,
# Callback to receive events from Call
class MyCallCallback(pj.CallCallback):
    def __init__(self, call=None):
        pj.CallCallback.__init__(self, call)
        self._player = None
    # Notification when call state has changed
    def on_state(self):
        print "Call is ", self.call.info().state_text,
        print "last code =", self.call.info().last_code,
        print "(" + self.call.info().last_reason + ")"
        if self.call.info().state == pj.CallState.DISCONNECTED:
            # Guard against disconnects that happen before media ever became
            # active (in which case no player was created).
            if self._player is not None:
                lib.player_destroy(self._player)
# Notification when call's media state has changed.
def on_media_state(self):
global lib
if self.call.info().media_state == pj.MediaState.ACTIVE:
# Connect the call to sound device
call_slot = self.call.info().conf_slot
self._player = lib.create_player("man2_64.wav", loop=True)
player_slot = lib.player_get_slot(self._player)
#lib.conf_connect(call_slot, 0)
#lib.conf_connect(0, call_slot)
lib.conf_connect(player_slot, call_slot)
#lib.conf_connect(call_slot, player_slot)
lib.conf_connect(player_slot, 0)
print "Hello world, I can talk!"
# Check command line argument
if len(sys.argv) != 4:
print "Usage: call.py <dst-URI> <ext> <pass>"
sys.exit(1)
try:
uc = pj.UAConfig()
#uc.max_calls = 20
# Create library instance
lib = pj.Lib()
# Init library with default config
log_config = pj.LogConfig(level=5, callback=log_cb)
log_config.msg_logging = False
lib.init(ua_cfg = uc, log_cfg = log_config)
# Create UDP transport which listens to any available port
transport = lib.create_transport(pj.TransportType.UDP)
# Start the library
lib.start()
# Build the account configuration
acc_cfg = pj.AccountConfig("10.10.10.1", sys.argv[2], sys.argv[3])
# Create local/user-less account
acc = lib.create_account(acc_cfg)
# Make call
call1 = acc.make_call(sys.argv[1], MyCallCallback())
#time.sleep(1)
#call2 = acc.make_call(sys.argv[1], MyCallCallback())
#time.sleep(1)
#call3 = acc.make_call(sys.argv[1], MyCallCallback())
#time.sleep(1)
time.sleep(30)
call1.hangup()
#time.sleep(1)
#call2.hangup()
#time.sleep(1)
#call3.hangup()
time.sleep(11)
# We're done, shutdown the library
lib.destroy()
lib = None
    sys.exit(0)
except pj.Error, e:
print "Exception: " + str(e)
lib.destroy()
lib = None
sys.exit(1)
|
jhunufernandes/ArduWatchRaspSerial
|
refs/heads/master
|
virtualenv/lib/python3.4/site-packages/pkg_resources/_vendor/packaging/__init__.py
|
1178
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
|
ausarbluhd/EternalLLC
|
refs/heads/master
|
scripts/mallory/src/dns/e164.py
|
248
|
# Copyright (C) 2006, 2007, 2009 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS E.164 helpers
@var public_enum_domain: The DNS public ENUM domain, e164.arpa.
@type public_enum_domain: dns.name.Name object
"""
import dns.exception
import dns.name
import dns.resolver
public_enum_domain = dns.name.from_text('e164.arpa.')
def from_e164(text, origin=public_enum_domain):
"""Convert an E.164 number in textual form into a Name object whose
value is the ENUM domain name for that number.
@param text: an E.164 number in textual form.
@type text: str
@param origin: The domain in which the number should be constructed.
The default is e164.arpa.
@type: dns.name.Name object or None
@rtype: dns.name.Name object
"""
parts = [d for d in text if d.isdigit()]
parts.reverse()
return dns.name.from_text('.'.join(parts), origin=origin)
def to_e164(name, origin=public_enum_domain, want_plus_prefix=True):
"""Convert an ENUM domain name into an E.164 number.
@param name: the ENUM domain name.
@type name: dns.name.Name object.
@param origin: A domain containing the ENUM domain name. The
name is relativized to this domain before being converted to text.
@type: dns.name.Name object or None
@param want_plus_prefix: if True, add a '+' to the beginning of the
returned number.
@rtype: str
"""
    if origin is not None:
name = name.relativize(origin)
dlabels = [d for d in name.labels if (d.isdigit() and len(d) == 1)]
if len(dlabels) != len(name.labels):
raise dns.exception.SyntaxError('non-digit labels in ENUM domain name')
dlabels.reverse()
text = ''.join(dlabels)
if want_plus_prefix:
text = '+' + text
return text
def query(number, domains, resolver=None):
"""Look for NAPTR RRs for the specified number in the specified domains.
e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.'])
"""
if resolver is None:
resolver = dns.resolver.get_default_resolver()
for domain in domains:
if isinstance(domain, (str, unicode)):
domain = dns.name.from_text(domain)
qname = dns.e164.from_e164(number, domain)
try:
return resolver.query(qname, 'NAPTR')
except dns.resolver.NXDOMAIN:
pass
raise dns.resolver.NXDOMAIN
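# --- Illustrative usage sketch (not part of the original module) ---
# E.164 numbers round-trip through ENUM domain names; the values shown are
# what the helpers above are expected to produce for this input.
#
#   name = from_e164('+1 650 555 1212')
#   # -> name for 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.
#   to_e164(name)
#   # -> '+16505551212'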
|
EnTeQuAk/django-filer
|
refs/heads/develop
|
filer/utils/loader.py
|
22
|
#-*- coding: utf-8 -*-
"""
This function is snatched from
https://github.com/ojii/django-load/blob/3058ab9d9d4875589638cc45e84b59e7e1d7c9c3/django_load/core.py#L49
local changes:
* added check for basestring to allow values that are already an object
or method.
"""
from django.utils import six
from django.utils.importlib import import_module
def load_object(import_path):
"""
Loads an object from an 'import_path', like in MIDDLEWARE_CLASSES and the
likes.
Import paths should be: "mypackage.mymodule.MyObject". It then imports the
module up until the last dot and tries to get the attribute after that dot
from the imported module.
If the import path does not contain any dots, a TypeError is raised.
If the module cannot be imported, an ImportError is raised.
    If the attribute does not exist in the module, an AttributeError is raised.
"""
if not isinstance(import_path, six.string_types):
return import_path
if '.' not in import_path:
raise TypeError(
"'import_path' argument to 'django_load.core.load_object' " +\
"must contain at least one dot.")
module_name, object_name = import_path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, object_name)
def storage_factory(klass, location, base_url):
"""
This factory returns an instance of the storage class provided.
args:
    * klass: must inherit from ``django.core.files.storage.Storage``
* location: is a string representing the PATH similar to MEDIA_ROOT
* base_url: is a string representing the URL similar to MEDIA_URL
"""
return klass(location=location, base_url=base_url)
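# --- Illustrative usage sketch (not part of the original module) ---
# The dotted path below points at Django's stock filesystem storage backend;
# the location and base_url values are placeholders, not project settings.
#
#   storage_class = load_object('django.core.files.storage.FileSystemStorage')
#   storage = storage_factory(storage_class,
#                             location='/var/www/media/filer',
#                             base_url='/media/filer/')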
|