text
stringlengths 4
1.02M
| meta
dict |
|---|---|
class OSVersion(str):
    """A string-valued OS version name that sorts in release order.

    An instance displays and compares equal as its friendly name (e.g.
    'win7'), but ordering comparisons use a numeric sort key instead of
    lexicographic string order, so 'win10' correctly sorts after 'win8.1'.
    """

    def __new__(cls, friendly_name, sortable_name):
        instance = str.__new__(cls, friendly_name)
        # Numeric key used by the ordering operators below.
        instance._sortable_name = sortable_name
        return instance

    def __lt__(self, other):
        return self._sortable_name < other._sortable_name

    def __gt__(self, other):
        return self._sortable_name > other._sortable_name

    def __le__(self, other):
        return self._sortable_name <= other._sortable_name

    def __ge__(self, other):
        return self._sortable_name >= other._sortable_name
# Windows releases: the sort key is the internal Windows NT version number
# (e.g. XP is NT 5.1, Vista is NT 6.0), so numeric order matches release order.
XP = OSVersion('xp', 5.1)
VISTA = OSVersion('vista', 6.0)
WIN7 = OSVersion('win7', 6.1)
WIN8 = OSVersion('win8', 6.2)
WIN81 = OSVersion('win8.1', 6.3)
WIN10 = OSVersion('win10', 10)
# Mac OS X / macOS releases: the sort key concatenates the major and minor
# version digits (10.5 -> 105, 10.10 -> 1010), preserving release order.
LEOPARD = OSVersion('leopard', 105)
SNOWLEOPARD = OSVersion('snowleopard', 106)
LION = OSVersion('lion', 107)
MOUNTAINLION = OSVersion('mountainlion', 108)
MAVERICKS = OSVersion('mavericks', 109)
YOSEMITE = OSVersion('yosemite', 1010)
ELCAPITAN = OSVersion('elcapitan', 1011)
SIERRA = OSVersion('sierra', 1012)
HIGHSIERRA = OSVersion('highsierra', 1013)
MOJAVE = OSVersion('mojave', 1014)
CATALINA = OSVersion('catalina', 1015)
|
{
"content_hash": "77f7bce67192539d8343a5cdf6fad93f",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 54,
"avg_line_length": 30.35135135135135,
"alnum_prop": 0.6838824577025824,
"repo_name": "endlessm/chromium-browser",
"id": "5214f3defaf410d3ab03c23ffae3f63a0915838e",
"size": "1323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/catapult/telemetry/telemetry/core/os_version.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
'''OpenGL extension ARB.vertex_type_2_10_10_10_rev

Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes

EXTENSION_NAME = 'GL_ARB_vertex_type_2_10_10_10_rev'
_DEPRECATED = False

GL_INT_2_10_10_10_REV = constant.Constant( 'GL_INT_2_10_10_10_REV', 0x8D9F )

# (ctype, doc label) pairs used to build each entry point's argTypes tuple
# and its doc string.
_ENUM = (constants.GLenum, 'GLenum')
_UINT = (constants.GLuint, 'GLuint')
_BOOL = (constants.GLboolean, 'GLboolean')
_UINT_ARRAY = (arrays.GLuintArray, 'GLuintArray')

# Each family: (prefix, component counts, leading params, leading parameter
# names, name of the packed-value parameter).  Every (family, count) pair
# yields a scalar '<prefix>P<n>ui' (GLuint) and a vector '<prefix>P<n>uiv'
# (GLuintArray) entry point.
_FAMILIES = (
    ('glVertex', (2, 3, 4), (_ENUM,), ('type',), 'value'),
    ('glTexCoord', (1, 2, 3, 4), (_ENUM,), ('type',), 'coords'),
    ('glMultiTexCoord', (1, 2, 3, 4), (_ENUM, _ENUM), ('texture', 'type'),
     'coords'),
    ('glNormal', (3,), (_ENUM,), ('type',), 'coords'),
    ('glColor', (3, 4), (_ENUM,), ('type',), 'color'),
    ('glSecondaryColor', (3,), (_ENUM,), ('type',), 'color'),
    ('glVertexAttrib', (1, 2, 3, 4), (_UINT, _ENUM, _BOOL),
     ('index', 'type', 'normalized'), 'value'),
)

def _entry_point(name, params, argNames):
    # Build one extension function wrapper; params is a sequence of
    # (ctype, label) pairs parallel to argNames.
    signature = ', '.join(
        '%s(%s)' % (label, argName)
        for (_, label), argName in zip(params, argNames))
    return platform.createExtensionFunction(
        name, dll=platform.GL,
        extension=EXTENSION_NAME,
        resultType=None,
        argTypes=tuple(ctype for ctype, _ in params),
        doc='%s(%s) -> None' % (name, signature),
        argNames=tuple(argNames),
        deprecated=_DEPRECATED,
    )

# Declare every scalar/vector entry point as a module-level name.
for _prefix, _counts, _leading, _leading_names, _packed in _FAMILIES:
    for _count in _counts:
        _scalar = '%sP%dui' % (_prefix, _count)
        globals()[_scalar] = _entry_point(
            _scalar, _leading + (_UINT,), _leading_names + (_packed,))
        globals()[_scalar + 'v'] = _entry_point(
            _scalar + 'v', _leading + (_UINT_ARRAY,),
            _leading_names + (_packed,))

def glInitVertexType2101010RevARB():
    '''Return boolean indicating whether this extension is available'''
    return extensions.hasGLExtension( EXTENSION_NAME )
|
{
"content_hash": "bc23554ff08d91d752789396a1daf316",
"timestamp": "",
"source": "github",
"line_count": 395,
"max_line_length": 106,
"avg_line_length": 33.27088607594937,
"alnum_prop": 0.7826053873078679,
"repo_name": "Universal-Model-Converter/UMC3.0a",
"id": "9e19d10ddde5b1b2ddaf3204539bc7455f55f585",
"size": "13142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/Python/x86/Lib/site-packages/OpenGL/raw/GL/ARB/vertex_type_2_10_10_10_rev.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "226"
},
{
"name": "C",
"bytes": "1082640"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "3621086"
},
{
"name": "CSS",
"bytes": "6226"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "FORTRAN",
"bytes": "7795"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "Groff",
"bytes": "5943"
},
{
"name": "HTML",
"bytes": "1196266"
},
{
"name": "Java",
"bytes": "5793"
},
{
"name": "Makefile",
"bytes": "1109"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "33351557"
},
{
"name": "R",
"bytes": "1370"
},
{
"name": "Shell",
"bytes": "6931"
},
{
"name": "Tcl",
"bytes": "2084458"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
"""Accesses the google.spanner.admin.instance.v1 InstanceAdmin API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.cloud.spanner_admin_instance_v1.gapic import enums
from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config
from google.cloud.spanner_admin_instance_v1.gapic.transports import (
instance_admin_grpc_transport,
)
from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2
from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-spanner").version
class InstanceAdminClient(object):
"""
Cloud Spanner Instance Admin API
The Cloud Spanner Instance Admin API can be used to create, delete,
modify and list instances. Instances are dedicated Cloud Spanner serving
and storage resources to be used by Cloud Spanner databases.
Each instance has a "configuration", which dictates where the
serving resources for the Cloud Spanner instance are located (e.g.,
US-central, Europe). Configurations are created by Google based on
resource availability.
Cloud Spanner billing is based on the instances that exist and their
sizes. After an instance exists, there are no additional
per-database or per-operation charges for use of the instance
(though there may be additional network bandwidth charges).
Instances offer isolation: problems with databases in one instance
will not affect other instances. However, within an instance
databases can affect each other. For example, if one database in an
instance receives a lot of requests and consumes most of the
instance resources, fewer resources are available for other
databases in that instance, and their performance may suffer.
"""
SERVICE_ADDRESS = "spanner.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.spanner.admin.instance.v1.InstanceAdmin"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
    """Build a client whose credentials come from a service-account key file.

    Args:
        filename (str): The path to the service account private key json
            file.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        InstanceAdminClient: The constructed client.
    """
    loaded_credentials = service_account.Credentials.from_service_account_file(
        filename
    )
    kwargs["credentials"] = loaded_credentials
    return cls(*args, **kwargs)

# Alias kept for backward compatibility with older call sites.
from_service_account_json = from_service_account_file
@classmethod
def project_path(cls, project):
    """Return a fully-qualified project string."""
    template = "projects/{project}"
    return google.api_core.path_template.expand(template, project=project)
@classmethod
def instance_config_path(cls, project, instance_config):
    """Return a fully-qualified instance_config string."""
    template = "projects/{project}/instanceConfigs/{instance_config}"
    return google.api_core.path_template.expand(
        template,
        instance_config=instance_config,
        project=project,
    )
@classmethod
def instance_path(cls, project, instance):
    """Return a fully-qualified instance string."""
    template = "projects/{project}/instances/{instance}"
    return google.api_core.path_template.expand(
        template,
        instance=instance,
        project=project,
    )
def __init__(
    self,
    transport=None,
    channel=None,
    credentials=None,
    client_config=None,
    client_info=None,
):
    """Constructor.

    Args:
        transport (Union[~.InstanceAdminGrpcTransport,
            Callable[[~.Credentials, type], ~.InstanceAdminGrpcTransport]): A transport
            instance, responsible for actually making the API calls.
            The default transport uses the gRPC protocol.
            This argument may also be a callable which returns a
            transport instance. Callables will be sent the credentials
            as the first argument and the default transport class as
            the second argument.
        channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
            through which to make calls. This argument is mutually exclusive
            with ``credentials``; providing both will raise an exception.
        credentials (google.auth.credentials.Credentials): The
            authorization credentials to attach to requests. These
            credentials identify this application to the service. If none
            are specified, the client will attempt to ascertain the
            credentials from the environment.
            This argument is mutually exclusive with providing a
            transport instance to ``transport``; doing so will raise
            an exception.
        client_config (dict): DEPRECATED. A dictionary of call options for
            each method. If not specified, the default configuration is used.
        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
            The client info used to send a user-agent string along with
            API requests. If ``None``, then default info will be used.
            Generally, you only need to set this if you're developing
            your own client library.
    """
    # Raise deprecation warnings for things we want to go away.
    if client_config is not None:
        warnings.warn(
            "The `client_config` argument is deprecated.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
    else:
        # Fall back to the generated per-method defaults.
        client_config = instance_admin_client_config.config
    if channel:
        warnings.warn(
            "The `channel` argument is deprecated; use " "`transport` instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
    # Instantiate the transport.
    # The transport is responsible for handling serialization and
    # deserialization and actually sending data to the service.
    if transport:
        if callable(transport):
            # A transport factory: call it with our credentials and the
            # default transport class.
            self.transport = transport(
                credentials=credentials,
                default_class=instance_admin_grpc_transport.InstanceAdminGrpcTransport,
            )
        else:
            if credentials:
                raise ValueError(
                    "Received both a transport instance and "
                    "credentials; these are mutually exclusive."
                )
            self.transport = transport
    else:
        self.transport = instance_admin_grpc_transport.InstanceAdminGrpcTransport(
            address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials
        )
    if client_info is None:
        client_info = google.api_core.gapic_v1.client_info.ClientInfo(
            gapic_version=_GAPIC_LIBRARY_VERSION
        )
    else:
        client_info.gapic_version = _GAPIC_LIBRARY_VERSION
    self._client_info = client_info
    # Parse out the default settings for retry and timeout for each RPC
    # from the client configuration.
    # (Ordinarily, these are the defaults specified in the `*_config.py`
    # file next to this one.)
    self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
        client_config["interfaces"][self._INTERFACE_NAME]
    )
    # Save a dictionary of cached API call functions.
    # These are the actual callables which invoke the proper
    # transport methods, wrapped with `wrap_method` to add retry,
    # timeout, and the like.
    self._inner_api_calls = {}
# Service calls
def list_instance_configs(
    self,
    parent,
    page_size=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """Lists the supported instance configurations for a given project.

    Example:
        >>> from google.cloud import spanner_admin_instance_v1
        >>> client = spanner_admin_instance_v1.InstanceAdminClient()
        >>> parent = client.project_path('[PROJECT]')
        >>> for element in client.list_instance_configs(parent):
        ...     pass  # process element
        >>> # Or iterate one page at a time:
        >>> for page in client.list_instance_configs(parent).pages:
        ...     for element in page:
        ...         pass  # process element

    Args:
        parent (str): Required. The name of the project for which a list of
            supported instance configurations is requested. Values are of
            the form ``projects/<project>``.
        page_size (int): The maximum number of resources contained in each
            underlying API response page.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. ``None`` disables retries.
        timeout (Optional[float]): Time, in seconds, to wait for the request
            to complete; applies to each individual attempt when ``retry``
            is specified.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.gax.PageIterator` instance yielding
        :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig`
        instances; it can also be iterated page by page.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Lazily wrap the transport method with retry/timeout handling and
    # cache the wrapped callable for subsequent invocations.
    inner = self._inner_api_calls.get("list_instance_configs")
    if inner is None:
        inner = google.api_core.gapic_v1.method.wrap_method(
            self.transport.list_instance_configs,
            default_retry=self._method_configs["ListInstanceConfigs"].retry,
            default_timeout=self._method_configs["ListInstanceConfigs"].timeout,
            client_info=self._client_info,
        )
        self._inner_api_calls["list_instance_configs"] = inner

    request = spanner_instance_admin_pb2.ListInstanceConfigsRequest(
        parent=parent, page_size=page_size
    )
    return google.api_core.page_iterator.GRPCIterator(
        client=None,
        method=functools.partial(
            inner, retry=retry, timeout=timeout, metadata=metadata
        ),
        request=request,
        items_field="instance_configs",
        request_token_field="page_token",
        response_token_field="next_page_token",
    )
def get_instance_config(
    self,
    name,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """Gets information about a particular instance configuration.

    Example:
        >>> from google.cloud import spanner_admin_instance_v1
        >>> client = spanner_admin_instance_v1.InstanceAdminClient()
        >>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]')
        >>> response = client.get_instance_config(name)

    Args:
        name (str): Required. The name of the requested instance
            configuration. Values are of the form
            ``projects/<project>/instanceConfigs/<config>``.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. ``None`` disables retries.
        timeout (Optional[float]): Time, in seconds, to wait for the request
            to complete; applies to each individual attempt when ``retry``
            is specified.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig`
        instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Lazily wrap the transport method with retry/timeout handling and
    # cache the wrapped callable for subsequent invocations.
    inner = self._inner_api_calls.get("get_instance_config")
    if inner is None:
        inner = google.api_core.gapic_v1.method.wrap_method(
            self.transport.get_instance_config,
            default_retry=self._method_configs["GetInstanceConfig"].retry,
            default_timeout=self._method_configs["GetInstanceConfig"].timeout,
            client_info=self._client_info,
        )
        self._inner_api_calls["get_instance_config"] = inner

    request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name)
    return inner(request, retry=retry, timeout=timeout, metadata=metadata)
def list_instances(
    self,
    parent,
    page_size=None,
    filter_=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """Lists all instances in the given project.

    Example:
        >>> from google.cloud import spanner_admin_instance_v1
        >>> client = spanner_admin_instance_v1.InstanceAdminClient()
        >>> parent = client.project_path('[PROJECT]')
        >>> for element in client.list_instances(parent):
        ...     pass  # process element
        >>> # Or iterate one page at a time:
        >>> for page in client.list_instances(parent).pages:
        ...     for element in page:
        ...         pass  # process element

    Args:
        parent (str): Required. The name of the project for which a list of
            instances is requested. Values are of the form
            ``projects/<project>``.
        page_size (int): The maximum number of resources contained in each
            underlying API response page.
        filter_ (str): An expression for filtering the results of the
            request. Filter rules are case insensitive. The fields eligible
            for filtering are ``name``, ``display_name``, and
            ``labels.key`` where key is the name of a label. Examples:

            - ``name:*`` --> The instance has a name.
            - ``name:Howl`` --> The instance's name contains "howl"
              (case-insensitive; ``name:HOWL`` and ``NAME:howl`` are
              equivalent).
            - ``labels.env:*`` --> The instance has the label "env".
            - ``labels.env:dev`` --> The instance has the label "env" whose
              value contains "dev".
            - ``name:howl labels.env:dev`` --> Both conditions hold.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. ``None`` disables retries.
        timeout (Optional[float]): Time, in seconds, to wait for the request
            to complete; applies to each individual attempt when ``retry``
            is specified.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.gax.PageIterator` instance yielding
        :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`
        instances; it can also be iterated page by page.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Lazily wrap the transport method with retry/timeout handling and
    # cache the wrapped callable for subsequent invocations.
    inner = self._inner_api_calls.get("list_instances")
    if inner is None:
        inner = google.api_core.gapic_v1.method.wrap_method(
            self.transport.list_instances,
            default_retry=self._method_configs["ListInstances"].retry,
            default_timeout=self._method_configs["ListInstances"].timeout,
            client_info=self._client_info,
        )
        self._inner_api_calls["list_instances"] = inner

    request = spanner_instance_admin_pb2.ListInstancesRequest(
        parent=parent, page_size=page_size, filter=filter_
    )
    return google.api_core.page_iterator.GRPCIterator(
        client=None,
        method=functools.partial(
            inner, retry=retry, timeout=timeout, metadata=metadata
        ),
        request=request,
        items_field="instances",
        request_token_field="page_token",
        response_token_field="next_page_token",
    )
def get_instance(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets information about a particular instance.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> name = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> response = client.get_instance(name)
Args:
name (str): Required. The name of the requested instance. Values are of the form
``projects/<project>/instances/<instance>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_instance" not in self._inner_api_calls:
self._inner_api_calls[
"get_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_instance,
default_retry=self._method_configs["GetInstance"].retry,
default_timeout=self._method_configs["GetInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.GetInstanceRequest(name=name)
return self._inner_api_calls["get_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def create_instance(
self,
parent,
instance_id,
instance,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates an instance and begins preparing it to begin serving. The
returned ``long-running operation`` can be used to track the progress of
preparing the new instance. The instance name is assigned by the caller.
If the named instance already exists, ``CreateInstance`` returns
``ALREADY_EXISTS``.
Immediately upon completion of this request:
- The instance is readable via the API, with all requested attributes
but no allocated resources. Its state is ``CREATING``.
Until completion of the returned operation:
- Cancelling the operation renders the instance immediately unreadable
via the API.
- The instance can be deleted.
- All other attempts to modify the instance are rejected.
Upon completion of the returned operation:
- Billing for all successfully-allocated resources begins (some types
may have lower than the requested levels).
- Databases can be created in the instance.
- The instance's allocated resource levels are readable via the API.
- The instance's state becomes ``READY``.
The returned ``long-running operation`` will have a name of the format
``<instance_name>/operations/<operation_id>`` and can be used to track
creation of the instance. The ``metadata`` field type is
``CreateInstanceMetadata``. The ``response`` field type is ``Instance``,
if successful.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `instance_id`:
>>> instance_id = ''
>>>
>>> # TODO: Initialize `instance`:
>>> instance = {}
>>>
>>> response = client.create_instance(parent, instance_id, instance)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Required. The name of the project in which to create the instance.
Values are of the form ``projects/<project>``.
instance_id (str): Required. The ID of the instance to create. Valid identifiers are of the
form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 6 and 30 characters
in length.
instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to create. The name may be omitted, but if
specified must be ``<parent>/instances/<instance_id>``.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_instance" not in self._inner_api_calls:
self._inner_api_calls[
"create_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_instance,
default_retry=self._method_configs["CreateInstance"].retry,
default_timeout=self._method_configs["CreateInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.CreateInstanceRequest(
parent=parent, instance_id=instance_id, instance=instance
)
operation = self._inner_api_calls["create_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
spanner_instance_admin_pb2.Instance,
metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata,
)
def update_instance(
self,
instance,
field_mask,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates an instance, and begins allocating or releasing resources as
requested. The returned ``long-running operation`` can be used to track
the progress of updating the instance. If the named instance does not
exist, returns ``NOT_FOUND``.
Immediately upon completion of this request:
- For resource types for which a decrease in the instance's allocation
has been requested, billing is based on the newly-requested level.
Until completion of the returned operation:
- Cancelling the operation sets its metadata's ``cancel_time``, and
begins restoring resources to their pre-request values. The operation
is guaranteed to succeed at undoing all resource changes, after which
point it terminates with a ``CANCELLED`` status.
- All other attempts to modify the instance are rejected.
- Reading the instance via the API continues to give the pre-request
resource levels.
Upon completion of the returned operation:
- Billing begins for all successfully-allocated resources (some types
may have lower than the requested levels).
- All newly-reserved resources are available for serving the instance's
tables.
- The instance's new resource levels are readable via the API.
The returned ``long-running operation`` will have a name of the format
``<instance_name>/operations/<operation_id>`` and can be used to track
the instance modification. The ``metadata`` field type is
``UpdateInstanceMetadata``. The ``response`` field type is ``Instance``,
if successful.
Authorization requires ``spanner.instances.update`` permission on
resource ``name``.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> # TODO: Initialize `instance`:
>>> instance = {}
>>>
>>> # TODO: Initialize `field_mask`:
>>> field_mask = {}
>>>
>>> response = client.update_instance(instance, field_mask)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update, which must always include the instance
name. Otherwise, only fields mentioned in
[][google.spanner.admin.instance.v1.UpdateInstanceRequest.field\_mask]
need be included.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`
field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which fields in
[][google.spanner.admin.instance.v1.UpdateInstanceRequest.instance]
should be updated. The field mask must always be specified; this
prevents any future fields in
[][google.spanner.admin.instance.v1.Instance] from being erased
accidentally by clients that do not know about them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_instance" not in self._inner_api_calls:
self._inner_api_calls[
"update_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_instance,
default_retry=self._method_configs["UpdateInstance"].retry,
default_timeout=self._method_configs["UpdateInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.UpdateInstanceRequest(
instance=instance, field_mask=field_mask
)
operation = self._inner_api_calls["update_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
spanner_instance_admin_pb2.Instance,
metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata,
)
def delete_instance(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes an instance.
Immediately upon completion of the request:
- Billing ceases for all of the instance's reserved resources.
Soon afterward:
- The instance and *all of its databases* immediately and irrevocably
disappear from the API. All data in the databases is permanently
deleted.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> name = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> client.delete_instance(name)
Args:
name (str): Required. The name of the instance to be deleted. Values are of the form
``projects/<project>/instances/<instance>``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_instance" not in self._inner_api_calls:
self._inner_api_calls[
"delete_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_instance,
default_retry=self._method_configs["DeleteInstance"].retry,
default_timeout=self._method_configs["DeleteInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name)
self._inner_api_calls["delete_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def set_iam_policy(
self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sets the access control policy on an instance resource. Replaces any
existing policy.
Authorization requires ``spanner.instances.setIamPolicy`` on
``resource``.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> resource = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "set_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"set_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs["SetIamPolicy"].retry,
default_timeout=self._method_configs["SetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
return self._inner_api_calls["set_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_iam_policy(
self,
resource,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets the access control policy for an instance resource. Returns an
empty policy if an instance exists but does not have a policy set.
Authorization requires ``spanner.instances.getIamPolicy`` on
``resource``.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> resource = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> response = client.get_iam_policy(resource)
Args:
resource (str): REQUIRED: The resource for which the policy is being requested.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"get_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_iam_policy,
default_retry=self._method_configs["GetIamPolicy"].retry,
default_timeout=self._method_configs["GetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
return self._inner_api_calls["get_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def test_iam_permissions(
self,
resource,
permissions,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Returns permissions that the caller has on the specified instance
resource.
Attempting this RPC on a non-existent Cloud Spanner instance resource
will result in a NOT\_FOUND error if the user has
``spanner.instances.list`` permission on the containing Google Cloud
Project. Otherwise returns an empty set of permissions.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> resource = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> # TODO: Initialize `permissions`:
>>> permissions = []
>>>
>>> response = client.test_iam_permissions(resource, permissions)
Args:
resource (str): REQUIRED: The resource for which the policy detail is being requested.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with
wildcards (such as '*' or 'storage.*') are not allowed. For more
information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "test_iam_permissions" not in self._inner_api_calls:
self._inner_api_calls[
"test_iam_permissions"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.test_iam_permissions,
default_retry=self._method_configs["TestIamPermissions"].retry,
default_timeout=self._method_configs["TestIamPermissions"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions
)
return self._inner_api_calls["test_iam_permissions"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
{
"content_hash": "a44a000d7e02c71040b9f3a10b72780c",
"timestamp": "",
"source": "github",
"line_count": 1045,
"max_line_length": 164,
"avg_line_length": 44.691866028708134,
"alnum_prop": 0.6003682846926321,
"repo_name": "dhermes/gcloud-python",
"id": "63d3a1631eae32cf2f373af8cce82d256faf850a",
"size": "47304",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spanner/google/cloud/spanner_admin_instance_v1/gapic/instance_admin_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "95635"
},
{
"name": "Python",
"bytes": "2871895"
},
{
"name": "Shell",
"bytes": "4683"
}
],
"symlink_target": ""
}
|
class Solution:
    def maxArea(self, height: List[int]) -> int:
        """Return the largest water area between two vertical lines.

        Classic two-pointer scan: the area between the ends is bounded by
        the shorter line, so advancing the shorter side is the only move
        that can possibly improve the result. O(n) time, O(1) space.
        """
        best = 0
        lo, hi = 0, len(height) - 1
        while lo < hi:
            width = hi - lo
            best = max(best, width * min(height[lo], height[hi]))
            # Move the pointer at the shorter line inward.
            if height[lo] > height[hi]:
                hi -= 1
            else:
                lo += 1
        return best
|
{
"content_hash": "922d2a17418dc753da5d040141e5075f",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 73,
"avg_line_length": 29.53846153846154,
"alnum_prop": 0.4270833333333333,
"repo_name": "saisankargochhayat/algo_quest",
"id": "13497eb2c157e0777649412bb91c0230d33cd212",
"size": "545",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leetcode/11.ContainerWithMostWater/soln.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "405"
},
{
"name": "C++",
"bytes": "9149"
},
{
"name": "HTML",
"bytes": "1679"
},
{
"name": "Java",
"bytes": "3648"
},
{
"name": "JavaScript",
"bytes": "786"
},
{
"name": "Python",
"bytes": "248621"
},
{
"name": "Ruby",
"bytes": "2761"
},
{
"name": "Shell",
"bytes": "610"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from storage import Storage
__all__ = ["AzureStorage"]
class AzureStorage(Storage):
    """Hackathon file storage that saves all templates on MS azure."""

    def __init__(self):
        # Placeholder: no state is initialized yet.
        pass
|
{
"content_hash": "e5eed06b2bbcd62a78d8afe07f195c83",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 83,
"avg_line_length": 40.57575757575758,
"alnum_prop": 0.7744585511575803,
"repo_name": "Fendoe/open-hackathon-o",
"id": "16a2681d412902149fbc73c02733eb90d7e27753",
"size": "1364",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "open-hackathon-server/src/hackathon/storage/azure_storage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "109082"
},
{
"name": "HTML",
"bytes": "426116"
},
{
"name": "Java",
"bytes": "12108"
},
{
"name": "JavaScript",
"bytes": "414512"
},
{
"name": "Python",
"bytes": "2270532"
},
{
"name": "Ruby",
"bytes": "1518308"
},
{
"name": "Shell",
"bytes": "18652"
}
],
"symlink_target": ""
}
|
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from openstack_dashboard import api
from openstack_dashboard import policy
from horizon import exceptions
from horizon import tables
LOG = logging.getLogger(__name__)
class AddAllowedAddressPair(policy.PolicyTargetMixin, tables.LinkAction):
    """Link action opening the "add allowed address pair" modal for a port."""
    name = "AddAllowedAddressPair"
    verbose_name = _("Add Allowed Address Pair")
    url = "horizon:project:networks:ports:addallowedaddresspairs"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "update_port"),)

    def get_link_url(self, port=None):
        # Prefer the row's port object; fall back to the port id captured
        # in the table kwargs when no row object is supplied.
        port_id = port.id if port else self.table.kwargs.get('port_id')
        return reverse(self.url, args=(port_id,))
class DeleteAllowedAddressPair(tables.DeleteAction):
    """Table action that removes an allowed address pair from a port."""

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete",
            u"Delete",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Deleted address pair",
            u"Deleted address pairs",
            count
        )

    def delete(self, request, ip_address):
        """Drop *ip_address* from the port's ``allowed_address_pairs``
        and write the remaining pairs back via a port update.
        """
        # Resolve the port id before the try block: previously it was looked
        # up inside it, so a missing 'port_id' key raised KeyError and the
        # except handler then hit a NameError on the unbound `port_id`.
        port_id = self.table.kwargs['port_id']
        try:
            port = api.neutron.port_get(request, port_id)
            pairs = port.get('allowed_address_pairs', [])
            # Keep every pair except the one being deleted, serialized
            # back to plain dicts for the update call.
            pairs = [pair.to_dict() for pair in pairs
                     if pair['ip_address'] != ip_address]
            api.neutron.port_update(request, port_id,
                                    allowed_address_pairs=pairs)
        except Exception as e:
            LOG.error('Failed to update port %(port_id)s: %(reason)s',
                      {'port_id': port_id, 'reason': e})
            redirect = reverse("horizon:project:networks:ports:detail",
                               args=(port_id,))
            exceptions.handle(request, _('Failed to update port %s') % port_id,
                              redirect=redirect)
class AllowedAddressPairsTable(tables.DataTable):
    # Displayed columns: the pair's IP address (or CIDR) and its MAC address.
    IP = tables.Column("ip_address",
                       verbose_name=_("IP Address or CIDR"))
    mac = tables.Column('mac_address', verbose_name=_("MAC Address"))
    def get_object_display(self, address_pair):
        # Identify a row by its IP address (e.g. in confirmation messages).
        return address_pair['ip_address']
    class Meta(object):
        name = "allowed_address_pairs"
        verbose_name = _("Allowed Address Pairs")
        row_actions = (DeleteAllowedAddressPair,)
        table_actions = (AddAllowedAddressPair, DeleteAllowedAddressPair)
|
{
"content_hash": "cef53c782023d559a0bb221e78cba7b8",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 79,
"avg_line_length": 33.51851851851852,
"alnum_prop": 0.6058931860036832,
"repo_name": "wolverineav/horizon",
"id": "461f97772d2c505fca2e1c8920d82f29ba412831",
"size": "3355",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/networks/ports/extensions/allowed_address_pairs/tables.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "182861"
},
{
"name": "HTML",
"bytes": "547294"
},
{
"name": "JavaScript",
"bytes": "1954942"
},
{
"name": "Makefile",
"bytes": "588"
},
{
"name": "Python",
"bytes": "5103444"
},
{
"name": "Shell",
"bytes": "19593"
}
],
"symlink_target": ""
}
|
from puq import *
def run():
    """Build a UQ sweep of ./sin.py over x ~ Uniform(0, 2).

    Uses a level-8 Smolyak sparse grid run on the local interactive host.
    """
    param = UniformParameter('x', 'x', min=0, max=2)
    host = InteractiveHost()
    method = Smolyak([param], level=8)
    program = TestProgram('./sin.py', desc='Sine Function')
    return Sweep(method, host, program)
|
{
"content_hash": "a9b457715a8363157d8101165a87847d",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 56,
"avg_line_length": 28.625,
"alnum_prop": 0.6026200873362445,
"repo_name": "c-PRIMED/puq",
"id": "8a84628618ea8f247d305cb64907198e370c5216",
"size": "229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/discontinuous/1dsin/smolyak.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "328"
},
{
"name": "Fortran",
"bytes": "410"
},
{
"name": "M",
"bytes": "648"
},
{
"name": "Makefile",
"bytes": "165"
},
{
"name": "Matlab",
"bytes": "6367"
},
{
"name": "Python",
"bytes": "444524"
},
{
"name": "Shell",
"bytes": "38"
}
],
"symlink_target": ""
}
|
import argparse
import genetic_analyzer as ga
def main():
    """Characterize promoter, terminator and ribozyme parts for every
    sample in the settings file, then merge the per-sample results.
    """
    # Parse the command line inputs.
    parser = argparse.ArgumentParser(description="part_profile_analysis")
    parser.add_argument("-settings", dest="settings", required=True, help="settings.txt", metavar="string")
    args = parser.parse_args()
    settings = ga.load_settings(args.settings)

    samples = []
    for sample in settings.keys():
        # 'None' is a placeholder entry in the settings file, not a sample.
        if sample == 'None':
            continue
        samples.append(sample)
        # Characterize each part type and persist the per-sample data.
        chr_promoters = ga.characterize_promoter_units(settings, sample, upstream_bp=10, downstream_skip_bp=0, downstream_bp=10, normed=True)
        ga.save_characterization_data(settings, sample, chr_promoters, part_type='promoter')
        chr_terminators = ga.characterize_terminators(settings, sample, upstream_bp=10, upstream_skip_bp=0, downstream_bp=10, normed=True)
        ga.save_characterization_data(settings, sample, chr_terminators, part_type='terminator')
        chr_ribozymes = ga.characterize_ribozymes(settings, sample, upstream_promoter_bp=10, upstream_bp=10, downstream_skip_bp=0, downstream_bp=10, normed=True)
        ga.save_characterization_data(settings, sample, chr_ribozymes, part_type='ribozyme')

    # Merge the per-sample characterizations into combined outputs.
    ga.combine_promoter_characterizations(settings, samples)
    ga.combine_terminator_characterizations(settings, samples)
    ga.combine_ribozyme_characterizations(settings, samples)
# Allow the script to be executed directly from the command line.
if __name__ == "__main__":
    main()
|
{
"content_hash": "5d5d6a165ba81cc97d0e8253aab7ed58",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 151,
"avg_line_length": 52.08,
"alnum_prop": 0.7511520737327189,
"repo_name": "VoigtLab/MIT-BroadFoundry",
"id": "1707054852e68647e4198b7ce641137adb4857e8",
"size": "1500",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "genetic-analyzer/bin/part_profile_analysis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "99392"
},
{
"name": "R",
"bytes": "3572"
},
{
"name": "Shell",
"bytes": "11429"
}
],
"symlink_target": ""
}
|
import re
import string
from django.http import HttpRequest, HttpResponse
from zerver.decorator import webhook_view
from zerver.lib.exceptions import UnsupportedWebhookEventTypeError
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.validator import WildValue, check_string, to_wild_value
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile
from .support_event import SUPPORT_EVENTS
# Message templates for each kind of Basecamp recording; each is filled in
# with the actor's name, a past-tense verb, and Markdown link(s) to the
# subject of the event.
DOCUMENT_TEMPLATE = "{user_name} {verb} the document [{title}]({url})"
QUESTION_TEMPLATE = "{user_name} {verb} the question [{title}]({url})"
QUESTIONS_ANSWER_TEMPLATE = (
    "{user_name} {verb} the [answer]({answer_url}) "
    + "of the question [{question_title}]({question_url})"
)
COMMENT_TEMPLATE = (
    "{user_name} {verb} the [comment]({answer_url}) of the task [{task_title}]({task_url})"
)
MESSAGE_TEMPLATE = "{user_name} {verb} the message [{title}]({url})"
TODO_LIST_TEMPLATE = "{user_name} {verb} the todo list [{title}]({url})"
TODO_TEMPLATE = "{user_name} {verb} the todo task [{title}]({url})"
# Canonical event names reported to the webhook framework once the raw
# Basecamp event kind has been mapped by its prefix.
ALL_EVENT_TYPES = [
    "document",
    "question_answer",
    "question",
    "message",
    "todolist",
    "todo",
    "comment",
]
@webhook_view("Basecamp", all_event_types=ALL_EVENT_TYPES)
@has_request_variables
def api_basecamp_webhook(
    request: HttpRequest,
    user_profile: UserProfile,
    payload: WildValue = REQ(argument_type="body", converter=to_wild_value),
) -> HttpResponse:
    """Handle a Basecamp webhook: map the raw event kind to a canonical
    event name, render the message body, and send it to the project's
    topic.
    """
    event = get_event_type(payload)
    if event not in SUPPORT_EVENTS:
        raise UnsupportedWebhookEventTypeError(event)

    subject = get_project_name(payload)

    # Ordered (prefix, canonical event name, body builder) dispatch table;
    # "question_answer_" must precede "question_" so the longer prefix wins.
    handlers = (
        ("document_", "document", get_document_body),
        ("question_answer_", "question_answer", get_questions_answer_body),
        ("question_", "question", get_questions_body),
        ("message_", "message", get_message_body),
        ("todolist_", "todolist", get_todo_list_body),
        ("todo_", "todo", get_todo_body),
        ("comment_", "comment", get_comment_body),
    )
    for prefix, canonical_event, build_body in handlers:
        if event.startswith(prefix):
            body = build_body(event, payload)
            event = canonical_event
            break
    else:
        raise UnsupportedWebhookEventTypeError(event)

    check_send_webhook_message(request, user_profile, subject, body, event)
    return json_success(request)
def get_project_name(payload: WildValue) -> str:
    """Return the name of the Basecamp project ("bucket") the event belongs to."""
    bucket = payload["recording"]["bucket"]
    return bucket["name"].tame(check_string)
def get_event_type(payload: WildValue) -> str:
    """Return the raw Basecamp event kind, e.g. "todo_created"."""
    kind = payload["kind"]
    return kind.tame(check_string)
def get_event_creator(payload: WildValue) -> str:
    """Return the display name of the user who triggered the event."""
    creator = payload["creator"]
    return creator["name"].tame(check_string)
def get_subject_url(payload: WildValue) -> str:
    """Return the Basecamp app URL of the recording this event is about."""
    recording = payload["recording"]
    return recording["app_url"].tame(check_string)
def get_subject_title(payload: WildValue) -> str:
    """Return the title of the recording this event is about."""
    recording = payload["recording"]
    return recording["title"].tame(check_string)
def get_verb(event: str, prefix: str) -> str:
    """Return a past-tense verb phrase for a Basecamp event.

    ``event`` is the raw event kind (e.g. "todo_content_changed") and
    ``prefix`` is the family prefix the caller already matched ("todo_").

    Examples: "todo_created" -> "created", "document_active" -> "activated",
    "todo_content_changed" -> "changed content of".
    """
    verb = event.replace(prefix, "")
    if verb == "active":
        return "activated"
    # "<subject>_changed" -> "changed <subject> of".  The original pattern
    # used [A-z], which (by ASCII range) also matched "[", "]", "^", "_"
    # and "`"; \w keeps matching multi-word subjects such as "due_on"
    # without accepting those stray characters.
    matched = re.match(r"(?P<subject>\w*)_changed", verb)
    if matched:
        return "changed {} of".format(matched.group("subject"))
    return verb
def add_punctuation_if_necessary(body: str, title: str) -> str:
    """Append a period to ``body`` unless ``title`` already ends in punctuation.

    Titles are interpolated at the end of every template, so a title that
    already ends with e.g. "!" or "?" should not get an extra ".".  An empty
    title has no trailing punctuation, so the period is added (the original
    indexed ``title[-1]`` and raised IndexError on "").
    """
    if not title or title[-1] not in string.punctuation:
        body = f"{body}."
    return body
def get_document_body(event: str, payload: WildValue) -> str:
    """Build the notification text for document_* events."""
    return get_generic_body(event, payload, prefix="document_", template=DOCUMENT_TEMPLATE)
def get_questions_answer_body(event: str, payload: WildValue) -> str:
    """Build the notification text for question_answer_* events.

    The parent recording is the question the answer belongs to.
    """
    parent_question = payload["recording"]["parent"]
    question_title = parent_question["title"].tame(check_string)
    question_url = parent_question["app_url"].tame(check_string)
    body = add_punctuation_if_necessary(QUESTIONS_ANSWER_TEMPLATE, question_title)
    return body.format(
        user_name=get_event_creator(payload),
        verb=get_verb(event, "question_answer_"),
        answer_url=get_subject_url(payload),
        question_title=question_title,
        question_url=question_url,
    )
def get_comment_body(event: str, payload: WildValue) -> str:
    """Build the notification text for comment_* events.

    The parent recording is the task that was commented on.
    """
    verb = get_verb(event, "comment_")
    task = payload["recording"]["parent"]
    # Tame the title once and reuse it; the original validated it twice.
    task_title = task["title"].tame(check_string)
    template = add_punctuation_if_necessary(COMMENT_TEMPLATE, task_title)
    return template.format(
        user_name=get_event_creator(payload),
        verb=verb,
        answer_url=get_subject_url(payload),
        task_title=task_title,
        task_url=task["app_url"].tame(check_string),
    )
def get_questions_body(event: str, payload: WildValue) -> str:
    """Build the notification text for question_* events."""
    return get_generic_body(event, payload, prefix="question_", template=QUESTION_TEMPLATE)
def get_message_body(event: str, payload: WildValue) -> str:
    """Build the notification text for message_* events."""
    return get_generic_body(event, payload, prefix="message_", template=MESSAGE_TEMPLATE)
def get_todo_list_body(event: str, payload: WildValue) -> str:
    """Build the notification text for todolist_* events."""
    return get_generic_body(event, payload, prefix="todolist_", template=TODO_LIST_TEMPLATE)
def get_todo_body(event: str, payload: WildValue) -> str:
    """Build the notification text for todo_* events."""
    return get_generic_body(event, payload, prefix="todo_", template=TODO_TEMPLATE)
def get_generic_body(event: str, payload: WildValue, prefix: str, template: str) -> str:
    """Shared body builder for the simple single-recording event families.

    ``prefix`` is the event-family prefix (e.g. "todo_") and ``template``
    one of the *_TEMPLATE constants with {user_name}/{verb}/{title}/{url}.
    """
    verb = get_verb(event, prefix)
    title = get_subject_title(payload)
    template = add_punctuation_if_necessary(template, title)
    return template.format(
        user_name=get_event_creator(payload),
        verb=verb,
        # Reuse the already-computed title; the original re-fetched it.
        title=title,
        url=get_subject_url(payload),
    )
|
{
"content_hash": "acf2d1380ba8b90d5ed877088780b759",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 95,
"avg_line_length": 32.67796610169491,
"alnum_prop": 0.6742738589211619,
"repo_name": "zulip/zulip",
"id": "4626a25e60b61082983d4d8a1a3b9ec66c08c9fd",
"size": "5784",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "zerver/webhooks/basecamp/view.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "509211"
},
{
"name": "Dockerfile",
"bytes": "4219"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "696430"
},
{
"name": "Handlebars",
"bytes": "384277"
},
{
"name": "JavaScript",
"bytes": "4098367"
},
{
"name": "Perl",
"bytes": "10163"
},
{
"name": "Puppet",
"bytes": "112433"
},
{
"name": "Python",
"bytes": "10336945"
},
{
"name": "Ruby",
"bytes": "3166"
},
{
"name": "Shell",
"bytes": "147162"
},
{
"name": "TypeScript",
"bytes": "286785"
}
],
"symlink_target": ""
}
|
from lighthouse.ajax.lapack_le import *
from lighthouse.ajax.btoscript import *
|
{
"content_hash": "5384beaff3b29deaadd493175049eea2",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 39,
"avg_line_length": 27,
"alnum_prop": 0.8024691358024691,
"repo_name": "LighthouseHPC/lighthouse",
"id": "11b7b74b7cc1bc37c127d0ccfe765e7b9570cdd0",
"size": "81",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Dlighthouse/lighthouse/ajax/__init__.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
import inspect, types
from .pool import HashPool
from .chord import Chord
from .resource import ProxyResource
from . import registry
class Task(object):
    """A unit of work that executes only after its resources are acquired.

    Similar to threads: pass a callable as ``target`` or subclass and
    override :meth:`run`.
    """

    def __init__(self, target=None, name=None):
        """Similar to threads, if target is not None, will run target. You may also override the run method."""
        self._run = target
        self._name = name

    def get_name(self):
        """Return the task name: explicit name, then target name, then class name."""
        if self._name is not None:
            return self._name
        if self._run:
            return self._run.__name__
        return self.__class__.__name__

    def require(self, resources, *args, **kwargs):
        """
        Place requirements for task. Task won't execute until all resources are acquired
        > resources.request(Resource, exclusive=True, **kwargs)
        """
        pass

    def start(self, *args, **kwargs):
        """Acquire all required resources, then execute the task.

        Returns whatever the target callable (or :meth:`run`) returns.
        """
        resources = kwargs.pop('resources', Chord())
        # Every task also registers itself as a shared Task resource, keyed
        # by its name (see TaskPool).
        resources.request(Task, False, key=self.get_name())
        self.require(resources, *args, **kwargs)
        with resources:
            if self._run is None:
                return self.run(resources, *args, **kwargs)
            # inspect.getargspec() was deprecated and removed in Python 3.11;
            # getfullargspec() exposes positional arg names the same way.
            argnames = inspect.getfullargspec(self._run)[0]
            is_method = (argnames and 'self' == argnames[0])
            resource_index = 1 if is_method else 0
            # If the target declares 'resources' as its first (non-self)
            # parameter, pass the chord down. We don't do fancy arg matching.
            add_resources = 'resources' in argnames[resource_index:resource_index + 1]
            if add_resources:
                if is_method:
                    args = (args[0], resources) + args[1:]
                else:
                    # BUG FIX: the original used args[1:] here, silently
                    # dropping the caller's first positional argument for
                    # plain (non-method) targets.
                    args = (resources,) + args
            return self._run(*args, **kwargs)

    def run(self, resources, *args, **kwargs):
        """
        Do stuff
        """
        raise NotImplementedError()
class TaskPool(HashPool):
    """Resource pool of task proxies, keyed by task name.

    Unknown task names are auto-registered on first lookup, so requesting a
    not-yet-seen task never fails.
    """

    def add(self, task_or_str):
        """Register a task (instance or plain name string) as a proxy resource."""
        if isinstance(task_or_str, Task):
            task_or_str = task_or_str.get_name()
        self._resources[task_or_str] = ProxyResource(task_or_str)

    def find(self, request):
        """Lazily register an unknown task key, then delegate to HashPool."""
        # Single dict lookup instead of the original's three.
        if 'key' in request.kwargs:
            key = request.kwargs['key']
            if key not in self._resources:
                self.add(key)
        return super(TaskPool, self).find(request)
registry.register(Task, TaskPool())
### DECORATORS
# Module-level default Task subclass used by the @task decorator when no
# explicit task_class argument is given; change via set_default_task_class().
_default_task_class = Task
def set_default_task_class(task_class):
    """Override the Task subclass used by the @task decorator by default."""
    global _default_task_class
    _default_task_class = task_class
def get_default_task_class():
    """Return the Task subclass currently used as the decorator default."""
    return _default_task_class
class TaskFactory(object):
    """Callable wrapper produced by the @task / @requires decorators.

    Stores the wrapped function together with its accumulated resource
    requirements; each call builds a fresh Task and a fresh Chord.
    """

    def __init__(self, func):
        self._task_class = None  # set by @task; falls back to the module default
        self._requirements = []  # dicts of cls/exclusive/kwargs from @requires
        self._func = func
        # Manually mimic functools.wraps so the factory looks like the function.
        self.__name__ = self._func.__name__
        self.__doc__ = self._func.__doc__

    def add_requirement(self, cls, exclusive, **kwargs):
        """Record a resource requirement to be requested on every call."""
        self._requirements.append(dict(cls=cls, exclusive=exclusive, kwargs=kwargs))

    def __call__(self, *args, **kwargs):
        # Resolve the task class lazily so set_default_task_class() affects
        # factories whose @task decorator did not pin a class.
        task_class = self._task_class or get_default_task_class()
        task = task_class(self._func, name=self.__name__)
        resources = Chord()
        for requirement in self._requirements:
            resources.request(requirement['cls'], requirement['exclusive'], **requirement['kwargs'])
        return task.start(resources=resources, *args, **kwargs)

    def __get__(self, instance, cls):
        # Descriptor protocol: lets decorated methods bind like normal
        # methods (binds to the class itself for unbound/class access).
        return types.MethodType(self, instance or cls)
def task(name=None, task_class=None):
    """Decorator factory turning a function into a TaskFactory.

    ``name`` overrides the task's reported name; ``task_class`` selects the
    Task subclass (resolved eagerly against the current module default).
    """
    resolved_class = task_class if task_class is not None else get_default_task_class()

    def wrapper(func):
        factory = func if isinstance(func, TaskFactory) else TaskFactory(func)
        if name:
            factory.__name__ = name
        if resolved_class:
            factory._task_class = resolved_class
        return factory

    return wrapper
def requires(cls, exclusive=False, **kwargs):
    """Decorator factory attaching a resource requirement to a task function."""

    def wrapper(func):
        factory = func if isinstance(func, TaskFactory) else TaskFactory(func)
        factory.add_requirement(cls, exclusive, **kwargs)
        return factory

    return wrapper
|
{
"content_hash": "f8e7b553ed2d939b045968f2da3b9301",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 111,
"avg_line_length": 32.15079365079365,
"alnum_prop": 0.5917057516662553,
"repo_name": "omergertel/chords",
"id": "804d056952ec4b73b51f365f51391b0df94c5480",
"size": "4051",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chords/task.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42910"
}
],
"symlink_target": ""
}
|
"""
Unit Tests for remote procedure calls using queue
"""
import mock
import mox
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_db import exception as db_exc
from cinder import context
from cinder import db
from cinder import exception
from cinder import manager
from cinder import service
from cinder import test
from cinder import wsgi
# Config options used only by this test module; registered on the global
# CONF object so the service code under test can read them.
test_service_opts = [
    cfg.StrOpt("fake_manager",
               default="cinder.tests.test_service.FakeManager",
               help="Manager for testing"),
    cfg.StrOpt("test_service_listen",
               default=None,
               help="Host to bind test service to"),
    cfg.IntOpt("test_service_listen_port",
               default=0,
               help="Port number to bind test service to"), ]
CONF = cfg.CONF
CONF.register_opts(test_service_opts)
class FakeManager(manager.Manager):
    """Fake manager for tests."""

    def __init__(self, host=None,
                 db_driver=None, service_name=None):
        # service_name is accepted (callers may pass it) but unused here.
        super(FakeManager, self).__init__(host=host,
                                          db_driver=db_driver)

    def test_method(self):
        # Sentinel return value used to verify manager method dispatch.
        return 'manager'
class ExtendedService(service.Service):
    """Service subclass overriding a manager method, for dispatch tests."""

    def test_method(self):
        # Shadows FakeManager.test_method to prove the service override wins.
        return 'service'
class ServiceManagerTestCase(test.TestCase):
    """Test cases for Services."""

    def test_message_gets_to_manager(self):
        # Methods unknown to the service should be proxied to the manager.
        serv = service.Service('test',
                               'test',
                               'test',
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        self.assertEqual('manager', serv.test_method())

    def test_override_manager_method(self):
        # A method defined on the service itself takes precedence over the
        # manager's method of the same name.
        serv = ExtendedService('test',
                               'test',
                               'test',
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        self.assertEqual('service', serv.test_method())
class ServiceFlagsTestCase(test.TestCase):
    """Tests for the enable_new_services flag."""

    def _check_service_disabled_flag(self, enable_new_services, expect_disabled):
        """Create/start/stop a service and check its DB 'disabled' field.

        The two public tests below differed only in the flag value and the
        expected result; the shared choreography lives here.
        """
        self.flags(enable_new_services=enable_new_services)
        host = 'foo'
        binary = 'cinder-fake'
        app = service.Service.create(host=host, binary=binary)
        app.start()
        app.stop()
        ctxt = context.get_admin_context()
        ref = db.service_get(ctxt, app.service_id)
        # Clean up the DB row before asserting so a failure doesn't leak it.
        db.service_destroy(ctxt, app.service_id)
        self.assertEqual(expect_disabled, bool(ref['disabled']))

    def test_service_enabled_on_create_based_on_flag(self):
        self._check_service_disabled_flag(True, expect_disabled=False)

    def test_service_disabled_on_create_based_on_flag(self):
        self._check_service_disabled_flag(False, expect_disabled=True)
class ServiceTestCase(test.TestCase):
    """Test cases for Services."""

    def setUp(self):
        super(ServiceTestCase, self).setUp()
        # All db accesses made by cinder.service are intercepted with mox
        # expectations below, so no real database is touched.
        self.mox.StubOutWithMock(service, 'db')

    def test_create(self):
        host = 'foo'
        binary = 'cinder-fake'
        topic = 'fake'
        # NOTE(vish): Create was moved out of mox replay to make sure that
        # the looping calls are created in StartService.
        app = service.Service.create(host=host, binary=binary, topic=topic)
        self.assertTrue(app)

    def test_report_state_newly_disconnected(self):
        # If the DB becomes unreachable while reporting state, the service
        # should flag itself as disconnected instead of raising.
        host = 'foo'
        binary = 'bar'
        topic = 'test'
        service_create = {'host': host,
                          'binary': binary,
                          'topic': topic,
                          'report_count': 0,
                          'availability_zone': 'nova'}
        service_ref = {'host': host,
                       'binary': binary,
                       'topic': topic,
                       'report_count': 0,
                       'availability_zone': 'nova',
                       'id': 1}
        # Expected call order: lookup fails -> service row is created ->
        # the report_state fetch raises a connection error.
        service.db.service_get_by_args(mox.IgnoreArg(),
                                       host,
                                       binary).AndRaise(exception.NotFound())
        service.db.service_create(mox.IgnoreArg(),
                                  service_create).AndReturn(service_ref)
        service.db.service_get(
            mox.IgnoreArg(),
            mox.IgnoreArg()).AndRaise(db_exc.DBConnectionError())
        self.mox.ReplayAll()
        serv = service.Service(host,
                               binary,
                               topic,
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        serv.report_state()
        self.assertTrue(serv.model_disconnected)

    def test_report_state_newly_connected(self):
        # After a disconnect, a successful report_state should clear the
        # model_disconnected flag and bump report_count in the DB.
        host = 'foo'
        binary = 'bar'
        topic = 'test'
        service_create = {'host': host,
                          'binary': binary,
                          'topic': topic,
                          'report_count': 0,
                          'availability_zone': 'nova'}
        service_ref = {'host': host,
                       'binary': binary,
                       'topic': topic,
                       'report_count': 0,
                       'availability_zone': 'nova',
                       'id': 1}
        service.db.service_get_by_args(mox.IgnoreArg(),
                                       host,
                                       binary).AndRaise(exception.NotFound())
        service.db.service_create(mox.IgnoreArg(),
                                  service_create).AndReturn(service_ref)
        service.db.service_get(mox.IgnoreArg(),
                               service_ref['id']).AndReturn(service_ref)
        service.db.service_update(mox.IgnoreArg(), service_ref['id'],
                                  mox.ContainsKeyValue('report_count', 1))
        self.mox.ReplayAll()
        serv = service.Service(host,
                               binary,
                               topic,
                               'cinder.tests.test_service.FakeManager')
        serv.start()
        serv.model_disconnected = True
        serv.report_state()
        self.assertFalse(serv.model_disconnected)

    def test_service_with_long_report_interval(self):
        # NOTE(review): Service.create is expected to raise service_down_time
        # when report_interval is too close to it (here 10 -> 25); confirm the
        # exact scaling in cinder.service.
        self.override_config('service_down_time', 10)
        self.override_config('report_interval', 10)
        service.Service.create(binary="test_service",
                               manager="cinder.tests.test_service.FakeManager")
        self.assertEqual(25, CONF.service_down_time)
class TestWSGIService(test.TestCase):
    """Tests for WSGIService port binding and worker-count handling."""

    def setUp(self):
        super(TestWSGIService, self).setUp()
        # Stub out app loading so no real paste deploy config is required.
        self.stubs.Set(wsgi.Loader, "load_app", mox.MockAnything())

    def test_service_random_port(self):
        # Port 0 asks the OS for an ephemeral port; after start() the
        # service must expose the real port it bound to.
        test_service = service.WSGIService("test_service")
        self.assertEqual(0, test_service.port)
        test_service.start()
        self.assertNotEqual(0, test_service.port)
        test_service.stop()

    @mock.patch('cinder.wsgi.Server')
    def test_workers_set_default(self, wsgi_server):
        # With no override, the worker count defaults to the CPU-derived value.
        test_service = service.WSGIService("osapi_volume")
        self.assertEqual(processutils.get_worker_count(), test_service.workers)

    @mock.patch('cinder.wsgi.Server')
    def test_workers_set_good_user_setting(self, wsgi_server):
        # A positive override is honored verbatim.
        self.override_config('osapi_volume_workers', 8)
        test_service = service.WSGIService("osapi_volume")
        self.assertEqual(8, test_service.workers)

    @mock.patch('cinder.wsgi.Server')
    def test_workers_set_zero_user_setting(self, wsgi_server):
        self.override_config('osapi_volume_workers', 0)
        test_service = service.WSGIService("osapi_volume")
        # If a value less than 1 is used, defaults to number of procs available
        self.assertEqual(processutils.get_worker_count(), test_service.workers)

    @mock.patch('cinder.wsgi.Server')
    def test_workers_set_negative_user_setting(self, wsgi_server):
        # Negative counts are invalid and must fail before the server is built.
        self.override_config('osapi_volume_workers', -1)
        self.assertRaises(exception.InvalidInput,
                          service.WSGIService,
                          "osapi_volume")
        self.assertFalse(wsgi_server.called)
class OSCompatibilityTestCase(test.TestCase):
    """Checks that the right launcher type is picked per operating system."""

    def _test_service_launcher(self, fake_os):
        # Note(lpetrut): The cinder-volume service needs to be spawned
        # differently on Windows due to an eventlet bug. For this reason,
        # we must check the process launcher used.
        fake_process_launcher = mock.MagicMock()
        with mock.patch('os.name', fake_os):
            with mock.patch('cinder.service.process_launcher',
                            fake_process_launcher):
                launcher = service.get_launcher()
                if fake_os == 'nt':
                    # Windows: a plain Launcher (no process forking).
                    self.assertEqual(service.Launcher, type(launcher))
                else:
                    # POSIX: the (mocked) process launcher is returned.
                    self.assertEqual(fake_process_launcher(), launcher)

    def test_process_launcher_on_windows(self):
        self._test_service_launcher('nt')

    def test_process_launcher_on_linux(self):
        self._test_service_launcher('posix')
|
{
"content_hash": "290b1a10c3e323003925b7c08217cb7c",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 79,
"avg_line_length": 36.63492063492063,
"alnum_prop": 0.5621750433275563,
"repo_name": "yanheven/cinder",
"id": "c2de595e1e361d25ae8f64a944f8412338f35ed9",
"size": "9965",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/tests/test_service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PLpgSQL",
"bytes": "2511"
},
{
"name": "Python",
"bytes": "10655225"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
}
|
import json
import logging
import random
import warnings
from typing import Dict, Hashable, List, Mapping, Optional
import numpy as np
import torch
from monai.config import KeysCollection
from monai.data import MetaTensor
from monai.networks.layers import GaussianFilter
from monai.transforms.transform import MapTransform, Randomizable, Transform
from monai.utils import min_version, optional_import
# skimage and scipy are optional dependencies; import them lazily so the
# module can be imported without them (failures occur only on actual use).
measure, _ = optional_import("skimage.measure", "0.14.2", min_version)
logger = logging.getLogger(__name__)
distance_transform_cdt, _ = optional_import("scipy.ndimage.morphology", name="distance_transform_cdt")
class DiscardAddGuidanced(MapTransform):
    """Randomly discard the guidance (click) channels of the input image."""

    def __init__(
        self,
        keys: KeysCollection,
        number_intensity_ch: int = 1,
        probability: float = 1.0,
        label_names=None,
        allow_missing_keys: bool = False,
    ):
        """
        Discard positive and negative points according to discard probability

        Args:
            keys: The ``keys`` parameter will be used to get and set the actual data item to transform
            number_intensity_ch: number of intensity channels
            probability: probability of discarding clicks
        """
        super().__init__(keys, allow_missing_keys)
        self.number_intensity_ch = number_intensity_ch
        self.discard_probability = probability
        self.label_names = label_names

    def _apply(self, image):
        # Decide (randomly, unless probability >= 1) whether to blank the clicks.
        should_discard = self.discard_probability >= 1.0 or np.random.choice(
            [True, False], p=[self.discard_probability, 1 - self.discard_probability]
        )
        if should_discard:
            blank = np.zeros(
                (len(self.label_names), image.shape[-3], image.shape[-2], image.shape[-1]), dtype=np.float32
            )
            expected_channels = self.number_intensity_ch + len(self.label_names)
            if image.shape[0] == expected_channels:
                # Guidance channels already present: zero them in place.
                image[self.number_intensity_ch :, ...] = blank
            else:
                # No guidance channels yet: append all-zero ones.
                image = np.concatenate([image, blank], axis=0)
        return image

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key != "image":
                print("This transform only applies to the image")
                continue
            tmp_image = self._apply(d[key])
            if isinstance(d[key], MetaTensor):
                d[key].array = tmp_image
            else:
                d[key] = tmp_image
        return d
class NormalizeLabelsInDatasetd(MapTransform):
    def __init__(self, keys: KeysCollection, label_names=None, allow_missing_keys: bool = False):
        """
        Normalize label values according to label names dictionary

        Args:
            keys: The ``keys`` parameter will be used to get and set the actual data item to transform
            label_names: all label names
        """
        super().__init__(keys, allow_missing_keys)
        self.label_names = label_names

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            # Dictionary containing new label numbers
            new_label_names = {}
            label = np.zeros(d[key].shape)
            # Map each foreground class to a consecutive index starting at 1;
            # background is always 0.  NOTE: the original enumerate()-based
            # code also consumed an index slot for "background", so any
            # foreground class listed *after* background got a gapped value.
            idx = 0
            for key_label, val_label in self.label_names.items():
                if key_label != "background":
                    idx += 1
                    new_label_names[key_label] = idx
                    label[d[key] == val_label] = idx
                else:
                    new_label_names["background"] = 0
            d["label_names"] = new_label_names
            if isinstance(d[key], MetaTensor):
                d[key].array = label
            else:
                d[key] = label
        return d
class SingleLabelSelectiond(MapTransform):
    def __init__(self, keys: KeysCollection, label_names=None, allow_missing_keys: bool = False):
        """
        Selects one label at a time to train the DeepEdit

        Args:
            keys: The ``keys`` parameter will be used to get and set the actual data item to transform
            label_names: all label names
        """
        super().__init__(keys, allow_missing_keys)
        self.label_names = label_names
        # Fixed mapping from organ name to its raw value in the dataset labels.
        self.all_label_values = {
            "spleen": 1,
            "right kidney": 2,
            "left kidney": 3,
            "gallbladder": 4,
            "esophagus": 5,
            "liver": 6,
            "stomach": 7,
            "aorta": 8,
            "inferior vena cava": 9,
            "portal_vein": 10,
            "splenic_vein": 11,
            "pancreas": 12,
            "right adrenal gland": 13,
            "left adrenal gland": 14,
        }

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key != "label":
                warnings.warn("This transform only applies to the label")
                continue
            # Pick one label at random for this sample and remember it.
            t_label = np.random.choice(self.label_names)
            d["current_label"] = t_label
            label_arr = d[key]
            # Zero out every other organ (in place).
            label_arr[label_arr != self.all_label_values[t_label]] = 0.0
            # Convert label to index values following label_names argument
            max_label_val = self.label_names.index(t_label) + 1
            label_arr[label_arr > 0] = max_label_val
            print(f"Using label {t_label} with number: {label_arr.max()}")
        return d
class AddGuidanceSignalDeepEditd(MapTransform):
    """
    Add Guidance signal for input image. Multilabel DeepEdit

    Based on the "guidance" points, apply Gaussian to them and add them as new channel for input image.

    Args:
        guidance: key to store guidance.
        sigma: standard deviation for Gaussian kernel.
        number_intensity_ch: channel index.
    """

    def __init__(
        self,
        keys: KeysCollection,
        guidance: str = "guidance",
        sigma: int = 3,
        number_intensity_ch: int = 1,
        allow_missing_keys: bool = False,
    ):
        super().__init__(keys, allow_missing_keys)
        self.guidance = guidance
        self.sigma = sigma
        self.number_intensity_ch = number_intensity_ch

    def _get_signal(self, image, guidance):
        # Build one click-map channel with the image's spatial size: 1.0 at
        # each click point, Gaussian-blurred and min-max rescaled to [0, 1].
        dimensions = 3 if len(image.shape) > 3 else 2
        # Guidance may arrive as an ndarray or a JSON-encoded string.
        guidance = guidance.tolist() if isinstance(guidance, np.ndarray) else guidance
        guidance = json.loads(guidance) if isinstance(guidance, str) else guidance
        # In inference the user may not provide clicks for some channels/labels
        if len(guidance):
            if dimensions == 3:
                # Assume channel is first and depth is last CHWD
                signal = np.zeros((1, image.shape[-3], image.shape[-2], image.shape[-1]), dtype=np.float32)
            else:
                signal = np.zeros((1, image.shape[-2], image.shape[-1]), dtype=np.float32)
            sshape = signal.shape
            for point in guidance:  # TO DO: make the guidance a list only - it is currently a list of list
                # Negative coordinates act as "no click" sentinels; skip them.
                if np.any(np.asarray(point) < 0):
                    continue
                if dimensions == 3:
                    # Making sure points fall inside the image dimension
                    p1 = max(0, min(int(point[-3]), sshape[-3] - 1))
                    p2 = max(0, min(int(point[-2]), sshape[-2] - 1))
                    p3 = max(0, min(int(point[-1]), sshape[-1] - 1))
                    signal[:, p1, p2, p3] = 1.0
                else:
                    p1 = max(0, min(int(point[-2]), sshape[-2] - 1))
                    p2 = max(0, min(int(point[-1]), sshape[-1] - 1))
                    signal[:, p1, p2] = 1.0
            # Apply a Gaussian filter to the signal
            if np.max(signal[0]) > 0:
                signal_tensor = torch.tensor(signal[0])
                pt_gaussian = GaussianFilter(len(signal_tensor.shape), sigma=self.sigma)
                signal_tensor = pt_gaussian(signal_tensor.unsqueeze(0).unsqueeze(0))
                signal_tensor = signal_tensor.squeeze(0).squeeze(0)
                signal[0] = signal_tensor.detach().cpu().numpy()
                # NOTE(review): assumes the blurred map is non-constant,
                # otherwise this divides by zero — confirm for tiny inputs.
                signal[0] = (signal[0] - np.min(signal[0])) / (np.max(signal[0]) - np.min(signal[0]))
            return signal
        else:
            # No clicks for this label: return an all-zero signal channel.
            if dimensions == 3:
                signal = np.zeros((1, image.shape[-3], image.shape[-2], image.shape[-1]), dtype=np.float32)
            else:
                signal = np.zeros((1, image.shape[-2], image.shape[-1]), dtype=np.float32)
            return signal

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key == "image":
                image = d[key]
                # Keep only the raw intensity channels, then append one
                # guidance-signal channel per label.
                tmp_image = image[0 : 0 + self.number_intensity_ch, ...]
                guidance = d[self.guidance]
                for key_label in guidance.keys():
                    # Getting signal based on guidance
                    signal = self._get_signal(image, guidance[key_label])
                    tmp_image = np.concatenate([tmp_image, signal], axis=0)
                    if isinstance(d[key], MetaTensor):
                        d[key].array = tmp_image
                    else:
                        d[key] = tmp_image
                return d
            else:
                print("This transform only applies to image key")
        return d
class FindAllValidSlicesDeepEditd(MapTransform):
    """
    Find/List all valid slices in the labels.
    Label is assumed to be a 4D Volume with shape CHWD, where C=1.

    Args:
        sids: key to store slices indices having valid label map.
    """

    def __init__(self, keys: KeysCollection, sids="sids", allow_missing_keys: bool = False):
        super().__init__(keys, allow_missing_keys)
        self.sids = sids

    def _apply(self, label, d):
        # For every label class, collect the depth indices whose 2D slice
        # contains at least one voxel of that class.
        sids = {}
        for key_label, val_label in d["label_names"].items():
            sids[key_label] = [
                sid for sid in range(label.shape[-1]) if val_label in label[0][..., sid]
            ]
        return sids

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key != "label":
                print("This transform only applies to label key")
                continue
            label = d[key]
            if label.shape[0] != 1:
                raise ValueError("Only supports single channel labels!")
            if len(label.shape) != 4:  # only for 3D
                raise ValueError("Only supports label with shape CHWD!")
            sids = self._apply(label, d)
            if sids is not None and len(sids.keys()):
                d[self.sids] = sids
            return d
        return d
class AddInitialSeedPointDeepEditd(Randomizable, MapTransform):
    """
    Add random guidance as initial seed point for a given label.

    Note that the label is of size (C, D, H, W) or (C, H, W)

    The guidance is of size (2, N, # of dims) where N is number of guidance added.
    # of dims = 4 when C, D, H, W; # of dims = 3 when (C, H, W)

    Args:
        guidance: key to store guidance.
        sids: key that represents lists of valid slice indices for the given label.
        sid: key that represents the slice to add initial seed point. If not present, random sid will be chosen.
        connected_regions: maximum connected regions to use for adding initial points.
    """

    def __init__(
        self,
        keys: KeysCollection,
        guidance: str = "guidance",
        sids: str = "sids",
        sid: str = "sid",
        connected_regions: int = 5,
        allow_missing_keys: bool = False,
    ):
        super().__init__(keys, allow_missing_keys)
        self.sids_key = sids
        self.sid_key = sid
        self.sid: Dict[str, int] = dict()  # per-label chosen slice index
        self.guidance = guidance
        self.connected_regions = connected_regions

    def _apply(self, label, sid, key_label):
        # Work in 2D on the selected slice when a valid sid is given for a
        # 3D label; otherwise operate on the full volume.
        dimensions = 3 if len(label.shape) > 3 else 2
        self.default_guidance = [-1] * (dimensions + 1)
        dims = dimensions
        if sid is not None and dimensions == 3:
            dims = 2
            label = label[0][..., sid][np.newaxis]  # Assume channel is first and depth is last CHWD
        # THERE MAY BE MULTIPLE BLOBS FOR SINGLE LABEL IN THE SELECTED SLICE
        label = (label > 0.5).astype(np.float32)
        # measure.label: Label connected regions of an integer array - Two pixels are connected
        # when they are neighbors and have the same value
        blobs_labels = measure.label(label.astype(int), background=0) if dims == 2 else label
        if np.max(blobs_labels) <= 0:
            raise AssertionError(f"SLICES NOT FOUND FOR LABEL: {key_label}")
        pos_guidance = []
        for ridx in range(1, 2 if dims == 3 else self.connected_regions + 1):
            if dims == 2:
                label = (blobs_labels == ridx).astype(np.float32)
                if np.sum(label) == 0:
                    # Pad with the [-1, ...] sentinel so every region slot
                    # is filled even when fewer blobs exist.
                    pos_guidance.append(self.default_guidance)
                    continue
            # The distance transform provides a metric or measure of the separation of points in the image.
            # This function calculates the distance between each pixel that is set to off (0) and
            # the nearest nonzero pixel for binary images - http://matlab.izmiran.ru/help/toolbox/images/morph14.html
            distance = distance_transform_cdt(label).flatten()
            probability = np.exp(distance) - 1.0
            idx = np.where(label.flatten() > 0)[0]
            # Sample one point inside the blob, biased toward its interior.
            seed = self.R.choice(idx, size=1, p=probability[idx] / np.sum(probability[idx]))
            dst = distance[seed]
            g = np.asarray(np.unravel_index(seed, label.shape)).transpose().tolist()[0]
            g[0] = dst[0]  # for debug
            if dimensions == 2 or dims == 3:
                pos_guidance.append(g)
            else:
                # Clicks are created using this convention Channel Height Width Depth (CHWD)
                pos_guidance.append([g[0], g[-2], g[-1], sid])  # Assume channel is first and depth is last CHWD
        return np.asarray([pos_guidance])

    def _randomize(self, d, key_label):
        # Pick a slice id for this label: honor a preset sid when valid,
        # otherwise choose randomly among the valid slice ids (if any).
        sids = d.get(self.sids_key).get(key_label) if d.get(self.sids_key) is not None else None
        sid = d.get(self.sid_key).get(key_label) if d.get(self.sid_key) is not None else None
        if sids is not None and sids:
            if sid is None or sid not in sids:
                sid = self.R.choice(sids, replace=False)
        else:
            logger.info(f"Not slice IDs for label: {key_label}")
            sid = None
        self.sid[key_label] = sid

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key == "label":
                label_guidances = {}
                for key_label in d["sids"].keys():
                    # Randomize: Select a random slice
                    self._randomize(d, key_label)
                    # Generate guidance base on selected slice
                    tmp_label = np.copy(d[key])
                    # Taking one label to create the guidance
                    if key_label != "background":
                        tmp_label[tmp_label != float(d["label_names"][key_label])] = 0
                    else:
                        # Background mask is everything-not-foreground.
                        tmp_label[tmp_label != float(d["label_names"][key_label])] = 1
                        tmp_label = 1 - tmp_label
                    label_guidances[key_label] = json.dumps(
                        self._apply(tmp_label, self.sid.get(key_label), key_label).astype(int).tolist()
                    )
                d[self.guidance] = label_guidances
                return d
            else:
                print("This transform only applies to label key")
        return d
class FindDiscrepancyRegionsDeepEditd(MapTransform):
    """
    Find discrepancy between prediction and actual during click interactions during training.

    Args:
        pred: key to prediction source.
        discrepancy: key to store discrepancies found between label and prediction.
    """

    def __init__(
        self,
        keys: KeysCollection,
        pred: str = "pred",
        discrepancy: str = "discrepancy",
        allow_missing_keys: bool = False,
    ):
        super().__init__(keys, allow_missing_keys)
        self.pred = pred
        self.discrepancy = discrepancy

    @staticmethod
    def disparity(label, pred):
        """Return [label-only regions, prediction-only regions] as float masks."""
        disparity = label - pred
        # Negative ONES mean predicted label is not part of the ground truth
        # Positive ONES mean predicted label missed that region of the ground truth
        pos_disparity = (disparity > 0).astype(np.float32)
        neg_disparity = (disparity < 0).astype(np.float32)
        return [pos_disparity, neg_disparity]

    def _apply(self, label, pred):
        return self.disparity(label, pred)

    @staticmethod
    def _binary_mask(arr, val_label, is_background):
        """Extract a binary {0,1} float mask of ``val_label`` from ``arr``.

        For background the mask is inverted (everything-not-foreground), as
        in the original per-branch code this helper replaces.
        """
        mask = np.copy(arr)
        if is_background:
            mask[mask != val_label] = 1
            mask = 1 - mask
        else:
            mask[mask != val_label] = 0
        return (mask > 0.5).astype(np.float32)

    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key == "label":
                all_discrepancies = {}
                for key_label, val_label in d["label_names"].items():
                    is_background = key_label == "background"
                    # Binarize ground truth and prediction for this class,
                    # then record where they disagree.  The original
                    # duplicated this masking code in both branches.
                    label = self._binary_mask(d[key], val_label, is_background)
                    pred = self._binary_mask(d[self.pred], val_label, is_background)
                    all_discrepancies[key_label] = self._apply(label, pred)
                d[self.discrepancy] = all_discrepancies
                return d
            else:
                print("This transform only applies to 'label' key")
        return d
class AddRandomGuidanceDeepEditd(Randomizable, MapTransform):
    """
    Add random guidance based on discrepancies that were found between label and prediction.

    With probability ``d[probability]`` this transform simulates a user
    interaction: it samples corrective clicks inside the discrepancy maps and
    appends them to the per-label guidance stored under ``guidance``.

    Args:
        guidance: key to guidance source, shape (2, N, # of dim)
        discrepancy: key to discrepancy map between label and prediction shape (2, C, H, W, D) or (2, C, H, W)
        probability: key to click/interaction probability, shape (1)
    """
    def __init__(
        self,
        keys: KeysCollection,
        guidance: str = "guidance",
        discrepancy: str = "discrepancy",
        probability: str = "probability",
        allow_missing_keys: bool = False,
    ):
        super().__init__(keys, allow_missing_keys)
        self.guidance_key = guidance
        self.discrepancy = discrepancy
        self.probability = probability
        # Interaction state, refreshed on every __call__ via randomize().
        self._will_interact = None
        self.is_pos = None
        self.is_other = None
        self.default_guidance = None
        # Per-label lists of click coordinates; rebuilt on each interactive call.
        self.guidance: Dict[str, List[List[int]]] = {}
    def randomize(self, data=None):
        # Bernoulli draw: interact with probability d[self.probability].
        probability = data[self.probability]
        self._will_interact = self.R.choice([True, False], p=[probability, 1.0 - probability])
    def find_guidance(self, discrepancy):
        """Sample one click inside ``discrepancy``.

        Voxels are weighted by exp(chamfer distance) - 1, so points deeper
        inside the discrepancy region are more likely to be picked.  Returns
        ``[distance, coord...]`` or None when the map has no positive voxels.
        """
        distance = distance_transform_cdt(discrepancy).flatten()
        probability = np.exp(distance.flatten()) - 1.0
        idx = np.where(discrepancy.flatten() > 0)[0]
        if np.sum(discrepancy > 0) > 0:
            seed = self.R.choice(idx, size=1, p=probability[idx] / np.sum(probability[idx]))
            dst = distance[seed]
            g = np.asarray(np.unravel_index(seed, discrepancy.shape)).transpose().tolist()[0]
            # First element carries the sampled distance instead of the channel index.
            g[0] = dst[0]
            return g
        return None
    def add_guidance(self, guidance, discrepancy, label_names, labels):
        """Append clicks for the current label and for other labels with large discrepancy."""
        # Positive clicks of the segment in the iteration
        pos_discr = discrepancy[0]  # idx 0 is positive discrepancy and idx 1 is negative discrepancy
        # Check the areas that belong to other segments
        other_discrepancy_areas = {}
        for _, (key_label, val_label) in enumerate(label_names.items()):
            if key_label != "background":
                # Mask of this label only, then measure its overlap with the
                # negative discrepancy.
                tmp_label = np.copy(labels)
                tmp_label[tmp_label != val_label] = 0
                tmp_label = (tmp_label > 0.5).astype(np.float32)
                other_discrepancy_areas[key_label] = np.sum(discrepancy[1] * tmp_label)
            else:
                # Background mask: everything that is not a foreground label.
                tmp_label = np.copy(labels)
                tmp_label[tmp_label != val_label] = 1
                tmp_label = 1 - tmp_label
                other_discrepancy_areas[key_label] = np.sum(discrepancy[1] * tmp_label)
        # Add guidance to the current key label
        if np.sum(pos_discr) > 0:
            guidance.append(self.find_guidance(pos_discr))
            self.is_pos = True
        # Add guidance to the other areas
        for key_label in label_names.keys():
            # Areas that cover more than 50 voxels
            if other_discrepancy_areas[key_label] > 50:
                self.is_other = True
                if key_label != "background":
                    tmp_label = np.copy(labels)
                    tmp_label[tmp_label != label_names[key_label]] = 0
                    tmp_label = (tmp_label > 0.5).astype(np.float32)
                    self.guidance[key_label].append(self.find_guidance(discrepancy[1] * tmp_label))
                else:
                    tmp_label = np.copy(labels)
                    tmp_label[tmp_label != label_names[key_label]] = 1
                    tmp_label = 1 - tmp_label
                    self.guidance[key_label].append(self.find_guidance(discrepancy[1] * tmp_label))
    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        """Simulate a random number of corrective clicks and store them under the guidance key."""
        d: Dict = dict(data)
        guidance = d[self.guidance_key]
        discrepancy = d[self.discrepancy]
        self.randomize(data)
        if self._will_interact:
            # Convert all guidance to lists so new guidance can be easily appended
            for key_label in d["label_names"].keys():
                tmp_gui = guidance[key_label]
                tmp_gui = tmp_gui.tolist() if isinstance(tmp_gui, np.ndarray) else tmp_gui
                tmp_gui = json.loads(tmp_gui) if isinstance(tmp_gui, str) else tmp_gui
                # Drop placeholder clicks (those containing -1).
                self.guidance[key_label] = [j for j in tmp_gui if -1 not in j]
            # Add guidance according to discrepancy
            for key_label in d["label_names"].keys():
                # Add guidance based on discrepancy
                self.add_guidance(self.guidance[key_label], discrepancy[key_label], d["label_names"], d["label"])
            # Checking the number of clicks
            num_clicks = random.randint(1, 10)
            counter = 0
            keep_guidance = []
            # Randomly keep labels until the click budget is spent or all
            # labels are covered; clicks of the labels not kept are discarded.
            while True:
                aux_label = random.choice(list(d["label_names"].keys()))
                if aux_label in keep_guidance:
                    pass
                else:
                    keep_guidance.append(aux_label)
                    counter = counter + len(self.guidance[aux_label])
                    # If collected clicks is bigger than max clicks, discard the others
                    if counter >= num_clicks:
                        for key_label in d["label_names"].keys():
                            if key_label not in keep_guidance:
                                self.guidance[key_label] = []
                        logger.info(f"Number of simulated clicks: {counter}")
                        break
                # Breaking once all labels are covered
                if len(keep_guidance) == len(d["label_names"].keys()):
                    logger.info(f"Number of simulated clicks: {counter}")
                    break
            d[self.guidance_key] = self.guidance  # Update the guidance
        return d
class AddGuidanceFromPointsDeepEditd(Transform):
    """
    Add guidance based on user clicks. ONLY WORKS FOR 3D.

    The input is assumed to be loaded by LoadImaged with original shape
    (H, W, D); clicks specify coordinates in that same (H, W, D) space and are
    rescaled to the current (possibly resized) image shape.

    Args:
        ref_image: key to reference image to fetch current and original image details.
        guidance: output key to store guidance.
        label_names: dict of label names whose clicks are read from ``data``.
        meta_keys: explicit key of the metadata dictionary of `ref_image`;
            if None, ``{ref_image}_{meta_key_postfix}`` is used.
        meta_key_postfix: suffix used to build the metadata key when
            ``meta_keys`` is None (default: ``meta_dict``).
    """
    def __init__(
        self,
        ref_image,
        guidance: str = "guidance",
        label_names=None,
        meta_keys: Optional[str] = None,
        meta_key_postfix: str = "meta_dict",
    ):
        self.ref_image = ref_image
        self.guidance = guidance
        self.label_names = label_names
        self.meta_keys = meta_keys
        self.meta_key_postfix = meta_key_postfix
    @staticmethod
    def _apply(clicks, factor):
        """Scale click coordinates by the per-axis resize factor."""
        if not len(clicks):
            return []
        return np.multiply(clicks, factor).astype(int).tolist()
    def __call__(self, data):
        d = dict(data)
        meta_dict_key = self.meta_keys or f"{self.ref_image}_{self.meta_key_postfix}"
        if meta_dict_key not in d:
            raise RuntimeError(f"Missing meta_dict {meta_dict_key} in data!")
        if "spatial_shape" not in d[meta_dict_key]:
            raise RuntimeError('Missing "spatial_shape" in meta_dict!')
        # Channel-first layout with depth last (CHWD) is assumed throughout.
        original_shape = d[meta_dict_key]["spatial_shape"]
        current_shape = list(d[self.ref_image].shape)[1:]
        scale_factor = np.divide(current_shape, original_shape)
        # Rescale every label's clicks from original to current image space.
        guidance_per_label = {}
        for label_name in self.label_names.keys():
            raw_clicks = list(np.array(d.get(label_name, [])).astype(int))
            guidance_per_label[label_name] = self._apply(raw_clicks, scale_factor)
        d[self.guidance] = guidance_per_label
        return d
class ResizeGuidanceMultipleLabelDeepEditd(Transform):
    """
    Resize the guidance points to match a cropped/resized reference image.
    """
    def __init__(self, guidance: str, ref_image: str) -> None:
        self.guidance = guidance
        self.ref_image = ref_image
    def __call__(self, data):
        d = dict(data)
        # Channel-first layout assumed: spatial dims start at index 1.
        current_shape = d[self.ref_image].shape[1:]
        original_shape = d["image_meta_dict"]["spatial_shape"]
        scale = np.divide(current_shape, original_shape)
        # Rescale the click coordinates of every label; empty lists pass through.
        resized = {}
        for label_name, points in d[self.guidance].items():
            resized[label_name] = (
                np.multiply(points, scale).astype(int).tolist() if len(points) else []
            )
        d[self.guidance] = resized
        return d
class SplitPredsLabeld(MapTransform):
    """
    Split multi-channel predictions and labels into one entry per label
    (``pred_<name>`` / ``label_<name>``) for individual evaluation.
    """
    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key != "pred":
                logger.info("This is only for pred key")
                continue
            # Channel 0 is background; foreground labels start at channel 1.
            for idx, (label_name, _) in enumerate(d["label_names"].items()):
                if label_name == "background":
                    continue
                d[f"pred_{label_name}"] = d[key][idx + 1, ...][None]
                d[f"label_{label_name}"] = d["label"][idx + 1, ...][None]
        return d
class AddInitialSeedPointMissingLabelsd(Randomizable, MapTransform):
    """
    Add random guidance as initial seed point for a given label.

    Note that the label is of size (C, D, H, W) or (C, H, W).
    The guidance is of size (2, N, # of dims) where N is number of guidance added.
    # of dims = 4 when C, D, H, W; # of dims = 3 when (C, H, W)

    Args:
        guidance: key to store guidance.
        sids: key that represents lists of valid slice indices for the given label.
        sid: key that represents the slice to add initial seed point. If not present, random sid will be chosen.
        connected_regions: maximum connected regions to use for adding initial points.
    """
    def __init__(
        self,
        keys: KeysCollection,
        guidance: str = "guidance",
        sids: str = "sids",
        sid: str = "sid",
        connected_regions: int = 5,
        allow_missing_keys: bool = False,
    ):
        super().__init__(keys, allow_missing_keys)
        self.sids_key = sids
        self.sid_key = sid
        # Chosen slice index per label, filled by _randomize().
        self.sid: Dict[str, int] = dict()
        self.guidance = guidance
        self.connected_regions = connected_regions
    def _apply(self, label, sid):
        """Sample seed point(s) inside ``label``; returns an array of clicks.

        Missing labels produce the sentinel click [-1, -1, ...] so downstream
        transforms always receive at least one entry.
        """
        dimensions = 3 if len(label.shape) > 3 else 2
        self.default_guidance = [-1] * (dimensions + 1)
        dims = dimensions
        if sid is not None and dimensions == 3:
            # Restrict sampling to the selected 2D slice.
            dims = 2
            label = label[0][..., sid][np.newaxis]  # Assume channel is first and depth is last CHWD
        # THERE MAY BE MULTIPLE BLOBS FOR SINGLE LABEL IN THE SELECTED SLICE
        label = (label > 0.5).astype(np.float32)
        # measure.label: Label connected regions of an integer array - Two pixels are connected
        # when they are neighbors and have the same value
        blobs_labels = measure.label(label.astype(int), background=0) if dims == 2 else label
        label_guidance = []
        # If there is no presence of the label in this slice, emit the sentinel.
        if np.max(blobs_labels) <= 0:
            label_guidance.append(self.default_guidance)
        else:
            # One seed per connected region (2D) or a single seed for the volume (3D).
            for ridx in range(1, 2 if dims == 3 else self.connected_regions + 1):
                if dims == 2:
                    label = (blobs_labels == ridx).astype(np.float32)
                    if np.sum(label) == 0:
                        label_guidance.append(self.default_guidance)
                        continue
                # The distance transform provides a metric or measure of the separation of points in the image.
                # This function calculates the distance between each pixel that is set to off (0) and
                # the nearest nonzero pixel for binary images
                # http://matlab.izmiran.ru/help/toolbox/images/morph14.html
                distance = distance_transform_cdt(label).flatten()
                probability = np.exp(distance) - 1.0
                idx = np.where(label.flatten() > 0)[0]
                # Deeper voxels (larger distance) are sampled more often.
                seed = self.R.choice(idx, size=1, p=probability[idx] / np.sum(probability[idx]))
                dst = distance[seed]
                g = np.asarray(np.unravel_index(seed, label.shape)).transpose().tolist()[0]
                g[0] = dst[0]  # for debug
                if dimensions == 2 or dims == 3:
                    label_guidance.append(g)
                else:
                    # Clicks are created using this convention Channel Height Width Depth (CHWD)
                    label_guidance.append([g[0], g[-2], g[-1], sid])  # Assume channel is first and depth is last CHWD
        return np.asarray(label_guidance)
    def _randomize(self, d, key_label):
        """Pick a valid slice index for ``key_label`` (random if none was given)."""
        sids = d.get(self.sids_key).get(key_label) if d.get(self.sids_key) is not None else None
        sid = d.get(self.sid_key).get(key_label) if d.get(self.sid_key) is not None else None
        if sids is not None and sids:
            if sid is None or sid not in sids:
                sid = self.R.choice(sids, replace=False)
        else:
            logger.info(f"Not slice IDs for label: {key_label}")
            sid = None
        self.sid[key_label] = sid
    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key == "label":
                label_guidances = {}
                for key_label in d["sids"].keys():
                    # Randomize: Select a random slice
                    self._randomize(d, key_label)
                    # Generate guidance base on selected slice
                    tmp_label = np.copy(d[key])
                    # Taking one label to create the guidance
                    if key_label != "background":
                        tmp_label[tmp_label != float(d["label_names"][key_label])] = 0
                    else:
                        # Background mask: invert all foreground voxels.
                        tmp_label[tmp_label != float(d["label_names"][key_label])] = 1
                        tmp_label = 1 - tmp_label
                    label_guidances[key_label] = json.dumps(
                        self._apply(tmp_label, self.sid.get(key_label)).astype(int).tolist()
                    )
                d[self.guidance] = label_guidances
                return d
            else:
                print("This transform only applies to label key")
                return d
class FindAllValidSlicesMissingLabelsd(MapTransform):
    """
    Find/List all valid slices in the labels.
    Label is assumed to be a 4D Volume with shape CHWD, where C=1.

    Args:
        sids: key to store slices indices having valid label map.
    """
    def __init__(self, keys: KeysCollection, sids="sids", allow_missing_keys: bool = False):
        super().__init__(keys, allow_missing_keys)
        self.sids = sids
    def _apply(self, label, d):
        """Map each label name to the depth-slice indices that contain it.

        Labels absent from the whole volume get the sentinel list ``[-1] * 10``
        so downstream transforms always find a non-empty entry.
        """
        sids = {}
        for key_label, label_value in d["label_names"].items():
            # Assume channel is first and depth is last (CHWD).
            l_ids = [sid for sid in range(label.shape[-1]) if label_value in label[0][..., sid]]
            # If there are no slices with this label, store sentinel indices.
            if not l_ids:
                l_ids = [-1] * 10
            sids[key_label] = l_ids
        return sids
    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d: Dict = dict(data)
        for key in self.key_iterator(d):
            if key == "label":
                label = d[key]
                if label.shape[0] != 1:
                    raise ValueError("Only supports single channel labels!")
                if len(label.shape) != 4:  # only for 3D
                    raise ValueError("Only supports label with shape CHWD!")
                sids = self._apply(label, d)
                if sids is not None and len(sids.keys()):
                    d[self.sids] = sids
                return d
            else:
                print("This transform only applies to label key")
                return d
|
{
"content_hash": "03acda763847ea9049609b2e746e60c5",
"timestamp": "",
"source": "github",
"line_count": 876,
"max_line_length": 118,
"avg_line_length": 41.80707762557078,
"alnum_prop": 0.5538049859377987,
"repo_name": "Project-MONAI/MONAI",
"id": "76b9e18cc7028ab946ccdcc22b52d45a2814be8b",
"size": "37197",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "monai/apps/deepedit/transforms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "15956"
},
{
"name": "C++",
"bytes": "189648"
},
{
"name": "Cuda",
"bytes": "154905"
},
{
"name": "Dockerfile",
"bytes": "2454"
},
{
"name": "Python",
"bytes": "7209898"
},
{
"name": "Shell",
"bytes": "20587"
}
],
"symlink_target": ""
}
|
from oslo_log import log as logging
import pecan
from pecan import hooks
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from mistral.api.controllers import resource
from mistral.api.controllers.v2 import types
from mistral.api.hooks import content_type as ct_hook
from mistral.db.v2 import api as db_api
from mistral import exceptions as exc
from mistral.services import actions
from mistral.utils import rest_utils
# Module-level logger for the actions API controller.
LOG = logging.getLogger(__name__)
# Allowed visibility scopes for an action definition.
SCOPE_TYPES = wtypes.Enum(str, 'private', 'public')
class Action(resource.Resource):
    """Action resource.

    NOTE: *name* is immutable. Note that name and description get inferred
    from action definition when Mistral service receives a POST request.
    So they can't be changed in another way.
    """

    id = wtypes.text
    name = wtypes.text
    is_system = bool
    input = wtypes.text
    description = wtypes.text
    tags = [wtypes.text]
    definition = wtypes.text
    scope = SCOPE_TYPES
    created_at = wtypes.text
    updated_at = wtypes.text

    @classmethod
    def sample(cls):
        """Return an example Action used in generated API documentation."""
        sample_values = {
            'id': '123e4567-e89b-12d3-a456-426655440000',
            'name': 'flow',
            'definition': 'HERE GOES ACTION DEFINITION IN MISTRAL DSL v2',
            'tags': ['large', 'expensive'],
            'scope': 'private',
            'created_at': '1970-01-01T00:00:00.000000',
            'updated_at': '1970-01-01T00:00:00.000000',
        }
        return cls(**sample_values)
class Actions(resource.ResourceList):
    """A paginated collection of Action resources."""

    actions = [Action]

    def __init__(self, **kwargs):
        # ResourceList uses _type to build pagination links.
        self._type = 'actions'
        super(Actions, self).__init__(**kwargs)

    @classmethod
    def sample(cls):
        """Return an example collection with one action and a pagination link."""
        sample = cls()
        sample.actions = [Action.sample()]
        sample.next = ("http://localhost:8989/v2/actions?"
                       "sort_keys=id,name&"
                       "sort_dirs=asc,desc&limit=10&"
                       "marker=123e4567-e89b-12d3-a456-426655440000")
        return sample
class ActionsController(rest.RestController, hooks.HookController):
    """REST controller for /v2/actions: CRUD on Mistral action definitions."""
    # TODO(nmakhotkin): Have a discussion with pecan/WSME folks in order
    # to have requests and response of different content types. Then
    # delete ContentTypeHook.
    __hooks__ = [ct_hook.ContentTypeHook("application/json", ['POST', 'PUT'])]

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(Action, wtypes.text)
    def get(self, name):
        """Return the named action."""
        # Lazy %-args: consistent with get_all() and avoids formatting when
        # INFO is disabled.
        LOG.info("Fetch action [name=%s]", name)
        db_model = db_api.get_action_definition(name)
        return Action.from_dict(db_model.to_dict())

    @rest_utils.wrap_pecan_controller_exception
    @pecan.expose(content_type="text/plain")
    def put(self):
        """Update one or more actions.

        NOTE: This text is allowed to have definitions
        of multiple actions. In this case they all will be updated.
        """
        definition = pecan.request.text
        LOG.info("Update action(s) [definition=%s]", definition)
        db_acts = actions.update_actions(definition)
        models_dicts = [db_act.to_dict() for db_act in db_acts]
        action_list = [Action.from_dict(act) for act in models_dicts]
        return Actions(actions=action_list).to_string()

    @rest_utils.wrap_pecan_controller_exception
    @pecan.expose(content_type="text/plain")
    def post(self):
        """Create a new action.

        NOTE: This text is allowed to have definitions
        of multiple actions. In this case they all will be created.
        """
        definition = pecan.request.text
        pecan.response.status = 201
        LOG.info("Create action(s) [definition=%s]", definition)
        db_acts = actions.create_actions(definition)
        models_dicts = [db_act.to_dict() for db_act in db_acts]
        action_list = [Action.from_dict(act) for act in models_dicts]
        return Actions(actions=action_list).to_string()

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
    def delete(self, name):
        """Delete the named action."""
        LOG.info("Delete action [name=%s]", name)
        with db_api.transaction():
            db_model = db_api.get_action_definition(name)
            # System actions ship with Mistral and must not be removed.
            if db_model.is_system:
                msg = "Attempt to delete a system action: %s" % name
                raise exc.DataAccessException(msg)
            db_api.delete_action_definition(name)

    @wsme_pecan.wsexpose(Actions, types.uuid, int, types.uniquelist,
                         types.list)
    def get_all(self, marker=None, limit=None, sort_keys='name',
                sort_dirs='asc'):
        """Return all actions.

        :param marker: Optional. Pagination marker for large data sets.
        :param limit: Optional. Maximum number of resources to return in a
                      single result. Default value is None for backward
                      compatability.
        :param sort_keys: Optional. Columns to sort results by.
                          Default: name.
        :param sort_dirs: Optional. Directions to sort corresponding to
                          sort_keys, "asc" or "desc" can be choosed.
                          Default: asc.

        Where project_id is the same as the requester or
        project_id is different but the scope is public.
        """
        LOG.info("Fetch actions. marker=%s, limit=%s, sort_keys=%s, "
                 "sort_dirs=%s", marker, limit, sort_keys, sort_dirs)
        rest_utils.validate_query_params(limit, sort_keys, sort_dirs)
        marker_obj = None
        if marker:
            marker_obj = db_api.get_action_definition_by_id(marker)
        db_action_defs = db_api.get_action_definitions(
            limit=limit,
            marker=marker_obj,
            sort_keys=sort_keys,
            sort_dirs=sort_dirs
        )
        actions_list = [Action.from_dict(db_model.to_dict())
                        for db_model in db_action_defs]
        return Actions.convert_with_links(
            actions_list,
            limit,
            pecan.request.host_url,
            sort_keys=','.join(sort_keys),
            sort_dirs=','.join(sort_dirs)
        )
|
{
"content_hash": "c66b012048cc52f7916a446075409e4e",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 78,
"avg_line_length": 33.613756613756614,
"alnum_prop": 0.6099480560365181,
"repo_name": "dennybaa/mistral",
"id": "d081e9b5fae324bf628c2f0ad241fad5389e126a",
"size": "7008",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mistral/api/controllers/v2/action.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "951"
},
{
"name": "Python",
"bytes": "1037769"
},
{
"name": "Shell",
"bytes": "18657"
}
],
"symlink_target": ""
}
|
import suspect
import numpy
def test_null_transform():
    """A zero frequency/phase shift must preserve the MRSData type."""
    constant_fid = numpy.ones(128, 'complex')
    mrs = suspect.MRSData(constant_fid, 1.0 / 128, 123)
    shifted = suspect.processing.frequency_correction.transform_fid(mrs, 0, 0)
    assert type(shifted) == suspect.MRSData
def test_water_peak_alignment_misshape():
    """Peak alignment should handle input with a leading singleton dimension."""
    base_spectrum = numpy.zeros(128, 'complex')
    base_spectrum[0] = 1
    fids = suspect.MRSData(numpy.zeros((16, 128), 'complex'), 1.0 / 128, 123)
    for shift in range(fids.shape[0]):
        # Build an FID whose spectral peak sits `shift` bins from the origin.
        fids[shift] = numpy.fft.ifft(numpy.roll(base_spectrum, shift))
        reshaped_fid = numpy.reshape(fids[shift], (1, 128))
        measured = suspect.processing.frequency_correction.residual_water_alignment(reshaped_fid)
        numpy.testing.assert_almost_equal(measured, shift)
def test_water_peak_alignment():
    """The detected shift must equal the number of bins the peak was rolled by."""
    base_spectrum = numpy.zeros(128, 'complex')
    base_spectrum[0] = 1
    fids = suspect.MRSData(numpy.zeros((16, 128), 'complex'), 1.0 / 128, 123)
    for shift in range(fids.shape[0]):
        fids[shift] = numpy.fft.ifft(numpy.roll(base_spectrum, shift))
        measured = suspect.processing.frequency_correction.residual_water_alignment(fids[shift])
        numpy.testing.assert_almost_equal(measured, shift)
def test_spectral_registration():
    """Spectral registration must recover a known frequency offset."""
    time_axis = numpy.arange(0, 0.512, 5e-4)
    target_fid = suspect.MRSData(suspect.basis.gaussian(time_axis, 0, 0, 50.0), 5e-4, 123)
    for offset in range(1, 15):
        shifted_fid = suspect.MRSData(suspect.basis.gaussian(time_axis, offset, 0, 50.0), 5e-4, 123)
        frequency_shift, _ = suspect.processing.frequency_correction.spectral_registration(shifted_fid, target_fid)
        numpy.testing.assert_allclose(frequency_shift, offset)
def test_compare_frequency_correction():
    # Compare the two frequency-correction estimators on real twix data:
    # their shifts should agree to within one spectral bin (current_fid.df).
    test_data = suspect.io.load_twix("tests/test_data/siemens/twix_vb.dat")
    # Combine coils with SVD weights derived from the repetition-averaged data.
    test_data = test_data.inherit(numpy.average(test_data, axis=1, weights=suspect.processing.channel_combination.svd_weighting(numpy.average(test_data, axis=0))))
    # Register every repetition against the first one.
    sr_target = test_data[0]
    for i in range(test_data.shape[0]):
        current_fid = test_data[i]
        wpa_fs = suspect.processing.frequency_correction.residual_water_alignment(current_fid)
        sr_fs = suspect.processing.frequency_correction.spectral_registration(current_fid, sr_target)[0]
        numpy.testing.assert_allclose(wpa_fs, sr_fs, atol=current_fid.df)
def test_frequency_transform():
    """Applying a shift of -i must undo a spectrum rolled by +i bins."""
    reference_spectrum = numpy.zeros(128, 'complex')
    reference_spectrum[0] = 1
    for shift in range(16):
        rolled_fid = suspect.MRSData(numpy.fft.ifft(numpy.roll(reference_spectrum, shift)), 1.0 / 128, 123)
        corrected_fid = suspect.processing.frequency_correction.transform_fid(rolled_fid, -shift, 0)
        corrected_spectrum = numpy.fft.fft(corrected_fid)
        numpy.testing.assert_almost_equal(corrected_spectrum, reference_spectrum)
def test_apodize():
    """Gaussian apodization halves the spectrum at the expected bin while preserving total area."""
    data = suspect.MRSData(numpy.ones(1024), 5e-4, 123.456)
    raw_spectrum = numpy.fft.fft(data)
    window_params = {"line_broadening": data.df * 8}
    apodized = suspect.processing.apodize(data, suspect.processing.gaussian_window, window_params)
    spectrum = numpy.fft.fft(apodized)
    numpy.testing.assert_allclose(spectrum[4].real, 0.5 * numpy.amax(spectrum), rtol=0.01)
    # Apodization must not change the total spectral area.
    numpy.testing.assert_allclose(numpy.sum(spectrum), numpy.sum(raw_spectrum))
def test_gaussian_denoising():
    """Smoothing a constant signal must leave it unchanged."""
    flat_signal = numpy.ones(128)
    smoothed = suspect.processing.denoising.sliding_gaussian(flat_signal, 11)
    numpy.testing.assert_almost_equal(flat_signal, smoothed)
def test_water_suppression():
    # HSVD should decompose the averaged FID into exactly the requested
    # number of components.
    data = suspect.io.load_twix("tests/test_data/siemens/twix_vb.dat")
    channel_combined_data = data.inherit(numpy.average(data, axis=1))
    components = suspect.processing.water_suppression.hsvd(channel_combined_data[10], 4, int(data.np / 2))
    # NOTE(review): `fid` is unused — the call only smoke-tests that
    # construct_fid runs on the hsvd output; consider asserting on it.
    fid = suspect.processing.water_suppression.construct_fid(components, data.time_axis())
    assert len(components) == 4
|
{
"content_hash": "a7d06602021dfcf928fb684eb02aa122",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 163,
"avg_line_length": 45.31460674157304,
"alnum_prop": 0.70047111331515,
"repo_name": "bennyrowland/suspect",
"id": "40ea9c3ce9430bde85ac058de81eb50a53467549",
"size": "4033",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_mrs/test_processing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "98992"
}
],
"symlink_target": ""
}
|
""".. Ignore pydocstyle D400.
===============
Signal Handlers
===============
"""
import logging
from asgiref.sync import async_to_sync
from channels.layers import ChannelFull, get_channel_layer
from django.db import transaction
from django.db.models.signals import post_save
from django.dispatch import receiver
from rolca.backup.protocol import CHANNEL_BACKUP, TYPE_FILE
from rolca.backup.models import FileBackup
from rolca.core.models import File
logger = logging.getLogger(__name__)
def commit_signal(file_backup_pk):
    """Trigger a backup on a separate worker."""
    layer = get_channel_layer()
    message = {"type": TYPE_FILE, "file_backup_pk": file_backup_pk}
    try:
        async_to_sync(layer.send)(CHANNEL_BACKUP, message)
    except ChannelFull:
        # Best-effort dispatch: a full channel is logged, not raised.
        logger.warning("Cannot trigger backup because channel is full.")
@receiver(post_save, sender=File)
def backup_post_save_handler(sender, instance, created, **kwargs):
    """Trigger a backup after a new File is created."""
    if not created:
        return
    # Capture only the pk so the on_commit closure stays small.
    backup_pk = FileBackup.objects.create(source=instance).pk
    transaction.on_commit(lambda: commit_signal(backup_pk))
|
{
"content_hash": "f9f6ea15d26d0b9459695e22c51ab10a",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 82,
"avg_line_length": 29.275,
"alnum_prop": 0.7019641332194705,
"repo_name": "dblenkus/rolca",
"id": "1d03f833f0ff98e00c13463e530c335453734f5a",
"size": "1171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rolca/backup/signals.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1186"
},
{
"name": "Python",
"bytes": "109206"
},
{
"name": "Shell",
"bytes": "345"
}
],
"symlink_target": ""
}
|
import pytest
from spacy.language import Language, BaseDefaults
from spacy.lang.punctuation import TOKENIZER_INFIXES
from spacy.lang.char_classes import ALPHA
@pytest.mark.issue(768)
@pytest.mark.parametrize(
    "text,expected_tokens", [("l'avion", ["l'", "avion"]), ("j'ai", ["j'", "ai"])]
)
def test_issue768(text, expected_tokens):
    """Allow zero-width 'infix' token during the tokenization process."""
    # Zero-width split between an apostrophe-terminated prefix and a letter.
    zero_width_infix = r"(?<=[{a}]\')(?=[{a}])".format(a=ALPHA)

    class FrenchTest(Language):
        class Defaults(BaseDefaults):
            infixes = TOKENIZER_INFIXES + [zero_width_infix]

    tokenizer = FrenchTest().tokenizer
    tokens = tokenizer(text)
    assert len(tokens) == 2
    assert [token.text for token in tokens] == expected_tokens
|
{
"content_hash": "d8aea89de6d9a6099bb524e0be9ecafc",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 82,
"avg_line_length": 35.09090909090909,
"alnum_prop": 0.6709844559585493,
"repo_name": "explosion/spaCy",
"id": "272531b638f243068e0dca0dc8ee1db7d749f56e",
"size": "772",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spacy/tests/lang/fr/test_prefix_suffix_infix.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9571"
},
{
"name": "C++",
"bytes": "187"
},
{
"name": "Cython",
"bytes": "784034"
},
{
"name": "Dockerfile",
"bytes": "432"
},
{
"name": "HTML",
"bytes": "29880"
},
{
"name": "JavaScript",
"bytes": "240056"
},
{
"name": "Jinja",
"bytes": "12977"
},
{
"name": "Makefile",
"bytes": "1576"
},
{
"name": "Python",
"bytes": "3783857"
},
{
"name": "Sass",
"bytes": "56930"
},
{
"name": "Shell",
"bytes": "984"
}
],
"symlink_target": ""
}
|
import time
import glob
import re
import os
import subprocess
from Tkinter import *
import tkFont
import threading
# Fixed installer window geometry (pixels).
WINWIDTH = 776
WINHEIGHT = 390
# UI palette: cyan background with white text; buttons are inverted
# (white background, cyan text).
BGCOLOR = "#3BD7EA"
ACTIVECOLOR = "#3BD7EA"
TEXTCOLOR = "#FFFFFF"
PROGRESSBG = "#FFFFFF"
BTNTEXT = "#2AD4E7"
BTNCOLOR = "#FFFFFF"
class Application(Frame):
def createStartButton(self, master, onclick):
b = Label( master )
b.config( text="START", highlightthickness=1, highlightbackground=ACTIVECOLOR, background=BTNCOLOR, foreground=BTNTEXT, width=12, height=3 )
b.bind("<Button-1>", onclick)
b.place( x=336, y=80, anchor=NW )
return b
def createNextButton(self, master, onclick):
b = Label( master )
b.config( text="NEXT", highlightthickness=1, highlightbackground=ACTIVECOLOR, background=BTNCOLOR, foreground=BTNTEXT, width=12, height=3 )
b.bind("<Button-1>", onclick)
b.place( x=336, y=80, anchor=NW )
return b
def createDoneButton(self, master, onclick):
b = Label( master )
b.config( text="OK", highlightthickness=1, highlightbackground=ACTIVECOLOR, background=BTNCOLOR, foreground=BTNTEXT, width=12, height=3 )
b.bind("<Button-1>", onclick)
b.place( x=336, y=80, anchor=NW )
return b
def createStartOverButton(self, master, onclick):
b = Label( master )
b.config( text="START OVER", highlightthickness=1, highlightbackground=ACTIVECOLOR, background=BTNCOLOR, foreground=BTNTEXT, width=12, height=3 )
b.bind("<Button-1>", onclick)
b.place( x=336, y=80, anchor=NW )
return b
def createFormatButton(self, master, onclick):
b = Label( master )
b.config( text="INSTALL", highlightthickness=1, highlightbackground=ACTIVECOLOR, background=BTNCOLOR, foreground=BTNTEXT, width=12, height=3 )
b.bind("<Button-1>", onclick)
b.place( x=336, y=80, anchor=NW )
return b
def createInstructionTxt(self, master, text):
instructionFont = tkFont.Font(family="Helvetica", size=18)
instruction = Label( master, background=BGCOLOR, foreground=TEXTCOLOR, font=instructionFont )
instruction["text"] = text
instruction.place( relx=0.5, y=14, anchor=N )
return instruction
def createWidgets(self):
#self.QUIT = Button(self)
#self.QUIT["text"] = "QUIT"
#self.QUIT["fg"] = "red"
#self.QUIT["command"] = self.quit
#self.QUIT.place( relx=0, rely=1, anchor=SW )
self.step1Frame = Frame( width=WINWIDTH, height=190, background=BGCOLOR )
self.step1Frame.instruction = self.createInstructionTxt( self.step1Frame, "Remove any SD Cards from your computer and click Start." )
self.step1Frame.nextButton = self.createStartButton( self.step1Frame, self.preStep2 )
self.step2Frame = Frame( width=WINWIDTH, height=190, background=BGCOLOR )
self.step2Frame.nextButton = self.createNextButton( self.step2Frame, self.preStep3 )
self.step2Frame.instruction = self.createInstructionTxt( self.step2Frame, "Insert an SD Card you wish to format with Coder." )
self.step3Frame = Frame( width=WINWIDTH, height=190, background=BGCOLOR )
self.step3Frame.nextButton = self.createFormatButton( self.step3Frame, self.preStep4 )
self.step3Frame.instruction = self.createInstructionTxt( self.step3Frame, "Click Install to format this SD Card and install Coder." )
self.step3Frame.progress = Meter( self.step3Frame, width=600, height=6, bg=PROGRESSBG, progcolor=ACTIVECOLOR )
self.step4Frame = Frame( width=WINWIDTH, height=190, background=BGCOLOR )
self.step4Frame.instruction = Label( self.step4Frame )
self.step4Frame.instruction = self.createInstructionTxt( self.step4Frame, "Coder has been successfully installed. You may now remove your SD Card.\nFollow instructions at goo.gl/coder to get started." )
self.step4Frame.instruction.place( relx=0.5, y=8, anchor=N )
self.step4Frame.doneButton = self.createDoneButton( self.step4Frame, self.doneClick )
self.errorFrame = Frame( width=WINWIDTH, height=190, background=BGCOLOR )
self.errorFrame.soButton = self.createStartOverButton( self.errorFrame, self.step1 )
self.errorFrame.instruction = Label( self.errorFrame )
self.errorFrame.instruction = self.createInstructionTxt( self.errorFrame, "Error" )
self.workingFrame = Frame( width=WINWIDTH, height=190, background=BGCOLOR )
self.workingFrame.instruction = Label( self.workingFrame )
self.workingFrame.instruction = self.createInstructionTxt( self.workingFrame, "Thinking..." )
self.logoimg = PhotoImage( file="installerlogo.gif" )
self.logo = Label(self)
self.logo.config( background=BGCOLOR, image = self.logoimg )
self.logo.place( x=170, y=55, anchor=NW )
#self.logo.config( image = self.logoimg )
#self.logo.geometry("+%d+%d" % (self.winfo_rootx()+50,
# self.winfo_rooty()+50))
def doneClick( self, event=None ):
self.quit()
def createMenu(self):
menu = Menu( root )
root.config( menu=menu )
def unPlace( self ):
self.step1Frame.place_forget()
self.step2Frame.place_forget()
self.step3Frame.place_forget()
self.step4Frame.place_forget()
self.errorFrame.place_forget()
self.workingFrame.place_forget()
def showWorking(self, text ):
self.unPlace()
self.workingFrame.instruction['text'] = text
self.workingFrame.place( relx=0, rely=1, anchor=SW )
def step1( self, event=None ):
self.existingDrives = []
self.sdCardDev = -1
self.unPlace()
self.step1Frame.place( relx=0, rely=1, anchor=SW )
def preStep2( self, event=None ):
self.unPlace()
self.showWorking( "Just a moment..." )
self.update()
time.sleep( 2 )
self.existingDrives = glob.glob("/dev/rdisk?")
for d in self.existingDrives:
print( "existing drive: " + d )
self.step2()
def step2( self, event=None ):
self.unPlace()
self.step2Frame.place( relx=0, rely=1, anchor=SW )
self.update()
def preStep3( self, event=None ):
self.unPlace()
self.showWorking( "Looking for your SD Card..." )
self.update()
time.sleep( 2 )
updatedDrives = glob.glob("/dev/rdisk?")
self.newDrives = []
for d in updatedDrives:
found = False
for o in self.existingDrives:
if d == o:
found = True
break
if not found:
self.newDrives.append( d )
for d in self.newDrives:
print( "found new drive: " + d )
if len( self.newDrives ) is not 1:
self.errorRetry( "Your card wasn't correctly detected. Let's try again." )
#self.errorRetry( "Error: found " + str( len( self.newDrives ) ) + " new disks inserted but expected 1" )
else:
self.showWorking( "SD Card found." )
self.update()
time.sleep(2)
self.step3()
    def step3( self, event=None ):
        """Show the confirmation page with the Install button visible."""
        self.unPlace()
        self.step3Frame.progress.place_forget()
        self.step3Frame.nextButton.place( x=336, y=80, anchor=NW )
        self.step3Frame.instruction['text'] = "Click Install to format this SD Card and install Coder."
        self.step3Frame.place( relx=0, rely=1, anchor=SW )
    def preStep4( self, event=None ):
        """Start the SD-card format/install on a background daemon thread.

        Swaps the Install button for the progress bar, resets the shared
        module-level progress globals, launches formatSDDevice() on a daemon
        thread and begins polling it via pollFormatProgress().
        """
        global formatError, formatComplete, formatProgress
        self.update_idletasks();
        self.step3Frame.nextButton.place_forget()
        self.step3Frame.progress.place( relx=0.5, y=85, anchor=N )
        self.step3Frame.instruction['text'] = "Installing Coder on your SD Card."
        self.update_idletasks();
        formatProgress = 0.0
        formatComplete = False
        formatError = None
        formatThread = threading.Thread( target=formatSDDevice )
        formatThread.daemon = True
        formatThread.start()
        self.pollFormatProgress()
    def step4( self, event=None ):
        """Show the final 'done' page."""
        self.unPlace()
        self.step4Frame.place( relx=0, rely=1, anchor=SW )
    def pollFormatProgress( self ):
        """Poll the format thread's shared globals from the Tk event loop.

        Re-schedules itself every 5 ms while the format is running, shows the
        retry screen on error, and advances to step4() on completion.
        """
        global formatError, formatComplete, formatProgress
        if formatError is None and not formatComplete:
            # Still running: mirror the thread's progress into the bar.
            self.step3Frame.progress.setProgress( formatProgress )
            self.after( 5, self.pollFormatProgress )
        elif formatError is not None:
            self.errorRetry( formatError )
            # Clear the flags so the error branch does not fire again.
            formatError = ""
            formatComplete = None
        else:
            self.step3Frame.instruction['text'] = "Install complete!"
            self.step3Frame.progress.setProgress( 1 )
            self.update()
            time.sleep(4)
            self.step4()
            self.update();
    def errorRetry( self, message ):
        """Show the error page with *message* so the user can retry."""
        self.unPlace()
        self.errorFrame.instruction["text"] = message
        self.errorFrame.place( relx=0, rely=1, anchor=SW )
    def __init__(self, master=None):
        """Build the fixed-size installer window and show the first page."""
        Frame.__init__(self, master)
        self.config(width = WINWIDTH, height = WINHEIGHT, background=BGCOLOR)
        self.createMenu()
        # Keep the frame at its configured size regardless of its children.
        self.pack_propagate(0)
        self.pack()
        self.createWidgets()
        self.update()
        self.step1()
existingDrives = []
newDrives = []
sdCardDev = -1
class Meter( Frame ):
    """Thin horizontal progress bar drawn as a rectangle on a Canvas."""

    def __init__(self, master, width=400, height=4, bg="#909090", progcolor="#f6e5fd", *args, **kw ):
        Frame.__init__(self, master, bg=bg, width=width, height=height, *args, **kw)
        self.canvas = Canvas( self, width=self['width'], height=self['height'] )
        self.canvas.pack( fill='both', expand=1 )
        # Start with a zero-width fill; redraw() stretches it to the
        # current progress fraction.
        self.progfill = self.canvas.create_rectangle( 0, 0, 0, height, fill=progcolor, width=0 )
        self.progress = 0.0
        self.setProgress(0)
        self.bind('<Configure>', self.redraw)

    def redraw(self, event=None):
        """Resize the fill rectangle to match the stored progress."""
        self.canvas.update_idletasks()
        fill_width = self.canvas.winfo_width() * self.progress
        self.canvas.coords( self.progfill, 0, 0, fill_width, self.canvas.winfo_height() )
        self.canvas.update_idletasks()

    def setProgress(self, value=0.0):
        """Store *value* clamped into [0, 1] and repaint the bar."""
        clamped = value
        if clamped < 0.0:
            clamped = 0.0
        elif clamped > 1.0:
            clamped = 1.0
        self.progress = clamped
        self.redraw()
def formatSDDevice():
    """Format the detected SD card and install Coder onto it.

    Runs the privileged formatsdcard.py helper through osascript (to obtain
    administrator rights), tails the helper's log file for COMPLETEPCT lines
    and publishes state through the module-level globals formatProgress /
    formatComplete / formatError, which the GUI polls in pollFormatProgress().
    Intended to run on a background thread (see preStep4).
    """
    global formatError, formatComplete, formatProgress
    formatComplete = False
    formatError = None
    formatProgress = 0.0
    device = app.newDrives[0]
    # Raw strings so the backslash escapes reach the re engine intact.
    devreg = re.compile(r'^/dev/rdisk(\d+)$')
    m = devreg.match( device )
    if m:
        sdCardDev = m.group(1)
        print( "SD Card Device: " + str(sdCardDev) )
        logfile = "."
        if 'TMPDIR' in os.environ:
            logfile = os.path.normpath( os.environ['TMPDIR'] )
        logfile = logfile + "/piformat.log"
        pythonexe = os.path.dirname(sys.argv[0]) + "/../MacOS/python"
        # Truncate/create the log so only this run's output is seen.
        open( logfile, 'w' ).close()
        command = "osascript -e 'do shell script \"\\\"" + pythonexe + "\\\" -u formatsdcard.py really " + str( sdCardDev ) + " > " + logfile + " \" with administrator privileges'"
        print( "SYSTEM: " + command )
        #os.system( command )
        status = open( logfile, 'r' )
        st_results = os.stat(logfile)
        st_size = st_results[6]
        status.seek(st_size)
        proc = subprocess.Popen( command, shell=True )
        # Compile once, outside the tail loop (was re-compiled per line).
        progreg = re.compile(r'^COMPLETEPCT: (\d\.\d+)')
        while True:
            #send siginfo ctrl-t
            #proc.stdin.write('\x14')
            root.update()
            where = status.tell()
            line = status.readline()
            if not line and proc.poll() is not None:
                # No more output and the helper has exited: done tailing.
                break
            elif not line:
                # Nothing new yet; rewind past the partial read and wait.
                time.sleep(0.01)
                status.seek(where)
            else:
                print( "PROGRESS: " + line.rstrip() )
                m = progreg.match( line )
                if m:
                    formatProgress = float( m.group(1) )
            time.sleep( 0.01 )
            #threading.Event().wait(0.01)
        print( "PROCESS COMPLETED" )
        if proc.poll() == 0:
            formatComplete = True
        else:
            formatError = "There was an error while writing to your SD Card."
        status.close()
    else:
        formatError = "Error: unexpected device id."
        #app.errorRetry( "Error: unexpected device id." )
##MAIN
# Module-level progress state shared between the GUI and the format thread.
formatComplete = False
formatError = None
formatProgress = 0.0
formatThread = None
# Build the root window, centered on the screen.
root = Tk()
screen_width = root.winfo_screenwidth()
screen_height = root.winfo_screenheight()
root.geometry("%dx%d+%d+%d" % (WINWIDTH, WINHEIGHT, screen_width/2-500, screen_height/2-250))
root.wm_title("Coder for Pi")
root.wm_client("Coder for Pi")
# Raise the window once without keeping it permanently on top.
root.lift()
root.call('wm', 'attributes', '.', '-topmost', '1')
root.call('wm', 'attributes', '.', '-topmost', '0')
app = Application(master=root)
app.mainloop()
#root.destroy()
|
{
"content_hash": "45b86df0f9f333b68bcf9da17002e5af",
"timestamp": "",
"source": "github",
"line_count": 367,
"max_line_length": 210,
"avg_line_length": 36.084468664850135,
"alnum_prop": 0.6071131918749528,
"repo_name": "Adai0808/coder",
"id": "735358888753d205d347f323b50ccba7057a0762",
"size": "13943",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "installer/macosx/CoderSetup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "55046"
},
{
"name": "HTML",
"bytes": "48678"
},
{
"name": "JavaScript",
"bytes": "301967"
},
{
"name": "Python",
"bytes": "22614"
},
{
"name": "Shell",
"bytes": "23951"
}
],
"symlink_target": ""
}
|
import random
import unittest
import numpy as np
import paddle
import paddle.distributed as dist
import paddle.distributed.fleet as fleet
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.distributed.fleet.meta_parallel import LayerDesc, PipelineLayer
from paddle.fluid import layers
from paddle.fluid.dygraph.layers import Layer
def set_random_seed(seed, dp_id, rank_id):
    """Seed the Python, NumPy and Paddle RNGs for reproducibility.

    NumPy and Paddle are offset by the data-parallel rank so that data-parallel
    workers draw different samples; Python's RNG uses the base seed.
    """
    offset_seed = seed + dp_id
    random.seed(seed)
    np.random.seed(offset_seed)
    paddle.seed(offset_seed)
# Hyper-parameters shared by the toy pipeline model below.
batch_size = 8
length = 8
micro_batch_size = 2
vocab_size = 128
hidden_size = 16
d_model = hidden_size
dim_feedforward = 4 * d_model
class EmbeddingNet(Layer):
    """Word + position embedding lookup used as the first pipeline stage."""

    def __init__(self):
        super().__init__()
        self.word_embeddings = nn.Embedding(vocab_size, hidden_size)
        self.position_embeddings = nn.Embedding(vocab_size, hidden_size)

    def forward(self, x):
        # Both tables are indexed with the same ids in this toy network.
        return self.word_embeddings(x) + self.position_embeddings(x)
class TransformerNet(Layer):
    """Minimal single-head self-attention block for the pipeline test."""

    def __init__(self):
        super().__init__()
        self.linear1 = nn.Linear(d_model, dim_feedforward)
        self.linear2 = nn.Linear(dim_feedforward, d_model)
        self.q_proj = nn.Linear(d_model, d_model)
        self.k_proj = nn.Linear(d_model, d_model)
        self.v_proj = nn.Linear(d_model, d_model)
        self.norm1 = nn.LayerNorm(d_model, epsilon=1e-5)

    def forward(self, x):
        queries = self.q_proj(x)
        keys = self.k_proj(x)
        values = self.v_proj(x)
        # Scaled dot-product attention scores.
        scores = layers.matmul(
            x=queries, y=keys, transpose_y=True, alpha=d_model**-0.5
        )
        attn = F.dropout(F.softmax(scores), 0.2)
        context = layers.matmul(attn, values)
        # NOTE(review): the norm is applied before the residual add here —
        # unusual ordering, preserved as-is.
        hidden = context + self.norm1(context)
        return self.linear2(F.gelu(self.linear1(hidden), approximate=True))
class EmbeddingPipe(EmbeddingNet):
    """Pipeline-stage wrapper around EmbeddingNet; behavior unchanged."""

    def forward(self, x):
        embedded = super().forward(x)
        return embedded
class TransformerNetPipe(TransformerNet):
    """Pipeline-stage wrapper around TransformerNet; behavior unchanged."""

    def forward(self, x):
        return super().forward(x)
class CriterionPipe(Layer):
    """Trivial pipeline loss: mean of the last stage output (label unused)."""

    def __init__(self):
        super().__init__()

    def forward(self, out, label):
        return out.mean()
class ModelPipe(PipelineLayer):
    """Embedding stage followed by two transformer stages, with recompute."""

    def __init__(self, hcg):
        self.hcg = hcg
        self.descs = [LayerDesc(EmbeddingPipe)]
        self.descs.extend(LayerDesc(TransformerNetPipe) for _ in range(2))
        super().__init__(
            layers=self.descs,
            loss_fn=CriterionPipe(),
            topology=self.hcg.topology(),
            seg_method="layer:TransformerNetPipe",
            recompute_interval=1,
            recompute_ctx={
                "mp_group": self.hcg.get_model_parallel_group(),
                "offload": False,
                "partition": False,
            },
        )
class TestDistPPTraning(unittest.TestCase):
    """Pipeline-parallel training smoke test with recompute enabled."""

    def setUp(self):
        # Pure 2-stage pipeline: no data or model parallelism.
        self.model_parallel_size = 1
        self.data_parallel_size = 1
        self.pipeline_parallel_size = 2
        dist_strategy = fleet.DistributedStrategy()
        dist_strategy.hybrid_configs = {
            "dp_degree": self.data_parallel_size,
            "mp_degree": self.model_parallel_size,
            "pp_degree": self.pipeline_parallel_size,
        }
        dist_strategy.pipeline_configs = {
            "accumulate_steps": batch_size // micro_batch_size,
            "micro_batch_size": micro_batch_size,
        }
        fleet.init(is_collective=True, strategy=dist_strategy)

    def test_pp_model(self):
        comm_group = fleet.get_hybrid_communicate_group()
        # Queried for parity with the original test; not all are asserted on.
        word_size = comm_group.get_model_parallel_world_size()
        dp_id = comm_group.get_data_parallel_rank()
        pp_id = comm_group.get_stage_id()
        rank_id = dist.get_rank()
        topology = comm_group.topology()
        set_random_seed(1024, dp_id, rank_id)

        pipe_model = ModelPipe(comm_group)
        lr_scheduler = paddle.optimizer.lr.PiecewiseDecay(
            boundaries=[2], values=[0.001, 0.002], verbose=True
        )
        sgd_optimizer = paddle.optimizer.SGD(
            learning_rate=lr_scheduler, parameters=pipe_model.parameters()
        )
        pipe_model = fleet.distributed_model(pipe_model)
        sgd_optimizer = fleet.distributed_optimizer(sgd_optimizer)

        for step_id in range(5):
            token_ids = np.random.randint(0, vocab_size, size=[batch_size, length])
            batch = paddle.to_tensor(token_ids)
            batch.stop_gradient = True
            loss = pipe_model.train_batch([batch, batch], sgd_optimizer, lr_scheduler)
            # TODO(shenliang03) add utest for loss
            print("loss: ", loss)
|
{
"content_hash": "2e1d4c0f30b4981a8cc0a9fcdc237d8f",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 80,
"avg_line_length": 28.72289156626506,
"alnum_prop": 0.5958473154362416,
"repo_name": "PaddlePaddle/Paddle",
"id": "d9b0e94ae641576acc1c39e63368bbbea81db251",
"size": "5379",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/collective/fleet/hybrid_parallel_pp_recompute.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36848680"
},
{
"name": "CMake",
"bytes": "902619"
},
{
"name": "Cuda",
"bytes": "5227207"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36203874"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553177"
}
],
"symlink_target": ""
}
|
__author__ = 'nmelo'
# Importing the module makes its step definitions available to the framework.
import step_lights
|
{
"content_hash": "da334cfb609574ae928c1c607706dfc0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 20,
"avg_line_length": 13.666666666666666,
"alnum_prop": 0.6585365853658537,
"repo_name": "nmelo/alexa-appkit-lights",
"id": "ebc453c40db504c40198b490fea14f7fd3900fb7",
"size": "41",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "features/steps/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "292"
},
{
"name": "HTML",
"bytes": "325"
},
{
"name": "Python",
"bytes": "9266"
}
],
"symlink_target": ""
}
|
import time
from k0001def import appConfig, outputs, inputs, requests, sequence, wachtgroen, timers, countData, extend, \
extend_vag1, extend_vag2, extend_vag3, extend_vag4
from k0001func import set_defaults, conflict_manager, set_remain_green, set_demand_timers, request_green, \
sequence_evaluator, delay_manager, extend_green, conflict_status, meeverlengen
# if appConfig['automaat']['raspberry_pi']:
# from k0001def import rpiConfig
# if appConfig['simulatie']['sumo']:
# from k0001def import sumoConfig
#
def _apply_static_state(wr, gl):
    """Force every signal group into a fixed lamp state and clear demands.

    Shared by the non-controlling TLC states (all off, flashing yellow,
    all red): *wr* drives the WR output, *gl* drives the GL (amber) output,
    every other phase output is switched off and the requests/sequence/delay
    bookkeeping is reset — exactly what the original repeated inline for
    each of the three states.
    """
    for signal_group in appConfig['fasecycli']:
        outputs[signal_group]['WR'] = wr
        outputs[signal_group]['RVG'] = False
        outputs[signal_group]['VG'] = False
        outputs[signal_group]['VAG1'] = False
        outputs[signal_group]['VAG2'] = False
        outputs[signal_group]['WG'] = False
        outputs[signal_group]['VAG3'] = False
        outputs[signal_group]['MVG'] = False
        outputs[signal_group]['VAG4'] = False
        outputs[signal_group]['GL'] = gl
        outputs[signal_group]['demand'] = False
        outputs[signal_group]['sequence'] = 0
        outputs[signal_group]['delay'] = 0
        requests[signal_group] = False
        sequence[signal_group] = 0
        timers[signal_group]['delay'] = 0


# (signal group, detector) wiring.  Order matters: request/extend calls are
# made in exactly this sequence, matching the original hand-written list.
_SIGNAL_DETECTORS = (
    ('fc01', 'd011'), ('fc01', 'd012'),
    ('fc02', 'd021'), ('fc02', 'd022'),
    ('fc03', 'd031'), ('fc03', 'd032'),
    ('fc04', 'd041'), ('fc04', 'd042'),
    ('fc05', 'd051'), ('fc05', 'd052'),
    ('fc08', 'd081'), ('fc08', 'd082'),
    ('fc09', 'd091'), ('fc09', 'd092'),
    ('fc10', 'd101'), ('fc10', 'd102'),
    ('fc11', 'd111'), ('fc11', 'd112'),
    ('fc21', 'd211'),
    ('fc24', 'd241'),
    ('fc25', 'd251'),
    ('fc27', 'd271'),
    ('fc31', 'd311'), ('fc31', 'd312'),
    ('fc32', 'd321'), ('fc32', 'd322'),
    ('fc33', 'd331'), ('fc33', 'd332'),
    ('fc35', 'd351'), ('fc35', 'd352'),
    ('fc36', 'd361'), ('fc36', 'd362'),
    ('fc37', 'd371'), ('fc37', 'd372'),
)
# Only the first 22 pairs (fc01..fc27) take part in green extension; the
# original never called extend_green for fc31..fc37.
_EXTENDABLE_SIGNAL_DETECTORS = _SIGNAL_DETECTORS[:22]


def open_tlc(step):
    """Run one controller tick for intersection k0001.

    *step* is the SUMO simulation step; on real hardware the wall clock (in
    tenths of a second) is used instead.  The controller state is selected
    from the is01..is04 hardware inputs (all off / flashing yellow / all red
    / in control); in SUMO the controller is always in control.
    """
    if not appConfig['simulatie']['sumo']:
        now = time.time() * 10
    else:
        now = step
    tlc_state_control = False
    tlc_state_all_off = False
    tlc_state_flashing_yellow = False
    tlc_state_all_red = False
    if not appConfig['simulatie']['sumo']:
        if inputs['is01']:
            tlc_state_all_off = True
        elif inputs['is02']:
            tlc_state_flashing_yellow = True
        elif inputs['is03']:
            tlc_state_all_red = True
        elif inputs['is04']:
            tlc_state_control = True
    else:
        tlc_state_control = True
    # tlc state - in control
    if tlc_state_control:
        set_defaults()
        # setCountData()
        conflict_manager()
        set_remain_green()
        # Detector/demand bookkeeping, then green requests, for every
        # (signal group, detector) pair in wiring order.
        for _, detector in _SIGNAL_DETECTORS:
            set_demand_timers(detector, inputs[detector], now)
        for fc, detector in _SIGNAL_DETECTORS:
            request_green(fc, detector, inputs[detector], now)
        # set_meeaanvragen()
        # set_cyclische_aanvragen()
        sequence_evaluator(now)
        delay_manager(now)
        for fc, detector in _EXTENDABLE_SIGNAL_DETECTORS:
            extend_green(fc, detector, inputs[detector], now)
        # Per-signal-group phase state machine.  The output flags are
        # presumably (Dutch naming — confirm): WR wait-red, RVG red before
        # green, VG fixed green, VAG1..VAG4 extendable green phases,
        # WG wait-green, MVG extended green, GL amber.
        for signal_group in appConfig['fasecycli']:
            if outputs[signal_group]['WR']:
                if timers[signal_group]['GL'] > 0:
                    timers[signal_group]['GL'] = 0
                if timers[signal_group]['R'] == 0:
                    timers[signal_group]['R'] = now
                if sequence[signal_group] == 1:
                    outputs[signal_group]['WR'] = False
                    outputs[signal_group]['RVG'] = True
            if outputs[signal_group]['RVG']:
                # Leave red as soon as no conflicting group blocks us.
                if not conflict_status(signal_group):
                    outputs[signal_group]['RVG'] = False
                    outputs[signal_group]['VG'] = True
            if outputs[signal_group]['VG']:
                if sequence[signal_group] > 0:
                    sequence[signal_group] = 0
                if timers[signal_group]['R'] > 0:
                    timers[signal_group]['R'] = 0
                if timers[signal_group]['G'] == 0:
                    timers[signal_group]['G'] = now
                if timers[signal_group]['VG'] == 0:
                    timers[signal_group]['VG'] = now
                if timers[signal_group]['VAG1'] == 0:
                    timers[signal_group]['VAG1'] = now
                if timers[signal_group]['VAG2'] == 0:
                    timers[signal_group]['VAG2'] = now
                # Fixed green elapses after its configured base time.
                if timers[signal_group]['VG'] > 0 and now - timers[signal_group]['VG'] >= timers[signal_group]['basis']['vastgroen']:
                    outputs[signal_group]['VG'] = False
                    outputs[signal_group]['VAG1'] = True
            if outputs[signal_group]['VAG1']:
                if timers[signal_group]['VG'] > 0:
                    timers[signal_group]['VG'] = 0
                if not extend[signal_group] & extend_vag1 or timers[signal_group]['VAG1'] > 0 and now - timers[signal_group]['VAG1'] >= \
                        timers[signal_group]['maximum']['VAG1']:
                    outputs[signal_group]['VAG1'] = False
                    outputs[signal_group]['VAG2'] = True
            if outputs[signal_group]['VAG2']:
                if timers[signal_group]['VAG1'] > 0:
                    timers[signal_group]['VAG1'] = 0
                if not extend[signal_group] & extend_vag2 or timers[signal_group]['VAG2'] > 0 and now - timers[signal_group]['VAG2'] >= \
                        timers[signal_group]['maximum']['VAG2']:
                    outputs[signal_group]['VAG2'] = False
                    outputs[signal_group]['WG'] = True
            if outputs[signal_group]['WG']:
                if timers[signal_group]['VAG2'] > 0:
                    timers[signal_group]['VAG2'] = 0
                if not wachtgroen[signal_group]:
                    outputs[signal_group]['WG'] = False
                    outputs[signal_group]['VAG3'] = True
            if outputs[signal_group]['VAG3']:
                if timers[signal_group]['VAG3'] == 0:
                    timers[signal_group]['VAG3'] = now
                if not extend[signal_group] & extend_vag3 or timers[signal_group]['VAG3'] > 0 and now - timers[signal_group]['VAG3'] >= \
                        timers[signal_group]['maximum']['VAG3']:
                    outputs[signal_group]['VAG3'] = False
                    outputs[signal_group]['MVG'] = True
            if outputs[signal_group]['MVG']:
                if timers[signal_group]['VAG3'] > 0:
                    timers[signal_group]['VAG3'] = 0
                if not meeverlengen(signal_group):
                    outputs[signal_group]['MVG'] = False
                    outputs[signal_group]['VAG4'] = True
            if outputs[signal_group]['VAG4']:
                if timers[signal_group]['VAG4'] == 0:
                    timers[signal_group]['VAG4'] = now
                if not extend[signal_group] & extend_vag4 or timers[signal_group]['VAG4'] > 0 and now - timers[signal_group]['VAG4'] >= \
                        timers[signal_group]['maximum']['VAG4']:
                    outputs[signal_group]['VAG4'] = False
                    outputs[signal_group]['GL'] = True
            if outputs[signal_group]['GL']:
                if timers[signal_group]['G'] > 0:
                    timers[signal_group]['G'] = 0
                if timers[signal_group]['VAG4'] > 0:
                    timers[signal_group]['VAG4'] = 0
                if timers[signal_group]['GL'] == 0:
                    timers[signal_group]['GL'] = now
                # Amber elapses after its configured base time, back to red.
                if timers[signal_group]['GL'] > 0 and now - timers[signal_group]['GL'] >= timers[signal_group]['basis']['geel']:
                    outputs[signal_group]['GL'] = False
                    outputs[signal_group]['WR'] = True
        # Publish secondary outputs (demand / sequence / delay / counts).
        for signal_group in appConfig['fasecycli']:
            outputs[signal_group]['demand'] = requests[signal_group]
            outputs[signal_group]['sequence'] = sequence[signal_group]
            if timers[signal_group]['delay'] > 0:
                outputs[signal_group]['delay'] = now - timers[signal_group]['delay']
            else:
                outputs[signal_group]['delay'] = 0
            outputs[signal_group]['countData'] = countData[signal_group]
    # tlc state - flashing yellow
    if tlc_state_flashing_yellow:
        # NOTE(review): amber is only on during steps where step % 5 == 0,
        # i.e. 1 tick in 5 — preserved exactly from the original.
        amber_state = False
        if step % 5 == 0:
            amber_state ^= True
        _apply_static_state(False, amber_state)
    # tlc state - all red
    if tlc_state_all_red:
        _apply_static_state(True, False)
    # tlc state - all off
    if tlc_state_all_off:
        _apply_static_state(False, False)
|
{
"content_hash": "7b91f5c6d49abf1de0fd28707ca909c4",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 137,
"avg_line_length": 43.95986622073578,
"alnum_prop": 0.5289866098600122,
"repo_name": "MartijnHarmenzon/openTLC",
"id": "aa36565d38a2816dcfba2e1976a60e16f0ca660b",
"size": "13171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "k0001app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "41542"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: widen feedjack Tag.name from 127 to 255 characters."""

    def forwards(self, orm):
        """Apply: alter Tag.name to CharField(unique=True, max_length=255)."""
        # Changing field 'Tag.name'
        db.alter_column('feedjack_tag', 'name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255))

    def backwards(self, orm):
        """Revert: restore Tag.name to CharField(max_length=127, unique=True)."""
        # Changing field 'Tag.name'
        db.alter_column('feedjack_tag', 'name', self.gf('django.db.models.fields.CharField')(max_length=127, unique=True))

    # Frozen ORM state generated by South; do not edit by hand.
    models = {
        'feedjack.feed': {
            'Meta': {'ordering': "('name', 'feed_url')", 'object_name': 'Feed'},
            'etag': ('django.db.models.fields.CharField', [], {'max_length': '127', 'blank': 'True'}),
            'feed_url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
            'filters': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'feeds'", 'blank': 'True', 'to': "orm['feedjack.Filter']"}),
            'filters_logic': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'shortname': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'skip_errors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'tagline': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
        },
        'feedjack.filter': {
            'Meta': {'object_name': 'Filter'},
            'base': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filters'", 'to': "orm['feedjack.FilterBase']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'parameter': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'})
        },
        'feedjack.filterbase': {
            'Meta': {'object_name': 'FilterBase'},
            'crossref': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'crossref_rebuild': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
            'crossref_span': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'crossref_timeline': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
            'handler_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
        },
        'feedjack.filterresult': {
            'Meta': {'object_name': 'FilterResult'},
            'filter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feedjack.Filter']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filtering_results'", 'to': "orm['feedjack.Post']"}),
            'result': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'feedjack.link': {
            'Meta': {'object_name': 'Link'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
        },
        'feedjack.post': {
            'Meta': {'ordering': "('-date_modified',)", 'unique_together': "(('feed', 'guid'),)", 'object_name': 'Post'},
            'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'author_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'comments': ('django.db.models.fields.URLField', [], {'max_length': '511', 'blank': 'True'}),
            'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['feedjack.Feed']"}),
            'filtering_result': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'guid': ('django.db.models.fields.CharField', [], {'max_length': '511', 'db_index': 'True'}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '2047'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['feedjack.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '2047'})
        },
        'feedjack.site': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Site'},
            'cache_duration': ('django.db.models.fields.PositiveIntegerField', [], {'default': '86400'}),
            'default_site': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'greets': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'links': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['feedjack.Link']", 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'order_posts_by': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
            'posts_per_page': ('django.db.models.fields.PositiveIntegerField', [], {'default': '20'}),
            'show_tagcloud': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'tagcloud_levels': ('django.db.models.fields.PositiveIntegerField', [], {'default': '5'}),
            'template': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'use_internal_cache': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'welcome': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        'feedjack.subscriber': {
            'Meta': {'ordering': "('site', 'name', 'feed')", 'unique_together': "(('site', 'feed'),)", 'object_name': 'Subscriber'},
            'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feedjack.Feed']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'shortname': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feedjack.Site']"})
        },
        'feedjack.tag': {
            'Meta': {'ordering': "('name',)", 'object_name': 'Tag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2047'})
        }
    }

    complete_apps = ['feedjack']
|
{
"content_hash": "623fd419c5dca7a31a93df2304f6181c",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 181,
"avg_line_length": 75.30578512396694,
"alnum_prop": 0.5485074626865671,
"repo_name": "allo-/feedjack",
"id": "70847c05cc3de822adec443cef0d4ce01ccf9d47",
"size": "9136",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "feedjack/migrations.south/0017_auto__chg_field_tag_name.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "22150"
},
{
"name": "CoffeeScript",
"bytes": "15377"
},
{
"name": "HTML",
"bytes": "39412"
},
{
"name": "Python",
"bytes": "388184"
}
],
"symlink_target": ""
}
|
'''
Dictionary store
================
Use a Python dictionary as a store.
'''
__all__ = ('DictStore', )
from kivy.compat import iteritems
from kivy.storage import AbstractStore
class DictStore(AbstractStore):
    '''Store implementation backed by a plain Python `dict`.
    '''

    def __init__(self, data=None, **kwargs):
        super(DictStore, self).__init__(**kwargs)
        self.data = {} if data is None else data

    def store_exists(self, key):
        return key in self.data

    def store_get(self, key):
        return self.data[key]

    def store_put(self, key, value):
        self.data[key] = value
        return True

    def store_delete(self, key):
        del self.data[key]
        return True

    def store_find(self, filters):
        # Yield every (key, record) whose record matches all filter pairs.
        for key, record in iteritems(self.data):
            matches = all(
                fkey in record and record[fkey] == fvalue
                for fkey, fvalue in iteritems(filters)
            )
            if matches:
                yield key, record

    def store_count(self):
        return len(self.data)

    def store_keys(self):
        return self.data.keys()
|
{
"content_hash": "ea72a34bc9559ea9db10f46d26810ebf",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 51,
"avg_line_length": 23.236363636363638,
"alnum_prop": 0.5320813771517997,
"repo_name": "wangjun/kivy",
"id": "d779e1d3c3d27d2593b183dda9662f9afbb5dc9c",
"size": "1278",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "kivy/storage/dictstore.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1372"
},
{
"name": "C",
"bytes": "152467"
},
{
"name": "Emacs Lisp",
"bytes": "9603"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "HTML",
"bytes": "19384"
},
{
"name": "Makefile",
"bytes": "3244"
},
{
"name": "Python",
"bytes": "2604470"
},
{
"name": "Shell",
"bytes": "4638"
},
{
"name": "VimL",
"bytes": "1123"
}
],
"symlink_target": ""
}
|
"""Provisions Android devices with settings required for bots.
Usage:
./provision_devices.py [-d <device serial number>]
"""
import argparse
import json
import logging
import os
import posixpath
import re
import subprocess
import sys
import time
from pylib import constants
from pylib import device_settings
from pylib.device import battery_utils
from pylib.device import device_blacklist
from pylib.device import device_errors
from pylib.device import device_utils
from pylib.utils import run_tests_helper
from pylib.utils import timeout_retry
sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
'third_party', 'android_testrunner'))
import errors
class _DEFAULT_TIMEOUTS(object):
  """Default per-reboot wait timeouts, in seconds, by Android version."""
  # L can take a while to reboot after a wipe.
  LOLLIPOP = 600
  PRE_LOLLIPOP = 180
  # Human-readable summary used by the --reboot-timeout help string.
  HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
class _PHASES(object):
  """Names of the provisioning phases selectable via --phase."""
  WIPE = 'wipe'
  PROPERTIES = 'properties'
  FINISH = 'finish'
  # Order in which phases run when no --phase argument is given.
  ALL = [WIPE, PROPERTIES, FINISH]
def ProvisionDevices(options):
  """Provisions all healthy attached devices (or one requested) in parallel.

  Args:
    options: argparse.Namespace produced by main().

  Returns:
    0 on success.

  Raises:
    device_errors.DeviceUnreachableError: --device was given but that serial
        is not among the healthy devices.
    device_errors.NoDevicesError: every device ended up on the blacklist.
  """
  devices = device_utils.DeviceUtils.HealthyDevices()
  if options.device:
    devices = [d for d in devices if d == options.device]
    if not devices:
      raise device_errors.DeviceUnreachableError(options.device)
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  parallel_devices.pMap(ProvisionDevice, options)
  if options.auto_reconnect:
    _LaunchHostHeartbeat()
  blacklist = device_blacklist.ReadBlacklist()
  if options.output_device_blacklist:
    with open(options.output_device_blacklist, 'w') as f:
      json.dump(blacklist, f)
  # Provisioning failures blacklist devices rather than raising; only fail
  # the whole run if no device survived.
  if all(d in blacklist for d in devices):
    raise device_errors.NoDevicesError
  return 0
def ProvisionDevice(device, options):
  """Runs the requested provisioning phases on a single device.

  Each selected phase waits for the device to boot, runs, and (except for
  the final phase) reboots the device.  Devices that time out or fail a
  command are added to the device blacklist instead of raising.

  Args:
    device: DeviceUtils instance for the device to provision.
    options: argparse.Namespace produced by main().
  """
  if options.reboot_timeout:
    reboot_timeout = options.reboot_timeout
  elif (device.build_version_sdk >=
        constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP):
    reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
  else:
    reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
  def should_run_phase(phase_name):
    # An empty --phase list means "run every phase".
    return not options.phases or phase_name in options.phases
  def run_phase(phase_func, reboot=True):
    # Wait out any reboot triggered by the previous phase before starting.
    device.WaitUntilFullyBooted(timeout=reboot_timeout)
    phase_func(device, options)
    if reboot:
      device.Reboot(False, retries=0)
      device.adb.WaitForDevice()
  try:
    if should_run_phase(_PHASES.WIPE):
      run_phase(WipeDevice)
    if should_run_phase(_PHASES.PROPERTIES):
      run_phase(SetProperties)
    if should_run_phase(_PHASES.FINISH):
      # Last phase; no reboot needed afterwards.
      run_phase(FinishProvisioning, reboot=False)
  except (errors.WaitForResponseTimedOutError,
          device_errors.CommandTimeoutError):
    logging.exception('Timed out waiting for device %s. Adding to blacklist.',
                      str(device))
    device_blacklist.ExtendBlacklist([str(device)])
  except device_errors.CommandFailedError:
    logging.exception('Failed to provision device %s. Adding to blacklist.',
                      str(device))
    device_blacklist.ExtendBlacklist([str(device)])
def WipeDevice(device, options):
  """Wipes data from device, keeping only the adb_keys for authorization.

  After wiping data on a device that has been authorized, adb can still
  communicate with the device, but after reboot the device will need to be
  re-authorized because the adb keys file is stored in /data/misc/adb/.
  Thus, adb_keys file is rewritten so the device does not need to be
  re-authorized.

  Arguments:
    device: the device to wipe
    options: provisioning options; the wipe is skipped entirely when
        options.skip_wipe is set, and options.adb_key_files lists extra
        public-key files to merge back in after the wipe.
  """
  if options.skip_wipe:
    return
  try:
    device.EnableRoot()
    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
    if device_authorized:
      # Save the authorized keys before they are destroyed by the wipe.
      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
                                 as_root=True).splitlines()
    device.RunShellCommand(['wipe', 'data'],
                           as_root=True, check_return=True)
    device.adb.WaitForDevice()
    if device_authorized:
      adb_keys_set = set(adb_keys)
      for adb_key_file in options.adb_key_files or []:
        try:
          with open(adb_key_file, 'r') as f:
            adb_public_keys = f.readlines()
          adb_keys_set.update(adb_public_keys)
        except IOError:
          logging.warning('Unable to find adb keys file %s.' % adb_key_file)
      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
  except device_errors.CommandFailedError:
    # Best effort: a partially wiped device may still be provisionable.
    logging.exception('Possible failure while wiping the device. '
                      'Attempting to continue.')
def _WriteAdbKeysFile(device, adb_keys_string):
  """Writes adb_keys_string to the device's adb keys file as root.

  Recreates the containing directory and runs restorecon (restores SELinux
  security contexts) on both the directory and the file so adbd will accept
  the rewritten keys.
  """
  dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
  device.RunShellCommand(['mkdir', '-p', dir_path],
                         as_root=True, check_return=True)
  device.RunShellCommand(['restorecon', dir_path],
                         as_root=True, check_return=True)
  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
                         as_root=True, check_return=True)
def SetProperties(device, options):
  """Configures local properties and content settings for deterministic tests.

  Applies the deterministic device settings, then toggles location, mock
  location, lock screen, and network access according to `options`.
  """
  try:
    device.EnableRoot()
  except device_errors.CommandFailedError as e:
    # Non-rooted devices can still take most settings; log and continue.
    logging.warning(str(e))
  _ConfigureLocalProperties(device, options.enable_java_debug)
  device_settings.ConfigureContentSettings(
      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
  if options.disable_location:
    device_settings.ConfigureContentSettings(
        device, device_settings.DISABLE_LOCATION_SETTINGS)
  else:
    device_settings.ConfigureContentSettings(
        device, device_settings.ENABLE_LOCATION_SETTINGS)
  if options.disable_mock_location:
    device_settings.ConfigureContentSettings(
        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
  else:
    device_settings.ConfigureContentSettings(
        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)
  device_settings.SetLockScreenSettings(device)
  if options.disable_network:
    device_settings.ConfigureContentSettings(
        device, device_settings.NETWORK_DISABLED_SETTINGS)
def _ConfigureLocalProperties(device, java_debug=True):
  """Set standard readonly testing device properties prior to reboot.

  Args:
    device: DeviceUtils instance to write the local.prop file on.
    java_debug: also enable Java asserts and JNI checking when True.
  """
  local_props = [
      'persist.sys.usb.config=adb',
      'ro.monkey=1',
      'ro.test_harness=1',
      'ro.audio.silent=1',
      'ro.setupwizard.mode=DISABLED',
      ]
  if java_debug:
    local_props.append(
        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
    local_props.append('debug.checkjni=1')
  try:
    device.WriteFile(
        constants.DEVICE_LOCAL_PROPERTIES_PATH,
        '\n'.join(local_props), as_root=True)
    # Android will not respect the local props file if it is world writable.
    device.RunShellCommand(
        ['chmod', '644', constants.DEVICE_LOCAL_PROPERTIES_PATH],
        as_root=True, check_return=True)
  except device_errors.CommandFailedError:
    logging.exception('Failed to configure local properties.')
def FinishProvisioning(device, options):
  """Final phase: battery checks, clock sync, property dump, adb_reboot.

  Optionally charges/cools the battery per `options`, sets the device clock
  to host UTC time, logs all system properties, and pushes the adb_reboot
  watchdog when --auto-reconnect was requested.
  """
  if options.min_battery_level is not None:
    try:
      battery = battery_utils.BatteryUtils(device)
      battery.ChargeDeviceToLevel(options.min_battery_level)
    except device_errors.CommandFailedError:
      logging.exception('Unable to charge device to specified level.')
  if options.max_battery_temp is not None:
    try:
      battery = battery_utils.BatteryUtils(device)
      battery.LetBatteryCoolToTemperature(options.max_battery_temp)
    except device_errors.CommandFailedError:
      logging.exception('Unable to let battery cool to specified temperature.')
  # Sync the device clock with host UTC time.
  device.RunShellCommand(
      ['date', '-s', time.strftime('%Y%m%d.%H%M%S', time.gmtime())],
      as_root=True, check_return=True)
  props = device.RunShellCommand('getprop', check_return=True)
  for prop in props:
    logging.info(' %s' % prop)
  if options.auto_reconnect:
    _PushAndLaunchAdbReboot(device, options.target)
def _PushAndLaunchAdbReboot(device, target):
  """Pushes and launches the adb_reboot binary on the device.

  Arguments:
    device: The DeviceUtils instance for the device to which the adb_reboot
        binary should be pushed.
    target: The build target (example, Debug or Release) which helps in
        locating the adb_reboot binary.
  """
  logging.info('Will push and launch adb_reboot on %s' % str(device))
  # Kill if adb_reboot is already running.
  device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
  # Push adb_reboot
  logging.info(' Pushing adb_reboot ...')
  adb_reboot = os.path.join(constants.DIR_SOURCE_ROOT,
                            'out/%s/adb_reboot' % target)
  device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
  # Launch adb_reboot
  logging.info(' Launching adb_reboot ...')
  device.RunShellCommand(
      [device.GetDevicePieWrapper(), '/data/local/tmp/adb_reboot'],
      check_return=True)
def _LaunchHostHeartbeat():
  """(Re)starts host_heartbeat.py on the host, killing any old instance."""
  # Kill if existing host_heartbeat
  KillHostHeartbeat()
  # Launch a new host_heartbeat
  logging.info('Spawning host heartbeat...')
  subprocess.Popen([os.path.join(constants.DIR_SOURCE_ROOT,
                                 'build/android/host_heartbeat.py')])
def KillHostHeartbeat():
  """Kills every running host_heartbeat process found in `ps aux` output."""
  listing, _ = subprocess.Popen(['ps', 'aux'],
                                stdout=subprocess.PIPE).communicate()
  for line in re.findall('\\n.*host_heartbeat.*', listing):
    logging.info('An instance of host heart beart running... will kill')
    # In `ps aux` output the PID is the second whitespace-separated field
    # (the matched text starts with the newline, so field 0 is the user).
    pid = line.split()[1]
    subprocess.call(['kill', str(pid)])
def main():
  """Parses command-line options and provisions attached devices.

  Returns:
    Exit code from ProvisionDevices (0 on success).
  """
  # Recommended options on perf bots:
  # --disable-network
  # TODO(tonyg): We eventually want network on. However, currently radios
  # can cause perfbots to drain faster than they charge.
  # --min-battery-level 95
  # Some perf bots run benchmarks with USB charging disabled which leads
  # to gradual draining of the battery. We must wait for a full charge
  # before starting a run in order to keep the devices online.
  parser = argparse.ArgumentParser(
      description='Provision Android devices with settings required for bots.')
  parser.add_argument('-d', '--device', metavar='SERIAL',
                      help='the serial number of the device to be provisioned'
                      ' (the default is to provision all devices attached)')
  parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
                      dest='phases',
                      help='Phases of provisioning to run. '
                      '(If omitted, all phases will be run.)')
  parser.add_argument('--skip-wipe', action='store_true', default=False,
                      help="don't wipe device data during provisioning")
  parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
                      help='when wiping the device, max number of seconds to'
                      ' wait after each reboot '
                      '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
  parser.add_argument('--min-battery-level', type=int, metavar='NUM',
                      help='wait for the device to reach this minimum battery'
                      ' level before trying to continue')
  parser.add_argument('--disable-location', action='store_true',
                      help='disable Google location services on devices')
  parser.add_argument('--disable-mock-location', action='store_true',
                      default=False, help='Set ALLOW_MOCK_LOCATION to false')
  parser.add_argument('--disable-network', action='store_true',
                      help='disable network access on devices')
  parser.add_argument('--disable-java-debug', action='store_false',
                      dest='enable_java_debug', default=True,
                      help='disable Java property asserts and JNI checking')
  parser.add_argument('-t', '--target', default='Debug',
                      help='the build target (default: %(default)s)')
  parser.add_argument('-r', '--auto-reconnect', action='store_true',
                      help='push binary which will reboot the device on adb'
                      ' disconnections')
  parser.add_argument('--adb-key-files', type=str, nargs='+',
                      help='list of adb keys to push to device')
  parser.add_argument('-v', '--verbose', action='count', default=1,
                      help='Log more information.')
  parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
                      help='Wait for the battery to have this temp or lower.')
  parser.add_argument('--output-device-blacklist',
                      help='Json file to output the device blacklist.')
  args = parser.parse_args()
  constants.SetBuildType(args.target)
  run_tests_helper.SetLogLevel(args.verbose)
  return ProvisionDevices(args)
# Script entry point.
if __name__ == '__main__':
  sys.exit(main())
|
{
"content_hash": "b6918969a99f6a3ddf5eb16567da8b78",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 79,
"avg_line_length": 37.71091445427729,
"alnum_prop": 0.6738892365456821,
"repo_name": "chuan9/chromium-crosswalk",
"id": "491d1659ee3e15e23f39a48bce90e4e7b01de51e",
"size": "12975",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build/android/provision_devices.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "37073"
},
{
"name": "Batchfile",
"bytes": "8451"
},
{
"name": "C",
"bytes": "9417055"
},
{
"name": "C++",
"bytes": "240920124"
},
{
"name": "CSS",
"bytes": "938860"
},
{
"name": "DM",
"bytes": "60"
},
{
"name": "Groff",
"bytes": "2494"
},
{
"name": "HTML",
"bytes": "27258381"
},
{
"name": "Java",
"bytes": "14580273"
},
{
"name": "JavaScript",
"bytes": "20507007"
},
{
"name": "Makefile",
"bytes": "70992"
},
{
"name": "Objective-C",
"bytes": "1742904"
},
{
"name": "Objective-C++",
"bytes": "9967587"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "178732"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "480579"
},
{
"name": "Python",
"bytes": "8519074"
},
{
"name": "Shell",
"bytes": "482077"
},
{
"name": "Standard ML",
"bytes": "5034"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
}
|
class Fasta:
    """Parse, clean up and rewrite a FASTA file of protein sequences.

    Two parallel pairs of lists are kept: `original_ids`/`original_seqs`
    hold the records exactly as read (never modified after parsing), while
    `new_ids`/`new_seqs` start as copies and may be rewritten (e.g. by
    `shorten`).  Index i of every list refers to the same record, so the
    lists must always stay the same length.
    """

    def __init__(self, name):
        # `name` can be anything; conventionally the source file name.
        self.name = name
        # Working (modifiable) copies of the ids and sequences.
        self.new_ids = []
        self.new_seqs = []
        # Pristine ids and sequences as read from disk; never modified
        # after open_seqs() / blast_to_fasta finishes.
        self.original_ids = []
        self.original_seqs = []

    def _append_seq(self, aa_seq):
        # Store a completed amino-acid sequence in both the pristine and
        # the working lists.  Internal newlines between wrapped sequence
        # lines are preserved; only outer whitespace is removed.
        aa_seq = aa_seq.strip()
        self.new_seqs.append(aa_seq)
        self.original_seqs.append(aa_seq)

    def open_seqs(self, fastaname):
        """Read `fastaname` and populate the id and sequence lists.

        Arguments:
            fastaname: path of the FASTA file to parse.
        """
        aa_seq = None  # None until the first ">" header line is seen.
        with open(fastaname) as fastafile:
            for line in fastafile:
                # Skip blank lines.  (Bug fix: the old code used `pass`
                # here, which fell through and appended the blank line to
                # the current sequence.)
                if line == "\n":
                    continue
                if ">" in line:
                    # Header line: flush the previous record, if any.
                    if aa_seq is not None:
                        self._append_seq(aa_seq)
                    aa_seq = ""
                    # Drop the leading ">" and surrounding whitespace.
                    seq_id = line.strip(">").strip()
                    self.new_ids.append(seq_id)
                    self.original_ids.append(seq_id)
                elif aa_seq is not None:
                    # Continuation of the current amino-acid sequence.
                    # (Lines before the first header are ignored instead of
                    # crashing, as the old NameError-based flow did.)
                    aa_seq = aa_seq + line
        # Flush the final record.
        if aa_seq is not None:
            self._append_seq(aa_seq)
        print("Initial sequence and ID lists created. Contains "+str(len(self.original_ids))+" sequences")

    def shorten(self):
        """Rewrite each entry of new_ids as `Species_name|number`.

        Handles both old NCBI headers (`gi|123|db|ACC| desc [Species]`),
        taking the gi number, and the newer format (`ACC desc [Species]`),
        taking the accession number.  Entries whose species cannot be
        recovered are reported and left unchanged.
        """
        # Bug fix: imported locally because at module level `re` is only
        # imported under the __main__ guard, so calling shorten() on an
        # imported Fasta raised NameError.
        import re
        print("shortening ids...")
        normal = 0
        # enumerate() (rather than original_ids.index(line)) keeps records
        # with duplicate ids from all mapping onto the first occurrence.
        for index, line in enumerate(self.original_ids):
            if "gi|" in line:
                # Old format: pull out the gi number and the [Species] tag.
                number = re.sub(r"(gi)(\|)([0-9]*)(\|)([A-Za-z]*)(\|)(.*)(\[\'?[A-Z]?[a-z]* ?.*\])(.*)", r"\3", line)
                species = re.sub(r"(gi)(\|)([0-9]*)(\|)([A-Za-z]*)(\|)(.*)(\[\'?[A-Z]?[a-z]* ?.*\])(.*)", r"\8|", line)
            else:
                # New format: the accession number is the first token.
                number = re.sub(r"([^ ]*)(.*)(\[\'?[A-Z]?[a-z]* ?.*\])(.*)", r"\1", line)
                species = re.sub(r"([^ ]*)(.*)(\[\'?[A-Z]?[a-z]* ?.*\])(.*)", r"\3|", line)
            num = number.strip()
            if "[" not in species:
                # No [Species] tag recovered; leave this id unmodified.
                print("Unknown Species in ID:" + line)
                continue
            normal += 1
            # Strip the brackets, replace punctuation and spaces with "_".
            # Periods are kept in the number but not in the name.
            cleaned = re.sub(r"[\[\]]", "", species)
            cleaned = re.sub(r"[:;\.=,/\+'\(\)]", "_", cleaned)
            cleaned = re.sub(" ", "_", cleaned)
            cleaned = re.sub("__", "_", cleaned)
            self.new_ids[index] = cleaned + num
        print("shortened: "+str(normal)+" sequence")

    def gen_new_fasta(self, new_fasta_name):
        """Write new_ids/new_seqs to `new_fasta_name` in FASTA format.

        Arguments:
            new_fasta_name: output file path (overwritten if it exists).
        """
        with open(new_fasta_name, "w") as new:
            for seq_id, seq in zip(self.new_ids, self.new_seqs):
                new.write(">" + seq_id.strip() + "\n")
                new.write(seq + "\n")
        print("Finished, your new fasta file is located at " + new_fasta_name)
# Command-line driver; this block only runs when the file is executed
# directly, not when the Fasta class is imported.
if __name__ == "__main__":
    print("Running in terminal")
    # imports used by the command-line driver
    import sys
    import argparse
    import os
    import re
    parser = argparse.ArgumentParser(description="All")
    # optional positional argument: directory containing the fasta file
    parser.add_argument("directory", nargs='?', default=os.getcwd(), type=str, help="type name of directory to run in where fasta resides, if not pwd")
    parser.add_argument("Fasta", type=str, help="type name your .fasta")
    args = parser.parse_args()
    ##begin commands
    # change dir if desired
    try:
        os.chdir(args.directory)
    except:
        # NOTE(review): bare except hides the real error; consider OSError.
        print ("didn't change dir")
    # create the Fasta object and populate its lists from the input file
    ExampleFasta = Fasta(args.Fasta)
    ExampleFasta.open_seqs(args.Fasta)
    # shorten the ids
    ExampleFasta.shorten()
    # derive the output name: insert "Sh" before the .fasta extension
    try:
        start, ext = args.Fasta.split(".")
        outname = start+"Sh.fasta"
    except:
        # NOTE(review): split(".") raises here when the name has zero or
        # more than one dot; the fallback just appends "Sh.fasta".
        outname = args.Fasta+"Sh.fasta"
    # write the new fasta
    ExampleFasta.gen_new_fasta(outname)
    print("Operation finished, closing!")
|
{
"content_hash": "523330a97e9bbe7f4954582e3c2806da",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 250,
"avg_line_length": 38.33116883116883,
"alnum_prop": 0.6561070642046417,
"repo_name": "abigailc/feast-etc",
"id": "93b065d003a7cb280cf581591bc8fe4945faa755",
"size": "6499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ShortenFasta.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "339442"
}
],
"symlink_target": ""
}
|
"""Train the model."""
# Example usage:
#
# python train.py \
# --logtostderr \
# --data_dir ~/vid2depth/data/kitti_raw_eigen \
# --seq_length 3 \
# --reconstr_weight 0.85 \
# --smooth_weight 0.05 \
# --ssim_weight 0.15 \
# --icp_weight 0.1 \
# --checkpoint_dir ~/vid2depth/checkpoints
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
import random
import time
from absl import app
from absl import flags
from absl import logging
import model
import numpy as np
import tensorflow as tf
import util
gfile = tf.gfile
HOME_DIR = os.path.expanduser('~')
DEFAULT_DATA_DIR = os.path.join(HOME_DIR, 'vid2depth/data/kitti_raw_eigen')
DEFAULT_CHECKPOINT_DIR = os.path.join(HOME_DIR, 'vid2depth/checkpoints')
flags.DEFINE_string('data_dir', DEFAULT_DATA_DIR, 'Preprocessed data.')
flags.DEFINE_float('learning_rate', 0.0002, 'Adam learning rate.')
flags.DEFINE_float('beta1', 0.9, 'Adam momentum.')
flags.DEFINE_float('reconstr_weight', 0.85, 'Frame reconstruction loss weight.')
flags.DEFINE_float('smooth_weight', 0.05, 'Smoothness loss weight.')
flags.DEFINE_float('ssim_weight', 0.15, 'SSIM loss weight.')
flags.DEFINE_float('icp_weight', 0.0, 'ICP loss weight.')
flags.DEFINE_integer('batch_size', 4, 'The size of a sample batch')
flags.DEFINE_integer('img_height', 128, 'Input frame height.')
flags.DEFINE_integer('img_width', 416, 'Input frame width.')
# Note: Training time grows linearly with sequence length. Use 2 or 3.
flags.DEFINE_integer('seq_length', 3, 'Number of frames in sequence.')
flags.DEFINE_string('pretrained_ckpt', None, 'Path to checkpoint with '
'pretrained weights. Do not include .data* extension.')
flags.DEFINE_string('checkpoint_dir', DEFAULT_CHECKPOINT_DIR,
'Directory to save model checkpoints.')
flags.DEFINE_integer('train_steps', 200000, 'Number of training steps.')
flags.DEFINE_integer('summary_freq', 100, 'Save summaries every N steps.')
flags.DEFINE_bool('legacy_mode', False, 'Whether to limit losses to using only '
'the middle frame in sequence as the target frame.')
FLAGS = flags.FLAGS
# Maximum number of checkpoints to keep.
MAX_TO_KEEP = 100
def main(_):
  """Builds the training model from FLAGS and runs training."""
  # Fixed seed for repeatability
  seed = 8964
  tf.set_random_seed(seed)
  np.random.seed(seed)
  random.seed(seed)
  if FLAGS.legacy_mode and FLAGS.seq_length < 3:
    raise ValueError('Legacy mode supports sequence length > 2 only.')
  if not gfile.Exists(FLAGS.checkpoint_dir):
    gfile.MakeDirs(FLAGS.checkpoint_dir)
  train_model = model.Model(data_dir=FLAGS.data_dir,
                            is_training=True,
                            learning_rate=FLAGS.learning_rate,
                            beta1=FLAGS.beta1,
                            reconstr_weight=FLAGS.reconstr_weight,
                            smooth_weight=FLAGS.smooth_weight,
                            ssim_weight=FLAGS.ssim_weight,
                            icp_weight=FLAGS.icp_weight,
                            batch_size=FLAGS.batch_size,
                            img_height=FLAGS.img_height,
                            img_width=FLAGS.img_width,
                            seq_length=FLAGS.seq_length,
                            legacy_mode=FLAGS.legacy_mode)
  train(train_model, FLAGS.pretrained_ckpt, FLAGS.checkpoint_dir,
        FLAGS.train_steps, FLAGS.summary_freq)
def train(train_model, pretrained_ckpt, checkpoint_dir, train_steps,
          summary_freq):
  """Train model.

  Args:
    train_model: model.Model built with is_training=True.
    pretrained_ckpt: checkpoint path to initialize weights from, or None.
    checkpoint_dir: directory for resuming/saving checkpoints and summaries.
    train_steps: total number of global steps to train for.
    summary_freq: write summaries and log progress every N steps.
  """
  if pretrained_ckpt is not None:
    vars_to_restore = util.get_vars_to_restore(pretrained_ckpt)
    pretrain_restorer = tf.train.Saver(vars_to_restore)
  vars_to_save = util.get_vars_to_restore()
  saver = tf.train.Saver(vars_to_save + [train_model.global_step],
                         max_to_keep=MAX_TO_KEEP)
  # saver=None: checkpointing is done manually below, once per epoch.
  sv = tf.train.Supervisor(logdir=checkpoint_dir, save_summaries_secs=0,
                           saver=None)
  config = tf.ConfigProto()
  config.gpu_options.allow_growth = True
  with sv.managed_session(config=config) as sess:
    if pretrained_ckpt is not None:
      logging.info('Restoring pretrained weights from %s', pretrained_ckpt)
      pretrain_restorer.restore(sess, pretrained_ckpt)
    logging.info('Attempting to resume training from %s...', checkpoint_dir)
    checkpoint = tf.train.latest_checkpoint(checkpoint_dir)
    logging.info('Last checkpoint found: %s', checkpoint)
    if checkpoint:
      saver.restore(sess, checkpoint)
    logging.info('Training...')
    start_time = time.time()
    last_summary_time = time.time()
    steps_per_epoch = train_model.reader.steps_per_epoch
    step = 1
    while step <= train_steps:
      fetches = {
          'train': train_model.train_op,
          'global_step': train_model.global_step,
          'incr_global_step': train_model.incr_global_step
      }
      # Loss and summaries are only fetched on summary steps to keep the
      # common iteration cheap.
      if step % summary_freq == 0:
        fetches['loss'] = train_model.total_loss
        fetches['summary'] = sv.summary_op
      results = sess.run(fetches)
      global_step = results['global_step']
      if step % summary_freq == 0:
        sv.summary_writer.add_summary(results['summary'], global_step)
        train_epoch = math.ceil(global_step / steps_per_epoch)
        train_step = global_step - (train_epoch - 1) * steps_per_epoch
        this_cycle = time.time() - last_summary_time
        last_summary_time += this_cycle
        logging.info(
            'Epoch: [%2d] [%5d/%5d] time: %4.2fs (%ds total) loss: %.3f',
            train_epoch, train_step, steps_per_epoch, this_cycle,
            time.time() - start_time, results['loss'])
      if step % steps_per_epoch == 0:
        logging.info('[*] Saving checkpoint to %s...', checkpoint_dir)
        saver.save(sess, os.path.join(checkpoint_dir, 'model'),
                   global_step=global_step)
      # Setting step to global_step allows for training for a total of
      # train_steps even if the program is restarted during training.
      step = global_step + 1
# Script entry point: absl parses the flags, then calls main.
if __name__ == '__main__':
  app.run(main)
|
{
"content_hash": "08ad2bfd621466fc0bb63c222cea5b59",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 80,
"avg_line_length": 38.550632911392405,
"alnum_prop": 0.64061730421934,
"repo_name": "cshallue/models",
"id": "b2f59cf221223d96c0fc247ef58f2ccdc5ad0b47",
"size": "6780",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "research/vid2depth/train.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1523636"
},
{
"name": "Dockerfile",
"bytes": "9821"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33208"
},
{
"name": "Jupyter Notebook",
"bytes": "2829707"
},
{
"name": "Makefile",
"bytes": "4933"
},
{
"name": "Python",
"bytes": "13149300"
},
{
"name": "Shell",
"bytes": "146035"
}
],
"symlink_target": ""
}
|
from django import template
from image.models import Image
from django.db.models import Q
register = template.Library()
# Custom filter, return all images that are:
# public, shared, or belong to a logged in user
@register.filter
def viewable_all_users(self, user):
    """Return images that are public, shared, or owned by `user`, by pk."""
    visibility = (Q(user=user) |
                  Q(published=Image.SHARED) |
                  Q(published=Image.PUBLIC))
    images = Image.objects.filter(visibility).order_by('pk')
    if len(images) > 0:
        return images
    return []
# Return all images belonging only to one user.
@register.filter
def viewable_all(self, user):
    """Return every image owned by `user`, ordered by primary key."""
    images = Image.objects.filter(Q(user=user)).order_by('pk')
    if len(images) > 0:
        return images
    return []
# Return public images belonging only to one user.
@register.filter
def viewable_public(self, user):
    """Return `user`'s public images, ordered by primary key."""
    public_only = Q(user=user) & Q(published=Image.PUBLIC)
    images = Image.objects.filter(public_only).order_by('pk')
    if len(images) > 0:
        return images
    return []
|
{
"content_hash": "4b72a147005157bf57d4135c3a2adeb9",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 50,
"avg_line_length": 24.67391304347826,
"alnum_prop": 0.6317180616740088,
"repo_name": "henry808/mod_drops",
"id": "e15507a85be7824fc2e7ba3601623ac7a62a77d1",
"size": "1135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "image/templatetags/image_extras.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2752"
},
{
"name": "HTML",
"bytes": "15816"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "93751"
}
],
"symlink_target": ""
}
|
"""
Legiscan Importer am models.
"""
from .lsidperson import LSIDPerson
from .lsidbill import LSIDBill
__all__ = (
'LSIDPerson',
'LSIDBill',
)
|
{
"content_hash": "eb35abbe8a58bed2a2dff1e2724dfd96",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 34,
"avg_line_length": 15.2,
"alnum_prop": 0.6644736842105263,
"repo_name": "access-missouri/am-django-project",
"id": "a1160f4c996758daaffc34cb6688690bfa27a0bd",
"size": "198",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "am/ls_importer/models/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "208381"
},
{
"name": "HTML",
"bytes": "75543"
},
{
"name": "JavaScript",
"bytes": "68836"
},
{
"name": "Makefile",
"bytes": "803"
},
{
"name": "Python",
"bytes": "241729"
},
{
"name": "Ruby",
"bytes": "105"
}
],
"symlink_target": ""
}
|
import pytest
from projectq.cengines import BasicMapperEngine
@pytest.fixture(params=["mapper", "no_mapper"])
def mapper(request):
"""Add a mapper which changes qubit ids by adding 1."""
if request.param == "mapper":
class TrivialMapper(BasicMapperEngine):
def __init__(self):
super().__init__()
self.current_mapping = {}
def receive(self, command_list):
for cmd in command_list:
for qureg in cmd.all_qubits:
for qubit in qureg:
if qubit.id == -1:
continue
elif qubit.id not in self.current_mapping:
previous_map = self.current_mapping
previous_map[qubit.id] = qubit.id + 1
self.current_mapping = previous_map
self._send_cmd_with_mapped_ids(cmd)
return TrivialMapper()
if request.param == "no_mapper":
return None
|
{
"content_hash": "76394733a7984ab968b73997bd0074e0",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 70,
"avg_line_length": 35.86666666666667,
"alnum_prop": 0.49349442379182157,
"repo_name": "ProjectQ-Framework/ProjectQ",
"id": "e04f9084dd292bf69e14f64e1aa7cb74c1700458",
"size": "1697",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "projectq/backends/_sim/_simulator_test_fixtures.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "158833"
},
{
"name": "Python",
"bytes": "1483141"
}
],
"symlink_target": ""
}
|
import subprocess
import praw
import datetime
import pyperclip
from hashlib import sha1
from flask import Flask
from flask import Response
from flask import request
from cStringIO import StringIO
from base64 import b64encode
from base64 import b64decode
from ConfigParser import ConfigParser
import OAuth2Util
import os
import markdown
import bleach
# encoding=utf8
import sys
from participantCollection import ParticipantCollection
# Python 2 hack: setdefaultencoding only exists again after reload(sys);
# forces UTF-8 so reddit comment text round-trips without UnicodeErrors.
reload(sys)
sys.setdefaultencoding('utf8')
# Edit Me!
# Per-month configuration: update these values when rolling over to a new
# monthly challenge thread.
challengePageSubmissionId = 'bvjyzi'
flaskport = 8964
thisMonthName = "June"
nextMonthName = "July"
# Traversing every "more comments" stub is slow; leave False unless the
# thread has outgrown the first page of comments.
readAllCommentsWhichCanBeSlower = False
sorryTooLateToSignUpReplyText = "Sorry, but the late signup grace period for " + thisMonthName + " is over, so you can't officially join this challenge. But feel free to follow along anyway, and comment all you want. And be sure to join us for the " + nextMonthName + " challenge. Signup posts for " + nextMonthName + " will begin during the last week of " + thisMonthName + "."
reinstatedReplyText = "OK, I've reinstated you. You should start showing up on the list again starting tomorrow."
app = Flask(__name__)
app.debug = True
# Populated per request by moderatechallenge(): maps comment hashes to the
# praw comment objects they identify.
commentHashesAndComments = {}
submission = None
def loginAndReturnRedditSession():
    """Log in to reddit with username/password credentials from disk.

    Reads ../reddit-password-credentials.cfg and returns an authenticated
    praw session.
    """
    config = ConfigParser()
    config.read("../reddit-password-credentials.cfg")
    user = config.get("Reddit", "user")
    password = config.get("Reddit", "password")
    # TODO: password auth is going away, and we will soon need to do oauth.
    redditSession = praw.Reddit(user_agent='Test Script by /u/foobarbazblarg')
    redditSession.login(user, password, disable_warning=True)
    # submissions = redditSession.get_subreddit('pornfree').get_hot(limit=5)
    # print [str(x) for x in submissions]
    return redditSession
def loginOAuthAndReturnRedditSession():
    """Create a praw Reddit session; OAuth is handled by praw itself."""
    redditSession = praw.Reddit(user_agent='Test Script by /u/foobarbazblarg')
    # New version of praw does not require explicit use of the OAuth2Util object. Presumably because reddit now REQUIRES oauth.
    # o = OAuth2Util.OAuth2Util(redditSession, print_log=True, configfile="../reddit-oauth-credentials.cfg")
    # TODO: Testing comment of refresh. We authenticate fresh every time, so presumably no need to do o.refresh().
    # o.refresh(force=True)
    return redditSession
def getSubmissionForRedditSession(redditSession):
    """Fetch the challenge submission, optionally resolving all comments."""
    # submission = redditSession.get_submission(submission_id=challengePageSubmissionId)
    submission = redditSession.submission(id=challengePageSubmissionId)
    if readAllCommentsWhichCanBeSlower:
        # Resolve every "load more comments" stub; slow on large threads.
        submission.comments.replace_more(limit=None)
    # submission.replace_more_comments(limit=None, threshold=0)
    return submission
def getCommentsForSubmission(submission):
    """Return a flat list of the submission's Comment objects.

    Filters out MoreComments placeholders by exact class comparison, matching
    the original behavior (no isinstance, so subclasses are excluded too).
    """
    return [candidate for candidate in submission.comments.list()
            if candidate.__class__ == praw.models.Comment]
def retireCommentHash(commentHash):
    """Append commentHash to the retired-hashes file so it is skipped next time."""
    with open("retiredcommenthashes.txt", "a") as retiredFile:
        retiredFile.write(commentHash + '\n')
def retiredCommentHashes():
    """Return the list of previously retired comment hashes, one per file line."""
    with open("retiredcommenthashes.txt", "r") as retiredFile:
        return retiredFile.read().splitlines()
@app.route('/moderatechallenge.html')
def moderatechallenge():
    """Render the moderation dashboard for the current challenge thread.

    Fetches the challenge submission and its comments, skips comments whose
    sha1 hash has already been retired, and emits one HTML form per remaining
    comment whose buttons POST to takeaction().
    """
    currentDayOfMonthIndex = datetime.date.today().day
    # Late signups are accepted during the first three days of the month.
    lateCheckinGracePeriodIsInEffect = currentDayOfMonthIndex <= 3
    global commentHashesAndComments
    global submission
    commentHashesAndComments = {}
    stringio = StringIO()
    stringio.write('<html>\n<head>\n</head>\n\n')
    # redditSession = loginAndReturnRedditSession()
    redditSession = loginOAuthAndReturnRedditSession()
    submission = getSubmissionForRedditSession(redditSession)
    flat_comments = getCommentsForSubmission(submission)
    retiredHashes = retiredCommentHashes()
    i = 1
    # POST targets use this hidden iframe so the dashboard page never reloads.
    stringio.write('<iframe name="invisibleiframe" style="display:none;"></iframe>\n')
    stringio.write("<h3>")
    stringio.write(os.getcwd())
    stringio.write("<br>\n")
    stringio.write(submission.title)
    stringio.write("</h3>\n\n")
    stringio.write('<form action="copydisplaytoclipboard.html" method="post" target="invisibleiframe">')
    stringio.write('<input type="submit" name="actiontotake" value="Copy display.py stdout to clipboard">')
    stringio.write('<input type="submit" name="actiontotake" value="Automatically post display.py stdout">')
    stringio.write('</form>')
    stringio.write('<form action="updategooglechart.html" method="post" target="invisibleiframe">')
    stringio.write('<input type="submit" value="update-google-chart.py">')
    stringio.write('</form>')
    for comment in flat_comments:
        # print comment.is_root
        # print comment.score
        i += 1
        # Hash fullname + body so an edited comment shows up again for review.
        commentHash = sha1()
        commentHash.update(comment.fullname)
        commentHash.update(comment.body.encode('utf-8'))
        commentHash = commentHash.hexdigest()
        if commentHash not in retiredHashes:
            commentHashesAndComments[commentHash] = comment
            authorName = str(comment.author)  # can be None if author was deleted. So check for that and skip if it's None.
            participant = ParticipantCollection().participantNamed(authorName)
            stringio.write("<hr>\n")
            stringio.write('<font color="blue"><b>')
            stringio.write(authorName)
            stringio.write('</b></font><br>')
            # Show membership / check-in / relapse status badges for the author.
            if ParticipantCollection().hasParticipantNamed(authorName):
                stringio.write(' <small><font color="green">(member)</font></small>')
                if participant.isStillIn:
                    stringio.write(' <small><font color="green">(still in)</font></small>')
                else:
                    stringio.write(' <small><font color="red">(out)</font></small>')
                if participant.hasCheckedIn:
                    stringio.write(' <small><font color="green">(checked in)</font></small>')
                else:
                    stringio.write(' <small><font color="orange">(not checked in)</font></small>')
                if participant.hasRelapsed:
                    stringio.write(' <small><font color="red">(relapsed)</font></small>')
                else:
                    stringio.write(' <small><font color="green">(not relapsed)</font></small>')
            else:
                stringio.write(' <small><font color="red">(not a member)</font></small>')
            stringio.write('<form action="takeaction.html" method="post" target="invisibleiframe">')
            # Highlight the most likely action: signup during the grace period,
            # plain checkin afterwards.
            if lateCheckinGracePeriodIsInEffect:
                stringio.write('<input type="submit" name="actiontotake" value="Checkin">')
                stringio.write('<input type="submit" name="actiontotake" value="Signup and checkin" style="color:white;background-color:green">')
            else:
                stringio.write('<input type="submit" name="actiontotake" value="Checkin" style="color:white;background-color:green">')
                stringio.write('<input type="submit" name="actiontotake" value="Signup and checkin">')
            stringio.write('<input type="submit" name="actiontotake" value="Relapse" style="color:white;background-color:red">')
            stringio.write('<input type="submit" name="actiontotake" value="Reinstate with automatic comment">')
            stringio.write('<input type="submit" name="actiontotake" value="Reply with sorry-too-late comment">')
            stringio.write('<input type="submit" name="actiontotake" value="Skip comment">')
            stringio.write('<input type="submit" name="actiontotake" value="Skip comment and don\'t upvote">')
            # Base64 keeps arbitrary usernames/bodies safe inside HTML attributes.
            stringio.write('<input type="hidden" name="username" value="' + b64encode(authorName) + '">')
            stringio.write('<input type="hidden" name="bodyencodedformlcorpus" value="' + b64encode(comment.body.encode('utf-8')) + '">')
            stringio.write('<input type="hidden" name="commenthash" value="' + commentHash + '">')
            # stringio.write('<input type="hidden" name="commentpermalink" value="' + comment.permalink + '">')
            stringio.write('</form>')
            # Render the comment body as markdown, stripped down to <p> tags.
            stringio.write(bleach.clean(markdown.markdown(comment.body.encode('utf-8')), tags=['p']))
            stringio.write("\n<br><br>\n\n")
    stringio.write('</html>')
    pageString = stringio.getvalue()
    stringio.close()
    return Response(pageString, mimetype='text/html')
@app.route('/takeaction.html', methods=["POST"])
def takeaction():
    """Handle one moderation action POSTed from the moderatechallenge() page.

    Looks up the praw comment by the hash stashed in the form, performs the
    chosen action (mostly by shelling out to per-action scripts), retires the
    comment hash, and appends the comment body to the matching ML corpus file.
    """
    username = b64decode(request.form["username"])
    commentHash = str(request.form["commenthash"])
    bodyEncodedForMLCorpus = str(request.form["bodyencodedformlcorpus"])
    # commentPermalink = request.form["commentpermalink"]
    actionToTake = request.form["actiontotake"]
    # print commentHashesAndComments
    comment = commentHashesAndComments[commentHash]
    # print "comment: " + str(comment)
    if actionToTake == 'Checkin':
        print "checkin - " + username
        subprocess.call(['./checkin.py', username])
        comment.upvote()
        retireCommentHash(commentHash)
        recordMLCorpusCheckin(bodyEncodedForMLCorpus)
    # NOTE(review): plain `if` here while the rest of the chain uses `elif`;
    # harmless because the action strings are mutually exclusive, but
    # consider `elif` for consistency.
    if actionToTake == 'Signup and checkin':
        print "signup and checkin - " + username
        subprocess.call(['./signup-and-checkin.sh', username])
        comment.upvote()
        retireCommentHash(commentHash)
        recordMLCorpusSignupAndCheckin(bodyEncodedForMLCorpus)
    elif actionToTake == 'Relapse':
        print "relapse - " + username
        subprocess.call(['./relapse.py', username])
        comment.upvote()
        retireCommentHash(commentHash)
        recordMLCorpusRelapse(bodyEncodedForMLCorpus)
    elif actionToTake == 'Reinstate with automatic comment':
        print "reinstate - " + username
        subprocess.call(['./reinstate.py', username])
        comment.reply(reinstatedReplyText)
        comment.upvote()
        retireCommentHash(commentHash)
        recordMLCorpusReinstate(bodyEncodedForMLCorpus)
    elif actionToTake == 'Reply with sorry-too-late comment':
        print "reply with sorry-too-late comment - " + username
        comment.reply(sorryTooLateToSignUpReplyText)
        comment.upvote()
        retireCommentHash(commentHash)
        recordMLCorpusTooLate(bodyEncodedForMLCorpus)
    elif actionToTake == 'Skip comment':
        print "Skip comment - " + username
        comment.upvote()
        retireCommentHash(commentHash)
        recordMLCorpusSkip(bodyEncodedForMLCorpus)
    elif actionToTake == "Skip comment and don't upvote":
        print "Skip comment and don't upvote - " + username
        retireCommentHash(commentHash)
        recordMLCorpusSkip(bodyEncodedForMLCorpus)
    return Response("hello", mimetype='text/html')
@app.route('/copydisplaytoclipboard.html', methods=["POST"])
def copydisplaytoclipboard():
    """Run display.py and, if requested, post its output to the submission.

    display.py presumably copies its stdout to the clipboard (the "post"
    branch reads it back via pyperclip.paste()) -- confirm against display.py.
    """
    action = request.form["actiontotake"]
    if action == 'Copy display.py stdout to clipboard':
        subprocess.call(['./display.py'])
    elif action == 'Automatically post display.py stdout':
        subprocess.call(['./display.py'])
        submission.edit(pyperclip.paste())
    return Response("hello", mimetype='text/html')
@app.route('/updategooglechart.html', methods=["POST"])
def updategooglechart():
    """Regenerate the Google chart by shelling out to update-google-chart.py."""
    # NOTE(review): stale debug message -- this handler updates the chart,
    # it does not copy anything to the clipboard.
    print "TODO: Copy display to clipboard"
    subprocess.call(['./update-google-chart.py'])
    return Response("hello", mimetype='text/html')
def recordMLCorpusCheckin(aString):
    """Append one encoded comment body to the monthly check-in ML corpus."""
    with open("../new-ml-corpus-monthly-checkin.txt", "a") as corpusFile:
        corpusFile.write(aString + "\n")
def recordMLCorpusSignupAndCheckin(aString):
    """Append one encoded comment body to the monthly signup-and-check-in ML corpus."""
    with open("../new-ml-corpus-monthly-signup-and-checkin.txt", "a") as corpusFile:
        corpusFile.write(aString + "\n")
def recordMLCorpusRelapse(aString):
    """Append one encoded comment body to the monthly relapse ML corpus."""
    with open("../new-ml-corpus-monthly-relapse.txt", "a") as corpusFile:
        corpusFile.write(aString + "\n")
def recordMLCorpusReinstate(aString):
    """Append one encoded comment body to the monthly reinstate ML corpus."""
    with open("../new-ml-corpus-monthly-reinstate.txt", "a") as corpusFile:
        corpusFile.write(aString + "\n")
def recordMLCorpusTooLate(aString):
    """Append one encoded comment body to the monthly too-late ML corpus."""
    with open("../new-ml-corpus-monthly-too-late.txt", "a") as corpusFile:
        corpusFile.write(aString + "\n")
def recordMLCorpusSkip(aString):
    """Append one encoded comment body to the monthly skip ML corpus."""
    with open("../new-ml-corpus-monthly-skip.txt", "a") as corpusFile:
        corpusFile.write(aString + "\n")
if __name__ == '__main__':
    # flaskport is defined earlier in the file (outside this view) --
    # presumably an int port number; serve on localhost only.
    app.run(host='127.0.0.1', port=flaskport)
|
{
"content_hash": "157172deda4569249a6a440a38f2cd69",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 381,
"avg_line_length": 45.39426523297491,
"alnum_prop": 0.6794315041452823,
"repo_name": "foobarbazblarg/stayclean",
"id": "c2551f74c6ae27568fd8855a152ab7a575b757da",
"size": "12688",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stayclean-2019-june/serve-challenge-with-flask.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4232161"
},
{
"name": "Shell",
"bytes": "52056"
}
],
"symlink_target": ""
}
|
from __future__ import division
import argparse, time, os
import logging
import mxnet as mx
from mxnet import gluon
from mxnet import profiler
from mxnet.gluon import nn
from mxnet.gluon.model_zoo import vision as models
from mxnet import autograd as ag
from mxnet.test_utils import get_mnist_iterator
from mxnet.metric import Accuracy, TopKAccuracy, CompositeEvalMetric
import numpy as np
from data import *
# logging
# Console gets INFO+ via basicConfig; image-classification.log gets DEBUG+.
logging.basicConfig(level=logging.INFO)
fh = logging.FileHandler('image-classification.log')
logger = logging.getLogger()
logger.addHandler(fh)
formatter = logging.Formatter('%(message)s')
fh.setFormatter(formatter)
fh.setLevel(logging.DEBUG)
# Separator line between runs, written without a timestamp prefix.
# NOTE(review): the root logger level is INFO, so this DEBUG record is likely
# suppressed -- confirm the separator actually appears in the log file.
logging.debug('\n%s', '-' * 100)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
fh.setFormatter(formatter)
# CLI
# All hyper-parameters and runtime options come from argparse; the parsed
# namespace `opt` is read as a module-level global by most functions below.
parser = argparse.ArgumentParser(description='Train a model for image classification.')
parser.add_argument('--dataset', type=str, default='cifar10',
                    help='dataset to use. options are mnist, cifar10, imagenet and dummy.')
parser.add_argument('--data-dir', type=str, default='',
                    help='training directory of imagenet images, contains train/val subdirs.')
parser.add_argument('--batch-size', type=int, default=32,
                    help='training batch size per device (CPU/GPU).')
parser.add_argument('--num-worker', '-j', dest='num_workers', default=4, type=int,
                    help='number of workers of dataloader.')
parser.add_argument('--gpus', type=str, default='',
                    help='ordinates of gpus to use, can be "0,1,2" or empty for cpu only.')
parser.add_argument('--epochs', type=int, default=120,
                    help='number of training epochs.')
parser.add_argument('--lr', type=float, default=0.1,
                    help='learning rate. default is 0.1.')
parser.add_argument('--momentum', type=float, default=0.9,
                    help='momentum value for optimizer, default is 0.9.')
parser.add_argument('--wd', type=float, default=0.0001,
                    help='weight decay rate. default is 0.0001.')
parser.add_argument('--seed', type=int, default=123,
                    help='random seed to use. Default=123.')
parser.add_argument('--mode', type=str,
                    help='mode in which to train the model. options are symbolic, imperative, hybrid')
parser.add_argument('--model', type=str, required=True,
                    help='type of model to use. see vision_model for options.')
parser.add_argument('--use_thumbnail', action='store_true',
                    help='use thumbnail or not in resnet. default is false.')
parser.add_argument('--batch-norm', action='store_true',
                    help='enable batch normalization or not in vgg. default is false.')
parser.add_argument('--use-pretrained', action='store_true',
                    help='enable using pretrained model from gluon.')
parser.add_argument('--prefix', default='', type=str,
                    help='path to checkpoint prefix, default is current working dir')
parser.add_argument('--start-epoch', default=0, type=int,
                    help='starting epoch, 0 for fresh training, > 0 to resume')
parser.add_argument('--resume', type=str, default='',
                    help='path to saved weight where you want resume')
parser.add_argument('--lr-factor', default=0.1, type=float,
                    help='learning rate decay ratio')
parser.add_argument('--lr-steps', default='30,60,90', type=str,
                    help='list of learning rate decay epochs as in str')
parser.add_argument('--dtype', default='float32', type=str,
                    help='data type, float32 or float16 if applicable')
parser.add_argument('--save-frequency', default=10, type=int,
                    help='epoch frequence to save model, best model will always be saved')
parser.add_argument('--kvstore', type=str, default='device',
                    help='kvstore to use for trainer/module.')
parser.add_argument('--log-interval', type=int, default=50,
                    help='Number of batches to wait before logging.')
parser.add_argument('--profile', action='store_true',
                    help='Option to turn on memory profiling for front-end, '\
                         'and prints out the memory usage by python function at the end.')
parser.add_argument('--builtin-profiler', type=int, default=0, help='Enable built-in profiler (0=off, 1=on)')
opt = parser.parse_args()
# global variables
logger.info('Starting new image-classification task:, %s',opt)
mx.random.seed(opt.seed)
model_name = opt.model
# Number of output classes per supported dataset.
dataset_classes = {'mnist': 10, 'cifar10': 10, 'imagenet': 1000, 'dummy': 1000}
batch_size, dataset, classes = opt.batch_size, opt.dataset, dataset_classes[opt.dataset]
context = [mx.gpu(int(i)) for i in opt.gpus.split(',')] if opt.gpus.strip() else [mx.cpu()]
num_gpus = len(context)
# --batch-size is per device; scale by the number of devices in use.
batch_size *= max(1, num_gpus)
lr_steps = [int(x) for x in opt.lr_steps.split(',') if x.strip()]
# Track top-1 and top-5 accuracy together during training and validation.
metric = CompositeEvalMetric([Accuracy(), TopKAccuracy(5)])
def get_model(model, ctx, opt):
    """Create, optionally load, and initialize the requested network.

    Resume weights take precedence; otherwise freshly created (non-pretrained)
    models are initialized (Normal for alexnet, Xavier for everything else).
    The network is finally cast to opt.dtype.
    """
    model_kwargs = {'ctx': ctx, 'pretrained': opt.use_pretrained, 'classes': classes}
    if model.startswith('resnet'):
        model_kwargs['thumbnail'] = opt.use_thumbnail
    elif model.startswith('vgg'):
        model_kwargs['batch_norm'] = opt.batch_norm
    network = models.get_model(model, **model_kwargs)
    if opt.resume:
        network.load_params(opt.resume)
    elif not opt.use_pretrained:
        if model in ['alexnet']:
            initializer = mx.init.Normal()
        else:
            initializer = mx.init.Xavier(magnitude=2)
        network.initialize(initializer)
    network.cast(opt.dtype)
    return network
# Build the global network once at import time on the selected context(s).
net = get_model(opt.model, context, opt)
def get_data_iters(dataset, batch_size, num_workers=1, rank=0):
    """Return (train_iter, val_iter) for the named dataset.

    Args:
        dataset: one of 'mnist', 'cifar10', 'imagenet', 'dummy'.
        batch_size: global batch size (already scaled by device count).
        num_workers: number of distributed (kvstore) workers.
        rank: this worker's rank; shards mnist/cifar10 across workers.

    Raises:
        ValueError: if `dataset` is not a supported name, or 'imagenet' is
            requested without --data-dir.
    """
    if dataset == 'mnist':
        train_data, val_data = get_mnist_iterator(batch_size, (1, 28, 28),
                                                  num_parts=num_workers, part_index=rank)
    elif dataset == 'cifar10':
        train_data, val_data = get_cifar10_iterator(batch_size, (3, 32, 32),
                                                    num_parts=num_workers, part_index=rank)
    elif dataset == 'imagenet':
        if not opt.data_dir:
            raise ValueError('Dir containing raw images in train/val is required for imagenet, plz specify "--data-dir"')
        # inceptionv3 expects 299x299 inputs; everything else uses 224x224.
        image_size = 299 if model_name == 'inceptionv3' else 224
        train_data, val_data = get_imagenet_iterator(opt.data_dir, batch_size, opt.num_workers, image_size, opt.dtype)
    elif dataset == 'dummy':
        data_shape = (3, 299, 299) if model_name == 'inceptionv3' else (3, 224, 224)
        train_data, val_data = dummy_iterator(batch_size, data_shape)
    else:
        # Previously an unknown name fell through and raised UnboundLocalError
        # on the return statement; fail with an explicit, actionable error.
        raise ValueError('unknown dataset: %s' % dataset)
    return train_data, val_data
def test(ctx, val_data):
    """Evaluate the global `net` on val_data; return (metric names, values)."""
    metric.reset()
    val_data.reset()
    for batch in val_data:
        data = gluon.utils.split_and_load(batch.data[0], ctx_list=ctx, batch_axis=0)
        label = gluon.utils.split_and_load(batch.label[0], ctx_list=ctx, batch_axis=0)
        # Forward each device's shard and accumulate into the shared metric.
        outputs = [net(shard) for shard in data]
        metric.update(label, outputs)
    return metric.get()
def update_learning_rate(lr, trainer, epoch, ratio, steps):
    """Set the learning rate to the initial value decayed by ratio every N epochs.

    The decay count is the number of entries in `steps` strictly below `epoch`.
    Returns the same trainer for chaining.
    """
    num_decays = sum(1 for step in steps if step < epoch)
    trainer.set_learning_rate(lr * (ratio ** num_decays))
    return trainer
def save_checkpoint(epoch, top1, best_acc):
    """Save a periodic checkpoint and always keep the best model so far.

    `best_acc` is a single-element list used as a mutable best-accuracy cell;
    it is updated in place when `top1` improves on it.
    """
    periodic_save_due = opt.save_frequency and (epoch + 1) % opt.save_frequency == 0
    if periodic_save_due:
        checkpoint_path = os.path.join(opt.prefix, '%s_%d_acc_%.4f.params' % (opt.model, epoch, top1))
        net.save_params(checkpoint_path)
        logger.info('[Epoch %d] Saving checkpoint to %s with Accuracy: %.4f', epoch, checkpoint_path, top1)
    if top1 > best_acc[0]:
        best_acc[0] = top1
        best_path = os.path.join(opt.prefix, '%s_best.params' % (opt.model))
        net.save_params(best_path)
        logger.info('[Epoch %d] Saving checkpoint to %s with Accuracy: %.4f', epoch, best_path, top1)
def train(opt, ctx):
    """Imperative/hybrid Gluon training loop.

    Uses the module-level `net`, `metric`, `batch_size`, `dataset`, `lr_steps`
    and `logger`; validates after every epoch and checkpoints via
    save_checkpoint().
    """
    if isinstance(ctx, mx.Context):
        ctx = [ctx]
    kv = mx.kv.create(opt.kvstore)
    train_data, val_data = get_data_iters(dataset, batch_size, kv.num_workers, kv.rank)
    net.collect_params().reset_ctx(ctx)
    trainer = gluon.Trainer(net.collect_params(), 'sgd',
                            {'learning_rate': opt.lr, 'wd': opt.wd, 'momentum': opt.momentum,
                             'multi_precision': True},
                            kvstore = kv)
    loss = gluon.loss.SoftmaxCrossEntropyLoss()
    total_time = 0
    num_epochs = 0
    best_acc = [0]  # single-element list: mutable best-accuracy cell for save_checkpoint
    for epoch in range(opt.start_epoch, opt.epochs):
        # Apply step decay before the epoch begins.
        trainer = update_learning_rate(opt.lr, trainer, epoch, opt.lr_factor, lr_steps)
        tic = time.time()
        train_data.reset()
        metric.reset()
        btic = time.time()
        for i, batch in enumerate(train_data):
            # Split the batch across devices.
            data = gluon.utils.split_and_load(batch.data[0].astype(opt.dtype), ctx_list=ctx, batch_axis=0)
            label = gluon.utils.split_and_load(batch.label[0].astype(opt.dtype), ctx_list=ctx, batch_axis=0)
            outputs = []
            Ls = []
            with ag.record():
                for x, y in zip(data, label):
                    z = net(x)
                    L = loss(z, y)
                    # store the loss and do backward after we have done forward
                    # on all GPUs for better speed on multiple GPUs.
                    Ls.append(L)
                    outputs.append(z)
                ag.backward(Ls)
            trainer.step(batch.data[0].shape[0])
            metric.update(label, outputs)
            if opt.log_interval and not (i+1)%opt.log_interval:
                name, acc = metric.get()
                logger.info('Epoch[%d] Batch [%d]\tSpeed: %f samples/sec\t%s=%f, %s=%f'%(
                            epoch, i, batch_size/(time.time()-btic), name[0], acc[0], name[1], acc[1]))
                btic = time.time()
        epoch_time = time.time()-tic
        # First epoch will usually be much slower than the subsequent epochs
        # (warm-up, lazy initialization), so don't factor it into the average.
        if num_epochs > 0:
            total_time = total_time + epoch_time
        num_epochs = num_epochs + 1
        name, acc = metric.get()
        logger.info('[Epoch %d] training: %s=%f, %s=%f'%(epoch, name[0], acc[0], name[1], acc[1]))
        logger.info('[Epoch %d] time cost: %f'%(epoch, epoch_time))
        name, val_acc = test(ctx, val_data)
        logger.info('[Epoch %d] validation: %s=%f, %s=%f'%(epoch, name[0], val_acc[0], name[1], val_acc[1]))
        # save model if meet requirements
        save_checkpoint(epoch, val_acc[0], best_acc)
    if num_epochs > 1:
        print('Average epoch time: {}'.format(float(total_time)/(num_epochs - 1)))
def main():
    """Entry point: train via the Module API (symbolic) or Gluon (imperative/hybrid).

    When --builtin-profiler is set, wraps the run in MXNet's built-in profiler
    and prints the aggregated stats at the end.
    """
    if opt.builtin_profiler > 0:
        profiler.set_config(profile_all=True, aggregate_stats=True)
        profiler.set_state('run')
    if opt.mode == 'symbolic':
        # Wrap the Gluon network in a symbolic Module and train with fit().
        data = mx.sym.var('data')
        out = net(data)
        softmax = mx.sym.SoftmaxOutput(out, name='softmax')
        mod = mx.mod.Module(softmax, context=[mx.gpu(i) for i in range(num_gpus)] if num_gpus > 0 else [mx.cpu()])
        kv = mx.kv.create(opt.kvstore)
        train_data, val_data = get_data_iters(dataset, batch_size, kv.num_workers, kv.rank)
        mod.fit(train_data,
                eval_data = val_data,
                num_epoch=opt.epochs,
                kvstore=kv,
                batch_end_callback = mx.callback.Speedometer(batch_size, max(1, opt.log_interval)),
                epoch_end_callback = mx.callback.do_checkpoint('image-classifier-%s'% opt.model),
                optimizer = 'sgd',
                optimizer_params = {'learning_rate': opt.lr, 'wd': opt.wd, 'momentum': opt.momentum, 'multi_precision': True},
                initializer = mx.init.Xavier(magnitude=2))
        mod.save_params('image-classifier-%s-%d-final.params'%(opt.model, opt.epochs))
    else:
        # Imperative by default; hybridize() turns on the compiled hybrid mode.
        if opt.mode == 'hybrid':
            net.hybridize()
        train(opt, context)
    if opt.builtin_profiler > 0:
        profiler.set_state('stop')
        print(profiler.dumps())
if __name__ == '__main__':
    if opt.profile:
        # Profile the whole run and print cumulative-time stats.
        # NOTE(review): hotshot was removed in Python 3; this branch only works
        # on Python 2 -- consider cProfile for portability.
        import hotshot, hotshot.stats
        prof = hotshot.Profile('image-classifier-%s-%s.prof'%(opt.model, opt.mode))
        prof.runcall(main)
        prof.close()
        stats = hotshot.stats.load('image-classifier-%s-%s.prof'%(opt.model, opt.mode))
        stats.strip_dirs()
        stats.sort_stats('cumtime', 'calls')
        stats.print_stats()
    else:
        main()
|
{
"content_hash": "8a02f1d096d73dc94c774dad46feab47",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 126,
"avg_line_length": 46.84981684981685,
"alnum_prop": 0.6068021892103206,
"repo_name": "luoyetx/mxnet",
"id": "a67a31790a06bee1fe8e03b9330459adaaa9a491",
"size": "13576",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "example/gluon/image_classification.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "119122"
},
{
"name": "C++",
"bytes": "5117406"
},
{
"name": "CMake",
"bytes": "80180"
},
{
"name": "Cuda",
"bytes": "883194"
},
{
"name": "Groovy",
"bytes": "1020"
},
{
"name": "Java",
"bytes": "122297"
},
{
"name": "Jupyter Notebook",
"bytes": "1275177"
},
{
"name": "Makefile",
"bytes": "60221"
},
{
"name": "Matlab",
"bytes": "34903"
},
{
"name": "Perl",
"bytes": "1271567"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "Python",
"bytes": "5393105"
},
{
"name": "R",
"bytes": "311544"
},
{
"name": "Scala",
"bytes": "988074"
},
{
"name": "Shell",
"bytes": "268529"
},
{
"name": "Smalltalk",
"bytes": "43774"
}
],
"symlink_target": ""
}
|
from __future__ import with_statement
from fabric.api import local, put, run, env
env.use_ssh_config = True
def build():
    """Build the deployable cuttr.jar locally with sbt (clean + compile + assembly)."""
    local('sbt clean compile assembly')
def deploy():
    """Upload cuttr.jar to the remote host, then fix its mode and ownership.

    Reads GLITCHUSER / GLITCHGROUP from the fabric env (ssh config is enabled
    at module level).
    """
    remote_dir = '/home/{0}/cuttr'.format(env.GLITCHUSER)
    put('assembled/cuttr.jar', remote_dir)
    run('chmod +x {0}/cuttr.jar'.format(remote_dir))
    run('chown {0}:{1} {2}/cuttr.jar'.format(env.GLITCHUSER, env.GLITCHGROUP, remote_dir))
|
{
"content_hash": "db0dae7edd15e3f1d6d735e9dd2a2b33",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 81,
"avg_line_length": 26.4,
"alnum_prop": 0.6691919191919192,
"repo_name": "rumblesan/cuttr",
"id": "0d7682987997e77d88b27db8bc84ee38404379db",
"size": "419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabfile.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "36032"
},
{
"name": "Scala",
"bytes": "42272"
}
],
"symlink_target": ""
}
|
from precisely import Matcher
from precisely.coercion import to_matcher
from precisely.results import matched, unmatched, indented_list
class Prop(Matcher):
    """Matcher that extracts a (possibly nested) dictionary value and
    delegates to an inner matcher.

    `path` is a sequence of keys, e.g. ['foo', 'baz'] looks up
    item['foo']['baz'].  A path that cannot be resolved yields a
    "had no property" mismatch.
    """

    def __init__(self, path, matcher):
        self.matcher = matcher
        self._path = path

    def readable_path(self):
        """Render the key path as a chain of subscripts, e.g. [foo][baz]."""
        readable_path = ']['.join(str(p) for p in self._path)
        return '[{}]'.format(readable_path)

    def _get_value(self, item, path):
        """Walk `path` through nested dicts; None signals an absent path."""
        if not path:
            return item
        elif not item:
            return None
        else:
            head_value = item.get(path[0])
            tail_path = path[1:]
            if not tail_path:
                return head_value
            else:
                return self._get_value(head_value, tail_path)

    def match(self, item):
        actual = self._get_value(item, self._path)
        # Bug fix: compare against None rather than truthiness, so a
        # present-but-falsy value (0, '', False, []) is handed to the inner
        # matcher instead of being reported as a missing property.
        if actual is not None:
            result = self.matcher.match(actual)
            if result.is_match:
                return result
            explanation = "property {} {}".format(self.readable_path(), result.explanation)
            return unmatched(explanation)
        return unmatched("had no property {}".format(self.readable_path()))

    def describe(self):
        return "property {} {}".format(self.readable_path(), self.matcher.describe())
def prop(path, matcher):
    """Match a (possibly nested) dictionary entry against `matcher`.

    Given v = {'foo': {'baz': 2}, 'bar': 1}, these pass:

        assert_that(v, prop(['foo', 'baz'], equal_to(2)))
        assert_that(v, prop(['bar'], equal_to(1)))

    and this fails with an assertion error:

        assert_that(v, prop(['bar'], equal_to(0)))
    """
    return Prop(path, matcher)
def has_value(name, matcher):
    """Match one entry of a dictionary (plain values are coerced to equal_to).

    Given v = {'foo': 0, 'bar': 1}, these pass:

        assert_that(v, has_value('foo', 0))
        assert_that(v, has_value('bar', 1))
        assert_that(v, has_value('foo', not_(equal_to(1))))

    and these fail with an assertion error:

        assert_that(v, has_value('foo', 1))
        assert_that(v, has_value('bar', 42))
        assert_that(v, has_value('foo', not_(equal_to(0))))
    """
    return HasValue(name, to_matcher(matcher))
class HasValue(Matcher):
    """Matcher asserting a mapping contains `name` and that its value
    satisfies the wrapped matcher."""

    def __init__(self, name, matcher):
        self._name = name
        self._matcher = matcher

    def match(self, actual):
        # Guard clause: missing key is its own mismatch.
        if self._name not in actual:
            return unmatched("was missing value '{0}'".format(self._name))
        value_result = self._matcher.match(actual.get(self._name))
        if value_result.is_match:
            return matched()
        return unmatched("value '{0}' {1}".format(self._name, value_result.explanation))

    def describe(self):
        return "object with value {0}: {1}".format(self._name, self._matcher.describe())
def has_values(**kwargs):
    """Match multiple dictionary entries at once, one keyword per entry.

    Given v = {'foo': 0, 'bar': 1}, these pass:

        assert_that(v, has_values(foo=0, bar=1))
        assert_that(v, has_values(bar=1))
        assert_that(v, has_values(foo=not_(equal_to(1))))

    and these fail with an assertion error:

        assert_that(v, has_values(foo=1, bar=1))
        assert_that(v, has_values(foo=not_(equal_to(0))))
    """
    return HasValues(kwargs.items())
class HasValues(Matcher):
    """Matcher combining one has_value matcher per expected entry; reports
    the first entry that fails to match."""

    def __init__(self, matchers):
        self._matchers = [has_value(name, matcher) for name, matcher in matchers]

    def match(self, actual):
        for value_matcher in self._matchers:
            result = value_matcher.match(actual)
            if not result.is_match:
                return result
        return matched()

    def describe(self):
        descriptions = (
            "{0}: {1}".format(m._name, m._matcher.describe())
            for m in self._matchers
        )
        return "object with values:{0}".format(indented_list(descriptions))
|
{
"content_hash": "b0b377312c9004a735b87a42ea4442ae",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 99,
"avg_line_length": 30.350746268656717,
"alnum_prop": 0.5815097123186624,
"repo_name": "gsantovena/marathon",
"id": "9b16e068158f3d7ab5160fdaff38735467bf4368",
"size": "4067",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/shakedown/shakedown/matcher/property.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "59278"
},
{
"name": "Dockerfile",
"bytes": "6958"
},
{
"name": "Groovy",
"bytes": "17238"
},
{
"name": "HTML",
"bytes": "16356"
},
{
"name": "Java",
"bytes": "36549"
},
{
"name": "Liquid",
"bytes": "1484"
},
{
"name": "Makefile",
"bytes": "9396"
},
{
"name": "Python",
"bytes": "425193"
},
{
"name": "RAML",
"bytes": "356"
},
{
"name": "Ruby",
"bytes": "772"
},
{
"name": "Scala",
"bytes": "4614713"
},
{
"name": "Shell",
"bytes": "47966"
}
],
"symlink_target": ""
}
|
from flask.ext import wtf
import flask
import wtforms
import auth
import config
import model
import util
import task
from main import app
###############################################################################
# Profile View
###############################################################################
@app.route('/admin/profile/')
@auth.login_required
def profile():
  """Render the signed-in user's profile page."""
  user_db = auth.current_user_db()
  return flask.render_template(
    'admin/profile/profile.html',
    title=user_db.name,
    html_class='profile-view',
    user_db=user_db,
  )
###############################################################################
# Profile Update
###############################################################################
class ProfileUpdateForm(wtf.Form):
  """Form for editing the current user's name and email.

  Email uniqueness is checked separately in the profile_update view.
  """
  name = wtforms.StringField(
    model.User.name._verbose_name,
    [wtforms.validators.required()], filters=[util.strip_filter],
  )
  email = wtforms.StringField(
    model.User.email._verbose_name,
    [wtforms.validators.optional(), wtforms.validators.email()],
    filters=[util.email_filter],
  )
@app.route('/profile/update/', methods=['GET', 'POST'])
@auth.login_required
def profile_update():
  """Let the signed-in user edit their name and email.

  Changing the email (or having no verification token yet) marks the account
  unverified and queues a verification email before saving.
  """
  user_db = auth.current_user_db()
  form = ProfileUpdateForm(obj=user_db)
  if form.validate_on_submit():
    email = form.email.data
    if email and not user_db.is_email_available(email, user_db.key):
      form.email.errors.append('This email is already taken.')
    if not form.errors:
      # Re-verify whenever the user never had a token or the email changed.
      send_verification = not user_db.token or user_db.email != email
      form.populate_obj(user_db)
      if send_verification:
        user_db.verified = False
        task.verify_email_notification(user_db)
      user_db.put()
      return flask.redirect(flask.url_for('profile'))
  return flask.render_template(
    'admin/profile/profile_update.html',
    title=user_db.name,
    html_class='profile-update',
    form=form,
    user_db=user_db,
  )
###############################################################################
# Profile Password
###############################################################################
class ProfilePasswordForm(wtf.Form):
  """Form for setting or changing the account password.

  NOTE(review): both fields are StringField rather than PasswordField, so the
  browser will not mask the input -- confirm this is intended.
  """
  old_password = wtforms.StringField(
    'Old Password', [wtforms.validators.optional()],
  )
  new_password = wtforms.StringField(
    'New Password',
    [wtforms.validators.required(), wtforms.validators.length(min=6)]
  )
@app.route('/profile/password/', methods=['GET', 'POST'])
@auth.login_required
def profile_password():
  """Let the signed-in user set or change their password.

  Aborts with HTTP 418 when email/password authentication is disabled in the
  app config. If the account already has a password, the correct current
  password is required before a change is accepted.
  """
  if not config.CONFIG_DB.has_email_authentication:
    flask.abort(418)
  user_db = auth.current_user_db()
  form = ProfilePasswordForm(obj=user_db)
  if form.validate_on_submit():
    errors = False
    old_password = form.old_password.data
    new_password = form.new_password.data
    if new_password or old_password:
      if user_db.password_hash:
        # Require the correct current password before allowing a change.
        if util.password_hash(user_db, old_password) != user_db.password_hash:
          form.old_password.errors.append('Invalid current password')
          errors = True
      if not errors and old_password and not new_password:
        form.new_password.errors.append('This field is required.')
        errors = True
      if not (form.errors or errors):
        user_db.password_hash = util.password_hash(user_db, new_password)
        flask.flash('Your password has been changed.', category='success')
    if not (form.errors or errors):
      # Persist and bounce back to the profile page on success.
      user_db.put()
      return flask.redirect(flask.url_for('profile'))
  return flask.render_template(
    'admin/profile/profile_password.html',
    title=user_db.name,
    html_class='profile-password',
    form=form,
    user_db=user_db,
  )
|
{
"content_hash": "f5d13fd8b010601ee1226ea0374491a4",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 79,
"avg_line_length": 30.262295081967213,
"alnum_prop": 0.5823401950162513,
"repo_name": "tiberiucorbu/av-website",
"id": "bd2fb37d97b359e4e83a2f28f09b051cd8338df5",
"size": "3709",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/views/admin/profile/profile_controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27743"
},
{
"name": "CoffeeScript",
"bytes": "15532"
},
{
"name": "HTML",
"bytes": "161312"
},
{
"name": "JavaScript",
"bytes": "70055"
},
{
"name": "Python",
"bytes": "158804"
}
],
"symlink_target": ""
}
|
from airflow import DAG
# We need to import the operators used in our tasks
from airflow.operators.bash_operator import BashOperator
# We then import the days_ago function
from airflow.utils.dates import days_ago
from datetime import timedelta
# initializing the default arguments that we'll pass to our DAG
# These defaults apply to every task in the DAG unless a task overrides them.
default_args = {
    'owner': 'airflow',
    'start_date': days_ago(5),  # scheduling/backfill window starts five days ago
    'email': ['airflow@my_first_dag.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,  # retry each failed task once...
    'retry_delay': timedelta(minutes=5),  # ...after a five-minute wait
}
# A DAG named 'first_dag' that the scheduler runs once per day.
my_first_dag = DAG(
    'first_dag',
    default_args=default_args,
    description='Our first DAG',
    schedule_interval=timedelta(days=1),
)
# Two trivial shell tasks attached to the DAG above.
task_1 = BashOperator(
    task_id='first_task',
    bash_command='echo 1',
    dag=my_first_dag,
)
task_2 = BashOperator(
    task_id='second_task',
    bash_command='echo 2',
    dag=my_first_dag,
)
# Declare the dependency: task_2 runs only after task_1 succeeds
# (equivalent to `task_1 >> task_2`).
task_1.set_downstream(task_2)
|
{
"content_hash": "e6b94b79bf252e7534292503c85ece6d",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 63,
"avg_line_length": 24.31578947368421,
"alnum_prop": 0.6807359307359307,
"repo_name": "andrzejsydor/docker",
"id": "9f4f2a1033caec6149852ae414dd41bb43c782ab",
"size": "966",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dockerlands/airflow/dags/my_dag.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1111"
},
{
"name": "HTML",
"bytes": "4"
},
{
"name": "Java",
"bytes": "1492"
},
{
"name": "JavaScript",
"bytes": "230"
},
{
"name": "Shell",
"bytes": "1892"
},
{
"name": "TSQL",
"bytes": "435"
}
],
"symlink_target": ""
}
|
"""Deployment Manager V2 manifests sub-group."""
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
class Manifests(base.Group):
  """Commands for Deployment Manager V2 manifests.
  Commands to list and examine manifests within a deployment.
  """
  detailed_help = {
      'DESCRIPTION': '{description}',
      'EXAMPLES': """\
          To view all details about a manifest, run:
            $ {command} describe manifest-name --deployment my-deployment
          To see the list of all manifests in a deployment, run:
            $ {command} list --deployment my-deployment
          """,
  }
  @staticmethod
  def Args(parser):
    """Args is called by calliope to gather arguments for this command.
    Args:
      parser: An argparse parser that you can use to add arguments that go
          on the command line after this command. Positional arguments are
          allowed.
    """
    parser.add_argument('--deployment', help='Deployment name')
  def Filter(self, unused_tool_context, args):
    # Every subcommand in this group requires --deployment, so validate it
    # once here before any subcommand runs.
    if not args.deployment:
      raise exceptions.ToolException('argument --deployment is required')
|
{
"content_hash": "d8cdf774ceeff1ee246ea6e424cdb2eb",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 74,
"avg_line_length": 29.53846153846154,
"alnum_prop": 0.6744791666666666,
"repo_name": "wemanuel/smry",
"id": "56c31c6b27d9d7e1d0662c2572b567844e6dddeb",
"size": "1203",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "server-auth/ls/google-cloud-sdk/lib/googlecloudsdk/deployment_manager/commands/manifests/__init__.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3990"
},
{
"name": "Groff",
"bytes": "1221174"
},
{
"name": "HTML",
"bytes": "1873470"
},
{
"name": "JavaScript",
"bytes": "2192"
},
{
"name": "Makefile",
"bytes": "6032"
},
{
"name": "PHP",
"bytes": "16660"
},
{
"name": "Python",
"bytes": "47139164"
},
{
"name": "Shell",
"bytes": "37102"
},
{
"name": "SourcePawn",
"bytes": "1160"
}
],
"symlink_target": ""
}
|
"""Urls for news app."""
from django.conf.urls import url
from news import views
# Route table for the news app: list, detail, create, update and search views.
# Names ('list', 'detail', ...) are the reverse()/{% url %} handles.
urlpatterns = [
    url(r'^$', views.NewsListView.as_view(), name='list'),
    url(r'^(?P<pk>\d+)/$', views.NewsDetailView.as_view(), name='detail'),
    url(r'^create/$', views.NewsCreateView.as_view(), name='create'),
    url(r'^(?P<pk>\d+)/update/$', views.NewsUpdateView.as_view(), name='update'),
    url(r'^search/$', views.NewsSearchView.as_view(), name='search')
]
|
{
"content_hash": "67e14f59dd2c78a81ce778b25fe0da38",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 81,
"avg_line_length": 38,
"alnum_prop": 0.631578947368421,
"repo_name": "Atlases/qnews",
"id": "3699388bfe6eae13b021621900995cd89948562b",
"size": "456",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "news/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5560"
},
{
"name": "Python",
"bytes": "10957"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import object
import os
import shutil
import tarfile
import io
import boto3
import botocore
from typing import Optional
from typing import Text
from rasa_nlu.config import RasaNLUConfig
def get_persistor(config):
    # type: (RasaNLUConfig) -> Optional[Persistor]
    """Return the persistor instance selected by ``config['storage']``.

    Supports 'aws' and 'gcs'; any other value yields None. Raises KeyError
    when no 'storage' key is configured at all.
    """
    if 'storage' not in config:
        raise KeyError("No persistent storage specified. Supported values are {}".format(", ".join(['aws', 'gcs'])))
    storage = config['storage']
    if storage == 'aws':
        return AWSPersistor(config['path'], config['aws_region'],
                            config['bucket_name'], config['aws_endpoint_url'])
    if storage == 'gcs':
        return GCSPersistor(config['path'], config['bucket_name'])
    return None
class Persistor(object):
    """Abstract base for model storage backends.

    Concrete subclasses upload model tarballs to some remote store and
    fetch them back on demand.
    """
    def fetch_and_extract(self, filename):
        # type: (Text) -> None
        """Download a previously persisted model archive and unpack it."""
        raise NotImplementedError("")
    def save_tar(self, target_dir):
        # type: (Text) -> None
        """Upload the model found in `target_dir` to the storage backend."""
        raise NotImplementedError("")
class AWSPersistor(Persistor):
    """Store models on S3 and fetch them when needed instead of storing them on the local disk."""
    def __init__(self, data_dir, aws_region, bucket_name, endpoint_url):
        # type: (Text, Text, Text, Text) -> None
        """Connect to S3 and make sure the target bucket exists.

        data_dir: local directory that fetched models are extracted into.
        """
        Persistor.__init__(self)
        self.data_dir = data_dir
        self.s3 = boto3.resource('s3', region_name=aws_region, endpoint_url=endpoint_url)
        self.bucket_name = bucket_name
        try:
            self.s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={'LocationConstraint': aws_region})
        except botocore.exceptions.ClientError:
            pass # bucket already exists
        self.bucket = self.s3.Bucket(bucket_name)
    def save_tar(self, target_dir):
        # type: (Text) -> None
        """Uploads a model persisted in the `target_dir` to s3."""
        if not os.path.isdir(target_dir):
            raise ValueError("Target directory '{}' not found.".format(target_dir))
        base_name = os.path.basename(target_dir)
        base_dir = os.path.dirname(target_dir)
        # the .tar.gz is written to the current working directory
        tarname = shutil.make_archive(base_name, 'gztar', root_dir=base_dir, base_dir=base_name)
        filekey = os.path.basename(tarname)
        # Bug fix: open the archive in a `with` block so the file handle is
        # closed after the upload; the original leaked it via put(Body=open(...)).
        with open(tarname, 'rb') as f:
            self.s3.Object(self.bucket_name, filekey).put(Body=f)
    def fetch_and_extract(self, filename):
        # type: (Text) -> None
        """Downloads a model that has previously been persisted to s3."""
        with io.open(filename, 'wb') as f:
            self.bucket.download_fileobj(filename, f)
        # NOTE(review): extractall() trusts the archive's member paths — fine
        # for self-produced models, unsafe for untrusted archives. TODO confirm.
        with tarfile.open(filename, "r:gz") as tar:
            tar.extractall(self.data_dir)
class GCSPersistor(Persistor):
    """Store models on Google Cloud Storage and fetch them when needed instead of storing them on the local disk."""
    def __init__(self, data_dir, bucket_name):
        # Connect to GCS and make sure the target bucket exists; a Conflict
        # from create_bucket just means it is already there.
        Persistor.__init__(self)
        from google.cloud import storage
        from google.cloud import exceptions
        self.data_dir = data_dir
        self.bucket_name = bucket_name
        self.storage_client = storage.Client()
        try:
            self.storage_client.create_bucket(bucket_name)
        except exceptions.Conflict:
            # bucket exists
            pass
        self.bucket = self.storage_client.bucket(bucket_name)
    def save_tar(self, target_dir):
        # type: (Text) -> None
        """Uploads a model persisted in the `target_dir` to GCS."""
        if not os.path.isdir(target_dir):
            raise ValueError('target_dir %r not found.' % target_dir)
        base_name = os.path.basename(target_dir)
        base_dir = os.path.dirname(target_dir)
        # the .tar.gz is written to the current working directory
        tarname = shutil.make_archive(base_name, 'gztar', root_dir=base_dir, base_dir=base_name)
        filekey = os.path.basename(tarname)
        blob = self.bucket.blob(filekey)
        blob.upload_from_filename(tarname)
    def fetch_and_extract(self, filename):
        # type: (Text) -> None
        """Downloads a model that has previously been persisted to GCS."""
        blob = self.bucket.blob(filename)
        blob.download_to_filename(filename)
        # NOTE(review): extractall() trusts member paths inside the archive —
        # acceptable for self-produced tarballs, unsafe for untrusted input.
        with tarfile.open(filename, "r:gz") as tar:
            tar.extractall(self.data_dir)
|
{
"content_hash": "9cb78e5f5b7e9942a5471507d4c55b4b",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 116,
"avg_line_length": 37.5609756097561,
"alnum_prop": 0.6426406926406927,
"repo_name": "PHLF/rasa_nlu",
"id": "5a37040a6b5504bdfddbb1087920aa10f80c27c6",
"size": "4620",
"binary": false,
"copies": "1",
"ref": "refs/heads/flask_to_klein_no_agents",
"path": "rasa_nlu/persistor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "224409"
},
{
"name": "Shell",
"bytes": "1503"
}
],
"symlink_target": ""
}
|
import abc
import logging
import luigi
import luigi.hadoop
from luigi.target import FileSystemTarget, FileAlreadyExists
import os
import subprocess
import tempfile
from luigi.task import flatten
# Module-level logger used throughout this file.
logger = logging.getLogger('luigi-interface')
class HiveCommandError(RuntimeError):
    """Raised when an invocation of the ``hive`` CLI fails.

    Carries the failure message plus the captured stdout/stderr of the run.
    """
    def __init__(self, message, out=None, err=None):
        super(HiveCommandError, self).__init__(message, out, err)
        self.err = err
        self.out = out
        self.message = message
def load_hive_cmd():
    # Name/path of the hive binary; overridable via [hive] command in the config.
    return luigi.configuration.get_config().get('hive', 'command', 'hive')
def get_hive_syntax():
    # Which hive dialect to assume ('cdh4' by default, or 'apache'); this
    # selects the default client implementation further down in this file.
    return luigi.configuration.get_config().get('hive', 'release', 'cdh4')
def run_hive(args, check_return_code=True):
    """Runs `hive` from the command line, passing in the given args, and
    returns stdout.
    With the apache release of Hive, some of the table existence checks
    (which are done using DESCRIBE) do not exit with a return code of 0,
    so we need an option to ignore the return code and just return stdout
    for parsing.
    """
    cmd = [load_hive_cmd()] + args
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    # Raise (with captured output attached) only when the caller cares
    # about the exit status.
    if check_return_code and p.returncode != 0:
        raise HiveCommandError("Hive command: {0} failed with error code: {1}".format(" ".join(cmd), p.returncode),
                               stdout, stderr)
    return stdout
def run_hive_cmd(hivecmd, check_return_code=True):
    """Execute a single inline hive query (`hive -e ...`) and return its stdout."""
    args = ['-e', hivecmd]
    return run_hive(args, check_return_code)
def run_hive_script(script):
    """Execute a hive script file (`hive -f ...`) and return its stdout."""
    if os.path.isfile(script):
        return run_hive(['-f', script])
    raise RuntimeError("Hive script: {0} does not exist.".format(script))
class HiveClient(object): # interface
    """Abstract interface for querying Hive metadata: table/partition
    locations, schemas, and existence checks."""
    __metaclass__ = abc.ABCMeta
    # NOTE(review): the mutable default `partition={}` is shared across calls;
    # it is only read here, but confirm implementations never mutate it.
    @abc.abstractmethod
    def table_location(self, table, database='default', partition={}):
        """
        Returns location of db.table (or db.table.partition). partition is a dict of partition key to
        value.
        """
        pass
    @abc.abstractmethod
    def table_schema(self, table, database='default'):
        """ Returns list of [(name, type)] for each column in database.table """
        pass
    @abc.abstractmethod
    def table_exists(self, table, database='default', partition={}):
        """
        Returns true iff db.table (or db.table.partition) exists. partition is a dict of partition key to
        value.
        """
        pass
    @abc.abstractmethod
    def partition_spec(self, partition):
        """ Turn a dict into a string partition specification """
        pass
class HiveCommandClient(HiveClient):
    """ Uses `hive` invocations to find information """
    def table_location(self, table, database='default', partition={}):
        # "describe formatted" prints a "Location:\t<path>" line that we scrape;
        # returns None implicitly when no such line is found.
        cmd = "use {0}; describe formatted {1}".format(database, table)
        if partition:
            cmd += " PARTITION ({0})".format(self.partition_spec(partition))
        stdout = run_hive_cmd(cmd)
        for line in stdout.split("\n"):
            if "Location:" in line:
                return line.split("\t")[1]
    def table_exists(self, table, database='default', partition={}):
        # Existence is inferred from hive's textual output, not its exit code.
        if not partition:
            stdout = run_hive_cmd('use {0}; describe {1}'.format(database, table))
            return not "does not exist" in stdout
        else:
            # "show partitions ... partition (...)" prints nothing when missing.
            stdout = run_hive_cmd("""use %s; show partitions %s partition
                        (%s)""" % (database, table, self.partition_spec(partition)))
            if stdout:
                return True
            else:
                return False
    def table_schema(self, table, database='default'):
        # Returns [(name, type, ...), ...] parsed from "describe" output,
        # or None when the table is missing.
        describe = run_hive_cmd("use {0}; describe {1}".format(database, table))
        if not describe or "does not exist" in describe:
            return None
        return [tuple([x.strip() for x in line.strip().split("\t")]) for line in describe.strip().split("\n")]
    def partition_spec(self, partition):
        """ Turns a dict into a Hive partition specification string """
        return ','.join(["{0}='{1}'".format(k, v) for (k, v) in partition.items()])
class ApacheHiveCommandClient(HiveCommandClient):
    """
    A subclass for the HiveCommandClient to (in some cases) ignore the return code from
    the hive command so that we can just parse the output.
    """
    def table_exists(self, table, database='default', partition={}):
        if not partition:
            # Hive 0.11 returns 17 as the exit status if the table does not exist.
            # The actual message is: [Error 10001]: Table not found tablename
            # stdout is empty and an error message is returned on stderr.
            # This is why we can't check the return code on this command and
            # assume if stdout is empty that the table doesn't exist.
            stdout = run_hive_cmd('use {0}; describe {1}'.format(database, table), False)
            if stdout:
                return not "Table not found" in stdout
            else:
                # Hive returned a non-zero exit status and printed its output to stderr not stdout
                return False
        else:
            # Partition checks also ignore the exit code; empty stdout means
            # the partition does not exist.
            stdout = run_hive_cmd("""use %s; show partitions %s partition
                        (%s)""" % (database, table, self.partition_spec(partition)), False)
            if stdout:
                return True
            else:
                return False
    def table_schema(self, table, database='default'):
        # Same as the parent, but the apache release reports a missing table
        # with "Table not found" (and a non-zero exit code, hence False below).
        describe = run_hive_cmd("use {0}; describe {1}".format(database, table), False)
        if not describe or "Table not found" in describe:
            return None
        return [tuple([x.strip() for x in line.strip().split("\t")]) for line in describe.strip().split("\n")]
class MetastoreClient(HiveClient):
    """HiveClient backed by direct thrift calls to the metastore instead of
    shelling out to the hive CLI."""
    def table_location(self, table, database='default', partition={}):
        """Return the storage location of the table (or of one partition)."""
        with HiveThriftContext() as client:
            if not partition:
                thrift_table = client.get_table(database, table)
            else:
                thrift_table = client.get_partition_by_name(database, table, self.partition_spec(partition))
            return thrift_table.sd.location
    def table_exists(self, table, database='default', partition={}):
        """True iff the table (or the given partition of it) is registered."""
        with HiveThriftContext() as client:
            if partition:
                # -1 is max_parts, the # of partition names to return (-1 = unlimited)
                return self.partition_spec(partition) in client.get_partition_names(database, table, -1)
            return table in client.get_all_tables(database)
    def table_schema(self, table, database='default'):
        """Return [(column_name, column_type), ...] from the metastore schema."""
        with HiveThriftContext() as client:
            return [(field_schema.name, field_schema.type) for field_schema in client.get_schema(database, table)]
    def partition_spec(self, partition):
        """Render ``partition`` as a '/'-joined key=value path, as thrift expects."""
        parts = ("%s=%s" % (key, value) for (key, value) in partition.items())
        return "/".join(parts)
class HiveThriftContext(object):
    """Context manager for a hive metastore thrift client.

    __enter__ opens a buffered thrift transport (host/port taken from the
    [hive] config section) and returns a ThriftHiveMetastore client;
    __exit__ closes the transport.
    """
    def __enter__(self):
        try:
            from thrift import Thrift
            from thrift.transport import TSocket
            from thrift.transport import TTransport
            from thrift.protocol import TBinaryProtocol
            # Note that this will only work with a CDH release.
            # This uses the thrift bindings generated by the ThriftHiveMetastore service in Beeswax.
            # If using the Apache release of Hive this import will fail.
            from hive_metastore import ThriftHiveMetastore
            config = luigi.configuration.get_config()
            host = config.get('hive', 'metastore_host')
            port = config.getint('hive', 'metastore_port')
            transport = TSocket.TSocket(host, port)
            transport = TTransport.TBufferedTransport(transport)
            protocol = TBinaryProtocol.TBinaryProtocol(transport)
            transport.open()
            self.transport = transport
            return ThriftHiveMetastore.Client(protocol)
        except ImportError as e:
            # Fix: "except ImportError, e" is Python-2-only syntax; "as"
            # is equivalent and also valid on Python 2.6+ and 3.x.
            raise Exception('Could not import Hive thrift library:' + str(e))
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.transport.close()
# Pick the client implementation matching the configured hive release
# (see get_hive_syntax); `client` is kept as a module-level alias.
if get_hive_syntax() == "apache":
    default_client = ApacheHiveCommandClient()
else:
    default_client = HiveCommandClient()
client = default_client
def _deprecated(message):
import warnings
warnings.warn(message=message, category=DeprecationWarning, stacklevel=2)
# Deprecated module-level wrappers kept for backwards compatibility;
# each one warns and then delegates to default_client.
def table_location(**kwargs):
    """ Deprecated. Use an instance of client instead and call client.table_location """
    _deprecated("luigi.hive.table_location is deprecated and will be removed soon, use hive.default_client or create a client instead")
    return default_client.table_location(**kwargs)
def table_exists(**kwargs):
    """ Deprecated. Use an instance of client instead and call client.table_exists """
    _deprecated("luigi.hive.table_exists is deprecated and will be removed soon, use hive.default_client or create a client instead")
    return default_client.table_exists(**kwargs)
def table_schema(**kwargs):
    """ Deprecated. Use an instance of client instead and call client.table_schema """
    _deprecated("luigi.hive.table_schema is deprecated and will be removed soon, use hive.default_client or create a client instead")
    return default_client.table_schema(**kwargs)
def partition_spec(**kwargs):
    """ Deprecated. Use an instance of client instead and call client.partition_spec """
    _deprecated("luigi.hive.partition_spec is deprecated and will be removed soon, use hive.default_client or create a client instead")
    return default_client.partition_spec(**kwargs)
class HiveQueryTask(luigi.hadoop.BaseHadoopJobTask):
    """ Task to run a hive query """
    # by default, we let hive figure these out.
    n_reduce_tasks = None
    bytes_per_reducer = None
    reducers_max = None
    @abc.abstractmethod
    def query(self):
        """ Text of query to run in hive """
        raise RuntimeError("Must implement query!")
    def hiverc(self):
        """ Location of an rc file to run before the query
        if hiverc-location key is specified in client.cfg, will default to the value there
        otherwise returns None
        """
        return luigi.configuration.get_config().get('hive', 'hiverc-location', default=None)
    def hiveconfs(self):
        """
        Returns a dict of key=value settings to be passed along
        to the hive command line via --hiveconf. By default, sets
        mapred.job.name to task_id and if not None, sets:
        * mapred.reduce.tasks (n_reduce_tasks)
        * mapred.fairscheduler.pool (pool) or mapred.job.queue.name (pool)
        * hive.exec.reducers.bytes.per.reducer (bytes_per_reducer)
        * hive.exec.reducers.max (reducers_max)
        """
        jcs = {}
        jcs['mapred.job.name'] = self.task_id
        if self.n_reduce_tasks is not None:
            jcs['mapred.reduce.tasks'] = self.n_reduce_tasks
        if self.pool is not None:
            # Supporting two schedulers: fair (default) and capacity using the same option
            scheduler_type = luigi.configuration.get_config().get('hadoop', 'scheduler', 'fair')
            if scheduler_type == 'fair':
                jcs['mapred.fairscheduler.pool'] = self.pool
            elif scheduler_type == 'capacity':
                jcs['mapred.job.queue.name'] = self.pool
        if self.bytes_per_reducer is not None:
            jcs['hive.exec.reducers.bytes.per.reducer'] = self.bytes_per_reducer
        if self.reducers_max is not None:
            jcs['hive.exec.reducers.max'] = self.reducers_max
        return jcs
    def job_runner(self):
        # Hive tasks are executed by shelling out to the hive CLI (see below).
        return HiveQueryRunner()
class HiveQueryRunner(luigi.hadoop.JobRunner):
    """ Runs a HiveQueryTask by shelling out to hive """
    def prepare_outputs(self, job):
        """ Called before job is started
        If output is a `FileSystemTarget`, create parent directories so the hive command won't fail
        """
        outputs = flatten(job.output())
        for o in outputs:
            if isinstance(o, FileSystemTarget):
                parent_dir = os.path.dirname(o.path)
                if parent_dir and not o.fs.exists(parent_dir):
                    logger.info("Creating parent directory %r", parent_dir)
                    try:
                        # there is a possible race condition
                        # which needs to be handled here
                        o.fs.mkdir(parent_dir)
                    except FileAlreadyExists:
                        # another worker created it between exists() and mkdir()
                        pass
    def run_job(self, job):
        """Materialize the query to a temp file and run it via the hive CLI."""
        self.prepare_outputs(job)
        with tempfile.NamedTemporaryFile() as f:
            f.write(job.query())
            f.flush()
            arglist = [load_hive_cmd(), '-f', f.name]
            if job.hiverc():
                arglist += ['-i', job.hiverc()]
            if job.hiveconfs():
                # Fix: .items() instead of the Python-2-only .iteritems();
                # iteration behavior is identical on Python 2 and 3.
                for k, v in job.hiveconfs().items():
                    arglist += ['--hiveconf', '{0}={1}'.format(k, v)]
            logger.info(arglist)
            return luigi.hadoop.run_and_track_hadoop_job(arglist)
class HiveTableTarget(luigi.Target):
    """Target that exists iff the given Hive table exists."""
    def __init__(self, table, database='default', client=default_client):
        self.database = database
        self.table = table
        self.hive_cmd = load_hive_cmd()
        self.client = client
    def exists(self):
        """True iff `database.table` exists according to the client."""
        logger.debug("Checking Hive table '%s.%s' exists", self.database, self.table)
        return self.client.table_exists(self.table, self.database)
    @property
    def path(self):
        """Returns the path to this table in HDFS"""
        location = self.client.table_location(self.table, self.database)
        if not location:
            raise Exception("Couldn't find location for table: {0}".format(str(self)))
        return location
    def open(self, mode):
        # Bug fix: the original *returned* a NotImplementedError instance
        # instead of raising it, so callers silently received an exception
        # object rather than an error.
        raise NotImplementedError("open() is not supported for HiveTableTarget")
class HivePartitionTarget(luigi.Target):
    """Target that exists iff the given partition of a Hive table exists."""
    def __init__(self, table, partition, database='default', fail_missing_table=True, client=default_client):
        self.database = database
        self.table = table
        self.partition = partition
        self.client = client
        # When False, a missing *table* is reported as a missing partition
        # instead of propagating HiveCommandError.
        self.fail_missing_table = fail_missing_table
    def exists(self):
        try:
            logger.debug("Checking Hive table '{d}.{t}' for partition {p}".format(d=self.database, t=self.table, p=str(self.partition)))
            return self.client.table_exists(self.table, self.database, self.partition)
        except HiveCommandError:
            # Fix: "except HiveCommandError, e" is Python-2-only syntax and the
            # bound exception was unused, so the binding is dropped here.
            if self.fail_missing_table:
                raise
            else:
                if self.client.table_exists(self.table, self.database):
                    # a real error occurred
                    raise
                else:
                    # oh the table just doesn't exist
                    return False
    @property
    def path(self):
        """Returns the path for this HiveTablePartitionTarget's data"""
        location = self.client.table_location(self.table, self.database, self.partition)
        if not location:
            raise Exception("Couldn't find location for table: {0}".format(str(self)))
        return location
    def open(self, mode):
        # Bug fix: the original returned the exception instead of raising it.
        raise NotImplementedError("open() is not supported for HivePartitionTarget")
class ExternalHiveTask(luigi.ExternalTask):
    """ External task that depends on a Hive table/partition """
    database = luigi.Parameter(default='default')
    table = luigi.Parameter()
    # since this is an external task and will never be initialized from the CLI, partition can be any python object, in this case a dictionary
    partition = luigi.Parameter(default=None, description='Python dictionary specifying the target partition e.g. {"date": "2013-01-25"}')
    def output(self):
        """Return a partition target when a partition is given, else a table target."""
        if self.partition is not None:
            # NOTE(review): assert is stripped under `python -O`; consider an
            # explicit raise if empty partitions must be rejected in production.
            assert self.partition, "partition required"
            return HivePartitionTarget(table=self.table,
                                       partition=self.partition,
                                       database=self.database)
        else:
            return HiveTableTarget(self.table, self.database)
|
{
"content_hash": "b9dbcfa8340f4238c354081965ed3df7",
"timestamp": "",
"source": "github",
"line_count": 416,
"max_line_length": 142,
"avg_line_length": 40.01923076923077,
"alnum_prop": 0.6225973089860644,
"repo_name": "cpcloud/luigi",
"id": "c4dfb0f30f7bb0287560d47259c0b95a2441b38c",
"size": "17189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "luigi/hive.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "35864"
},
{
"name": "Python",
"bytes": "596754"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger
class RequestConfig(object):
    """
    Configure a table from data found on an HTTP request.

    One RequestConfig may configure several tables within a single view.

    Arguments:
        paginate (dict or bool): whether to paginate, and with what defaults.
            A falsy value disables pagination entirely. A `dict` supplies
            default keyword arguments for `~.tables.Table.paginate` (e.g. a
            default `per_page`). A special *silent* item enables automatic
            handling of pagination exceptions:

            - `~django.core.paginator.PageNotAnInteger` -> show the first page.
            - `~django.core.paginator.EmptyPage` -> show the last page.
    """
    def __init__(self, request, paginate=True):
        self.request = request
        self.paginate = paginate
    def configure(self, table):
        """
        Apply ordering and pagination options found on the request to *table*.

        Arguments:
            table (`~.Table`): table to be configured
        """
        order_by = self.request.GET.getlist(table.prefixed_order_by_field)
        if order_by:
            table.order_by = order_by
        if not self.paginate:
            return
        # dict-like paginate values carry default paginate() kwargs.
        kwargs = dict(self.paginate) if hasattr(self.paginate, 'items') else {}
        # Request-supplied values (if parseable as int) override the defaults.
        for option in ('page', 'per_page'):
            field = getattr(table, 'prefixed_%s_field' % option)
            try:
                kwargs[option] = int(self.request.GET[field])
            except (ValueError, KeyError):
                pass
        if not kwargs.pop('silent', True):
            table.paginate(**kwargs)
            return
        try:
            table.paginate(**kwargs)
        except PageNotAnInteger:
            table.page = table.paginator.page(1)
        except EmptyPage:
            table.page = table.paginator.page(table.paginator.num_pages)
|
{
"content_hash": "1ca8df1fba1a7612cd08e095412f403b",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 80,
"avg_line_length": 36.215384615384615,
"alnum_prop": 0.5632965165675446,
"repo_name": "vicky2135/lucious",
"id": "98a4f0b0bd60a055eedb2ef32165c8e74bb6947d",
"size": "2370",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oscar/lib/python2.7/site-packages/django_tables2/config.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "896683"
},
{
"name": "C++",
"bytes": "52230"
},
{
"name": "CSS",
"bytes": "1169533"
},
{
"name": "HTML",
"bytes": "1104983"
},
{
"name": "JavaScript",
"bytes": "1055140"
},
{
"name": "Makefile",
"bytes": "145238"
},
{
"name": "Python",
"bytes": "55993261"
},
{
"name": "Shell",
"bytes": "40487"
}
],
"symlink_target": ""
}
|
import sys, os, glob, re, subprocess
from collections import OrderedDict
# Parse the simple positional flags:
#   test     -> debug build (-g instead of -O3 in fac_rule)
#   whide    -> append the "ignored warning" suppression flags
#   no-build -> only regenerate the .fac file; don't invoke fac at the end
testing_mode = False
hide_warnings = False
build = True
for arg in sys.argv[1:]:
    if arg.lower() == "test":
        testing_mode = True
    elif arg.lower() == "whide":
        hide_warnings = True
    elif arg.lower() == "no-build":
        build = False
    else:
        # typo fix: "useage" -> "usage"
        print("usage: {} [test] [whide] [no-build]".format(sys.argv[0]))
        exit(1)
# Output binary and marker files holding local dependency locations.
executable = "simulate.exe"
eigen_dirs = ".eigen-dirs"
boost_dir = ".boost-dir"
mkl_root = ".mkl-root"
ignore_dirs = [ "~/.ccache/" ]
# Compiler flags shared by every compile/link rule.
language_standard_flag = "-std=c++11"
warning_flags = "-Wall -Werror"
link_time_optimization_flag = "-flto"
common_flags = [ language_standard_flag,
                 warning_flags,
                 link_time_optimization_flag ]
debug_flag = "-g"
optimization_flag = "-O3"
ignored_warning_flags = [ "-Wno-unused-variable",
                          "-Wno-unused-but-set-variable",
                          "-Wno-unused-local-typedefs" ]
# MKL link line; $(cat ...) is expanded by the shell when fac runs the rule.
mkl_flags = ("-Wl,--no-as-needed,-rpath=$(cat {0})/lib/intel64/" + \
             " -L $(cat {0})/lib/intel64/ -lmkl_intel_lp64 -lmkl_core" + \
             " -lmkl_gnu_thread -lpthread -lm -ldl -fopenmp -m64" + \
             " -I $(cat {0})/include/").format(mkl_root)
# Map of "#include/#define tag seen in a .cpp file" ->
# [compiler flags, optional extra file dependency].
lib_flags = OrderedDict()
lib_flags["eigen3"] = ["$(cat {})".format(eigen_dirs), eigen_dirs ]
lib_flags["USE_MKL"] = ["{}".format(mkl_flags), mkl_root]
lib_flags["boost"] = ["-lboost_system"] + \
                     ([boost_dir] if os.path.isfile(boost_dir) else [])
lib_flags["boost/filesystem"] = ["-lboost_filesystem"]
lib_flags["boost/program_options"] = ["-lboost_program_options"]
lib_flags["gsl"] = ["-lgsl"]
# Accumulators filled by the per-file loop below.
fac_text = ""
global_libraries = []
global_dependencies = []
cpp_files = sorted(glob.glob("*.cpp"))
def fac_rule(libraries, file_dependencies, input_files, output_file, link=False):
    """Render one fac build rule: a g++ command line plus its dependency
    ("< "), cache-ignore ("C ") and output ("> ") annotation lines."""
    parts = ["| g++"]
    parts.extend(common_flags)
    # debug vs optimized build is selected by the global testing_mode flag
    parts.append(debug_flag if testing_mode else optimization_flag)
    if hide_warnings:
        parts.extend(ignored_warning_flags)
    if not link:
        parts.append("-c")
    parts.append("-o {}".format(output_file))
    parts.append(" ".join(input_files))
    parts.append(" ".join(libraries))
    command = " ".join(filter(None, parts))
    lines = [command]
    lines.extend("< {}".format(dep) for dep in file_dependencies + input_files)
    lines.extend("C {}".format(ignored) for ignored in ignore_dirs)
    lines.append("> {}".format(output_file))
    return "\n".join(lines) + "\n\n"
# Scan each .cpp file for library tags in #include/#define lines and for
# local "*.h" headers, emit a compile rule per file, then one link rule,
# write the .fac file and (optionally) run fac.
for cpp_file in cpp_files:
    output_file = cpp_file.replace(".cpp",".o")
    libraries = []
    dependencies = []
    with open(cpp_file,'r') as f:
        for line in f:
            if "#include" in line or "#define" in line:
                for tag in lib_flags.keys():
                    if tag in line and lib_flags[tag][0] not in libraries:
                        libraries += [lib_flags[tag][0]]
                        if len(lib_flags[tag]) > 1:
                            dependencies += [lib_flags[tag][1]]
                # NOTE(review): this regex effectively matches a quoted header
                # name ending in .h" — confirm it is intended to skip <...> includes.
                if re.search('"*\.h"',line):
                    dependencies += [line.split('"')[-2]]
    fac_text += fac_rule(libraries, dependencies, [cpp_file], output_file)
    # accumulate the union of libraries/dependencies for the link rule
    for library in libraries:
        if library not in global_libraries:
            global_libraries += [library]
    for dependency in dependencies:
        if dependency not in global_dependencies:
            global_dependencies += [dependency]
compiled_binaries = [ cpp_file.replace(".cpp",".o") for cpp_file in cpp_files ]
fac_text += fac_rule(global_libraries, global_dependencies,
                     compiled_binaries, executable, link = True)
# extra rule: regenerate TAGS via etags after a successful link
fac_text += "| etags *.cpp *.h\n< {}\n> TAGS\n".format(executable)
with open(".{}".format(executable.replace(".exe",".fac")),"w") as f:
    f.write(fac_text)
if build: exit(subprocess.call(["fac"]))
|
{
"content_hash": "e5067adc97fb514a27feec5337585db0",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 83,
"avg_line_length": 35.82727272727273,
"alnum_prop": 0.5790408525754884,
"repo_name": "perlinm/qcdg-nv-simulation",
"id": "50434884813a99227359d017fa87419a3deee665",
"size": "3964",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mkfac.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "135516"
},
{
"name": "Python",
"bytes": "25362"
},
{
"name": "Shell",
"bytes": "293"
},
{
"name": "TeX",
"bytes": "86025"
}
],
"symlink_target": ""
}
|
import lldb
import fblldbbase as fb
import fblldbobjcruntimehelpers as runtimeHelpers
def flushCoreAnimationTransaction():
  """Evaluate [CATransaction flush] in the debuggee so pending UI changes render."""
  fb.evaluateEffect('[CATransaction flush]')
def setViewHidden(object, hidden):
  """Set the hidden state on a view/layer expression, then flush so it shows."""
  fb.evaluateEffect('[{} setHidden:{}]'.format(object, int(hidden)))
  flushCoreAnimationTransaction()
def maskView(viewOrLayer, color, alpha):
  """Overlay a translucent colored UIView on top of `viewOrLayer`'s frame.

  The mask is added to the key window and tagged with the masked object's
  pointer so unmaskView can find and remove it later.
  """
  unmaskView(viewOrLayer)
  window = fb.evaluateExpression('(UIWindow *)[[UIApplication sharedApplication] keyWindow]')
  origin = convertPoint(0, 0, viewOrLayer, window)
  size = fb.evaluateExpressionValue('(CGSize)((CGRect)[(id)%s frame]).size' % viewOrLayer)
  rectExpr = '(CGRect){{%s, %s}, {%s, %s}}' % (origin.GetChildMemberWithName('x').GetValue(),
                                               origin.GetChildMemberWithName('y').GetValue(),
                                               size.GetChildMemberWithName('width').GetValue(),
                                               size.GetChildMemberWithName('height').GetValue())
  mask = fb.evaluateExpression('(id)[[UIView alloc] initWithFrame:%s]' % rectExpr)
  # stash the masked object's pointer in the mask's tag for later lookup
  fb.evaluateEffect('[%s setTag:(NSInteger)%s]' % (mask, viewOrLayer))
  fb.evaluateEffect('[%s setBackgroundColor:[UIColor %sColor]]' % (mask, color))
  fb.evaluateEffect('[%s setAlpha:(CGFloat)%s]' % (mask, alpha))
  fb.evaluateEffect('[%s addSubview:%s]' % (window, mask))
  flushCoreAnimationTransaction()
def unmaskView(viewOrLayer):
  """Remove a mask previously installed by maskView (found via its tag)."""
  window = fb.evaluateExpression('(UIWindow *)[[UIApplication sharedApplication] keyWindow]')
  mask = fb.evaluateExpression('(UIView *)[%s viewWithTag:(NSInteger)%s]' % (window, viewOrLayer))
  fb.evaluateEffect('[%s removeFromSuperview]' % mask)
  flushCoreAnimationTransaction()
def convertPoint(x, y, fromViewOrLayer, toViewOrLayer):
  """Convert the point (x, y) between the coordinate spaces of two views/layers."""
  fromLayer = convertToLayer(fromViewOrLayer)
  toLayer = convertToLayer(toViewOrLayer)
  return fb.evaluateExpressionValue('(CGPoint)[%s convertPoint:(CGPoint){ .x = %s, .y = %s } toLayer:(CALayer *)%s]' % (fromLayer, x, y, toLayer))
def convertToLayer(viewOrLayer):
  """Return a CALayer expression: the argument itself if it is a CALayer,
  otherwise its -layer; raises for anything without a layer."""
  if fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[CALayer class]]' % viewOrLayer):
    return viewOrLayer
  elif fb.evaluateBooleanExpression('[(id)%s respondsToSelector:(SEL)@selector(layer)]' % viewOrLayer):
    return fb.evaluateExpression('(CALayer *)[%s layer]' % viewOrLayer)
  else:
    raise Exception('Argument must be a CALayer, UIView, or NSView.')
def isUIView(obj):
  """True when not targeting macOS and `obj` is a UIView instance."""
  return not runtimeHelpers.isMacintoshArch() and fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[UIView class]]' % obj)
def isNSView(obj):
  """True when targeting macOS and `obj` is an NSView instance."""
  return runtimeHelpers.isMacintoshArch() and fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[NSView class]]' % obj)
def isView(obj):
  """True when `obj` is either platform's view type."""
  return isUIView(obj) or isNSView(obj)
# Generates a BFS of the views tree starting at the given view as root.
# Yields a tuple of the current view in the tree and its level (view, level)
def subviewsOfView(view):
  """Breadth-first generator over the subview tree rooted at `view`.

  Yields (viewExpression, depth) pairs, starting with (view, 0).
  """
  views = [(view, 0)]
  yield views[0]
  while views:
    (view, level) = views.pop(0)
    subviews = fb.evaluateExpression('(id)[%s subviews]' % view)
    subviewsCount = int(fb.evaluateExpression('(int)[(id)%s count]' % subviews))
    # Fix: range instead of the Python-2-only xrange — iterates identically
    # on Python 2 and keeps the script working under Python 3 lldb.
    for i in range(subviewsCount):
      subview = fb.evaluateExpression('(id)[%s objectAtIndex:%i]' % (subviews, i))
      views.append((subview, level+1))
      yield (subview, level+1)
def upwardsRecursiveDescription(view, maxDepth=0):
  """Build a superview-chain description for a UIView/NSView.

  Walks from `view` up through its superviews (at most `maxDepth` levels when
  maxDepth > 0; unlimited otherwise), collecting each view's debugDescription,
  then renders them root-first with increasing " | " indentation.
  Returns None if `view` is not a UIView/NSView or a description is empty.
  """
  if not fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[UIView class]]' % view) and not fb.evaluateBooleanExpression('[(id)%s isKindOfClass:(Class)[NSView class]]' % view):
    return None
  currentView = view
  recursiveDescription = []
  depth = 0
  while currentView and (maxDepth <= 0 or depth <= maxDepth):
    depth += 1
    viewDescription = fb.evaluateExpressionValue('(id)[%s debugDescription]' % (currentView)).GetObjectDescription()
    currentView = fb.evaluateExpression('(void*)[%s superview]' % (currentView))
    try:
      # The superview handle comes back as a textual pointer; "0x0" / "0"
      # means nil, which terminates the walk. int(..., 0) accepts hex.
      if int(currentView, 0) == 0:
        currentView = None
    except:
      # Non-numeric handle (or already None) — treat as end of chain.
      currentView = None
    if viewDescription:
      # Insert at the front so the outermost ancestor ends up first.
      recursiveDescription.insert(0, viewDescription)
      # NOTE(review): this branch is unreachable — viewDescription was just
      # checked truthy, so len() cannot be 0 here. Looks like dead code.
      if not len(viewDescription):
        return None
  currentPrefix = ""
  builder = ""
  for viewDescription in recursiveDescription:
    builder += currentPrefix + viewDescription + "\n"
    currentPrefix += " | "
  return builder
def slowAnimation(speed=1):
  """Set layer.speed on every application window; speed=1 restores normal
  animation speed, values < 1 slow all animations down."""
  fb.evaluateEffect('[[[UIApplication sharedApplication] windows] setValue:@(%s) forKeyPath:@"layer.speed"]' % speed)
|
{
"content_hash": "40ccf84dda67ed6b8b00e7c8ccf03d58",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 185,
"avg_line_length": 42.299065420560744,
"alnum_prop": 0.6944321696862572,
"repo_name": "dopcn/chisel",
"id": "f5f05830507e49bba9ccc5a1ac6743eeefe59cc1",
"size": "4833",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "fblldbviewhelpers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1674"
},
{
"name": "C++",
"bytes": "1588"
},
{
"name": "Makefile",
"bytes": "321"
},
{
"name": "Objective-C",
"bytes": "3841"
},
{
"name": "Objective-C++",
"bytes": "11167"
},
{
"name": "Python",
"bytes": "164249"
}
],
"symlink_target": ""
}
|
"""Get stats about your activity.
Example:
- my_activity.py for stats for the current week (last week on mondays).
- my_activity.py -Q for stats for last quarter.
- my_activity.py -Y for stats for this year.
- my_activity.py -b 4/5/12 for stats since 4/5/12.
- my_activity.py -b 4/5/12 -e 6/7/12 for stats between 4/5/12 and 6/7/12.
"""
# TODO(vadimsh): This script knows too much about ClientLogin and cookies. It
# will stop to work on ~20 Apr 2015.
# These services typically only provide a created time and a last modified time
# for each item for general queries. This is not enough to determine if there
# was activity in a given time period. So, we first query for all things created
# before end and modified after begin. Then, we get the details of each item and
# check those details to determine if there was activity in the given period.
# This means that query time scales mostly with (today() - begin).
import cookielib
import datetime
from datetime import datetime
from datetime import timedelta
from functools import partial
import json
import optparse
import os
import subprocess
import sys
import urllib
import urllib2
import auth
import fix_encoding
import gerrit_util
import rietveld
from third_party import upload
import auth
from third_party import httplib2
try:
from dateutil.relativedelta import relativedelta # pylint: disable=F0401
except ImportError:
print 'python-dateutil package required'
exit(1)
# python-keyring provides easy access to the system keyring.
try:
import keyring # pylint: disable=W0611,F0401
except ImportError:
print 'Consider installing python-keyring'
# Rietveld code-review servers to query. Per instance:
#   supports_owner_modified_query — whether search accepts owner+modified_after
#   requires_auth — skip the instance entirely when no credentials are cached
#   email_domain — appended to bare usernames for owner/reviewer queries
rietveld_instances = [
  {
    'url': 'codereview.chromium.org',
    'shorturl': 'crrev.com',
    'supports_owner_modified_query': True,
    'requires_auth': False,
    'email_domain': 'chromium.org',
  },
  {
    'url': 'chromereviews.googleplex.com',
    'shorturl': 'go/chromerev',
    'supports_owner_modified_query': True,
    'requires_auth': True,
    'email_domain': 'google.com',
  },
  {
    'url': 'codereview.appspot.com',
    'supports_owner_modified_query': True,
    'requires_auth': False,
    'email_domain': 'chromium.org',
  },
  {
    'url': 'breakpad.appspot.com',
    'supports_owner_modified_query': False,
    'requires_auth': False,
    'email_domain': 'chromium.org',
  },
]
# Gerrit servers. An entry with 'url' is queried over the REST API; one with
# 'host'/'port' would use the SSH query interface (see gerrit_search).
gerrit_instances = [
  {
    'url': 'chromium-review.googlesource.com',
    'shorturl': 'crosreview.com',
  },
  {
    'url': 'chrome-internal-review.googlesource.com',
    'shorturl': 'crosreview.com/i',
  },
]
# Google Code issue-tracker projects searched by project_hosting_issue_search.
google_code_projects = [
  {
    'name': 'brillo',
    'shorturl': 'brbug.com',
  },
  {
    'name': 'chromium',
    'shorturl': 'crbug.com',
  },
  {
    'name': 'chromium-os',
    'shorturl': 'crosbug.com',
  },
  {
    'name': 'chrome-os-partner',
  },
  {
    'name': 'google-breakpad',
  },
  {
    'name': 'gyp',
  },
  {
    'name': 'skia',
  },
]
def username(email):
  """Return the part of an email address before '@' (None/'' pass through)."""
  if not email:
    return email
  return email.partition('@')[0]
def datetime_to_midnight(date):
  """Return *date* truncated to 00:00:00.000000 on the same day."""
  return date.replace(hour=0, minute=0, second=0, microsecond=0)
def get_quarter_of(date):
  """Return (begin, end) datetimes bounding the calendar quarter of *date*.

  begin is midnight on the first day of the quarter (Jan/Apr/Jul/Oct 1) and
  end is midnight on the first day of the following quarter.

  Bug fix: the old `relativedelta(months=(date.month % 3) - 1, ...)` produced
  months=-1 for March/June/September/December, shifting begin one month
  FORWARD so the returned range did not contain the input date (e.g. Mar 15
  yielded [Apr 1, Jul 1)). The correct offset is (date.month - 1) % 3; this
  version computes the boundaries directly with stdlib datetime arithmetic.
  Callers pass naive datetimes (datetime.today()).
  """
  quarter_first_month = 3 * ((date.month - 1) // 3) + 1
  begin = datetime(date.year, quarter_first_month, 1)
  if quarter_first_month + 3 > 12:
    end = datetime(date.year + 1, 1, 1)
  else:
    end = datetime(date.year, quarter_first_month + 3, 1)
  return begin, end
def get_year_of(date):
  """Return (begin, end): midnight Jan 1 of *date*'s year and of the next.

  Equivalent to the old relativedelta-based subtraction for the naive
  datetimes callers pass (datetime.today()), but built directly with the
  standard library instead of third-party month arithmetic.
  """
  begin = datetime(date.year, 1, 1)
  return begin, datetime(date.year + 1, 1, 1)
def get_week_of(date):
  """Return (begin, end): midnight Monday of *date*'s week and of the next.

  weekday() is 0 for Monday, so stepping back that many days lands on the
  start of the ISO week; the midnight truncation is inlined here.
  """
  monday = date - timedelta(days=date.weekday())
  begin = monday.replace(hour=0, minute=0, second=0, microsecond=0)
  return begin, begin + timedelta(days=7)
def get_yes_or_no(msg):
  """Prompt until the user answers; 'y'/'yes' -> True, empty/'n'/'no' -> False."""
  prompt = msg + ' yes/no [no] '
  while True:
    answer = raw_input(prompt)
    if answer in ('y', 'yes'):
      return True
    if not answer or answer in ('n', 'no'):
      return False
def datetime_from_gerrit(date_string):
  """Parse a Gerrit timestamp (nanosecond precision; last 3 digits dropped)."""
  gerrit_format = '%Y-%m-%d %H:%M:%S.%f000'
  return datetime.strptime(date_string, gerrit_format)
def datetime_from_rietveld(date_string):
  """Parse a Rietveld timestamp, with or without fractional seconds."""
  try:
    return datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S.%f')
  except ValueError:
    pass
  # Some Rietveld responses omit the milliseconds part entirely.
  return datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S')
def datetime_from_google_code(date_string):
  """Parse an ISO-8601 UTC timestamp as returned by the Google Code API."""
  iso_format = '%Y-%m-%dT%H:%M:%S.%fZ'
  return datetime.strptime(date_string, iso_format)
class MyActivity(object):
  """Collects one user's activity (changes, reviews, issues) across
  Rietveld, Gerrit and Google Code within [options.begin, options.end).

  Each service's raw results are normalized into plain dicts with a common
  shape (header, author, owner, created, modified, replies, ...) so the
  print_* methods can format them uniformly.
  """
  def __init__(self, options):
    self.options = options
    self.modified_after = options.begin
    self.modified_before = options.end
    self.user = options.user
    self.changes = []
    self.reviews = []
    self.issues = []
    self.check_cookies()
    self.google_code_auth_token = None
  # Check the codereview cookie jar to determine which Rietveld instances to
  # authenticate to.
  def check_cookies(self):
    """Mark each Rietveld instance with whether cached credentials exist.

    NOTE(review): filtered_instances is never appended to, so the warning
    below can never fire — it looks like instances whose has_cookie() check
    fails were meant to be collected here. Worth confirming upstream.
    """
    filtered_instances = []
    def has_cookie(instance):
      auth_config = auth.extract_auth_config_from_options(self.options)
      a = auth.get_authenticator_for_host(instance['url'], auth_config)
      return a.has_cached_credentials()
    for instance in rietveld_instances:
      instance['auth'] = has_cookie(instance)
    if filtered_instances:
      print ('No cookie found for the following Rietveld instance%s:' %
          ('s' if len(filtered_instances) > 1 else ''))
      for instance in filtered_instances:
        print '\t' + instance['url']
      print 'Use --auth if you would like to authenticate to them.\n'
  def rietveld_search(self, instance, owner=None, reviewer=None):
    """Query one Rietveld instance for issues owned/reviewed by a user.

    Returns normalized issue dicts, filtered to the activity window and
    sorted by modification time (newest first).
    """
    if instance['requires_auth'] and not instance['auth']:
      return []
    # email='' forces anonymous access; None lets Rietveld pick the account.
    email = None if instance['auth'] else ''
    auth_config = auth.extract_auth_config_from_options(self.options)
    remote = rietveld.Rietveld('https://' + instance['url'], auth_config, email)
    # See def search() in rietveld.py to see all the filters you can use.
    query_modified_after = None
    if instance['supports_owner_modified_query']:
      query_modified_after = self.modified_after.strftime('%Y-%m-%d')
    # Rietveld does not allow search by both created_before and modified_after.
    # (And some instances don't allow search by both owner and modified_after)
    owner_email = None
    reviewer_email = None
    if owner:
      owner_email = owner + '@' + instance['email_domain']
    if reviewer:
      reviewer_email = reviewer + '@' + instance['email_domain']
    issues = remote.search(
        owner=owner_email,
        reviewer=reviewer_email,
        modified_after=query_modified_after,
        with_messages=True)
    # The server-side query is coarse; re-check both window bounds locally.
    issues = filter(
        lambda i: (datetime_from_rietveld(i['created']) < self.modified_before),
        issues)
    issues = filter(
        lambda i: (datetime_from_rietveld(i['modified']) > self.modified_after),
        issues)
    should_filter_by_user = True
    issues = map(partial(self.process_rietveld_issue, instance), issues)
    issues = filter(
        partial(self.filter_issue, should_filter_by_user=should_filter_by_user),
        issues)
    issues = sorted(issues, key=lambda i: i['modified'], reverse=True)
    return issues
  def process_rietveld_issue(self, instance, issue):
    """Normalize one raw Rietveld issue into the common activity dict."""
    ret = {}
    ret['owner'] = issue['owner_email']
    ret['author'] = ret['owner']
    ret['reviewers'] = set(issue['reviewers'])
    shorturl = instance['url']
    if 'shorturl' in instance:
      shorturl = instance['shorturl']
    ret['review_url'] = 'http://%s/%d' % (shorturl, issue['issue'])
    # Rietveld sometimes has '\r\n' instead of '\n'.
    ret['header'] = issue['description'].replace('\r', '').split('\n')[0]
    ret['modified'] = datetime_from_rietveld(issue['modified'])
    ret['created'] = datetime_from_rietveld(issue['created'])
    ret['replies'] = self.process_rietveld_replies(issue['messages'])
    return ret
  @staticmethod
  def process_rietveld_replies(replies):
    """Normalize Rietveld messages to {author, created, content} dicts."""
    ret = []
    for reply in replies:
      r = {}
      r['author'] = reply['sender']
      r['created'] = datetime_from_rietveld(reply['date'])
      r['content'] = ''
      ret.append(r)
    return ret
  @staticmethod
  def gerrit_changes_over_ssh(instance, filters):
    """Run `gerrit query` over SSH and return the parsed JSON records."""
    # See https://review.openstack.org/Documentation/cmd-query.html
    # Gerrit doesn't allow filtering by created time, only modified time.
    gquery_cmd = ['ssh', '-p', str(instance['port']), instance['host'],
                  'gerrit', 'query',
                  '--format', 'JSON',
                  '--comments',
                  '--'] + filters
    (stdout, _) = subprocess.Popen(gquery_cmd, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE).communicate()
    # Drop the last line of the output with the stats.
    issues = stdout.splitlines()[:-1]
    return map(json.loads, issues)
  @staticmethod
  def gerrit_changes_over_rest(instance, filters):
    """Query a Gerrit REST endpoint; returns [] (with a message) on error."""
    # Convert the "key:value" filter to a dictionary.
    req = dict(f.split(':', 1) for f in filters)
    try:
      # Instantiate the generator to force all the requests now and catch the
      # errors here.
      return list(gerrit_util.GenerateAllChanges(instance['url'], req,
          o_params=['MESSAGES', 'LABELS', 'DETAILED_ACCOUNTS']))
    except gerrit_util.GerritError, e:
      print 'ERROR: Looking up %r: %s' % (instance['url'], e)
      return []
  def gerrit_search(self, instance, owner=None, reviewer=None):
    """Query one Gerrit instance (SSH or REST, depending on its config)."""
    # Gerrit only filters by age, so express the window start as seconds ago.
    max_age = datetime.today() - self.modified_after
    max_age = max_age.days * 24 * 3600 + max_age.seconds
    user_filter = 'owner:%s' % owner if owner else 'reviewer:%s' % reviewer
    filters = ['-age:%ss' % max_age, user_filter]
    # Determine the gerrit interface to use: SSH or REST API:
    if 'host' in instance:
      issues = self.gerrit_changes_over_ssh(instance, filters)
      issues = [self.process_gerrit_ssh_issue(instance, issue)
                for issue in issues]
    elif 'url' in instance:
      issues = self.gerrit_changes_over_rest(instance, filters)
      issues = [self.process_gerrit_rest_issue(instance, issue)
                for issue in issues]
    else:
      raise Exception('Invalid gerrit_instances configuration.')
    # TODO(cjhopman): should we filter abandoned changes?
    issues = filter(self.filter_issue, issues)
    issues = sorted(issues, key=lambda i: i['modified'], reverse=True)
    return issues
  def process_gerrit_ssh_issue(self, instance, issue):
    """Normalize one SSH-query Gerrit change into the common dict."""
    ret = {}
    ret['review_url'] = issue['url']
    if 'shorturl' in instance:
      ret['review_url'] = 'http://%s/%s' % (instance['shorturl'],
                                            issue['number'])
    ret['header'] = issue['subject']
    ret['owner'] = issue['owner']['email']
    ret['author'] = ret['owner']
    ret['created'] = datetime.fromtimestamp(issue['createdOn'])
    ret['modified'] = datetime.fromtimestamp(issue['lastUpdated'])
    if 'comments' in issue:
      ret['replies'] = self.process_gerrit_ssh_issue_replies(issue['comments'])
    else:
      ret['replies'] = []
    # Reviewers are inferred from who replied, excluding the author.
    ret['reviewers'] = set(r['author'] for r in ret['replies'])
    ret['reviewers'].discard(ret['author'])
    return ret
  @staticmethod
  def process_gerrit_ssh_issue_replies(replies):
    """Normalize SSH-query Gerrit comments; drops entries with no email."""
    ret = []
    replies = filter(lambda r: 'email' in r['reviewer'], replies)
    for reply in replies:
      ret.append({
        'author': reply['reviewer']['email'],
        'created': datetime.fromtimestamp(reply['timestamp']),
        'content': '',
      })
    return ret
  def process_gerrit_rest_issue(self, instance, issue):
    """Normalize one REST-API Gerrit change into the common dict."""
    ret = {}
    ret['review_url'] = 'https://%s/%s' % (instance['url'], issue['_number'])
    if 'shorturl' in instance:
      # TODO(deymo): Move this short link to https once crosreview.com supports
      # it.
      ret['review_url'] = 'http://%s/%s' % (instance['shorturl'],
                                            issue['_number'])
    ret['header'] = issue['subject']
    ret['owner'] = issue['owner']['email']
    ret['author'] = ret['owner']
    ret['created'] = datetime_from_gerrit(issue['created'])
    ret['modified'] = datetime_from_gerrit(issue['updated'])
    if 'messages' in issue:
      ret['replies'] = self.process_gerrit_rest_issue_replies(issue['messages'])
    else:
      ret['replies'] = []
    ret['reviewers'] = set(r['author'] for r in ret['replies'])
    ret['reviewers'].discard(ret['author'])
    return ret
  @staticmethod
  def process_gerrit_rest_issue_replies(replies):
    """Normalize REST-API Gerrit messages; drops entries with no author email."""
    ret = []
    replies = filter(lambda r: 'author' in r and 'email' in r['author'],
                     replies)
    for reply in replies:
      ret.append({
        'author': reply['author']['email'],
        'created': datetime_from_gerrit(reply['date']),
        'content': reply['message'],
      })
    return ret
  def project_hosting_issue_search(self, instance):
    """Search one Google Code project for issues mentioning the user.

    Keeps only issues the user owns or authored; returns normalized dicts.
    """
    auth_config = auth.extract_auth_config_from_options(self.options)
    authenticator = auth.get_authenticator_for_host(
        "code.google.com", auth_config)
    http = authenticator.authorize(httplib2.Http())
    url = "https://www.googleapis.com/projecthosting/v2/projects/%s/issues" % (
        instance["name"])
    # The API takes Unix timestamps, hence the epoch subtraction below.
    epoch = datetime.utcfromtimestamp(0)
    user_str = '%s@chromium.org' % self.user
    query_data = urllib.urlencode({
      'maxResults': 10000,
      'q': user_str,
      'publishedMax': '%d' % (self.modified_before - epoch).total_seconds(),
      'updatedMin': '%d' % (self.modified_after - epoch).total_seconds(),
    })
    url = url + '?' + query_data
    _, body = http.request(url)
    content = json.loads(body)
    if not content:
      print "Unable to parse %s response from projecthosting." % (
          instance["name"])
      return []
    issues = []
    if 'items' in content:
      items = content['items']
      for item in items:
        issue = {
          "header": item["title"],
          "created": item["published"],
          "modified": item["updated"],
          "author": item["author"]["name"],
          "url": "https://code.google.com/p/%s/issues/detail?id=%s" % (
              instance["name"], item["id"]),
          "comments": []
        }
        if 'owner' in item:
          issue['owner'] = item['owner']['name']
        else:
          issue['owner'] = 'None'
        if issue['owner'] == user_str or issue['author'] == user_str:
          issues.append(issue)
    return issues
  def print_heading(self, heading):
    """Print a section heading using the configured heading format."""
    print
    print self.options.output_format_heading.format(heading=heading)
  def print_change(self, change):
    """Print one change using the changes format (adds {reviewers})."""
    optional_values = {
        'reviewers': ', '.join(change['reviewers'])
    }
    self.print_generic(self.options.output_format,
                       self.options.output_format_changes,
                       change['header'],
                       change['review_url'],
                       change['author'],
                       optional_values)
  def print_issue(self, issue):
    """Print one issue using the issues format (adds {owner})."""
    optional_values = {
        'owner': issue['owner'],
    }
    self.print_generic(self.options.output_format,
                       self.options.output_format_issues,
                       issue['header'],
                       issue['url'],
                       issue['author'],
                       optional_values)
  def print_review(self, review):
    """Print one review using the reviews format."""
    self.print_generic(self.options.output_format,
                       self.options.output_format_reviews,
                       review['header'],
                       review['review_url'],
                       review['author'])
  @staticmethod
  def print_generic(default_fmt, specific_fmt,
                    title, url, author,
                    optional_values=None):
    """Render one activity line; specific_fmt overrides default_fmt if set."""
    output_format = specific_fmt if specific_fmt is not None else default_fmt
    output_format = unicode(output_format)
    required_values = {
        'title': title,
        'url': url,
        'author': author,
    }
    # Merge required and optional values.
    if optional_values is not None:
      values = dict(required_values.items() + optional_values.items())
    else:
      values = required_values
    print output_format.format(**values).encode(sys.getdefaultencoding())
  def filter_issue(self, issue, should_filter_by_user=True):
    """Return True if the issue shows activity (optionally by self.user)
    inside the [modified_after, modified_before) window."""
    def maybe_filter_username(email):
      return not should_filter_by_user or username(email) == self.user
    if (maybe_filter_username(issue['author']) and
        self.filter_modified(issue['created'])):
      return True
    if (maybe_filter_username(issue['owner']) and
        (self.filter_modified(issue['created']) or
         self.filter_modified(issue['modified']))):
      return True
    # for/else: the else only runs when no reply triggered a break, i.e.
    # no qualifying reply was found in the window.
    for reply in issue['replies']:
      if self.filter_modified(reply['created']):
        if not should_filter_by_user:
          break
        if (username(reply['author']) == self.user
            or (self.user + '@') in reply['content']):
          break
    else:
      return False
    return True
  def filter_modified(self, modified):
    """True if `modified` falls strictly inside the activity window."""
    return self.modified_after < modified and modified < self.modified_before
  def auth_for_changes(self):
    #TODO(cjhopman): Move authentication check for getting changes here.
    pass
  def auth_for_reviews(self):
    # Reviews use all the same instances as changes so no authentication is
    # required.
    pass
  def get_changes(self):
    """Fetch changes owned by the user from all configured instances."""
    for instance in rietveld_instances:
      self.changes += self.rietveld_search(instance, owner=self.user)
    for instance in gerrit_instances:
      self.changes += self.gerrit_search(instance, owner=self.user)
  def print_changes(self):
    if self.changes:
      self.print_heading('Changes')
      for change in self.changes:
        self.print_change(change)
  def get_reviews(self):
    """Fetch reviews by the user, excluding their own Gerrit changes."""
    for instance in rietveld_instances:
      self.reviews += self.rietveld_search(instance, reviewer=self.user)
    for instance in gerrit_instances:
      reviews = self.gerrit_search(instance, reviewer=self.user)
      reviews = filter(lambda r: not username(r['owner']) == self.user, reviews)
      self.reviews += reviews
  def print_reviews(self):
    if self.reviews:
      self.print_heading('Reviews')
      for review in self.reviews:
        self.print_review(review)
  def get_issues(self):
    """Fetch tracker issues from all configured Google Code projects."""
    for project in google_code_projects:
      self.issues += self.project_hosting_issue_search(project)
  def print_issues(self):
    if self.issues:
      self.print_heading('Issues')
      for issue in self.issues:
        self.print_issue(issue)
  def print_activity(self):
    """Print all collected activity: changes, then reviews, then issues."""
    self.print_changes()
    self.print_reviews()
    self.print_issues()
def main():
  """Parse command-line options, resolve the date window, fetch and print
  the user's activity. Returns the process exit code (0 on success)."""
  # Silence upload.py.
  rietveld.upload.verbosity = 0
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-u', '--user', metavar='<email>',
      default=os.environ.get('USER'),
      help='Filter on user, default=%default')
  parser.add_option(
      '-b', '--begin', metavar='<date>',
      help='Filter issues created after the date (mm/dd/yy)')
  parser.add_option(
      '-e', '--end', metavar='<date>',
      help='Filter issues created before the date (mm/dd/yy)')
  # "Last quarter" means the quarter containing the day two months ago.
  quarter_begin, quarter_end = get_quarter_of(datetime.today() -
                                              relativedelta(months=2))
  parser.add_option(
      '-Q', '--last_quarter', action='store_true',
      help='Use last quarter\'s dates, i.e. %s to %s' % (
        quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
  parser.add_option(
      '-Y', '--this_year', action='store_true',
      help='Use this year\'s dates')
  parser.add_option(
      '-w', '--week_of', metavar='<date>',
      help='Show issues for week of the date (mm/dd/yy)')
  parser.add_option(
      '-W', '--last_week', action='count',
      help='Show last week\'s issues. Use more times for more weeks.')
  parser.add_option(
      '-a', '--auth',
      action='store_true',
      help='Ask to authenticate for instances with no auth cookie')
  activity_types_group = optparse.OptionGroup(parser, 'Activity Types',
                         'By default, all activity will be looked up and '
                         'printed. If any of these are specified, only '
                         'those specified will be searched.')
  activity_types_group.add_option(
      '-c', '--changes',
      action='store_true',
      help='Show changes.')
  activity_types_group.add_option(
      '-i', '--issues',
      action='store_true',
      help='Show issues.')
  activity_types_group.add_option(
      '-r', '--reviews',
      action='store_true',
      help='Show reviews.')
  parser.add_option_group(activity_types_group)
  output_format_group = optparse.OptionGroup(parser, 'Output Format',
                        'By default, all activity will be printed in the '
                        'following format: {url} {title}. This can be '
                        'changed for either all activity types or '
                        'individually for each activity type. The format '
                        'is defined as documented for '
                        'string.format(...). The variables available for '
                        'all activity types are url, title and author. '
                        'Format options for specific activity types will '
                        'override the generic format.')
  output_format_group.add_option(
      '-f', '--output-format', metavar='<format>',
      default=u'{url} {title}',
      help='Specifies the format to use when printing all your activity.')
  output_format_group.add_option(
      '--output-format-changes', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing changes. Supports the '
           'additional variable {reviewers}')
  output_format_group.add_option(
      '--output-format-issues', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing issues. Supports the '
           'additional variable {owner}.')
  output_format_group.add_option(
      '--output-format-reviews', metavar='<format>',
      default=None,
      help='Specifies the format to use when printing reviews.')
  output_format_group.add_option(
      '--output-format-heading', metavar='<format>',
      default=u'{heading}:',
      help='Specifies the format to use when printing headings.')
  output_format_group.add_option(
      '-m', '--markdown', action='store_true',
      help='Use markdown-friendly output (overrides --output-format '
           'and --output-format-heading)')
  parser.add_option_group(output_format_group)
  auth.add_auth_options(parser)
  # Remove description formatting
  parser.format_description = (
      lambda _: parser.description)  # pylint: disable=E1101
  options, args = parser.parse_args()
  options.local_user = os.environ.get('USER')
  if args:
    parser.error('Args unsupported')
  if not options.user:
    parser.error('USER is not set, please use -u')
  options.user = username(options.user)
  # Resolve the activity window: explicit --begin/--end win over the
  # quarter/year/week convenience flags; default is the last full week.
  if not options.begin:
    if options.last_quarter:
      begin, end = quarter_begin, quarter_end
    elif options.this_year:
      begin, end = get_year_of(datetime.today())
    elif options.week_of:
      begin, end = (get_week_of(datetime.strptime(options.week_of, '%m/%d/%y')))
    elif options.last_week:
      begin, end = (get_week_of(datetime.today() -
                                timedelta(days=1 + 7 * options.last_week)))
    else:
      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
  else:
    begin = datetime.strptime(options.begin, '%m/%d/%y')
    if options.end:
      end = datetime.strptime(options.end, '%m/%d/%y')
    else:
      end = datetime.today()
  options.begin, options.end = begin, end
  if options.markdown:
    options.output_format = ' * [{title}]({url})'
    options.output_format_heading = '### {heading} ###'
  print 'Searching for activity by %s' % options.user
  print 'Using range %s to %s' % (options.begin, options.end)
  my_activity = MyActivity(options)
  # No explicit activity-type flag means "show everything".
  if not (options.changes or options.reviews or options.issues):
    options.changes = True
    options.issues = True
    options.reviews = True
  # First do any required authentication so none of the user interaction has to
  # wait for actual work.
  if options.changes:
    my_activity.auth_for_changes()
  if options.reviews:
    my_activity.auth_for_reviews()
  print 'Looking up activity.....'
  try:
    if options.changes:
      my_activity.get_changes()
    if options.reviews:
      my_activity.get_reviews()
    if options.issues:
      my_activity.get_issues()
  except auth.AuthenticationError as e:
    print "auth.AuthenticationError: %s" % e
  print '\n\n\n'
  my_activity.print_changes()
  my_activity.print_reviews()
  my_activity.print_issues()
  return 0
if __name__ == '__main__':
  # Fix encoding to support non-ascii issue titles.
  fix_encoding.fix_encoding()
  try:
    sys.exit(main())
  except KeyboardInterrupt:
    # Exit cleanly on Ctrl-C instead of dumping a traceback.
    sys.stderr.write('interrupted\n')
    sys.exit(1)
|
{
"content_hash": "703dff1d26cab825026942e8e70d7c33",
"timestamp": "",
"source": "github",
"line_count": 751,
"max_line_length": 80,
"avg_line_length": 33.435419440745676,
"alnum_prop": 0.6205495818399044,
"repo_name": "kevinkindom/chrome_depto_tools",
"id": "17cb4cf83b1f555bc5cfd56497a77bebcbab2ffa",
"size": "25299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "my_activity.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "22465"
},
{
"name": "C",
"bytes": "5028"
},
{
"name": "Makefile",
"bytes": "4634"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "1855162"
},
{
"name": "Shell",
"bytes": "70869"
}
],
"symlink_target": ""
}
|
import logging
from typing import Any
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
    """post_migrate signal receiver: empty the configured Django cache.

    Connected in ZerverConfig.ready() when POST_MIGRATION_CACHE_FLUSHING is
    enabled, so stale cached data never survives a schema change.
    """
    logging.info("Clearing memcached cache after migrations")
    cache.clear()
class ZerverConfig(AppConfig):
    """Django application configuration for the zerver app."""

    name: str = "zerver"

    def ready(self) -> None:
        """One-time initialization run once the Django app registry is ready."""
        # We import zerver.signals here for the side effect of
        # registering the user_logged_in signal receiver. This import
        # needs to be here (rather than e.g. at top-of-file) to avoid
        # running that code too early in Django's setup process, but
        # in any case, this is an intentionally unused import.
        import zerver.signals

        zerver.signals  # Reference the module so linters don't flag the import as unused.

        if settings.POST_MIGRATION_CACHE_FLUSHING:
            post_migrate.connect(flush_cache, sender=self)
|
{
"content_hash": "b15b0807a37ea0d6d22db8e102a7f82f",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 70,
"avg_line_length": 31.79310344827586,
"alnum_prop": 0.7006507592190889,
"repo_name": "punchagan/zulip",
"id": "587eb084030b4e8432361a27664a6745aa92ad5f",
"size": "922",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/apps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "453615"
},
{
"name": "Dockerfile",
"bytes": "4898"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "607321"
},
{
"name": "Handlebars",
"bytes": "315160"
},
{
"name": "JavaScript",
"bytes": "3572990"
},
{
"name": "Perl",
"bytes": "9884"
},
{
"name": "Puppet",
"bytes": "94991"
},
{
"name": "Python",
"bytes": "8750579"
},
{
"name": "Ruby",
"bytes": "3875"
},
{
"name": "Shell",
"bytes": "134468"
},
{
"name": "TypeScript",
"bytes": "223296"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
	"""Build the Static template object for the orange miasma-of-fog particle."""
	static = Static()
	static.template = "object/static/particle/shared_pt_miasma_of_fog_orange.iff"
	static.attribute_template_id = -1
	static.stfName("obj_n","unknown_object")
	#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####
	return static
|
{
"content_hash": "66d6f6b8e5e3022de7a932d55094f917",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 78,
"avg_line_length": 23.615384615384617,
"alnum_prop": 0.6905537459283387,
"repo_name": "anhstudios/swganh",
"id": "6d304a714ac58fdf5585881ac4a308613d6cbc5c",
"size": "452",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/static/particle/shared_pt_miasma_of_fog_orange.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
from raw_to_ass import Raw_to_ass_parser
import os.path #exists
import sys #exit and argv
import codecs #to write utf-8 output
import kuplett_parser
import diff_tool
from meta_parser import get_metadata
from preprocess import preprocess_ass
def wiki_to_text(username, password, outputdir, sourcefile):
    """Fetch all lyrics listed in *sourcefile* from the wiki and write each
    one as a UTF-8 text file under *outputdir*.

    File names are derived from the corresponding entry in the source data's
    "urls" list; missing intermediate directories are created on demand.
    Returns the list of lyrics (each a list of lines) as fetched.
    """
    lyrics = kuplett_parser.get_all_lyrics(sourcefile, username, password)
    data = kuplett_parser.load_data(sourcefile)
    urls = data.get("urls")
    # enumerate replaces the hand-maintained counter; lyrics and urls are
    # assumed to be parallel lists (as in the original).
    for index, lyric in enumerate(lyrics):
        filename = outputdir + "/" + kuplett_parser.get_generic_name(urls[index])
        dirname = os.path.dirname(filename)
        if dirname.strip() and not os.path.exists(dirname):
            os.makedirs(dirname)
        # `with` closes the file deterministically; the original leaked the
        # handle, so the last file could stay unflushed until interpreter exit.
        with codecs.open(filename, 'w', 'utf-8') as outfile:
            for line in lyric:
                outfile.write(line + "\n")
    return lyrics
if __name__ == "__main__":
    # CLI wrapper: USERNAME PASSWORD OUTPUT_DIR; the wiki page list is
    # currently hard-coded to data_2017.txt below.
    if len(sys.argv)<4:
        print("Need 3 arguments.")
        print("Usage:")
        print("get_and_parse_kuplett.py USERNAME PASSWORD OUTFILE_NAME")
        sys.exit(3)
    #if os.path.exists(sys.argv[3]):
    #    print("File '"+sys.argv[3]+"' already exists. Delete or rename it and try again.")
    #    sys.exit(1)
    wiki_to_text(sys.argv[1], sys.argv[2], sys.argv[3],sourcefile="data_2017.txt")
|
{
"content_hash": "d6b7b87f297a4f6ad20d0b50617d39d5",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 104,
"avg_line_length": 36.77777777777778,
"alnum_prop": 0.6487915407854985,
"repo_name": "ITNano/WikiSubtitleReader",
"id": "248581488ed2fad0f1f2cfa0461d5313ae502a33",
"size": "1349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wiki_to_text.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34663"
}
],
"symlink_target": ""
}
|
import sys,os
current_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.join(current_dir, '../'))
from coincheck.account import Account
from coincheck import settings
def test_get_info():
    """Smoke test: fetch account info with the configured API credentials."""
    account = Account(access_key=settings.access_key, secret_key=settings.secret_key)
    return account.get_info()
def test_get_balance():
    """Smoke test: fetch the account balance with the configured credentials."""
    account = Account(access_key=settings.access_key, secret_key=settings.secret_key)
    return account.get_balance()
if __name__ == '__main__':
    # Allow running these network smoke tests directly, without a test runner.
    test_get_info()
    test_get_balance()
|
{
"content_hash": "d737dd2fc54a445ad5f8896ff19a8037",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 80,
"avg_line_length": 28.526315789473685,
"alnum_prop": 0.6974169741697417,
"repo_name": "kmn/coincheck",
"id": "d5bcb55e9f950b63d12b9283ac69c108c6613399",
"size": "542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_account.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10643"
}
],
"symlink_target": ""
}
|
"""Discover and load entry points from installed packages."""
# Copyright (c) Thomas Kluyver and contributors
# Distributed under the terms of the MIT license; see LICENSE file.
from contextlib import contextmanager
import glob
from importlib import import_module
import io
import itertools
import os.path as osp
import re
import sys
import warnings
import zipfile
if sys.version_info[0] >= 3:
import configparser
else:
from backports import configparser
# Matches the value syntax used in entry_points.txt:
#   "module.path:object.attr [extra1,extra2]"
# Named groups: modulename (required), objectname and extras (both optional).
entry_point_pattern = re.compile(r"""
(?P<modulename>\w+(\.\w+)*)
(:(?P<objectname>\w+(\.\w+)*))?
\s*
(\[(?P<extras>.+)\])?
$
""", re.VERBOSE)
__version__ = '0.2.3'
class BadEntryPoint(Exception):
    """Error raised for an entry point string that does not match the
    expected ``module:object [extras]`` syntax.
    """
    def __init__(self, epstr):
        # Keep the offending string for the error message.
        self.epstr = epstr

    def __str__(self):
        return "Couldn't parse entry point spec: %r" % (self.epstr,)

    @staticmethod
    @contextmanager
    def err_to_warnings():
        """Context manager that downgrades BadEntryPoint errors to warnings."""
        try:
            yield
        except BadEntryPoint as exc:
            warnings.warn(str(exc))
class NoSuchEntryPoint(Exception):
    """Error raised by :func:`get_single` when the requested entry point
    does not exist in the given group.
    """
    def __init__(self, group, name):
        self.group = group
        self.name = name

    def __str__(self):
        message = "No {!r} entry point found in group {!r}"
        return message.format(self.name, self.group)
class CaseSensitiveConfigParser(configparser.ConfigParser):
    """ConfigParser that preserves the case of option names.

    Entry point names are case-sensitive, so the default lower-casing
    transform must not be applied.
    """
    def optionxform(self, optionstr):
        # Equivalent to `optionxform = staticmethod(str)`: keep the name
        # as-is (str() of a str is the same string).
        return str(optionstr)
class EntryPoint(object):
    """One discovered entry point: a name mapping to a module and,
    optionally, a dotted object path within it, with optional extras and
    the distribution it came from.
    """
    def __init__(self, name, module_name, object_name, extras=None, distro=None):
        self.name = name
        self.module_name = module_name
        self.object_name = object_name
        self.extras = extras
        self.distro = distro

    def __repr__(self):
        return "EntryPoint(%r, %r, %r, %r)" % \
            (self.name, self.module_name, self.object_name, self.distro)

    def load(self):
        """Import the target module and resolve the dotted object path.

        Returns the module itself when no object name was specified.
        """
        target = import_module(self.module_name)
        if self.object_name:
            for attr_name in self.object_name.split('.'):
                target = getattr(target, attr_name)
        return target

    @classmethod
    def from_string(cls, epstr, name, distro=None):
        """Parse an entry point from the syntax in entry_points.txt

        :param str epstr: The entry point string (not including 'name =')
        :param str name: The name of this entry point
        :param Distribution distro: The distribution in which the entry point was found
        :rtype: EntryPoint
        :raises BadEntryPoint: if *epstr* can't be parsed as an entry point.
        """
        match = entry_point_pattern.match(epstr)
        if match is None:
            raise BadEntryPoint(epstr)
        modulename, objectname, extras = match.group(
            'modulename', 'objectname', 'extras')
        if extras is not None:
            extras = re.split(',\s*', extras)
        return cls(name, modulename, objectname, extras, distro)
class Distribution(object):
    """Lightweight record of an installed distribution's name and version."""

    def __init__(self, name, version):
        self.name, self.version = name, version

    def __repr__(self):
        return "Distribution(%r, %r)" % (self.name, self.version)
def iter_files_distros(path=None, repeated_distro='first'):
    """Yield (CaseSensitiveConfigParser, Distribution-or-None) pairs for every
    entry_points.txt found on *path* (defaults to sys.path).

    Handles three layouts: .egg directories, .egg zip files, and
    *.dist-info / *.egg-info directories.  With repeated_distro='first',
    later copies of an already-seen distribution name are skipped.
    """
    if path is None:
        path = sys.path
    # Distributions found earlier in path will shadow those with the same name
    # found later. If these distributions used different module names, it may
    # actually be possible to import both, but in most cases this shadowing
    # will be correct.
    distro_names_seen = set()
    for folder in path:
        if folder.rstrip('/\\').endswith('.egg'):
            # Gah, eggs
            egg_name = osp.basename(folder)
            if '-' in egg_name:
                # Egg file names look like name-version[-extra]; take the
                # first two dash-separated pieces.
                distro = Distribution(*egg_name.split('-')[:2])
                if (repeated_distro == 'first') \
                        and (distro.name in distro_names_seen):
                    continue
                distro_names_seen.add(distro.name)
            else:
                distro = None
            if osp.isdir(folder):
                # Unpacked egg directory.
                ep_path = osp.join(folder, 'EGG-INFO', 'entry_points.txt')
                if osp.isfile(ep_path):
                    cp = CaseSensitiveConfigParser()
                    cp.read(ep_path)
                    yield cp, distro
            elif zipfile.is_zipfile(folder):
                # Zipped egg: read the metadata without extracting.
                z = zipfile.ZipFile(folder)
                try:
                    info = z.getinfo('EGG-INFO/entry_points.txt')
                except KeyError:
                    continue
                cp = CaseSensitiveConfigParser()
                with z.open(info) as f:
                    # Wrap the binary zip member so configparser gets text.
                    fu = io.TextIOWrapper(f)
                    cp.read_file(fu,
                        source=osp.join(folder, 'EGG-INFO', 'entry_points.txt'))
                yield cp, distro
        # NOTE(review): this loop variable shadows the *path* parameter;
        # harmless here because the parameter is not used again, but worth
        # renaming at some point.
        for path in itertools.chain(
            glob.iglob(osp.join(folder, '*.dist-info', 'entry_points.txt')),
            glob.iglob(osp.join(folder, '*.egg-info', 'entry_points.txt'))
        ):
            distro_name_version = osp.splitext(osp.basename(osp.dirname(path)))[0]
            if '-' in distro_name_version:
                # dist-info dirs are name-version; split on the first dash only.
                distro = Distribution(*distro_name_version.split('-', 1))
                if (repeated_distro == 'first') \
                        and (distro.name in distro_names_seen):
                    continue
                distro_names_seen.add(distro.name)
            else:
                distro = None
            cp = CaseSensitiveConfigParser()
            cp.read(path)
            yield cp, distro
def get_single(group, name, path=None):
    """Find a single entry point.

    Returns an :class:`EntryPoint` object, or raises :exc:`NoSuchEntryPoint`
    if no match is found.
    """
    for config, distro in iter_files_distros(path=path):
        if group not in config:
            continue
        section = config[group]
        if name in section:
            # Malformed specs are reported as warnings, not errors.
            with BadEntryPoint.err_to_warnings():
                return EntryPoint.from_string(section[name], name, distro)
    raise NoSuchEntryPoint(group, name)
def get_group_named(group, path=None):
    """Find a group of entry points with unique names.

    Returns a dictionary of names to :class:`EntryPoint` objects.
    """
    result = {}
    for ep in get_group_all(group, path=path):
        # setdefault keeps the first occurrence, matching path shadowing.
        result.setdefault(ep.name, ep)
    return result
def get_group_all(group, path=None):
    """Find all entry points in a group.

    Returns a list of :class:`EntryPoint` objects.
    """
    result = []
    for config, distro in iter_files_distros(path=path):
        if group not in config:
            continue
        for name, epstr in config[group].items():
            # Malformed specs are reported as warnings, not errors.
            with BadEntryPoint.err_to_warnings():
                result.append(EntryPoint.from_string(epstr, name, distro))
    return result
if __name__ == '__main__':
    import pprint
    # Ad-hoc demo: dump every console_scripts entry point visible on sys.path.
    pprint.pprint(get_group_all('console_scripts'))
|
{
"content_hash": "84b9659e0692b2592421bbf619ad49d8",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 87,
"avg_line_length": 32.544186046511626,
"alnum_prop": 0.5755323710161497,
"repo_name": "unnikrishnankgs/va",
"id": "526f8df23e70e41252defff4e6d348d2d4410290",
"size": "6997",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "venv/lib/python3.5/site-packages/entrypoints.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "1836035"
},
{
"name": "C++",
"bytes": "12002305"
},
{
"name": "CMake",
"bytes": "128"
},
{
"name": "CSS",
"bytes": "64776"
},
{
"name": "Cuda",
"bytes": "78890"
},
{
"name": "Fortran",
"bytes": "8281"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "297329"
},
{
"name": "JavaScript",
"bytes": "4313047"
},
{
"name": "Jupyter Notebook",
"bytes": "603900"
},
{
"name": "Makefile",
"bytes": "7573"
},
{
"name": "Nginx",
"bytes": "544"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Protocol Buffer",
"bytes": "72897"
},
{
"name": "PureBasic",
"bytes": "134"
},
{
"name": "Python",
"bytes": "51104955"
},
{
"name": "Shell",
"bytes": "71646"
},
{
"name": "Smarty",
"bytes": "28890"
},
{
"name": "XSLT",
"bytes": "152770"
}
],
"symlink_target": ""
}
|
import sqlite3

# One-shot setup script: create (or open) the local tweet archive and ensure
# the schema exists.  IF NOT EXISTS makes the script safe to re-run; the
# original crashed with OperationalError the second time it was executed,
# and it also leaked the connection (only the cursor was closed).
conn = sqlite3.connect('twitter.db')
c = conn.cursor()
c.execute('''create table if not exists tweets (
    id INTEGER PRIMARY KEY,
    created_at TIMESTAMP,
    text TEXT,
    in_reply_to_screen_name TEXT,
    in_reply_to_status_id INTEGER,
    in_reply_to_user_id INTEGER,
    retweet_count INTEGER,
    retweeted BOOLEAN,
    favorited BOOLEAN,
    truncated BOOLEAN,
    source TEXT
    )
''')
conn.commit()
c.close()
conn.close()
|
{
"content_hash": "fc075be4184c32aae13551bc47c5076b",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 36,
"avg_line_length": 19.363636363636363,
"alnum_prop": 0.6643192488262911,
"repo_name": "quarkness/twitterbackup",
"id": "9a33030fad75dafbf2afa5ce8dfea883622dea91",
"size": "449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "create_db.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "2149"
}
],
"symlink_target": ""
}
|
import datetime as dt
import itertools
import logging
import re
import urlparse
import bson
import pytz
import itsdangerous
from modularodm import fields, Q
from modularodm.exceptions import NoResultsFound
from modularodm.exceptions import ValidationError, ValidationValueError, QueryException
from modularodm.validators import URLValidator
import framework
from framework.addons import AddonModelMixin
from framework import analytics
from framework.auth import signals, utils
from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError, InvalidTokenError,
MergeConfirmedRequiredError, MergeConflictError)
from framework.bcrypt import generate_password_hash, check_password_hash
from framework.exceptions import PermissionsError
from framework.guid.model import GuidStoredObject
from framework.mongo.validators import string_required
from framework.sentry import log_exception
from framework.sessions import session
from framework.sessions.model import Session
from framework.sessions.utils import remove_sessions_for_user
from website import mails, settings, filters, security
# Callables that render a user's name in different citation styles,
# keyed by style label ('long', 'surname', 'initials').
name_formatters = {
    'long': lambda user: user.fullname,
    'surname': lambda user: user.family_name if user.family_name else user.fullname,
    'initials': lambda user: u'{surname}, {initial}.'.format(
        surname=user.family_name,
        initial=user.given_name_initial,
    ),
}
logger = logging.getLogger(__name__)
# Hide implementation of token generation
def generate_confirm_token():
    """Return a random token used in email-confirmation URLs."""
    return security.random_string(30)
def generate_claim_token():
    """Return a random token used in contributor-claim URLs."""
    return security.random_string(30)
def validate_history_item(item):
    """Validate one jobs/schools history entry: institution is required and
    the end date, when both years are given, must not precede the start date.
    """
    string_required(item.get('institution'))
    start_month = item.get('startMonth')
    start_year = item.get('startYear')
    end_month = item.get('endMonth')
    end_year = item.get('endYear')
    validate_year(start_year)
    validate_year(end_year)
    if not (start_year and end_year):
        return
    if end_year < start_year:
        raise ValidationValueError('End date must be later than start date.')
    if end_year == start_year and end_month and start_month and end_month < start_month:
        raise ValidationValueError('End date must be later than start date.')
def validate_year(item):
    """Require *item*, when truthy, to be a four-character numeric string."""
    if not item:
        return
    try:
        int(item)
    except ValueError:
        raise ValidationValueError('Please enter a valid year.')
    if len(item) != 4:
        raise ValidationValueError('Please enter a valid year.')
# Shared module-level URL validator used by validate_profile_websites.
validate_url = URLValidator()
def validate_profile_websites(profile_websites):
    """Validate every URL in a user's list of profile websites."""
    for site in (profile_websites or []):
        try:
            validate_url(site)
        except ValidationError:
            # Reraise with a better message
            raise ValidationError('Invalid personal URL.')
def validate_social(value):
    """Validate the social dict; currently only the profile websites list."""
    validate_profile_websites(value.get('profileWebsites'))
# TODO - rename to _get_current_user_from_session /HRYBACKI
def _get_current_user():
    # Pull the authenticated user's id out of the current session, if any.
    # session is presumably a request-local proxy (_get_current_object() is
    # falsy when no session is bound) -- TODO confirm against framework.sessions.
    uid = session._get_current_object() and session.data.get('auth_user_id')
    return User.load(uid)
# TODO: This should be a class method of User?
def get_user(email=None, password=None, verification_key=None):
    """Get an instance of User matching the provided params.

    Lookup is by email/username, optionally checking the password, or by
    verification key.

    :return: The instance of User requested
    :rtype: User or None
    """
    # tag: database
    if password and not email:
        raise AssertionError("If a password is provided, an email must also "
                             "be provided.")
    query_list = []
    if email:
        email = email.strip().lower()
        # Match either a confirmed email or the username itself.
        query_list.append(Q('emails', 'eq', email) | Q('username', 'eq', email))
    if password:
        password = password.strip()
        try:
            query = query_list[0]
            for query_part in query_list[1:]:
                query = query & query_part
            user = User.find_one(query)
        except Exception as err:
            logger.error(err)
            user = None
        # NOTE(review): returns False (not None) on a wrong password, while
        # every other failure path returns None; callers may distinguish the
        # two, so this asymmetry is preserved.
        if user and not user.check_password(password):
            return False
        return user
    if verification_key:
        query_list.append(Q('verification_key', 'eq', verification_key))
        try:
            query = query_list[0]
            for query_part in query_list[1:]:
                query = query & query_part
            user = User.find_one(query)
            return user
        except Exception as err:
            logger.error(err)
            return None
class Auth(object):
    """Authentication context: a user plus an optional view-only key."""

    def __init__(self, user=None, api_node=None,
                 private_key=None):
        self.user = user
        self.api_node = api_node
        self.private_key = private_key

    def __repr__(self):
        template = '<Auth(user="{self.user}", private_key={self.private_key})>'
        return template.format(self=self)

    @property
    def logged_in(self):
        """True when this context carries an authenticated user."""
        return self.user is not None

    @property
    def private_link(self):
        """The PrivateLink matching ``private_key``, or None if absent,
        unknown, or deleted."""
        if not self.private_key:
            return None
        # Imported here to avoid a circular import with the project model.
        from website.project.model import PrivateLink
        try:
            link = PrivateLink.find_one(
                Q('key', 'eq', self.private_key)
            )
        except QueryException:
            return None
        if link.is_deleted:
            return None
        return link

    @classmethod
    def from_kwargs(cls, request_args, kwargs):
        """Build an Auth from request args / view kwargs, falling back to
        the session user."""
        user = request_args.get('user') or kwargs.get('user') or _get_current_user()
        return cls(
            user=user,
            private_key=request_args.get('view_only'),
        )
class User(GuidStoredObject, AddonModelMixin):
# Node fields that trigger an update to the search engine on save
SEARCH_UPDATE_FIELDS = {
'fullname',
'given_name',
'middle_names',
'family_name',
'suffix',
'merged_by',
'date_disabled',
'date_confirmed',
'jobs',
'schools',
'social',
}
# TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
# search update for all nodes to which the user is a contributor.
SOCIAL_FIELDS = {
'orcid': u'http://orcid.org/{}',
'github': u'http://github.com/{}',
'scholar': u'http://scholar.google.com/citations?user={}',
'twitter': u'http://twitter.com/{}',
'profileWebsites': [],
'linkedIn': u'https://www.linkedin.com/{}',
'impactStory': u'https://impactstory.org/{}',
'researcherId': u'http://researcherid.com/rid/{}',
'researchGate': u'https://researchgate.net/profile/{}',
'academiaInstitution': u'https://{}',
'academiaProfileID': u'.academia.edu/{}',
'baiduScholar': u'http://xueshu.baidu.com/scholarID/{}'
}
# This is a GuidStoredObject, so this will be a GUID.
_id = fields.StringField(primary=True)
# The primary email address for the account.
# This value is unique, but multiple "None" records exist for:
# * unregistered contributors where an email address was not provided.
# TODO: Update mailchimp subscription on username change in user.save()
username = fields.StringField(required=False, unique=True, index=True)
# Hashed. Use `User.set_password` and `User.check_password`
password = fields.StringField()
fullname = fields.StringField(required=True, validate=string_required)
# user has taken action to register the account
is_registered = fields.BooleanField(index=True)
# user has claimed the account
# TODO: This should be retired - it always reflects is_registered.
# While a few entries exist where this is not the case, they appear to be
# the result of a bug, as they were all created over a small time span.
is_claimed = fields.BooleanField(default=False, index=True)
# a list of strings - for internal use
system_tags = fields.StringField(list=True)
# security emails that have been sent
# TODO: This should be removed and/or merged with system_tags
security_messages = fields.DictionaryField()
# Format: {
# <message label>: <datetime>
# ...
# }
# user was invited (as opposed to registered unprompted)
is_invited = fields.BooleanField(default=False, index=True)
# Per-project unclaimed user data:
# TODO: add validation
unclaimed_records = fields.DictionaryField(required=False)
# Format: {
# <project_id>: {
# 'name': <name that referrer provided>,
# 'referrer_id': <user ID of referrer>,
# 'token': <token used for verification urls>,
# 'email': <email the referrer provided or None>,
# 'claimer_email': <email the claimer entered or None>,
# 'last_sent': <timestamp of last email sent to referrer or None>
# }
# ...
# }
# Time of last sent notification email to newly added contributors
# Format : {
# <project_id>: {
# 'last_sent': time.time()
# }
# ...
# }
contributor_added_email_records = fields.DictionaryField(default=dict)
# The user into which this account was merged
merged_by = fields.ForeignField('user', default=None, index=True)
# verification key used for resetting password
verification_key = fields.StringField()
forgot_password_last_post = fields.DateTimeField()
# confirmed emails
# emails should be stripped of whitespace and lower-cased before appending
# TODO: Add validator to ensure an email address only exists once across
# all User's email lists
emails = fields.StringField(list=True)
# email verification tokens
# see also ``unconfirmed_emails``
email_verifications = fields.DictionaryField(default=dict)
# Format: {
# <token> : {'email': <email address>,
# 'expiration': <datetime>}
# }
# TODO remove this field once migration (scripts/migration/migrate_mailing_lists_to_mailchimp_fields.py)
# has been run. This field is deprecated and replaced with mailchimp_mailing_lists
mailing_lists = fields.DictionaryField()
# email lists to which the user has chosen a subscription setting
mailchimp_mailing_lists = fields.DictionaryField()
# Format: {
# 'list1': True,
# 'list2: False,
# ...
# }
# email lists to which the user has chosen a subscription setting, being sent from osf, rather than mailchimp
osf_mailing_lists = fields.DictionaryField(default=lambda: {settings.OSF_HELP_LIST: True})
# Format: {
# 'list1': True,
# 'list2: False,
# ...
# }
# the date this user was registered
# TODO: consider removal - this can be derived from date_registered
date_registered = fields.DateTimeField(auto_now_add=dt.datetime.utcnow,
index=True)
# watched nodes are stored via a list of WatchConfigs
watched = fields.ForeignField("WatchConfig", list=True)
# list of collaborators that this user recently added to nodes as a contributor
recently_added = fields.ForeignField("user", list=True)
# Attached external accounts (OAuth)
external_accounts = fields.ForeignField("externalaccount", list=True)
# CSL names
given_name = fields.StringField()
middle_names = fields.StringField()
family_name = fields.StringField()
suffix = fields.StringField()
# Employment history
jobs = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'title': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing: <boolean>
# }
# Educational history
schools = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'degree': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing: <boolean>
# }
# Social links
social = fields.DictionaryField(validate=validate_social)
# Format: {
# 'profileWebsites': <list of profile websites>
# 'twitter': <twitter id>,
# }
# hashed password used to authenticate to Piwik
piwik_token = fields.StringField()
# date the user last sent a request
date_last_login = fields.DateTimeField()
# date the user first successfully confirmed an email address
date_confirmed = fields.DateTimeField(index=True)
# When the user was disabled.
date_disabled = fields.DateTimeField(index=True)
# when comments were last viewed
comments_viewed_timestamp = fields.DictionaryField()
# Format: {
# 'Comment.root_target._id': 'timestamp',
# ...
# }
# timezone for user's locale (e.g. 'America/New_York')
timezone = fields.StringField(default='Etc/UTC')
# user language and locale data (e.g. 'en_US')
locale = fields.StringField(default='en_US')
_meta = {'optimistic': True}
    def __repr__(self):
        return '<User({0!r}) with id {1!r}>'.format(self.username, self._id)
    def __str__(self):
        # Python 2 idiom: returns a bytestring; non-ASCII chars are replaced.
        return self.fullname.encode('ascii', 'replace')
    __unicode__ = __str__
    # For compatibility with Django auth
    @property
    def pk(self):
        # Primary-key alias expected by Django-style code.
        return self._id
    @property
    def email(self):
        # The username doubles as the primary email address.
        return self.username
    def is_authenticated(self): # Needed for django compat
        return True
    def is_anonymous(self):
        return False
    @property
    def absolute_api_v2_url(self):
        # Local import avoids a circular dependency with website.util.
        from website import util
        return util.api_v2_url('users/{}/'.format(self.pk))
    # used by django and DRF
    def get_absolute_url(self):
        return self.absolute_api_v2_url
@classmethod
def create_unregistered(cls, fullname, email=None):
"""Create a new unregistered user.
"""
user = cls(
username=email,
fullname=fullname,
is_invited=True,
is_registered=False,
)
user.update_guessed_names()
return user
@classmethod
def create(cls, username, password, fullname):
user = cls(
username=username,
fullname=fullname,
)
user.update_guessed_names()
user.set_password(password)
return user
    @classmethod
    def create_unconfirmed(cls, username, password, fullname, do_confirm=True,
                           campaign=None):
        """Create a new user who has begun registration but needs to verify
        their primary email address (username).
        """
        # NOTE(review): the do_confirm parameter is currently unused here.
        user = cls.create(username, password, fullname)
        user.add_unconfirmed_email(username)
        user.is_registered = False
        if campaign:
            # needed to prevent circular import
            from framework.auth.campaigns import system_tag_for_campaign  # skipci
            user.system_tags.append(system_tag_for_campaign(campaign))
        return user
    @classmethod
    def create_confirmed(cls, username, password, fullname):
        # Like create(), but the account is immediately registered and
        # confirmed -- no verification-email round trip.
        user = cls.create(username, password, fullname)
        user.is_registered = True
        user.is_claimed = True
        user.date_confirmed = user.date_registered
        user.emails.append(username)
        return user
    @classmethod
    def from_cookie(cls, cookie, secret=None):
        """Attempt to load a user from their signed cookie

        The cookie payload is a signed session id; the signature is
        verified, the session looked up, and the user loaded from it.

        :returns: None if a user cannot be loaded else User
        """
        if not cookie:
            return None
        secret = secret or settings.SECRET_KEY
        try:
            token = itsdangerous.Signer(secret).unsign(cookie)
        except itsdangerous.BadSignature:
            # Tampered or foreign cookie: treat as anonymous.
            return None
        user_session = Session.load(token)
        if user_session is None:
            return None
        return cls.load(user_session.data.get('auth_user_id'))
    def get_or_create_cookie(self, secret=None):
        """Find the cookie for the given user
        Create a new session if no cookie is found

        :param str secret: The key to sign the cookie with
        :returns: The signed cookie
        """
        secret = secret or settings.SECRET_KEY
        # Reuse the most recently modified session for this user, if any.
        sessions = Session.find(
            Q('data.auth_user_id', 'eq', self._id)
        ).sort(
            '-date_modified'
        ).limit(1)
        if sessions.count() > 0:
            user_session = sessions[0]
        else:
            user_session = Session(data={
                'auth_user_id': self._id,
                'auth_user_username': self.username,
                'auth_user_fullname': self.fullname,
            })
            user_session.save()
        # The cookie value is the session id, signed (not encrypted).
        signer = itsdangerous.Signer(secret)
        return signer.sign(user_session._id)
def update_guessed_names(self):
"""Updates the CSL name fields inferred from the the full name.
"""
parsed = utils.impute_names(self.fullname)
self.given_name = parsed['given']
self.middle_names = parsed['middle']
self.family_name = parsed['family']
self.suffix = parsed['suffix']
    def register(self, username, password=None):
        """Registers the user.

        Sets the username (and optionally the password), records the email,
        marks the account registered/claimed/confirmed, refreshes search
        indexes, and emits the user_confirmed signal.

        :returns: self
        """
        self.username = username
        if password:
            self.set_password(password)
        if username not in self.emails:
            self.emails.append(username)
        self.is_registered = True
        self.is_claimed = True
        self.date_confirmed = dt.datetime.utcnow()
        self.update_search()
        self.update_search_nodes()
        # Emit signal that a user has confirmed
        signals.user_confirmed.send(self)
        return self
    def add_unclaimed_record(self, node, referrer, given_name, email=None):
        """Add a new project entry in the unclaimed records dictionary.

        :param Node node: Node this unclaimed user was added to.
        :param User referrer: User who referred this user.
        :param str given_name: The full name that the referrer gave for this user.
        :param str email: The given email address.
        :returns: The added record
        :raises PermissionsError: if *referrer* cannot edit *node*.
        """
        if not node.can_edit(user=referrer):
            raise PermissionsError('Referrer does not have permission to add a contributor '
                'to project {0}'.format(node._primary_key))
        project_id = node._primary_key
        referrer_id = referrer._primary_key
        if email:
            clean_email = email.lower().strip()
        else:
            clean_email = None
        record = {
            'name': given_name,
            'referrer_id': referrer_id,
            'token': generate_confirm_token(),
            'email': clean_email
        }
        self.unclaimed_records[project_id] = record
        return record
def display_full_name(self, node=None):
"""Return the full name , as it would display in a contributor list for a
given node.
NOTE: Unclaimed users may have a different name for different nodes.
"""
if node:
unclaimed_data = self.unclaimed_records.get(node._primary_key, None)
if unclaimed_data:
return unclaimed_data['name']
return self.fullname
    @property
    def is_active(self):
        """Returns True if the user is active. The user must have activated
        their account, must not be deleted, suspended, etc.

        :return: bool
        """
        # A usable password, no merge, no disable, and a confirmed email
        # are all required in addition to registration.
        return (self.is_registered and
                self.password is not None and
                not self.is_merged and
                not self.is_disabled and
                self.is_confirmed)
def get_unclaimed_record(self, project_id):
"""Get an unclaimed record for a given project_id.
:raises: ValueError if there is no record for the given project.
"""
try:
return self.unclaimed_records[project_id]
except KeyError: # reraise as ValueError
raise ValueError('No unclaimed record for user {self._id} on node {project_id}'
.format(**locals()))
    def get_claim_url(self, project_id, external=False, force=False):
        """Return the URL that an unclaimed user should use to claim their
        account.

        :param project_id: The project ID for the unclaimed record
        :param bool external: Prefix with the absolute domain instead of '/'.
        :raises: ValueError if a record doesn't exist for the given project ID
        :rtype: str
        :returns: The claim URL for this user on the given project
        """
        uid = self._primary_key
        base_url = settings.DOMAIN if external else '/'
        unclaimed_record = self.get_unclaimed_record(project_id)
        token = unclaimed_record['token']
        return '{base_url}user/{uid}/{project_id}/claim/?token={token}'\
            .format(**locals())
    def set_password(self, raw_password, notify=True):
        """Set the password for this user to the hash of ``raw_password``.
        If this is a new user, we're done. If this is a password change,
        then email the user about the change and clear all the old sessions
        so that users will have to log in again with the new password.

        :param raw_password: the plaintext value of the new password
        :param notify: Only meant for unit tests to keep extra notifications from being sent
        :rtype: list
        :returns: Changed fields from the user save
        """
        had_existing_password = bool(self.password)
        self.password = generate_password_hash(raw_password)
        if had_existing_password and notify:
            mails.send_mail(
                to_addr=self.username,
                mail=mails.PASSWORD_RESET,
                mimetype='plain',
                user=self
            )
            # Force re-login everywhere with the new password.
            remove_sessions_for_user(self)
def check_password(self, raw_password):
"""Return a boolean of whether ``raw_password`` was correct."""
if not self.password or not raw_password:
return False
return check_password_hash(self.password, raw_password)
@property
def csl_given_name(self):
parts = [self.given_name]
if self.middle_names:
parts.extend(each[0] for each in re.split(r'\s+', self.middle_names))
return ' '.join(parts)
@property
def csl_name(self):
return {
'family': self.family_name,
'given': self.csl_given_name,
}
    @property
    def created(self):
        """Nodes created by this user (local import avoids a cycle)."""
        from website.project.model import Node
        return Node.find(Q('creator', 'eq', self._id))
    # TODO: This should not be on the User object.
    def change_password(self, raw_old_password, raw_new_password, raw_confirm_password):
        """Change the password for this user to the hash of ``raw_new_password``.

        :raises ChangePasswordError: listing every validation problem found.
        """
        raw_old_password = (raw_old_password or '').strip()
        raw_new_password = (raw_new_password or '').strip()
        raw_confirm_password = (raw_confirm_password or '').strip()
        # Collect all problems instead of failing on the first one.
        issues = []
        if not self.check_password(raw_old_password):
            issues.append('Old password is invalid')
        elif raw_old_password == raw_new_password:
            issues.append('Password cannot be the same')
        if not raw_old_password or not raw_new_password or not raw_confirm_password:
            issues.append('Passwords cannot be blank')
        elif len(raw_new_password) < 6:
            issues.append('Password should be at least six characters')
        elif len(raw_new_password) > 256:
            issues.append('Password should not be longer than 256 characters')
        if raw_new_password != raw_confirm_password:
            issues.append('Password does not match the confirmation')
        if issues:
            raise ChangePasswordError(issues)
        self.set_password(raw_new_password)
    def _set_email_token_expiration(self, token, expiration=None):
        """Set the expiration date for given email token.

        :param str token: The email token to set the expiration for.
        :param datetime expiration: Datetime at which to expire the token. If ``None``, the
            token will expire after ``settings.EMAIL_TOKEN_EXPIRATION`` hours. This is only
            used for testing purposes.
        """
        expiration = expiration or (dt.datetime.utcnow() + dt.timedelta(hours=settings.EMAIL_TOKEN_EXPIRATION))
        self.email_verifications[token]['expiration'] = expiration
        return expiration
    def add_unconfirmed_email(self, email, expiration=None):
        """Add an email verification token for a given email.

        :returns: the new confirmation token
        :raises ValueError: if the email is already confirmed on this account.
        """
        # TODO: This is technically not compliant with RFC 822, which requires
        #       that case be preserved in the "local-part" of an address. From
        #       a practical standpoint, the vast majority of email servers do
        #       not preserve case.
        #       ref: https://tools.ietf.org/html/rfc822#section-6
        email = email.lower().strip()
        if email in self.emails:
            raise ValueError("Email already confirmed to this user.")
        utils.validate_email(email)
        # If the unconfirmed email is already present, refresh the token
        if email in self.unconfirmed_emails:
            self.remove_unconfirmed_email(email)
        token = generate_confirm_token()
        # handle when email_verifications is None
        if not self.email_verifications:
            self.email_verifications = {}
        self.email_verifications[token] = {'email': email}
        self._set_email_token_expiration(token, expiration=expiration)
        return token
    def remove_unconfirmed_email(self, email):
        """Remove an unconfirmed email addresses and their tokens.

        :returns: True if a matching token was found and removed.
        """
        # Python 2 iteritems; deleting during iteration is safe here only
        # because we return immediately after the del.
        for token, value in self.email_verifications.iteritems():
            if value.get('email') == email:
                del self.email_verifications[token]
                return True
        return False
def remove_email(self, email):
"""Remove a confirmed email"""
if email == self.username:
raise PermissionsError("Can't remove primary email")
if email in self.emails:
self.emails.remove(email)
signals.user_email_removed.send(self, email=email)
    @signals.user_email_removed.connect
    def _send_email_removal_confirmations(self, email):
        # Notify both the remaining primary address and the removed address,
        # so a hijacked account can't silently strip emails.
        mails.send_mail(to_addr=self.username,
                        mail=mails.REMOVED_EMAIL,
                        user=self,
                        removed_email=email,
                        security_addr='alternate email address ({})'.format(email))
        mails.send_mail(to_addr=email,
                        mail=mails.REMOVED_EMAIL,
                        user=self,
                        removed_email=email,
                        security_addr='primary email address ({})'.format(self.username))
    def get_confirmation_token(self, email, force=False):
        """Return the confirmation token for a given email.

        :param str email: Email to get the token for.
        :param bool force: If an expired token exists for the given email, generate a new
            token and return that token.
        :raises: ExpiredTokenError if trying to access a token that is expired and force=False.
        :raises: KeyError if there no token for the email.
        """
        # TODO: Refactor "force" flag into User.get_or_add_confirmation_token
        for token, info in self.email_verifications.items():
            if info['email'].lower() == email.lower():
                # Old records will not have an expiration key. If it's missing,
                # assume the token is expired
                expiration = info.get('expiration')
                if not expiration or (expiration and expiration < dt.datetime.utcnow()):
                    if not force:
                        raise ExpiredTokenError('Token for email "{0}" is expired'.format(email))
                    else:
                        new_token = self.add_unconfirmed_email(email)
                        self.save()
                        return new_token
                return token
        raise KeyError('No confirmation token for email "{0}"'.format(email))
def get_confirmation_url(self, email, external=True, force=False):
"""Return the confirmation url for a given email.
:raises: ExpiredTokenError if trying to access a token that is expired.
:raises: KeyError if there is no token for the email.
"""
base = settings.DOMAIN if external else '/'
token = self.get_confirmation_token(email, force=force)
return "{0}confirm/{1}/{2}/".format(base, self._primary_key, token)
    def _get_unconfirmed_email_for_token(self, token):
        """Return the unconfirmed email address associated with ``token``.

        (The old docstring claimed a bool return; the method actually
        returns the email string and raises on bad tokens.)

        :rtype: str
        :raises InvalidTokenError: if the token is unknown.
        :raises ExpiredTokenError: if the token's expiration has passed.
        """
        if token not in self.email_verifications:
            raise InvalidTokenError
        verification = self.email_verifications[token]
        # Not all tokens are guaranteed to have expiration dates
        if (
            'expiration' in verification and
            verification['expiration'] < dt.datetime.utcnow()
        ):
            raise ExpiredTokenError
        return verification['email']
def verify_claim_token(self, token, project_id):
"""Return whether or not a claim token is valid for this user for
a given node which they were added as a unregistered contributor for.
"""
try:
record = self.get_unclaimed_record(project_id)
except ValueError: # No unclaimed record for given pid
return False
return record['token'] == token
    def confirm_email(self, token, merge=False):
        """Confirm the email address associated with the token

        May merge this account with another account that already owns the
        email (when ``merge`` is True) and completes registration when the
        confirmed address is the primary one.

        :raises MergeConfirmedRequiredError: the email belongs to another
            confirmed account and ``merge`` was not requested.
        """
        email = self._get_unconfirmed_email_for_token(token)
        # If this email is confirmed on another account, abort
        try:
            user_to_merge = User.find_one(Q('emails', 'iexact', email))
        except NoResultsFound:
            user_to_merge = None
        if user_to_merge and merge:
            self.merge_user(user_to_merge)
        elif user_to_merge:
            raise MergeConfirmedRequiredError(
                'Merge requires confirmation',
                user=self,
                user_to_merge=user_to_merge,
            )
        # If another user has this email as its username, get it
        try:
            unregistered_user = User.find_one(Q('username', 'eq', email) &
                                              Q('_id', 'ne', self._id))
        except NoResultsFound:
            unregistered_user = None
        if unregistered_user:
            self.merge_user(unregistered_user)
            self.save()
            # Free the username so it can become one of our emails.
            unregistered_user.username = None
        if email not in self.emails:
            self.emails.append(email)
        # Complete registration if primary email
        if email.lower() == self.username.lower():
            self.register(self.username)
            self.date_confirmed = dt.datetime.utcnow()
        # Revoke token
        del self.email_verifications[token]
        # TODO: We can't assume that all unclaimed records are now claimed.
        # Clear unclaimed records, so user's name shows up correctly on
        # all projects
        self.unclaimed_records = {}
        self.save()
        self.update_search_nodes()
        return True
@property
def unconfirmed_emails(self):
# Handle when email_verifications field is None
email_verifications = self.email_verifications or {}
return [
each['email']
for each
in email_verifications.values()
]
    def update_search_nodes(self):
        """Call `update_search` on all nodes on which the user is a
        contributor. Needed to add self to contributor lists in search upon
        registration or claiming.
        """
        for node in self.contributed:
            node.update_search()
    def update_search_nodes_contributors(self):
        """
        Bulk update contributor name on all nodes on which the user is
        a contributor.

        :return:
        """
        # Local import avoids a circular dependency with website.search.
        from website.search import search
        search.update_contributors(self.visible_contributor_to)
    @property
    def is_confirmed(self):
        # True once at least one email has been confirmed.
        return bool(self.date_confirmed)
@property
def social_links(self):
social_user_fields = {}
for key, val in self.social.items():
if val and key in self.SOCIAL_FIELDS:
if not isinstance(val, basestring):
social_user_fields[key] = val
else:
social_user_fields[key] = self.SOCIAL_FIELDS[key].format(val)
return social_user_fields
@property
def biblio_name(self):
given_names = self.given_name + ' ' + self.middle_names
surname = self.family_name
if surname != given_names:
initials = [
name[0].upper() + '.'
for name in given_names.split(' ')
if name and re.search(r'\w', name[0], re.I)
]
return u'{0}, {1}'.format(surname, ' '.join(initials))
return surname
@property
def given_name_initial(self):
"""
The user's preferred initialization of their given name.
Some users with common names may choose to distinguish themselves from
their colleagues in this way. For instance, there could be two
well-known researchers in a single field named "Robert Walker".
"Walker, R" could then refer to either of them. "Walker, R.H." could
provide easy disambiguation.
NOTE: The internal representation for this should never end with a
period. "R" and "R.H" would be correct in the prior case, but
"R.H." would not.
"""
return self.given_name[0]
@property
def url(self):
return '/{}/'.format(self._primary_key)
@property
def api_url(self):
return '/api/v1/profile/{0}/'.format(self._primary_key)
@property
def absolute_url(self):
return urlparse.urljoin(settings.DOMAIN, self.url)
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
@property
def deep_url(self):
return '/profile/{}/'.format(self._primary_key)
def profile_image_url(self, size=None):
"""A generalized method for getting a user's profile picture urls.
We may choose to use some service other than gravatar in the future,
and should not commit ourselves to using a specific service (mostly
an API concern).
As long as we use gravatar, this is just a proxy to User.gravatar_url
"""
return self._gravatar_url(size)
def _gravatar_url(self, size):
return filters.gravatar(
self,
use_ssl=True,
size=size
)
def get_activity_points(self, db=None):
db = db or framework.mongo.database
return analytics.get_total_activity_count(self._primary_key, db=db)
    def disable_account(self):
        """
        Disables user account, making is_disabled true, while also unsubscribing user
        from mailchimp emails.
        """
        from website import mailchimp_utils
        try:
            mailchimp_utils.unsubscribe_mailchimp(
                list_name=settings.MAILCHIMP_GENERAL_LIST,
                user_id=self._id,
                username=self.username
            )
        except mailchimp_utils.mailchimp.ListNotSubscribedError:
            # Nothing to do: the user was never subscribed to the list.
            pass
        except mailchimp_utils.mailchimp.InvalidApiKeyError:
            # An invalid API key is only tolerated when email subscriptions
            # are globally disabled (e.g. local/dev environments).
            if not settings.ENABLE_EMAIL_SUBSCRIPTIONS:
                pass
            else:
                raise
        # Sets ``date_disabled`` via the ``is_disabled`` property setter.
        self.is_disabled = True
    @property
    def is_disabled(self):
        """Whether or not this account has been disabled.
        Abstracts ``User.date_disabled``.
        :return: bool
        """
        return self.date_disabled is not None

    @is_disabled.setter
    def is_disabled(self, val):
        """Set whether or not this account has been disabled."""
        # Only stamp ``date_disabled`` on the first transition to disabled;
        # re-disabling keeps the original timestamp.
        if val and not self.date_disabled:
            self.date_disabled = dt.datetime.utcnow()
        # NOTE(review): only an explicit ``False`` re-enables the account;
        # other falsy values (0, None) are a silent no-op — confirm intended.
        elif val is False:
            self.date_disabled = None
@property
def is_merged(self):
'''Whether or not this account has been merged into another account.
'''
return self.merged_by is not None
@property
def profile_url(self):
return '/{}/'.format(self._id)
@property
def contributed(self):
from website.project.model import Node
return Node.find(Q('contributors', 'eq', self._id))
@property
def contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True)
)
@property
def visible_contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True) &
Q('visible_contributor_ids', 'eq', self._id)
)
def get_summary(self, formatter='long'):
return {
'user_fullname': self.fullname,
'user_profile_url': self.profile_url,
'user_display_name': name_formatters[formatter](self),
'user_is_claimed': self.is_claimed
}
    def save(self, *args, **kwargs):
        """Normalize the username, persist, and sync dependent systems.

        After a successful save this re-indexes the user (and their
        contributor names on nodes) in search when a search-relevant field
        changed, and queues a Piwik update when Piwik is configured.
        """
        # TODO: Update mailchimp subscription on username change
        # Avoid circular import
        from framework.analytics import tasks as piwik_tasks
        # Usernames are stored lower-cased and stripped; ``None`` stays ``None``.
        self.username = self.username.lower().strip() if self.username else None
        ret = super(User, self).save(*args, **kwargs)
        # NOTE(review): ``ret`` appears to be the set of saved field names
        # (it is intersected with SEARCH_UPDATE_FIELDS) — confirm against the
        # base class's save().
        if self.SEARCH_UPDATE_FIELDS.intersection(ret) and self.is_confirmed:
            self.update_search()
            self.update_search_nodes_contributors()
        if settings.PIWIK_HOST and not self.piwik_token:
            piwik_tasks.update_user(self._id)
        return ret
    def update_search(self):
        """Re-index this user in the search backend, tolerating outages."""
        from website import search
        try:
            search.search.update_user(self)
        except search.exceptions.SearchUnavailableError as e:
            # A search outage must not break the calling operation:
            # log and report, then continue.
            logger.exception(e)
            log_exception()
@classmethod
def find_by_email(cls, email):
try:
user = cls.find_one(
Q('emails', 'eq', email)
)
return [user]
except:
return []
def serialize(self, anonymous=False):
return {
'id': utils.privacy_info_handle(self._primary_key, anonymous),
'fullname': utils.privacy_info_handle(self.fullname, anonymous, name=True),
'registered': self.is_registered,
'url': utils.privacy_info_handle(self.url, anonymous),
'api_url': utils.privacy_info_handle(self.api_url, anonymous),
}
###### OSF-Specific methods ######
def watch(self, watch_config):
"""Watch a node by adding its WatchConfig to this user's ``watched``
list. Raises ``ValueError`` if the node is already watched.
:param watch_config: The WatchConfig to add.
:param save: Whether to save the user.
"""
watched_nodes = [each.node for each in self.watched]
if watch_config.node in watched_nodes:
raise ValueError('Node is already being watched.')
watch_config.save()
self.watched.append(watch_config)
return None
    def unwatch(self, watch_config):
        """Unwatch a node by removing its WatchConfig from this user's ``watched``
        list. Raises ``ValueError`` if the node is not already being watched.
        :param watch_config: The WatchConfig to remove.
        :param save: Whether to save the user.
        """
        for each in self.watched:
            if watch_config.node._id == each.node._id:
                from framework.transactions.context import TokuTransaction  # Avoid circular import
                with TokuTransaction():
                    # Ensure that both sides of the relationship are removed
                    each.__class__.remove_one(each)
                    self.watched.remove(each)
                    self.save()
                # Mutating ``self.watched`` during iteration is safe only
                # because we return immediately after the first match.
                return None
        raise ValueError('Node not being watched.')
def is_watching(self, node):
'''Return whether a not a user is watching a Node.'''
watched_node_ids = set([config.node._id for config in self.watched])
return node._id in watched_node_ids
    def get_recent_log_ids(self, since=None):
        '''Return a generator of recent logs' ids.
        :param since: A datetime specifying the oldest time to retrieve logs
        from. If ``None``, defaults to 60 days before today. Must be a tz-aware
        datetime because PyMongo's generation times are tz-aware.
        :rtype: generator of log ids (strings)
        '''
        log_ids = []
        # Default since to 60 days before today if since is None
        # timezone aware utcnow
        utcnow = dt.datetime.utcnow().replace(tzinfo=pytz.utc)
        since_date = since or (utcnow - dt.timedelta(days=60))
        for config in self.watched:
            # Extract the timestamps for each log from the log_id (fast!)
            # The first 4 bytes of Mongo's ObjectId encodes time
            # This prevents having to load each Log Object and access their
            # date fields
            node_log_ids = [log.pk for log in config.node.logs
                            if bson.ObjectId(log.pk).generation_time > since_date and
                            log.pk not in log_ids]
            # Log ids in reverse chronological order
            log_ids = _merge_into_reversed(log_ids, node_log_ids)
        # Wrap the accumulated list so callers receive a generator, per the
        # documented return type.
        return (l_id for l_id in log_ids)
def get_daily_digest_log_ids(self):
'''Return a generator of log ids generated in the past day
(starting at UTC 00:00).
'''
utcnow = dt.datetime.utcnow()
midnight = dt.datetime(
utcnow.year, utcnow.month, utcnow.day,
0, 0, 0, tzinfo=pytz.utc
)
return self.get_recent_log_ids(since=midnight)
@property
def can_be_merged(self):
"""The ability of the `merge_user` method to fully merge the user"""
return all((addon.can_be_merged for addon in self.get_addons()))
    def merge_user(self, user):
        """Merge the account *user* into this account.

        This account absorbs the other user's system tags, claim/invite
        status, profile fields (only where this account has none), unclaimed
        records, security messages, mailing-list subscriptions, comment-view
        timestamps, emails, pending email verifications, institution
        affiliations, watch configs, external accounts, addon settings,
        project contributorships (taking the higher permission, and visible
        if either account was visible), created nodes, and checked-out
        files. The merged user is stripped of its credentials and marked
        with ``merged_by``.

        :param user: A User object to be merged.
        :raises MergeConflictError: if any of the other user's addons
            cannot be merged.
        """
        # Fail if the other user has conflicts.
        if not user.can_be_merged:
            raise MergeConflictError("Users cannot be merged")
        # Move over the other user's attributes
        # TODO: confirm
        for system_tag in user.system_tags:
            if system_tag not in self.system_tags:
                self.system_tags.append(system_tag)
        self.is_claimed = self.is_claimed or user.is_claimed
        self.is_invited = self.is_invited or user.is_invited
        # copy over profile only if this user has no profile info
        if user.jobs and not self.jobs:
            self.jobs = user.jobs
        if user.schools and not self.schools:
            self.schools = user.schools
        if user.social and not self.social:
            self.social = user.social
        # This account's entries win on key collisions (update() runs last).
        unclaimed = user.unclaimed_records.copy()
        unclaimed.update(self.unclaimed_records)
        self.unclaimed_records = unclaimed
        # - unclaimed records should be connected to only one user
        user.unclaimed_records = {}
        security_messages = user.security_messages.copy()
        security_messages.update(self.security_messages)
        self.security_messages = security_messages
        for key, value in user.mailchimp_mailing_lists.iteritems():
            # subscribe to each list if either user was subscribed
            subscription = value or self.mailchimp_mailing_lists.get(key)
            signals.user_merged.send(self, list_name=key, subscription=subscription)
            # clear subscriptions for merged user
            signals.user_merged.send(user, list_name=key, subscription=False, send_goodbye=False)
        # Keep the most recent view timestamp per comment target.
        for target_id, timestamp in user.comments_viewed_timestamp.iteritems():
            if not self.comments_viewed_timestamp.get(target_id):
                self.comments_viewed_timestamp[target_id] = timestamp
            elif timestamp > self.comments_viewed_timestamp[target_id]:
                self.comments_viewed_timestamp[target_id] = timestamp
        self.emails.extend(user.emails)
        user.emails = []
        # Carry over pending verifications, except one for the merged user's
        # own username (that address belongs to the account being retired).
        for k, v in user.email_verifications.iteritems():
            email_to_confirm = v['email']
            if k not in self.email_verifications and email_to_confirm != user.username:
                self.email_verifications[k] = v
        user.email_verifications = {}
        for institution in user.affiliated_institutions:
            self.affiliated_institutions.append(institution)
        user._affiliated_institutions = []
        # FOREIGN FIELDS
        for watched in user.watched:
            if watched not in self.watched:
                self.watched.append(watched)
        user.watched = []
        for account in user.external_accounts:
            if account not in self.external_accounts:
                self.external_accounts.append(account)
        user.external_accounts = []
        # - addons
        # Note: This must occur before the merged user is removed as a
        # contributor on the nodes, as an event hook is otherwise fired
        # which removes the credentials.
        for addon in user.get_addons():
            user_settings = self.get_or_add_addon(addon.config.short_name)
            user_settings.merge(addon)
            user_settings.save()
        # Disconnect signal to prevent emails being sent about being a new contributor when merging users
        # be sure to reconnect it at the end of this code block. Import done here to prevent circular import error.
        from website.project.signals import contributor_added
        from website.project.views.contributor import notify_added_contributor
        from website.util import disconnected_from
        # - projects where the user was a contributor
        with disconnected_from(signal=contributor_added, listener=notify_added_contributor):
            for node in user.contributed:
                # Skip bookmark collection node
                if node.is_bookmark_collection:
                    continue
                # if both accounts are contributor of the same project
                if node.is_contributor(self) and node.is_contributor(user):
                    if node.permissions[user._id] > node.permissions[self._id]:
                        permissions = node.permissions[user._id]
                    else:
                        permissions = node.permissions[self._id]
                    node.set_permissions(user=self, permissions=permissions)
                    visible1 = self._id in node.visible_contributor_ids
                    visible2 = user._id in node.visible_contributor_ids
                    # Exactly one of the two was visible -> the merged
                    # account becomes visible ("visible if either").
                    if visible1 != visible2:
                        node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self))
                else:
                    node.add_contributor(
                        contributor=self,
                        permissions=node.get_permissions(user),
                        visible=node.get_visible(user),
                        log=False,
                    )
                try:
                    node.remove_contributor(
                        contributor=user,
                        auth=Auth(user=self),
                        log=False,
                    )
                except ValueError:
                    logger.error('Contributor {0} not in list on node {1}'.format(
                        user._id, node._id
                    ))
                node.save()
        # - projects where the user was the creator
        for node in user.created:
            node.creator = self
            node.save()
        # - file that the user has checked_out, import done here to prevent import error
        from website.files.models.base import FileNode
        for file_node in FileNode.files_checked_out(user=user):
            file_node.checkout = self
            file_node.save()
        # finalize the merge
        remove_sessions_for_user(user)
        # - username is set to None so the resultant user can set it primary
        # in the future.
        user.username = None
        user.password = None
        user.verification_key = None
        user.osf_mailing_lists = {}
        user.merged_by = self
        user.save()
def get_projects_in_common(self, other_user, primary_keys=True):
"""Returns either a collection of "shared projects" (projects that both users are contributors for)
or just their primary keys
"""
if primary_keys:
projects_contributed_to = set(self.contributed.get_keys())
other_projects_primary_keys = set(other_user.contributed.get_keys())
return projects_contributed_to.intersection(other_projects_primary_keys)
else:
projects_contributed_to = set(self.contributed)
return projects_contributed_to.intersection(other_user.contributed)
def n_projects_in_common(self, other_user):
"""Returns number of "shared projects" (projects that both users are contributors for)"""
return len(self.get_projects_in_common(other_user, primary_keys=True))
def is_affiliated_with_institution(self, inst):
return inst in self.affiliated_institutions
def remove_institution(self, inst_id):
removed = False
for inst in self.affiliated_institutions:
if inst._id == inst_id:
self.affiliated_institutions.remove(inst)
removed = True
return removed
    # Raw storage for institution affiliations; exposed through the
    # ``affiliated_institutions`` property below.
    _affiliated_institutions = fields.ForeignField('node', list=True)

    @property
    def affiliated_institutions(self):
        """List-like view of the user's affiliated institutions.

        Wraps ``_affiliated_institutions`` in an AffiliatedInstitutionsList
        bound to this object; presumably mutations on the returned list are
        written back through ``private_target`` — confirm against
        AffiliatedInstitutionsList.
        """
        from website.institutions.model import Institution, AffiliatedInstitutionsList
        return AffiliatedInstitutionsList([Institution(inst) for inst in self._affiliated_institutions], obj=self, private_target='_affiliated_institutions')
def get_node_comment_timestamps(self, target_id):
""" Returns the timestamp for when comments were last viewed on a node, file or wiki.
"""
default_timestamp = dt.datetime(1970, 1, 1, 12, 0, 0)
return self.comments_viewed_timestamp.get(target_id, default_timestamp)
def _merge_into_reversed(*iterables):
    '''Merge multiple sorted inputs into a single output in reverse order.
    '''
    combined = itertools.chain.from_iterable(iterables)
    return sorted(combined, reverse=True)
|
{
"content_hash": "d8f3a9f475a174ac896f81bfdebf1905",
"timestamp": "",
"source": "github",
"line_count": 1429,
"max_line_length": 157,
"avg_line_length": 36.423372988103566,
"alnum_prop": 0.6083306115391266,
"repo_name": "asanfilippo7/osf.io",
"id": "dc674f4e3f05ce0d61c4042b6f0b700e857217c6",
"size": "52073",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "framework/auth/core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "139390"
},
{
"name": "HTML",
"bytes": "90924"
},
{
"name": "JavaScript",
"bytes": "1548354"
},
{
"name": "Mako",
"bytes": "650459"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "5029997"
}
],
"symlink_target": ""
}
|
def extractExplore(item):
    """
    Explore

    Map a feed item's title to a release message by matching known series
    prefixes; return None for previews/unparseable titles, False when no
    prefix matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None

    # (prefix, series name, translation type)
    chp_prefixes = [
        ('geww ', 'Ghost Emperor Wild Wife: Dandy Eldest Miss', 'translated'),
        ('VGAFH', 'Village girl as head of the family: picked up a general for farming', 'translated'),
        ('The Rebirth of Deceased Consort that Astounded the World chapter ', 'The Rebirth of Deceased Consort that Astounded the World', 'translated'),
        ('Man Man Qing Luo chapter ', 'Man Man Qing Luo', 'translated'),
        ('Hilarious Pampered Consort ', 'Hilarious Pampered Consort', 'translated'),
        ('BTTS ', 'Back to the Sixties: Farm, Get Wealthy & Raise the Cubs', 'translated'),
        ('Campus Rebirth: The Strongest Female Agent', 'Campus Rebirth: The Strongest Female Agent', 'translated'),
        ('ESWHYMY ', 'Eldest Sister, Why Haven\'t You Married Yet', 'translated'),
        ('TVHISLAA ', 'Today Villain Husband Is Still Lying About Amnesia (Novel Transmigration)', 'translated'),
        ('Transmigrated into the Cannon Fodder\'s Daughter ', 'Transmigrated into the Cannon Fodder\'s Daughter', 'translated'),
    ]
    for prefix, series, tl_type in chp_prefixes:
        if item['title'].lower().startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # NOTE: a trailing standalone 'geww ' check was removed — it was
    # unreachable because the prefix table above already matches 'geww '
    # and returns first.
    return False
|
{
"content_hash": "0a4e9671ade387cf30929e09749b73c1",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 176,
"avg_line_length": 77.45161290322581,
"alnum_prop": 0.4743856726364015,
"repo_name": "fake-name/ReadableWebProxy",
"id": "4a2e4cfce434777f8c284d2d5b257df394d9ca07",
"size": "2401",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WebMirror/management/rss_parser_funcs/feed_parse_extractExplore.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "105811"
},
{
"name": "Dockerfile",
"bytes": "1178"
},
{
"name": "HTML",
"bytes": "119737"
},
{
"name": "JavaScript",
"bytes": "3006524"
},
{
"name": "Jupyter Notebook",
"bytes": "148075"
},
{
"name": "Mako",
"bytes": "1454"
},
{
"name": "Python",
"bytes": "5264346"
},
{
"name": "Shell",
"bytes": "1059"
}
],
"symlink_target": ""
}
|
import os
import sys
from mozlog.structured import structuredlog, commandline
from .. import wptcommandline
from update import WPTUpdate
def remove_logging_args(args):
    """Take logging args out of the dictionary of command line arguments so
    they are not passed in as kwargs to the update code. This is particularly
    necessary here because the arguments are often of type file, which cannot
    be serialized.

    Iterates over a snapshot of the keys: popping from a dict while
    iterating its live key view raises ``RuntimeError`` on Python 3.

    :param args: Dictionary of command line arguments.
    """
    for name in list(args.keys()):
        if name.startswith("log_"):
            args.pop(name)
def setup_logging(args, defaults):
    """Use the command line arguments to set up the logger.

    :param args: Dictionary of command line arguments.
    :param defaults: Dictionary of {formatter_name: stream} to use if
        no command line logging is specified.
    """
    update_logger = commandline.setup_logging("web-platform-tests-update", args, defaults)
    # Strip the ``log_*`` entries only after the logger has consumed them;
    # the remaining args are forwarded as kwargs to the update code.
    remove_logging_args(args)
    return update_logger
def run_update(logger, **kwargs):
    """Construct a WPTUpdate with *logger* and run it, returning its result."""
    return WPTUpdate(logger, **kwargs).run()
def main():
    """Entry point: parse update args, configure logging, run, and exit.

    Exits with status 0 on success, 1 on failure.
    """
    update_args = wptcommandline.parse_args_update()
    logger = setup_logging(update_args, {"mach": sys.stdout})
    assert structuredlog.get_default_logger() is not None
    succeeded = run_update(logger, **update_args)
    sys.exit(0 if succeeded else 1)
|
{
"content_hash": "8f5da984c4a8d433b2a68e7ba72bdbd1",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 83,
"avg_line_length": 28.70212765957447,
"alnum_prop": 0.6968124536693847,
"repo_name": "youtube/cobalt",
"id": "497cb34ad0cb2ecfa0efd4cf059158b0b5f85ebd",
"size": "1349",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "third_party/web_platform_tests/tools/wptrunner/wptrunner/update/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""
Django settings for website project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'or91nby_*35%r755lmqahtr)h@fenfz)4rfmfy_s)^#3+fvlb-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'website.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'website.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
|
{
"content_hash": "0cd7dc1ac707e363a05b59f42c5eb327",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 91,
"avg_line_length": 25.53968253968254,
"alnum_prop": 0.6817899316345556,
"repo_name": "DevChrisCross/TwoPhaseSimplexMethod",
"id": "637c9b6c3f7ece4928e04a62f495de39040c5c9e",
"size": "3218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "website/website/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8899"
},
{
"name": "HTML",
"bytes": "11016"
},
{
"name": "JavaScript",
"bytes": "10645"
},
{
"name": "Python",
"bytes": "19091"
}
],
"symlink_target": ""
}
|
from __future__ import with_statement
import ConfigParser, os

# Path to the agents' configuration file, resolved relative to this module.
confFilePath = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'agents.ini')

cfgp = ConfigParser.ConfigParser()
with open(confFilePath) as file:
    cfgp.readfp(file)

# Each section of agents.ini names a submodule of popserver.agents; every
# key/value pair in that section is set as an attribute on the submodule,
# so agents receive their configuration at import time.
for module_name in cfgp.sections():
    mod = __import__('popserver.agents', globals(), locals(), [module_name], -1)
    mod = getattr(mod, module_name)
    for prop in cfgp.items(module_name):
        setattr(mod, prop[0], prop[1])
# exceptions
class AgentException(Exception):
    """Base class for all exceptions raised by the agents."""


class ItemNotFoundException(AgentException):
    """The requested item could not be found by the agent in the remote service."""
|
{
"content_hash": "ad1b5f49e3e4728910cf9c2f27589ff1",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 89,
"avg_line_length": 29.03846153846154,
"alnum_prop": 0.6980132450331126,
"repo_name": "santisiri/popego",
"id": "5b01ab0f7141d924801f76d0e7adeb13e8ce6efd",
"size": "1226",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "popego/popserver/popserver/agents/__init__.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "504141"
},
{
"name": "C++",
"bytes": "26125"
},
{
"name": "CSS",
"bytes": "342653"
},
{
"name": "FORTRAN",
"bytes": "4872"
},
{
"name": "GAP",
"bytes": "13267"
},
{
"name": "Genshi",
"bytes": "407"
},
{
"name": "Groff",
"bytes": "17116"
},
{
"name": "HTML",
"bytes": "383181"
},
{
"name": "JavaScript",
"bytes": "1090769"
},
{
"name": "Makefile",
"bytes": "2441"
},
{
"name": "Mako",
"bytes": "376944"
},
{
"name": "Python",
"bytes": "20895618"
},
{
"name": "Ruby",
"bytes": "3380"
},
{
"name": "Shell",
"bytes": "23581"
},
{
"name": "Smarty",
"bytes": "522"
},
{
"name": "TeX",
"bytes": "35712"
}
],
"symlink_target": ""
}
|
from os import path

import test
from pymake2 import *

#---------------------------------------
# SCRIPT
#---------------------------------------

# Launch the Python interpreter with '--version'; run_program returns the
# child process's exit code, which should be 0 on success.
r = run_program('python', ['--version'])

test.equal(r, 0, 'run_program() returned incorrect value')

test.success()
|
{
"content_hash": "ae0b69007d88142460fb55359b59378e",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 58,
"avg_line_length": 18.928571428571427,
"alnum_prop": 0.47924528301886793,
"repo_name": "philiparvidsson/pymake2",
"id": "d83e8cc1a2ac183650c82ee6c6d55624f448f1cc",
"size": "358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/proc_run_program.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "55302"
}
],
"symlink_target": ""
}
|
import os
import sys
import glob
import subprocess
# Parse execs from command line arguments
def parse_execs():
    """Parse exec specifications from ``sys.argv[4:]``.

    Each argument has the form ``exec_name;arg1;...;argn`` and is split
    on ``;`` into a list.
    """
    return [raw_spec.split(';') for raw_spec in sys.argv[4:]]
# This print function detects if stdout is redirected.
# If stdout is redirected, the msg is printed to stdout and stderr for debug purposes
# If stdout is not redirected, the msg is only printed to stdout
def print_m(arg, **kwargs):
    """Print *arg* to stdout; when stdout is redirected, mirror it to stderr."""
    print(arg, **kwargs)
    if not sys.stdout.isatty():  # redirected
        kwargs['file'] = sys.stderr
        print(arg, **kwargs)
# Check cmd line args
if len(sys.argv) <= 4:
    print("Invalid arguments")
    print("Usage:")
    print(' python lckey_to_csv.py path/to/benchmarks path/to/lc "KeyToExtract" [execs]')
    print('Each exec has the form:')
    print('exec_name;arg1;arg2;...;argn')
    sys.exit(0)

BENCH_PATH = os.path.abspath(sys.argv[1]) + '/'  # Benchmarks path (contains .scm files)
LC_PATH = os.path.abspath(sys.argv[2])  # LC path executable path
LC_KEY = sys.argv[3]  # Key to extract
EXECS = parse_execs()  # List of executions

# Benchmarks are processed in a deterministic (sorted) order.
benchmarks = sorted(glob.glob(BENCH_PATH + '*.scm'))

# Print csv header: 'X' then one column per exec name, ';'-separated.
print_m('X',end='')
for exec in EXECS:
    print_m(';',end='')
    print_m(exec[0],end='')
print_m('')

# For each benchmark
for benchmark in benchmarks:
    benchname = os.path.basename(benchmark).replace('.scm','')
    print_m(benchname,end='')
    # For each exec
    for exec in EXECS:
        # Run exec with --stats; exec[1:] are the extra LC arguments.
        cmd = [LC_PATH,"--stats"]
        cmd.append(benchmark)
        cmd = cmd + exec[1:]
        result = subprocess.run(cmd,stdout=subprocess.PIPE)
        lines = result.stdout.decode("utf-8").split('\n')
        # Find key:value line
        found = False
        for line in lines:
            keyval = line.split(':')
            if keyval[0] == LC_KEY:
                # The key must appear exactly once in the --stats output.
                assert(not found)
                found = True
                val = float(keyval[1].replace(' ','').replace('\t','').replace('\n',''))
                print_m(';',end='')
                print_m(val,end='')
        assert(found)
        print_m('')
|
{
"content_hash": "54ba06223aa21dd32f11d24753ee72a4",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 91,
"avg_line_length": 30.18421052631579,
"alnum_prop": 0.5727986050566696,
"repo_name": "bsaleil/lc",
"id": "0dabd80258ad129492b78fa7782716e4ad0bb353",
"size": "2294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/graphs/lckey_to_csv.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "8240"
},
{
"name": "JavaScript",
"bytes": "16877"
},
{
"name": "Makefile",
"bytes": "2603"
},
{
"name": "Python",
"bytes": "73466"
},
{
"name": "Scheme",
"bytes": "15055126"
},
{
"name": "Shell",
"bytes": "8289"
}
],
"symlink_target": ""
}
|
"""Process data transforming the original one."""
# pylint: disable=C0103
from math import gcd
import numpy as np
from scipy import stats
from scipy.interpolate import interp1d
TYPE = 'mode'
# TYPE = 'lcm'


# lcm calculation
def lcm(a, b):
    """Least common multiple of *a* and *b*, via the gcd identity."""
    return (a // gcd(a, b)) * b
# read the original data
csv_file = 'data/tension_curves.txt'
# ``np.float`` was a deprecated alias of the builtin ``float`` and has been
# removed in NumPy >= 1.24; the builtin is the exact equivalent. Use a
# context manager so the file handle is closed deterministically.
with open(csv_file) as curves_file:
    data = np.asarray([np.asarray(line.split(','), dtype=float)
                       for line in curves_file])
n = data.shape[0]

# apply exponential smoothing: weights double with each sample, so recent
# points dominate the running weighted average.
# NOTE(review): 2**j overflows int64 for curves longer than ~63 points —
# verify the expected curve lengths.
data_exp = []
for i, curve in enumerate(data):
    l = curve.shape[0]
    weights = np.array([2**j for j in range(l)])
    numer = np.cumsum(np.multiply(curve, weights))
    denom = np.cumsum(weights)
    data_exp.append(np.divide(numer, denom))
data_exp = np.asarray(data_exp)
# warp the stories according to the mode
def warp(vector, length, kind='linear'):
    """Resample *vector* to *length* points by interpolating on [0, 1]."""
    src_grid = np.linspace(0, 1, vector.shape[0])
    interpolator = interp1d(src_grid, vector, kind=kind)
    dst_grid = np.linspace(0, 1, length)
    return interpolator(dst_grid)
# Target length: either the most common curve length (mode) or the least
# common multiple of all lengths, depending on TYPE.
lens = np.array([curve.shape[0] for curve in data_exp])
if TYPE == 'mode':
    warp_len = stats.mode(lens)[0][0]
elif TYPE == 'lcm':
    unique_lens = np.unique(lens)
    warp_len = 1
    for l in unique_lens:
        warp_len = lcm(warp_len, int(l))
print("Mode: {:}".format(warp_len))

# Resample every smoothed curve to the common length and save.
data_warped = np.empty((n, warp_len))
for i, curve in enumerate(data_exp):
    data_warped[i] = warp(curve, warp_len)
if TYPE == 'mode':
    np.savetxt('data/warped_curves.gz', data_warped)
elif TYPE == 'lcm':
    np.savetxt('data/warped_curves_lcm.gz', data_warped)
|
{
"content_hash": "858e96f83019510ecaa87ac623328d89",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 62,
"avg_line_length": 28.68421052631579,
"alnum_prop": 0.6519877675840978,
"repo_name": "msilvestro/dupin",
"id": "064261ffa9d981928abd5def6b4965287c1514e0",
"size": "1635",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "data/process_data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "86149"
}
],
"symlink_target": ""
}
|
from test_framework.test_particl import ParticlTestFramework
import time
class MultiWalletTest(ParticlTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [ ['-debug','-noacceptnonstdtxn','-reservebalance=10000000','-stakethreadconddelayms=100'] for i in range(self.num_nodes)]
    def skip_test_if_missing_module(self):
        # Skip the whole test when the node was built without wallet support.
        self.skip_if_no_wallet()
def setup_network(self, split=False):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
self.connect_nodes_bi(0, 1)
self.connect_nodes_bi(0, 2)
self.sync_all()
def run_test(self):
nodes = self.nodes
self.import_genesis_coins_a(nodes[0])
self.import_genesis_coins_b(nodes[1])
nodes[2].extkeyimportmaster('sección grito médula hecho pauta posada nueve ebrio bruto buceo baúl mitad')
self.log.info('Check loaded wallets rescan any missed blocks')
nodes[2].createwallet('wallet_2')
assert (len(nodes[2].listwallets()) == 2)
w1 = nodes[2].get_wallet_rpc('default_wallet')
w2 = nodes[2].get_wallet_rpc('wallet_2')
w2.extkeyimportmaster('sección grito médula hecho pauta posada nueve ebrio bruto buceo baúl mitad')
addr = w1.getnewaddress()
nodes[0].sendtoaddress(addr, 1000)
self.stakeBlocks(1)
assert (w1.getwalletinfo()['total_balance'] == 1000)
assert (w2.getwalletinfo()['total_balance'] == 1000)
nodes[2].unloadwallet('wallet_2')
assert (len(nodes[2].listwallets()) == 1)
nodes[2].sendtoaddress(nodes[1].getnewaddress(), 100)
self.log.info('Test threshold values')
nodes[2].createwallet('w4')
nodes[2].createwallet('w5')
nodes[2].createwallet('w6')
w4 = nodes[2].get_wallet_rpc('w4')
w5 = nodes[2].get_wallet_rpc('w5')
w6 = nodes[2].get_wallet_rpc('w6')
mnemonic = w4.mnemonic('new')['master']
w4.extkeyimportmaster(mnemonic)
w5.extkeyimportmaster(mnemonic)
w6.extkeyimportmaster(mnemonic)
w5.walletsettings('stakingoptions', {'minstakeablevalue' : 1.0})
w6.walletsettings('other', {'minownedvalue' : 1.0})
w4_addr = w4.getnewaddress()
nodes[0].sendtoaddress(w4_addr, 1)
nodes[0].sendtoaddress(w4_addr, 0.99)
# Send to the 6th stealthaddress and 1st address
nodes[2].createwallet('w7')
w7 = nodes[2].get_wallet_rpc('w7')
w7_mnemonic = w7.mnemonic('new')['master']
w7.extkeyimportmaster(w7_mnemonic)
for k in range(5):
w7.getnewstealthaddress()
nodes[0].sendtoaddress(w7.getnewstealthaddress(), 1)
nodes[0].sendtoaddress(w7.getnewaddress(), 2)
self.sync_all()
self.stakeBlocks(1)
nodes[2].loadwallet('wallet_2')
w1 = nodes[2].get_wallet_rpc('default_wallet')
w2 = nodes[2].get_wallet_rpc('wallet_2')
assert (w1.getwalletinfo()['total_balance'] < 900)
assert (w1.getwalletinfo()['total_balance'] == w2.getwalletinfo()['total_balance'])
ro = nodes[2].getblockstats(nodes[2].getblockchaininfo()['blocks'])
assert (ro['height'] == 2)
self.log.info('createwallet with passphrase')
nodes[2].createwallet('wallet_3', False, False, 'password_abc')
w3 = nodes[2].get_wallet_rpc('wallet_3')
ro = w3.getwalletinfo()
assert ('hdseedid' in ro)
assert (ro['encryptionstatus'] == 'Locked')
w4.reservebalance(False)
w5.reservebalance(False)
w6.reservebalance(False)
time.sleep(0.5)
assert (float(w4.getbalances()['mine']['trusted']) == 1.99)
assert (float(w5.getbalances()['mine']['trusted']) == 1.99)
assert (float(w6.getbalances()['mine']['trusted']) == 1.0)
w4_stakinginfo = w4.getstakinginfo()
w5_stakinginfo = w5.getstakinginfo()
w6_stakinginfo = w6.getstakinginfo()
assert (float(w4_stakinginfo['minstakeablevalue']) == 0.00000001)
assert (w4_stakinginfo['weight'] == 199000000)
assert (w5_stakinginfo['minstakeablevalue'] == 1.0)
assert (w5_stakinginfo['weight'] == 100000000)
assert (float(w6_stakinginfo['minstakeablevalue']) == 0.00000001)
assert (w6_stakinginfo['weight'] == 100000000)
assert (float(w6.walletsettings('other')['other']['minownedvalue']) == 1.0)
nodes[2].createwallet('w8')
w8 = nodes[2].get_wallet_rpc('w8')
w8.extkeyimportmaster(w7_mnemonic, '', 'false', '', '', 0, {'lookaheadsize': 0, 'stealthv1lookaheadsize': 6})
print(w7.getbalances())
print(w8.getbalances())
assert (float(w8.getbalances()['mine']['trusted']) == 1.0)
nodes[2].createwallet('w9')
w9 = nodes[2].get_wallet_rpc('w9')
w9.extkeyimportmaster(w7_mnemonic)
assert (float(w9.getbalances()['mine']['trusted']) == 2.0)
# Entry point: run the functional test via the framework's main().
if __name__ == '__main__':
    MultiWalletTest().main()
|
{
"content_hash": "323d6c678b3381ba564ee8d098ef921c",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 148,
"avg_line_length": 39.671875,
"alnum_prop": 0.6136274123670736,
"repo_name": "particl/particl-core",
"id": "9a1a46e32321e2ffaca0066cb7732cbdeec7638e",
"size": "5299",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/functional/wallet_part_multiwallet.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28178"
},
{
"name": "Batchfile",
"bytes": "13"
},
{
"name": "C",
"bytes": "2889723"
},
{
"name": "C++",
"bytes": "13218778"
},
{
"name": "CMake",
"bytes": "29182"
},
{
"name": "Cap'n Proto",
"bytes": "1256"
},
{
"name": "Dockerfile",
"bytes": "1740"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "541"
},
{
"name": "M4",
"bytes": "229063"
},
{
"name": "Makefile",
"bytes": "159386"
},
{
"name": "Objective-C++",
"bytes": "5486"
},
{
"name": "Python",
"bytes": "3388224"
},
{
"name": "QMake",
"bytes": "1276"
},
{
"name": "Sage",
"bytes": "59728"
},
{
"name": "Scheme",
"bytes": "26427"
},
{
"name": "Shell",
"bytes": "190057"
}
],
"symlink_target": ""
}
|
from devp2p.service import BaseService
from ethereum.slogging import get_logger
log = get_logger('db')

# load available databases
# Optional backends are imported defensively: a missing dependency simply
# leaves that backend out of the registry instead of breaking import.
dbs = {}
try:
    from leveldb_service import LevelDB
except ImportError:
    pass
else:
    dbs['LevelDB'] = LevelDB
try:
    from codernitydb_service import CodernityDB
except ImportError:
    pass
else:
    dbs['CodernityDB'] = CodernityDB
# The in-memory EphemDB has no external dependency and is always registered.
from ephemdb_service import EphemDB
dbs['EphemDB'] = EphemDB
class DBService(BaseService):

    """Service facade that delegates every call to the configured db backend.

    The backend is chosen by config['db']['implementation'] from the module
    level ``dbs`` registry.
    """

    name = 'db'

    default_config = dict(db=dict(implementation='LevelDB'))

    def __init__(self, app):
        super(DBService, self).__init__(app)
        impl = self.app.config['db']['implementation']
        if impl not in dbs:
            # The original check (len(dbs) == 0) only caught an empty
            # registry; warn for any unavailable implementation. The lookup
            # below still raises KeyError, so callers see the same exception
            # type as before.
            log.warning('db implementation %r not available (installed: %r)'
                        % (impl, list(dbs.keys())))
        self.db_service = dbs[impl](app)

    def start(self):
        return self.db_service.start()

    def _run(self):
        return self.db_service._run()

    # Key/value operations are forwarded unchanged to the backend.
    def get(self, key):
        return self.db_service.get(key)

    def put(self, key, value):
        return self.db_service.put(key, value)

    def commit(self):
        return self.db_service.commit()

    def delete(self, key):
        return self.db_service.delete(key)

    def __contains__(self, key):
        return key in self.db_service

    def __eq__(self, other):
        # Two services are equal when they wrap equal backends.
        return isinstance(other, self.__class__) and self.db_service == other.db_service

    def __repr__(self):
        return repr(self.db_service)
|
{
"content_hash": "bf78cdee7508f8b608cee33e7cbc28bf",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 88,
"avg_line_length": 22.5,
"alnum_prop": 0.6333333333333333,
"repo_name": "danielnovy/test",
"id": "c077c975b3b31b8ece0dadf324da56106fa61223",
"size": "1440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyethapp/db_service.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1700"
},
{
"name": "Python",
"bytes": "101388"
}
],
"symlink_target": ""
}
|
import logging
import sys
import unicodedata
from ambari_client.core import errors
LOG = logging.getLogger(__name__)

# Maps a reference key name from REST payloads to the model attribute that
# holds the reference object.
ref_dic = {"cluster_name": "clusterRef"}
# Maps a *ModelRef class name to the key attribute it is built from.
ref_class_dic = {"ClusterModelRef": "cluster_name"}
# Module path defining each *ModelRef class, for dynamic resolution.
ref_pkg_dic = {"ClusterModelRef": "ambari_client.model.cluster"}

# Key under which the Ambari REST API returns collections.
LIST_KEY = "items"
ALL = "ALL"
class ModelUtils(object):
    # Static helpers that convert JSON dicts returned by the Ambari REST API
    # into model objects (create_model) and typed lists (get_model_list).

    @staticmethod
    def _check_is_error(expected_class, model_dict, resource_root):
        # Raise a typed exception when the payload carries an error status.
        # Imported lazily to avoid circular imports between model modules.
        from ambari_client.model.status import StatusModel
        from ambari_client.model.cluster import TaskModel
        # Task payloads are parsed leniently and never treated as errors.
        if expected_class == TaskModel:
            resp = ModelUtils.create_model(
                TaskModel,
                model_dict.copy(),
                resource_root,
                "NO_KEY",
                check_errors=False)
            return
        if "status" in model_dict:
            resp = ModelUtils.create_model(
                StatusModel,
                model_dict.copy(),
                resource_root,
                "NO_KEY",
                check_errors=False)
            if expected_class != StatusModel or resp.is_error():
                # Known status codes map to specific exception classes;
                # anything else becomes UnknownServerError.
                if resp.status in errors._exceptions_to_codes:
                    raise errors._exceptions_to_codes[
                        resp.status](
                        resp,
                        resource_root)
                else:
                    raise errors.UnknownServerError(resp, resource_root)

    @staticmethod
    def get_model_list(
            member_list_clss,
            member_cls,
            collection_dict,
            resource_root,
            RESOURCE_KEY_WORD,
            check_errors=True):
        """
        create a model list.
        @param member_list_clss : model_list class.
        @param member_cls : model class for each member.
        @param collection_dict : collection dict used for creating the list of objects.
        @param resource_root : resource object.
        @param RESOURCE_KEY_WORD : take subset of model_dict based on this key.
        @param check_errors : when True, raise if the payload is an error status.
        @return: A ModelList object.
        """
        tLIST_KEY = LIST_KEY
        if check_errors:
            ModelUtils._check_is_error(
                member_list_clss,
                collection_dict,
                resource_root)
        # print locals()
        json_list = []
        # Unwrap the "items" envelope when present; otherwise treat the whole
        # payload as the list (logged as an error for diagnosis).
        if isinstance(collection_dict, dict) and tLIST_KEY in collection_dict:
            json_list = collection_dict[tLIST_KEY]
            LOG.debug(
                "get_model_list: collection_dict is dict ? %s ; has_key = %s" %
                (isinstance(
                    collection_dict,
                    dict),
                    LIST_KEY in collection_dict))
            LOG.debug(
                "get_model_list: collection_dict has %s ;subset = %s" %
                (tLIST_KEY, str(json_list)))
        else:
            json_list = collection_dict
            LOG.error(
                "get_model_list: collection_dict is dict ? %s ; has_key = %s" %
                (isinstance(
                    collection_dict,
                    dict),
                    LIST_KEY in collection_dict))
        LOG.debug("get_model_list: json_list value : \n\t" + str(json_list))
        # Each item contributes the sub-dict under RESOURCE_KEY_WORD; a
        # non-list payload is wrapped into a single-element list.
        if isinstance(json_list, list):
            json_list_new = [x.get(RESOURCE_KEY_WORD) for x in json_list]
            LOG.debug(
                "get_model_list: json_list is list ? %s ; " %
                (isinstance(
                    json_list,
                    list)))
        else:
            json_list_new = [json_list]
            LOG.error(
                "get_model_list: json_list is list ? %s ; " %
                (isinstance(
                    json_list,
                    list)))
        LOG.debug(
            "get_model_list: json_list_new used for creating ModelList \n\t" +
            str(json_list_new))
        objects = [
            ModelUtils.create_model(
                member_cls,
                x,
                resource_root,
                RESOURCE_KEY_WORD) for x in json_list_new]
        LOG.debug(objects)
        return member_list_clss(objects)

    @staticmethod
    def create_model(
            model_cls,
            model_dict,
            resource_root,
            RESOURCE_KEY_WORD,
            check_errors=True):
        """
        create a model.
        @param model_cls : model class.
        @param model_dict : model dict used for creating the object.
        @param resource_root : resource object.
        @param RESOURCE_KEY_WORD : take subset of model_dict based on this key.
        @param check_errors : when True, raise if the payload is an error status.
        @return: A model_cls object.
        """
        if check_errors:
            ModelUtils._check_is_error(model_cls, model_dict, resource_root)
        # print locals()
        rw_dict = {}
        LOG.debug("model_dict = " + str(model_dict))
        # extract model /keyword
        if isinstance(model_dict, dict) and RESOURCE_KEY_WORD in model_dict:
            model_dict = model_dict[RESOURCE_KEY_WORD]
            if not isinstance(model_dict, list):
                LOG.debug(
                    "model_dict has %s ;subset = %s" %
                    (RESOURCE_KEY_WORD, str(
                        model_dict.items())))
            else:
                LOG.debug(
                    "model_dict is list and has %s ;subset = %s" %
                    (RESOURCE_KEY_WORD, str(
                        model_dict)))
        # check for Requests
        if isinstance(model_dict, dict) and "Requests" in model_dict:
            model_dict = model_dict["Requests"]
            LOG.debug(
                "model_dict has Requests ;subset = %s" %
                (str(
                    model_dict.items())))
        # check for composition i.e list of Models
        if isinstance(model_dict, list):
            LOG.debug(
                "model_dict is list")
        else:
            # Move read-write attributes out of model_dict into rw_dict.
            # NOTE: on Python 2, items() returns a list, so deleting keys
            # while iterating is safe here; this would break on Python 3.
            for k, v in model_dict.items():
                LOG.debug("key = %s ; value = %s " % (str(k), str(v)))
                if k in model_cls.RW_ATTR:
                    LOG.debug(k + " is there in RW_ATTR")
                    rw_dict[k] = v
                    del model_dict[k]
        # Construct with the RW attributes (keys str()-coerced for **kwargs).
        rw_dict = get_unicode_kw(rw_dict)
        obj = model_cls(resource_root, **rw_dict)
        # Initialise read-only attributes, then fill them from the payload.
        for attr in model_cls.RO_ATTR:
            obj._setattr(attr, None)
        for k, v in model_dict.items():
            if k in model_cls.RO_ATTR:
                obj._setattr(k, v)
            else:
                LOG.debug(
                    "Unexpected attribute '%s' in %s json" %
                    (k, model_cls.__name__))
        # Reference attributes are stored under their mapped model name.
        for attr in model_cls.REF_ATTR:
            LOG.debug("%s found as reference var" % (attr))
            obj._setattr(getREF_class_name(attr), None)
        for k, v in model_dict.items():
            if k in model_cls.REF_ATTR:
                obj._setattr(getREF_class_name(k), v)
            else:
                LOG.debug(
                    "Unknown attribute '%s' found in model_dict for %s " %
                    (k, model_cls.__name__))
        return obj
# get attribute with REF
def getREF_class_name(REF_name):
    """Return the model attribute name registered for *REF_name*, or None."""
    return ref_dic.get(REF_name)
def getREF_var_name(REF_name):
    """Return the key attribute registered for the given Ref class name, or None."""
    return ref_class_dic.get(REF_name)
def get_REF_object(ref_class_name):
    """
    Gets the Ref object based on class_name
    """
    # Look up the module registered for this Ref class and pull the class
    # object off it; the module must already be imported (sys.modules).
    module = sys.modules[ref_pkg_dic[ref_class_name]]
    class_ref = getattr(module, ref_class_name)
    LOG.debug(class_ref)
    return class_ref
def get_unicode(v):
    """Coerce a unicode value to an ASCII byte string (Python 2 helper)."""
    if not v:
        # Falsy values (None, empty string) pass through untouched.
        return v
    if isinstance(v, unicode):
        v = unicodedata.normalize('NFKD', v).encode('ascii', 'ignore')
        LOG.debug(v)
    elif isinstance(v, str):
        LOG.debug("warning: string found while expecting unicode %s" % v)
    return v
def retain_self_helper(memclass, self=None, **kwargs):
    # Re-run *memclass*'s __init__ on an existing instance, forwarding kwargs.
    # print locals()
    # from ambari_client.model.base_model import BaseModel
    memclass.__init__(self, **kwargs)
def get_unicode_kw(dic):
    """Return a copy of *dic* with every key passed through str().

    Python 2 requires byte-string keys when a dict is splatted as **kwargs,
    so unicode keys are coerced here.
    """
    return dict((str(k), v) for k, v in dic.iteritems())
def get_config_type(service_name):
    """
    get the config tmp_type based on service_name

    @param service_name: Ambari service name (e.g. "HDFS", "HIVE").
    @return: the *-site config type for the service, or "global" for any
             unknown service.
    """
    # NOTE(review): the original if/elif chain tested service_name == "HDFS"
    # twice, so its "core-site" branch was unreachable dead code. The mapping
    # below preserves the reachable behavior (HDFS -> hdfs-site).
    service_to_type = {
        "HDFS": "hdfs-site",
        "MAPREDUCE": "mapred-site",
        "HBASE": "hbase-site",
        "OOZIE": "oozie-site",
        "HIVE": "hive-site",
        "WEBHCAT": "webhcat-site",
    }
    return service_to_type.get(service_name, "global")
def get_key_value(dictt, key):
    """
    Search for some random key in the dict

    Recursively walks nested dicts and lists and returns the first value
    found for *key*, or None when the key is absent.
    """
    # Fixes in this revision:
    # * the original iterated `for l in list:` -- the builtin type, not the
    #   nested list value -- which raised TypeError at runtime;
    # * the original returned the first recursive result unconditionally,
    #   aborting the search even when that branch did not contain the key.
    if isinstance(dictt, dict):
        if key in dictt:
            return dictt[key]
        for v in dictt.values():
            if isinstance(v, (dict, list)):
                found = get_key_value(v, key)
                if found is not None:
                    return found
    elif isinstance(dictt, list):
        for item in dictt:
            found = get_key_value(item, key)
            if found is not None:
                return found
    return None
|
{
"content_hash": "19677a0e21a44d34ff7320d1d6d7be72",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 87,
"avg_line_length": 31.391304347826086,
"alnum_prop": 0.5124653739612188,
"repo_name": "radicalbit/ambari",
"id": "65dd153e47692d3c743040bf5bc5587924951f49",
"size": "10186",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "ambari-client/python-client/src/main/python/ambari_client/model/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "42212"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "182799"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "1287531"
},
{
"name": "CoffeeScript",
"bytes": "4323"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Groovy",
"bytes": "88056"
},
{
"name": "HTML",
"bytes": "5098825"
},
{
"name": "Java",
"bytes": "29006663"
},
{
"name": "JavaScript",
"bytes": "17274453"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLSQL",
"bytes": "2160"
},
{
"name": "PLpgSQL",
"bytes": "314333"
},
{
"name": "PowerShell",
"bytes": "2087991"
},
{
"name": "Python",
"bytes": "14584206"
},
{
"name": "R",
"bytes": "1457"
},
{
"name": "Roff",
"bytes": "13935"
},
{
"name": "Ruby",
"bytes": "14478"
},
{
"name": "SQLPL",
"bytes": "2117"
},
{
"name": "Shell",
"bytes": "741459"
},
{
"name": "Vim script",
"bytes": "5813"
}
],
"symlink_target": ""
}
|
"""Tests for Efergy integration."""
from unittest.mock import AsyncMock, patch
from pyefergy import Efergy, exceptions
from homeassistant.components.efergy import DOMAIN
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
# API tokens selecting which fixture set the mocks serve: the default token
# maps to single-sensor responses, the multi-sensor token to multi responses.
TOKEN = "9p6QGJ7dpZfO3fqPTBk1fyEmjV1cGoLT"
MULTI_SENSOR_TOKEN = "9r6QGF7dpZfO3fqPTBl1fyRmjV1cGoLT"
CONF_DATA = {CONF_API_KEY: TOKEN}
# Hub id reported by the mocked device; used as the config entry unique_id.
HID = "12345678901234567890123456789012"
IMPORT_DATA = {"platform": "efergy", "app_token": TOKEN}
BASE_URL = "https://engage.efergy.com/mobile_proxy/"
def create_entry(hass: HomeAssistant, token: str = TOKEN) -> MockConfigEntry:
    """Register a mock Efergy config entry with Home Assistant and return it."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id=HID,
        data={CONF_API_KEY: token},
    )
    config_entry.add_to_hass(hass)
    return config_entry
async def init_integration(
    hass: HomeAssistant,
    aioclient_mock: AiohttpClientMocker,
    token: str = TOKEN,
    error: bool = False,
) -> MockConfigEntry:
    """Set up the Efergy integration in Home Assistant.

    Creates the config entry, installs the mocked API responses, then runs
    the integration's setup and returns the entry.
    """
    entry = create_entry(hass, token=token)
    await mock_responses(hass, aioclient_mock, token=token, error=error)
    # create_entry() already registered the entry with hass; the original
    # called entry.add_to_hass(hass) a second time here, which was redundant.
    await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()
    return entry
async def mock_responses(
    hass: HomeAssistant,
    aioclient_mock: AiohttpClientMocker,
    token: str = TOKEN,
    error: bool = False,
):
    """Mock responses from Efergy.

    Registers an aiohttp mock for every endpoint the integration calls.
    When *error* is True only getInstant is mocked and it raises a
    connection error.
    """
    api = Efergy(
        token, session=async_get_clientsession(hass), utc_offset=hass.config.time_zone
    )
    # The library computes the numeric offset internally; reuse it so the
    # mocked URLs match the ones the integration will request.
    offset = api._utc_offset  # pylint: disable=protected-access
    if error:
        aioclient_mock.get(
            f"{BASE_URL}getInstant?token={token}",
            exc=exceptions.ConnectError,
        )
        return
    # Uses the module-level BASE_URL (the original shadowed it with an
    # identical local variable).
    aioclient_mock.get(
        f"{BASE_URL}getStatus?token={token}",
        text=load_fixture("efergy/status.json"),
    )
    aioclient_mock.get(
        f"{BASE_URL}getInstant?token={token}",
        text=load_fixture("efergy/instant.json"),
    )
    # getEnergy and getCost follow the same URL and fixture naming pattern
    # for each period, so register them in one loop instead of 8 copies.
    for period, prefix in (
        ("day", "daily"),
        ("week", "weekly"),
        ("month", "monthly"),
        ("year", "yearly"),
    ):
        aioclient_mock.get(
            f"{BASE_URL}getEnergy?token={token}&offset={offset}&period={period}",
            text=load_fixture(f"efergy/{prefix}_energy.json"),
        )
        aioclient_mock.get(
            f"{BASE_URL}getCost?token={token}&offset={offset}&period={period}",
            text=load_fixture(f"efergy/{prefix}_cost.json"),
        )
    aioclient_mock.get(
        f"{BASE_URL}getBudget?token={token}",
        text=load_fixture("efergy/budget.json"),
    )
    # The default token simulates a single-sensor install; any other token
    # serves the multi-sensor summary fixture.
    fixture = (
        "efergy/current_values_single.json"
        if token == TOKEN
        else "efergy/current_values_multi.json"
    )
    aioclient_mock.get(
        f"{BASE_URL}getCurrentValuesSummary?token={token}",
        text=load_fixture(fixture),
    )
def _patch_efergy():
    """Patch the Efergy client used by the config flow with a canned device."""
    efergy_mock = AsyncMock()
    efergy_mock.info = {
        "hid": HID,
        "mac": "AA:BB:CC:DD:EE:FF",
        "status": "on",
        "type": "EEEHub",
        "version": "2.3.7",
    }
    return patch(
        "homeassistant.components.efergy.config_flow.Efergy",
        return_value=efergy_mock,
    )
def _patch_efergy_status():
    # Patch only the status probe the config flow uses for validation.
    return patch("homeassistant.components.efergy.config_flow.Efergy.async_status")
async def setup_platform(
    hass: HomeAssistant,
    aioclient_mock: AiohttpClientMocker,
    platform: str,
    token: str = TOKEN,
    error: bool = False,
):
    """Set up a single Efergy platform and return its config entry."""
    config_entry = await init_integration(
        hass, aioclient_mock, token=token, error=error
    )
    # Restrict the integration to just the platform under test.
    with patch("homeassistant.components.efergy.PLATFORMS", [platform]):
        assert await async_setup_component(hass, DOMAIN, {})
    return config_entry
|
{
"content_hash": "beb05a000b5126a539e8228f244e19f0",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 86,
"avg_line_length": 32.605095541401276,
"alnum_prop": 0.6645829263528032,
"repo_name": "jawilson/home-assistant",
"id": "c4f099df82246f1e21ae53e366be32b872ff3cfb",
"size": "5119",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/components/efergy/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2782"
},
{
"name": "Python",
"bytes": "40129467"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Allow Timeline.monitored_users (ArrayField of CharField) to be blank."""

    dependencies = [
        ('tweets', '0002_auto_20170123_1154'),
    ]

    operations = [
        migrations.AlterField(
            model_name='timeline',
            name='monitored_users',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=15), blank=True, size=None),
        ),
    ]
|
{
"content_hash": "edf5615bd26266139b4137eb127b71fd",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 127,
"avg_line_length": 26.473684210526315,
"alnum_prop": 0.6520874751491054,
"repo_name": "chocoelho/twitter-activities-monitor",
"id": "a018a1f21b4da66dee83938a4c0acd46b8443cb5",
"size": "576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tweets/migrations/0003_auto_20170123_1234.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2267"
},
{
"name": "HTML",
"bytes": "4291"
},
{
"name": "JavaScript",
"bytes": "20969"
},
{
"name": "Makefile",
"bytes": "253"
},
{
"name": "Python",
"bytes": "29092"
},
{
"name": "Shell",
"bytes": "1197"
}
],
"symlink_target": ""
}
|
from indy import wallet, signus
from ..utils import storage
from ..utils.wallet import create_and_open_wallet
import pytest
import logging
# Emit debug-level logs for every test in this module.
logging.basicConfig(level=logging.DEBUG)
@pytest.fixture(autouse=True)
def before_after_each():
    # Start every test with a clean storage directory and clean up after it.
    storage.cleanup()
    yield
    storage.cleanup()
@pytest.fixture
async def wallet_handle():
    # Open a fresh wallet for the test and always close it during teardown.
    handle = await create_and_open_wallet()
    yield handle
    await wallet.close_wallet(handle)
@pytest.mark.asyncio
async def test_store_their_did_works(wallet_handle):
    # Storing an identity record containing only a DID should succeed.
    await signus.store_their_did(wallet_handle, '{"did":"8wZcEriaNLNKtteJvx7f8i"}')
|
{
"content_hash": "1d133263ced5e4f758c09ffd0da690a8",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 83,
"avg_line_length": 21.357142857142858,
"alnum_prop": 0.7441471571906354,
"repo_name": "MRJCrunch/indy-sdk",
"id": "ad45f881aa03a5fcc6d5922973a590033a2fb081",
"size": "598",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wrappers/python/tests/signus/test_store_their_did.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "422160"
},
{
"name": "C++",
"bytes": "13207"
},
{
"name": "Groovy",
"bytes": "2445"
},
{
"name": "Java",
"bytes": "356302"
},
{
"name": "Objective-C",
"bytes": "620946"
},
{
"name": "Objective-C++",
"bytes": "590413"
},
{
"name": "Python",
"bytes": "267478"
},
{
"name": "Ruby",
"bytes": "4353"
},
{
"name": "Rust",
"bytes": "1487087"
},
{
"name": "Shell",
"bytes": "3186"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class ValueValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for ``cone.colorbar.tickformatstop.value``."""

    def __init__(
        self, plotly_name="value", parent_name="cone.colorbar.tickformatstop", **kwargs
    ):
        # Pop the overridable defaults out of kwargs before delegating.
        edit_type = kwargs.pop("edit_type", "colorbars")
        role = kwargs.pop("role", "style")
        super(ValueValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
|
{
"content_hash": "ef12a85f372cc697776b239ad5348341",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 87,
"avg_line_length": 34,
"alnum_prop": 0.6029411764705882,
"repo_name": "plotly/python-api",
"id": "6ebe58686070d5a5450945e49aa3efd8f18a10ff",
"size": "476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/cone/colorbar/tickformatstop/_value.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class dnstxtrec(base_resource) :
""" Configuration for TXT record resource. """
def __init__(self) :
self._domain = ""
self._String = []
self._ttl = 0
self._recordid = 0
self._type = ""
self._authtype = ""
self.___count = 0
@property
def domain(self) :
ur"""Name of the domain for the TXT record.<br/>Minimum length = 1.
"""
try :
return self._domain
except Exception as e:
raise e
@domain.setter
def domain(self, domain) :
ur"""Name of the domain for the TXT record.<br/>Minimum length = 1
"""
try :
self._domain = domain
except Exception as e:
raise e
@property
def String(self) :
ur"""Information to store in the TXT resource record. Enclose the string in single or double quotation marks. A TXT resource record can contain up to six strings, each of which can contain up to 255 characters. If you want to add a string of more than 255 characters, evaluate whether splitting it into two or more smaller strings, subject to the six-string limit, works for you.<br/>Maximum length = 255.
"""
try :
return self._String
except Exception as e:
raise e
@String.setter
def String(self, String) :
ur"""Information to store in the TXT resource record. Enclose the string in single or double quotation marks. A TXT resource record can contain up to six strings, each of which can contain up to 255 characters. If you want to add a string of more than 255 characters, evaluate whether splitting it into two or more smaller strings, subject to the six-string limit, works for you.<br/>Maximum length = 255
"""
try :
self._String = String
except Exception as e:
raise e
@property
def ttl(self) :
ur"""Time to Live (TTL), in seconds, for the record. TTL is the time for which the record must be cached by DNS proxies. The specified TTL is applied to all the resource records that are of the same record type and belong to the specified domain name. For example, if you add an address record, with a TTL of 36000, to the domain name example.com, the TTLs of all the address records of example.com are changed to 36000. If the TTL is not specified, the NetScaler appliance uses either the DNS zone's minimum TTL or, if the SOA record is not available on the appliance, the default value of 3600.<br/>Default value: 3600<br/>Maximum length = 2147483647.
"""
try :
return self._ttl
except Exception as e:
raise e
@ttl.setter
def ttl(self, ttl) :
ur"""Time to Live (TTL), in seconds, for the record. TTL is the time for which the record must be cached by DNS proxies. The specified TTL is applied to all the resource records that are of the same record type and belong to the specified domain name. For example, if you add an address record, with a TTL of 36000, to the domain name example.com, the TTLs of all the address records of example.com are changed to 36000. If the TTL is not specified, the NetScaler appliance uses either the DNS zone's minimum TTL or, if the SOA record is not available on the appliance, the default value of 3600.<br/>Default value: 3600<br/>Maximum length = 2147483647
"""
try :
self._ttl = ttl
except Exception as e:
raise e
@property
def recordid(self) :
ur"""Unique, internally generated record ID. View the details of the TXT record to obtain its record ID. Mutually exclusive with the string parameter.<br/>Minimum length = 1<br/>Maximum length = 65535.
"""
try :
return self._recordid
except Exception as e:
raise e
@recordid.setter
def recordid(self, recordid) :
ur"""Unique, internally generated record ID. View the details of the TXT record to obtain its record ID. Mutually exclusive with the string parameter.<br/>Minimum length = 1<br/>Maximum length = 65535
"""
try :
self._recordid = recordid
except Exception as e:
raise e
@property
def type(self) :
ur"""Type of records to display. Available settings function as follows:
* ADNS - Display all authoritative address records.
* PROXY - Display all proxy address records.
* ALL - Display all address records.<br/>Default value: ADNS<br/>Possible values = ALL, ADNS, PROXY.
"""
try :
return self._type
except Exception as e:
raise e
@type.setter
def type(self, type) :
ur"""Type of records to display. Available settings function as follows:
* ADNS - Display all authoritative address records.
* PROXY - Display all proxy address records.
* ALL - Display all address records.<br/>Default value: ADNS<br/>Possible values = ALL, ADNS, PROXY
"""
try :
self._type = type
except Exception as e:
raise e
@property
def authtype(self) :
ur"""Authentication type.<br/>Possible values = ALL, ADNS, PROXY.
"""
try :
return self._authtype
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(dnstxtrec_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.dnstxtrec
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.domain is not None :
return str(self.domain)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
ur""" Use this API to add dnstxtrec.
"""
try :
if type(resource) is not list :
addresource = dnstxtrec()
addresource.domain = resource.domain
addresource.String = resource.String
addresource.ttl = resource.ttl
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ dnstxtrec() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].domain = resource[i].domain
addresources[i].String = resource[i].String
addresources[i].ttl = resource[i].ttl
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
ur""" Use this API to delete dnstxtrec.
"""
try :
if type(resource) is not list :
deleteresource = dnstxtrec()
if type(resource) != type(deleteresource):
deleteresource.domain = resource
else :
deleteresource.domain = resource.domain
deleteresource.String = resource.String
deleteresource.recordid = resource.recordid
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ dnstxtrec() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].domain = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ dnstxtrec() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].domain = resource[i].domain
deleteresources[i].String = resource[i].String
deleteresources[i].recordid = resource[i].recordid
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the dnstxtrec resources that are configured on netscaler.
"""
try :
if not name :
obj = dnstxtrec()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = dnstxtrec()
obj.domain = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [dnstxtrec() for _ in range(len(name))]
obj = [dnstxtrec() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = dnstxtrec()
obj[i].domain = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_args(cls, client, args) :
ur""" Use this API to fetch all the dnstxtrec resources that are configured on netscaler.
# This uses dnstxtrec_args which is a way to provide additional arguments while fetching the resources.
"""
try :
obj = dnstxtrec()
option_ = options()
option_.args = nitro_util.object_to_string_withoutquotes(args)
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
ur""" Use this API to fetch filtered set of dnstxtrec resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = dnstxtrec()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
ur""" Use this API to count the dnstxtrec resources configured on NetScaler.
"""
try :
obj = dnstxtrec()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
ur""" Use this API to count filtered the set of dnstxtrec resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = dnstxtrec()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
	class Authtype:
		ur"""Enumeration of the allowed authtype values for a dnstxtrec."""
		ALL = "ALL"
		ADNS = "ADNS"
		PROXY = "PROXY"
	class Type:
		ur"""Enumeration of the allowed type values for a dnstxtrec."""
		ALL = "ALL"
		ADNS = "ADNS"
		PROXY = "PROXY"
class dnstxtrec_response(base_response) :
def __init__(self, length=1) :
self.dnstxtrec = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.dnstxtrec = [dnstxtrec() for _ in range(length)]
|
{
"content_hash": "ba2dd84db4dfd3e9464f006a27eb32e7",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 656,
"avg_line_length": 34.199367088607595,
"alnum_prop": 0.6900157305450171,
"repo_name": "benfinke/ns_python",
"id": "1e26d3f52cdb94032aa2b6992ebdc13507f5049f",
"size": "11421",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build/lib/nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnstxtrec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "21836782"
},
{
"name": "Shell",
"bytes": "513"
}
],
"symlink_target": ""
}
|
"""
Make sure macro expansion of $(TargetFileName) is handled.
"""
from __future__ import print_function
import TestGyp
import os
import sys
# The whole test is skipped on Windows for now, i.e. everywhere it would
# actually build anything (see the linked bug).
if sys.platform == 'win32':
  print("This test is currently disabled: https://crbug.com/483696.")
  sys.exit(0)

test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

# NOTE(review): also skipped under MSVS 2013 specifically — the reason is not
# stated here; confirm before relying on coverage for that toolchain.
if not (test.format == 'msvs' and
    int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2013):
  CHDIR = 'vs-macros'
  test.run_gyp('targetfilename.gyp', chdir=CHDIR)
  # Build all targets and check that $(TargetFileName) expanded to the
  # expected output name for each target type.
  test.build('targetfilename.gyp', test.ALL, chdir=CHDIR)
  test.built_file_must_exist('test_targetfilename_executable.exe', chdir=CHDIR)
  test.built_file_must_exist('test_targetfilename_loadable_module.dll',
                             chdir=CHDIR)
  test.built_file_must_exist('test_targetfilename_shared_library.dll',
                             chdir=CHDIR)
  test.built_file_must_exist('test_targetfilename_static_library.lib',
                             chdir=CHDIR)
  test.built_file_must_exist('test_targetfilename_product_extension.foo',
                             chdir=CHDIR)
  test.pass_test()
|
{
"content_hash": "2eee13989d6963421a3cf0a3ccc99422",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 81,
"avg_line_length": 36,
"alnum_prop": 0.6353046594982079,
"repo_name": "msc-/gyp",
"id": "759e26c5669b2e2bbfcdfbb238bdd1ca5d1f7758",
"size": "1296",
"binary": false,
"copies": "9",
"ref": "refs/heads/remaster",
"path": "test/win/gyptest-macro-targetfilename.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1133"
},
{
"name": "Batchfile",
"bytes": "1115"
},
{
"name": "C",
"bytes": "39155"
},
{
"name": "C++",
"bytes": "41687"
},
{
"name": "Objective-C",
"bytes": "15819"
},
{
"name": "Objective-C++",
"bytes": "1873"
},
{
"name": "Python",
"bytes": "2209100"
},
{
"name": "Shell",
"bytes": "18353"
},
{
"name": "Swift",
"bytes": "116"
}
],
"symlink_target": ""
}
|
import shutil
import unittest
from typing import Optional
import cothread
import numpy as np
import pytest
from annotypes import Anno, add_call_types
from scanpointgenerator import (
CompoundGenerator,
ConcatGenerator,
LineGenerator,
StaticPointGenerator,
)
from malcolm.compat import OrderedDict
from malcolm.core import (
AbortedError,
AlarmSeverity,
AlarmStatus,
Context,
PartRegistrar,
Process,
)
from malcolm.modules import builtin, scanning
from malcolm.modules.builtin.defines import tmp_dir
from malcolm.modules.demo.blocks import detector_block, motion_block
from malcolm.modules.demo.parts import MotionChildPart
from malcolm.modules.demo.parts.motionchildpart import AExceptionStep
from malcolm.modules.scanning.controllers import RunnableController
from malcolm.modules.scanning.hooks import (
AAxesToMove,
ABreakpoints,
ACompletedSteps,
AContext,
AGenerator,
AStepsToDo,
PreRunHook,
UInfos,
ValidateHook,
)
from malcolm.modules.scanning.infos import ParameterTweakInfo
from malcolm.modules.scanning.parts import DetectorChildPart
from malcolm.modules.scanning.util import DetectorTable, RunnableStates
# Short aliases for the builtin part annotation types used below.
APartName = builtin.parts.APartName
AMri = builtin.parts.AMri
AInitialVisibility = builtin.parts.AInitialVisibility
AStateful = builtin.parts.AStateful

# Annotation type for the duration that MisbehavingPart.validate forces
# onto the generator during Validate.
with Anno("Value to tweak duration to in Validate"):
    AValidateDuration = float
class MisbehavingPauseException(Exception):
    """Raised by MisbehavingPart.on_configure when it is configured at
    completed step 3, to simulate a failure while pausing/seeking."""

    pass
class MisbehavingPart(MotionChildPart):
    """MotionChildPart variant used by the tests to exercise failure paths.

    It tweaks the generator duration during Validate, records that the
    PreRun hook fired, and raises MisbehavingPauseException when
    re-configured at completed step 3.
    """

    def __init__(
        self,
        name: APartName,
        mri: AMri,
        initial_visibility: AInitialVisibility = False,
        stateful: AStateful = True,
        validate_duration: AValidateDuration = 0.5,
    ) -> None:
        super().__init__(
            name, mri, initial_visibility=initial_visibility, stateful=stateful
        )
        # Duration that validate() will force onto the generator.
        self.validate_duration = validate_duration

    def setup(self, registrar):
        super(MisbehavingPart, self).setup(registrar)
        self.register_hooked(ValidateHook, self.validate)
        self.register_hooked(PreRunHook, self.on_pre_run)

    @add_call_types
    def validate(self, generator: AGenerator) -> UInfos:
        # Always tweak to the same value
        if generator.duration != self.validate_duration:
            # Round-trip through a dict so the caller's generator is not mutated.
            serialized = generator.to_dict()
            new_generator = CompoundGenerator.from_dict(serialized)
            new_generator.duration = self.validate_duration
            return ParameterTweakInfo("generator", new_generator)
        else:
            return None

    @add_call_types
    def on_pre_run(self):
        # Flag checked by the tests to prove the PreRun hook ran.
        self.pre_run_test = True

    # Allow CamelCase for arguments as they will be serialized by parent
    # noinspection PyPep8Naming
    @add_call_types
    def on_configure(
        self,
        context: AContext,
        completed_steps: ACompletedSteps,
        steps_to_do: AStepsToDo,
        # The following were passed from the user calling configure()
        generator: AGenerator,
        axesToMove: AAxesToMove,
        breakpoints: ABreakpoints,
        exceptionStep: AExceptionStep = 0,
    ) -> None:
        # NOTE(review): `breakpoints` is accepted but not forwarded to super —
        # presumably deliberate for these tests; confirm.
        super(MisbehavingPart, self).on_configure(
            context, completed_steps, steps_to_do, generator, axesToMove, exceptionStep
        )
        if completed_steps == 3:
            raise MisbehavingPauseException(
                "Called magic number to make pause throw an exception"
            )
class RunForeverPart(builtin.parts.ChildPart):
    """Part which runs forever and takes 1s to abort"""

    def setup(self, registrar: PartRegistrar) -> None:
        super(RunForeverPart, self).setup(registrar)
        # Hooks
        registrar.hook(scanning.hooks.RunHook, self.on_run)
        registrar.hook(scanning.hooks.AbortHook, self.on_abort)

    @add_call_types
    def on_run(self, context: scanning.hooks.AContext) -> None:
        # Wait forever here; only an abort interrupts the sleep loop.
        while True:
            context.sleep(1.0)

    @add_call_types
    def on_abort(self, context: scanning.hooks.AContext) -> None:
        # Sleep for 1s before returning, so aborting is measurably slow.
        context.sleep(1.0)
class TestRunnableStates(unittest.TestCase):
    """Check the allowed-transition table and state list of RunnableStates."""

    def setUp(self):
        self.o = RunnableStates()

    def test_init(self):
        # Expected transition map: state -> set of states reachable from it.
        expected = OrderedDict()
        expected["Resetting"] = {"Ready", "Fault", "Disabling"}
        expected["Ready"] = {
            "Configuring",
            "Aborting",
            "Saving",
            "Fault",
            "Disabling",
            "Loading",
        }
        expected["Saving"] = {"Fault", "Ready", "Disabling"}
        expected["Loading"] = {"Disabling", "Fault", "Ready"}
        expected["Configuring"] = {"Armed", "Aborting", "Fault", "Disabling"}
        expected["Armed"] = {
            "Seeking",
            "Aborting",
            "Running",
            "Fault",
            "Disabling",
            "Resetting",
        }
        expected["Running"] = {"PostRun", "Seeking", "Aborting", "Fault", "Disabling"}
        expected["PostRun"] = {
            "Finished",
            "Armed",
            "Seeking",
            "Aborting",
            "Fault",
            "Disabling",
        }
        expected["Finished"] = {
            "Seeking",
            "Resetting",
            "Configuring",
            "Aborting",
            "Fault",
            "Disabling",
        }
        expected["Seeking"] = {
            "Armed",
            "Paused",
            "Finished",
            "Aborting",
            "Fault",
            "Disabling",
        }
        expected["Paused"] = {"Seeking", "Running", "Aborting", "Fault", "Disabling"}
        expected["Aborting"] = {"Aborted", "Fault", "Disabling"}
        expected["Aborted"] = {"Resetting", "Fault", "Disabling"}
        expected["Fault"] = {"Resetting", "Disabling"}
        expected["Disabling"] = {"Disabled", "Fault"}
        expected["Disabled"] = {"Resetting"}
        assert self.o._allowed == expected
        # The full set of states, in declaration order.
        possible_states = [
            "Ready",
            "Resetting",
            "Saving",
            "Loading",
            "Configuring",
            "Armed",
            "Running",
            "Seeking",
            "PostRun",
            "Finished",
            "Paused",
            "Aborting",
            "Aborted",
            "Fault",
            "Disabling",
            "Disabled",
        ]
        assert self.o.possible_states == possible_states
class TestRunnableController(unittest.TestCase):
    """System tests for RunnableController driving a demo motion block
    with a MisbehavingPart, covering configure/run/pause/seek/abort/reset."""

    def setUp(self):
        self.p = Process("process")
        self.context = Context(self.p)
        # Make a motion block to act as our child
        self.config_dir = tmp_dir("config_dir")
        for c in motion_block(mri="childBlock", config_dir=self.config_dir.value):
            self.p.add_controller(c)
        self.b_child = self.context.block_view("childBlock")
        self.part = MisbehavingPart(
            mri="childBlock", name="part", initial_visibility=True
        )
        # create a root block for the RunnableController block to reside in
        self.c = RunnableController(mri="mainBlock", config_dir=self.config_dir.value)
        self.c.add_part(self.part)
        self.p.add_controller(self.c)
        self.b = self.context.block_view("mainBlock")
        self.ss = self.c.state_set
        # start the process off
        self.checkState(self.ss.DISABLED)
        self.p.start()
        self.checkState(self.ss.READY)

    def tearDown(self):
        self.p.stop(timeout=1)
        shutil.rmtree(self.config_dir.value)

    def checkState(self, state):
        # Assert the controller is currently in the given state.
        assert self.c.state.value == state

    def checkSteps(self, configured, completed, total):
        # Assert the configured/completed/total step counters on the block.
        assert self.b.configuredSteps.value == configured
        assert self.b.completedSteps.value == completed
        assert self.b.totalSteps.value == total

    def test_init(self):
        """Fresh controller has zeroed counters and expected configure args."""
        assert self.c.completed_steps.value == 0
        assert self.c.configured_steps.value == 0
        assert self.c.total_steps.value == 0
        assert list(self.b.configure.meta.takes.elements) == [
            "generator",
            "axesToMove",
            "breakpoints",
            "exceptionStep",
        ]

    def test_reset(self):
        """Reset after disable brings the controller back to Ready."""
        self.c.disable()
        self.checkState(self.ss.DISABLED)
        self.c.reset()
        self.checkState(self.ss.READY)

    def test_modify_child(self):
        """Modifying a child attribute flags the child as modified until restored."""
        # Save an initial setting for the child
        self.b_child.save("init_child")
        assert self.b_child.modified.value is False
        x = self.context.block_view("childBlock:COUNTERX")
        x.delta.put_value(31)
        # x delta now at 31, child should be modified
        assert x.delta.value == 31
        assert self.b_child.modified.value is True
        assert self.b_child.modified.alarm.severity == AlarmSeverity.MINOR_ALARM
        assert self.b_child.modified.alarm.status == AlarmStatus.CONF_STATUS
        assert self.b_child.modified.alarm.message == "x.delta.value = 31.0 not 1.0"
        self.prepare_half_run()
        self.b.run()
        # x counter now at 3 (lower bound of first run of x in reverse),
        # child should still be modified
        assert self.b_child.modified.value is True
        assert self.b_child.modified.alarm.severity == AlarmSeverity.MINOR_ALARM
        assert self.b_child.modified.alarm.status == AlarmStatus.CONF_STATUS
        assert self.b_child.modified.alarm.message == "x.delta.value = 31.0 not 1.0"
        assert x.counter.value == 3.0
        assert x.delta.value == 31
        x.delta.put_value(1.0)
        # x counter now at 0, child should be unmodified
        assert x.delta.value == 1.0
        assert self.b_child.modified.alarm.message == ""
        assert self.b_child.modified.value is False

    def test_modify_parent(self):
        """Child design changes propagate to the parent's modified flag."""
        # Save an initial setting for child and parent
        self.b_child.save("init_child")
        self.b.save("init_parent")
        # Change a value and save as a new child setting
        x = self.context.block_view("childBlock:COUNTERX")
        x.counter.put_value(31)
        self.b_child.save("new_child")
        assert self.b_child.modified.value is False
        assert self.b.modified.value is True
        assert self.b.modified.alarm.severity == AlarmSeverity.MINOR_ALARM
        assert self.b.modified.alarm.status == AlarmStatus.CONF_STATUS
        assert (
            self.b.modified.alarm.message
            == "part.design.value = 'new_child' not 'init_child'"
        )
        # Load the child again
        self.b_child.design.put_value("new_child")
        assert self.b.modified.value is True
        # And check that loading parent resets it
        self.b.design.put_value("init_parent")
        assert self.b.modified.value is False
        assert self.b_child.design.value == "init_child"
        # Put back
        self.b_child.design.put_value("new_child")
        assert self.b.modified.value is True
        # Do a configure, and check we get set back
        self.prepare_half_run()
        assert self.b_child.design.value == "init_child"
        assert self.b_child.modified.value is False
        assert self.b.modified.value is False

    def test_abort(self):
        """Abort from Ready lands in Aborted."""
        self.b.abort()
        self.checkState(self.ss.ABORTED)

    def prepare_half_run(self, duration=0.01, exception=0):
        # Configure a 3x2 grid scan moving only "x", so each run covers
        # one inner row (2 steps) of the 6-step total.
        line1 = LineGenerator("y", "mm", 0, 2, 3)
        line2 = LineGenerator("x", "mm", 0, 2, 2, alternate=True)
        compound = CompoundGenerator([line1, line2], [], [], duration)
        self.b.configure(generator=compound, axesToMove=["x"], exceptionStep=exception)

    def test_configure_run(self):
        """Three runs of 2 steps each complete the 6-step scan."""
        assert self.b.configure.meta.writeable is True
        assert self.b.configure.meta.takes.elements["generator"].writeable is True
        assert self.b.validate.meta.takes.elements["generator"].writeable is True
        assert self.b.validate.meta.returns.elements["generator"].writeable is False
        self.prepare_half_run()
        self.checkSteps(2, 0, 6)
        self.checkState(self.ss.ARMED)
        # configure itself becomes unwriteable once Armed
        assert self.b.configure.meta.writeable is False
        assert self.b.configure.meta.takes.elements["generator"].writeable is True
        assert self.b.validate.meta.takes.elements["generator"].writeable is True
        assert self.b.validate.meta.returns.elements["generator"].writeable is False
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)

    def test_abort_during_run(self):
        """Abort between runs lands in Aborted."""
        self.prepare_half_run()
        self.b.run()
        self.b.abort()
        self.checkState(self.ss.ABORTED)

    def test_pause_seek_resume(self):
        """pause(lastGoodStep) rewinds; completedSteps.put_value seeks forward."""
        self.prepare_half_run()
        self.checkSteps(configured=2, completed=0, total=6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.pause(lastGoodStep=1)
        self.checkState(self.ss.ARMED)
        self.checkSteps(2, 1, 6)
        self.b.run()
        self.checkSteps(4, 2, 6)
        self.b.completedSteps.put_value(5)
        self.checkSteps(6, 5, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)

    def test_pause_seek_resume_at_boundaries_without_defined_lastGoodStep(self):
        # When pausing at boundaries without lastGoodStep the scan should
        # remain in the same state - Armed for the start of the next inner scan
        # or Finished if the scan is complete.
        self.prepare_half_run()
        self.checkSteps(configured=2, completed=0, total=6)
        self.b.pause()
        self.checkState(self.ss.ARMED)
        self.checkSteps(2, 0, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.pause()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.pause()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)
        self.b.pause()
        self.checkState(self.ss.FINISHED)

    def test_pause_seek_resume_outside_limits(self):
        """A lastGoodStep beyond the end is clamped to total - 1."""
        self.prepare_half_run()
        self.checkSteps(configured=2, completed=0, total=6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.pause(lastGoodStep=7)
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 5, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)

    def test_resume_in_run(self):
        """Pause mid-run enters Paused; resume carries on to the row boundary."""
        self.prepare_half_run(duration=0.5)
        f = self.b.run_async()
        self.context.sleep(0.95)
        self.b.pause()
        self.checkState(self.ss.PAUSED)
        self.checkSteps(2, 1, 6)
        self.b.resume()
        # Parent should be running, child won't have got request yet
        # then = time.time()
        self.checkState(self.ss.RUNNING)
        self.context.wait_all_futures(f, timeout=2)
        # now = time.time()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        # This test fails on Travis sometimes, looks like the docker container
        # just gets starved
        # self.assertAlmostEqual(now - then, 0.5, delta=0.1)

    def test_pause_seek_resume_from_finished(self):
        """A finished scan can be rewound with pause and rerun to completion."""
        self.prepare_half_run()
        self.checkSteps(configured=2, completed=0, total=6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)
        self.b.pause(lastGoodStep=1)
        self.checkState(self.ss.ARMED)
        self.checkSteps(2, 1, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)

    def test_pause_seek_resume_from_postrun(self):
        # NOTE(review): this is byte-for-byte identical to
        # test_pause_seek_resume_from_finished and pauses from the Finished
        # state, not PostRun as the name suggests — confirm intent.
        self.prepare_half_run()
        self.checkSteps(configured=2, completed=0, total=6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)
        self.b.pause(lastGoodStep=1)
        self.checkState(self.ss.ARMED)
        self.checkSteps(2, 1, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)

    def test_reset_from_finished(self):
        """Reset from Finished returns to Ready."""
        self.prepare_half_run()
        self.checkSteps(2, 0, 6)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)
        self.c.reset()
        self.checkState(self.ss.READY)

    def test_configure_from_finished(self):
        """A second configure straight from Finished runs a fresh scan."""
        self.prepare_half_run()
        self.checkSteps(2, 0, 6)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)
        self.prepare_half_run()
        self.checkSteps(2, 0, 6)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(4, 2, 6)
        self.b.run()
        self.checkState(self.ss.ARMED)
        self.checkSteps(6, 4, 6)
        self.b.run()
        self.checkState(self.ss.FINISHED)

    def test_run_exception(self):
        """An exception raised by a part during run leaves the scan in Fault."""
        self.prepare_half_run(exception=1)
        with self.assertRaises(AssertionError):
            self.b.run()
        self.checkState(self.ss.FAULT)

    def test_run_stop(self):
        """Aborting an in-flight run raises AbortedError on the future."""
        self.prepare_half_run(duration=0.1)
        f = self.b.run_async()
        self.context.sleep(0.1)
        self.b.abort()
        with self.assertRaises(AbortedError):
            f.result()
        self.checkState(self.ss.ABORTED)

    def test_error_in_pause_returns_run(self):
        """A part raising in pause puts the scan in Fault and aborts the run."""
        self.prepare_half_run(duration=0.5)
        f = self.b.run_async()
        self.context.sleep(0.95)
        # MisbehavingPart raises when re-configured at completed step 3
        with self.assertRaises(MisbehavingPauseException):
            self.b.pause(lastGoodStep=3)
        self.checkState(self.ss.FAULT)
        with self.assertRaises(AbortedError):
            f.result()

    def test_prerun_gets_called(self):
        """The PreRun hook fires before the run (flag set by MisbehavingPart)."""
        self.prepare_half_run()
        self.b.run()
        assert self.part.pre_run_test
class TestRunnableControllerBreakpoints(unittest.TestCase):
    def setUp(self):
        """Build a motion child block under a bare RunnableController."""
        self.p = Process("process1")
        self.context = Context(self.p)
        # NOTE(review): p2/context2 appear unused in the visible tests; confirm.
        self.p2 = Process("process2")
        self.context2 = Context(self.p2)
        # Make a motion block to act as our child
        self.config_dir = tmp_dir("config_dir")
        for c in motion_block(mri="childBlock", config_dir=self.config_dir.value):
            self.p.add_controller(c)
        self.b_child = self.context.block_view("childBlock")
        # create a root block for the RunnableController block to reside in
        self.c = RunnableController(mri="mainBlock", config_dir=self.config_dir.value)
        self.p.add_controller(self.c)
        self.b = self.context.block_view("mainBlock")
        self.ss = self.c.state_set
        # start the process off
        self.checkState(self.ss.DISABLED)
        self.p.start()
        self.checkState(self.ss.READY)
    def tearDown(self):
        # Stop the process and remove the temporary config directory.
        self.p.stop(timeout=1)
        shutil.rmtree(self.config_dir.value)
    def checkState(self, state):
        # Assert the controller is currently in the given state.
        assert self.c.state.value == state
    def checkSteps(self, configured, completed, total):
        # Assert the configured/completed/total step counters on the block.
        assert self.b.configuredSteps.value == configured
        assert self.b.completedSteps.value == completed
        assert self.b.totalSteps.value == total
    def test_get_breakpoint_index(self):
        """get_breakpoint_index maps a step count to its breakpoint interval."""
        line = LineGenerator("x", "mm", 0, 180, 100)
        duration = 0.01
        breakpoints = [10, 20, 30, 40]
        self.b.configure(
            generator=CompoundGenerator([line], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        test_steps = [0, 5, 10, 20, 30, 40, 60, 80, 100]
        expected_indices = [0, 0, 1, 1, 2, 2, 3, 3, 3]
        # Check the breakpoint_steps are set as expected
        # (cumulative sums of the breakpoints list)
        assert self.c.breakpoint_steps == [10, 30, 60, 100]
        for step_num in range(len(test_steps)):
            steps = test_steps[step_num]
            index = expected_indices[step_num]
            actual_index = self.c.get_breakpoint_index(steps)
            assert (
                actual_index == index
            ), f"Expected index {index} for {steps} steps, got {actual_index}"
    def test_steps_per_run_one_axis(self):
        """With no breakpoints, one run covers the whole inner axis."""
        line = LineGenerator("x", "mm", 0, 180, 10)
        duration = 0.01
        compound = CompoundGenerator([line], [], [], duration)
        compound.prepare()
        steps_per_run = self.c.get_steps_per_run(
            generator=compound, axes_to_move=["x"], breakpoints=[]
        )
        assert steps_per_run == [10]
    def test_steps_per_run_concat(self):
        """Breakpoints split a ConcatGenerator scan into matching runs."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        compound = CompoundGenerator([concat], [], [], duration)
        compound.prepare()
        breakpoints = [2, 3, 10, 2]
        steps_per_run = self.c.get_steps_per_run(
            generator=compound, axes_to_move=["x"], breakpoints=breakpoints
        )
        assert steps_per_run == breakpoints
    def test_breakpoints_tomo(self):
        """A 17-step tomography-style scan pauses at each breakpoint."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        breakpoints = [2, 3, 10, 2]
        self.b.configure(
            generator=CompoundGenerator([concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 17
        # Each run advances to the next cumulative breakpoint: 2, 5, 15, 17.
        self.checkSteps(2, 0, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(5, 2, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 17, 17)
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_sum_larger_than_total_steps_raises_AssertionError(self):
        """Breakpoints summing past the generator size reject the configure."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        # 2 + 3 + 100 + 2 > 17 total steps, so configure must fail.
        breakpoints = [2, 3, 100, 2]
        self.assertRaises(
            AssertionError,
            self.b.configure,
            generator=CompoundGenerator([concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
    def test_breakpoints_without_last(self):
        """Omitting the final breakpoint behaves as if it covered the rest."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        # Last interval (2 steps) left implicit.
        breakpoints = [2, 3, 10]
        self.b.configure(
            generator=CompoundGenerator([concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 17
        self.checkSteps(2, 0, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(5, 2, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 17, 17)
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_rocking_tomo(self):
        """A 27-step rocking scan: the unlisted remainder forms a final run."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        line4 = LineGenerator("x", "mm", 180, 0, 10)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3, line4])
        breakpoints = [2, 3, 10, 2]
        self.b.configure(
            generator=CompoundGenerator([concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 27
        # Cumulative breakpoints: 2, 5, 15, 17, then the remaining 10 steps.
        self.checkSteps(2, 0, 27)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(5, 2, 27)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 27)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 27)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(27, 17, 27)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(27, 27, 27)
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_repeat_with_static(self):
        """Breakpoints can be given explicitly for each StaticPointGenerator repeat."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        staticGen = StaticPointGenerator(2)
        # The 17-step inner pattern repeated twice: breakpoints given twice.
        breakpoints = [2, 3, 10, 2, 2, 3, 10, 2]
        self.b.configure(
            generator=CompoundGenerator([staticGen, concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 34
        self.checkState(self.ss.ARMED)
        self.checkSteps(2, 0, 34)
        self.b.run()
        self.checkSteps(5, 2, 34)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 34)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 34)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(19, 17, 34)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(22, 19, 34)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(32, 22, 34)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(34, 32, 34)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_repeat_rocking_tomo(self):
        """A 27-step rocking pattern repeated twice with full breakpoint lists."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        line4 = LineGenerator("x", "mm", 180, 0, 10)
        concat = ConcatGenerator([line1, line2, line3, line4])
        staticGen = StaticPointGenerator(2)
        duration = 0.01
        breakpoints = [2, 3, 10, 2, 10, 2, 3, 10, 2, 10]
        self.b.configure(
            generator=CompoundGenerator([staticGen, concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 54
        self.checkState(self.ss.ARMED)
        self.checkSteps(2, 0, 54)
        self.b.run()
        self.checkSteps(5, 2, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(27, 17, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(29, 27, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(32, 29, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(42, 32, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(44, 42, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(54, 44, 54)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_helical_scan(self):
        """Breakpoints also apply when two axes ("y" and "x") move together."""
        line1 = LineGenerator(
            ["y", "x"], ["mm", "mm"], [-0.555556, -10], [-0.555556, -10], 5
        )
        line2 = LineGenerator(["y", "x"], ["mm", "mm"], [0, 0], [10, 180], 10)
        line3 = LineGenerator(
            ["y", "x"], ["mm", "mm"], [10.555556, 190], [10.555556, 190], 2
        )
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        breakpoints = [2, 3, 10, 2]
        self.b.configure(
            generator=CompoundGenerator([concat], [], [], duration),
            axesToMove=["y", "x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 17
        self.checkState(self.ss.ARMED)
        self.checkSteps(2, 0, 17)
        self.b.run()
        self.checkSteps(5, 2, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_with_pause(self):
        """Rewinding with pause(lastGoodStep) re-aligns to the enclosing breakpoint."""
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        breakpoints = [2, 3, 10, 2]
        self.b.configure(
            generator=CompoundGenerator([concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 17
        self.checkSteps(2, 0, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(5, 2, 17)
        self.checkState(self.ss.ARMED)
        # rewind into the first breakpoint interval
        self.b.pause(lastGoodStep=1)
        self.checkSteps(2, 1, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(5, 2, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 17)
        self.checkState(self.ss.ARMED)
        # rewind into the third breakpoint interval
        self.b.pause(lastGoodStep=11)
        self.checkSteps(15, 11, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 17, 17)
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_with_pause_at_boundaries_without_lastGoodStep(self):
        # We expect the pause call to be successful but not to have an effect
        # when called at a breakpoint or at the end of a scan.
        line1 = LineGenerator("x", "mm", -10, -10, 5)
        line2 = LineGenerator("x", "mm", 0, 180, 10)
        line3 = LineGenerator("x", "mm", 190, 190, 2)
        duration = 0.01
        concat = ConcatGenerator([line1, line2, line3])
        breakpoints = [2, 3, 10, 2]
        self.b.configure(
            generator=CompoundGenerator([concat], [], [], duration),
            axesToMove=["x"],
            breakpoints=breakpoints,
        )
        assert self.c.configure_params.generator.size == 17
        self.checkSteps(2, 0, 17)
        self.checkState(self.ss.ARMED)
        # Pause
        self.b.pause()
        self.checkSteps(2, 0, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(5, 2, 17)
        self.checkState(self.ss.ARMED)
        # Pause
        self.b.pause()
        self.checkSteps(5, 2, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(15, 5, 17)
        self.checkState(self.ss.ARMED)
        # Pause
        self.b.pause()
        self.checkSteps(15, 5, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 15, 17)
        self.checkState(self.ss.ARMED)
        # Pause
        self.b.pause()
        self.checkSteps(17, 15, 17)
        self.checkState(self.ss.ARMED)
        self.b.run()
        self.checkSteps(17, 17, 17)
        self.checkState(self.ss.FINISHED)
        # Pause
        self.b.pause()
        self.checkSteps(17, 17, 17)
        self.checkState(self.ss.FINISHED)
    def abort_after_1s(self):
        """Helper spawned on its own cothread: abort the running scan after 1s."""
        # Need a new context as in a different cothread
        c = Context(self.p)
        b = c.block_view("mainBlock")
        c.sleep(1.0)
        self.checkState(self.ss.RUNNING)
        b.abort()
        self.checkState(self.ss.ABORTED)
    def test_run_returns_in_ABORTED_state_when_aborted(self):
        """Aborting a never-ending run raises AbortedError and lands in Aborted."""
        # Add our forever running part
        forever_part = RunForeverPart(
            mri="childBlock", name="forever_part", initial_visibility=True
        )
        self.c.add_part(forever_part)
        # Configure our block
        duration = 0.1
        line1 = LineGenerator("y", "mm", 0, 2, 3)
        line2 = LineGenerator("x", "mm", 0, 2, 2, alternate=True)
        compound = CompoundGenerator([line1, line2], [], [], duration)
        self.b.configure(generator=compound, axesToMove=["x"])
        # Spawn the abort thread
        abort_thread = cothread.Spawn(self.abort_after_1s, raise_on_wait=True)
        # Do the run, which will be aborted
        with self.assertRaises(AbortedError):
            self.b.run()
        self.checkState(self.ss.ABORTED)
        # Check the abort thread didn't raise
        abort_thread.Wait(1.0)
def test_breakpoints_tomo_with_outer_axis(self):
# Outer axis we don't move
outer_steps = 2
line_outer = LineGenerator("y", "mm", 0, 1, outer_steps)
# ConcatGenerator we do move
line1 = LineGenerator("x", "mm", -10, -10, 5)
line2 = LineGenerator("x", "mm", 0, 180, 10)
line3 = LineGenerator("x", "mm", 190, 190, 2)
concat = ConcatGenerator([line1, line2, line3])
compound = CompoundGenerator([line_outer, concat], [], [], duration=0.01)
breakpoints = [2, 3, 10, 2]
inner_steps = sum(breakpoints)
total_steps = inner_steps * outer_steps
self.b.configure(generator=compound, axesToMove=["x"], breakpoints=breakpoints)
# Configured, completed, total
self.checkSteps(2, 0, total_steps)
self.checkState(self.ss.ARMED)
# Check we have the full configured steps
assert self.c.configure_params.generator.size == total_steps
# Check our breakpoints steps
expected_breakpoint_steps = [2, 5, 15, 17, 19, 22, 32, 34]
self.assertEqual(expected_breakpoint_steps, self.c.breakpoint_steps)
# Run our controller through all but last breakpoint
breakpoints = len(expected_breakpoint_steps)
for index in range(breakpoints - 1):
self.b.run()
self.checkSteps(
expected_breakpoint_steps[index + 1],
expected_breakpoint_steps[index],
total_steps,
)
self.checkState(self.ss.ARMED)
# Final breakpoint
self.b.run()
self.checkSteps(total_steps, total_steps, total_steps)
self.checkState(self.ss.FINISHED)
def test_breakpoints_tomo_with_two_outer_axes(self):
# Outer axes we don't move
outer_steps = 2
line_outer = LineGenerator("y", "mm", 0, 1, outer_steps)
outer_outer_steps = 3
line_outer_outer = LineGenerator("z", "mm", 0, 1, outer_outer_steps)
# ConcatGenerator we do move
line1 = LineGenerator("x", "mm", -10, -10, 5)
line2 = LineGenerator("x", "mm", 0, 180, 10)
concat = ConcatGenerator([line1, line2])
compound = CompoundGenerator(
[line_outer_outer, line_outer, concat], [], [], duration=0.01
)
breakpoints = [2, 3, 10]
inner_steps = sum(breakpoints)
total_steps = inner_steps * outer_steps * outer_outer_steps
self.b.configure(generator=compound, axesToMove=["x"], breakpoints=breakpoints)
# Configured, completed, total
self.checkSteps(2, 0, total_steps)
self.checkState(self.ss.ARMED)
# Check we have the full configured steps
assert self.c.configure_params.generator.size == total_steps
# Check our breakpoints steps
expected_breakpoint_steps = [
2,
5,
15,
17,
20,
30,
32,
35,
45,
47,
50,
60,
62,
65,
75,
77,
80,
90,
]
self.assertEqual(expected_breakpoint_steps, self.c.breakpoint_steps)
# Run our controller through all but last breakpoint
breakpoints = len(expected_breakpoint_steps)
for index in range(breakpoints - 1):
self.b.run()
self.checkSteps(
expected_breakpoint_steps[index + 1],
expected_breakpoint_steps[index],
total_steps,
)
self.checkState(self.ss.ARMED)
# Final breakpoint
self.b.run()
self.checkSteps(total_steps, total_steps, total_steps)
self.checkState(self.ss.FINISHED)
    def test_breakpoints_2d_inner_scan(self):
        """Breakpoints can span rows of a 2D scan when both axes move."""
        # Y-axis
        outer_steps = 2
        line_y = LineGenerator("y", "mm", 0, 1, outer_steps)
        # X-axis
        line_x_1 = LineGenerator("x", "mm", -10, -10, 5)
        line_x_2 = LineGenerator("x", "mm", 0, 180, 10)
        line_x_3 = LineGenerator("x", "mm", 190, 190, 2)
        line_x = ConcatGenerator([line_x_1, line_x_2, line_x_3])
        compound = CompoundGenerator([line_y, line_x], [], [], duration=0.01)
        breakpoints = [2, 3, 10, 2, 17]
        total_steps = sum(breakpoints)
        # Configure the scan
        self.b.configure(
            generator=compound, axesToMove=["x", "y"], breakpoints=breakpoints
        )
        self.checkSteps(2, 0, total_steps)
        self.checkState(self.ss.ARMED)
        # Check we have the full amount of configured steps
        assert self.c.configure_params.generator.size == total_steps
        # Check our breakpoints steps
        expected_breakpoint_steps = [2, 5, 15, 17, 34]
        self.assertEqual(expected_breakpoint_steps, self.c.breakpoint_steps)
        # Run our controller through all but last breakpoint
        breakpoints = len(expected_breakpoint_steps)
        for index in range(breakpoints - 1):
            self.b.run()
            self.checkSteps(
                expected_breakpoint_steps[index + 1],
                expected_breakpoint_steps[index],
                total_steps,
            )
            self.checkState(self.ss.ARMED)
        # Final breakpoint
        self.b.run()
        self.checkSteps(total_steps, total_steps, total_steps)
        self.checkState(self.ss.FINISHED)
    def test_breakpoints_2d_inner_scan_with_outer_axis(self):
        """Breakpoints of a 2D inner scan repeat for a static outer axis."""
        # Outer axes we don't move
        outer_steps = 2
        line_outer = LineGenerator("z", "mm", 0, 1, outer_steps)
        # Y-axis
        line_y = LineGenerator("y", "mm", 0, 1, 2)
        # X-axis
        line_x_1 = LineGenerator("x", "mm", -10, -10, 5)
        line_x_2 = LineGenerator("x", "mm", 0, 180, 10)
        line_x_3 = LineGenerator("x", "mm", 190, 190, 2)
        line_x = ConcatGenerator([line_x_1, line_x_2, line_x_3])
        compound = CompoundGenerator(
            [line_outer, line_y, line_x], [], [], duration=0.01
        )
        breakpoints = [2, 3, 10, 2, 17]
        total_steps = sum(breakpoints) * outer_steps
        # Configure the scan
        self.b.configure(
            generator=compound, axesToMove=["x", "y"], breakpoints=breakpoints
        )
        self.checkSteps(2, 0, total_steps)
        self.checkState(self.ss.ARMED)
        # Check we have the full amount of configured steps
        assert self.c.configure_params.generator.size == total_steps
        # Check our breakpoints steps
        expected_breakpoint_steps = [2, 5, 15, 17, 34, 36, 39, 49, 51, 68]
        self.assertEqual(expected_breakpoint_steps, self.c.breakpoint_steps)
        # Run our controller through all but last breakpoint
        breakpoints = len(expected_breakpoint_steps)
        for index in range(breakpoints - 1):
            self.b.run()
            self.checkSteps(
                expected_breakpoint_steps[index + 1],
                expected_breakpoint_steps[index],
                total_steps,
            )
            self.checkState(self.ss.ARMED)
        # Final breakpoint
        self.b.run()
        self.checkSteps(total_steps, total_steps, total_steps)
        self.checkState(self.ss.FINISHED)
class TestRunnableControllerValidation(unittest.TestCase):
    """This test class is to test validation with multiple parts tweaking
    parameters"""
    def setUp(self):
        """Create a RunnableController in a Process plus a block view on it."""
        self.p = Process("process")
        self.context = Context(self.p)
        self.main_mri = "mainBlock"
        self.detector_one_mri = "detector01"
        self.detector_two_mri = "detector02"
        self.detector_one_part_name = "detectorPart01"
        self.detector_two_part_name = "detectorPart02"
        # Make a motion block to act as our child
        self.config_dir = tmp_dir("config_dir")
        # Store a list of our detector block views
        self.b_detectors = []
        # create a root block for the RunnableController block to reside in
        self.c = RunnableController(mri=self.main_mri, config_dir=self.config_dir.value)
        # Set up the process
        self.p.add_controller(self.c)
        self.b = self.context.block_view(self.main_mri)
        self.ss = self.c.state_set
    def tearDown(self):
        """Stop the process and clean up the temporary config directory."""
        self.p.stop(timeout=1)
        shutil.rmtree(self.config_dir.value)
    def checkState(self, state):
        """Assert that the controller is in the given state."""
        assert self.c.state.value == state
    def _start_process(self):
        """Start the process, checking the DISABLED -> READY transition."""
        self.checkState(self.ss.DISABLED)
        self.p.start()
        self.checkState(self.ss.READY)
    def _add_motion_block_and_part(self, mri="motion01", name="motionPart"):
        """Add a motion child block and a MisbehavingPart that controls it."""
        # Add block
        for c in motion_block(mri=mri, config_dir=self.config_dir.value):
            self.p.add_controller(c)
        self.b_motion = self.context.block_view(mri)
        # Add part
        self.motion_part = MisbehavingPart(mri=mri, name=name, initial_visibility=True)
        self.c.add_part(self.motion_part)
    def _add_detector_block_and_part(self, mri, name, readout_time=0.1):
        """Add a detector child block and a DetectorChildPart that controls it."""
        # Block
        for c in detector_block(
            mri=mri, config_dir=self.config_dir.value, readout_time=readout_time
        ):
            self.p.add_controller(c)
        # Append block view
        self.b_detectors.append(self.context.block_view(mri))
        # Part
        detector_part = DetectorChildPart(mri=mri, name=name, initial_visibility=True)
        self.c.add_part(detector_part)
    def _get_compound_generator(self, duration: float) -> CompoundGenerator:
        """Return a 3x2 grid generator with the given frame duration."""
        line1 = LineGenerator("y", "mm", 0, 2, 3)
        line2 = LineGenerator("x", "mm", 0, 2, 2)
        return CompoundGenerator([line1, line2], [], [], duration=duration)
    def _get_detector_table(
        self,
        detector_one_exposure: float,
        detector_two_exposure: Optional[float] = None,
    ) -> DetectorTable:
        """Build a DetectorTable enabling one or both test detectors."""
        if detector_two_exposure is None:
            return DetectorTable(
                [True],
                [self.detector_one_part_name],
                [self.detector_one_mri],
                [detector_one_exposure],
                [1],
            )
        else:
            return DetectorTable(
                [True, True],
                [self.detector_one_part_name, self.detector_two_part_name],
                [self.detector_one_mri, self.detector_two_mri],
                [detector_one_exposure, detector_two_exposure],
                [1, 1],
            )
    def test_validate_single_detector_calculates_correct_exposure_with_duration(self):
        """With a duration given, validate fills in the detector exposure."""
        # Set up a single detector
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._start_process()
        # Config
        det_one_exposure = 0.89995
        duration = 1.0
        compound_generator = self._get_compound_generator(duration)
        # Expected outputs
        expected_detectors = self._get_detector_table(det_one_exposure)
        # Validate
        actual = self.b.validate(
            generator=compound_generator, axesToMove=["x"], fileDir="/tmp"
        )
        # Check output
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_single_detector_calculates_correct_duration_with_exposure(self):
        """With an exposure given, validate fills in the generator duration."""
        # Set up a single detector
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._start_process()
        # Config
        det_one_exposure = 0.89995
        compound_generator = self._get_compound_generator(0.0)
        detectors = self._get_detector_table(det_one_exposure)
        # Expected outputs
        expected_duration = 1.0
        # Validate
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert np.isclose(actual["generator"].duration, expected_duration)
        actual["generator"].duration = 0.0
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == detectors.to_dict()
    def test_validate_single_detector_succeeds_with_both_duration_and_exposure(self):
        """Both duration and exposure given: validate keeps them unchanged."""
        # Set up a single detector
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._start_process()
        # Config
        duration = 1.0
        det_one_exposure = 0.3
        compound_generator = self._get_compound_generator(duration)
        expected_detectors = self._get_detector_table(det_one_exposure)
        # Validate
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=expected_detectors,
        )
        # Check output
        assert actual["generator"].duration == duration
        actual["generator"].duration = 0.0
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_single_detector_sets_min_exposure_with_zero_exposure_and_duration(
        self,
    ):
        """Zero duration and exposure: validate picks the minimum exposure."""
        # Set up a single detector
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._start_process()
        # Config
        duration = 0.0
        det_one_exposure = 0.0
        compound_generator = self._get_compound_generator(duration)
        detectors = self._get_detector_table(det_one_exposure)
        # Expected outputs
        expected_duration = pytest.approx(0.100105)
        expected_det_one_exposure = pytest.approx(0.0001)
        expected_detectors = self._get_detector_table(expected_det_one_exposure)
        # Validate
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert actual["generator"].duration == expected_duration
        actual["generator"].duration = 0.0
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_two_detectors_set_exposure_of_both_with_duration(self):
        """A given duration sets both exposures from their readout times."""
        # Set up two detectors with different readout times
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._add_detector_block_and_part(
            self.detector_two_mri, self.detector_two_part_name, readout_time=0.2
        )
        self._start_process()
        # Config
        duration = 1.0
        compound_generator = self._get_compound_generator(duration)
        # Expected outputs
        expected_det_one_exposure = 0.89995
        expected_det_two_exposure = 0.79995
        expected_detectors = self._get_detector_table(
            expected_det_one_exposure, expected_det_two_exposure
        )
        # Validate
        actual = self.b.validate(
            generator=compound_generator, axesToMove=["x"], fileDir="/tmp"
        )
        # Check output
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_two_detectors_set_exposure_of_one_with_duration_and_one_exposure(
        self,
    ):
        """Duration plus one exposure: only the unset exposure is filled in."""
        # Set up two detectors with different readout times
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._add_detector_block_and_part(
            self.detector_two_mri, self.detector_two_part_name, readout_time=0.2
        )
        self._start_process()
        # Config
        duration = 1.0
        det_one_exposure = 0.45
        det_two_exposure = 0.0
        compound_generator = self._get_compound_generator(duration)
        detectors = self._get_detector_table(det_one_exposure, det_two_exposure)
        # Expected outputs
        expected_det_two_exposure = 0.79995
        expected_detectors = self._get_detector_table(
            det_one_exposure, expected_det_two_exposure
        )
        # Validate
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
        # Validate with the detectors swapped around
        # Config
        det_one_exposure = 0.0
        det_two_exposure = 0.7
        compound_generator = self._get_compound_generator(duration)
        detectors = self._get_detector_table(det_one_exposure, det_two_exposure)
        # Expected outputs
        expected_det_one_exposure = 0.89995
        expected_detectors = self._get_detector_table(
            expected_det_one_exposure, det_two_exposure
        )
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_two_detectors_set_duration_and_one_exposure_with_one_exposure(
        self,
    ):
        """One exposure given: validate derives duration and other exposure."""
        # Set up two detectors with different readout times
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._add_detector_block_and_part(
            self.detector_two_mri, self.detector_two_part_name, readout_time=0.2
        )
        self._start_process()
        # Config
        duration = 0.0
        det_one_exposure = 0.5
        det_two_exposure = 0.0
        expected_duration = 0.60003
        compound_generator = self._get_compound_generator(duration)
        detectors = self._get_detector_table(det_one_exposure, det_two_exposure)
        # Expected outputs
        expected_det_two_exposure = 0.4
        expected_detectors = self._get_detector_table(
            det_one_exposure, expected_det_two_exposure
        )
        # Validate
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert actual["generator"].duration == pytest.approx(expected_duration)
        actual["generator"].duration = 0.0
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert np.allclose(actual["detectors"].to_dict()["exposure"], [0.5, 0.4])
        # Validate with the detectors swapped around
        det_one_exposure = 0.0
        det_two_exposure = 0.7
        compound_generator = self._get_compound_generator(duration)
        detectors = self._get_detector_table(det_one_exposure, det_two_exposure)
        # Expected outputs
        expected_duration = 0.900045
        expected_det_one_exposure = pytest.approx(0.8)
        expected_det_two_exposure = pytest.approx(0.7)
        expected_detectors = self._get_detector_table(
            expected_det_one_exposure, expected_det_two_exposure
        )
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert actual["generator"].duration == pytest.approx(
            expected_duration, rel=1e-6
        )
        actual["generator"].duration = 0.0
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_two_detectors_set_duration_with_both_exposures(self):
        """Both exposures given: validate derives the minimum duration."""
        # Set up two detectors with different readout times
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._add_detector_block_and_part(
            self.detector_two_mri, self.detector_two_part_name, readout_time=0.2
        )
        self._start_process()
        # Config
        duration = 0.0
        det_one_exposure = 0.3
        det_two_exposure = 0.5
        compound_generator = self._get_compound_generator(duration)
        detectors = self._get_detector_table(det_one_exposure, det_two_exposure)
        # Expected outputs
        expected_duration = 0.700035
        expected_det_one_exposure = pytest.approx(det_one_exposure)
        expected_det_two_exposure = pytest.approx(det_two_exposure)
        expected_detectors = self._get_detector_table(
            expected_det_one_exposure, expected_det_two_exposure
        )
        # Validate
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert actual["generator"].duration == pytest.approx(
            expected_duration, rel=1e-6
        )
        actual["generator"].duration = 0.0
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_two_detectors_calculates_min_duration_for_no_duration_or_exposure(
        self,
    ):
        """Nothing given: validate picks minimum duration and exposures."""
        # Set up two detectors with different readout times
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._add_detector_block_and_part(
            self.detector_two_mri, self.detector_two_part_name, readout_time=0.3
        )
        self._start_process()
        # Config
        duration = 0.0
        det_one_exposure = 0.0
        det_two_exposure = 0.0
        compound_generator = self._get_compound_generator(duration)
        detectors = self._get_detector_table(det_one_exposure, det_two_exposure)
        # Expected outputs
        expected_duration = pytest.approx(0.300115)
        expected_det_one_exposure = pytest.approx(0.2001)
        expected_det_two_exposure = pytest.approx(0.0001)
        expected_detectors = self._get_detector_table(
            expected_det_one_exposure, expected_det_two_exposure
        )
        # Validate
        actual = self.b.validate(
            generator=compound_generator,
            axesToMove=["x"],
            fileDir="/tmp",
            detectors=detectors,
        )
        # Check output
        assert actual["generator"].duration == expected_duration
        actual["generator"].duration = 0.0
        assert actual["generator"].to_dict() == compound_generator.to_dict()
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_detectors.to_dict()
    def test_validate_with_line_generator_increases_duration_for_motion_part(self):
        """A motion part can tweak duration; exposures follow the new value."""
        # Setup two detectors and our motion block
        self._add_motion_block_and_part()
        self._add_detector_block_and_part(
            self.detector_one_mri, self.detector_one_part_name
        )
        self._add_detector_block_and_part(
            self.detector_two_mri, self.detector_two_part_name, readout_time=0.25
        )
        self._start_process()
        # Config
        compound_generator = self._get_compound_generator(0.1)
        # Expected outputs
        expected_duration = 0.5
        expected_det_one_exposure = 0.399975
        expected_det_two_exposure = 0.249975
        expected_table = self._get_detector_table(
            expected_det_one_exposure, expected_det_two_exposure
        )
        # Call validate
        actual = self.b.validate(
            generator=compound_generator, axesToMove=["x"], fileDir="/tmp"
        )
        # Check output
        assert actual["generator"].duration == expected_duration
        assert actual["axesToMove"] == ["x"]
        assert actual["detectors"].to_dict() == expected_table.to_dict()
        actual["generator"].duration = 0.1
        assert actual["generator"].to_dict() == compound_generator.to_dict()
|
{
"content_hash": "e5c8a69313a22030caa10867c08a2e38",
"timestamp": "",
"source": "github",
"line_count": 1786,
"max_line_length": 88,
"avg_line_length": 33.46248600223964,
"alnum_prop": 0.5869085067933874,
"repo_name": "dls-controls/pymalcolm",
"id": "c9041d2d562981c373c7c555886c21ef2263fde5",
"size": "59764",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_modules/test_scanning/test_runnablecontroller.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "549"
},
{
"name": "Python",
"bytes": "1583458"
},
{
"name": "Shell",
"bytes": "580"
}
],
"symlink_target": ""
}
|
import logging
import optparse
import sys
import tempfile
import unittest
from webkitpy.common.system.executive import Executive, ScriptError
from webkitpy.common.system import executive_mock
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.common.system.path import abspath_to_uri
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.mocktool import MockOptions
from webkitpy.common.system.executive_mock import MockExecutive, MockExecutive2
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.layout_tests.port import Port, Driver, DriverOutput
from webkitpy.layout_tests.port.base import VirtualTestSuite
from webkitpy.layout_tests.port.test import add_unit_tests_to_mock_filesystem, TestPort
class PortTest(unittest.TestCase):
    def make_port(self, executive=None, with_tests=False, port_name=None, **kwargs):
        """Create a Port backed by a MockSystemHost.

        If with_tests is True, return a TestPort with the unit-test files
        added to the mock filesystem (port_name is ignored in that case).
        """
        host = MockSystemHost()
        if executive:
            host.executive = executive
        if with_tests:
            add_unit_tests_to_mock_filesystem(host.filesystem)
            return TestPort(host, **kwargs)
        return Port(host, port_name or 'baseport', **kwargs)
    def test_default_child_processes(self):
        """default_child_processes() should always return a value."""
        port = self.make_port()
        self.assertIsNotNone(port.default_child_processes())
    def test_format_wdiff_output_as_html(self):
        """The wdiff delimiter tokens are rewritten into styled HTML spans."""
        output = "OUTPUT %s %s %s" % (Port._WDIFF_DEL, Port._WDIFF_ADD, Port._WDIFF_END)
        html = self.make_port()._format_wdiff_output_as_html(output)
        expected_html = "<head><style>.del { background: #faa; } .add { background: #afa; }</style></head><pre>OUTPUT <span class=del> <span class=add> </span></pre>"
        self.assertEqual(html, expected_html)
def test_wdiff_command(self):
port = self.make_port()
port._path_to_wdiff = lambda: "/path/to/wdiff"
command = port._wdiff_command("/actual/path", "/expected/path")
expected_command = [
"/path/to/wdiff",
"--start-delete=##WDIFF_DEL##",
"--end-delete=##WDIFF_END##",
"--start-insert=##WDIFF_ADD##",
"--end-insert=##WDIFF_END##",
"/actual/path",
"/expected/path",
]
self.assertEqual(command, expected_command)
def _file_with_contents(self, contents, encoding="utf-8"):
new_file = tempfile.NamedTemporaryFile()
new_file.write(contents.encode(encoding))
new_file.flush()
return new_file
    def test_pretty_patch_os_error(self):
        """An OSError from PrettyPatch falls back to the error HTML."""
        port = self.make_port(executive=executive_mock.MockExecutive2(exception=OSError))
        oc = OutputCapture()
        oc.capture_output()
        self.assertEqual(port.pretty_patch_text("patch.txt"),
                         port._pretty_patch_error_html)
        # This tests repeated calls to make sure we cache the result.
        self.assertEqual(port.pretty_patch_text("patch.txt"),
                         port._pretty_patch_error_html)
        oc.restore_output()
    def test_pretty_patch_script_error(self):
        """A ScriptError from PrettyPatch falls back to the error HTML."""
        # FIXME: This is some ugly white-box test hacking ...
        port = self.make_port(executive=executive_mock.MockExecutive2(exception=ScriptError))
        port._pretty_patch_available = True
        self.assertEqual(port.pretty_patch_text("patch.txt"),
                         port._pretty_patch_error_html)
        # This tests repeated calls to make sure we cache the result.
        self.assertEqual(port.pretty_patch_text("patch.txt"),
                         port._pretty_patch_error_html)
    def test_wdiff_text(self):
        """wdiff_text() returns the raw wdiff output when wdiff is available."""
        port = self.make_port()
        port.wdiff_available = lambda: True
        port._run_wdiff = lambda a, b: 'PASS'
        self.assertEqual('PASS', port.wdiff_text(None, None))
    def test_diff_text(self):
        """diff_text() copes with unicode filenames and produces a real diff."""
        port = self.make_port()
        # Make sure that we don't run into decoding exceptions when the
        # filenames are unicode, with regular or malformed input (expected or
        # actual input is always raw bytes, not unicode).
        port.diff_text('exp', 'act', 'exp.txt', 'act.txt')
        port.diff_text('exp', 'act', u'exp.txt', 'act.txt')
        port.diff_text('exp', 'act', u'a\xac\u1234\u20ac\U00008000', 'act.txt')
        port.diff_text('exp' + chr(255), 'act', 'exp.txt', 'act.txt')
        port.diff_text('exp' + chr(255), 'act', u'exp.txt', 'act.txt')
        # Though expected and actual files should always be read in with no
        # encoding (and be stored as str objects), test unicode inputs just to
        # be safe.
        port.diff_text(u'exp', 'act', 'exp.txt', 'act.txt')
        port.diff_text(
            u'a\xac\u1234\u20ac\U00008000', 'act', 'exp.txt', 'act.txt')
        # And make sure we actually get diff output.
        diff = port.diff_text('foo', 'bar', 'exp.txt', 'act.txt')
        self.assertIn('foo', diff)
        self.assertIn('bar', diff)
        self.assertIn('exp.txt', diff)
        self.assertIn('act.txt', diff)
        self.assertNotIn('nosuchthing', diff)
    def test_setup_test_run(self):
        """setup_test_run() should not raise."""
        port = self.make_port()
        # This routine is a no-op. We just test it for coverage.
        port.setup_test_run()
    def test_test_dirs(self):
        """test_dirs() lists the top-level directories containing tests."""
        port = self.make_port()
        port.host.filesystem.write_text_file(port.layout_tests_dir() + '/canvas/test', '')
        port.host.filesystem.write_text_file(port.layout_tests_dir() + '/css2.1/test', '')
        dirs = port.test_dirs()
        self.assertIn('canvas', dirs)
        self.assertIn('css2.1', dirs)
    def test_skipped_perf_tests(self):
        """skipped_perf_tests() parses the Skipped file, dropping blank lines."""
        port = self.make_port()
        def add_text_file(dirname, filename, content='some content'):
            # Helper: write a file below the perf tests dir, creating dirs.
            dirname = port.host.filesystem.join(port.perf_tests_dir(), dirname)
            port.host.filesystem.maybe_make_directory(dirname)
            port.host.filesystem.write_text_file(port.host.filesystem.join(dirname, filename), content)
        add_text_file('inspector', 'test1.html')
        add_text_file('inspector', 'unsupported_test1.html')
        add_text_file('inspector', 'test2.html')
        add_text_file('inspector/resources', 'resource_file.html')
        add_text_file('unsupported', 'unsupported_test2.html')
        add_text_file('', 'Skipped', '\n'.join(['Layout', '', 'SunSpider', 'Supported/some-test.html']))
        self.assertEqual(port.skipped_perf_tests(), ['Layout', 'SunSpider', 'Supported/some-test.html'])
    def test_get_option__set(self):
        """get_option() returns a value set on the options object."""
        options, args = optparse.OptionParser().parse_args([])
        options.foo = 'bar'
        port = self.make_port(options=options)
        self.assertEqual(port.get_option('foo'), 'bar')
    def test_get_option__unset(self):
        """get_option() returns None for an option that was never set."""
        port = self.make_port()
        self.assertIsNone(port.get_option('foo'))
    def test_get_option__default(self):
        """get_option() falls back to the supplied default."""
        port = self.make_port()
        self.assertEqual(port.get_option('foo', 'bar'), 'bar')
def test_additional_platform_directory(self):
port = self.make_port(port_name='foo')
port.default_baseline_search_path = lambda: ['LayoutTests/platform/foo']
layout_test_dir = port.layout_tests_dir()
test_file = 'fast/test.html'
# No additional platform directory
self.assertEqual(
port.expected_baselines(test_file, '.txt'),
[(None, 'fast/test-expected.txt')])
self.assertEqual(port.baseline_path(), 'LayoutTests/platform/foo')
# Simple additional platform directory
port._options.additional_platform_directory = ['/tmp/local-baselines']
port._filesystem.write_text_file('/tmp/local-baselines/fast/test-expected.txt', 'foo')
self.assertEqual(
port.expected_baselines(test_file, '.txt'),
[('/tmp/local-baselines', 'fast/test-expected.txt')])
self.assertEqual(port.baseline_path(), '/tmp/local-baselines')
# Multiple additional platform directories
port._options.additional_platform_directory = ['/foo', '/tmp/local-baselines']
self.assertEqual(
port.expected_baselines(test_file, '.txt'),
[('/tmp/local-baselines', 'fast/test-expected.txt')])
self.assertEqual(port.baseline_path(), '/foo')
    def test_nonexistant_expectations(self):
        """expectations_dict() skips TestExpectations files that don't exist."""
        port = self.make_port(port_name='foo')
        port.expectations_files = lambda: ['/mock-checkout/third_party/WebKit/LayoutTests/platform/exists/TestExpectations', '/mock-checkout/third_party/WebKit/LayoutTests/platform/nonexistant/TestExpectations']
        port._filesystem.write_text_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/exists/TestExpectations', '')
        self.assertEqual('\n'.join(port.expectations_dict().keys()), '/mock-checkout/third_party/WebKit/LayoutTests/platform/exists/TestExpectations')
    def test_additional_expectations(self):
        """--additional-expectations files are appended to expectations_dict(),
        with missing files silently skipped."""
        port = self.make_port(port_name='foo')
        port.port_name = 'foo'
        port._filesystem.write_text_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/foo/TestExpectations', '')
        port._filesystem.write_text_file(
            '/tmp/additional-expectations-1.txt', 'content1\n')
        port._filesystem.write_text_file(
            '/tmp/additional-expectations-2.txt', 'content2\n')
        self.assertEqual('\n'.join(port.expectations_dict().values()), '')
        port._options.additional_expectations = [
            '/tmp/additional-expectations-1.txt']
        self.assertEqual('\n'.join(port.expectations_dict().values()), 'content1\n')
        port._options.additional_expectations = [
            '/tmp/nonexistent-file', '/tmp/additional-expectations-1.txt']
        self.assertEqual('\n'.join(port.expectations_dict().values()), 'content1\n')
        port._options.additional_expectations = [
            '/tmp/additional-expectations-1.txt', '/tmp/additional-expectations-2.txt']
        self.assertEqual('\n'.join(port.expectations_dict().values()), 'content1\n\ncontent2\n')
def test_additional_env_var(self):
port = self.make_port(options=optparse.Values({'additional_env_var': ['FOO=BAR', 'BAR=FOO']}))
self.assertEqual(port.get_option('additional_env_var'), ['FOO=BAR', 'BAR=FOO'])
environment = port.setup_environ_for_server()
self.assertTrue(('FOO' in environment) & ('BAR' in environment))
self.assertEqual(environment['FOO'], 'BAR')
self.assertEqual(environment['BAR'], 'FOO')
def test_find_no_paths_specified(self):
port = self.make_port(with_tests=True)
layout_tests_dir = port.layout_tests_dir()
tests = port.tests([])
self.assertNotEqual(len(tests), 0)
    def test_find_one_test(self):
        """A single explicit test path resolves to exactly one test."""
        port = self.make_port(with_tests=True)
        tests = port.tests(['failures/expected/image.html'])
        self.assertEqual(len(tests), 1)
    def test_find_glob(self):
        """A glob pattern matches multiple tests."""
        port = self.make_port(with_tests=True)
        tests = port.tests(['failures/expected/im*'])
        self.assertEqual(len(tests), 2)
    def test_find_with_skipped_directories(self):
        """Files under a resources/ directory are not collected as tests."""
        port = self.make_port(with_tests=True)
        tests = port.tests(['userscripts'])
        self.assertNotIn('userscripts/resources/iframe.html', tests)
    def test_find_with_skipped_directories_2(self):
        """Asking directly for a resources/ directory yields no tests."""
        port = self.make_port(with_tests=True)
        tests = port.tests(['userscripts/resources'])
        self.assertEqual(tests, [])
    def test_is_test_file(self):
        """is_test_file() accepts test pages and rejects baselines/references."""
        filesystem = MockFileSystem()
        self.assertTrue(Port.is_test_file(filesystem, '', 'foo.html'))
        self.assertTrue(Port.is_test_file(filesystem, '', 'foo.svg'))
        self.assertTrue(Port.is_test_file(filesystem, '', 'test-ref-test.html'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo.png'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-expected.html'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-expected.svg'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-expected.xht'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-expected-mismatch.html'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-expected-mismatch.svg'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-expected-mismatch.xhtml'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-ref.html'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-notref.html'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-notref.xht'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'foo-ref.xhtml'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'ref-foo.html'))
        self.assertFalse(Port.is_test_file(filesystem, '', 'notref-foo.xhr'))
    def test_parse_reftest_list(self):
        """_parse_reftest_list() handles comments, blanks and multiple refs."""
        port = self.make_port(with_tests=True)
        port.host.filesystem.files['bar/reftest.list'] = "\n".join(["== test.html test-ref.html",
                                                                    "",
                                                                    "# some comment",
                                                                    "!= test-2.html test-notref.html # more comments",
                                                                    "== test-3.html test-ref.html",
                                                                    "== test-3.html test-ref2.html",
                                                                    "!= test-3.html test-notref.html",
                                                                    "fuzzy(80,500) == test-3 test-ref.html"])
        # Note that we don't support the syntax in the last line; the code should ignore it, rather than crashing.
        reftest_list = Port._parse_reftest_list(port.host.filesystem, 'bar')
        self.assertEqual(reftest_list, {'bar/test.html': [('==', 'bar/test-ref.html')],
                                        'bar/test-2.html': [('!=', 'bar/test-notref.html')],
                                        'bar/test-3.html': [('==', 'bar/test-ref.html'), ('==', 'bar/test-ref2.html'), ('!=', 'bar/test-notref.html')]})
def test_reference_files(self):
    """reference_files() resolves -expected/-expected-mismatch companions."""
    port = self.make_port(with_tests=True)
    layout_dir = port.layout_tests_dir()
    cases = (
        ('passes/svgreftest.svg', '==', '/passes/svgreftest-expected.svg'),
        ('passes/xhtreftest.svg', '==', '/passes/xhtreftest-expected.html'),
        ('passes/phpreftest.php', '!=',
         '/passes/phpreftest-expected-mismatch.svg'),
    )
    for test_name, relation, reference_suffix in cases:
        self.assertEqual(port.reference_files(test_name),
                         [(relation, layout_dir + reference_suffix)])
def test_operating_system(self):
    """The default test port reports 'mac' as its operating system."""
    port = self.make_port()
    self.assertEqual('mac', port.operating_system())
def test_http_server_supports_ipv6(self):
    """IPv6 is supported by default but not on cygwin or native Windows."""
    port = self.make_port()
    self.assertTrue(port.http_server_supports_ipv6())
    for unsupported_os in ('cygwin', 'win'):
        port.host.platform.os_name = unsupported_os
        self.assertFalse(port.http_server_supports_ipv6())
def test_check_httpd_success(self):
    """check_httpd() logs nothing when the httpd binary exits cleanly."""
    port = self.make_port(executive=MockExecutive2())
    port.path_to_apache = lambda: '/usr/sbin/httpd'
    output_capture = OutputCapture()
    output_capture.capture_output()
    self.assertTrue(port.check_httpd())
    _, _, logs = output_capture.restore_output()
    self.assertEqual('', logs)
def test_httpd_returns_error_code(self):
    """check_httpd() fails and logs a warning when httpd exits non-zero."""
    port = self.make_port(executive=MockExecutive2(exit_code=1))
    port.path_to_apache = lambda: '/usr/sbin/httpd'
    output_capture = OutputCapture()
    output_capture.capture_output()
    self.assertFalse(port.check_httpd())
    _, _, logs = output_capture.restore_output()
    self.assertEqual('httpd seems broken. Cannot run http tests.\n', logs)
def test_test_exists(self):
    """test_exists() finds real directories, files and virtual tests."""
    port = self.make_port(with_tests=True)
    existing = ('passes', 'passes/text.html', 'virtual',
                'virtual/passes/text.html')
    for path in existing:
        self.assertTrue(port.test_exists(path))
    missing = ('passes/does_not_exist.html', 'virtual/does_not_exist.html')
    for path in missing:
        self.assertFalse(port.test_exists(path))
def test_test_isfile(self):
    """test_isfile() is true only for existing test files, not dirs."""
    port = self.make_port(with_tests=True)
    for path in ('passes/text.html', 'virtual/passes/text.html'):
        self.assertTrue(port.test_isfile(path))
    not_files = ('passes', 'passes/does_not_exist.html', 'virtual',
                 'virtual/does_not_exist.html')
    for path in not_files:
        self.assertFalse(port.test_isfile(path))
def test_test_isdir(self):
    """test_isdir() is true only for existing test directories."""
    port = self.make_port(with_tests=True)
    for path in ('passes', 'virtual'):
        self.assertTrue(port.test_isdir(path))
    not_dirs = (
        'passes/text.html',
        'passes/does_not_exist.html',
        'passes/does_not_exist/',
        'virtual/does_not_exist.html',
        'virtual/does_not_exist/',
        'virtual/passes/text.html',
    )
    for path in not_dirs:
        self.assertFalse(port.test_isdir(path))
def test_tests(self):
    """tests() expands path arguments, including virtual test suites."""
    port = self.make_port(with_tests=True)

    # No arguments: everything, including virtual tests.
    all_tests = port.tests([])
    self.assertIn('passes/text.html', all_tests)
    self.assertIn('virtual/passes/text.html', all_tests)

    # A real directory does not pull in its virtual counterparts.
    passes_tests = port.tests(['passes'])
    self.assertIn('passes/text.html', passes_tests)
    self.assertIn('passes/passes/test-virtual-passes.html', passes_tests)
    self.assertNotIn('virtual/passes/text.html', passes_tests)

    # A virtual directory lists only its own (non-recursive-virtual) tests.
    virtual_tests = port.tests(['virtual/passes'])
    self.assertNotIn('passes/text.html', virtual_tests)
    self.assertIn('virtual/passes/test-virtual-passes.html', virtual_tests)
    self.assertIn('virtual/passes/passes/test-virtual-passes.html', virtual_tests)
    self.assertNotIn('virtual/passes/test-virtual-virtual/passes.html', virtual_tests)
    self.assertNotIn('virtual/passes/virtual/passes/test-virtual-passes.html', virtual_tests)
def test_build_path(self):
    """An explicit --build-directory is honoured and suffixed with Release."""
    options = optparse.Values({'build_directory': '/my-build-directory/'})
    port = self.make_port(options=options)
    self.assertEqual(port._build_path(), '/my-build-directory/Release')
def test_dont_require_http_server(self):
    """By default a port does not require an HTTP server."""
    port = self.make_port()
    self.assertFalse(port.requires_http_server())
class NaturalCompareTest(unittest.TestCase):
    """Checks that _natural_sort_key() orders embedded numbers numerically."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        # Compare via the port's natural sort key, mirroring test ordering.
        self.assertEqual(
            cmp(self._port._natural_sort_key(x),
                self._port._natural_sort_key(y)),
            result)

    def test_natural_compare(self):
        cases = (
            ('a', 'a', 0),
            ('ab', 'a', 1),
            ('a', 'ab', -1),
            ('', '', 0),
            ('', 'ab', -1),
            ('1', '2', -1),
            ('2', '1', 1),
            ('1', '10', -1),
            ('2', '10', -1),
            ('foo_1.html', 'foo_2.html', -1),
            ('foo_1.1.html', 'foo_2.html', -1),
            ('foo_1.html', 'foo_10.html', -1),
            ('foo_2.html', 'foo_10.html', -1),
            ('foo_23.html', 'foo_10.html', 1),
            ('foo_23.html', 'foo_100.html', -1),
        )
        for x, y, expected in cases:
            self.assert_cmp(x, y, expected)
class KeyCompareTest(unittest.TestCase):
    """Checks that test_key() orders full test paths naturally."""

    def setUp(self):
        self._port = TestPort(MockSystemHost())

    def assert_cmp(self, x, y, result):
        # Compare the port-generated sort keys for two test paths.
        self.assertEqual(
            cmp(self._port.test_key(x), self._port.test_key(y)), result)

    def test_test_key(self):
        cases = (
            ('/a', '/a', 0),
            ('/a', '/b', -1),
            ('/a2', '/a10', -1),
            ('/a2/foo', '/a10/foo', -1),
            ('/a/foo11', '/a/foo2', 1),
            ('/ab', '/a/a/b', -1),
            ('/a/a/b', '/ab', 1),
            ('/foo-bar/baz', '/foo/baz', -1),
        )
        for x, y, expected in cases:
            self.assert_cmp(x, y, expected)
class VirtualTestSuiteTest(unittest.TestCase):
    """Behavior of VirtualTestSuite name/base/args construction."""

    def test_basic(self):
        suite = VirtualTestSuite('suite', 'base/foo', ['--args'])
        self.assertEqual(suite.name, 'virtual/suite/base/foo')
        self.assertEqual(suite.base, 'base/foo')
        self.assertEqual(suite.args, ['--args'])

    def test_no_slash(self):
        # A slash in the suite name is rejected: no attributes get set.
        suite = VirtualTestSuite('suite/bar', 'base/foo', ['--args'])
        for attribute in ('name', 'base', 'args'):
            self.assertFalse(hasattr(suite, attribute))

    def test_legacy(self):
        # Legacy naming uses the raw suite name, not the base path.
        suite = VirtualTestSuite('suite/bar', 'base/foo', ['--args'],
                                 use_legacy_naming=True)
        self.assertEqual(suite.name, 'virtual/suite/bar')
        self.assertEqual(suite.base, 'base/foo')
        self.assertEqual(suite.args, ['--args'])
|
{
"content_hash": "6f16c0e77cd84460db1772606bc7c3ab",
"timestamp": "",
"source": "github",
"line_count": 442,
"max_line_length": 211,
"avg_line_length": 46.7262443438914,
"alnum_prop": 0.6348714472473732,
"repo_name": "ondra-novak/blink",
"id": "319de1e82c76bf1734c9db403f816127ab246438",
"size": "22180",
"binary": false,
"copies": "1",
"ref": "refs/heads/nw",
"path": "Tools/Scripts/webkitpy/layout_tests/port/base_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "12983"
},
{
"name": "Bison",
"bytes": "64327"
},
{
"name": "C",
"bytes": "1487362"
},
{
"name": "C++",
"bytes": "40237536"
},
{
"name": "CSS",
"bytes": "537586"
},
{
"name": "Java",
"bytes": "66510"
},
{
"name": "JavaScript",
"bytes": "26502253"
},
{
"name": "Makefile",
"bytes": "677"
},
{
"name": "Objective-C",
"bytes": "23525"
},
{
"name": "Objective-C++",
"bytes": "377730"
},
{
"name": "PHP",
"bytes": "166434"
},
{
"name": "Perl",
"bytes": "585757"
},
{
"name": "Python",
"bytes": "3997910"
},
{
"name": "Ruby",
"bytes": "141818"
},
{
"name": "Shell",
"bytes": "8806"
},
{
"name": "XSLT",
"bytes": "49099"
}
],
"symlink_target": ""
}
|
"""Lexer for bib(la)tex.
Port of the lexer from bibtex-ruby with a few changes. This lexer also supports
parentheses instead of braces for string, preamble and comment entries, e.g.
'@string(var = 1)', and generates tokens rather than returning a list.
"""
from bibpy.lexers.base_lexer import BaseLexer
# A custom lexer is necessary as funcparserlib's lexing infrastructure is
# not up to the task of lexing bib(la)tex's somewhat complicated
# context-dependent structure like nested braces and comments.
class BibLexer(BaseLexer):
    """Lexer for generating bib tokens.

    Implemented as a mode-driven state machine: ``self.mode`` names the
    ``lex_*`` generator that handles the next stretch of input, and the
    handlers switch modes when they see braces, parentheses and '@' markers.
    """

    def __init__(self):
        """Initialise the lexer."""
        super().__init__()
        self.reset('')
        # Outside of any entry everything is free-form comment text.
        self.mode = 'comment'

        # Maps each mode name to the generator that lexes in that mode.
        self._modes = {
            'bib': self.lex_main,
            'entry': self.lex_entry,
            'value': self.lex_braced,
            'parens': self.lex_parens,
            'comment': self.lex_comment
        }

        # Token table: (token name, (pattern, optional handler)).
        # A handler may switch modes and customises the emitted token.
        self._compile_regexes([
            ('lbrace', (r'{', self.lex_lbrace)),
            ('rbrace', (r'}', self.lex_rbrace)),
            ('equals', (r'\s*(=)\s*', None)),
            ('comma', (r',', None)),
            ('number', (r'-?(0|([1-9][0-9]*))', None)),
            ('name', (r"[ ]*[\w\-:?'\.]+[ ]*", None)),
            ('entry', (r'@', self.found_entry)),
            # NOTE(review): lex_string is not defined in this class --
            # presumably inherited from BaseLexer; confirm.
            ('string', (r'"[^"]+"', self.lex_string)),
            ('lparen', (r'\(', self.lex_lparen)),
            ('rparen', (r'\)', self.lex_rparen)),
            ('concat', (r'[ ]*#[ ]*', None)),
            ('space', (r'[ \t\r\n]+', None)),
        ])

    def reset(self, string):
        """Reset the internal state of the lexer."""
        super().reset(string)
        self.in_entry = False

    def found_entry(self, value):
        """Handler for finding a bibliographic entry."""
        self.in_entry = True
        self.ignore_whitespace = True

        return self.make_token('entry', value)

    def lex_lbrace(self, value):
        """Lex a left brace."""
        self.brace_level += 1

        # The outermost brace of @comment/@preamble opens their raw value;
        # for ordinary entries any nested brace opens a braced field value.
        if self.brace_level == 1 and self.bibtype in ('comment', 'preamble'):
            self.mode = 'value'
        elif self.brace_level > 1:
            self.mode = 'value'

        return self.make_token('lbrace', value)

    def lex_rbrace(self, value):
        """Lex a right brace."""
        self.brace_level -= 1

        if self.brace_level == 0:
            # The entry itself just closed; back to free-form comment mode.
            self.in_entry = False
            self.mode = 'comment'
        elif self.brace_level < 0:
            # NOTE(review): if raise_unbalanced() raises internally the outer
            # 'raise' never runs; if it returns an exception instance it is
            # raised here. Confirm the intended contract with BaseLexer.
            raise self.raise_unbalanced()

        return self.make_token('rbrace', value)

    def lex_lparen(self, value):
        """Lex a left parenthesis."""
        # Parenthesised forms: '@string(...)' lexes its body normally,
        # '@comment(...)' / '@preamble(...)' capture raw content.
        if self.bibtype == 'string':
            self.mode = 'bib'
            self.ignore_whitespace = True
        elif self.bibtype in ('comment', 'preamble'):
            self.mode = 'parens'

        return self.make_token('lparen', value)

    def lex_rparen(self, value):
        """Lex a right parenthesis."""
        return self.make_token('rparen', value)

    def lex_parens(self):
        """Lex a set of possibly nested parentheses and its contents."""
        paren_level = 1
        content = ''

        while True:
            before, token = self.until('parens')

            if token == '(':
                paren_level += 1
                content += before + token
            elif token == ')':
                paren_level -= 1
                content += before

                if paren_level == 0:
                    # Matching close paren found: emit the accumulated raw
                    # content and hand control back to the main bib mode.
                    yield self.make_token('content', content)
                    yield self.make_token('rparen', token)
                    self.mode = 'bib'
                    break

    def lex_braced(self):
        """Lex a possibly nested braced expression and its contents."""
        content = ''

        while True:
            before, token = self.until('braces')

            if token == '{':
                self.brace_level += 1
                content += before + token
            elif token == '}':
                self.brace_level -= 1
                content += before

                if self.brace_level == 0:
                    # Level 0: this brace also closed the whole entry
                    # (@comment/@preamble style) -- leave entry state.
                    yield self.make_token('content', content)
                    yield self.make_token('rbrace', token)
                    self.in_entry = False
                    self.ignore_whitespace = False
                    self.mode = 'comment'
                    self.bibtype = None
                    break
                elif self.brace_level == 1 and self.bibtype not in\
                        ('comment', 'string', 'preamble'):
                    # Level 1 inside an ordinary entry: a field value just
                    # closed; resume lexing the entry body.
                    yield self.make_token('content', content)
                    yield self.make_token('rbrace', token)
                    self.mode = 'bib'
                    break
                else:
                    # Still nested: the brace is part of the content.
                    content += token

    def lex_comment(self):
        """Lex a non-entry comment."""
        comment, entry = self.until('entry')

        if comment:
            yield self.make_token('comment', comment)

        if entry == '@':
            # Start of an entry: switch modes and emit the '@' token.
            self.mode = 'entry'
            self.in_entry = True
            self.ignore_whitespace = True
            yield self.make_token('entry', entry)

    def lex_entry(self):
        """Lex a bibliographic entry."""
        self.brace_level = 0
        bibtype = self.expect('name')

        # Special entry types get dedicated handling; everything else is a
        # regular bibliographic entry.
        entry_type = bibtype.value.lower()
        if entry_type in ('comment', 'preamble', 'string'):
            self.bibtype = entry_type
        else:
            self.bibtype = 'entry'

        yield bibtype
        self.mode = 'bib'
        self.ignore_whitespace = True

    def lex_main(self):
        """Lex the body of an entry using the compiled token table."""
        for _, token in self.scan(search_type='match'):
            if token is not None:
                yield token
            else:
                self.raise_error('Unmatched characters')
|
{
"content_hash": "e176789e22b7ba2f5e4a50d30d24597b",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 79,
"avg_line_length": 32.92265193370166,
"alnum_prop": 0.4846450746769592,
"repo_name": "MisanthropicBit/bibpy",
"id": "56ce80f5374a8e3013c287802c78061fa098cb40",
"size": "5984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bibpy/lexers/biblexer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "13208"
},
{
"name": "Python",
"bytes": "224105"
},
{
"name": "TeX",
"bytes": "361020"
}
],
"symlink_target": ""
}
|
"""Tests for the Mac OS X preprocess plug-ins."""
import os
import unittest
from dfvfs.helpers import file_system_searcher
from dfvfs.path import fake_path_spec
from plaso.artifacts import knowledge_base
from plaso.preprocessors import macosx
from plaso.preprocessors import test_lib
class MacOSXBuildTest(test_lib.PreprocessPluginTest):
    """Tests for the Mac OS X build information preprocess plug-in object."""

    # Minimal SystemVersion.plist fixture with the keys the plug-in reads.
    _FILE_DATA = (
        '<?xml version="1.0" encoding="UTF-8"?>\n'
        '<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" '
        '"http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n'
        '<plist version="1.0">\n'
        '<dict>\n'
        '\t<key>ProductBuildVersion</key>\n'
        '\t<string>13C64</string>\n'
        '\t<key>ProductCopyright</key>\n'
        '\t<string>1983-2014 Apple Inc.</string>\n'
        '\t<key>ProductName</key>\n'
        '\t<string>Mac OS X</string>\n'
        '\t<key>ProductUserVisibleVersion</key>\n'
        '\t<string>10.9.2</string>\n'
        '\t<key>ProductVersion</key>\n'
        '\t<string>10.9.2</string>\n'
        '</dict>\n'
        '</plist>\n')

    def setUp(self):
        """Sets up the needed objects used throughout the test."""
        self._fake_file_system = self._BuildSingleFileFakeFileSystem(
            u'/System/Library/CoreServices/SystemVersion.plist',
            self._FILE_DATA)

        mount_point = fake_path_spec.FakePathSpec(location=u'/')
        self._searcher = file_system_searcher.FileSystemSearcher(
            self._fake_file_system, mount_point)

    def testGetValue(self):
        """Tests the GetValue function."""
        knowledge_base_object = knowledge_base.KnowledgeBase()

        plugin = macosx.MacOSXBuild()
        plugin.Run(self._searcher, knowledge_base_object)

        build = knowledge_base_object.GetValue('build')
        # assertEquals is a deprecated unittest alias; use assertEqual.
        self.assertEqual(build, u'10.9.2')
class MacOSXHostname(test_lib.PreprocessPluginTest):
    """Tests for the Mac OS X hostname preprocess plug-in object."""

    # Note that is only part of the normal preferences.plist file data.
    _FILE_DATA = (
        '<?xml version="1.0" encoding="UTF-8"?>\n'
        '<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" '
        '"http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n'
        '<plist version="1.0">\n'
        '<dict>\n'
        '\t<key>System</key>\n'
        '\t<dict>\n'
        '\t\t<key>Network</key>\n'
        '\t\t<dict>\n'
        '\t\t\t<key>HostNames</key>\n'
        '\t\t\t<dict>\n'
        '\t\t\t\t<key>LocalHostName</key>\n'
        '\t\t\t\t<string>Plaso\'s Mac mini</string>\n'
        '\t\t\t</dict>\n'
        '\t\t</dict>\n'
        '\t\t<key>System</key>\n'
        '\t\t<dict>\n'
        '\t\t\t<key>ComputerName</key>\n'
        '\t\t\t<string>Plaso\'s Mac mini</string>\n'
        '\t\t\t<key>ComputerNameEncoding</key>\n'
        '\t\t\t<integer>0</integer>\n'
        '\t\t</dict>\n'
        '\t</dict>\n'
        '</dict>\n'
        '</plist>\n')

    def setUp(self):
        """Sets up the needed objects used throughout the test."""
        self._fake_file_system = self._BuildSingleFileFakeFileSystem(
            u'/Library/Preferences/SystemConfiguration/preferences.plist',
            self._FILE_DATA)

        mount_point = fake_path_spec.FakePathSpec(location=u'/')
        self._searcher = file_system_searcher.FileSystemSearcher(
            self._fake_file_system, mount_point)

    def testGetValue(self):
        """Tests the GetValue function."""
        knowledge_base_object = knowledge_base.KnowledgeBase()

        plugin = macosx.MacOSXHostname()
        plugin.Run(self._searcher, knowledge_base_object)

        # assertEquals is a deprecated unittest alias; use assertEqual.
        self.assertEqual(knowledge_base_object.hostname, u'Plaso\'s Mac mini')
class MacOSXKeyboard(test_lib.PreprocessPluginTest):
    """Tests for the Mac OS X keyboard layout preprocess plug-in object."""

    def setUp(self):
        """Sets up the needed objects used throughout the test."""
        # Use a context manager so the fixture file is closed even if the
        # read fails (the original open/read/close leaked on error).
        with open(os.path.join(
            self._TEST_DATA_PATH, u'com.apple.HIToolbox.plist')) as file_object:
            file_data = file_object.read()

        self._fake_file_system = self._BuildSingleFileFakeFileSystem(
            u'/Library/Preferences/com.apple.HIToolbox.plist',
            file_data)

        mount_point = fake_path_spec.FakePathSpec(location=u'/')
        self._searcher = file_system_searcher.FileSystemSearcher(
            self._fake_file_system, mount_point)

    def testGetValue(self):
        """Tests the GetValue function."""
        knowledge_base_object = knowledge_base.KnowledgeBase()

        plugin = macosx.MacOSXKeyboard()
        plugin.Run(self._searcher, knowledge_base_object)

        keyboard_layout = knowledge_base_object.GetValue('keyboard_layout')
        # assertEquals is a deprecated unittest alias; use assertEqual.
        self.assertEqual(keyboard_layout, u'US')
class MacOSXTimezone(test_lib.PreprocessPluginTest):
    """Tests for the Mac OS X timezone preprocess plug-in object."""

    def setUp(self):
        """Sets up the needed objects used throughout the test."""
        # The plug-in reads the timezone from the /etc/localtime symlink.
        self._fake_file_system = self._BuildSingleLinkFakeFileSystem(
            u'/private/etc/localtime', u'/usr/share/zoneinfo/Europe/Amsterdam')

        mount_point = fake_path_spec.FakePathSpec(location=u'/')
        self._searcher = file_system_searcher.FileSystemSearcher(
            self._fake_file_system, mount_point)

    def testGetValue(self):
        """Tests the GetValue function."""
        knowledge_base_object = knowledge_base.KnowledgeBase()

        plugin = macosx.MacOSXTimeZone()
        plugin.Run(self._searcher, knowledge_base_object)

        time_zone_str = knowledge_base_object.GetValue('time_zone_str')
        # assertEquals is a deprecated unittest alias; use assertEqual.
        self.assertEqual(time_zone_str, u'Europe/Amsterdam')
class MacOSXUsersTest(test_lib.PreprocessPluginTest):
    """Tests for the Mac OS X usernames preprocess plug-in object."""

    def setUp(self):
        """Sets up the needed objects used throughout the test."""
        # Use a context manager so the fixture file is closed even if the
        # read fails (the original open/read/close leaked on error).
        with open(os.path.join(
            self._TEST_DATA_PATH, u'com.apple.HIToolbox.plist')) as file_object:
            file_data = file_object.read()

        self._fake_file_system = self._BuildSingleFileFakeFileSystem(
            u'/private/var/db/dslocal/nodes/Default/users/nobody.plist',
            file_data)

        mount_point = fake_path_spec.FakePathSpec(location=u'/')
        self._searcher = file_system_searcher.FileSystemSearcher(
            self._fake_file_system, mount_point)

    def testGetValue(self):
        """Tests the GetValue function."""
        knowledge_base_object = knowledge_base.KnowledgeBase()

        plugin = macosx.MacOSXUsers()
        plugin.Run(self._searcher, knowledge_base_object)

        users = knowledge_base_object.GetValue('users')
        # assertEquals is a deprecated unittest alias; use assertEqual.
        self.assertEqual(len(users), 1)

        # TODO: fix the parsing of the following values to match the behavior on
        # Mac OS X.

        # The string -2 is converted into the integer -1.
        self.assertEqual(users[0].get('uid', None), -1)
        # 'home' is 0 which represents: /var/empty but we convert it
        # into u'<not set>'.
        self.assertEqual(users[0].get('path', None), u'<not set>')
        # 'name' is 0 which represents: nobody but we convert it into u'<not set>'.
        self.assertEqual(users[0].get('name', None), u'<not set>')
        # 'realname' is 0 which represents: 'Unprivileged User' but we convert it
        # into u'N/A'.
        self.assertEqual(users[0].get('realname', None), u'N/A')
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "4838ef68d88f993667f3f2969cb51293",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 79,
"avg_line_length": 34.97549019607843,
"alnum_prop": 0.6613875262789068,
"repo_name": "cvandeplas/plaso",
"id": "5e80c7b1da77823c71a8e1c2e05fbdcdb9531b08",
"size": "7833",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plaso/preprocessors/macosx_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2812257"
},
{
"name": "Shell",
"bytes": "22724"
}
],
"symlink_target": ""
}
|
"""Wordcount exercise
Google's Python class
The main() below is already defined and complete. It calls print_words()
and print_top() functions which you write.
1. For the --count flag, implement a print_words(filename) function that counts
how often each word appears in the text and prints:
word1 count1
word2 count2
...
Print the above list in order sorted by word (python will sort punctuation to
come before letters -- that's fine). Store all the words as lowercase,
so 'The' and 'the' count as the same word.
2. For the --topcount flag, implement a print_top(filename) which is similar
to print_words() but which prints just the top 20 most common words sorted
so the most common word is first, then the next most common, and so on.
Use str.split() (no arguments) to split on all whitespace.
Workflow: don't build the whole program at once. Get it to an intermediate
milestone and print your data structure and sys.exit(0).
When that's working, try for the next milestone.
Optional: define a helper function to avoid code duplication inside
print_words() and print_top().
"""
import sys
from string_tool import StringTool
# +++your code here+++
# Define print_words(filename) and print_top(filename) functions.
# You could write a helper utility function that reads a file
# and builds and returns a word/count dict for it.
# Then print_words() and print_top() can just call the utility function.
def count_words(filename):
    """Return a dict mapping each lower-cased word in filename to its count.

    Lines are cleaned with StringTool.clean_string before splitting on
    arbitrary whitespace, so 'The' and 'the' count as the same word.
    """
    string_tool = StringTool()
    word_counts = {}
    # 'with' guarantees the file is closed even if cleaning/counting raises
    # (the original manual open/close leaked the handle on error).
    with open(filename, 'r') as words_file:
        for line in words_file:
            cleaned_string = string_tool.clean_string(line)
            # split() with no arguments splits on runs of any whitespace.
            for word in cleaned_string.split():
                word_lower = word.lower()
                # get() avoids the separate membership test + two branches.
                word_counts[word_lower] = word_counts.get(word_lower, 0) + 1
    return word_counts
def print_words(filename):
word_counts = count_words(filename)
words = sorted(word_counts.keys())
for word in words:
print word, word_counts[word]
def print_top(filename):
"""
print the top 20 most common words sorted
so the most common word is first, then the next most common, and so on.
Sorts dictionary keys list ordered by corresponding value, highest value first
"""
word_counts = count_words(filename)
words = sorted(word_counts.keys(), key=lambda word_key: word_counts[word_key], reverse=True)
# if top end of range is greater than length, will use entire list
words_top = words[:20]
for word in words_top:
print word, word_counts[word]
def print_top_items(filename):
"""
print the top 20 most common words sorted
so the most common word is first, then the next most common, and so on.
Alternative implementation of print_top.
Sorts dictionary items (i.e. (key, value) tuples) into a list ordered by item value, highest value first
In Python 3, items are a view and can be iterated over.
http://docs.python.org/3.3/library/stdtypes.html#dict-views
"""
word_counts = count_words(filename)
# key:item[0], value:item[1]
items_sorted = sorted(word_counts.items(), key=lambda item: item[1], reverse=True)
# if top end of range is greater than length, will use entire list
items_top = items_sorted[:20]
for item in items_top:
print item[0], item[1]
###
# This basic command line argument parsing code is provided and
# calls the print_words() and print_top() functions which you must define.
def main():
if len(sys.argv) != 3:
print 'usage: ./wordcount.py {--count | --topcount} file'
sys.exit(1)
option = sys.argv[1]
filename = sys.argv[2]
if option == '--count':
print_words(filename)
elif option == '--topcount':
print_top(filename)
print_top_items(filename)
else:
print 'unknown option: ' + option
sys.exit(1)
# Standard boilerplate: run main() only when executed as a script.
if __name__ == '__main__':
    main()
|
{
"content_hash": "1a50b4c77237459b71f5a2726dbfe79c",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 108,
"avg_line_length": 33.08196721311475,
"alnum_prop": 0.6778989098116948,
"repo_name": "beepscore/google-python-exercises",
"id": "ecb30f7b4acf9803f47a0bdad0e211c4b1744b16",
"size": "4267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "basic/wordcount.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "634"
},
{
"name": "Python",
"bytes": "95234"
}
],
"symlink_target": ""
}
|
import unittest
import warnings
import sys
import IECore
warnings.simplefilter( "error", DeprecationWarning )
from ClassLoader import *
from AttributeCache import *
from BlindDataHolder import *
from CompoundData import *
from CompoundObject import *
from Imath import *
from ImathVectorData import *
from IndexedIO import *
from KDTree import *
from BoundedKDTree import *
from MessageHandler import *
from ObjectIO import *
from Object import *
from ObjectReader import *
from ObjectWriter import *
from ParameterParser import *
from Parameterised import *
from Parameters import *
from PDCReader import *
from PDCWriter import *
from SimpleTypedData import *
from TypedDataAsObject import *
from VectorData import *
from FileSequence import *
from EXRImageReader import *
from EXRImageWriter import *
from PointsPrimitive import *
from ImagePrimitive import *
from PerlinNoise import *
from Turbulence import *
from MeshPrimitive import *
from Shader import *
from SearchPath import *
from CachedReader import *
from Reader import *
from RunTimeTyped import *
from Op import *
from MemoryUsage import *
from FileSequenceParameter import *
from WrapperToPython import *
from RemovePrimitiveVariables import *
from RenamePrimitiveVariables import *
from WrapperGarbageCollection import *
from FormattedParameterHelp import *
from MotionPrimitive import *
from Transform import *
from Group import *
from NamespacePollution import *
from OptionalCompoundParameter import *
from ObjectInterpolation import *
from InterpolatedCache import *
from TransformationMatrixData import *
from ReversedFrameList import *
from BinaryFrameList import *
from PointsExpressionOp import *
from FrameList import *
from FrameListParameter import *
from Struct import *
from Enum import *
from HeaderGenerator import *
from Camera import *
from NURBS import *
from Curry import *
from Menus import *
from DataCastOp import *
from DataPromoteOp import *
from MatrixMultiplyOp import *
from PointBoundsOp import *
from ImplicitSurfaceFunction import *
from CachedImplicitSurfaceFunction import *
from MarchingCubes import *
from PointMeshOp import *
from CSGImplicitSurfaceFunction import *
from ParticleMeshOp import *
from PrimitiveEvaluator import *
from MeshPrimitiveEvaluator import *
from PrimitiveImplicitSurfaceFunction import *
from MeshPrimitiveImplicitSurfaceOp import *
from InternedStringTest import InternedStringTest
from Writer import *
from TriangulateOp import *
from SpherePrimitiveEvaluator import *
from SearchReplaceOp import *
from CINImageReader import *
from CINImageWriter import *
from DPXImageReader import *
from DPXImageWriter import *
from InverseDistanceWeightedInterpolation import *
from ImageCropOp import *
from MeshPrimitiveShrinkWrapOp import *
from ImagePrimitiveEvaluator import *
from CapturingMessageHandler import *
from FileExaminer import *
from Math import *
from FileSequenceVectorParameter import *
from TriangleAlgoTest import *
from ColorTransformOpTest import *
from TransformOpTest import *
from LineSegmentTest import *
from CubicBasisTest import *
from CurvesPrimitiveTest import *
from ImageDiffOp import *
from TriangulatorTest import *
from BezierAlgoTest import *
from MeshNormalsOpTest import *
from PrimitiveTest import *
from MeshMergeOpTest import *
from UniformRandomPointDistributionOpTest import *
from UnicodeToStringTest import *
from MappedRandomPointDistributionOpTest import *
from RadixSortTest import *
from ImathRootsTest import *
from AngleConversionTest import *
from LuminanceOpTest import *
from SummedAreaOpTest import *
from GradeTest import *
from MedianCutSamplerTest import *
from EnvMapSamplerTest import *
from RandomTest import *
from MeshVertexReorderOpTest import *
from SplineTest import *
from SplineDataTest import *
from TypeIdTest import *
from LayeredDictTest import *
from SplineParameterTest import *
from AttributeStateTest import *
from CoordinateSystemTest import *
from SplineToImageTest import *
from DisplayTest import *
from MeshTangentsOpTest import *
from CubeColorLookupTest import *
from CubeColorLookupDataTest import *
from CubeColorTransformOpTest import *
from CompoundVectorParameterTest import *
from UVDistortOpTest import *
from ObjectVectorTest import *
from ImagePremultiplyOpTest import *
from ImageUnpremultiplyOpTest import *
from ImageCompositeOpTest import *
from ImageSequenceCompositeOpTest import *
from YUVImageWriter import *
from OversamplesCalculatorTest import *
from DateTimeDataTest import *
from DateTimeParameterTest import *
from SequenceLsOpTest import *
from SGIImageReaderTest import *
from TimeDurationDataTest import *
from TimePeriodDataTest import *
from PatchMeshPrimitiveTest import *
from CurveExtrudeOp import *
from ParameterisedProceduralTest import *
from LevenbergMarquardtTest import *
from TypedDataTest import *
from DataTraitsTest import *
from ColorSpaceTransformOpTest import *
from TGAImageReaderTest import *
from TGAImageWriterTest import *
from BINParticleWriterTest import *
from BINMeshReaderTest import *
from BGEOParticleReader import *
from NParticleReader import *
from IFFHairReader import *
from FaceAreaOpTest import FaceAreaOpTest
from CurvesMergeOpTest import CurvesMergeOpTest
from CurvesPrimitiveEvaluatorTest import CurvesPrimitiveEvaluatorTest
from SubstitutedDictTest import SubstitutedDictTest
from PointDistributionTest import PointDistributionTest
from HitMissTransformTest import HitMissTransformTest
from CurveTracerTest import CurveTracerTest
from ImageThinnerTest import ImageThinnerTest
from CurveLineariserTest import CurveLineariserTest
from IDXReaderTest import IDXReaderTest
from ThreadingTest import ThreadingTest
from ImageConvolveOpTest import *
from StringUtilTest import *
from ClassParameterTest import ClassParameterTest
from ClassVectorParameterTest import ClassVectorParameterTest
from CurveTangentsOpTest import CurveTangentsOpTest
from MarschnerLookupTableOpTest import MarschnerLookupTableOpTest
from SmoothSkinningDataTest import *
from IgnoredExceptionsTest import IgnoredExceptionsTest
from PrimitiveVariableTest import PrimitiveVariableTest
from FaceVaryingPromotionOpTest import FaceVaryingPromotionOpTest
from MeshDistortionsOpTest import TestMeshDistortionsOp
from PointVelocityDisplaceOp import *
from HexConversionTest import HexConversionTest
from CompressAndDecompressSmoothSkinningDataOpsTest import CompressAndDecompressSmoothSkinningDataOpsTest
from BasicPreset import TestBasicPreset
from RelativePreset import TestRelativePreset
from ReorderSmoothSkinningInfluencesOpTest import ReorderSmoothSkinningInfluencesOpTest
from NormalizeSmoothSkinningWeightsOpTest import NormalizeSmoothSkinningWeightsOpTest
from LimitSmoothSkinningInfluencesOpTest import LimitSmoothSkinningInfluencesOpTest
from MixSmoothSkinningWeightsOpTest import MixSmoothSkinningWeightsOpTest
from SmoothSmoothSkinningWeightsOpTest import SmoothSmoothSkinningWeightsOpTest
from PointSmoothSkinningOpTest import PointSmoothSkinningOpTest
from AddAndRemoveSmoothSkinningInfluencesOpTest import AddAndRemoveSmoothSkinningInfluencesOpTest
from LookupTest import LookupTest
from ParameterAlgoTest import ParameterAlgoTest
from PointsPrimitiveEvaluatorTest import PointsPrimitiveEvaluatorTest
from PointsMotionOpTest import PointsMotionOpTest
from CamelCaseTest import CamelCaseTest
from CapturingRendererTest import CapturingRendererTest
from LightTest import LightTest
from ContrastSmoothSkinningWeightsOpTest import ContrastSmoothSkinningWeightsOpTest
from CameraControllerTest import CameraControllerTest
from PointDistributionOpTest import PointDistributionOpTest
from LRUCacheTest import LRUCacheTest
from DataInterleaveOpTest import DataInterleaveOpTest
from DataConvertOpTest import DataConvertOpTest
from DeepPixelTest import DeepPixelTest
from ConfigLoaderTest import ConfigLoaderTest
from MurmurHashTest import MurmurHashTest
from BoolVectorData import BoolVectorDataTest
from CompoundParameterTest import CompoundParameterTest
from DiskPrimitiveTest import DiskPrimitiveTest
from ClampOpTest import ClampOpTest
from SWAReaderTest import SWAReaderTest
from ImfTest import *
from TimeCodeDataTest import TimeCodeDataTest
from TimeCodeParameterTest import TimeCodeParameterTest
from OptionsTest import OptionsTest
from NullObjectTest import NullObjectTest
from ModelCacheTest import ModelCacheTest
from SceneCacheTest import SceneCacheTest
from LinkedSceneTest import LinkedSceneTest
from StandardRadialLensModelTest import StandardRadialLensModelTest
from LensDistortOpTest import LensDistortOpTest
# Optional-feature test modules: only import the tests for subsystems that
# were compiled into this build of IECore.
if IECore.withASIO() :
    from DisplayDriverTest import *

if IECore.withTIFF() :
    from TIFFImageReader import *
    from TIFFImageWriter import *

if IECore.withJPEG() :
    from JPEGImageReader import *
    from JPEGImageWriter import *

if IECore.withFreeType() :
    from FontTest import *

if IECore.withPNG() :
    from PNGImageReader import TestPNGReader

# Run every imported TestCase, echoing results both to stderr and to a
# results file on disk via IECore's stream multiplexer.
unittest.TestProgram(
    testRunner = unittest.TextTestRunner(
        stream = IECore.CompoundStream(
            [
                sys.stderr,
                open( "test/IECore/resultsPython.txt", "w" )
            ]
        ),
        verbosity = 2
    )
)
|
{
"content_hash": "1a21d7aff42c76baddd56aca30f2567a",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 105,
"avg_line_length": 34.51136363636363,
"alnum_prop": 0.8632422346613983,
"repo_name": "code-google-com/cortex-vfx",
"id": "d907c80a7fc999fd972146e0dc48f7a1a15ed1d3",
"size": "10959",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "test/IECore/All.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "58407"
},
{
"name": "C++",
"bytes": "10544047"
},
{
"name": "CMake",
"bytes": "14161"
},
{
"name": "Diff",
"bytes": "14535"
},
{
"name": "GLSL",
"bytes": "31102"
},
{
"name": "Mathematica",
"bytes": "255937"
},
{
"name": "Python",
"bytes": "4463622"
},
{
"name": "Shell",
"bytes": "961"
},
{
"name": "Slash",
"bytes": "7896"
},
{
"name": "Tcl",
"bytes": "1796"
}
],
"symlink_target": ""
}
|
from unittest import TestCase
from pecanraw import RawHook
from pecan import make_app, expose
from webtest import TestApp
class TestHooks(TestCase):
    """Integration test for RawHook mounted on a minimal pecan application."""

    def test_basic_single_hook(self):
        """The hook should print request/response details to stdout."""
        class RootController(object):
            @expose()
            def index(self):
                return 'Hello, World!'

        import sys
        from StringIO import StringIO
        out = StringIO()
        # Capture everything the hook prints.  Restore stdout in a finally
        # block so a failing request does not leave the global stream
        # redirected for the rest of the test run (the original leaked it).
        old_stdout = sys.stdout
        sys.stdout = out
        try:
            app = TestApp(make_app(RootController(), hooks=[RawHook()]))
            response = app.get('/')
        finally:
            sys.stdout = old_stdout
        self.assertEqual(response.status_int, 200)
        self.assertEqual(response.body, 'Hello, World!')
        # The final token is a memory address, so compare everything but it.
        expected = ['method:', 'GET',
                    'response:', '200', 'OK',
                    'url:', '/',
                    'method:', 'RootController.index',
                    'context:', '{}',
                    'params:', 'NestedMultiDict([])',
                    'hooks:', '[<pecanraw.RawHook', 'object', 'at',]
        self.assertEqual(out.getvalue().split()[:-1], expected)
|
{
"content_hash": "955a6efc195e4d6c2cfd2d8c5c445d7c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 68,
"avg_line_length": 32.06060606060606,
"alnum_prop": 0.5274102079395085,
"repo_name": "alfredodeza/pecan-raw",
"id": "32c4da3cc88030bf33d7c188e94920a34434720f",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pecanraw/tests/test_hooks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19437"
},
{
"name": "Shell",
"bytes": "360090"
}
],
"symlink_target": ""
}
|
from contextlib import contextmanager
import inspect
from itertools import chain
import os
from django.conf import settings
from django.template.base import Lexer, TOKEN_BLOCK
from django.utils.decorators import method_decorator
from django.utils.termcolors import colorize
from sekizai.helpers import validate_template
from cms import constants
from cms.models import AliasPluginModel
from cms.utils import get_cms_setting
from cms.utils.compat import DJANGO_1_8
from cms.utils.compat.dj import is_installed, get_app_paths
# Check result levels (provided for alternative output implementations;
# not referenced directly by the code visible in this module).
SUCCESS = 1
WARNING = 2
ERROR = 3
SKIPPED = 4
# Registry of all check functions, populated by the @define_check decorator
# and executed in definition order by check() below.
CHECKERS = []
class FileOutputWrapper(object):
    """
    Wraps two file-like objects (that support at the very least the 'write'
    method) into an API to be used by the check function further down in
    this module.

    The following properties are public (and required) by alternative
    implementations:

        errors: integer count of errors encountered
        successes: integer count of successes encountered
        warnings: integer count of warnings encountered
        skips: integer count of skips encountered
        successful: Whether the checks were successful (no errors)

    They must also provide these methods:

        write_line(message=''): writes a message to stdout
        write_stderr_line(message=''): writes a message to stderr
        success(message): reports and registers a successful check
        error(message): reports and registers an error
        warn(message): reports and registers a warning
        skip(message): reports and registers a skipped check
        section(title): A context manager that starts a new section. For the
            Section API see FileSectionWrapper
    """
    def __init__(self, stdout, stderr):
        self.stdout = stdout
        self.stderr = stderr
        # Class used to report checks inside a section; see section() below.
        self.section_wrapper = FileSectionWrapper
        # Running totals accumulated over all reported checks.
        self.errors = 0
        self.successes = 0
        self.warnings = 0
        self.skips = 0
    def colorize(self, msg, opts=(), **kwargs):
        # Indirection point so subclasses may disable or alter colorizing.
        return colorize(msg, opts=opts, **kwargs)
    def write_line(self, message=''):
        self.write(u'%s\n' % message)
    def write(self, message):
        self.stdout.write(message)
    def write_stderr_line(self, message=''):
        self.write_stderr(u'%s\n' % message)
    def write_stderr(self, message):
        self.stderr.write(message)
    def success(self, message):
        # Report a passed check and bump the success counter.
        self.successes += 1
        self.write_line(u'%s %s' % (message, self.colorize('[OK]', fg='green', opts=['bold'])))
    def error(self, message):
        # Errors go to stderr and make `successful` return False.
        self.errors += 1
        self.write_stderr_line(u'%s %s' % (message, self.colorize('[ERROR]', fg='red', opts=['bold'])))
    def warn(self, message):
        self.warnings += 1
        self.write_stderr_line(u'%s %s' % (message, self.colorize('[WARNING]', fg='yellow', opts=['bold'])))
    def skip(self, message):
        self.skips += 1
        self.write_line(u'%s %s' % (message, self.colorize('[SKIP]', fg='blue', opts=['bold'])))
    @method_decorator(contextmanager)
    def section(self, title):
        """
        Context manager yielding a section wrapper; on clean exit the
        section's counters are merged into this wrapper's totals.
        """
        self.write_line(self.colorize(title, opts=['bold']))
        self.write_line(self.colorize('=' * len(title), opts=['bold']))
        self.write_line()
        wrapper = self.section_wrapper(self)
        try:
            yield wrapper
        except:
            # Intentionally broad: record that the checker itself crashed,
            # then re-raise so the traceback still reaches the caller.
            self.error('Checker failed, see traceback')
            raise
        # Counters are only merged when the section body did not raise.
        self.errors += wrapper.errors
        self.successes += wrapper.successes
        self.warnings += wrapper.warnings
        self.skips += wrapper.skips
        self.write_line('')
    @property
    def successful(self):
        # Warnings and skips do not count as failure.
        return not self.errors
class FileSectionWrapper(FileOutputWrapper):
    """
    Used from FileOutputWrapper to report checks in a section.

    If you want to provide your own output class, you may want to subclass
    this class for the section reporting too. If you want to use your own,
    you must define at least the same API as FileOutputWrapper, as well
    as these four additional methods:

        finish_success(message): End the section (successfully)
        finish_error(message): End the section with errors
        finish_warning(message): End this section with a warning
        finish_skip(message): End this (skipped) section
    """
    def __init__(self, wrapper):
        # Write to the same streams as the parent wrapper; this section's
        # counters start at zero and are merged back by the parent's
        # section() context manager on clean exit.
        super(FileSectionWrapper, self).__init__(wrapper.stdout, wrapper.stderr)
        self.wrapper = wrapper
    def write_line(self, message=''):
        # Indent section output so it reads as a bullet list under the title.
        self.write(u' - %s\n' % message)
    def write_stderr_line(self, message=''):
        self.write_stderr(u' - %s\n' % message)
    def finish_success(self, message):
        self.wrapper.write_line()
        self.wrapper.success(message)
    def finish_error(self, message):
        self.wrapper.write_line()
        self.wrapper.error(message)
    def finish_warning(self, message):
        self.wrapper.write_line()
        # Fixed: FileOutputWrapper defines warn(), not warning(); the
        # original call raised AttributeError whenever a section finished
        # with a warning.
        self.wrapper.warn(message)
    def finish_skip(self, message):
        # Fixed typo: was write_lin(), which raised AttributeError.
        self.wrapper.write_line()
        self.wrapper.skip(message)
def define_check(func):
    """
    Helper decorator to register a check function.

    Registered functions are appended to CHECKERS and executed in
    definition order by check() at the bottom of this module.
    """
    CHECKERS.append(func)
    return func
@define_check
def check_sekizai(output):
    """Check that django-sekizai is installed and configured correctly."""
    with output.section("Sekizai") as section:
        if is_installed('sekizai'):
            section.success("Sekizai is installed")
        else:
            section.error("Sekizai is not installed, could not find 'sekizai' in INSTALLED_APPS")
        # Flatten the context processors of every configured template engine.
        processors = list(chain(*[template['OPTIONS'].get('context_processors', []) for template in settings.TEMPLATES]))
        if 'sekizai.context_processors.sekizai' in processors:
            section.success("Sekizai template context processor is installed")
        else:
            section.error("Sekizai template context processor is not installed, could not find 'sekizai.context_processors.sekizai' in TEMPLATES option context_processors")
        for template, _ in get_cms_setting('TEMPLATES'):
            # The inheritance magic value is not a real template file.
            if template == constants.TEMPLATE_INHERITANCE_MAGIC:
                continue
            if validate_template(template, ['js', 'css']):
                section.success("Sekizai namespaces 'js' and 'css' found in %r" % template)
            else:
                section.error("Sekizai namespaces 'js' and 'css' not found in %r" % template)
        if section.successful:
            section.finish_success("Sekizai configuration okay")
        else:
            section.finish_error("Sekizai configuration has errors")
@define_check
def check_i18n(output):
    """
    Check i18n configuration: CMS_LANGUAGES style, language-code (not
    locale) format in LANGUAGE_CODE/LANGUAGES/CMS_LANGUAGES, and
    deprecated i18n settings.
    """
    with output.section("Internationalization") as section:
        if isinstance(getattr(settings, 'CMS_LANGUAGES', {}), dict):
            section.success("New style CMS_LANGUAGES")
        else:
            section.warn("Old style (tuple based) CMS_LANGUAGES, please switch to the new (dictionary based) style")
        if getattr(settings, 'LANGUAGE_CODE', '').find('_') > -1:
            section.warn("LANGUAGE_CODE must contain a valid language code, not a locale (e.g.: 'en-us' instead of 'en_US'): '%s' provided" % getattr(settings, 'LANGUAGE_CODE', ''))
        for lang in getattr(settings, 'LANGUAGES', ()):
            if lang[0].find('_') > -1:
                section.warn("LANGUAGES must contain valid language codes, not locales (e.g.: 'en-us' instead of 'en_US'): '%s' provided" % lang[0])
        # Relies on hash(x) == x, which holds for typical (small) ints, to
        # accept int (and int-like) SITE_ID values without isinstance checks.
        if settings.SITE_ID == hash(settings.SITE_ID):
            for site, items in get_cms_setting('LANGUAGES').items():
                # Skip non-site keys such as 'default' in CMS_LANGUAGES.
                if type(site) == int:
                    for lang in items:
                        if lang['code'].find('_') > -1:
                            section.warn("CMS_LANGUAGES entries must contain valid language codes, not locales (e.g.: 'en-us' instead of 'en_US'): '%s' provided" % lang['code'])
        else:
            section.error("SITE_ID must be an integer, not %r" % settings.SITE_ID)
        for deprecated in ['CMS_HIDE_UNTRANSLATED', 'CMS_LANGUAGE_FALLBACK', 'CMS_LANGUAGE_CONF', 'CMS_SITE_LANGUAGES', 'CMS_FRONTEND_LANGUAGES']:
            if hasattr(settings, deprecated):
                section.warn("Deprecated setting %s found. This setting is now handled in the new style CMS_LANGUAGES and can be removed" % deprecated)
@define_check
def check_middlewares(output):
    """Verify that every middleware required by django CMS is configured."""
    with output.section("Middlewares") as section:
        expected = (
            'django.contrib.sessions.middleware.SessionMiddleware',
            'django.middleware.csrf.CsrfViewMiddleware',
            'django.contrib.auth.middleware.AuthenticationMiddleware',
            'django.contrib.messages.middleware.MessageMiddleware',
            'django.middleware.locale.LocaleMiddleware',
            'django.middleware.common.CommonMiddleware',
            'cms.middleware.user.CurrentUserMiddleware',
            'cms.middleware.page.CurrentPageMiddleware',
            'cms.middleware.toolbar.ToolbarMiddleware',
            'cms.middleware.language.LanguageCookieMiddleware',
        )
        # Report one error per missing middleware, in the expected order.
        missing = [mw for mw in expected
                   if mw not in settings.MIDDLEWARE_CLASSES]
        for mw in missing:
            section.error("%s middleware must be in MIDDLEWARE_CLASSES" % mw)
@define_check
def check_deprecated_settings(output):
    """Warn about settings django CMS no longer reads at all."""
    with output.section("Deprecated settings") as section:
        present = [name for name in ('CMS_FLAT_URLS', 'CMS_MODERATOR')
                   if hasattr(settings, name)]
        for name in present:
            section.warn("Deprecated setting %s found. This setting is no longer in use and can be removed" % name)
        if not present:
            section.skip("No deprecated settings found")
@define_check
def check_plugin_instances(output):
    """Inspect plugin instances in the database and flag broken ones."""
    from cms.management.commands.subcommands.list import plugin_report
    with output.section("Plugin instances") as section:
        # get the report
        report = plugin_report()
        section.success("Plugin instances of %s types found in the database" % len(report))
        # loop over plugin types in the report
        for plugin_type in report:
            # warn about those that are not installed
            if not plugin_type["model"]:
                section.error("%s has instances but is no longer installed" % plugin_type["type"] )
            # warn about those that have unsaved instances
            if plugin_type["unsaved_instances"]:
                section.error("%s has %s unsaved instances" % (plugin_type["type"], len(plugin_type["unsaved_instances"])))
        if section.successful:
            section.finish_success("The plugins in your database are in good order")
        else:
            section.finish_error("There are potentially serious problems with the plugins in your database. \nEven if your site works, you should run the 'manage.py cms list plugins' \ncommand and then the 'manage.py cms delete_orphaned_plugins' command. \nThis will alter your database; read the documentation before using it.")
@define_check
def check_copy_relations(output):
    """
    Warn about plugin/extension models that declare relations but no
    ``copy_relations`` method; such relations are not carried over when
    plugins or extensions are copied.
    """
    from cms.plugin_pool import plugin_pool
    from cms.extensions import extension_pool
    from cms.extensions.models import BaseExtension
    from cms.models.pluginmodel import CMSPlugin
    c_to_s = lambda klass: '%s.%s' % (klass.__module__, klass.__name__)
    def get_class(method_name, model):
        # Return the class in the MRO that actually defines method_name.
        for cls in inspect.getmro(model):
            if method_name in cls.__dict__:
                return cls
        return None
    with output.section('Presence of "copy_relations"') as section:
        plugin_pool.discover_plugins()
        for plugin in plugin_pool.plugins.values():
            plugin_class = plugin.model
            # Only flag classes that inherit copy_relations straight from
            # CMSPlugin, i.e. did not override it themselves.
            if get_class('copy_relations', plugin_class) is not CMSPlugin or plugin_class is CMSPlugin:
                # this class defines a ``copy_relations`` method, nothing more
                # to do
                continue
            for rel in plugin_class._meta.many_to_many:
                section.warn('%s has a many-to-many relation to %s,\n but no "copy_relations" method defined.' % (
                    c_to_s(plugin_class),
                    c_to_s(rel.model),
                ))
            for rel in plugin_class._meta.get_all_related_objects():
                # Ignore the implicit FK back to CMSPlugin, subclass links
                # and the Alias plugin's reference.
                if rel.model != CMSPlugin and not issubclass(rel.model, plugin.model) and rel.model != AliasPluginModel:
                    section.warn('%s has a foreign key from %s,\n but no "copy_relations" method defined.' % (
                        c_to_s(plugin_class),
                        c_to_s(rel.model),
                    ))
        for extension in chain(extension_pool.page_extensions, extension_pool.title_extensions):
            if get_class('copy_relations', extension) is not BaseExtension:
                # OK, looks like there is a 'copy_relations' defined in the
                # extension... move along...
                continue
            for rel in extension._meta.many_to_many:
                section.warn('%s has a many-to-many relation to %s,\n but no "copy_relations" method defined.' % (
                    extension,
                    rel.related,
                ))
            for rel in extension._meta.get_all_related_objects():
                if rel.model != extension:
                    section.warn('%s has a foreign key from %s,\n but no "copy_relations" method defined.' % (
                        c_to_s(extension),
                        c_to_s(rel.model),
                    ))
        if not section.warnings:
            section.finish_success('All plugins and page/title extensions have "copy_relations" method if needed.')
        else:
            # NOTE(review): this branch describes missing copy_relations
            # methods but still ends the section via finish_success;
            # confirm whether finish_warning was intended here.
            section.finish_success('Some plugins or page/title extensions do not define a "copy_relations" method.\nThis might lead to data loss when publishing or copying plugins/extensions.\nSee https://django-cms.readthedocs.org/en/latest/extending_cms/custom_plugins.html#handling-relations or https://django-cms.readthedocs.org/en/latest/extending_cms/extending_page_title.html#handling-relations.')
def _load_all_templates(directory):
"""
Loads all templates in a directory (recursively) and yields tuples of
template tokens and template paths.
"""
if os.path.exists(directory):
for name in os.listdir(directory):
path = os.path.join(directory, name)
if os.path.isdir(path):
for template in _load_all_templates(path):
yield template
elif path.endswith('.html'):
with open(path, 'rb') as fobj:
source = fobj.read().decode(settings.FILE_CHARSET)
if DJANGO_1_8:
lexer = Lexer(source, path)
else:
lexer = Lexer(source)
yield lexer.tokenize(), path
@define_check
def deprecations(output):
    """Scan project templates for {% load placeholder_tags %} usage."""
    # deprecated placeholder_tags scan (1 in 3.1)
    # NOTE(review): only the DIRS of the first TEMPLATES engine are scanned;
    # confirm multiple template engines are not expected here.
    # Copy the list before extending it: the original extended the very list
    # object stored in settings.TEMPLATES[0]['DIRS'], mutating global
    # settings every time this check ran.
    templates_dirs = list(getattr(settings, 'TEMPLATES', [])[0]['DIRS'])
    templates_dirs.extend(
        [os.path.join(path, 'templates') for path in get_app_paths()]
    )
    with output.section('Usage of deprecated placeholder_tags') as section:
        for template_dir in templates_dirs:
            for tokens, path in _load_all_templates(template_dir):
                for token in tokens:
                    if token.token_type == TOKEN_BLOCK:
                        bits = token.split_contents()
                        # Matches {% load ... placeholder_tags ... %}.
                        if bits[0] == 'load' and 'placeholder_tags' in bits:
                            section.warn(
                                'Usage of deprecated template tag library '
                                'placeholder tags in template %s' % path
                            )
def check(output):
    """
    Checks the configuration/environment of this django CMS installation.

    'output' should be an object that provides the same API as
    FileOutputWrapper.

    Returns whether the configuration/environment are okay (has no errors)
    """
    title = "Checking django CMS installation"
    border = '*' * len(title)
    output.write_line(output.colorize(border, opts=['bold']))
    output.write_line(output.colorize(title, opts=['bold']))
    output.write_line(output.colorize(border, opts=['bold']))
    output.write_line()
    # Run every check registered via @define_check, in definition order.
    for checker in CHECKERS:
        checker(output)
        output.write_line()
    # Summarise the aggregated counters collected by the output wrapper.
    with output.section("OVERALL RESULTS"):
        if output.errors:
            output.write_stderr_line(output.colorize("%s errors!" % output.errors, opts=['bold'], fg='red'))
        if output.warnings:
            output.write_stderr_line(output.colorize("%s warnings!" % output.warnings, opts=['bold'], fg='yellow'))
        if output.skips:
            output.write_line(output.colorize("%s checks skipped!" % output.skips, opts=['bold'], fg='blue'))
        output.write_line(output.colorize("%s checks successful!" % output.successes, opts=['bold'], fg='green'))
        output.write_line()
        if output.errors:
            output.write_stderr_line(output.colorize('Please check the errors above', opts=['bold'], fg='red'))
        elif output.warnings:
            output.write_stderr_line(output.colorize('Installation okay, but please check warnings above', opts=['bold'], fg='yellow'))
        else:
            output.write_line(output.colorize('Installation okay', opts=['bold'], fg='green'))
    return output.successful
|
{
"content_hash": "2ed7056da47cb19dc4ada2e48d331889",
"timestamp": "",
"source": "github",
"line_count": 397,
"max_line_length": 404,
"avg_line_length": 43.86146095717884,
"alnum_prop": 0.62579681846896,
"repo_name": "keimlink/django-cms",
"id": "c75164080e6f8daec33110a4d17a46a0e0d0785d",
"size": "17437",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "cms/utils/check.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "128012"
},
{
"name": "HTML",
"bytes": "105180"
},
{
"name": "JavaScript",
"bytes": "667899"
},
{
"name": "Python",
"bytes": "1978594"
},
{
"name": "XSLT",
"bytes": "5917"
}
],
"symlink_target": ""
}
|
import copy
import errno
import json
import os
import time
import urllib
from tempest_lib.common.utils import misc as misc_utils
from tempest_lib import exceptions as lib_exc
from tempest.common import glance_http
from tempest.common import service_client
from tempest import exceptions
from tempest.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ImageClientJSON(service_client.ServiceClient):
    """Tempest client for the OpenStack Image (glance) v1 JSON API.

    In the v1 API image metadata travels in 'x-image-meta-*' HTTP headers
    rather than in JSON bodies, so this client converts between header
    dicts and metadata dicts in both directions.
    """

    def __init__(self, auth_provider, catalog_type, region, endpoint_type=None,
                 build_interval=None, build_timeout=None,
                 disable_ssl_certificate_validation=None,
                 ca_certs=None, **kwargs):
        super(ImageClientJSON, self).__init__(
            auth_provider,
            catalog_type,
            region,
            endpoint_type=endpoint_type,
            build_interval=build_interval,
            build_timeout=build_timeout,
            disable_ssl_certificate_validation=(
                disable_ssl_certificate_validation),
            ca_certs=ca_certs,
            **kwargs)
        # Raw glance HTTP client, created lazily by the ``http`` property;
        # it is only needed for requests that stream image data.
        self._http = None
        self.dscv = disable_ssl_certificate_validation
        self.ca_certs = ca_certs

    def _image_meta_from_headers(self, headers):
        """Convert 'x-image-meta-*' response headers to a metadata dict."""
        meta = {'properties': {}}
        for key, value in headers.iteritems():
            if key.startswith('x-image-meta-property-'):
                _key = key[22:]  # strip 'x-image-meta-property-'
                meta['properties'][_key] = value
            elif key.startswith('x-image-meta-'):
                _key = key[13:]  # strip 'x-image-meta-'
                meta[_key] = value
        # Header values are strings; coerce the well-known boolean and
        # integer fields back to their natural types.
        for key in ['is_public', 'protected', 'deleted']:
            if key in meta:
                meta[key] = meta[key].strip().lower() in ('t', 'true', 'yes',
                                                          '1')
        for key in ['size', 'min_ram', 'min_disk']:
            if key in meta:
                try:
                    meta[key] = int(meta[key])
                except ValueError:
                    pass
        return meta

    def _image_meta_to_headers(self, fields):
        """Convert a metadata dict to 'x-image-meta-*' request headers."""
        headers = {}
        fields_copy = copy.deepcopy(fields)
        # 'copy_from' is a glance API directive, not image metadata.
        copy_from = fields_copy.pop('copy_from', None)
        if copy_from is not None:
            headers['x-glance-api-copy-from'] = copy_from
        for key, value in fields_copy.pop('properties', {}).iteritems():
            headers['x-image-meta-property-%s' % key] = str(value)
        for key, value in fields_copy.pop('api', {}).iteritems():
            headers['x-glance-api-property-%s' % key] = str(value)
        for key, value in fields_copy.iteritems():
            headers['x-image-meta-%s' % key] = str(value)
        return headers

    def _get_file_size(self, obj):
        """Analyze file-like object and attempt to determine its size.

        :param obj: file-like object, typically redirected from stdin.
        :retval The file's size or None if it cannot be determined.
        """
        # For large images, we need to supply the size of the
        # image file. See LP Bugs #827660 and #845788.
        if hasattr(obj, 'seek') and hasattr(obj, 'tell'):
            try:
                obj.seek(0, os.SEEK_END)
                obj_size = obj.tell()
                obj.seek(0)
                return obj_size
            except IOError as e:
                if e.errno == errno.ESPIPE:
                    # Illegal seek. This means the user is trying
                    # to pipe image data to the client, e.g.
                    # echo testdata | bin/glance add blah..., or
                    # that stdin is empty, or that a file-like
                    # object which doesn't support 'seek/tell' has
                    # been supplied.
                    return None
                else:
                    raise
        else:
            # Cannot determine size of input image
            return None

    def _get_http(self):
        """Build the raw glance HTTP client used for image data transfers."""
        return glance_http.HTTPClient(auth_provider=self.auth_provider,
                                      filters=self.filters,
                                      insecure=self.dscv,
                                      ca_certs=self.ca_certs)

    def _create_with_data(self, headers, data):
        """POST a new image together with its raw data."""
        resp, body_iter = self.http.raw_request('POST', '/v1/images',
                                                headers=headers, body=data)
        self._error_checker('POST', '/v1/images', headers, data, resp,
                            body_iter)
        body = json.loads(''.join([c for c in body_iter]))
        return service_client.ResponseBody(resp, body['image'])

    def _update_with_data(self, image_id, headers, data):
        """PUT updated metadata together with raw data for an image."""
        url = '/v1/images/%s' % image_id
        resp, body_iter = self.http.raw_request('PUT', url, headers=headers,
                                                body=data)
        self._error_checker('PUT', url, headers, data,
                            resp, body_iter)
        body = json.loads(''.join([c for c in body_iter]))
        return service_client.ResponseBody(resp, body['image'])

    @property
    def http(self):
        # Created on first use so clients that never stream image data
        # skip the raw-client setup entirely.
        if self._http is None:
            self._http = self._get_http()
        return self._http

    def create_image(self, name, container_format, disk_format, **kwargs):
        """Create an image; streams 'data' through the raw client if given."""
        params = {
            "name": name,
            "container_format": container_format,
            "disk_format": disk_format,
        }
        headers = {}
        for option in ['is_public', 'location', 'properties',
                       'copy_from', 'min_ram']:
            if option in kwargs:
                params[option] = kwargs.get(option)
        headers.update(self._image_meta_to_headers(params))
        if 'data' in kwargs:
            return self._create_with_data(headers, kwargs.get('data'))
        resp, body = self.post('v1/images', None, headers)
        self.expected_success(201, resp.status)
        body = json.loads(body)
        return service_client.ResponseBody(resp, body['image'])

    def update_image(self, image_id, name=None, container_format=None,
                     data=None, properties=None):
        """Update image metadata and optionally replace its raw data."""
        params = {}
        headers = {}
        if name is not None:
            params['name'] = name
        if container_format is not None:
            params['container_format'] = container_format
        if properties is not None:
            params['properties'] = properties
        headers.update(self._image_meta_to_headers(params))
        if data is not None:
            return self._update_with_data(image_id, headers, data)
        url = 'v1/images/%s' % image_id
        resp, body = self.put(url, data, headers)
        self.expected_success(200, resp.status)
        body = json.loads(body)
        return service_client.ResponseBody(resp, body['image'])

    def delete_image(self, image_id):
        """Delete an image."""
        url = 'v1/images/%s' % image_id
        resp, body = self.delete(url)
        self.expected_success(200, resp.status)
        return service_client.ResponseBody(resp, body)

    def image_list(self, **kwargs):
        """List images; kwargs are passed through as query parameters."""
        url = 'v1/images'
        if len(kwargs) > 0:
            url += '?%s' % urllib.urlencode(kwargs)
        resp, body = self.get(url)
        self.expected_success(200, resp.status)
        body = json.loads(body)
        return service_client.ResponseBodyList(resp, body['images'])

    def image_list_detail(self, properties=dict(), changes_since=None,
                          **kwargs):
        """List images with details, optionally filtered by properties."""
        url = 'v1/images/detail'
        params = {}
        # Property filters use the 'property-<name>' query parameter form.
        for key, value in properties.items():
            params['property-%s' % key] = value
        kwargs.update(params)
        if changes_since is not None:
            kwargs['changes-since'] = changes_since
        if len(kwargs) > 0:
            url += '?%s' % urllib.urlencode(kwargs)
        resp, body = self.get(url)
        self.expected_success(200, resp.status)
        body = json.loads(body)
        return service_client.ResponseBodyList(resp, body['images'])

    def get_image_meta(self, image_id):
        """Fetch image metadata via HEAD (v1 carries metadata in headers)."""
        url = 'v1/images/%s' % image_id
        resp, __ = self.head(url)
        self.expected_success(200, resp.status)
        body = self._image_meta_from_headers(resp)
        return service_client.ResponseBody(resp, body)

    def get_image(self, image_id):
        """Fetch raw image data."""
        url = 'v1/images/%s' % image_id
        resp, body = self.get(url)
        self.expected_success(200, resp.status)
        return service_client.ResponseBodyData(resp, body)

    def is_resource_deleted(self, id):
        """Return True once the image is gone (used by cleanup waiters)."""
        try:
            self.get_image_meta(id)
        except lib_exc.NotFound:
            return True
        return False

    @property
    def resource_type(self):
        """Returns the primary type of resource this client works with."""
        return 'image_meta'

    def get_image_membership(self, image_id):
        """List the members an image is shared with."""
        url = 'v1/images/%s/members' % image_id
        resp, body = self.get(url)
        self.expected_success(200, resp.status)
        body = json.loads(body)
        return service_client.ResponseBody(resp, body)

    def get_shared_images(self, member_id):
        """List the images shared with a given member."""
        url = 'v1/shared-images/%s' % member_id
        resp, body = self.get(url)
        self.expected_success(200, resp.status)
        body = json.loads(body)
        return service_client.ResponseBody(resp, body)

    def add_member(self, member_id, image_id, can_share=False):
        """Share an image with a member, optionally allowing re-sharing."""
        url = 'v1/images/%s/members/%s' % (image_id, member_id)
        body = None
        if can_share:
            body = json.dumps({'member': {'can_share': True}})
        resp, __ = self.put(url, body)
        self.expected_success(204, resp.status)
        return service_client.ResponseBody(resp)

    def delete_member(self, member_id, image_id):
        """Stop sharing an image with a member."""
        url = 'v1/images/%s/members/%s' % (image_id, member_id)
        resp, __ = self.delete(url)
        self.expected_success(204, resp.status)
        return service_client.ResponseBody(resp)

    # NOTE(afazekas): just for the wait function
    def _get_image_status(self, image_id):
        meta = self.get_image_meta(image_id)
        status = meta['status']
        return status

    # NOTE(afazkas): Wait reinvented again. It is not in the correct layer
    def wait_for_image_status(self, image_id, status):
        """Waits for a Image to reach a given status.

        Polls every ``build_interval`` seconds; raises ImageKilledException
        if the image enters 'killed', TimeoutException after
        ``build_timeout`` seconds.
        """
        start_time = time.time()
        old_value = value = self._get_image_status(image_id)
        while True:
            dtime = time.time() - start_time
            if value != old_value:
                # Fixed missing space between the concatenated message parts.
                LOG.info('Value transition from "%s" to "%s" '
                         'in %d second(s).', old_value,
                         value, dtime)
            if value == status:
                return value
            if value == 'killed':
                raise exceptions.ImageKilledException(image_id=image_id,
                                                      status=status)
            if dtime > self.build_timeout:
                # Fixed missing space after '(%ds)' in the original message.
                message = ('Time Limit Exceeded! (%ds) '
                           'while waiting for %s, '
                           'but we got %s.' %
                           (self.build_timeout, status, value))
                caller = misc_utils.find_test_caller()
                if caller:
                    message = '(%s) %s' % (caller, message)
                raise exceptions.TimeoutException(message)
            # Single sleep per poll; the original slept twice per iteration,
            # doubling the effective poll interval without a status refresh
            # in between.
            time.sleep(self.build_interval)
            old_value = value
            value = self._get_image_status(image_id)
|
{
"content_hash": "b765b787a90552c2d021880c503a95c5",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 79,
"avg_line_length": 37.32686084142395,
"alnum_prop": 0.5464713022368649,
"repo_name": "rzarzynski/tempest",
"id": "01a9c54ecb76716226b570b7b26fef2a213eb2eb",
"size": "12159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tempest/services/image/v1/json/image_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "695"
},
{
"name": "Python",
"bytes": "2888467"
},
{
"name": "Shell",
"bytes": "8560"
}
],
"symlink_target": ""
}
|
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.travisty',
version='0.1.dev0',
description='Tangled travisty integration (core)',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled']),
install_requires=[
'tangled>=0.1a8',
],
extras_require={
'dev': [
'tangled[dev]',
],
},
entry_points="""
[tangled.scripts]
travisty = tangled.travisty.command:Command
""",
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
],
)
|
{
"content_hash": "80e0a3e3c5bc7218ec86f87c8af5bf2e",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 59,
"avg_line_length": 26.21875,
"alnum_prop": 0.600715137067938,
"repo_name": "TangledWeb/tangled.travisty",
"id": "1a95fc721171ab7ceaa4755cc38d66ae5a945b1f",
"size": "839",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4425"
}
],
"symlink_target": ""
}
|
from collections import namedtuple
###############
#   Structs   #
###############

# Request payloads -- one namedtuple per broker API call.
ProduceRequest = namedtuple("ProduceRequest",
                            ["topic", "partition", "messages"])

FetchRequest = namedtuple("FetchRequest",
                          ["topic", "partition", "offset", "max_bytes"])

OffsetRequest = namedtuple("OffsetRequest",
                           ["topic", "partition", "time", "max_offsets"])

OffsetCommitRequest = namedtuple("OffsetCommitRequest",
                                 ["topic", "partition", "offset", "metadata"])

OffsetFetchRequest = namedtuple("OffsetFetchRequest", ["topic", "partition"])

# Response payloads -- the 'error' field carries the broker error code
# (see kafka_errors below; 0 means no error).
ProduceResponse = namedtuple("ProduceResponse",
                             ["topic", "partition", "error", "offset"])

FetchResponse = namedtuple("FetchResponse", ["topic", "partition", "error",
                                             "highwaterMark", "messages"])

OffsetResponse = namedtuple("OffsetResponse",
                            ["topic", "partition", "error", "offsets"])

OffsetCommitResponse = namedtuple("OffsetCommitResponse",
                                  ["topic", "partition", "error"])

OffsetFetchResponse = namedtuple("OffsetFetchResponse",
                                 ["topic", "partition", "offset",
                                  "metadata", "error"])

# Cluster metadata structures.
BrokerMetadata = namedtuple("BrokerMetadata", ["nodeId", "host", "port"])

PartitionMetadata = namedtuple("PartitionMetadata",
                               ["topic", "partition", "leader",
                                "replicas", "isr"])

# Other useful structs
OffsetAndMessage = namedtuple("OffsetAndMessage", ["offset", "message"])
Message = namedtuple("Message", ["magic", "attributes", "key", "value"])
TopicAndPartition = namedtuple("TopicAndPartition", ["topic", "partition"])
#################
# Exceptions #
#################
class KafkaError(RuntimeError):
pass
class BrokerResponseError(KafkaError):
pass
class UnknownError(BrokerResponseError):
errno = -1
message = 'UNKNOWN'
class OffsetOutOfRangeError(BrokerResponseError):
errno = 1
message = 'OFFSET_OUT_OF_RANGE'
class InvalidMessageError(BrokerResponseError):
errno = 2
message = 'INVALID_MESSAGE'
class UnknownTopicOrPartitionError(BrokerResponseError):
errno = 3
message = 'UNKNOWN_TOPIC_OR_PARTITON'
class InvalidFetchRequestError(BrokerResponseError):
errno = 4
message = 'INVALID_FETCH_SIZE'
class LeaderNotAvailableError(BrokerResponseError):
errno = 5
message = 'LEADER_NOT_AVAILABLE'
class NotLeaderForPartitionError(BrokerResponseError):
errno = 6
message = 'NOT_LEADER_FOR_PARTITION'
class RequestTimedOutError(BrokerResponseError):
errno = 7
message = 'REQUEST_TIMED_OUT'
class BrokerNotAvailableError(BrokerResponseError):
errno = 8
message = 'BROKER_NOT_AVAILABLE'
class ReplicaNotAvailableError(BrokerResponseError):
errno = 9
message = 'REPLICA_NOT_AVAILABLE'
class MessageSizeTooLargeError(BrokerResponseError):
errno = 10
message = 'MESSAGE_SIZE_TOO_LARGE'
class StaleControllerEpochError(BrokerResponseError):
errno = 11
message = 'STALE_CONTROLLER_EPOCH'
class OffsetMetadataTooLargeError(BrokerResponseError):
errno = 12
message = 'OFFSET_METADATA_TOO_LARGE'
class StaleLeaderEpochCodeError(BrokerResponseError):
errno = 13
message = 'STALE_LEADER_EPOCH_CODE'
class KafkaUnavailableError(KafkaError):
pass
class LeaderUnavailableError(KafkaError):
pass
class PartitionUnavailableError(KafkaError):
pass
class FailedPayloadsError(KafkaError):
pass
class ConnectionError(KafkaError):
pass
class BufferUnderflowError(KafkaError):
pass
class ChecksumError(KafkaError):
pass
class ConsumerFetchSizeTooSmall(KafkaError):
pass
class ConsumerNoMoreData(KafkaError):
pass
class BufferTooLargeError(KafkaError):
def __init__(self, topic, partition, old_offset, new_offset):
self.topic = topic
self.partition = partition
self.old_offset = old_offset
self.new_offset = new_offset
def __repr__(self):
return 'topic: {0}, partition: {1}, old_offset: {2}, new_offset: {3}'.format(self.topic, self.partition, self.old_offset, self.new_offset)
class ProtocolError(KafkaError):
pass
class UnsupportedCodecError(KafkaError):
pass
kafka_errors = {
-1: UnknownError,
1: OffsetOutOfRangeError,
2: InvalidMessageError,
3: UnknownTopicOrPartitionError,
4: InvalidFetchRequestError,
5: LeaderNotAvailableError,
6: NotLeaderForPartitionError,
7: RequestTimedOutError,
8: BrokerNotAvailableError,
9: ReplicaNotAvailableError,
10: MessageSizeTooLargeError,
11: StaleControllerEpochError,
12: OffsetMetadataTooLargeError,
13: StaleLeaderEpochCodeError,
}
def check_error(response):
    """Raise the exception mapped to ``response.error``, if any.

    ``response.error`` is the broker error code; 0 means success and
    raises nothing.  Bug fix: nonzero codes without an entry in
    ``kafka_errors`` previously fell through silently (``dict.get``
    returned None); they now raise UnknownError so a failure is never
    ignored.
    """
    if response.error:
        error = kafka_errors.get(response.error, UnknownError)
        raise error(response)
|
{
"content_hash": "532d0de3a9da3437cbf82d9de1dc11c5",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 146,
"avg_line_length": 23.94312796208531,
"alnum_prop": 0.6597387173396675,
"repo_name": "Livefyre/kafka-python",
"id": "b222e19bbaf2977b6535d35e1dfbe5a5c8ccd807",
"size": "5052",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kafka/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "183"
},
{
"name": "Python",
"bytes": "223378"
},
{
"name": "Shell",
"bytes": "2375"
}
],
"symlink_target": ""
}
|
from __future__ import division
from io import BytesIO
import pytest
from pyglet import gl, image
from ...annotations import Platform, require_platform, require_gl_extension
from ...base.event_loop import EventLoopFixture
class ImageTestFixture(EventLoopFixture):
    """Interactive fixture showing two images side by side.

    The window is split conceptually in two: a "left" texture (usually
    the reference image) and a "right" texture (usually the result of a
    round-trip or conversion), drawn over an optional checkerboard
    background so alpha is visible.
    """
    def __init__(self, request, test_data):
        super(ImageTestFixture, self).__init__(request)
        self.test_data = test_data
        # Rendering toggles, flipped by the individual tests.
        self.show_checkerboard = True
        self.show_triangle_left = False
        self.show_text = True
        # Textures to compare; None means "draw nothing" on that side.
        self.left_texture = None
        self.right_texture = None
        self.checkerboard = image.create(32, 32, image.CheckerImagePattern())
    def on_draw(self):
        # Do not call super class draw, we need to split the clearing and the drawing
        # the text box.
        self.clear()
        self.draw_checkerboard()
        self.draw_left()
        self.draw_triangle_left()
        self.draw_right()
        if self.show_text:
            self.draw_text()
    def draw_checkerboard(self):
        """Tile the 32x32 checker pattern over the whole window."""
        if self.show_checkerboard:
            # Scale both the quad and its texture coordinates so the
            # pattern repeats instead of stretching; matrices are
            # restored afterwards with the matching Pop calls.
            gl.glPushMatrix()
            gl.glScalef(self.window.width/float(self.checkerboard.width),
                        self.window.height/float(self.checkerboard.height),
                        1.)
            gl.glMatrixMode(gl.GL_TEXTURE)
            gl.glPushMatrix()
            gl.glScalef(self.window.width/float(self.checkerboard.width),
                        self.window.height/float(self.checkerboard.height),
                        1.)
            gl.glMatrixMode(gl.GL_MODELVIEW)
            self.checkerboard.blit(0, 0, 0)
            gl.glMatrixMode(gl.GL_TEXTURE)
            gl.glPopMatrix()
            gl.glMatrixMode(gl.GL_MODELVIEW)
            gl.glPopMatrix()
    def draw_left(self):
        """Blit the left texture centred in the left half of the window."""
        if self.left_texture:
            self.left_texture.blit(
                    self.window.width // 4 - self.left_texture.width // 2,
                    (self.window.height - self.left_texture.height) // 2,
                    0)
    def draw_right(self):
        """Blit the right texture centred in the right half of the window."""
        if self.right_texture:
            x = self.window.width * 3 // 4 - self.right_texture.width // 2
            # Never overlap the left half, even for very wide images.
            x = max((x, self.window.width // 2))
            self.right_texture.blit(
                    x,
                    (self.window.height - self.right_texture.height) // 2,
                    0)
    def load_left(self, image_file, decoder=None):
        """Load image_file (optionally with an explicit decoder) as the left texture."""
        self.left_texture = image.load(image_file, decoder=decoder).texture
    def copy_left_to_right(self, encoder=None):
        """Encode the left texture to an in-memory PNG and reload it on the right.

        The "file.png" name is not written to disk; presumably it only
        hints the codec selection -- the data goes through ``buf``.
        """
        buf = BytesIO()
        self.left_texture.save("file.png",
                               buf,
                               encoder=encoder)
        buf.seek(0)
        self.right_texture = image.load("file.png", buf).texture
    def enable_alpha(self):
        """Turn on standard alpha blending so transparency is visible."""
        gl.glEnable(gl.GL_BLEND)
        gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
    def load_right_arb(self, image_file, pixel_format):
        """Load image_file and convert it to pixel_format for the right texture."""
        img = image.load(image_file)
        img.format = pixel_format
        img.data  # forces conversion
        self.right_texture = img.texture
    def draw_triangle_left(self):
        """Draw a 200x200 RGB-cornered triangle in the left half (depth-tested)."""
        if self.show_triangle_left:
            w = 200
            h = 200
            x = self.window.width // 4 - w // 2
            y = (self.window.height - h) // 2
            # Depth test on: vertices span z in [-1, 1] so the depth
            # buffer tests below have a gradient to show.
            gl.glEnable(gl.GL_DEPTH_TEST)
            gl.glBegin(gl.GL_TRIANGLES)
            gl.glColor4f(1, 0, 0, 1)
            gl.glVertex3f(x, y, -1)
            gl.glColor4f(0, 1, 0, 1)
            gl.glVertex3f(x+w, y, 0)
            gl.glColor4f(0, 0, 1, 1)
            gl.glVertex3f(x, y+h, 1)
            gl.glEnd()
            gl.glDisable(gl.GL_DEPTH_TEST)
            # Reset the current colour so later blits are unmodulated.
            gl.glColor4f(1, 1, 1, 1)
    def copy_color_buffer(self):
        """Grab the current colour buffer directly into the right texture."""
        self.right_texture = \
            image.get_buffer_manager().get_color_buffer().texture
    def save_and_load_color_buffer(self):
        """Round-trip the colour buffer through an in-memory PNG."""
        stream = BytesIO()
        image.get_buffer_manager().get_color_buffer().save('buffer.png', stream)
        stream.seek(0)
        self.right_texture = image.load('buffer.png', stream)
    def save_and_load_depth_buffer(self):
        """Round-trip the depth buffer through an in-memory PNG."""
        stream = BytesIO()
        image.get_buffer_manager().get_depth_buffer().save('buffer.png', stream)
        stream.seek(0)
        self.right_texture = image.load('buffer.png', stream)
    def test_image_loading(self, decoder, image_name):
        """Test loading images."""
        self.create_window(width=800, height=600)
        self.load_left(self.test_data.get_file("images", image_name), decoder)
        self.enable_alpha()
        self.ask_question(
                "Do you see the {} image on a checkerboard background?".format(image_name)
                )
    def test_image_saving(self, encoder, image_name):
        """Test saving images."""
        self.create_window(width=800, height=600)
        self.load_left(self.test_data.get_file("images", image_name))
        self.copy_left_to_right(encoder)
        self.enable_alpha()
        self.ask_question(
                "Do you see the {} image twice on a checkerboard background?".format(image_name)
                )
@pytest.fixture
def image_test(request, test_data):
    """Provide an ImageTestFixture bound to the current test request."""
    fixture = ImageTestFixture(request, test_data)
    return fixture
# Sample files (looked up via test_data under "images"), grouped by the
# codec family each parametrized test exercises below.
bmp_images = ['rgb_16bpp.bmp', 'rgb_1bpp.bmp', 'rgb_24bpp.bmp', 'rgb_32bpp.bmp', 'rgb_4bpp.bmp',
              'rgb_8bpp.bmp', 'rgba_32bpp.bmp']
dds_images = ['rgba_dxt1.dds', 'rgba_dxt3.dds', 'rgba_dxt5.dds', 'rgb_dxt1.dds']
png_images = ['la.png', 'l.png', 'rgba.png', 'rgb.png']
# PyPNG also handles palettised PNGs, so it gets two extra samples.
pypng_images = png_images + ['rgb_8bpp.png', 'rgb_8bpp_trans.png']
gif_images = ['8bpp.gif']
def test_checkerboard(image_test):
    """Test that the checkerboard pattern looks correct.

    Fix: the question shown to the tester read "checkboard" -- typo
    corrected to "checkerboard".
    """
    image_test.create_window()
    image_test.ask_question(
        "Do you see a checkerboard pattern in two levels of grey?"
    )
@pytest.mark.parametrize('image_name', bmp_images)
def test_bmp_loading(image_test, image_name):
    """Load each BMP sample with pyglet's BMP decoder."""
    from pyglet.image.codecs.bmp import BMPImageDecoder
    decoder = BMPImageDecoder()
    image_test.test_image_loading(decoder, image_name)


@pytest.mark.parametrize('image_name', dds_images)
def test_dds_loading(image_test, image_name):
    """Load each DDS sample with pyglet's DDS decoder."""
    from pyglet.image.codecs.dds import DDSImageDecoder
    decoder = DDSImageDecoder()
    image_test.test_image_loading(decoder, image_name)


@pytest.mark.parametrize('image_name', png_images)
def test_pil_loading(image_test, image_name):
    """Load each PNG sample via PIL (skipped when PIL is absent)."""
    try:
        from PIL import Image  # noqa -- availability probe only
    except ImportError:
        pytest.skip('PIL not available')
    from pyglet.image.codecs.pil import PILImageDecoder
    decoder = PILImageDecoder()
    image_test.test_image_loading(decoder, image_name)


@pytest.mark.parametrize('image_name', png_images + gif_images)
@require_platform(Platform.LINUX)
def test_gdkpixbuf2_loading(image_test, image_name):
    """Load PNG/GIF samples via the Linux-specific GdkPixbuf2 decoder."""
    from pyglet.image.codecs.gdkpixbuf2 import GdkPixbuf2ImageDecoder
    decoder = GdkPixbuf2ImageDecoder()
    image_test.test_image_loading(decoder, image_name)


@pytest.mark.parametrize('image_name', png_images)
@require_platform(Platform.WINDOWS)
def test_gdiplus_loading(image_test, image_name):
    """Load PNG samples via the Windows-specific GDI+ decoder."""
    from pyglet.image.codecs.gdiplus import GDIPlusDecoder
    decoder = GDIPlusDecoder()
    image_test.test_image_loading(decoder, image_name)


@pytest.mark.parametrize('image_name', png_images)
@require_platform(Platform.OSX)
def test_quartz_loading(image_test, image_name):
    """Load PNG samples via the OSX-specific Quartz decoder."""
    from pyglet.image.codecs.quartz import QuartzImageDecoder
    decoder = QuartzImageDecoder()
    image_test.test_image_loading(decoder, image_name)


@pytest.mark.parametrize('image_name', png_images)
@require_platform(Platform.OSX)
def test_quicktime_loading(image_test, image_name):
    """Load PNG samples via the OSX-specific QuickTime decoder."""
    from pyglet.image.codecs.quicktime import QuickTimeDecoder
    decoder = QuickTimeDecoder()
    image_test.test_image_loading(decoder, image_name)


@pytest.mark.parametrize('image_name', pypng_images)
def test_pypng_loading(image_test, image_name):
    """Load PNG samples via the bundled PyPNG decoder."""
    from pyglet.image.codecs.png import PNGImageDecoder
    decoder = PNGImageDecoder()
    image_test.test_image_loading(decoder, image_name)
@pytest.mark.parametrize('image_name', png_images)
def test_pil_saving(image_test, image_name):
    """Round-trip each PNG sample through the PIL encoder (skipped without PIL)."""
    try:
        from PIL import Image  # noqa -- availability probe only
    except ImportError:
        pytest.skip('PIL not available')
    from pyglet.image.codecs.pil import PILImageEncoder
    encoder = PILImageEncoder()
    image_test.test_image_saving(encoder, image_name)


@pytest.mark.parametrize('image_name', png_images)
def test_pypng_saving(image_test, image_name):
    """Round-trip each PNG sample through the bundled PyPNG encoder."""
    from pyglet.image.codecs.png import PNGImageEncoder
    encoder = PNGImageEncoder()
    image_test.test_image_saving(encoder, image_name)
@pytest.mark.parametrize('image_name', ['rgb.png', 'rgba.png'])
@require_gl_extension('GL_ARB_imaging')
def test_arb(image_test, image_name):
    """Test swapping color channels using the ARB imaging extension."""
    image_test.create_window()
    get_file = image_test.test_data.get_file
    # Left: the image as-is; right: the same image requested in 'GRB'
    # order, so red and green end up swapped.
    image_test.load_left(get_file('images', image_name))
    image_test.load_right_arb(get_file('images', image_name), 'GRB')
    image_test.ask_question(
        "In the right image red and green should be swapped."
    )
def test_buffer_copy(image_test):
    """Test colour buffer copy to texture.
    A scene consisting of a single coloured triangle will be rendered. The
    colour buffer will then be copied directly into a texture (no
    intermediate save, unlike test_buffer_saving).
    You might see the original scene first shortly before the
    buffer image appears (because retrieving the image is a slow
    operation).
    """
    image_test.create_window(width=800, height=600)
    image_test.show_triangle_left = True
    image_test.show_text = False
    image_test.show_checkerboard = False
    def step(dt):
        # Runs after the first frame, so a rendered scene exists to copy.
        image_test.copy_color_buffer()
        image_test.show_text = True
        return True
    image_test.schedule_once(step)
    image_test.ask_question(
            'You should see the same coloured triangle left and right.'
        )
def test_buffer_saving(image_test):
    """Test colour buffer save.
    A scene consisting of a single coloured triangle will be rendered. The
    colour buffer will then be saved to a stream and loaded as a texture.
    You might see the original scene first shortly before the
    buffer image appears (because retrieving and saving the image is a slow
    operation).
    """
    image_test.create_window(width=800, height=600)
    image_test.show_triangle_left = True
    image_test.show_text = False
    image_test.show_checkerboard = False
    def step(dt):
        # Runs after the first frame, so a rendered scene exists to save.
        image_test.save_and_load_color_buffer()
        image_test.show_text = True
        return True
    image_test.schedule_once(step)
    image_test.ask_question(
            'You should see the same coloured triangle left and right.'
        )
def test_depth_buffer_saving(image_test):
    """Test depth buffer save.
    A scene consisting of a single coloured triangle will be rendered. The
    depth buffer will then be saved to a stream and loaded as a texture.
    You might see the original scene first for up to several seconds before the
    depth buffer image appears (because retrieving and saving the image is
    a slow operation).
    """
    image_test.create_window(width=800, height=600)
    image_test.show_triangle_left = True
    image_test.show_text = False
    image_test.show_checkerboard = False
    def step(dt):
        # Runs after the first frame, so a depth buffer exists to save.
        image_test.save_and_load_depth_buffer()
        image_test.show_text = True
        return True
    image_test.schedule_once(step)
    image_test.ask_question(
            'You should see a coloured triangle left and its depth buffer right. '
            'The bottom-left corner is lightest, the bottom-right is darker and '
            'the top corner is darkest (corresponding the depth of the triangle.'
        )
|
{
"content_hash": "8ea4c0e28a03c032bcf28a530dc46eda",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 96,
"avg_line_length": 36.22686567164179,
"alnum_prop": 0.6462590639419907,
"repo_name": "nicememory/pie",
"id": "831e28923b7501db0b2388b850fd84073d943f8c",
"size": "12136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyglet/tests/interactive/image/test_image.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5318"
},
{
"name": "C",
"bytes": "6624"
},
{
"name": "CSS",
"bytes": "1828"
},
{
"name": "HTML",
"bytes": "9229"
},
{
"name": "JavaScript",
"bytes": "6751"
},
{
"name": "Makefile",
"bytes": "5773"
},
{
"name": "PHP",
"bytes": "2190"
},
{
"name": "Python",
"bytes": "9377528"
},
{
"name": "Shell",
"bytes": "664"
},
{
"name": "Vim script",
"bytes": "2952"
}
],
"symlink_target": ""
}
|
import unittest
from should_dsl import should, should_not
from fluidity import StateMachine, state, transition
from fluidity import InvalidTransition
class MyMachine(StateMachine):
    # Example workflow machine used by the specs below.  Transition graph:
    #   created --queue--> waiting --process--> processed
    #   created/waiting --cancel--> canceled
    initial_state = 'created'
    state('created')
    state('waiting')
    state('processed')
    state('canceled')
    transition(from_='created', event='queue', to='waiting')
    transition(from_='waiting', event='process', to='processed')
    transition(from_=['waiting', 'created'], event='cancel', to='canceled')
class FluidityEvent(unittest.TestCase):
    """Specs for event methods generated by ``transition(event=...)``."""
    def test_its_declaration_creates_a_method_with_its_name(self):
        # Each declared event becomes a callable on machine instances.
        machine = MyMachine()
        machine |should| respond_to('queue')
        machine |should| respond_to('process')
    def test_it_changes_machine_state(self):
        # Firing an event moves current_state along the transition graph.
        machine = MyMachine()
        machine.current_state |should| equal_to('created')
        machine.queue()
        machine.current_state |should| equal_to('waiting')
        machine.process()
        machine.current_state |should| equal_to('processed')
    def test_it_ensures_event_order(self):
        # Events fired from the wrong state raise InvalidTransition.
        machine = MyMachine()
        machine.process |should| throw(InvalidTransition, message='Cannot process from created')
        machine.queue()
        machine.queue |should| throw(InvalidTransition, message='Cannot queue from waiting')
        machine.process |should_not| throw(Exception)
    def test_it_accepts_multiple_origin_states(self):
        # 'cancel' is declared from_=['waiting', 'created'] and must work
        # from either, but not from 'processed'.
        machine = MyMachine()
        machine.cancel |should_not| throw(Exception)
        machine = MyMachine()
        machine.queue()
        machine.cancel |should_not| throw(Exception)
        machine = MyMachine()
        machine.queue()
        machine.process()
        machine.cancel |should| throw(Exception)
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "9620aaab582cb94fb2752f9701f8e18c",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 96,
"avg_line_length": 30.677966101694917,
"alnum_prop": 0.6607734806629835,
"repo_name": "nsi-iff/fluidity",
"id": "b8036df6ff1ae4e517b9f6dab1e2ba01f6f8a8d5",
"size": "1810",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/event_spec.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32850"
}
],
"symlink_target": ""
}
|
import MySQLdb
# One-off manual check against the local beerdb database.
# (Python 2 script: note the print statements.)
db = MySQLdb.connect(user="beeruser",db="beerdb")
c = db.cursor()
# Elapsed time since the stored Time, plus the Flag, for probe ID 5.
c.execute("""SELECT now()-Time, Flag from Times WHERE ID = 5""")
#print c.fetchall()[0][0]
[time_on, state] = c.fetchone()
print time_on
print state
# NOTE(review): cursor/connection are never closed -- acceptable for a
# throwaway script, but call db.close() if this grows.
#c.execute("""UPDATE Times SET Time=Now(), Flag = 1 WHERE ID = %s""",(self.dbid))
#db.commit()
|
{
"content_hash": "3d50c37273b4316e075c69bfe9b6e8f3",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 81,
"avg_line_length": 27.333333333333332,
"alnum_prop": 0.6554878048780488,
"repo_name": "Wollert/beer",
"id": "d64a4547e4f10d8d161a637c8313fba4dd63fd44",
"size": "328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_db.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "75747"
}
],
"symlink_target": ""
}
|
__author__ = 'Davide Monfrecola'
import requests
import json
from phantomrestclient import auth
class PhantomRequests():
    """Thin client for the Nimbus Phantom REST API.

    Wraps the four HTTP verbs with the stored access token and exposes
    convenience methods for the domain, launch-configuration, site and
    credential resources.

    Fixes vs. the original:
    * ``get_all_sites`` computed ``entity + '?details=true'`` and threw
      the result away, so details were never actually requested.
    * ``get_request`` compared strings with ``is not`` (identity), which
      only works by CPython interning accident; now uses ``!=``.
    * The identical status-code/raise handling in all four verbs is
      factored into one helper.
    """

    def __init__(self):
        self.auth = auth.PhantomAuth()
        self.api_url = 'https://phantom.nimbusproject.org/api/dev'
        self.access_token = self.auth.read_token_from_file()

    def _headers(self):
        # The API expects the stored token as an HTTP Basic credential.
        return {'Authorization': 'Basic %s' % self.access_token}

    @staticmethod
    def _handle(response):
        # Return the body on 200 OK, otherwise raise requests.HTTPError
        # (raise_for_status is a no-op for non-error codes, matching the
        # original behaviour of returning None in that case).
        if response.status_code == requests.codes.ok:
            return response.text
        response.raise_for_status()

    def get_request(self, entity, id=""):
        """GET /<entity>[/<id>] and return the response body."""
        if id != "":
            url = "%s/%s/%s" % (self.api_url, entity, id)
        else:
            url = "%s/%s" % (self.api_url, entity)
        r = requests.get(url, headers=self._headers())
        return self._handle(r)

    def post_request(self, entity, data):
        """POST JSON-encoded ``data`` to /<entity>."""
        r = requests.post("%s/%s" % (self.api_url, entity),
                          data=json.dumps(data),
                          headers=self._headers())
        return self._handle(r)

    def put_request(self, entity, id, data):
        """PUT JSON-encoded ``data`` to /<entity>/<id>."""
        r = requests.put("%s/%s/%s" % (self.api_url, entity, id),
                         data=json.dumps(data),
                         headers=self._headers())
        return self._handle(r)

    def delete_request(self, entity, id):
        """DELETE /<entity>/<id>."""
        r = requests.delete("%s/%s/%s" % (self.api_url, entity, id),
                            headers=self._headers())
        return self._handle(r)

    def get_all_domains(self):
        return self.get_request('domains')

    def get_all_launchconfigurations(self):
        return self.get_request('launchconfigurations')

    def get_all_sites(self, details=True):
        """List sites, requesting per-site details unless details=False."""
        entity = 'sites'
        if details is True:
            # Bug fix: previously the concatenation result was discarded.
            entity += '?details=true'
        return self.get_request(entity=entity)

    def get_credentials(self, cloud_name):
        return self.get_request('credentials/sites/' + cloud_name)

    def create_lc(self, parameters):
        return self.post_request(entity='launchconfigurations', data=parameters)

    def update_lc(self, id, parameters):
        return self.put_request(entity='launchconfigurations', id=id,
                                data=parameters)

    def delete_lc(self, id):
        return self.delete_request(entity='launchconfigurations', id=id)

    def create_domain(self, parameters):
        return self.post_request(entity='domains', data=parameters)

    def update_domain(self, id, parameters):
        return self.put_request(entity='domains', id=id, data=parameters)

    def delete_domain(self, id):
        return self.delete_request(entity='domains', id=id)
|
{
"content_hash": "d3d077046a015392153d5e9ccf713882",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 88,
"avg_line_length": 33.811764705882354,
"alnum_prop": 0.5894224077940153,
"repo_name": "trampfox/nimbus-phantom-rest-client",
"id": "1ce2fb5baa10fc1a1933642484e911da36103d4c",
"size": "2874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phantomrestclient/phantomrequests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11443"
}
],
"symlink_target": ""
}
|
"""
Default Controllers
"""
# -----------------------------------------------------------------------------
def index():
    """
        Main Home Page

        Dispatch order:
        1. a custom page class named after the first non-numeric request
           argument, looked up in the active template's controllers.py
        2. the active template's custom "index" controller, if any
        3. this default homepage (CMS item, menu boxes, quick-access
           boxes and inline login/registration forms)
    """
    auth.settings.register_onvalidation = _register_validation
    auth.configure_user_fields()
    current.menu.oauth = S3MainMenu.menu_oauth()
    page = None
    if len(request.args):
        # Use the first non-numeric argument as page name
        # (RESTful custom controllers may have record IDs in Ajax URLs)
        for arg in request.args:
            pname = arg.split(".", 1)[0] if "." in arg else arg
            if not pname.isdigit():
                page = pname
                break
    # Module name for custom controllers
    name = "controllers"
    custom = None
    templates = settings.get_template()
    if page:
        # Go to a custom page,
        # - args[0] = name of the class in /modules/templates/<template>/controllers.py
        # - other args & vars passed through
        if not isinstance(templates, (tuple, list)):
            templates = (templates,)
        # Later templates in the cascade take precedence, hence reversed.
        for template in templates[::-1]:
            package = "applications.%s.modules.templates.%s" % (appname, template)
            try:
                custom = getattr(__import__(package, fromlist = [name]), name)
            except (ImportError, AttributeError):
                # No Custom Page available, continue with the default
                #page = "modules/templates/%s/controllers.py" % template
                #current.log.warning("File not loadable",
                #                    "%s, %s" % (page, sys.exc_info()[1]))
                continue
            else:
                if hasattr(custom, page):
                    controller = getattr(custom, page)()
                elif page != "login":
                    raise HTTP(404, "Function not found: %s()" % page)
                else:
                    # "login" falls back to the custom index page
                    controller = custom.index()
                output = controller()
                return output
    elif templates != "default":
        # Try a Custom Homepage
        if not isinstance(templates, (tuple, list)):
            templates = (templates,)
        for template in templates[::-1]:
            package = "applications.%s.modules.templates.%s" % (appname, template)
            try:
                custom = getattr(__import__(package, fromlist = [name]), name)
            except (ImportError, AttributeError):
                # No Custom Page available, continue with the next option, or default
                # @ToDo: cache this result in session
                #import sys
                #current.log.warning("Custom homepage cannot be loaded",
                #                    sys.exc_info()[1])
                continue
            else:
                if hasattr(custom, "index"):
                    output = custom.index()()
                    return output
    # Default Homepage
    title = settings.get_system_name()
    response.title = title
    # CMS Contents for homepage
    item = ""
    #has_module = settings.has_module
    if settings.has_module("cms"):
        # CMS posts linked to module "default" with no resource, or
        # resource "index", provide the homepage body text.
        table = s3db.cms_post
        ltable = s3db.cms_post_module
        query = (ltable.module == "default") & \
                ((ltable.resource == None) | (ltable.resource == "index")) & \
                (ltable.post_id == table.id) & \
                (table.deleted != True)
        item = db(query).select(table.body,
                                limitby = (0, 1)
                                ).first()
        if item:
            item = DIV(XML(item.body))
        else:
            item = ""
    # Menu boxes
    from s3layouts import S3HomepageMenuLayout as HM
    sit_menu = HM("Situation Awareness")(
        HM("Map", c="gis", f="index", icon="map-marker"),
        HM("Incidents", c="event", f="incident_report", icon="incident"),
        HM("Alerts", c="cap", f="alert", icon="alert"),
        HM("Assessments", c="survey", f="series", icon="assessment"),
    )
    org_menu = HM("Who is doing What and Where")(
        HM("Organizations", c="org", f="organisation", icon="organisation"),
        HM("Facilities", c="org", f="facility", icon="facility"),
        HM("Activities", c="project", f="activity", icon="activity"),
        HM("Projects", c="project", f="project", icon="project"),
    )
    res_menu = HM("Manage Resources")(
        HM("Staff", c="hrm", f="staff", t="hrm_human_resource", icon="staff"),
        HM("Volunteers", c="vol", f="volunteer", t="hrm_human_resource", icon="volunteer"),
        HM("Relief Goods", c="inv", f="inv_item", icon="goods"),
        HM("Assets", c="asset", f="asset", icon="asset"),
    )
    aid_menu = HM("Manage Aid")(
        HM("Requests", c="req", f="req", icon="request"),
        HM("Commitments", c="req", f="commit", icon="commit"),
        HM("Sent Shipments", c="inv", f="send", icon="shipment"),
        HM("Received Shipments", c="inv", f="recv", icon="delivery"),
    )
    # @todo: re-integrate or deprecate (?)
    #if has_module("cr"):
    #    table = s3db.cr_shelter
    #    SHELTERS = s3.crud_strings["cr_shelter"].title_list
    #else:
    #    SHELTERS = ""
    #facility_box = HM("Facilities", _id="facility_box")(
    #    HM("Facilities", c="org", f="facility"),
    #    HM("Hospitals", c="hms", f="hospital"),
    #    HM("Offices", c="org", f="office"),
    #    HM(SHELTERS, c="cr", f="shelter"),
    #    HM("Warehouses", c="inv", f="warehouse"),
    #    HM("Map", c="gis", f="index",
    #       icon="/%s/static/img/map_icon_128.png" % appname,
    #       ),
    #)
    # Check logged in AND permissions
    roles = session.s3.roles
    table = s3db.org_organisation
    has_permission = auth.s3_has_permission
    AUTHENTICATED = auth.get_system_roles().AUTHENTICATED
    if AUTHENTICATED in roles and has_permission("read", table):
        # organisation() is the datatable controller defined elsewhere
        # in this file; .aadata is its server-side-pagination endpoint.
        org_items = organisation()
        datatable_ajax_source = "/%s/default/organisation.aadata" % appname
        # List of Organisations
        if has_permission("create", table):
            create = A(T("Create Organization"),
                       _href = URL(c = "org",
                                   f = "organisation",
                                   args = ["create"],
                                   ),
                       _id = "add-org-btn",
                       _class = "action-btn",
                       )
        else:
            create = ""
        org_box = DIV(create,
                      H3(T("Organizations")),
                      org_items,
                      _id = "org-box",
                      _class = "menu-box"
                      )
        s3.actions = None
        response.view = "default/index.html"
        # Quick Access Box for Sites
        # Impersonate the org/site controller so the permission check
        # below applies to the right resource.
        permission = auth.permission
        permission.controller = "org"
        permission.function = "site"
        permitted_facilities = auth.permitted_facilities(redirect_on_error = False)
        if permitted_facilities:
            facilities = s3db.org_SiteRepresent().bulk(permitted_facilities,
                                                       include_blank = False,
                                                       )
            # Sort (site_id, label) pairs by label for the dropdown
            facility_list = [(fac, facilities[fac]) for fac in facilities]
            facility_list = sorted(facility_list, key=lambda fac: fac[1])
            facility_opts = [OPTION(fac[1], _value=fac[0])
                             for fac in facility_list]
            manage_facility_box = DIV(H3(T("Manage Your Facilities")),
                                      SELECT(_id = "manage-facility-select",
                                             *facility_opts
                                             ),
                                      A(T("Go"),
                                        _href = URL(c="default", f="site",
                                                    args=[facility_list[0][0]],
                                                    ),
                                        _id = "manage-facility-btn",
                                        _class = "action-btn"
                                        ),
                                      _id = "manage-facility-box",
                                      _class = "menu-box"
                                      )
            s3.jquery_ready.append('''$('#manage-facility-select').change(function(){
$('#manage-facility-btn').attr('href',S3.Ap.concat('/default/site/',$('#manage-facility-select').val()))})
$('#manage-facility-btn').click(function(){
if (($('#manage-facility-btn').attr('href').toString())===S3.Ap.concat('/default/site/None'))
{$("#manage-facility-box").append("<div class='alert alert-error'>%s</div>")
return false}})''' % (T("Please Select a Facility")))
        else:
            manage_facility_box = ""
    else:
        datatable_ajax_source = ""
        manage_facility_box = ""
        org_box = ""
    # Login/Registration forms
    self_registration = settings.get_security_registration_visible()
    registered = False
    login_form = None
    login_div = None
    register_form = None
    register_div = None
    if AUTHENTICATED not in roles:
        # This user isn't yet logged-in
        if "registered" in request.cookies:
            # This browser has logged-in before
            registered = True
        # Provide a login box on front page
        auth.messages.submit_button = T("Login")
        login_form = auth.login(inline = True)
        login_div = DIV(H3(T("Login")),
                        P(XML(T("Registered users can %(login)s to access the system") % \
                              {"login": B(T("login"))})))
        if self_registration:
            # Provide a Registration box on front page
            register_form = auth.register()
            register_div = DIV(H3(T("Register")),
                               P(XML(T("If you would like to help, then please %(sign_up_now)s") % \
                                     {"sign_up_now": B(T("sign-up now"))})))
            if request.env.request_method == "POST":
                # After a failed POST, keep the form that errored visible
                if login_form.errors:
                    hide, show = "#register_form", "#login_form"
                else:
                    hide, show = "#login_form", "#register_form"
                post_script = \
'''$('%s').addClass('hide')
$('%s').removeClass('hide')''' % (hide, show)
            else:
                post_script = ""
            register_script = \
'''$('#register-btn').attr('href','#register')
$('#login-btn').attr('href','#login')
%s
$('#register-btn').click(function(){
$('#register_form').removeClass('hide')
$('#login_form').addClass('hide')
})
$('#login-btn').click(function(){
$('#register_form').addClass('hide')
$('#login_form').removeClass('hide')
})''' % post_script
            s3.jquery_ready.append(register_script)
    # Output dict for the view
    output = {"title": title,
              # CMS Contents
              "item": item,
              # Menus
              "sit_menu": sit_menu,
              "org_menu": org_menu,
              "res_menu": res_menu,
              "aid_menu": aid_menu,
              #"facility_box": facility_box,
              # Quick Access Boxes
              "manage_facility_box": manage_facility_box,
              "org_box": org_box,
              # Login Form
              "login_div": login_div,
              "login_form": login_form,
              # Registration Form
              "register_div": register_div,
              "register_form": register_form,
              # Control Data
              "self_registration": self_registration,
              "registered": registered,
              "r": None, # Required for dataTable to work
              "datatable_ajax_source": datatable_ajax_source,
              }
    #if get_vars.tour:
    #    output = s3db.tour_builder(output)
    return output
# -----------------------------------------------------------------------------
def about():
    """
        The About page provides details on the software dependencies and
        versions available to this instance of Sahana Eden.

        Fixes: VERSION files are now read via context managers (the
        originals leaked file handles), the MySQL/PostgreSQL probe
        connections are closed, unused ``string``/``subprocess`` imports
        are dropped, and bare ``except:`` clauses are narrowed to
        ``except Exception:``.
    """
    _custom_view("about")
    # Allow editing of page content from browser using CMS module
    if settings.has_module("cms"):
        ADMIN = auth.s3_has_role("ADMIN")
        table = s3db.cms_post
        ltable = s3db.cms_post_module
        module = "default"
        resource = "about"
        query = (ltable.module == module) & \
                ((ltable.resource == None) | \
                 (ltable.resource == resource)) & \
                (ltable.post_id == table.id) & \
                (table.deleted != True)
        item = db(query).select(table.id,
                                table.body,
                                limitby = (0, 1)
                                ).first()
        get_vars = {"module": module,
                    "resource": resource,
                    }
        if item:
            from s3 import S3XMLContents
            contents = S3XMLContents(item.body)
            if ADMIN:
                item = DIV(contents,
                           BR(),
                           A(T("Edit"),
                             _href=URL(c="cms", f="post",
                                       args = [item.id, "update"],
                                       vars = get_vars,
                                       ),
                             _class="action-btn"))
            else:
                item = DIV(contents)
        elif ADMIN:
            if s3.crud.formstyle == "bootstrap":
                _class = "btn"
            else:
                _class = "action-btn"
            item = A(T("Edit"),
                     _href=URL(c="cms", f="post",
                               args = "create",
                               vars = get_vars,
                               ),
                     _class="%s cms-edit" % _class)
        else:
            item = H2(T("About"))
    else:
        item = H2(T("About"))
    response.title = T("About")
    # Technical Support Details
    # - hidden entirely, or until login, depending on deployment settings
    if not settings.get_security_version_info() or \
       settings.get_security_version_info_requires_login() and \
       not auth.s3_logged_in():
        return {"details": "",
                "item": item,
                }
    import platform
    with open(os.path.join(request.folder, "VERSION"), "r") as version_file:
        eden_version = version_file.read()
    with open(_apath("../VERSION"), "r") as version_file:
        # Strip the leading "web2py " marker from the version string
        web2py_version = version_file.read()[8:]
    python_version = platform.python_version()
    os_version = platform.platform()
    # Database
    if db_string.find("sqlite") != -1:
        try:
            import sqlite3
            sqlite_version = sqlite3.version
        except Exception:
            sqlite_version = T("Unknown")
        database = TR(TD("SQLite"),
                      TD(sqlite_version))
    elif db_string.find("mysql") != -1:
        database_name = settings.database.get("database", "sahana")
        try:
            # @ToDo: Support using pymysql & Warn
            import MySQLdb
            mysqldb_version = MySQLdb.__revision__
        except Exception:
            mysqldb_version = T("Not installed or incorrectly configured.")
            mysql_version = T("Unknown")
        else:
            con = MySQLdb.connect(host = settings.database.get("host", "localhost"),
                                  port = settings.database.get("port", None) or 3306,
                                  db = database_name,
                                  user = settings.database.get("username", "sahana"),
                                  passwd = settings.database.get("password", "password")
                                  )
            cur = con.cursor()
            cur.execute("SELECT VERSION()")
            mysql_version = cur.fetchone()
            con.close()
        database = TAG[""](TR(TD("MySQL"),
                              TD(mysql_version)),
                           TR(TD("MySQLdb python driver"),
                              TD(mysqldb_version)),
                           TR(TD("Database"),
                              TD(database_name)),
                           )
    else:
        # Postgres
        database_name = settings.database.get("database", "sahana")
        try:
            # @ToDo: Support using pg8000 & Warn
            import psycopg2
            psycopg_version = psycopg2.__version__
        except Exception:
            psycopg_version = T("Not installed or incorrectly configured.")
            pgsql_version = T("Unknown")
        else:
            con = psycopg2.connect(host = settings.database.get("host", "localhost"),
                                   port = settings.database.get("port", None) or 5432,
                                   database = database_name,
                                   user = settings.database.get("username", "sahana"),
                                   password = settings.database.get("password", "password")
                                   )
            cur = con.cursor()
            cur.execute("SELECT version()")
            pgsql_version = cur.fetchone()
            con.close()
        database = TAG[""](TR(TD("PostgreSQL"),
                              TD(pgsql_version)),
                           TR(TD("psycopg2 python driver"),
                              TD(psycopg_version)),
                           TR(TD("Database"),
                              TD(database_name)),
                           )
    # Libraries
    try:
        from lxml import etree
        lxml_version = ".".join([str(i) for i in etree.LXML_VERSION])
    except Exception:
        lxml_version = T("Not installed or incorrectly configured.")
    try:
        import reportlab
        reportlab_version = reportlab.Version
    except Exception:
        reportlab_version = T("Not installed or incorrectly configured.")
    try:
        import shapely
        shapely_version = shapely.__version__
    except Exception:
        shapely_version = T("Not installed or incorrectly configured.")
    try:
        import xlrd
        xlrd_version = xlrd.__VERSION__
    except Exception:
        xlrd_version = T("Not installed or incorrectly configured.")
    try:
        import xlwt
        xlwt_version = xlwt.__VERSION__
    except Exception:
        xlwt_version = T("Not installed or incorrectly configured.")
    details = DIV(TABLE(THEAD(),
                        TBODY(TR(TD(STRONG(T("Configuration"))),
                                 TD(),
                                 _class = "odd",
                                 ),
                              TR(TD(T("Public URL")),
                                 TD(settings.get_base_public_url()),
                                 ),
                              TR(TD(STRONG(T("Core Components"))),
                                 TD(STRONG(T("Version"))),
                                 _class = "odd",
                                 ),
                              TR(TD(settings.get_system_name_short()),
                                 TD(eden_version),
                                 ),
                              TR(TD(T("Web Server")),
                                 TD(request.env.server_software),
                                 _class = "odd",
                                 ),
                              TR(TD("Web2Py"),
                                 TD(web2py_version),
                                 ),
                              TR(TD("Python"),
                                 TD(python_version),
                                 _class = "odd",
                                 ),
                              TR(TD("Operating System"),
                                 TD(os_version),
                                 ),
                              TR(TD(STRONG(T("Database"))),
                                 TD(),
                                 _class = "odd",
                                 ),
                              database,
                              TR(TD(STRONG(T("Other Components"))),
                                 TD(),
                                 _class = "odd",
                                 ),
                              TR(TD("lxml"),
                                 TD(lxml_version),
                                 ),
                              TR(TD("ReportLab"),
                                 TD(reportlab_version),
                                 _class = "odd",
                                 ),
                              TR(TD("Shapely"),
                                 TD(shapely_version),
                                 ),
                              TR(TD("xlrd"),
                                 TD(xlrd_version),
                                 _class = "odd",
                                 ),
                              TR(TD("xlwt"),
                                 TD(xlwt_version),
                                 ),
                              _class = "dataTable display"),
                        _class = "table-container")
                  )
    return {"item": item,
            "details": details,
            }
# -----------------------------------------------------------------------------
def audit():
    """
        RESTful CRUD Controller for Audit Logs
        - used e.g. for Site Activity
    """
    output = s3_rest_controller("s3", "audit")
    return output
# -----------------------------------------------------------------------------
#def call():
# """
# Call an XMLRPC, JSONRPC or RSS service
# - NB This is currently unused in Sahana Eden
# """
# # If webservices don't use sessions, avoid cluttering up the storage
# #session.forget()
# return service()
# -----------------------------------------------------------------------------
def contact():
    """
        Give the user options to contact the site admins.

        Resolution order:
            1) internal Support Requests database (support module enabled
               and user logged-in)
            2) template-specific custom controller
            3) template-specific custom view
            4) CMS page
            5) default HTML view
    """
    if auth.is_logged_in() and settings.has_module("support"):
        # Provide an internal Support Requests ticketing system.
        prefix = "support"
        resourcename = "req"
        tablename = "%s_%s" % (prefix, resourcename)
        table = s3db[tablename]
        # Pre-processor
        def prep(r):
            if r.interactive:
                # Only Admins should be able to update ticket status
                status = table.status
                actions = table.actions
                if not auth.s3_has_role("ADMIN"):
                    status.writable = False
                    actions.writable = False
                if r.method != "update":
                    status.readable = False
                    status.writable = False
                    actions.readable = False
                    actions.writable = False
                # NOTE(review): True is only returned for interactive
                # requests; non-interactive requests fall through to an
                # implicit None (treated as rejected) - confirm intended
                return True
        s3.prep = prep
        output = s3_rest_controller(prefix, resourcename)
        return output
    templates = settings.get_template()
    if templates != "default":
        # Try a Custom Controller
        if not isinstance(templates, (tuple, list)):
            templates = (templates,)
        # Most specific template last in settings => search in reverse order
        for template in templates[::-1]:
            package = "applications.%s.modules.templates.%s" % (appname, template)
            name = "controllers"
            try:
                custom = getattr(__import__(package, fromlist=[name]), name)
            except (ImportError, AttributeError):
                # No Custom Page available, try a custom view
                pass
            else:
                if hasattr(custom, "contact"):
                    # contact in the template is expected to be a class:
                    # instantiate it, then invoke the instance
                    controller = getattr(custom, "contact")()
                    return controller()
        # Try a Custom View
        for template in templates:
            view = os.path.join(request.folder,
                                "modules",
                                "templates",
                                template,
                                "views",
                                "contact.html")
            if os.path.exists(view):
                try:
                    # Pass view as file not str to work in compiled mode
                    response.view = open(view, "rb")
                except IOError:
                    from gluon.http import HTTP
                    raise HTTP("404", "Unable to open Custom View: %s" % view)
                response.title = T("Contact us")
                return {}
    if settings.has_module("cms"):
        # Use CMS
        return s3db.cms_index("default", "contact",
                              page_name = T("Contact Us"))
    # Just use default HTML View
    return {}
# -----------------------------------------------------------------------------
def download():
    """
        Download a file from the upload folder.

        The first request arg is the stored file name; its prefix up to
        the first "." encodes the table the file belongs to, which is
        used for the read-permission check before streaming the file.
    """
    try:
        filename = request.args[0]
    except IndexError:
        # Narrowed from a bare except: only a missing arg is expected here.
        # No legitimate interactive request comes here without a filename,
        # so this hits mainly non-interactive clients, and those do not
        # recognize an error condition from a HTTP 303 => better to raise
        # a proper error than to redirect:
        raise HTTP(400, "No file specified")

    # Check Permissions
    tablename = filename.split(".", 1)[0]
    if "_" in tablename:
        # Load the Model
        table = s3db.table(tablename)
        if table and not auth.s3_has_permission("read", tablename):
            auth.permission.fail()

    return response.download(request, db)
# -----------------------------------------------------------------------------
def get_settings():
    """
        Look up the value of one or more deployment settings.

        Responds to GET requests like /default/get_settings/template/theme
        with a JSON object {setting_name: value}.

        Requires ADMIN permissions and base.allow_testing enabled;
        used by edentest_robot.py.
    """
    # Check if the request has a valid authorization header with admin cred.
    if not auth.s3_has_role("ADMIN"):
        auth.permission.format = None
        auth.permission.fail()
    elif not settings.get_base_allow_testing():
        raise HTTP("405", "Testing not allowed")
    else:
        # Example request: /get_settings/template
        return_settings = {}
        for setting in request.args:
            # Map the URL argument to the corresponding getter,
            # e.g. "template" => settings.get_template()
            function = getattr(settings, "get_%s" % setting, None)
            if function is None:
                # Unknown setting name => skip it rather than crash
                # with an AttributeError (the original behavior)
                continue
            try:
                value = function()
            except TypeError:
                # Getter requires arguments => cannot be looked up generically
                continue
            return_settings[setting] = value
        return response.json(return_settings)
# -----------------------------------------------------------------------------
def group():
    """
        RESTful CRUD controller for person groups.

        Needed when the group add-form is embedded in default/person;
        only the create method is permitted, and only when opened from
        an inline (popup) form.
    """
    # Reject anything that is not an inline (popup) request
    if auth.permission.format != "popup":
        return ""

    # Restrict to the create method
    def prep(r):
        return r.method == "create"
    s3.prep = prep

    return s3_rest_controller("pr", "group")
# -----------------------------------------------------------------------------
def help():
    """
        Help page: CMS-managed content if the cms module is enabled,
        otherwise (or when no CMS item exists and the user cannot create
        one) a static link to the online user guide.

        Fix: the static fallback markup was duplicated verbatim in two
        branches - build it once instead.
    """
    _custom_view("help")

    # Default contents, used when CMS is unavailable or has no item
    fallback = TAG[""](H2(T("Help")),
                       A(T("User & Administration Guide"),
                         _href="http://eden.sahanafoundation.org/wiki/UserGuidelines",
                         _target="_blank"),
                       " - online version")

    if settings.has_module("cms"):
        # Allow editing of page content from browser using CMS module
        ADMIN = auth.s3_has_role("ADMIN")
        table = s3db.cms_post
        ltable = s3db.cms_post_module
        module = "default"
        resource = "help"
        query = (ltable.module == module) & \
                ((ltable.resource == None) | \
                 (ltable.resource == resource)) & \
                (ltable.post_id == table.id) & \
                (table.deleted != True)
        item = db(query).select(table.id,
                                table.body,
                                limitby=(0, 1)).first()
        get_vars = {"module": module,
                    "resource": resource,
                    }
        if item:
            if ADMIN:
                # Show the content with an edit-button
                item = DIV(XML(item.body),
                           BR(),
                           A(T("Edit"),
                             _href=URL(c="cms", f="post",
                                       args=[item.id, "update"],
                                       vars=get_vars,
                                       ),
                             _class="action-btn"))
            else:
                item = DIV(XML(item.body))
        elif ADMIN:
            # No content yet => offer the Admin to create it
            if s3.crud.formstyle == "bootstrap":
                _class = "btn"
            else:
                _class = "action-btn"
            item = A(T("Edit"),
                     _href=URL(c="cms", f="post",
                               args="create",
                               vars=get_vars,
                               ),
                     _class="%s cms-edit" % _class)
        else:
            item = fallback
    else:
        item = fallback

    response.title = T("Help")
    return {"item": item}
# -----------------------------------------------------------------------------
#def load_all_models():
# """
# Controller to load all models in web browser
# - to make it easy to debug in Eclipse
# """
# s3db.load_all_models()
# return "ok"
# -----------------------------------------------------------------------------
def masterkey():
    """ Master key verification and context query """
    if not auth.s3_logged_in():
        # Challenge the client to login with the master key
        auth.permission.fail()

    # Successfully logged-in => provide context information for the
    # master key (e.g. project UUID + title, master key UUID)
    from s3.s3masterkey import S3MasterKey
    return S3MasterKey.context()
# -----------------------------------------------------------------------------
def message():
    """ Show a confirmation screen after account registration """
    #if "verify_email_sent" in request.args:
    title = T("Account Registered - Please Check Your Email")
    text = T("%(system_name)s has sent an email to %(email)s to verify your email address.\nPlease check your email to verify this address. If you do not receive this email please check you junk email or spam filters.") % \
           {"system_name": settings.get_system_name(),
            "email": request.vars.email,
            }
    return {"title": title,
            "message": text,
            "image_src": "/%s/static/img/email_icon.png" % appname,
            }
# -----------------------------------------------------------------------------
def organisation():
    """
        Handle server-side pagination for the organisation list on the
        homepage.

        Serves two representations:
            html   - the initial dataTable markup (pre-fetches 4 pages)
            aadata - the Ajax JSON protocol used by dataTables paging
    """
    representation = request.extension
    resource = s3db.resource("org_organisation")
    totalrows = resource.count()
    # Paging parameters from the request (defaults: first page of 10)
    display_start = int(get_vars.start) if get_vars.start else 0
    display_length = int(get_vars.limit) if get_vars.limit else 10
    limit = display_length
    list_fields = ["id", "name"]
    default_orderby = orderby = "org_organisation.name asc"
    if representation == "aadata":
        # Apply the dataTables search/sort criteria from the request
        query, orderby, left = resource.datatable_filter(list_fields, get_vars)
        if orderby is None:
            orderby = default_orderby
        if query:
            resource.add_filter(query)
    else:
        # Initial HTML load: pre-fetch several pages for client caching
        limit = 4 * limit
    data = resource.select(list_fields,
                           start=display_start,
                           limit=limit,
                           orderby=orderby,
                           count=True,
                           represent=True)
    filteredrows = data["numrows"]
    rfields = data["rfields"]
    data = data["rows"]
    dt = S3DataTable(rfields, data)
    dt.defaultActionButtons(resource)
    s3.no_formats = True
    if representation == "html":
        items = dt.html(totalrows,
                        totalrows,
                        "org_dt",
                        dt_ajax_url=URL(c="default",
                                        f="organisation",
                                        extension="aadata",
                                        vars={"id": "org_dt"},
                                        ),
                        dt_pageLength=display_length,
                        dt_pagination="true",
                        )
    elif representation == "aadata":
        # "draw" is dataTables' request sequence counter, echoed back
        draw = get_vars.get("draw")
        if draw:
            draw = int(draw)
        items = dt.json(totalrows,
                        filteredrows,
                        "org_dt",
                        draw)
    else:
        from gluon.http import HTTP
        raise HTTP(415, ERROR.BAD_FORMAT)
    return items
# -----------------------------------------------------------------------------
def page():
    """
        Show a custom CMS page (/default/page/<name>).

        Looks up a cms_post with the given name that is linked to this
        controller, renders its body as a web2py view template, and
        appends an Edit button for Admins.
    """
    try:
        page = request.args[0]
    except IndexError:
        # Narrowed from a bare except: only a missing arg is expected
        raise HTTP(400, "Page not specified")

    # Find a post with the given page name that is linked to this controller:
    ctable = s3db.cms_post
    ltable = s3db.cms_post_module
    join = ltable.on((ltable.post_id == ctable.id) & \
                     (ltable.module == "default") & \
                     (ltable.resource == "page") & \
                     (ltable.deleted == False))
    query = (ctable.name == page) & \
            (ctable.deleted == False)
    row = db(query).select(ctable.id,
                           ctable.title,
                           ctable.body,
                           join = join,
                           cache = s3db.cache,
                           limitby = (0, 1)
                           ).first()
    if row is None:
        # Explicit None-check instead of try/except AttributeError
        raise HTTP(404, "Page not found in CMS")
    title = row.title

    if row.body:
        from io import StringIO
        try:
            # Interpret the page body as a web2py view template
            body = current.response.render(StringIO(row.body), {})
        except Exception:
            # Fall back to the raw body if it fails to compile/render
            body = row.body
    else:
        body = ""
    item = DIV(XML(body), _class="cms-item")

    if auth.s3_has_role("ADMIN"):
        # Add edit-action
        item.append(BR())
        item.append(A(current.T("Edit"),
                      _href = URL(c="cms", f="post",
                                  args = [row.id, "update"],
                                  vars = {"page": page},
                                  ),
                      _class = "action-btn",
                      ))

    response.title = title
    _custom_view("page")
    return {"item": item,
            }
# -----------------------------------------------------------------------------
def person():
    """
        Personal profile of the currently logged-in user, showing:
            - User Details (account)
            - Person Details
            - Staff/Volunteer Record (optionally via HRM controller)
            - Map Config
        Tab visibility is driven by deployment settings.
    """
    # Get person_id of current user
    if auth.s3_logged_in():
        person_id = str(auth.s3_logged_in_person())
    else:
        person_id = None
    # Fix request args:
    # - leave as-is if this is an options/validate Ajax-request
    # - otherwise, make sure person_id is the first argument
    request_args = request.args
    if not request_args or \
       request_args[0] != person_id and \
       request_args[-1] not in ("options.s3json", "validate.json"):
        if not person_id:
            # Call to profile before login (e.g. from link in welcome email)
            # => redirect to login, then return here
            redirect(URL(f = "user",
                         args = ["login"],
                         vars = {"_next": URL(f="person", args=request_args)},
                         ))
        request.args = [person_id]
    # If the deployment uses the HRM controller for profiles and the
    # user has an HR record, delegate to the HRM person controller
    if settings.get_auth_profile_controller() == "hrm":
        table = s3db.hrm_human_resource
        query = (table.person_id == person_id) & \
                (table.deleted == False)
        hr = db(query).select(table.id,
                              limitby = (0, 1)
                              )
        if hr:
            # Use the HRM controller/rheader
            request.get_vars["profile"] = 1
            return s3db.hrm_person_controller()
    # Use the PR controller/rheader
    set_method = s3db.set_method
    # Custom Method for User
    def auth_profile_method(r, **attr):
        # Renders the auth.profile() form under the person rheader
        # Custom View
        response.view = "update.html"
        current.menu.breadcrumbs = None
        # RHeader for consistency
        rheader = attr.get("rheader", None)
        if callable(rheader):
            rheader = rheader(r)
        table = auth.settings.table_user
        tablename = table._tablename
        next = URL(c = "default",
                   f = "person",
                   args = [person_id,
                           "user_profile",
                           ],
                   )
        # NOTE(review): the trailing comma makes onaccept a 1-tuple;
        # web2py's callback() accepts tuples, so this works, but it
        # looks unintentional - confirm
        onaccept = lambda form: auth.s3_approve_user(form.vars),
        auth.configure_user_fields()
        form = auth.profile(next = next,
                            onaccept = onaccept)
        return {"title": s3.crud_strings["pr_person"]["title_display"],
                "rheader": rheader,
                "form": form,
                }
    set_method("pr", "person",
               method = "user_profile",
               action = auth_profile_method)
    # Custom Method for Contacts
    set_method("pr", "person",
               method = "contacts",
               action = s3db.pr_Contacts)
    #if settings.has_module("asset"):
    #    # Assets as component of people
    #    s3db.add_components("pr_person", asset_asset="assigned_to_id")
    # CRUD pre-process
    def prep(r):
        if r.method in ("options", "validate"):
            return True
        if r.interactive and r.method != "import":
            # Load default model to override CRUD Strings
            tablename = "pr_person"
            table = s3db[tablename]
            # Users can not delete their own person record
            r.resource.configure(deletable = False)
            s3.crud_strings[tablename].update(
                title_display = T("Personal Profile"),
                title_update = T("Personal Profile"))
            # Organisation-dependent Fields
            #set_org_dependent_field = settings.set_org_dependent_field
            #set_org_dependent_field("pr_person_details", "father_name")
            #set_org_dependent_field("pr_person_details", "mother_name")
            #set_org_dependent_field("pr_person_details", "affiliations")
            #set_org_dependent_field("pr_person_details", "company")
            if r.component:
                if r.component_name == "physical_description":
                    # Hide all but those details that we want
                    # Lock all the fields
                    table = r.component.table
                    for field in table.fields:
                        table[field].writable = False
                        table[field].readable = False
                    # Now enable those that we want
                    table.ethnicity.writable = True
                    table.ethnicity.readable = True
                    table.blood_type.writable = True
                    table.blood_type.readable = True
                    table.medical_conditions.writable = True
                    table.medical_conditions.readable = True
                    table.other_details.writable = True
                    table.other_details.readable = True
                elif r.component_name == "config":
                    from s3db.gis import gis_config_form_setup
                    gis_config_form_setup()
                    # Create forms use this
                    # (update forms are in gis/config())
                    crud_fields = ["name",
                                   "pe_default",
                                   "default_location_id",
                                   "zoom",
                                   "lat",
                                   "lon",
                                   #"projection_id",
                                   #"symbology_id",
                                   #"wmsbrowser_url",
                                   #"wmsbrowser_name",
                                   ]
                    osm_table = s3db.gis_layer_openstreetmap
                    openstreetmap = db(osm_table.deleted == False).select(osm_table.id,
                                                                          limitby = (0, 1)
                                                                          )
                    if openstreetmap:
                        # OpenStreetMap config
                        s3db.add_components("gis_config",
                                            auth_user_options = {"joinby": "pe_id",
                                                                 "pkey": "pe_id",
                                                                 "multiple": False,
                                                                 },
                                            )
                        crud_fields += ["user_options.osm_oauth_consumer_key",
                                        "user_options.osm_oauth_consumer_secret",
                                        ]
                    crud_form = s3base.S3SQLCustomForm(*crud_fields)
                    list_fields = ["name",
                                   "pe_default",
                                   ]
                    s3db.configure("gis_config",
                                   crud_form = crud_form,
                                   insertable = False,
                                   list_fields = list_fields,
                                   )
            else:
                table.pe_label.readable = False
                table.pe_label.writable = False
                table.age_group.readable = False
                table.age_group.writable = False
                # Assume volunteers only between 12-81
                dob = table.date_of_birth
                dob.widget = S3CalendarWidget(past_months = 972,
                                              future_months = -144,
                                              )
            return True
        else:
            # Disable non-interactive & import
            return False
    s3.prep = prep
    # CRUD post-process
    def postp(r, output):
        if r.interactive and r.component:
            if r.component_name == "config":
                # Add action buttons to open/show each map config
                update_url = URL(c="gis", f="config",
                                 args = "[id]",
                                 )
                s3_action_buttons(r, update_url=update_url)
                s3.actions.append({"url": URL(c="gis", f="index",
                                              vars = {"config":"[id]"}
                                              ),
                                   "label": s3_str(T("Show")),
                                   "_class": "action-btn",
                                   })
            elif r.component_name == "asset":
                # Provide a link to assign a new Asset
                # @ToDo: Proper Widget to do this inline
                output["add_btn"] = A(T("Assign Asset"),
                                      _href = URL(c="asset", f="asset"),
                                      _id = "add-btn",
                                      _class = "action-btn",
                                      )
        return output
    s3.postp = postp
    # Assemble the profile tabs according to deployment settings
    # (a None tab is simply skipped by the rheader)
    if settings.get_hrm_record_tab():
        hr_tab = (T("Staff/Volunteer Record"), "human_resource")
    else:
        hr_tab = None
    if settings.get_hrm_staff_experience() == "experience":
        experience_tab = (T("Experience"), "experience")
    else:
        experience_tab = None
    if settings.get_hrm_use_certificates():
        certificates_tab = (T("Certificates"), "certificate")
    else:
        certificates_tab = None
    if settings.get_hrm_use_credentials():
        credentials_tab = (T("Credentials"), "credential")
    else:
        credentials_tab = None
    if settings.get_hrm_use_description():
        description_tab = (T("Description"), "physical_description")
    else:
        description_tab = None
    if settings.get_pr_use_address():
        address_tab = (T("Address"), "address")
    else:
        address_tab = None
    if settings.get_hrm_use_education():
        education_tab = (T("Education"), "education")
    else:
        education_tab = None
    if settings.get_hrm_use_id():
        id_tab = (T("ID"), "identity")
    else:
        id_tab = None
    if settings.get_hrm_use_skills():
        skills_tab = (T("Skills"), "competency")
    else:
        skills_tab = None
    teams = settings.get_hrm_teams()
    if teams:
        teams_tab = (T(teams), "group_membership")
    else:
        teams_tab = None
    if settings.get_hrm_use_trainings():
        trainings_tab = (T("Trainings"), "training")
    else:
        trainings_tab = None
    setting = settings.get_pr_contacts_tabs()
    if setting:
        contacts_tab = (settings.get_pr_contacts_tab_label(), "contacts")
    else:
        contacts_tab = None
    tabs = [(T("Person Details"), None),
            (T("User Account"), "user_profile"),
            hr_tab,
            id_tab,
            description_tab,
            address_tab,
            contacts_tab,
            education_tab,
            trainings_tab,
            certificates_tab,
            skills_tab,
            credentials_tab,
            experience_tab,
            teams_tab,
            #(T("Assets"), "asset"),
            #(T("My Subscriptions"), "subscription"),
            (T("My Maps"), "config"),
            ]
    return s3_rest_controller("pr", "person",
                              rheader = lambda r, tabs=tabs: \
                                  s3db.pr_rheader(r, tabs=tabs))
# -----------------------------------------------------------------------------
def privacy():
    """ Privacy page, using a template-specific view when available """
    response.title = T("Privacy")
    _custom_view("privacy")
    return {}
# -----------------------------------------------------------------------------
def public_url():
    """ Simple check for use in monitoring scripts """
    url = settings.get_base_public_url()
    return url
# -----------------------------------------------------------------------------
def rapid():
    """ Set/remove the rapid data entry flag """
    # Any value other than "0" (including an absent "val") enables the flag
    flag = get_vars.get("val") != "0"
    session.s3.rapid_data_entry = flag
    response.view = "xml.html"
    return {"item": str(flag)}
# -----------------------------------------------------------------------------
def site():
    """
        Redirect to the instance-specific controller for an org_site,
        e.g. /default/site/<site_id> => /org/office/<id>.

        @ToDo: Avoid redirect
    """
    try:
        site_id = request.args[0]
    except IndexError:
        raise HTTP(404)

    table = s3db.org_site
    record = db(table.site_id == site_id).select(table.instance_type,
                                                 limitby = (0, 1)
                                                 ).first()
    if record is None:
        # No such site (original code crashed with AttributeError here)
        raise HTTP(404)

    tablename = record.instance_type
    table = s3db.table(tablename)
    if table:
        row = db(table.site_id == site_id).select(table.id,
                                                  limitby = (0, 1)
                                                  ).first()
        if row is None:
            # Super-entity row exists but instance record is gone
            raise HTTP(404)
        prefix, resourcename = tablename.split("_", 1)
        redirect(URL(c = prefix,
                     f = resourcename,
                     args = [row.id]))
# -----------------------------------------------------------------------------
def skill():
    """
        RESTful CRUD controller for skills.

        Needed when the skill add-form is embedded in default/person;
        only the create method is permitted, and only when opened from
        an inline (popup) form.
    """
    # Reject anything that is not an inline (popup) request
    if auth.permission.format != "popup":
        return ""

    # Restrict to the create method
    def prep(r):
        return r.method == "create"
    s3.prep = prep

    return s3_rest_controller("hrm", "skill")
# -----------------------------------------------------------------------------
def tables():
    """
        RESTful CRUD controller for dynamic table models
    """
    output = s3_rest_controller("s3", "table",
                                csv_stylesheet = ("s3", "table.xsl"),
                                csv_template = ("s3", "table"),
                                rheader = s3db.s3_table_rheader,
                                )
    return output
# -----------------------------------------------------------------------------
def table():
    """
        RESTful CRUD controller for dynamic table contents.

        The first request arg is the resource name, i.e. the name of
        the dynamic table without prefix, e.g.:
        default/table/test accesses the s3dt_test table.
    """
    args = request.args
    if not args:
        raise HTTP(400, "No resource specified")
    # Strip any format extension from the resource name
    resource_name = args[0].rsplit(".", 1)[0]
    return s3_rest_controller(dynamic = resource_name)
# -----------------------------------------------------------------------------
def tos():
    """ Terms of Service page, using a template-specific view when available """
    response.title = T("Terms of Service")
    _custom_view("tos")
    return {}
# -----------------------------------------------------------------------------
def user():
    """
        Auth functions based on arg. See gluon/tools.py.

        Dispatches on request.args(0): login, register, change_password,
        retrieve_password, profile, consent, options.s3json, logout,
        verify_email. Renders a template-specific user.html if available.
    """
    auth_settings = auth.settings
    utable = auth_settings.table_user
    arg = request.args(0)
    if arg == "verify_email":
        # Ensure we use the user's language
        key = request.args[-1]
        query = (utable.registration_key == key)
        user = db(query).select(utable.language,
                                limitby=(0, 1)).first()
        if not user:
            redirect(auth_settings.verify_email_next)
        session.s3.language = user.language
    auth_settings.on_failed_authorization = URL(f="error")
    auth.configure_user_fields()
    auth_settings.profile_onaccept = auth.s3_user_profile_onaccept
    auth_settings.register_onvalidation = _register_validation
    # Check for template-specific customisations
    customise = settings.customise_auth_user_controller
    if customise:
        customise(arg = arg)
    self_registration = settings.get_security_self_registration()
    login_form = register_form = None
    current.menu.oauth = S3MainMenu.menu_oauth()
    if not settings.get_auth_password_changes():
        # Block Password changes as these are managed externally (OpenID / SMTP / LDAP)
        auth_settings.actions_disabled = ("change_password",
                                          "retrieve_password",
                                          )
    elif not settings.get_auth_password_retrieval():
        # Block password retrieval
        auth_settings.actions_disabled = ("retrieve_password",
                                          )
    header = response.s3_user_header or ""
    if arg == "login":
        title = response.title = T("Login")
        # @ToDo: move this code to /modules/s3/s3aaa.py:def login()?
        auth.messages.submit_button = T("Login")
        form = auth()
        #form = auth.login()
        login_form = form
    elif arg == "register":
        # @ToDo: move this code to /modules/s3/s3aaa.py:def register()?
        if not self_registration:
            session.error = T("Registration not permitted")
            redirect(URL(f="index"))
        if response.title:
            # Customised
            title = response.title
        else:
            # Default
            title = response.title = T("Register")
        form = register_form = auth.register()
    elif arg == "change_password":
        title = response.title = T("Change Password")
        form = auth()
        # Add client-side validation (password strength meter)
        js_global = []
        js_append = js_global.append
        js_append('''S3.password_min_length=%i''' % settings.get_auth_password_min_length())
        js_append('''i18n.password_min_chars="%s"''' % T("You must enter a minimum of %d characters"))
        js_append('''i18n.weak="%s"''' % T("Weak"))
        js_append('''i18n.normal="%s"''' % T("Normal"))
        js_append('''i18n.medium="%s"''' % T("Medium"))
        js_append('''i18n.strong="%s"''' % T("Strong"))
        js_append('''i18n.very_strong="%s"''' % T("Very Strong"))
        script = '''\n'''.join(js_global)
        s3.js_global.append(script)
        if s3.debug:
            s3.scripts.append("/%s/static/scripts/jquery.pstrength.2.1.0.js" % appname)
        else:
            s3.scripts.append("/%s/static/scripts/jquery.pstrength.2.1.0.min.js" % appname)
        s3.jquery_ready.append(
'''$('.password:eq(1)').pstrength({
'minChar': S3.password_min_length,
'minCharText': i18n.password_min_chars,
'verdicts': [i18n.weak, i18n.normal, i18n.medium, i18n.strong, i18n.very_strong]
})''')
    elif arg == "retrieve_password":
        title = response.title = T("Lost Password")
        form = auth()
    elif arg == "profile":
        title = response.title = T("User Profile")
        form = auth.profile()
    elif arg == "consent":
        title = response.title = T("Consent")
        form = auth.consent()
    elif arg == "options.s3json":
        # Used when adding organisations from registration form
        return s3_rest_controller(prefix="auth", resourcename="user")
    else:
        # logout or verify_email
        title = ""
        form = auth()
    if form:
        if s3.crud.submit_style:
            # Apply the theme's submit-button style to the form
            form[0][-1][1][0]["_class"] = s3.crud.submit_style
    templates = settings.get_template()
    if templates != "default":
        # Try a Custom View
        folder = request.folder
        if not isinstance(templates, (tuple, list)):
            templates = (templates,)
        for template in templates[::-1]:
            view = os.path.join(folder,
                                "modules",
                                "templates",
                                template,
                                "views",
                                "user.html")
            if os.path.exists(view):
                try:
                    # Pass view as file not str to work in compiled mode
                    response.view = open(view, "rb")
                except IOError:
                    from gluon.http import HTTP
                    raise HTTP("404", "Unable to open Custom View: %s" % view)
                else:
                    break
    return {"title": title,
            "header": header,
            "form": form,
            "login_form": login_form,
            "register_form": register_form,
            "self_registration": self_registration,
            }
# -----------------------------------------------------------------------------
def video():
    """ Video tutorials page, using a template-specific view when available """
    response.title = T("Video Tutorials")
    _custom_view("video")
    return {}
# -----------------------------------------------------------------------------
def view():
    """ Render an arbitrary custom view named by the first request arg """
    name = request.args(0)
    response.title = name
    # NOTE(review): "name" comes straight from the URL and is joined into a
    # filesystem path by _custom_view - confirm that traversal sequences
    # cannot escape the templates folder
    _custom_view(name)
    return {}
# =============================================================================
# Login Methods
# =============================================================================
def facebook():
    """ Login using Facebook """
    channel = s3db.msg_facebook_login()
    if not channel:
        # Facebook login not configured => fall back to standard login
        redirect(URL(f="user", args=request.args, vars=get_vars))

    from s3oauth import FaceBookAccount
    auth.settings.login_form = FaceBookAccount(channel)
    form = auth()
    return {"form": form}
# -----------------------------------------------------------------------------
def google():
    """ Login using Google """
    channel = settings.get_auth_google()
    if not channel:
        # Google login not configured => fall back to standard login
        redirect(URL(f="user", args=request.args, vars=get_vars))

    from s3oauth import GooglePlusAccount
    auth.settings.login_form = GooglePlusAccount(channel)
    form = auth()
    return {"form": form}
# -----------------------------------------------------------------------------
def humanitarian_id():
    """ Login using Humanitarian.ID """
    channel = settings.get_auth_humanitarian_id()
    if not channel:
        # Humanitarian.ID login not configured => fall back to standard login
        redirect(URL(f="user", args=request.args, vars=get_vars))

    from s3oauth import HumanitarianIDAccount
    auth.settings.login_form = HumanitarianIDAccount(channel)
    form = auth()
    return {"form": form}
# -----------------------------------------------------------------------------
def openid_connect():
    """ Login using OpenID Connect """
    channel = settings.get_auth_openid_connect()
    if not channel:
        # OpenID Connect not configured => fall back to standard login
        redirect(URL(f="user", args=request.args, vars=get_vars))

    from s3oauth import OpenIDConnectAccount
    auth.settings.login_form = OpenIDConnectAccount(channel)
    form = auth()
    return {"form": form}
# =============================================================================
# Helpers
# =============================================================================
def _apath(path = ""):
    """
        Resolve a path relative to the applications folder
        (i.e. one level above the application folder itself).
    """
    from gluon.fileutils import up
    opath = up(request.folder)
    # @ToDo: This path manipulation is very OS specific.
    # Consume leading "../" segments, stepping one folder up for each
    while path.startswith("../"):
        opath = up(opath)
        path = path[3:]
    return os.path.join(opath, path).replace("\\", "/")
# -----------------------------------------------------------------------------
def _custom_view(filename):
    """
        Use a template-specific view for a page if one exists.

        Searches the active templates (most specific searched first,
        hence the reverse order) for views/<filename>.html and sets the
        first match as the response view.
    """
    templates = settings.get_template()
    if templates == "default":
        return
    if not isinstance(templates, (tuple, list)):
        templates = (templates,)
    folder = request.folder
    for template in reversed(templates):
        view = os.path.join(folder,
                            "modules",
                            "templates",
                            template,
                            "views",
                            "%s.html" % filename)
        if not os.path.exists(view):
            continue
        try:
            # Pass view as file not str to work in compiled mode
            response.view = open(view, "rb")
        except IOError:
            from gluon.http import HTTP
            raise HTTP("404", "Unable to open Custom View: %s" % view)
        else:
            break
# -----------------------------------------------------------------------------
def _register_validation(form):
    """
        Validate the phone number fields in the registration form and
        apply the default organisation if configured.

        Fix: "import re" and the pattern compilation were duplicated in
        both the mobile and the home phone branch - hoisted to compile
        the pattern once.

        Args:
            form: the registration FORM (errors are set on form.errors)
    """
    import re
    phone_pattern = re.compile(SINGLE_PHONE_NUMBER_PATTERN)

    form_vars = form.vars

    # Mobile Phone
    mobile = form_vars.get("mobile")
    if mobile:
        if not phone_pattern.match(mobile):
            form.errors.mobile = T("Invalid phone number")
    elif settings.get_auth_registration_mobile_phone_mandatory():
        form.errors.mobile = T("Phone number is required")

    # Home Phone
    home = form_vars.get("home")
    if home:
        if not phone_pattern.match(home):
            form.errors.home = T("Invalid phone number")

    org = settings.get_auth_registration_organisation_default()
    if org:
        # Add to default organisation
        form_vars.organisation_id = org
# END =========================================================================
|
{
"content_hash": "8f2e4e2e5be074f89dfed7ed76b8826b",
"timestamp": "",
"source": "github",
"line_count": 1678,
"max_line_length": 225,
"avg_line_length": 35.88140643623361,
"alnum_prop": 0.45612782142204655,
"repo_name": "flavour/eden",
"id": "3f20cbc90f18cf17a9d20e1a59a911e5abb9e7a0",
"size": "60234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "controllers/default.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "727"
},
{
"name": "CSS",
"bytes": "3351335"
},
{
"name": "HTML",
"bytes": "1367727"
},
{
"name": "JavaScript",
"bytes": "20109418"
},
{
"name": "NSIS",
"bytes": "3934"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "31407527"
},
{
"name": "Ruby",
"bytes": "8291"
},
{
"name": "Shell",
"bytes": "5059"
},
{
"name": "XSLT",
"bytes": "3274119"
}
],
"symlink_target": ""
}
|
import mdsclient
from plugins.contracts import ICartridgeAgentPlugin
from xml.dom.minidom import parse
import socket
from modules.util.log import LogFactory
import time
import subprocess
import os
class WSO2ISMetaDataHandler(ICartridgeAgentPlugin):
    """Cartridge agent plugin that configures a WSO2 Identity Server
    instance for SAML SSO from the Stratos metadata service, then starts
    the server.
    """

    def run_plugin(self, values):
        """Configure and start WSO2 IS.

        Steps performed:
          1. Poll the metadata service until SSO_ISSUER and CALLBACK_URL
             are published for this application.
          2. Register a matching <ServiceProvider> entry in
             repository/conf/security/sso-idp-config.xml.
          3. Publish this member's SAML endpoint (SAML_ENDPOINT) back to
             the metadata service.
          4. Patch carbon.xml and catalina-server.xml via sed, then start
             the server.

        :param values: agent parameter dict; this method reads
            APPLICATION_ID, HOST_NAME and PORT_MAPPINGS.
        """
        log = LogFactory().get_log(__name__)
        log.info("Starting wso2is metadata handler...")
        # read tomcat app related values from metadata
        # Busy-wait (5s interval) until both required properties exist.
        mds_response = None
        while mds_response is None:
            log.debug("Waiting for SSO_ISSUER and CALLBACK_URL to be available from metadata service for app ID: %s"
                      % values["APPLICATION_ID"])
            time.sleep(5)
            mds_response = mdsclient.get(app=True)
            if mds_response is not None:
                if mds_response.properties.get("SSO_ISSUER") is None or \
                    mds_response.properties.get("CALLBACK_URL") is None:
                    mds_response = None
        # mds_response = mdsclient.get()
        issuer = mds_response.properties["SSO_ISSUER"]
        acs = mds_response.properties["CALLBACK_URL"]
        # add a service provider in the security/sso-idp-config.xml file
        # is_root = values["APPLICATION_PATH"]
        # NOTE(review): assumes CARBON_HOME is set in the environment;
        # if it is not, sso_idp_file becomes "None/repository/..." - confirm.
        is_root = os.environ.get("CARBON_HOME")
        sso_idp_file = "%s/repository/conf/security/sso-idp-config.xml" % is_root
        # Target XML shape being appended (reference):
        # <SSOIdentityProviderConfig>
        # <ServiceProviders>
        # <ServiceProvider>
        # <Issuer>wso2.my.dashboard</Issuer>
        # <AssertionConsumerService>https://is.wso2.com/dashboard/acs</AssertionConsumerService>
        # <SignAssertion>true</SignAssertion>
        # <SignResponse>true</SignResponse>
        # <EnableAttributeProfile>false</EnableAttributeProfile>
        # <IncludeAttributeByDefault>false</IncludeAttributeByDefault>
        # <Claims>
        # <Claim>http://wso2.org/claims/role</Claim>
        # </Claims>
        # <EnableSingleLogout>false</EnableSingleLogout>
        # <SingleLogoutUrl></SingleLogoutUrl>
        # <EnableAudienceRestriction>true</EnableAudienceRestriction>
        # <AudiencesList>
        # <Audience>carbonServer</Audience>
        # </AudiencesList>
        # <ConsumingServiceIndex></ConsumingServiceIndex>
        # </ServiceProvider>
        with open(sso_idp_file, "r") as f:
            sp_dom = parse(f)
        root_element = sp_dom.documentElement
        sps_element = sp_dom.getElementsByTagName("ServiceProviders")[0]
        # Build the new <ServiceProvider> element from the metadata values.
        sp_entry = sp_dom.createElement("ServiceProvider")
        sp_entry_issuer = sp_dom.createElement("Issuer")
        sp_entry_issuer.appendChild(sp_dom.createTextNode(issuer))
        sp_entry_acs = sp_dom.createElement("AssertionConsumerService")
        sp_entry_acs.appendChild(sp_dom.createTextNode(acs))
        sp_entry_sign_resp = sp_dom.createElement("SignResponse")
        sp_entry_sign_resp.appendChild(sp_dom.createTextNode("true"))
        sp_entry_sign_assert = sp_dom.createElement("SignAssertion")
        sp_entry_sign_assert.appendChild(sp_dom.createTextNode("true"))
        sp_entry_single_logout = sp_dom.createElement("EnableSingleLogout")
        sp_entry_single_logout.appendChild(sp_dom.createTextNode("true"))
        sp_entry_attribute_profile = sp_dom.createElement("EnableAttributeProfile")
        sp_entry_attribute_profile.appendChild(sp_dom.createTextNode("true"))
        sp_entry.appendChild(sp_entry_issuer)
        sp_entry.appendChild(sp_entry_acs)
        sp_entry.appendChild(sp_entry_sign_resp)
        sp_entry.appendChild(sp_entry_sign_assert)
        sp_entry.appendChild(sp_entry_single_logout)
        sp_entry.appendChild(sp_entry_attribute_profile)
        sps_element.appendChild(sp_entry)
        # Rewrite the config file in place with the new entry appended.
        with open(sso_idp_file, 'w+') as f:
            root_element.writexml(f, newl="\n")
        # root_element.writexml(f)
        # data = json.loads(urllib.urlopen("http://ip.jsontest.com/").read())
        # ip_entry = data["ip"]
        # publish SAML_ENDPOINT to metadata service
        # member_hostname = socket.gethostname()
        member_hostname = values["HOST_NAME"]
        # read kubernetes service https port
        log.info("Reading port mappings...")
        port_mappings_str = values["PORT_MAPPINGS"]
        https_port = None
        # port mappings format: """NAME:mgt-console|PROTOCOL:https|PORT:4500|PROXY_PORT:8443;
        #                          NAME:tomcat-http|PROTOCOL:http|PORT:4501|PROXY_PORT:7280;"""
        log.info("Port mappings: %s" % port_mappings_str)
        if port_mappings_str is not None:
            port_mappings_array = port_mappings_str.split(";")
            if port_mappings_array:
                for port_mapping in port_mappings_array:
                    log.debug("port_mapping: %s" % port_mapping)
                    name_value_array = port_mapping.split("|")
                    protocol = name_value_array[1].split(":")[1]
                    port = name_value_array[2].split(":")[1]
                    if protocol == "https":
                        https_port = port
        # NOTE(review): if no https mapping is present, https_port stays
        # None and the string "None" is substituted into the SAML endpoint
        # and catalina-server.xml below - confirm mappings always include
        # an https entry.
        log.info("Kubernetes service port of wso2is management console https transport: %s" % https_port)
        saml_endpoint = "https://%s:%s/samlsso" % (member_hostname, https_port)
        saml_endpoint_property = {"key": "SAML_ENDPOINT", "values": [ saml_endpoint ]}
        mdsclient.put(saml_endpoint_property, app=True)
        log.info("Published property to metadata API: SAML_ENDPOINT: %s" % saml_endpoint)
        # start servers
        log.info("Starting WSO2 IS server")
        # set configurations
        carbon_replace_command = "sed -i \"s/CLUSTER_HOST_NAME/%s/g\" %s" % (member_hostname, "${CARBON_HOME}/repository/conf/carbon.xml")
        p = subprocess.Popen(carbon_replace_command, shell=True)
        # No pipes were requested, so communicate() only waits for exit;
        # output/errors are None and the exit status is not checked.
        output, errors = p.communicate()
        log.debug("Set carbon.xml hostname")
        catalina_replace_command = "sed -i \"s/STRATOS_IS_PROXY_PORT/%s/g\" %s" % (https_port, "${CARBON_HOME}/repository/conf/tomcat/catalina-server.xml")
        p = subprocess.Popen(catalina_replace_command, shell=True)
        output, errors = p.communicate()
        log.debug("Set catalina-server.xml proxy port")
        wso2is_start_command = "exec ${CARBON_HOME}/bin/wso2server.sh start"
        env_var = os.environ.copy()
        p = subprocess.Popen(wso2is_start_command, env=env_var, shell=True)
        output, errors = p.communicate()
        log.debug("WSO2 IS server started")
        log.info("wso2is metadata handler completed")
|
{
"content_hash": "00cb9a74636052e3d4232b39d6c2cac8",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 155,
"avg_line_length": 43.54545454545455,
"alnum_prop": 0.6194452728899493,
"repo_name": "pubudu538/stratos",
"id": "0a7a2d4845bcec9c2bbb4aeeb41fd328ba939f4d",
"size": "7490",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "tools/docker-images/cartridge-docker-images/service-images/wso2is-saml-sso/packs/plugins/WSO2ISMetaDataHandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "17184"
},
{
"name": "C",
"bytes": "27195"
},
{
"name": "CSS",
"bytes": "97339"
},
{
"name": "HTML",
"bytes": "153216"
},
{
"name": "Java",
"bytes": "6429326"
},
{
"name": "JavaScript",
"bytes": "3620546"
},
{
"name": "Python",
"bytes": "647261"
},
{
"name": "Ruby",
"bytes": "3546"
},
{
"name": "Shell",
"bytes": "130188"
}
],
"symlink_target": ""
}
|
import time
from vtdb import field_types
from vtdb import dbexceptions
from vtdb import tablet as tablet_conn
from vtdb import cursor
import framework
import nocache_cases
import environment
import utils
class TestNocache(framework.TestCase):
  def test_data(self):
    """Basic fetch: verify column metadata and row values, including NULLs
    and empty strings."""
    cu = self.env.execute("select * from vtocc_test where intval=1")
    self.assertEqual(cu.description, [('intval', 3), ('floatval', 4), ('charval', 253), ('binval', 253)])
    self.assertEqual(cu.rowcount, 1)
    self.assertEqual(cu.fetchone(), (1, 1.12345, "\xc2\xa2", "\x00\xff"))
    cu = self.env.execute("select * from vtocc_test where intval=2")
    self.assertEqual(cu.fetchone(), (2, None, '', None))
  def test_binary(self):
    """Binary round-trip: the same bytes inserted via a SQL-escaped literal
    and via a bind variable must read back identically."""
    self.env.conn.begin()
    binary_data = '\x00\'\"\b\n\r\t\x1a\\\x00\x0f\xf0\xff'
    # Row 4: bytes written as an escaped SQL string literal
    self.env.execute("insert into vtocc_test values(4, null, null, '\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\\x00\x0f\xf0\xff')")
    bvar = {'bindata': binary_data}
    # Row 5: the same bytes passed through a bind variable
    self.env.execute("insert into vtocc_test values(5, null, null, :bindata)", bvar)
    self.env.conn.commit()
    cu = self.env.execute("select * from vtocc_test where intval=4")
    self.assertEqual(cu.fetchone()[3], binary_data)
    cu = self.env.execute("select * from vtocc_test where intval=5")
    self.assertEqual(cu.fetchone()[3], binary_data)
    # Clean up the rows inserted above
    self.env.conn.begin()
    self.env.execute("delete from vtocc_test where intval in (4,5)")
    self.env.conn.commit()
  def test_simple_read(self):
    """A simple select must bump the total and PASS_SELECT query counters
    by exactly one."""
    vstart = self.env.debug_vars()
    cu = self.env.execute("select * from vtocc_test limit 2")
    vend = self.env.debug_vars()
    self.assertEqual(cu.rowcount, 2)
    self.assertEqual(vstart.mget("Queries.TotalCount", 0)+1, vend.Queries.TotalCount)
    self.assertEqual(vstart.mget("Queries.Histograms.PASS_SELECT.Count", 0)+1, vend.Queries.Histograms.PASS_SELECT.Count)
  def test_nocache_list_arg(self):
    """IN-clause list bind variables work for 1..n elements; an empty list
    is rejected with a DatabaseError."""
    cu = self.env.execute("select * from vtocc_test where intval in ::list", {"list": field_types.List([2, 3, 4])})
    self.assertEqual(cu.rowcount, 2)
    cu = self.env.execute("select * from vtocc_test where intval in ::list", {"list": field_types.List([3, 4])})
    self.assertEqual(cu.rowcount, 1)
    cu = self.env.execute("select * from vtocc_test where intval in ::list", {"list": field_types.List([3])})
    self.assertEqual(cu.rowcount, 1)
    with self.assertRaises(dbexceptions.DatabaseError):
      cu = self.env.execute("select * from vtocc_test where intval in ::list", {"list": field_types.List()})
  def test_commit(self):
    """Committed transactions are durable, logged to the txlog, and bump
    the transaction/query histogram counters."""
    vstart = self.env.debug_vars()
    self.env.txlog.reset()
    self.env.conn.begin()
    self.assertNotEqual(self.env.conn.transaction_id, 0)
    self.env.execute("insert into vtocc_test (intval, floatval, charval, binval) values(4, null, null, null)")
    self.env.conn.commit()
    # Give the txlog a moment to flush before reading it
    time.sleep(0.1)
    # txlog is tab-separated; field 4 is the decision, field 5 the statement
    txlog = self.env.txlog.read().split('\t')
    self.assertEqual(txlog[4], "commit")
    self.assertEqual(txlog[5], "insert into vtocc_test (intval, floatval, charval, binval) values(4, null, null, null)")
    cu = self.env.execute("select * from vtocc_test")
    self.assertEqual(cu.rowcount, 4)
    # Second transaction: remove the row again
    self.env.conn.begin()
    self.env.execute("delete from vtocc_test where intval=4")
    self.env.conn.commit()
    cu = self.env.execute("select * from vtocc_test")
    self.assertEqual(cu.rowcount, 3)
    vend = self.env.debug_vars()
    # Two transactions (insert + delete) completed during this test
    self.assertEqual(vstart.mget("Transactions.TotalCount", 0)+2, vend.Transactions.TotalCount)
    self.assertEqual(vstart.mget("Transactions.Histograms.Completed.Count", 0)+2, vend.Transactions.Histograms.Completed.Count)
    self.assertEqual(vstart.mget("Queries.TotalCount", 0)+8, vend.Queries.TotalCount)
    self.assertEqual(vstart.mget("Queries.Histograms.BEGIN.Count", 0)+2, vend.Queries.Histograms.BEGIN.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.COMMIT.Count", 0)+2, vend.Queries.Histograms.COMMIT.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.INSERT_PK.Count", 0)+1, vend.Queries.Histograms.INSERT_PK.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.DML_PK.Count", 0)+1, vend.Queries.Histograms.DML_PK.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.PASS_SELECT.Count", 0)+2, vend.Queries.Histograms.PASS_SELECT.Count)
  def test_integrity_error(self):
    """A duplicate-key insert raises IntegrityError and bumps the
    InfoErrors.DupKey counter."""
    vstart = self.env.debug_vars()
    self.env.conn.begin()
    try:
      # intval=1 already exists (see test_data), so this must fail
      self.env.execute("insert into vtocc_test values(1, null, null, null)")
    except dbexceptions.IntegrityError as e:
      self.assertContains(str(e), "error: duplicate")
    else:
      self.fail("Did not receive exception")
    finally:
      self.env.conn.rollback()
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("InfoErrors.DupKey", 0)+1, vend.InfoErrors.DupKey)
  def test_rollback(self):
    """A rolled-back insert is undone, logged as 'rollback' in the txlog,
    and counted in the Aborted histogram."""
    vstart = self.env.debug_vars()
    self.env.txlog.reset()
    self.env.conn.begin()
    self.assertNotEqual(self.env.conn.transaction_id, 0)
    self.env.execute("insert into vtocc_test values(4, null, null, null)")
    self.env.conn.rollback()
    # Give the txlog a moment to flush before reading it
    time.sleep(0.1)
    txlog = self.env.txlog.read().split('\t')
    self.assertEqual(txlog[4], "rollback")
    self.assertEqual(txlog[5], "insert into vtocc_test values(4, null, null, null)")
    cu = self.env.execute("select * from vtocc_test")
    # NOTE(review): rowcount 4 here presumably includes fixture rows only;
    # the rolled-back row must not be visible - confirm fixture size.
    self.assertEqual(cu.rowcount, 4)
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("Transactions.TotalCount", 0)+1, vend.Transactions.TotalCount)
    self.assertEqual(vstart.mget("Transactions.Histograms.Aborted.Count", 0)+1, vend.Transactions.Histograms.Aborted.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.BEGIN.Count", 0)+1, vend.Queries.Histograms.BEGIN.Count)
    self.assertEqual(vstart.mget("Queries.Histograms.ROLLBACK.Count", 0)+1, vend.Queries.Histograms.ROLLBACK.Count)
  def test_nontx_dml(self):
    """A DML executed outside a transaction returns an empty description."""
    # NOTE(review): vstart/vend are captured but never asserted on here.
    vstart = self.env.debug_vars()
    results = self.env.execute("insert into vtocc_test values(444, null, null, null)")
    vend = self.env.debug_vars()
    self.assertEqual(results.description, [])
  def test_trailing_comment(self):
    """Trailing /* ... */ comments must not create new query-cache entries:
    the query plan is keyed on the comment-stripped statement."""
    vstart = self.env.debug_vars()
    bv={'ival': 1}
    self.env.execute("select * from vtocc_test where intval=:ival", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("QueryCacheLength", 0)+1, vend.QueryCacheLength)
    # This should not increase the query cache size
    self.env.execute("select * from vtocc_test where intval=:ival /* trailing comment */", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("QueryCacheLength", 0)+1, vend.QueryCacheLength)
    # This should also not increase the query cache size
    self.env.execute("select * from vtocc_test where intval=:ival /* trailing comment1 */ /* comment2 */", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("QueryCacheLength", 0)+1, vend.QueryCacheLength)
  def test_complex_dmls(self):
    """In strict mode, DMLs with non-constant PK values or value-dependent
    ON DUPLICATE KEY UPDATE clauses are rejected with DatabaseError."""
    self.env.conn.begin()
    try:
      # PK value computed by an expression
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("insert into vtocc_a(eid, id, name, foo) values (7, 1+1, '', '')")
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("insert into vtocc_d(eid, id) values (1, 1)")
      # PK updated to an expression
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("update vtocc_a set eid = 1+1 where eid = 1 and id = 1")
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("insert into vtocc_d(eid, id) values (1, 1)")
      self.env.execute("delete from upsert_test")
      # Multi-row upsert touching the PK column
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("insert into upsert_test(id1, id2) values (1, 1), (2, 2) on duplicate key update id1 = 1")
      self.env.execute("delete from upsert_test")
      # Upsert fed by a subquery
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("insert into upsert_test(id1, id2) select eid, id from vtocc_a limit 1 on duplicate key update id2 = id1")
      self.env.execute("delete from upsert_test")
      # Upsert setting the PK to an expression
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("insert into upsert_test(id1, id2) values (1, 1) on duplicate key update id1 = 2+1")
      self.env.execute("delete from upsert_test")
      with self.assertRaises(dbexceptions.DatabaseError):
        self.env.execute("insert into upsert_test(id1, id2) values (1, 1)")
        self.env.execute("insert into upsert_test(id1, id2) values (2, 1) on duplicate key update id2 = 2")
    finally:
      # Nothing above should persist
      self.env.conn.rollback()
  def test_pass_dml(self):
    """With vt_strict_mode=0, the complex DMLs rejected in strict mode are
    passed through unchanged."""
    self.env.execute("set vt_strict_mode=0")
    self.env.conn.begin()
    try:
      # All of these would raise under strict mode (see test_complex_dmls)
      self.env.execute("insert into vtocc_a(eid, id, name, foo) values (7, 1+1, '', '')")
      self.env.execute("insert into vtocc_d(eid, id) values (1, 1)")
      self.env.execute("insert into vtocc_a(eid, id, name, foo) values (8, 2, '', '') on duplicate key update id = 2+1")
      self.env.execute("update vtocc_a set eid = 1+1 where eid = 1 and id = 1")
      self.env.execute("insert into vtocc_d(eid, id) values (1, 1)")
      self.env.execute("update vtocc_a set eid = :eid where eid = 1 and id = 1", {"eid": 3})
    finally:
      # Undo all changes and restore strict mode
      self.env.conn.rollback()
      self.env.execute("set vt_strict_mode=1")
  def test_select_lock(self):
    """Locking selects (FOR UPDATE / LOCK IN SHARE MODE) are disallowed
    outside a transaction but permitted inside one."""
    for lock_mode in ['for update', 'lock in share mode']:
      # Outside a transaction: must be rejected
      try:
        self.env.execute("select * from vtocc_test where intval=2 %s" % lock_mode)
      except dbexceptions.DatabaseError as e:
        self.assertContains(str(e), "error: Disallowed")
      else:
        self.fail("Did not receive exception")
      # If these throw no exceptions, we're good
      self.env.conn.begin()
      self.env.execute("select * from vtocc_test where intval=2 %s" % lock_mode)
      self.env.conn.commit()
      # Make sure the row is not locked for read
      self.env.execute("select * from vtocc_test where intval=2")
  def test_pool_size(self):
    """Shrinking vt_pool_size to 1 makes a second concurrent query fail and
    records a pool wait; restoring the size updates ConnPoolCapacity."""
    vstart = self.env.debug_vars()
    self.env.execute("set vt_pool_size=1")
    # The sleeping query holds the only connection, so this fails
    self.assertRaises(dbexceptions.DatabaseError, self.env.execute, "select sleep(3) from dual")
    self.env.execute("select 1 from dual")
    vend = self.env.debug_vars()
    self.assertEqual(vend.ConnPoolCapacity, 1)
    self.assertEqual(vstart.ConnPoolWaitCount+1, vend.ConnPoolWaitCount)
    # Restore the default pool size
    self.env.execute("set vt_pool_size=16")
    vend = self.env.debug_vars()
    self.assertEqual(vend.ConnPoolCapacity, 16)
def test_transaction_cap(self):
self.env.execute("set vt_transaction_cap=1")
self.env.execute("set vt_txpool_timeout=0.5")
vstart = self.env.debug_vars()
self.assertEqual(vstart.TransactionPoolPoolTimeout, 5e8)
co2 = self.env.connect()
self.env.conn.begin()
try:
cu2 = cursor.TabletCursor(co2)
start = time.time()
co2.begin()
except dbexceptions.DatabaseError as e:
self.assertContains(str(e), "tx_pool_full")
self.assertTrue(time.time()-start >= 0.4)
else:
self.fail("Did not receive exception")
finally:
cu2.close()
co2.close()
self.env.conn.commit()
vend = self.env.debug_vars()
self.assertEqual(vend.TransactionPoolCapacity, 1)
self.env.execute("set vt_transaction_cap=20")
self.env.execute("set vt_txpool_timeout=1")
vend = self.env.debug_vars()
self.assertEqual(vend.TransactionPoolCapacity, 20)
self.assertEqual(vend.TransactionPoolPoolTimeout, 1e9)
self.assertEqual(vstart.mget("Errors.TxPoolFull", 0) + 1, vend.Errors.TxPoolFull)
  def test_transaction_timeout(self):
    """An idle transaction exceeding vt_transaction_timeout is killed:
    commit fails with not_in_tx, the txlog records 'kill', and the
    Kills.Transactions counter bumps."""
    self.env.execute("set vt_transaction_timeout=0.25")
    # wait for any pending transactions to timeout
    time.sleep(0.3)
    vstart = self.env.debug_vars()
    self.env.txlog.reset()
    self.env.conn.begin()
    vmid = self.env.debug_vars()
    # The open transaction occupies one pool slot
    self.assertEqual(vstart.TransactionPoolAvailable, vmid.TransactionPoolAvailable+1)
    # Sleep past the 0.25s timeout so the transaction gets killed
    time.sleep(0.3)
    try:
      self.env.conn.commit()
    except dbexceptions.DatabaseError as e:
      self.assertContains(str(e), "not_in_tx: Transaction")
    else:
      self.fail("Did not receive exception")
    time.sleep(0.1)
    txlog = self.env.txlog.read().split('\t')
    self.assertEqual(txlog[4], "kill")
    vend = self.env.debug_vars()
    self.assertEqual(vstart.TransactionPoolAvailable, vend.TransactionPoolAvailable)
    self.assertEqual(vend.TransactionPoolTimeout, 250000000)
    self.assertEqual(vstart.mget("Kills.Transactions", 0)+1, vend.Kills.Transactions)
    # Restore the default timeout
    self.env.execute("set vt_transaction_timeout=30")
    vend = self.env.debug_vars()
    self.assertEqual(vend.TransactionPoolTimeout, 30000000000)
  def test_query_cache(self):
    """Query plans are cached per normalized statement; the cache is
    bounded by vt_query_cache_size (size 1 evicts the older plan)."""
    self.env.execute("set vt_query_cache_size=1")
    bv={'ival1': 1, 'ival2': 1}
    # Two distinct statements, but the cache holds only one plan
    self.env.execute("select * from vtocc_test where intval=:ival1", bv)
    self.env.execute("select * from vtocc_test where intval=:ival2", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vend.QueryCacheLength, 1)
    self.assertEqual(vend.QueryCacheSize, 1)
    self.assertEqual(vend.QueryCacheCapacity, 1)
    # With a larger cache both bind-var variants coexist
    self.env.execute("set vt_query_cache_size=5000")
    self.env.execute("select * from vtocc_test where intval=:ival1", bv)
    vend = self.env.debug_vars()
    self.assertEqual(vend.QueryCacheLength, 2)
    self.assertEqual(vend.QueryCacheSize, 2)
    self.assertEqual(vend.QueryCacheCapacity, 5000)
    # A literal (non-bind-var) form is a third distinct plan
    self.env.execute("select * from vtocc_test where intval=1")
    vend = self.env.debug_vars()
    self.assertEqual(vend.QueryCacheLength, 3)
    self.assertEqual(vend.QueryCacheSize, 3)
    self.assertEqual(vend.QueryCacheCapacity, 5000)
  def test_schema_reload_time(self):
    """A table created directly in MySQL becomes visible after the periodic
    schema reload; polls up to ~10s for the reload to happen."""
    vend = self.env.debug_vars()
    self.assertEqual(vend.SchemaReloadTime, 1800 * 1e9)
    mcu = self.env.mysql_conn.cursor()
    # Create the table behind the tablet server's back
    mcu.execute("create table vtocc_temp(intval int)")
    # This should cause a reload
    self.env.execute("set vt_schema_reload_time=600")
    vend = self.env.debug_vars()
    self.assertEqual(vend.SchemaReloadTime, 600 * 1e9)
    try:
      # Poll until the reload picks up the new table (max ~10s)
      for i in range(10):
        try:
          self.env.execute("select * from vtocc_temp")
        except dbexceptions.DatabaseError as e:
          self.assertContains(str(e), "not found in schema")
          time.sleep(1)
        else:
          break
      # Should not throw an exception
      self.env.execute("select * from vtocc_temp")
    finally:
      mcu.execute("drop table vtocc_temp")
      mcu.close()
  def test_max_result_size(self):
    """Queries returning more than vt_max_result_size rows are rejected."""
    self.env.execute("set vt_max_result_size=2")
    vend = self.env.debug_vars()
    self.assertEqual(vend.MaxResultSize, 2)
    try:
      # vtocc_test has more than 2 rows, so this must fail
      self.env.execute("select * from vtocc_test")
    except dbexceptions.DatabaseError as e:
      self.assertContains(str(e), "error: Row")
    else:
      self.fail("Did not receive exception")
    # Restore the default limit
    self.env.execute("set vt_max_result_size=10000")
    vend = self.env.debug_vars()
    self.assertEqual(vend.MaxResultSize, 10000)
  def test_max_dml_rows(self):
    """Subquery-resolved DMLs are rewritten into explicit-PK statements and
    split into chunks of at most vt_max_dml_rows rows each; the rewritten
    SQL is verified through the query log."""
    self.env.conn.begin()
    self.env.execute("insert into vtocc_a(eid, id, name, foo) values (3, 1, '', ''), (3, 2, '', ''), (3, 3, '', '')")
    self.env.conn.commit()
    # Verify all three rows are updated in a single DML.
    self.env.querylog.reset()
    self.env.conn.begin()
    self.env.execute("update vtocc_a set foo='fghi' where eid = 3")
    self.env.conn.commit()
    log = self.env.querylog.tailer.read()
    self.assertContains(log, "update vtocc_a set foo = 'fghi' where (eid = 3 and id = 1) or (eid = 3 and id = 2) or (eid = 3 and id = 3) /* _stream vtocc_a (eid id ) (3 1 ) (3 2 ) (3 3 )")
    # Verify that rows get split, and if pk changes, those values are also
    # split correctly.
    self.env.execute("set vt_max_dml_rows=2")
    self.env.querylog.reset()
    self.env.conn.begin()
    self.env.execute("update vtocc_a set eid=2 where eid = 3")
    self.env.conn.commit()
    log = self.env.querylog.tailer.read()
    self.assertContains(log, "update vtocc_a set eid = 2 where (eid = 3 and id = 1) or (eid = 3 and id = 2) /* _stream vtocc_a (eid id ) (3 1 ) (3 2 ) (2 1 ) (2 2 )")
    self.assertContains(log, "update vtocc_a set eid = 2 where (eid = 3 and id = 3) /* _stream vtocc_a (eid id ) (3 3 ) (2 3 )")
    # Verify that a normal update get split correctly.
    self.env.querylog.reset()
    self.env.conn.begin()
    self.env.execute("update vtocc_a set foo='fghi' where eid = 2")
    self.env.conn.commit()
    log = self.env.querylog.tailer.read()
    self.assertContains(log, "update vtocc_a set foo = 'fghi' where (eid = 2 and id = 1) or (eid = 2 and id = 2) /* _stream vtocc_a (eid id ) (2 1 ) (2 2 )")
    self.assertContains(log, "update vtocc_a set foo = 'fghi' where (eid = 2 and id = 3) /* _stream vtocc_a (eid id ) (2 3 )")
    # Verify that a delete get split correctly.
    self.env.querylog.reset()
    self.env.conn.begin()
    self.env.execute("delete from vtocc_a where eid = 2")
    self.env.conn.commit()
    log = self.env.querylog.tailer.read()
    self.assertContains(log, "delete from vtocc_a where (eid = 2 and id = 1) or (eid = 2 and id = 2) /* _stream vtocc_a (eid id ) (2 1 ) (2 2 )")
    self.assertContains(log, "delete from vtocc_a where (eid = 2 and id = 3) /* _stream vtocc_a (eid id ) (2 3 )")
    # Reset vt_max_dml_rows
    self.env.execute("set vt_max_dml_rows=500")
  def test_query_timeout(self):
    """A query exceeding vt_query_timeout is killed; the enclosing
    transaction is also invalidated, and Kills.Queries bumps."""
    vstart = self.env.debug_vars()
    conn = tablet_conn.connect(self.env.address, '', 'test_keyspace', '0', 5, user='youtube-dev-dedicated', password='vtpass')
    cu = cursor.TabletCursor(conn)
    self.env.execute("set vt_query_timeout=0.25")
    try:
      conn.begin()
      # sleep(0.5) > 0.25s timeout, so this query gets killed
      cu.execute("select sleep(0.5) from vtocc_test", {})
    except dbexceptions.DatabaseError as e:
      if "error: Query" not in str(e) and "error: the query was killed" not in str(e):
        self.fail("Query not killed as expected")
    else:
      self.fail("Did not receive exception")
    # The kill also invalidated the transaction
    try:
      cu.execute("select 1 from dual", {})
    except dbexceptions.DatabaseError as e:
      self.assertContains(str(e), "not_in_tx: Transaction")
    else:
      self.fail("Did not receive exception")
    cu.close()
    conn.close()
    vend = self.env.debug_vars()
    self.assertEqual(vend.QueryTimeout, 250000000)
    self.assertEqual(vstart.mget("Kills.Queries", 0)+1, vend.Kills.Queries)
    # Restore the default timeout
    self.env.execute("set vt_query_timeout=30")
    vend = self.env.debug_vars()
    self.assertEqual(vend.QueryTimeout, 30000000000)
  def test_idle_timeout(self):
    """vt_idle_timeout applies to both the connection pool and the
    transaction pool; queries still work after idle connections expire."""
    self.env.execute("set vt_idle_timeout=1")
    # Let pooled connections go idle past the 1s timeout
    time.sleep(2)
    self.env.execute("select 1 from dual")
    vend = self.env.debug_vars()
    self.assertEqual(vend.ConnPoolIdleTimeout, 1000000000)
    self.assertEqual(vend.TransactionPoolIdleTimeout, 1000000000)
    # Restore the default idle timeout
    self.env.execute("set vt_idle_timeout=1800")
    vend = self.env.debug_vars()
    self.assertEqual(vend.ConnPoolIdleTimeout, 1800000000000)
    self.assertEqual(vend.TransactionPoolIdleTimeout, 1800000000000)
  def test_consolidation(self):
    """Two identical in-flight queries are consolidated: the second waits
    on the first's result, recorded in the Consolidations histogram."""
    vstart = self.env.debug_vars()
    # The first call always does a full fetch for field info
    self.assertRaises(dbexceptions.DatabaseError, self.env.execute, "select sleep(3) from dual")
    time.sleep(2)
    # Fire the same slow query twice; the second should consolidate
    for i in range(2):
      try:
        self.env.execute("select sleep(3) from dual")
      except dbexceptions.OperationalError:
        # Expected: the slow query times out / errors; consolidation
        # accounting is what this test checks
        pass
    vend = self.env.debug_vars()
    self.assertEqual(vstart.mget("Waits.TotalCount", 0)+1, vend.Waits.TotalCount)
    self.assertEqual(vstart.mget("Waits.Histograms.Consolidations.Count", 0)+1, vend.Waits.Histograms.Consolidations.Count)
  def test_batch(self):
    """Batched execution works in and out of transactions; as_transaction
    batches cannot be nested inside an open transaction."""
    queries = ["select * from vtocc_a where id = :a", "select * from vtocc_b where id = :b"]
    bvars = [{"a":2}, {"b":2}]
    results = self.env.conn._execute_batch(queries, bvars, False)
    self.assertEqual(results, [([(1L, 2L, 'bcde', 'fghi')], 1, 0, [('eid', 8), ('id', 3), ('name', 253), ('foo', 253)]), ([(1L, 2L)], 1, 0, [('eid', 8), ('id', 3)])])
    # Not in transaction, as_transaction false
    queries = [
        "insert into vtocc_test (intval, floatval, charval, binval) values(4, null, null, null)",
        "select * from vtocc_test where intval = 4",
        "delete from vtocc_test where intval = 4",
        ]
    results = self.env.conn._execute_batch(queries, [{}, {}, {}], False)
    self.assertEqual(results[1][0], [(4L, None, None, None)])
    # In transaction, as_transaction false
    self.env.conn.begin()
    results = self.env.conn._execute_batch(queries, [{}, {}, {}], False)
    self.assertEqual(results[1][0], [(4L, None, None, None)])
    self.env.conn.commit()
    # In transaction, as_transaction true
    self.env.conn.begin()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*cannot start a new transaction.*'):
      self.env.conn._execute_batch(queries, [{}, {}, {}], True)
    self.env.conn.rollback()
    # Not in transaction, as_transaction true
    results = self.env.conn._execute_batch(queries, [{}, {}, {}], True)
    self.assertEqual(results[1][0], [(4L, None, None, None)])
  def test_bind_in_select(self):
    """A bind variable in the select list is typed from its value
    (int vs string) and echoed in the column description."""
    bv = {'bv': 1}
    cu = self.env.execute('select :bv from vtocc_test', bv)
    self.assertEqual(cu.description, [('1', 8)])
    bv = {'bv': 'abcd'}
    cu = self.env.execute('select :bv from vtocc_test', bv)
    self.assertEqual(cu.description, [('abcd', 253)])
  def test_types(self):
    """Type checking on DMLs: mismatched literal/bind-var types, overlong
    strings and out-of-range ints are rejected, for direct values and for
    insert-select column pairings."""
    self._verify_mismatch("insert into vtocc_ints(tiny) values('str')")
    self._verify_mismatch("insert into vtocc_ints(tiny) values(:str)", {"str": "str"})
    self._verify_mismatch("insert into vtocc_ints(tiny) values(1.2)")
    self._verify_mismatch("insert into vtocc_ints(tiny) values(:fl)", {"fl": 1.2})
    self._verify_mismatch("insert into vtocc_strings(vb) values(1)")
    self._verify_mismatch("insert into vtocc_strings(vb) values(:id)", {"id": 1})
    self._verify_error("insert into vtocc_strings(vb) values('12345678901234567')", None, "error: Data too long")
    self._verify_error("insert into vtocc_ints(tiny) values(-129)", None, "error: Out of range")
    try:
      # Seed rows so the insert-select cases below have data to move
      self.env.conn.begin()
      self.env.execute("insert into vtocc_ints(tiny, medium) values(1, -129)")
      self.env.execute("insert into vtocc_fracts(id, num) values(1, 1)")
      self.env.execute("insert into vtocc_strings(vb) values('a')")
      self.env.conn.commit()
      self._verify_mismatch("insert into vtocc_strings(vb) select tiny from vtocc_ints")
      self._verify_mismatch("insert into vtocc_ints(tiny) select num from vtocc_fracts")
      self._verify_mismatch("insert into vtocc_ints(tiny) select vb from vtocc_strings")
      self._verify_error("insert into vtocc_ints(tiny) select medium from vtocc_ints", None, "error: Out of range")
    finally:
      # Remove the seed rows
      self.env.conn.begin()
      self.env.execute("delete from vtocc_ints")
      self.env.execute("delete from vtocc_fracts")
      self.env.execute("delete from vtocc_strings")
      self.env.conn.commit()
  def test_customrules(self):
    """A custom query rule blocks the :asdfg bind variable; for vttablet on
    zookeeper, changing and restoring the rule dynamically is also tested
    (with sleeps to let the rule change propagate)."""
    bv = {'asdfg': 1}
    try:
      self.env.execute("select * from vtocc_test where intval=:asdfg", bv)
      self.fail("Bindvar asdfg should not be allowed by custom rule")
    except dbexceptions.DatabaseError as e:
      self.assertContains(str(e), "error: Query disallowed")
    # Test dynamic custom rule for vttablet
    if self.env.env == "vttablet":
      if environment.topo_server().flavor() == 'zookeeper':
        # Make a change to the rule
        self.env.change_customrules()
        time.sleep(3)
        try:
          self.env.execute("select * from vtocc_test where intval=:asdfg", bv)
        except dbexceptions.DatabaseError as e:
          self.fail("Bindvar asdfg should be allowed after a change of custom rule, Err=" + str(e))
        # Restore the rule
        self.env.restore_customrules()
        time.sleep(3)
        try:
          self.env.execute("select * from vtocc_test where intval=:asdfg", bv)
          self.fail("Bindvar asdfg should not be allowed by custom rule")
        except dbexceptions.DatabaseError as e:
          self.assertContains(str(e), "error: Query disallowed")
  def test_health(self):
    """The health endpoint reports "ok"."""
    self.assertEqual(self.env.health(), "ok")
def test_query_stats(self):
bv = {'eid': 1}
self.env.execute("select eid as query_stats from vtocc_a where eid = :eid", bv)
self._verify_query_stats(self.env.query_stats(), "select eid as query_stats from vtocc_a where eid = :eid", "vtocc_a", "PASS_SELECT", 1, 2, 0)
tstartQueryCounts = self._get_vars_query_stats(self.env.debug_vars()["QueryCounts"], "vtocc_a", "PASS_SELECT")
tstartRowCounts = self._get_vars_query_stats(self.env.debug_vars()["QueryRowCounts"], "vtocc_a", "PASS_SELECT")
tstartErrorCounts = self._get_vars_query_stats(self.env.debug_vars()["QueryErrorCounts"], "vtocc_a", "PASS_SELECT")
tstartTimesNs = self._get_vars_query_stats(self.env.debug_vars()["QueryTimesNs"], "vtocc_a", "PASS_SELECT")
self.assertEqual(tstartQueryCounts, 1)
self.assertEqual(tstartRowCounts, 2)
self.assertEqual(tstartErrorCounts, 0)
self.assertTrue(tstartTimesNs > 0)
try:
self.env.execute("select eid as query_stats from vtocc_a where dontexist(eid) = :eid", bv)
except dbexceptions.DatabaseError:
pass
else:
self.fail("Did not receive exception: " + query)
self._verify_query_stats(self.env.query_stats(), "select eid as query_stats from vtocc_a where dontexist(eid) = :eid", "vtocc_a", "PASS_SELECT", 1, 0, 1)
tendQueryCounts = self._get_vars_query_stats(self.env.debug_vars()["QueryCounts"], "vtocc_a", "PASS_SELECT")
tendRowCounts = self._get_vars_query_stats(self.env.debug_vars()["QueryRowCounts"], "vtocc_a", "PASS_SELECT")
tendErrorCounts = self._get_vars_query_stats(self.env.debug_vars()["QueryErrorCounts"], "vtocc_a", "PASS_SELECT")
tendTimesNs = self._get_vars_query_stats(self.env.debug_vars()["QueryTimesNs"], "vtocc_a", "PASS_SELECT")
self.assertEqual(tstartQueryCounts+1, tendQueryCounts)
self.assertEqual(tstartRowCounts, tendRowCounts)
self.assertEqual(tstartErrorCounts+1, tendErrorCounts)
self.assertTrue((tendTimesNs - tstartTimesNs) > 0)
  def test_other(self):
    """Non-SELECT informational statements (show/describe/explain) are
    passed through and return result sets."""
    cu = self.env.execute("show variables like 'version'")
    for v in cu:
      self.assertEqual(v[0], 'version')
    cu = self.env.execute("describe vtocc_a")
    self.assertEqual(cu.rowcount, 4)
    cu = self.env.execute("explain vtocc_a")
    self.assertEqual(cu.rowcount, 4)
  def _verify_mismatch(self, query, bindvars=None):
    """Assert that `query` fails with a type-mismatch DatabaseError."""
    self._verify_error(query, bindvars, "error: type mismatch")
  def _verify_error(self, query, bindvars, err):
    """Run `query` in a throwaway transaction and assert it raises a
    DatabaseError whose message contains `err`; always rolls back."""
    self.env.conn.begin()
    try:
      self.env.execute(query, bindvars)
    except dbexceptions.DatabaseError as e:
      self.assertContains(str(e), err)
    else:
      self.fail("Did not receive exception: " + query)
    finally:
      self.env.conn.rollback()
def _get_vars_query_stats(self, query_stats, table, plan):
return query_stats[table + "." + plan]
def _verify_query_stats(self, query_stats, query, table, plan, count, rows, errors):
for stat in query_stats:
if stat["Query"] != query:
continue
self.assertEqual(stat["Table"], table)
self.assertEqual(stat["Plan"], plan)
self.assertEqual(stat["QueryCount"], count)
self.assertEqual(stat["RowCount"], rows)
self.assertEqual(stat["ErrorCount"], errors)
self.assertTrue(stat["Time"] > 0)
return
self.fail("query %s not found" % query)
  def test_sqls(self):
    """Run the shared nocache case suite and fail if any case errored."""
    error_count = self.env.run_cases(nocache_cases.cases)
    if error_count != 0:
      self.fail("test_execution errors: %d"%(error_count))
  def test_table_acl_no_access(self):
    """ACL 'no access': select, delete, DDL and streaming select are all
    rejected with a table acl error."""
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
      self.env.execute("select * from vtocc_acl_no_access where key1=1")
    self.env.conn.begin()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
      self.env.execute("delete from vtocc_acl_no_access where key1=1")
    self.env.conn.commit()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
      self.env.execute("alter table vtocc_acl_no_access comment 'comment'")
    cu = cursor.StreamCursor(self.env.conn)
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
      cu.execute("select * from vtocc_acl_no_access where key1=1", {})
    cu.close()
def test_table_acl_read_only(self):
    """Read-only ACL: selects and streaming reads succeed; DML/DDL fail."""
    self.env.execute("select * from vtocc_acl_read_only where key1=1")
    self.env.conn.begin()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
        self.env.execute("delete from vtocc_acl_read_only where key1=1")
    self.env.conn.commit()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
        self.env.execute("alter table vtocc_acl_read_only comment 'comment'")
    # Streaming read path must also be allowed for read-only access.
    cu = cursor.StreamCursor(self.env.conn)
    cu.execute("select * from vtocc_acl_read_only where key1=1", {})
    cu.fetchall()
    cu.close()
def test_table_acl_read_write(self):
    """Read-write ACL: selects and DML succeed; DDL still fails."""
    self.env.execute("select * from vtocc_acl_read_write where key1=1")
    self.env.conn.begin()
    self.env.execute("delete from vtocc_acl_read_write where key1=1")
    self.env.conn.commit()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
        self.env.execute("alter table vtocc_acl_read_write comment 'comment'")
    cu = cursor.StreamCursor(self.env.conn)
    cu.execute("select * from vtocc_acl_read_write where key1=1", {})
    cu.fetchall()
    cu.close()
def test_table_acl_admin(self):
    """Admin ACL: selects, DML and DDL are all permitted."""
    self.env.execute("select * from vtocc_acl_admin where key1=1")
    self.env.conn.begin()
    self.env.execute("delete from vtocc_acl_admin where key1=1")
    self.env.conn.commit()
    self.env.execute("alter table vtocc_acl_admin comment 'comment'")
    cu = cursor.StreamCursor(self.env.conn)
    cu.execute("select * from vtocc_acl_admin where key1=1", {})
    cu.fetchall()
    cu.close()
def test_table_acl_unmatched(self):
    """A table matched by no ACL entry allows everything."""
    self.env.execute("select * from vtocc_acl_unmatched where key1=1")
    self.env.conn.begin()
    self.env.execute("delete from vtocc_acl_unmatched where key1=1")
    self.env.conn.commit()
    self.env.execute("alter table vtocc_acl_unmatched comment 'comment'")
    cu = cursor.StreamCursor(self.env.conn)
    cu.execute("select * from vtocc_acl_unmatched where key1=1", {})
    cu.fetchall()
    cu.close()
def test_table_acl_all_user_read_only(self):
    """Wildcard-user read-only ACL: reads succeed; DML/DDL fail."""
    self.env.execute("select * from vtocc_acl_all_user_read_only where key1=1")
    self.env.conn.begin()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
        self.env.execute("delete from vtocc_acl_all_user_read_only where key1=1")
    self.env.conn.commit()
    with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*table acl error.*'):
        self.env.execute("alter table vtocc_acl_all_user_read_only comment 'comment'")
    cu = cursor.StreamCursor(self.env.conn)
    cu.execute("select * from vtocc_acl_all_user_read_only where key1=1", {})
    cu.fetchall()
    cu.close()
# This is a super-slow test. Uncomment and test if you change
# the server-side reconnect logic.
#def test_server_reconnect(self):
# self.env.execute("set vt_pool_size=1")
# self.env.execute("select * from vtocc_test limit :l", {"l": 1})
# self.env.tablet.shutdown_mysql()
# time.sleep(5)
# self.env.tablet.start_mysql()
# time.sleep(5)
# self.env.execute("select * from vtocc_test limit :l", {"l": 1})
# self.env.conn.begin()
# self.env.tablet.shutdown_mysql()
# time.sleep(5)
# self.env.tablet.start_mysql()
# time.sleep(5)
# with self.assertRaisesRegexp(dbexceptions.DatabaseError, ".*server has gone away.*"):
# self.env.execute("select * from vtocc_test limit :l", {"l": 1})
# self.env.conn.rollback()
# self.env.execute("set vt_pool_size=16")
# Super-slow test.
#def test_mysql_shutdown(self):
# self.env.execute("select * from vtocc_test limit :l", {"l": 1})
# self.env.tablet.shutdown_mysql()
# time.sleep(5)
# with self.assertRaisesRegexp(dbexceptions.DatabaseError, '.*state NOT_SERVING.*'):
# self.env.execute("select * from vtocc_test limit :l", {"l": 1})
# self.env.tablet.start_mysql()
# time.sleep(5)
|
{
"content_hash": "42475eee0583afc04f01b2e8eb3377f6",
"timestamp": "",
"source": "github",
"line_count": 698,
"max_line_length": 188,
"avg_line_length": 46.289398280802295,
"alnum_prop": 0.6718043949241721,
"repo_name": "skyportsystems/vitess",
"id": "f76b6ec8522c04ae502a65b6b77ada2ad3025d37",
"size": "32310",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/queryservice_tests/nocache_tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "40319"
},
{
"name": "CSS",
"bytes": "80182"
},
{
"name": "Go",
"bytes": "4581355"
},
{
"name": "HTML",
"bytes": "81084"
},
{
"name": "Java",
"bytes": "252526"
},
{
"name": "JavaScript",
"bytes": "66316"
},
{
"name": "Liquid",
"bytes": "18108"
},
{
"name": "Makefile",
"bytes": "7544"
},
{
"name": "PHP",
"bytes": "7167"
},
{
"name": "PLpgSQL",
"bytes": "10070"
},
{
"name": "Protocol Buffer",
"bytes": "62086"
},
{
"name": "Python",
"bytes": "955569"
},
{
"name": "Ruby",
"bytes": "465"
},
{
"name": "Shell",
"bytes": "24467"
},
{
"name": "Yacc",
"bytes": "19014"
}
],
"symlink_target": ""
}
|
from netmiko.cisco_base_connection import CiscoSSHConnection
class CiscoS300Base(CiscoSSHConnection):
    """
    Support for Cisco SG300 series of devices.

    Note, must configure the following to disable SG300 from prompting for username twice:

    configure terminal
    ip ssh password-auth
    """

    def session_preparation(self) -> None:
        """Prepare the session after the connection has been established."""
        # SG300 output contains ANSI escape sequences that must be stripped.
        self.ansi_escape_codes = True
        self._test_channel_read(pattern=r"[>#]")
        self.set_base_prompt()
        # Widen the terminal so long lines are not wrapped by the device.
        self.set_terminal_width(command="terminal width 511", pattern="terminal")
        # "terminal datadump" disables output paging on this platform.
        self.disable_paging(command="terminal datadump")

    def save_config(
        self,
        cmd: str = "write memory",
        confirm: bool = True,
        confirm_response: str = "Y",
    ) -> str:
        """Save the running configuration; the device asks for confirmation,
        which is answered with "Y"."""
        return super().save_config(
            cmd=cmd, confirm=confirm, confirm_response=confirm_response
        )
class CiscoS300SSH(CiscoS300Base):
    """SSH driver for the SG300 series; all behavior comes from the base class."""
    pass
class CiscoS300Telnet(CiscoS300Base):
    """
    Support for Cisco SG300 series of devices, with telnet.
    Note: can be used with Sx200 series, with telnet enabled.
    """
    # No overrides: telnet reuses the session preparation and save_config
    # defined in CiscoS300Base.
    pass
|
{
"content_hash": "11b36e9840d3eecdb7150d2001374177",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 90,
"avg_line_length": 27.674418604651162,
"alnum_prop": 0.6563025210084034,
"repo_name": "ktbyers/netmiko",
"id": "1058c4a3b5a34f435665f9e3d1f33f23a46c29f9",
"size": "1190",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "netmiko/cisco/cisco_s300.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "384"
},
{
"name": "Python",
"bytes": "726727"
},
{
"name": "Shell",
"bytes": "21540"
}
],
"symlink_target": ""
}
|
import traceback
import sublime
import sublime_plugin
from anaconda_go.lib import go
from anaconda_go.lib.plugin import typing
from anaconda_go.lib.helpers import get_settings, get_scope
from anaconda_go.lib.plugin import Worker, Callback, ExplorerPanel, is_code
class AnacondaGoCallers(sublime_plugin.TextCommand):
    """Execute guru and show callers of any returned definition.
    """

    def run(self, edit: sublime.Edit) -> None:
        """Collect the buffer, cursor offset and Go environment, then
        dispatch a guru 'callers' query to the anaGonda worker."""
        try:
            view = self.view
            scope = get_settings(view, 'anaconda_go_guru_scope')
            row, col = view.rowcol(view.sel()[0].begin())
            offset = view.text_point(row, col)
            code = view.substr(sublime.Region(0, view.size()))
            data = {
                'vid': view.id(),
                'scope': scope if scope is not None else get_scope(view, go.GOPATH),  # noqa
                'path': view.file_name(),
                'offset': offset,
                'modified_buffer': self._modified_buffer(view, code),
                'go_env': {
                    'GOROOT': go.GOROOT,
                    'GOPATH': go.GOPATH,
                    'CGO_ENABLED': go.CGO_ENABLED
                },
                'method': 'callers',
                'handler': 'anaGonda'
            }
            Worker().execute(
                Callback(
                    on_success=self._on_success,
                    on_failure=self._on_failure,
                    on_timeout=self._on_timeout
                ),
                **data
            )
        except Exception:
            print('anaconda_go: callers error')
            # BUG FIX: traceback.print_exc() prints the traceback itself and
            # returns None; wrapping it in print() emitted a stray "None".
            traceback.print_exc()

    def is_enabled(self) -> bool:
        """Determine if this command is enabled or not
        """
        if len(sublime.active_window().views()) == 0:
            return False

        if not go.ANAGONDA_PRESENT:
            return False

        return is_code(self.view, lang='go')

    def _on_success(self, data):
        """Process result and normalize it for anaconda's goto
        """
        if not data['result']:
            sublime.status_message('Symbol not found...')
            return

        callers = []
        for result in data['result']:
            # guru positions come as "path:line:column".
            p, l, c = result['pos'].split(':')
            callers.append({
                'title': result['caller'],
                'location': 'File: {} Line: {} Column: {}'.format(p, l, c),
                'position': result['pos']
            })

        ExplorerPanel(self.view, callers).show([])

    def _on_failure(self, data: typing.Dict) -> None:
        """Fired on failures from the callback
        """
        print('anaconda_go: callers error')
        print(data['error'])
        sublime.status_message(data['error'])

    def _on_timeout(self, data: typing.Dict) -> None:
        """Fired when the callback times out
        """
        print('Golang callers definition timed out')

    def _modified_buffer(self, view: sublime.View, code: str) -> str:
        """Guru needs this to use unsaved buffers instead of files
        """
        # Format: filename, byte length of the buffer, then the buffer itself.
        return '\n'.join([
            view.file_name(), str(len(code.encode('utf8'))), code
        ])
|
{
"content_hash": "6f9dcde04775353e43ca1e037f6ce21c",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 92,
"avg_line_length": 31.445544554455445,
"alnum_prop": 0.5214105793450882,
"repo_name": "danalec/dotfiles",
"id": "65fa4fe8fd11a42e5bf8f0ea420e29349d1731ee",
"size": "3305",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sublime/.config/sublime-text-3/Packages/anaconda_go/commands/callers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "699"
},
{
"name": "CSS",
"bytes": "32865"
},
{
"name": "GLSL",
"bytes": "10062"
},
{
"name": "HTML",
"bytes": "4806"
},
{
"name": "JavaScript",
"bytes": "817118"
},
{
"name": "Lua",
"bytes": "34246"
},
{
"name": "PHP",
"bytes": "2263892"
},
{
"name": "Python",
"bytes": "9571271"
},
{
"name": "Ruby",
"bytes": "56701"
},
{
"name": "Shell",
"bytes": "280060"
},
{
"name": "Smarty",
"bytes": "5128"
},
{
"name": "Vim script",
"bytes": "26736"
}
],
"symlink_target": ""
}
|
from CIM15.CDPSM.Connectivity.IEC61970.Wires.Switch import Switch
class Fuse(Switch):
    """An overcurrent protective device with a circuit opening fusible part that is heated and severed by the passage of overcurrent through it. A fuse is considered a switching device because it breaks current.
    """

    def __init__(self, *args, **kw_args):
        """Initialises a new 'Fuse' instance.
        """
        super(Fuse, self).__init__(*args, **kw_args)

    # CIM introspection metadata: Fuse adds no attributes, enums or
    # references beyond those inherited from Switch.
    _attrs = []
    _attr_types = {}
    _defaults = {}
    _enums = {}
    _refs = []
    _many_refs = []
|
{
"content_hash": "5709522ee98a6d0b8c8693dc7740fee8",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 211,
"avg_line_length": 30.05263157894737,
"alnum_prop": 0.6234676007005254,
"repo_name": "rwl/PyCIM",
"id": "d2cc1a63ef7a2378d83f7f35fd5e725364b05384",
"size": "1671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CIM15/CDPSM/Connectivity/IEC61970/Wires/Fuse.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7420564"
}
],
"symlink_target": ""
}
|
"""Simple MNIST classifier example with JIT XLA and timelines.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.python.client import timeline
FLAGS = None
def main(_):
    """Train and evaluate a softmax MNIST classifier, optionally under XLA JIT.

    A Chrome-trace timeline for the final training step is written to
    'timeline.ctf.json' (view with chrome://tracing/).
    """
    # Import data
    mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)

    # Create the model: a single affine layer; softmax is applied by the loss.
    x = tf.placeholder(tf.float32, [None, 784])
    w = tf.Variable(tf.zeros([784, 10]))
    b = tf.Variable(tf.zeros([10]))
    y = tf.matmul(x, w) + b

    # Define loss and optimizer
    y_ = tf.placeholder(tf.float32, [None, 10])

    # The raw formulation of cross-entropy,
    #
    #   tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.nn.softmax(y)),
    #                                 reduction_indices=[1]))
    #
    # can be numerically unstable, so use the fused, stable op on the raw
    # outputs of 'y' and average across the batch.
    cross_entropy = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y))
    train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

    config = tf.ConfigProto()
    jit_level = 0
    if FLAGS.xla:
        # Turns on XLA JIT compilation.
        jit_level = tf.OptimizerOptions.ON_1

    config.graph_options.optimizer_options.global_jit_level = jit_level
    run_metadata = tf.RunMetadata()
    sess = tf.Session(config=config)
    tf.global_variables_initializer().run(session=sess)

    # Train
    train_loops = 1000
    for i in range(train_loops):
        batch_xs, batch_ys = mnist.train.next_batch(100)

        # Create a timeline for the last loop and export to json to view with
        # chrome://tracing/.
        if i == train_loops - 1:
            sess.run(train_step,
                     feed_dict={x: batch_xs,
                                y_: batch_ys},
                     options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),
                     run_metadata=run_metadata)
            trace = timeline.Timeline(step_stats=run_metadata.step_stats)
            # BUG FIX: the trace file was opened but never closed; use a
            # context manager so the JSON is flushed to disk deterministically.
            with open('timeline.ctf.json', 'w') as trace_file:
                trace_file.write(trace.generate_chrome_trace_format())
        else:
            sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})

    # Test trained model
    correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    print(sess.run(accuracy,
                   feed_dict={x: mnist.test.images,
                              y_: mnist.test.labels}))
    sess.close()
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--data_dir',
        type=str,
        default='/tmp/tensorflow/mnist/input_data',
        help='Directory for storing input data')
    # NOTE(review): argparse with type=bool treats any non-empty string as
    # True (e.g. "--xla=False" is still truthy) — confirm intended usage.
    parser.add_argument(
        '--xla', type=bool, default=True, help='Turn xla via JIT on')
    FLAGS, unparsed = parser.parse_known_args()
    # Forward unrecognized flags to tf.app.run so TF's own flags still work.
    tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
{
"content_hash": "7631e8304ac0487c4b4c58e6c053f57e",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 77,
"avg_line_length": 32.10752688172043,
"alnum_prop": 0.6510381781647689,
"repo_name": "tomasreimers/tensorflow-emscripten",
"id": "bf3f2fb015ff782f2866c7eb6147ce6d68e72a1e",
"size": "3675",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tensorflow/examples/tutorials/mnist/mnist_softmax_xla.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6963"
},
{
"name": "C",
"bytes": "128946"
},
{
"name": "C++",
"bytes": "20395723"
},
{
"name": "CMake",
"bytes": "112969"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "96872"
},
{
"name": "HTML",
"bytes": "534896"
},
{
"name": "Java",
"bytes": "215238"
},
{
"name": "JavaScript",
"bytes": "21911"
},
{
"name": "Jupyter Notebook",
"bytes": "1833593"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "43095"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "64592"
},
{
"name": "Protocol Buffer",
"bytes": "187293"
},
{
"name": "Python",
"bytes": "16394795"
},
{
"name": "Shell",
"bytes": "344425"
},
{
"name": "TypeScript",
"bytes": "761620"
}
],
"symlink_target": ""
}
|
'''
This is a custom module used to calculate what the user inputs
'''
def subtract(List):
    """Subtract every remaining element from the first one, left to right."""
    result = List[0]
    for value in List[1:]:
        result = result - value
    return result
def multiply(List):
    """Return the product of all elements (1 for an empty list)."""
    product = 1
    for factor in List:
        product = product * factor
    return product
def divide(List):
    """Divide the first element by each remaining one, left to right."""
    quotient = List[0]
    for divisor in List[1:]:
        quotient /= divisor
    return quotient
def modulo(Int):
    """Return Int[0] % Int[1], or an error string for invalid input.

    Errors are reported as strings (matching the module's other helpers):
    exactly two arguments are required, and the dividend must not be
    smaller than the divisor.
    """
    # BUG FIX: the original had `errors.append(...)` lines *after* the
    # return statements — unreachable dead code referencing an undefined
    # name `errors`. Removed; returned messages are unchanged.
    if len(Int) != 2:
        return 'ArgumentError: modulo only takes 2 arguments.'
    if Int[0] < Int[1]:
        return 'ArgumentError: The dividend is smaller than the divisor.'
    return Int[0] % Int[1]
|
{
"content_hash": "868bc291d75b9e8b9ba6cc38157db4ab",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 85,
"avg_line_length": 26.09375,
"alnum_prop": 0.592814371257485,
"repo_name": "hookoored/Emalplip",
"id": "b7bf1e9717692ec7fc69be60d7f0c414c9132a53",
"size": "835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "functions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13828"
}
],
"symlink_target": ""
}
|
{
'name': 'Uruguay - Chart of Accounts',
'version': '0.1',
'author': 'Uruguay l10n Team & Guillem Barba',
'category': 'Localization/Account Charts',
'website': 'https://launchpad.net/openerp-uruguay',
'description': """
General Chart of Accounts.
==========================
Provide Templates for Chart of Accounts, Taxes for Uruguay.
""",
'license': 'AGPL-3',
'depends': ['account'],
'data': [
'account_types.xml',
'taxes_code_template.xml',
'account_chart_template.xml',
'taxes_template.xml',
'l10n_uy_wizard.xml',
],
'demo': [],
'auto_install': False,
'installable': True,
'images': ['images/config_chart_l10n_uy.jpeg','images/l10n_uy_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
{
"content_hash": "0a0ed7e840dad84747a430b96b6698aa",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 79,
"avg_line_length": 28.20689655172414,
"alnum_prop": 0.5904645476772616,
"repo_name": "chjw8016/GreenOdoo7-haibao",
"id": "2a6f10ddb2984ad2b44e2450f22a494d616cc369",
"size": "1887",
"binary": false,
"copies": "168",
"ref": "refs/heads/master",
"path": "openerp/addons/l10n_uy/__openerp__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "90846"
},
{
"name": "CSS",
"bytes": "384369"
},
{
"name": "JavaScript",
"bytes": "1730589"
},
{
"name": "PHP",
"bytes": "14033"
},
{
"name": "Python",
"bytes": "9394626"
},
{
"name": "Shell",
"bytes": "5172"
},
{
"name": "XSLT",
"bytes": "156761"
}
],
"symlink_target": ""
}
|
import shutil
from subprocess import check_call, CalledProcessError
from .read import get_names
def has_interface_column(tab_file):
    '''
    Returns True if the tabular output file has the v6.1 'interface' column.

    IOError from reading the file propagates to the caller; a TypeError
    (e.g. get_names returned None for an unparsable file) means the file
    is not in v6.1 format, so False is returned.
    '''
    # The original code also had `except IOError: raise`, which is a no-op:
    # an uncaught IOError propagates anyway. Behavior is unchanged.
    try:
        return get_names(tab_file)[1] == 'interface'
    except TypeError:
        return False
def strip_interface_column(tab_file):
    '''
    Strips the 'interface' column from a Dakota 6.1 tabular output file.
    '''
    try:
        # Keep a backup copy of the original file next to the stripped one.
        bak_file = tab_file + '.orig'
        shutil.copyfile(tab_file, bak_file)
        # NOTE(review): shell=True with an interpolated path is fragile
        # (spaces or shell metacharacters in `tab_file`) and `colrm` is not
        # portable; consider pure-Python column slicing instead.
        cmd = 'cat ' + bak_file +' | colrm 9 18 > ' + tab_file
        check_call(cmd, shell=True)
    except (IOError, CalledProcessError):
        # Re-raised unchanged; callers see the original exception.
        raise
def main():
    """Command-line entry point: strip the 'interface' column from a
    Dakota v6.1 tabular output file, converting it to v6.0 format."""
    import argparse
    from dakota_utils import __version__, convert_script

    parser = argparse.ArgumentParser(
        description="Converts a Dakota tabular output file to v6.0 format.")
    parser.add_argument("output_file",
                        help="path to a Dakota v6.1 tabular output file")
    parser.add_argument('--version', action='version',
                        version=convert_script + ' ' + __version__)
    args = parser.parse_args()

    # has_interface_column returns a plain bool, so an idiomatic truth test
    # replaces the original `is False` identity comparison.
    if not has_interface_column(args.output_file):
        print('Error: Not a Dakota v6.1 tabular output file.')
        return
    strip_interface_column(args.output_file)


if __name__ == '__main__':
    main()
|
{
"content_hash": "1b892bc38b1e147f8be862fc86b89184",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 76,
"avg_line_length": 27.90566037735849,
"alnum_prop": 0.6098715348208249,
"repo_name": "mcflugen/dakota-experiments",
"id": "05721b2c45f9244bcea2793a28b4fde203733380",
"size": "1595",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dakota_utils/convert.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61862"
},
{
"name": "Shell",
"bytes": "4465"
}
],
"symlink_target": ""
}
|
"""
Definition of Interval.
"""
class Interval(object):
    # Simple value object: an interval with inclusive endpoints
    # `start` and `end`.
    def __init__(self, start, end):
        self.start = start
        self.end = end
class Solution:
    """
    @param list1: one of the given list
    @param list2: another list
    @return: the new sorted list of interval
    """
    def mergeTwoInterval(self, list1, list2):
        """Merge two lists of intervals, each already sorted by start,
        into one sorted list with overlapping/adjacent-by-overlap
        intervals coalesced (classic two-pointer merge)."""
        # Empty/None inputs: return the other list unchanged.
        if list1 is None or list1 == []:
            return list2
        if list2 is None or list2 == []:
            return list1

        def compare(interval0, interval1):
            # Order intervals by their start value (-1 / 0 / 1).
            if interval0.start < interval1.start:
                return -1
            elif interval0.start == interval1.start:
                return 0
            else:
                return 1

        result = []
        pos1, pos2 = 0, 0
        max1, max2 = len(list1), len(list2)
        # (start, end) is the interval currently being accumulated.
        start, end = None, None
        while pos1 < max1 and pos2 < max2:
            # Take the interval with the smaller start (ties prefer list1).
            if compare(list1[pos1], list2[pos2]) <= 0:
                curr_interval = Interval(list1[pos1].start, list1[pos1].end)
                pos1 += 1
            else:
                curr_interval = Interval(list2[pos2].start, list2[pos2].end)
                pos2 += 1
            if start is None:
                start = curr_interval.start
                end = curr_interval.end
            else:
                if curr_interval.start <= end:
                    # Overlap: extend the accumulated interval.
                    end = max(curr_interval.end, end)
                else:
                    # Gap: flush the accumulated interval and start a new one.
                    result.append(Interval(start, end))
                    start = curr_interval.start
                    end = curr_interval.end
        # Drain whichever list still has intervals, merging as before.
        while pos1 < max1:
            if list1[pos1].start <= end:
                end = max(list1[pos1].end, end)
            else:
                result.append(Interval(start, end))
                start = list1[pos1].start
                end = list1[pos1].end
            pos1 += 1
        while pos2 < max2:
            if list2[pos2].start <= end:
                end = max(list2[pos2].end, end)
            else:
                result.append(Interval(start, end))
                start = list2[pos2].start
                end = list2[pos2].end
            pos2 += 1
        # Flush the final accumulated interval.
        result.append(Interval(start, end))
        return result
if __name__ == "__main__":
    # Ad-hoc smoke test: merging [1,2],[3,4] with [2,3],[5,6] should
    # print (1, 4) and (5, 6).
    sln = Solution()
    list1 = [Interval(1,2),Interval(3,4)]
    list2 = [Interval(2,3),Interval(5,6)]
    res = sln.mergeTwoInterval(list1, list2)
    for i in res:
        print((i.start, i.end))
    #assert(res == [Interval(1,4),Interval(5,6)])
|
{
"content_hash": "76363460f9ff4eaf67b97f62751cdbdd",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 76,
"avg_line_length": 30.646341463414632,
"alnum_prop": 0.49144448865897333,
"repo_name": "euccas/CodingPuzzles-Python",
"id": "ce5acf4b3fb3023b1e6e4f9d2dc299856bb95d80",
"size": "2513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leet/source/datastructure/merge_two_sorted_intervals.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "4241"
},
{
"name": "Java",
"bytes": "9159"
},
{
"name": "Jupyter Notebook",
"bytes": "14456"
},
{
"name": "Python",
"bytes": "251867"
},
{
"name": "Ruby",
"bytes": "2895"
}
],
"symlink_target": ""
}
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Error handler that downgrades wrapper-generation failures to warnings
    so binding generation can continue."""

    def handle_error(self, wrapper, exception, traceback_):
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        # True tells pybindgen the error was handled; skip this wrapper.
        return True
# Install the permissive handler so generation warns (and continues) on errors.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create the root pybindgen Module for the ns-3 'dsdv' bindings."""
    root_module = Module('ns.dsdv', cpp_namespace='::ns3')
    return root_module
def register_types(module):
    """Register every wrapped class, enum and container with *module*.

    Types from the core, network and internet ns-3 modules are declared as
    imports (``import_from_module=...``); DSDV-specific types are registered
    by the nested ``register_types_ns3_dsdv`` hook invoked at the bottom.
    NOTE(review): this looks like pybindgen auto-generated binding code --
    manual edits will likely be overwritten on regeneration.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    ## buffer.h (module 'network'): ns3::Buffer [class]
    module.add_class('Buffer', import_from_module='ns.network')
    ## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
    ## packet.h (module 'network'): ns3::ByteTagIterator [class]
    module.add_class('ByteTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
    module.add_class('ByteTagList', import_from_module='ns.network')
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## event-id.h (module 'core'): ns3::EventId [class]
    module.add_class('EventId', import_from_module='ns.core')
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
    module.add_class('Inet6SocketAddress', import_from_module='ns.network')
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
    root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
    module.add_class('InetSocketAddress', import_from_module='ns.network')
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
    root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
    ## int-to-type.h (module 'core'): ns3::IntToType<0> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['0'])
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<1> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['1'])
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<2> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['2'])
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<3> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['3'])
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<4> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['4'])
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<5> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['5'])
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<6> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['6'])
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >'], import_from_module='ns.core')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
    module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
    module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper [class]
    module.add_class('Ipv4RoutingHelper', allow_subclassing=True, import_from_module='ns.internet')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress [class]
    module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e [enumeration]
    module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e [enumeration]
    module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## node-container.h (module 'network'): ns3::NodeContainer [class]
    module.add_class('NodeContainer', import_from_module='ns.network')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## object-factory.h (module 'core'): ns3::ObjectFactory [class]
    module.add_class('ObjectFactory', import_from_module='ns.core')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
    module.add_class('PacketMetadata', import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
    module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
    module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet.h (module 'network'): ns3::PacketTagIterator [class]
    module.add_class('PacketTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
    module.add_class('PacketTagList', import_from_module='ns.network')
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
    module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simulator.h (module 'core'): ns3::Simulator [class]
    module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
    ## tag.h (module 'network'): ns3::Tag [class]
    module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## timer.h (module 'core'): ns3::Timer [class]
    module.add_class('Timer', import_from_module='ns.core')
    ## timer.h (module 'core'): ns3::Timer::DestroyPolicy [enumeration]
    module.add_enum('DestroyPolicy', ['CANCEL_ON_DESTROY', 'REMOVE_ON_DESTROY', 'CHECK_ON_DESTROY'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
    ## timer.h (module 'core'): ns3::Timer::State [enumeration]
    module.add_enum('State', ['RUNNING', 'EXPIRED', 'SUSPENDED'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
    ## timer-impl.h (module 'core'): ns3::TimerImpl [class]
    module.add_class('TimerImpl', allow_subclassing=True, import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t', import_from_module='ns.core')
    ## chunk.h (module 'network'): ns3::Chunk [class]
    module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## dsdv-helper.h (module 'dsdv'): ns3::DsdvHelper [class]
    module.add_class('DsdvHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
    ## header.h (module 'network'): ns3::Header [class]
    module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header [class]
    module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType [enumeration]
    module.add_enum('DscpType', ['DscpDefault', 'CS1', 'AF11', 'AF12', 'AF13', 'CS2', 'AF21', 'AF22', 'AF23', 'CS3', 'AF31', 'AF32', 'AF33', 'CS4', 'AF41', 'AF42', 'AF43', 'CS5', 'EF', 'CS6', 'CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType [enumeration]
    module.add_enum('EcnType', ['NotECT', 'ECT1', 'ECT0', 'CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class]
    module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration]
    module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class]
    module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class]
    module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## socket.h (module 'network'): ns3::Socket [class]
    module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
    module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
    ## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
    module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
    ## socket.h (module 'network'): ns3::SocketAddressTag [class]
    module.add_class('SocketAddressTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpTosTag [class]
    module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
    module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag [class]
    module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpv6TclassTag [class]
    module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
    module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time', import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## trailer.h (module 'network'): ns3::Trailer [class]
    module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class]
    module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class]
    module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class]
    module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class]
    module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class]
    module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class]
    module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class]
    module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class]
    module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class]
    module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## event-impl.h (module 'core'): ns3::EventImpl [class]
    module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class]
    module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class]
    module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol [class]
    module.add_class('IpL4Protocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::RxStatus [enumeration]
    module.add_enum('RxStatus', ['RX_OK', 'RX_CSUM_FAILED', 'RX_ENDPOINT_CLOSED', 'RX_ENDPOINT_UNREACH'], outer_class=root_module['ns3::IpL4Protocol'], import_from_module='ns.internet')
    ## ipv4.h (module 'internet'): ns3::Ipv4 [class]
    module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface [class]
    module.add_class('Ipv4Interface', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol [class]
    module.add_class('Ipv4L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv4'])
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::DropReason [enumeration]
    module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute [class]
    module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Route [class]
    module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol [class]
    module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6Interface [class]
    module.add_class('Ipv6Interface', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class]
    module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    ## nix-vector.h (module 'network'): ns3::NixVector [class]
    module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    ## node.h (module 'network'): ns3::Node [class]
    module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class]
    module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
    module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
    module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
    module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    ## packet.h (module 'network'): ns3::Packet [class]
    module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class]
    module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## nstime.h (module 'core'): ns3::TimeChecker [class]
    module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type='map')
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace dsdv
    nested_module = module.add_cpp_namespace('dsdv')
    register_types_ns3_dsdv(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register pybindgen type wrappers for the ns3::FatalImpl namespace.

    No public types are exposed from this namespace; the root-module lookup
    is retained for parity with the other generated register_types_* helpers.
    """
    module.get_root()
def register_types_ns3_dsdv(module):
    """Register pybindgen type wrappers for the ns3::dsdv namespace.

    Parent classes are looked up on the root module, so the wrapped base
    types (ns3::Header, ns3::Ipv4RoutingProtocol) must already be
    registered before this helper runs.
    """
    root_module = module.get_root()
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RouteFlags [enumeration]
    module.add_enum('RouteFlags', ['VALID', 'INVALID'])
    ## dsdv-packet.h (module 'dsdv'): ns3::dsdv::DsdvHeader [class]
    module.add_class('DsdvHeader', parent=root_module['ns3::Header'])
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::PacketQueue [class]
    module.add_class('PacketQueue')
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry [class]
    module.add_class('QueueEntry')
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol [class]
    module.add_class('RoutingProtocol', parent=root_module['ns3::Ipv4RoutingProtocol'])
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTable [class]
    module.add_class('RoutingTable')
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTableEntry [class]
    module.add_class('RoutingTableEntry')
    # Wrapper for the routing-table map used by the DSDV routing protocol API.
    module.add_container('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry >', ('ns3::Ipv4Address', 'ns3::dsdv::RoutingTableEntry'), container_type='map')
def register_methods(root_module):
    """Invoke the per-class method-registration helper for every wrapped type.

    Each helper receives the root module plus the class wrapper previously
    registered under the corresponding fully-qualified C++ type name; the
    string keys here must match those registrations exactly.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
    register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
    register_Ns3IntToType__0_methods(root_module, root_module['ns3::IntToType< 0 >'])
    register_Ns3IntToType__1_methods(root_module, root_module['ns3::IntToType< 1 >'])
    register_Ns3IntToType__2_methods(root_module, root_module['ns3::IntToType< 2 >'])
    register_Ns3IntToType__3_methods(root_module, root_module['ns3::IntToType< 3 >'])
    register_Ns3IntToType__4_methods(root_module, root_module['ns3::IntToType< 4 >'])
    register_Ns3IntToType__5_methods(root_module, root_module['ns3::IntToType< 5 >'])
    register_Ns3IntToType__6_methods(root_module, root_module['ns3::IntToType< 6 >'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv4RoutingHelper_methods(root_module, root_module['ns3::Ipv4RoutingHelper'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6InterfaceAddress_methods(root_module, root_module['ns3::Ipv6InterfaceAddress'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3Timer_methods(root_module, root_module['ns3::Timer'])
    register_Ns3TimerImpl_methods(root_module, root_module['ns3::TimerImpl'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3DsdvHelper_methods(root_module, root_module['ns3::DsdvHelper'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
    register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
    register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
    register_Ns3SocketAddressTag_methods(root_module, root_module['ns3::SocketAddressTag'])
    register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
    register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
    register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
    register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
    register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
    register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
    register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
    register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
    register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
    register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
    register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
    register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
    register_Ns3IpL4Protocol_methods(root_module, root_module['ns3::IpL4Protocol'])
    register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4Interface_methods(root_module, root_module['ns3::Ipv4Interface'])
    register_Ns3Ipv4L3Protocol_methods(root_module, root_module['ns3::Ipv4L3Protocol'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
    register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
    register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6Interface_methods(root_module, root_module['ns3::Ipv6Interface'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
    register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3DsdvDsdvHeader_methods(root_module, root_module['ns3::dsdv::DsdvHeader'])
    register_Ns3DsdvPacketQueue_methods(root_module, root_module['ns3::dsdv::PacketQueue'])
    register_Ns3DsdvQueueEntry_methods(root_module, root_module['ns3::dsdv::QueueEntry'])
    register_Ns3DsdvRoutingProtocol_methods(root_module, root_module['ns3::dsdv::RoutingProtocol'])
    register_Ns3DsdvRoutingTable_methods(root_module, root_module['ns3::dsdv::RoutingTable'])
    register_Ns3DsdvRoutingTableEntry_methods(root_module, root_module['ns3::dsdv::RoutingTableEntry'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Register Python bindings for ns3::Address (address.h, module 'network').

    Adds comparison/output-stream operators, the constructors, and the
    byte-copy / (de)serialization member functions of the C++ class.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible',
                   'bool',
                   [param('uint8_t', 'type'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer')],
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid',
                   'bool',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('uint8_t', 'type')],
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register',
                   'uint8_t',
                   [],
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register bindings for ns3::AttributeConstructionList (attribute-construction-list.h, module 'core')."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register bindings for the nested ns3::AttributeConstructionList::Item struct.

    Exposes the constructors and the checker/name/value public fields as
    writable instance attributes.
    """
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Register Python bindings for ns3::Buffer (buffer.h, module 'network')."""
    ## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
    cls.add_method('AddAtEnd',
                   'bool',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
    cls.add_method('AddAtStart',
                   'bool',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
    cls.add_method('Begin',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Buffer',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
    cls.add_method('CreateFullCopy',
                   'ns3::Buffer',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
    cls.add_method('End',
                   'ns3::Buffer::Iterator',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
    cls.add_method('GetCurrentEndOffset',
                   'int32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
    cls.add_method('GetCurrentStartOffset',
                   'int32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
    cls.add_method('PeekData',
                   'uint8_t const *',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Register Python bindings for ns3::Buffer::Iterator (buffer.h, module 'network').

    Covers iterator navigation, checksum helpers, and the full set of
    typed Read*/Write* accessors in host, network (ntoh/hton), and
    little-endian (lsbtoh/htolsb) byte orders.
    """
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
    cls.add_method('CalculateIpChecksum',
                   'uint16_t',
                   [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
    cls.add_method('GetDistanceFrom',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator const &', 'o')],
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
    cls.add_method('IsEnd',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
    cls.add_method('IsStart',
                   'bool',
                   [],
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
    cls.add_method('Next',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
    cls.add_method('Next',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
    cls.add_method('Prev',
                   'void',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
    cls.add_method('Prev',
                   'void',
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read',
                   'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
    cls.add_method('ReadLsbtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
    cls.add_method('ReadLsbtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
    cls.add_method('ReadLsbtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
    cls.add_method('ReadNtohU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
    cls.add_method('ReadNtohU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
    cls.add_method('ReadNtohU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
    cls.add_method('ReadU16',
                   'uint16_t',
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
    cls.add_method('ReadU32',
                   'uint32_t',
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
    cls.add_method('ReadU64',
                   'uint64_t',
                   [])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
    cls.add_method('ReadU8',
                   'uint8_t',
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write',
                   'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Write',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
    cls.add_method('WriteHtolsbU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
    cls.add_method('WriteHtolsbU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
    cls.add_method('WriteHtolsbU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
    cls.add_method('WriteHtonU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
    cls.add_method('WriteHtonU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
    cls.add_method('WriteHtonU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16',
                   'void',
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32',
                   'void',
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
    cls.add_method('WriteU64',
                   'void',
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
    cls.add_method('WriteU8',
                   'void',
                   [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register bindings for ns3::ByteTagIterator (packet.h, module 'network').

    Adds the copy constructor plus the HasNext()/Next() iteration API,
    in the same order as the generated original.
    """
    # ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const &) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    # bool HasNext() const, then ns3::ByteTagIterator::Item Next() — both niladic.
    method_table = (
        ('HasNext', 'bool', {'is_const': True}),
        ('Next', 'ns3::ByteTagIterator::Item', {}),
    )
    for method_name, return_type, extra_kwargs in method_table:
        cls.add_method(method_name, return_type, [], **extra_kwargs)
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register bindings for the nested ns3::ByteTagIterator::Item class."""
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
    cls.add_method('GetEnd',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
    cls.add_method('GetStart',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
    cls.add_method('GetTag',
                   'void',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagList (byte-tag-list.h, module 'network')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    ## ns3::TagBuffer Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end)
    cls.add_method('Add', 'ns3::TagBuffer',
                   [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    ## void Add(ns3::ByteTagList const & o)
    cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')])
    ## void AddAtEnd(int32_t adjustment, int32_t appendOffset)
    cls.add_method('AddAtEnd', 'void', [param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
    ## void AddAtStart(int32_t adjustment, int32_t prependOffset)
    cls.add_method('AddAtStart', 'void', [param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
    ## ns3::ByteTagList::Iterator Begin(int32_t offsetStart, int32_t offsetEnd) const
    cls.add_method('Begin', 'ns3::ByteTagList::Iterator',
                   [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')], is_const=True)
    ## void RemoveAll()
    cls.add_method('RemoveAll', 'void', [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagList::Iterator (byte-tag-list.h, module 'network')."""
    ## copy constructor: Iterator(Iterator const & arg0)
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    ## uint32_t GetOffsetStart() const
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    ## bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    ## ns3::ByteTagList::Iterator::Item Next()
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagList::Iterator::Item (byte-tag-list.h, module 'network')."""
    ## constructors: copy, and from an ns3::TagBuffer
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    ## public data members exposed as mutable instance attributes
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackBase (callback.h, module 'core')."""
    ## copy and default constructors
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    cls.add_constructor([])
    ## ns3::Ptr<ns3::CallbackImplBase> GetImpl() const
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    ## protected constructor: CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl)
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')], visibility='protected')
    ## static std::string Demangle(std::string const & mangled) [protected]
    cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3EventId_methods(root_module, cls):
    """Register Python bindings for ns3::EventId (event-id.h, module 'core')."""
    ## comparison operators
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    ## constructors: copy, default, and (impl, ts, context, uid)
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    ## void Cancel()
    cls.add_method('Cancel', 'void', [])
    ## uint32_t GetContext() const
    cls.add_method('GetContext', 'uint32_t', [], is_const=True)
    ## uint64_t GetTs() const
    cls.add_method('GetTs', 'uint64_t', [], is_const=True)
    ## uint32_t GetUid() const
    cls.add_method('GetUid', 'uint32_t', [], is_const=True)
    ## bool IsExpired() const
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    ## bool IsRunning() const
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    ## ns3::EventImpl * PeekEventImpl() const
    cls.add_method('PeekEventImpl', 'ns3::EventImpl *', [], is_const=True)
    return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
    """Register Python bindings for ns3::Inet6SocketAddress (inet6-socket-address.h, module 'network')."""
    ## constructors: copy, (ipv6, port), (ipv6), (port), (char*, port), (char*)
    cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
    cls.add_constructor([param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv6')])
    ## static ns3::Inet6SocketAddress ConvertFrom(ns3::Address const & addr)
    cls.add_method('ConvertFrom', 'ns3::Inet6SocketAddress', [param('ns3::Address const &', 'addr')], is_static=True)
    ## ns3::Ipv6Address GetIpv6() const
    cls.add_method('GetIpv6', 'ns3::Ipv6Address', [], is_const=True)
    ## uint16_t GetPort() const
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    ## static bool IsMatchingType(ns3::Address const & addr)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'addr')], is_static=True)
    ## void SetIpv6(ns3::Ipv6Address ipv6)
    cls.add_method('SetIpv6', 'void', [param('ns3::Ipv6Address', 'ipv6')])
    ## void SetPort(uint16_t port)
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    return
def register_Ns3InetSocketAddress_methods(root_module, cls):
    """Register Python bindings for ns3::InetSocketAddress (inet-socket-address.h, module 'network')."""
    ## constructors: copy, (ipv4, port), (ipv4), (port), (char*, port), (char*)
    cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
    cls.add_constructor([param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
    cls.add_constructor([param('char const *', 'ipv4')])
    ## static ns3::InetSocketAddress ConvertFrom(ns3::Address const & address)
    cls.add_method('ConvertFrom', 'ns3::InetSocketAddress', [param('ns3::Address const &', 'address')], is_static=True)
    ## ns3::Ipv4Address GetIpv4() const
    cls.add_method('GetIpv4', 'ns3::Ipv4Address', [], is_const=True)
    ## uint16_t GetPort() const
    cls.add_method('GetPort', 'uint16_t', [], is_const=True)
    ## static bool IsMatchingType(ns3::Address const & address)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    ## void SetIpv4(ns3::Ipv4Address address)
    cls.add_method('SetIpv4', 'void', [param('ns3::Ipv4Address', 'address')])
    ## void SetPort(uint16_t port)
    cls.add_method('SetPort', 'void', [param('uint16_t', 'port')])
    return
def register_Ns3IntToType__0_methods(root_module, cls):
    """Register Python bindings for ns3::IntToType<0> (int-to-type.h, module 'core')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntToType< 0 > const &', 'arg0')])
    return
def register_Ns3IntToType__1_methods(root_module, cls):
    """Register Python bindings for ns3::IntToType<1> (int-to-type.h, module 'core')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntToType< 1 > const &', 'arg0')])
    return
def register_Ns3IntToType__2_methods(root_module, cls):
    """Register Python bindings for ns3::IntToType<2> (int-to-type.h, module 'core')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntToType< 2 > const &', 'arg0')])
    return
def register_Ns3IntToType__3_methods(root_module, cls):
    """Register Python bindings for ns3::IntToType<3> (int-to-type.h, module 'core')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntToType< 3 > const &', 'arg0')])
    return
def register_Ns3IntToType__4_methods(root_module, cls):
    """Register Python bindings for ns3::IntToType<4> (int-to-type.h, module 'core')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntToType< 4 > const &', 'arg0')])
    return
def register_Ns3IntToType__5_methods(root_module, cls):
    """Register Python bindings for ns3::IntToType<5> (int-to-type.h, module 'core')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntToType< 5 > const &', 'arg0')])
    return
def register_Ns3IntToType__6_methods(root_module, cls):
    """Register Python bindings for ns3::IntToType<6> (int-to-type.h, module 'core')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IntToType< 6 > const &', 'arg0')])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Address (ipv4-address.h, module 'network')."""
    ## comparison and stream-output operators
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## constructors: copy, default, from uint32_t, from char const *
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'address')])
    cls.add_constructor([param('char const *', 'address')])
    ## ns3::Ipv4Address CombineMask(ns3::Ipv4Mask const & mask) const
    cls.add_method('CombineMask', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    ## static ns3::Ipv4Address ConvertFrom(ns3::Address const & address)
    cls.add_method('ConvertFrom', 'ns3::Ipv4Address', [param('ns3::Address const &', 'address')], is_static=True)
    ## static ns3::Ipv4Address Deserialize(uint8_t const * buf)
    cls.add_method('Deserialize', 'ns3::Ipv4Address', [param('uint8_t const *', 'buf')], is_static=True)
    ## uint32_t Get() const
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    ## static well-known address accessors
    cls.add_method('GetAny', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetLoopback', 'ns3::Ipv4Address', [], is_static=True)
    ## ns3::Ipv4Address GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const
    cls.add_method('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    ## static ns3::Ipv4Address GetZero()
    cls.add_method('GetZero', 'ns3::Ipv4Address', [], is_static=True)
    ## classification predicates
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Address const &', 'other')], is_const=True)
    cls.add_method('IsLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSubnetDirectedBroadcast', 'bool', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    ## void Print(std::ostream & os) const
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    ## void Serialize(uint8_t * buf) const
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    ## setters: Set(uint32_t) and Set(char const *)
    cls.add_method('Set', 'void', [param('uint32_t', 'address')])
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    return
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4InterfaceAddress (ipv4-interface-address.h, module 'internet')."""
    ## comparison and stream-output operators
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## constructors: default, (local, mask), copy
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4Address', 'local'), param('ns3::Ipv4Mask', 'mask')])
    cls.add_constructor([param('ns3::Ipv4InterfaceAddress const &', 'o')])
    ## getters
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetLocal', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetMask', 'ns3::Ipv4Mask', [], is_const=True)
    cls.add_method('GetScope', 'ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', [], is_const=True)
    ## bool IsSecondary() const
    cls.add_method('IsSecondary', 'bool', [], is_const=True)
    ## setters
    cls.add_method('SetBroadcast', 'void', [param('ns3::Ipv4Address', 'broadcast')])
    cls.add_method('SetLocal', 'void', [param('ns3::Ipv4Address', 'local')])
    cls.add_method('SetMask', 'void', [param('ns3::Ipv4Mask', 'mask')])
    cls.add_method('SetPrimary', 'void', [])
    cls.add_method('SetScope', 'void', [param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')])
    cls.add_method('SetSecondary', 'void', [])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Mask (ipv4-address.h, module 'network')."""
    ## comparison and stream-output operators
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## constructors: copy, default, from uint32_t, from char const *
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'mask')])
    cls.add_constructor([param('char const *', 'mask')])
    ## uint32_t Get() const / uint32_t GetInverse() const
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    ## static well-known mask accessors
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    ## uint16_t GetPrefixLength() const
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    ## static ns3::Ipv4Mask GetZero()
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    ## bool IsEqual(ns3::Ipv4Mask other) const
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], is_const=True)
    ## bool IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const
    cls.add_method('IsMatch', 'bool', [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')], is_const=True)
    ## void Print(std::ostream & os) const
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    ## void Set(uint32_t mask)
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv4RoutingHelper_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4RoutingHelper (ipv4-routing-helper.h, module 'internet')."""
    ## default and copy constructors
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4RoutingHelper const &', 'arg0')])
    ## pure virtual: ns3::Ipv4RoutingHelper * Copy() const
    cls.add_method('Copy', 'ns3::Ipv4RoutingHelper *', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## pure virtual: ns3::Ptr<ns3::Ipv4RoutingProtocol> Create(ns3::Ptr<ns3::Node> node) const
    cls.add_method('Create', 'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## routing-table printing helpers (all const)
    cls.add_method('PrintRoutingTableAllAt', 'void',
                   [param('ns3::Time', 'printTime'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    cls.add_method('PrintRoutingTableAllEvery', 'void',
                   [param('ns3::Time', 'printInterval'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    cls.add_method('PrintRoutingTableAt', 'void',
                   [param('ns3::Time', 'printTime'), param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    cls.add_method('PrintRoutingTableEvery', 'void',
                   [param('ns3::Time', 'printInterval'), param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Address (ipv6-address.h, module 'network')."""
    ## comparison and stream-output operators
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## constructors: default, from char const *, from uint8_t *, copy, from pointer
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_constructor([param('uint8_t *', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    ## ns3::Ipv6Address CombinePrefix(ns3::Ipv6Prefix const & prefix)
    cls.add_method('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')])
    ## static ns3::Ipv6Address ConvertFrom(ns3::Address const & address)
    cls.add_method('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], is_static=True)
    ## static ns3::Ipv6Address Deserialize(uint8_t const * buf)
    cls.add_method('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], is_static=True)
    ## static well-known address accessors
    cls.add_method('GetAllHostsMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllNodesMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAny', 'ns3::Ipv6Address', [], is_static=True)
    ## void GetBytes(uint8_t * buf) const
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    ## ns3::Ipv4Address GetIpv4MappedAddress() const
    cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    ## more static well-known address accessors
    cls.add_method('GetLoopback', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetZero', 'ns3::Ipv6Address', [], is_static=True)
    ## classification predicates
    cls.add_method('IsAllHostsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAllNodesMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAllRoutersMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], is_const=True)
    cls.add_method('IsIpv4MappedAddress', 'bool', [])
    cls.add_method('IsLinkLocal', 'bool', [], is_const=True)
    cls.add_method('IsLinkLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSolicitedMulticast', 'bool', [], is_const=True)
    ## static factory helpers
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'mac')], is_static=True)
    cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv4Address', 'addr')], is_static=True)
    cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv6Address', 'addr')], is_static=True)
    ## void Print(std::ostream & os) const
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    ## void Serialize(uint8_t * buf) const
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    ## setters: Set(char const *) and Set(uint8_t *)
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    cls.add_method('Set', 'void', [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6InterfaceAddress_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6InterfaceAddress (ipv6-interface-address.h, module 'internet')."""
    # Comparison and stream operators.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors (default, from address, from address+prefix, copy).
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6Address', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6InterfaceAddress const &', 'o')])
    # Const accessors.
    cls.add_method('GetAddress', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetNsDadUid', 'uint32_t', [], is_const=True)
    cls.add_method('GetPrefix', 'ns3::Ipv6Prefix', [], is_const=True)
    cls.add_method('GetScope', 'ns3::Ipv6InterfaceAddress::Scope_e', [], is_const=True)
    cls.add_method('GetState', 'ns3::Ipv6InterfaceAddress::State_e', [], is_const=True)
    # Mutators.
    cls.add_method('SetAddress', 'void', [param('ns3::Ipv6Address', 'address')])
    cls.add_method('SetNsDadUid', 'void', [param('uint32_t', 'uid')])
    cls.add_method('SetScope', 'void', [param('ns3::Ipv6InterfaceAddress::Scope_e', 'scope')])
    cls.add_method('SetState', 'void', [param('ns3::Ipv6InterfaceAddress::State_e', 'state')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    # Comparison and stream operators.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors (default, from raw bytes, from string, from length, copy, from pointer).
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t *', 'prefix')])
    cls.add_constructor([param('char const *', 'prefix')])
    cls.add_constructor([param('uint8_t', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
    # Instance methods.
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], is_const=True)
    cls.add_method('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    # Static well-known prefixes.
    cls.add_method('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True)
    return
def register_Ns3NodeContainer_methods(root_module, cls):
    """Register Python bindings for ns3::NodeContainer (node-container.h, module 'network')."""
    _iter = '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >'
    # Constructors: copy, default, single node, by name, and 2..5-container merges.
    cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_constructor([param('std::string', 'nodeName')])
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
    # Add overloads (container, node pointer, node name).
    cls.add_method('Add', 'void', [param('ns3::NodeContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('Add', 'void', [param('std::string', 'nodeName')])
    # Iteration and element access.
    cls.add_method('Begin', _iter, [], is_const=True)
    cls.add_method('Create', 'void', [param('uint32_t', 'n')])
    cls.add_method('Create', 'void', [param('uint32_t', 'n'), param('uint32_t', 'systemId')])
    cls.add_method('End', _iter, [], is_const=True)
    cls.add_method('Get', 'ns3::Ptr< ns3::Node >', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetGlobal', 'ns3::NodeContainer', [], is_static=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectBase (object-base.h, module 'core')."""
    # Constructors (default, copy).
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute access.
    cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')], is_const=True)
    cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # Type identification.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Trace source connection/disconnection.
    cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction hooks.
    cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected')
    cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectDeleter (object.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # Static deleter used by the SimpleRefCount machinery.
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectFactory (object-factory.h, module 'core')."""
    cls.add_output_stream_operator()
    # Constructors (copy, default, from type-id string).
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('std::string', 'typeId')])
    # Object creation and configuration.
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    cls.add_method('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # SetTypeId overloads (TypeId object, C string, std::string).
    cls.add_method('SetTypeId', 'void', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('char const *', 'tid')])
    cls.add_method('SetTypeId', 'void', [param('std::string', 'tid')])
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Register Python bindings for ns3::PacketMetadata (packet-metadata.h, module 'network')."""
    # Constructors (uid+size, copy).
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    # Mutators that grow the recorded byte layout.
    cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')])
    cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    # Inspection.
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # (De)serialization.
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    # Global enable switches.
    cls.add_method('Enable', 'void', [], is_static=True)
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    # Mutators that shrink the recorded byte layout.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Register Python bindings for ns3::PacketMetadata::Item (packet-metadata.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members exposed as writable attributes.
    cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
    cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
    cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
    cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
    cls.add_instance_attribute('isFragment', 'bool', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Register Python bindings for ns3::PacketMetadata::ItemIterator (packet-metadata.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
    # Forward-iteration protocol.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagIterator (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # Forward-iteration protocol.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagIterator::Item (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    # Read the tag payload into a caller-provided Tag instance.
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagList (packet-tag-list.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    # List operations; Add/Head/Peek are const in the C++ API.
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveAll', 'void', [])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagList::TagData (packet-tag-list.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members exposed as writable attributes.
    cls.add_instance_attribute('count', 'uint32_t', is_const=False)
    cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
    cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register Python bindings for ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3Simulator_methods(root_module, cls):
    """Register Python bindings for ns3::Simulator (simulator.h, module 'core').

    All simulator operations are static in the C++ API; only the copy
    constructor is registered as an instance member.
    """
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    # Event management.
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_static=True)
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_static=True)
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    # Simulator state queries.
    cls.add_method('GetContext', 'uint32_t', [], is_static=True)
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_static=True)
    cls.add_method('GetSystemId', 'uint32_t', [], is_static=True)
    cls.add_method('IsFinished', 'bool', [], is_static=True)
    cls.add_method('Now', 'ns3::Time', [], is_static=True)
    # Backend configuration.
    cls.add_method('GetImplementation', 'ns3::Ptr< ns3::SimulatorImpl >', [], is_static=True)
    cls.add_method('SetImplementation', 'void', [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')], is_static=True)
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_static=True)
    # Lifecycle control.
    cls.add_method('Destroy', 'void', [], is_static=True)
    cls.add_method('Stop', 'void', [], is_static=True)
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'time')], is_static=True)
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register Python bindings for the abstract base class ns3::Tag (tag.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    # Pure-virtual serialization interface subclasses must implement.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register Python bindings for ns3::TagBuffer (tag-buffer.h, module 'network')."""
    # Constructors (copy, from start/end byte pointers).
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    # Read side.
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('ReadDouble', 'double', [])
    cls.add_method('ReadU16', 'uint16_t', [])
    cls.add_method('ReadU32', 'uint32_t', [])
    cls.add_method('ReadU64', 'uint64_t', [])
    cls.add_method('ReadU8', 'uint8_t', [])
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    # Write side.
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('WriteDouble', 'void', [param('double', 'v')])
    cls.add_method('WriteU16', 'void', [param('uint16_t', 'data')])
    cls.add_method('WriteU32', 'void', [param('uint32_t', 'data')])
    cls.add_method('WriteU64', 'void', [param('uint64_t', 'v')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'v')])
    return
def register_Ns3Timer_methods(root_module, cls):
    """Register Python bindings for ns3::Timer (timer.h, module 'core')."""
    # Constructors: copy, default, and the DestroyPolicy overload.
    cls.add_constructor([param('ns3::Timer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Timer::DestroyPolicy', 'destroyPolicy')])
    # (method name, return type, parameter list, extra keyword args) for every
    # member function, kept in the order the binding generator emitted them.
    member_functions = [
        ('Cancel', 'void', [], {}),
        ('GetDelay', 'ns3::Time', [], {'is_const': True}),
        ('GetDelayLeft', 'ns3::Time', [], {'is_const': True}),
        ('GetState', 'ns3::Timer::State', [], {'is_const': True}),
        ('IsExpired', 'bool', [], {'is_const': True}),
        ('IsRunning', 'bool', [], {'is_const': True}),
        ('IsSuspended', 'bool', [], {'is_const': True}),
        ('Remove', 'void', [], {}),
        ('Resume', 'void', [], {}),
        ('Schedule', 'void', [], {}),
        ('Schedule', 'void', [param('ns3::Time', 'delay')], {}),
        ('SetDelay', 'void', [param('ns3::Time const &', 'delay')], {}),
        ('Suspend', 'void', [], {}),
    ]
    for method_name, return_type, params, extra in member_functions:
        cls.add_method(method_name, return_type, params, **extra)
    return
def register_Ns3TimerImpl_methods(root_module, cls):
    """Register Python bindings for ns3::TimerImpl (timer-impl.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TimerImpl const &', 'arg0')])
    # Both member functions are pure-virtual hooks implemented by subclasses.
    pure_virtual_hooks = [
        ('Invoke', 'void', []),
        ('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay')]),
    ]
    for method_name, return_type, params in pure_virtual_hooks:
        cls.add_method(method_name, return_type, params,
                       is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId (type-id.h, module 'core')."""
    # Comparison and stream operators, in generator emission order.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: by-name, default, and copy.
    cls.add_constructor([param('char const *', 'name')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    # (name, return type, parameter list, extra keyword args) for every member
    # function, kept in the order the binding generator emitted them.
    member_functions = [
        ('AddAttribute', 'ns3::TypeId',
         [param('std::string', 'name'), param('std::string', 'help'),
          param('ns3::AttributeValue const &', 'initialValue'),
          param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'),
          param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], {}),
        ('AddAttribute', 'ns3::TypeId',
         [param('std::string', 'name'), param('std::string', 'help'),
          param('uint32_t', 'flags'),
          param('ns3::AttributeValue const &', 'initialValue'),
          param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'),
          param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], {}),
        ('AddTraceSource', 'ns3::TypeId',
         [param('std::string', 'name'), param('std::string', 'help'),
          param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], {}),
        ('GetAttribute', 'ns3::TypeId::AttributeInformation',
         [param('uint32_t', 'i')], {'is_const': True}),
        ('GetAttributeFullName', 'std::string',
         [param('uint32_t', 'i')], {'is_const': True}),
        ('GetAttributeN', 'uint32_t', [], {'is_const': True}),
        ('GetConstructor',
         'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
         [], {'is_const': True}),
        ('GetGroupName', 'std::string', [], {'is_const': True}),
        ('GetName', 'std::string', [], {'is_const': True}),
        ('GetParent', 'ns3::TypeId', [], {'is_const': True}),
        ('GetRegistered', 'ns3::TypeId',
         [param('uint32_t', 'i')], {'is_static': True}),
        ('GetRegisteredN', 'uint32_t', [], {'is_static': True}),
        ('GetTraceSource', 'ns3::TypeId::TraceSourceInformation',
         [param('uint32_t', 'i')], {'is_const': True}),
        ('GetTraceSourceN', 'uint32_t', [], {'is_const': True}),
        ('GetUid', 'uint16_t', [], {'is_const': True}),
        ('HasConstructor', 'bool', [], {'is_const': True}),
        ('HasParent', 'bool', [], {'is_const': True}),
        ('HideFromDocumentation', 'ns3::TypeId', [], {}),
        ('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], {'is_const': True}),
        ('LookupAttributeByName', 'bool',
         [param('std::string', 'name'),
          # The out-parameter is owned by the caller, hence no ownership transfer.
          param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
         {'is_const': True}),
        ('LookupByName', 'ns3::TypeId',
         [param('std::string', 'name')], {'is_static': True}),
        ('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >',
         [param('std::string', 'name')], {'is_const': True}),
        ('MustHideFromDocumentation', 'bool', [], {'is_const': True}),
        ('SetAttributeInitialValue', 'bool',
         [param('uint32_t', 'i'),
          param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')], {}),
        ('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')], {}),
        ('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')], {}),
        ('SetUid', 'void', [param('uint16_t', 'tid')], {}),
    ]
    for method_name, return_type, params, extra in member_functions:
        cls.add_method(method_name, return_type, params, **extra)
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register bindings for the nested struct ns3::TypeId::AttributeInformation."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    data_members = [
        ('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >'),
        ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
        ('flags', 'uint32_t'),
        ('help', 'std::string'),
        ('initialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
        ('name', 'std::string'),
        ('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
    ]
    for attribute_name, attribute_type in data_members:
        cls.add_instance_attribute(attribute_name, attribute_type, is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register bindings for the nested struct ns3::TypeId::TraceSourceInformation."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    data_members = [
        ('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >'),
        ('help', 'std::string'),
        ('name', 'std::string'),
    ]
    for attribute_name, attribute_type in data_members:
        cls.add_instance_attribute(attribute_name, attribute_type, is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::empty (empty.h, module 'core'), a stateless placeholder type."""
    # Only a default and a copy constructor exist; no members to bind.
    for ctor_params in ([], [param('ns3::empty const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register bindings for the ns3::int64x64_t fixed-point type (int64x64-double.h, module 'core')."""
    i64x64 = root_module['ns3::int64x64_t']
    # Scalar right-hand-side types accepted by the arithmetic operators,
    # in the exact order the binding generator emitted them.
    scalar_types = [
        'long long unsigned int const',
        'long unsigned int const',
        'unsigned int const',
        'short unsigned int const',
        'unsigned char const',
        'long long int const',
        'long int const',
        'int const',
        'short int const',
        'signed char const',
        'double const',
    ]

    def add_arithmetic(op):
        # Register `op` against every scalar type, then against int64x64_t itself.
        for rhs_type in scalar_types:
            cls.add_binary_numeric_operator(op, i64x64, i64x64, param(rhs_type, 'right'))
        cls.add_binary_numeric_operator(op, i64x64, i64x64, param('ns3::int64x64_t const &', 'right'))

    add_arithmetic('*')
    add_arithmetic('+')
    # '-' interleaves unary negation between the scalar and int64x64_t overloads,
    # matching the original registration order exactly.
    for rhs_type in scalar_types:
        cls.add_binary_numeric_operator('-', i64x64, i64x64, param(rhs_type, 'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('-', i64x64, i64x64, param('ns3::int64x64_t const &', 'right'))
    add_arithmetic('/')
    # Comparison, in-place, and stream operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    for inplace_op in ('*=', '+=', '-=', '/='):
        cls.add_inplace_numeric_operator(inplace_op, param('ns3::int64x64_t const &', 'right'))
    cls.add_output_stream_operator()
    for comparison_op in ('<=', '==', '>='):
        cls.add_binary_comparison_operator(comparison_op)
    # Constructors: default, one per scalar source type, (hi, lo) pair, and copy.
    cls.add_constructor([])
    for ctor_type in ('double', 'int', 'long int', 'long long int',
                      'unsigned int', 'long unsigned int', 'long long unsigned int'):
        cls.add_constructor([param(ctor_type, 'v')])
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    # Accessors and helpers.
    for getter_name, getter_type in (('GetDouble', 'double'),
                                     ('GetHigh', 'int64_t'),
                                     ('GetLow', 'uint64_t')):
        cls.add_method(getter_name, getter_type, [], is_const=True)
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True)
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register bindings for the abstract ns3::Chunk base class (chunk.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    # (name, return type, parameter list, extra keyword args); Deserialize and
    # Print are pure-virtual hooks implemented by concrete headers/trailers.
    member_functions = [
        ('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')],
         {'is_pure_virtual': True, 'is_virtual': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_static': True}),
        ('Print', 'void', [param('std::ostream &', 'os')],
         {'is_pure_virtual': True, 'is_const': True, 'is_virtual': True}),
    ]
    for method_name, return_type, params, extra in member_functions:
        cls.add_method(method_name, return_type, params, **extra)
    return
def register_Ns3DsdvHelper_methods(root_module, cls):
    """Register bindings for ns3::DsdvHelper (dsdv-helper.h, module 'dsdv')."""
    # Copy constructor first, then the default constructor (generator order).
    cls.add_constructor([param('ns3::DsdvHelper const &', 'arg0')])
    cls.add_constructor([])
    # (name, return type, parameter list, extra keyword args) per member function.
    member_functions = [
        ('Copy', 'ns3::DsdvHelper *', [],
         {'is_const': True, 'is_virtual': True}),
        ('Create', 'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
         [param('ns3::Ptr< ns3::Node >', 'node')],
         {'is_const': True, 'is_virtual': True}),
        ('Set', 'void',
         [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')],
         {}),
    ]
    for method_name, return_type, params, extra in member_functions:
        cls.add_method(method_name, return_type, params, **extra)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register bindings for the abstract ns3::Header base class (header.h, module 'network')."""
    cls.add_output_stream_operator()
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    # (name, return type, parameter list, extra keyword args); everything except
    # GetTypeId is a pure-virtual hook implemented by concrete header classes.
    member_functions = [
        ('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')],
         {'is_pure_virtual': True, 'is_virtual': True}),
        ('GetSerializedSize', 'uint32_t', [],
         {'is_pure_virtual': True, 'is_const': True, 'is_virtual': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_static': True}),
        ('Print', 'void', [param('std::ostream &', 'os')],
         {'is_pure_virtual': True, 'is_const': True, 'is_virtual': True}),
        ('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')],
         {'is_pure_virtual': True, 'is_const': True, 'is_virtual': True}),
    ]
    for method_name, return_type, params, extra in member_functions:
        cls.add_method(method_name, return_type, params, **extra)
    return
def register_Ns3Ipv4Header_methods(root_module, cls):
    """Register bindings for ns3::Ipv4Header (ipv4-header.h, module 'internet')."""
    # Copy constructor first, then the default constructor (generator order).
    cls.add_constructor([param('ns3::Ipv4Header const &', 'arg0')])
    cls.add_constructor([])
    # (name, return type, parameter list, extra keyword args) for every member
    # function, kept in the order the binding generator emitted them.
    member_functions = [
        ('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')],
         {'is_virtual': True}),
        ('DscpTypeToString', 'std::string',
         [param('ns3::Ipv4Header::DscpType', 'dscp')], {'is_const': True}),
        ('EcnTypeToString', 'std::string',
         [param('ns3::Ipv4Header::EcnType', 'ecn')], {'is_const': True}),
        ('EnableChecksum', 'void', [], {}),
        ('GetDestination', 'ns3::Ipv4Address', [], {'is_const': True}),
        ('GetDscp', 'ns3::Ipv4Header::DscpType', [], {'is_const': True}),
        ('GetEcn', 'ns3::Ipv4Header::EcnType', [], {'is_const': True}),
        ('GetFragmentOffset', 'uint16_t', [], {'is_const': True}),
        ('GetIdentification', 'uint16_t', [], {'is_const': True}),
        ('GetInstanceTypeId', 'ns3::TypeId', [],
         {'is_const': True, 'is_virtual': True}),
        ('GetPayloadSize', 'uint16_t', [], {'is_const': True}),
        ('GetProtocol', 'uint8_t', [], {'is_const': True}),
        ('GetSerializedSize', 'uint32_t', [],
         {'is_const': True, 'is_virtual': True}),
        ('GetSource', 'ns3::Ipv4Address', [], {'is_const': True}),
        ('GetTos', 'uint8_t', [], {'is_const': True}),
        ('GetTtl', 'uint8_t', [], {'is_const': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_static': True}),
        ('IsChecksumOk', 'bool', [], {'is_const': True}),
        ('IsDontFragment', 'bool', [], {'is_const': True}),
        ('IsLastFragment', 'bool', [], {'is_const': True}),
        ('Print', 'void', [param('std::ostream &', 'os')],
         {'is_const': True, 'is_virtual': True}),
        ('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')],
         {'is_const': True, 'is_virtual': True}),
        ('SetDestination', 'void', [param('ns3::Ipv4Address', 'destination')], {}),
        ('SetDontFragment', 'void', [], {}),
        ('SetDscp', 'void', [param('ns3::Ipv4Header::DscpType', 'dscp')], {}),
        ('SetEcn', 'void', [param('ns3::Ipv4Header::EcnType', 'ecn')], {}),
        ('SetFragmentOffset', 'void', [param('uint16_t', 'offsetBytes')], {}),
        ('SetIdentification', 'void', [param('uint16_t', 'identification')], {}),
        ('SetLastFragment', 'void', [], {}),
        ('SetMayFragment', 'void', [], {}),
        ('SetMoreFragments', 'void', [], {}),
        ('SetPayloadSize', 'void', [param('uint16_t', 'size')], {}),
        ('SetProtocol', 'void', [param('uint8_t', 'num')], {}),
        ('SetSource', 'void', [param('ns3::Ipv4Address', 'source')], {}),
        ('SetTos', 'void', [param('uint8_t', 'tos')], {}),
        ('SetTtl', 'void', [param('uint8_t', 'ttl')], {}),
    ]
    for method_name, return_type, params, extra in member_functions:
        cls.add_method(method_name, return_type, params, **extra)
    return
def register_Ns3Ipv6Header_methods(root_module, cls):
    """Register Python wrappers for ns3::Ipv6Header (ipv6-header.h, module 'internet').

    Adds both constructors, the ns3::Header virtual interface
    (Deserialize/GetInstanceTypeId/GetSerializedSize/Print/Serialize),
    and the field accessors/mutators for the fixed IPv6 header.
    """
    # Copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')])
    cls.add_constructor([])
    # uint32_t Deserialize(ns3::Buffer::Iterator start) [virtual]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    # Const getters for every header field.
    cls.add_method('GetDestinationAddress', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetFlowLabel', 'uint32_t', [], is_const=True)
    cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetNextHeader', 'uint8_t', [], is_const=True)
    cls.add_method('GetPayloadLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetSourceAddress', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetTrafficClass', 'uint8_t', [], is_const=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Remaining ns3::Header virtuals.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    # Setters for every header field.
    cls.add_method('SetDestinationAddress', 'void', [param('ns3::Ipv6Address', 'dst')])
    cls.add_method('SetFlowLabel', 'void', [param('uint32_t', 'flow')])
    cls.add_method('SetHopLimit', 'void', [param('uint8_t', 'limit')])
    cls.add_method('SetNextHeader', 'void', [param('uint8_t', 'next')])
    cls.add_method('SetPayloadLength', 'void', [param('uint16_t', 'len')])
    cls.add_method('SetSourceAddress', 'void', [param('ns3::Ipv6Address', 'src')])
    cls.add_method('SetTrafficClass', 'void', [param('uint8_t', 'traffic')])
    return
def register_Ns3Object_methods(root_module, cls):
    """Register Python wrappers for ns3::Object (object.h, module 'core').

    Public API first (constructor, aggregation, lifecycle), then the
    protected copy constructor and the protected virtual hooks
    (DoDispose/DoInitialize/NotifyNewAggregate).
    """
    cls.add_constructor([])
    # Object aggregation support.
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    cls.add_method('Dispose', 'void', [])
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Initialize', 'void', [])
    # Protected copy constructor.
    cls.add_constructor([param('ns3::Object const &', 'o')],
                        visibility='protected')
    # Protected virtual lifecycle hooks for subclasses.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register Python wrappers for ns3::Object::AggregateIterator (object.h).

    Exposes both constructors plus the HasNext/Next iteration pair.
    """
    # Copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # bool HasNext() const / Ptr<Object const> Next()
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3RandomVariableStream_methods(root_module, cls):
    """Register Python wrappers for ns3::RandomVariableStream
    (random-variable-stream.h, module 'core').

    Covers stream/antithetic configuration, the pure-virtual
    GetValue/GetInteger draw methods, and the protected Peek accessor.
    """
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Stream-number configuration.
    cls.add_method('SetStream', 'void', [param('int64_t', 'stream')])
    cls.add_method('GetStream', 'int64_t', [], is_const=True)
    # Antithetic-variate flag.
    cls.add_method('SetAntithetic', 'void', [param('bool', 'isAntithetic')])
    cls.add_method('IsAntithetic', 'bool', [], is_const=True)
    # Pure-virtual draws implemented by concrete distributions.
    cls.add_method('GetValue', 'double', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_pure_virtual=True, is_virtual=True)
    # Protected access to the underlying RngStream.
    cls.add_method('Peek', 'ns3::RngStream *', [], is_const=True, visibility='protected')
    return
def register_Ns3SequentialRandomVariable_methods(root_module, cls):
    """Register Python wrappers for ns3::SequentialRandomVariable
    (random-variable-stream.h, module 'core').

    Exposes the distribution parameters (min/max/increment/consecutive)
    and the virtual GetValue/GetInteger overrides.
    """
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter getters.
    cls.add_method('GetMin', 'double', [], is_const=True)
    cls.add_method('GetMax', 'double', [], is_const=True)
    cls.add_method('GetIncrement', 'ns3::Ptr< ns3::RandomVariableStream >', [], is_const=True)
    cls.add_method('GetConsecutive', 'uint32_t', [], is_const=True)
    # Virtual draw overrides.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeAccessor> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeChecker> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeValue> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::CallbackImplBase> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::EventImpl> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::Ipv4MulticastRoute> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4MulticastRoute > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::Ipv4Route> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4Route > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::NixVector> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::OutputStreamWrapper> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::Packet> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::TraceSourceAccessor> template instantiation."""
    # Default and copy constructors of the ref-count base.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3Socket_methods(root_module, cls):
## socket.h (module 'network'): ns3::Socket::Socket(ns3::Socket const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Socket const &', 'arg0')])
## socket.h (module 'network'): ns3::Socket::Socket() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): int ns3::Socket::Bind(ns3::Address const & address) [member function]
cls.add_method('Bind',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind() [member function]
cls.add_method('Bind',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind6() [member function]
cls.add_method('Bind6',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::BindToNetDevice(ns3::Ptr<ns3::NetDevice> netdevice) [member function]
cls.add_method('BindToNetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'netdevice')],
is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Close() [member function]
cls.add_method('Close',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Connect(ns3::Address const & address) [member function]
cls.add_method('Connect',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): static ns3::Ptr<ns3::Socket> ns3::Socket::CreateSocket(ns3::Ptr<ns3::Node> node, ns3::TypeId tid) [member function]
cls.add_method('CreateSocket',
'ns3::Ptr< ns3::Socket >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::GetAllowBroadcast() const [member function]
cls.add_method('GetAllowBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Socket::GetBoundNetDevice() [member function]
cls.add_method('GetBoundNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[])
## socket.h (module 'network'): ns3::Socket::SocketErrno ns3::Socket::GetErrno() const [member function]
cls.add_method('GetErrno',
'ns3::Socket::SocketErrno',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTos() const [member function]
cls.add_method('GetIpTos',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTtl() const [member function]
cls.add_method('GetIpTtl',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6HopLimit() const [member function]
cls.add_method('GetIpv6HopLimit',
'uint8_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6Tclass() const [member function]
cls.add_method('GetIpv6Tclass',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Socket::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetRxAvailable() const [member function]
cls.add_method('GetRxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::GetSockName(ns3::Address & address) const [member function]
cls.add_method('GetSockName',
'int',
[param('ns3::Address &', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Socket::SocketType ns3::Socket::GetSocketType() const [member function]
cls.add_method('GetSocketType',
'ns3::Socket::SocketType',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetTxAvailable() const [member function]
cls.add_method('GetTxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::Socket::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTos() const [member function]
cls.add_method('IsIpRecvTos',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTtl() const [member function]
cls.add_method('IsIpRecvTtl',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvHopLimit() const [member function]
cls.add_method('IsIpv6RecvHopLimit',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvTclass() const [member function]
cls.add_method('IsIpv6RecvTclass',
'bool',
[],
is_const=True)
## socket.h (module 'network'): bool ns3::Socket::IsRecvPktInfo() const [member function]
cls.add_method('IsRecvPktInfo',
'bool',
[],
is_const=True)
## socket.h (module 'network'): int ns3::Socket::Listen() [member function]
cls.add_method('Listen',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv(uint32_t maxSize, uint32_t flags) [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv() [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[])
## socket.h (module 'network'): int ns3::Socket::Recv(uint8_t * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Recv',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(uint32_t maxSize, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::RecvFrom(uint8_t * buf, uint32_t size, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p')])
## socket.h (module 'network'): int ns3::Socket::Send(uint8_t const * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): int ns3::Socket::SendTo(ns3::Ptr<ns3::Packet> p, uint32_t flags, ns3::Address const & toAddress) [member function]
cls.add_method('SendTo',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::SendTo(uint8_t const * buf, uint32_t size, uint32_t flags, ns3::Address const & address) [member function]
cls.add_method('SendTo',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')])
## socket.h (module 'network'): void ns3::Socket::SetAcceptCallback(ns3::Callback<bool, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionRequest, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> newConnectionCreated) [member function]
cls.add_method('SetAcceptCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionRequest'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'newConnectionCreated')])
## socket.h (module 'network'): bool ns3::Socket::SetAllowBroadcast(bool allowBroadcast) [member function]
cls.add_method('SetAllowBroadcast',
'bool',
[param('bool', 'allowBroadcast')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetCloseCallbacks(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> normalClose, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> errorClose) [member function]
cls.add_method('SetCloseCallbacks',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'normalClose'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'errorClose')])
## socket.h (module 'network'): void ns3::Socket::SetConnectCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionSucceeded, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionFailed) [member function]
cls.add_method('SetConnectCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionSucceeded'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionFailed')])
## socket.h (module 'network'): void ns3::Socket::SetDataSentCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> dataSent) [member function]
cls.add_method('SetDataSentCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'dataSent')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTos(bool ipv4RecvTos) [member function]
cls.add_method('SetIpRecvTos',
'void',
[param('bool', 'ipv4RecvTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpRecvTtl(bool ipv4RecvTtl) [member function]
cls.add_method('SetIpRecvTtl',
'void',
[param('bool', 'ipv4RecvTtl')])
## socket.h (module 'network'): void ns3::Socket::SetIpTos(uint8_t ipTos) [member function]
cls.add_method('SetIpTos',
'void',
[param('uint8_t', 'ipTos')])
## socket.h (module 'network'): void ns3::Socket::SetIpTtl(uint8_t ipTtl) [member function]
cls.add_method('SetIpTtl',
'void',
[param('uint8_t', 'ipTtl')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6HopLimit(uint8_t ipHopLimit) [member function]
cls.add_method('SetIpv6HopLimit',
'void',
[param('uint8_t', 'ipHopLimit')],
is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvHopLimit(bool ipv6RecvHopLimit) [member function]
cls.add_method('SetIpv6RecvHopLimit',
'void',
[param('bool', 'ipv6RecvHopLimit')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvTclass(bool ipv6RecvTclass) [member function]
cls.add_method('SetIpv6RecvTclass',
'void',
[param('bool', 'ipv6RecvTclass')])
## socket.h (module 'network'): void ns3::Socket::SetIpv6Tclass(int ipTclass) [member function]
cls.add_method('SetIpv6Tclass',
'void',
[param('int', 'ipTclass')])
## socket.h (module 'network'): void ns3::Socket::SetRecvCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> arg0) [member function]
cls.add_method('SetRecvCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
## socket.h (module 'network'): void ns3::Socket::SetRecvPktInfo(bool flag) [member function]
cls.add_method('SetRecvPktInfo',
'void',
[param('bool', 'flag')])
## socket.h (module 'network'): void ns3::Socket::SetSendCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> sendCb) [member function]
cls.add_method('SetSendCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'sendCb')])
## socket.h (module 'network'): int ns3::Socket::ShutdownRecv() [member function]
cls.add_method('ShutdownRecv',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::ShutdownSend() [member function]
cls.add_method('ShutdownSend',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## socket.h (module 'network'): bool ns3::Socket::IsManualIpTos() const [member function]
cls.add_method('IsManualIpTos',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpTtl() const [member function]
cls.add_method('IsManualIpTtl',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6HopLimit() const [member function]
cls.add_method('IsManualIpv6HopLimit',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6Tclass() const [member function]
cls.add_method('IsManualIpv6Tclass',
'bool',
[],
is_const=True, visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionFailed() [member function]
cls.add_method('NotifyConnectionFailed',
'void',
[],
visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::NotifyConnectionRequest(ns3::Address const & from) [member function]
cls.add_method('NotifyConnectionRequest',
'bool',
[param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionSucceeded() [member function]
cls.add_method('NotifyConnectionSucceeded',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataRecv() [member function]
cls.add_method('NotifyDataRecv',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataSent(uint32_t size) [member function]
cls.add_method('NotifyDataSent',
'void',
[param('uint32_t', 'size')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyErrorClose() [member function]
cls.add_method('NotifyErrorClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNewConnectionCreated(ns3::Ptr<ns3::Socket> socket, ns3::Address const & from) [member function]
cls.add_method('NotifyNewConnectionCreated',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNormalClose() [member function]
cls.add_method('NotifyNormalClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifySend(uint32_t spaceAvailable) [member function]
cls.add_method('NotifySend',
'void',
[param('uint32_t', 'spaceAvailable')],
visibility='protected')
return
def register_Ns3SocketAddressTag_methods(root_module, cls):
    """Register pybindgen bindings for ns3::SocketAddressTag (socket.h, module 'network')."""
    method = cls.add_method
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::SocketAddressTag const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Tag interface overrides plus the address accessor pair.
    method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    method('GetAddress', 'ns3::Address', [], is_const=True)
    method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    method('SetAddress', 'void', [param('ns3::Address', 'addr')])
    return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
    """Register pybindgen bindings for ns3::SocketIpTosTag (socket.h, module 'network')."""
    method = cls.add_method
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::SocketIpTosTag const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Tag interface overrides plus the TOS accessor pair.
    method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    method('GetTos', 'uint8_t', [], is_const=True)
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    method('SetTos', 'void', [param('uint8_t', 'tos')])
    return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
    """Register pybindgen bindings for ns3::SocketIpTtlTag (socket.h, module 'network')."""
    method = cls.add_method
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Tag interface overrides plus the TTL accessor pair.
    method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    method('GetTtl', 'uint8_t', [], is_const=True)
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    method('SetTtl', 'void', [param('uint8_t', 'ttl')])
    return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
    """Register pybindgen bindings for ns3::SocketIpv6HopLimitTag (socket.h, module 'network')."""
    method = cls.add_method
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::SocketIpv6HopLimitTag const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Tag interface overrides plus the hop-limit accessor pair.
    method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    method('GetHopLimit', 'uint8_t', [], is_const=True)
    method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    method('SetHopLimit', 'void', [param('uint8_t', 'hopLimit')])
    return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
    """Register pybindgen bindings for ns3::SocketIpv6TclassTag (socket.h, module 'network')."""
    method = cls.add_method
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::SocketIpv6TclassTag const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Tag interface overrides plus the traffic-class accessor pair.
    method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    method('GetTclass', 'uint8_t', [], is_const=True)
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    method('SetTclass', 'void', [param('uint8_t', 'tclass')])
    return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
    """Register pybindgen bindings for ns3::SocketSetDontFragmentTag (socket.h, module 'network')."""
    method = cls.add_method
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Tag interface overrides plus the enable/disable/query trio.
    method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    method('Disable', 'void', [])
    method('Enable', 'void', [])
    method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    method('IsEnabled', 'bool', [], is_const=True)
    method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register pybindgen bindings for ns3::Time (nstime.h, module 'core')."""
    time_cls = root_module['ns3::Time']
    # Arithmetic, in-place, comparison and stream operators, in the
    # generator's original registration order.
    cls.add_binary_numeric_operator('+', time_cls, time_cls, param('ns3::Time const &', 'right'))
    cls.add_binary_numeric_operator('-', time_cls, time_cls, param('ns3::Time const &', 'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    # Constructors: default, copy, one per numeric width, string, int64x64_t.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'o')])
    for numeric_type in ('double', 'int', 'long int', 'long long int',
                         'unsigned int', 'long unsigned int',
                         'long long unsigned int'):
        cls.add_constructor([param(numeric_type, 'v')])
    cls.add_constructor([param('std::string const &', 's')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
    method = cls.add_method
    method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    # Static factory helpers converting from other representations.
    method('From', 'ns3::Time',
           [param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
           is_static=True)
    method('From', 'ns3::Time',
           [param('ns3::int64x64_t const &', 'value')],
           is_static=True)
    method('FromDouble', 'ns3::Time',
           [param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
           is_static=True)
    method('FromInteger', 'ns3::Time',
           [param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
           is_static=True)
    # Const accessors.
    method('GetDouble', 'double', [], is_const=True)
    for int64_getter in ('GetFemtoSeconds', 'GetInteger', 'GetMicroSeconds',
                         'GetMilliSeconds', 'GetNanoSeconds', 'GetPicoSeconds'):
        method(int64_getter, 'int64_t', [], is_const=True)
    method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    method('GetSeconds', 'double', [], is_const=True)
    method('GetTimeStep', 'int64_t', [], is_const=True)
    # Sign / zero predicates.
    for predicate in ('IsNegative', 'IsPositive', 'IsStrictlyNegative',
                      'IsStrictlyPositive', 'IsZero'):
        method(predicate, 'bool', [], is_const=True)
    method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    # Unit conversions.
    method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    method('ToDouble', 'double', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register pybindgen bindings for ns3::TraceSourceAccessor (trace-source-accessor.h, module 'core')."""
    # Constructors: copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    cls.add_constructor([])
    # All four connect/disconnect entry points share the same pure-virtual
    # const signature flags.
    abstract_flags = dict(is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Connect', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False),
                    param('std::string', 'context'),
                    param('ns3::CallbackBase const &', 'cb')],
                   **abstract_flags)
    cls.add_method('ConnectWithoutContext', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False),
                    param('ns3::CallbackBase const &', 'cb')],
                   **abstract_flags)
    cls.add_method('Disconnect', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False),
                    param('std::string', 'context'),
                    param('ns3::CallbackBase const &', 'cb')],
                   **abstract_flags)
    cls.add_method('DisconnectWithoutContext', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False),
                    param('ns3::CallbackBase const &', 'cb')],
                   **abstract_flags)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register pybindgen bindings for the abstract base ns3::Trailer (trailer.h, module 'network')."""
    cls.add_output_stream_operator()
    # Constructors: default constructor, then the copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    # Pure-virtual chunk interface; GetTypeId is the only concrete member.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TriangularRandomVariable_methods(root_module, cls):
    """Register pybindgen bindings for ns3::TriangularRandomVariable (random-variable-stream.h, module 'core')."""
    method = cls.add_method
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Accessors for the configured distribution parameters.
    method('GetMean', 'double', [], is_const=True)
    method('GetMin', 'double', [], is_const=True)
    method('GetMax', 'double', [], is_const=True)
    # Draws with explicit parameters.
    method('GetValue', 'double',
           [param('double', 'mean'), param('double', 'min'), param('double', 'max')])
    method('GetInteger', 'uint32_t',
           [param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
    # Parameterless draws overriding RandomVariableStream.
    method('GetValue', 'double', [], is_virtual=True)
    method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3UniformRandomVariable_methods(root_module, cls):
    """Register pybindgen bindings for ns3::UniformRandomVariable (random-variable-stream.h, module 'core')."""
    method = cls.add_method
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Accessors for the configured range.
    method('GetMin', 'double', [], is_const=True)
    method('GetMax', 'double', [], is_const=True)
    # Draws with an explicit range.
    method('GetValue', 'double',
           [param('double', 'min'), param('double', 'max')])
    method('GetInteger', 'uint32_t',
           [param('uint32_t', 'min'), param('uint32_t', 'max')])
    # Parameterless draws overriding RandomVariableStream.
    method('GetValue', 'double', [], is_virtual=True)
    method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3WeibullRandomVariable_methods(root_module, cls):
    """Register pybindgen bindings for ns3::WeibullRandomVariable (random-variable-stream.h, module 'core')."""
    method = cls.add_method
    method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Accessors for the configured distribution parameters.
    method('GetScale', 'double', [], is_const=True)
    method('GetShape', 'double', [], is_const=True)
    method('GetBound', 'double', [], is_const=True)
    # Draws with explicit parameters.
    method('GetValue', 'double',
           [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    method('GetInteger', 'uint32_t',
           [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    # Parameterless draws overriding RandomVariableStream.
    method('GetValue', 'double', [], is_virtual=True)
    method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ZetaRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ZetaRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const getter for the alpha parameter.
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    # Draws with an explicit alpha.
    cls.add_method('GetValue', 'double', [param('double', 'alpha')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha')])
    # Virtual draws using the attribute-configured parameter.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ZipfRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ZipfRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const getters for the configured distribution parameters.
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    # Draws with explicit parameters.
    cls.add_method('GetValue', 'double',
                   [param('uint32_t', 'n'), param('double', 'alpha')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'n'), param('uint32_t', 'alpha')])
    # Virtual draws using the attribute-configured parameters.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::AttributeAccessor (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])  # copy constructor
    cls.add_constructor([])
    # Pure-virtual accessor interface.
    cls.add_method('Get', 'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Set', 'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::AttributeChecker (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])  # copy constructor
    cls.add_constructor([])
    # Pure-virtual validation/copy/creation interface.
    cls.add_method('Check', 'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Copy', 'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Non-virtual convenience: create a value validated against this checker.
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    # Type-information queries.
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetValueTypeName', 'std::string', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::AttributeValue (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])  # copy constructor
    cls.add_constructor([])
    # Pure-virtual clone/serialization interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackChecker (callback.h, module 'core'): default and copy constructors only."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])  # copy constructor
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::CallbackImplBase (callback.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])  # copy constructor
    # Pure-virtual equality check against another callback implementation.
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackValue (callback.h, module 'core')."""
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])  # copy constructor
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    # AttributeValue interface overrides.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # Setter for the wrapped callback.
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3ConstantRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ConstantRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const getter for the configured constant.
    cls.add_method('GetConstant', 'double', [], is_const=True)
    # Draws with an explicit constant.
    cls.add_method('GetValue', 'double', [param('double', 'constant')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'constant')])
    # Virtual draws using the attribute-configured constant.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::DeterministicRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Configure the sequence of values the stream cycles through.
    cls.add_method('SetValueArray', 'void',
                   [param('double *', 'values'), param('uint64_t', 'length')])
    # Virtual draws.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::EmpiricalRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_constructor([])
    # Add one (value, cumulative-probability) point to the empirical CDF.
    cls.add_method('CDF', 'void',
                   [param('double', 'v'), param('double', 'c')])
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    # Private virtual hooks.
    cls.add_method('Interpolate', 'double',
                   [param('double', 'arg0'), param('double', 'arg1'), param('double', 'arg2'), param('double', 'arg3'), param('double', 'arg4')],
                   visibility='private', is_virtual=True)
    cls.add_method('Validate', 'void', [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register Python bindings for ns3::EmptyAttributeValue (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])  # copy constructor
    cls.add_constructor([])
    # AttributeValue overrides; all private in the C++ class.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, visibility='private', is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3ErlangRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ErlangRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const getters for the configured distribution parameters.
    cls.add_method('GetK', 'uint32_t', [], is_const=True)
    cls.add_method('GetLambda', 'double', [], is_const=True)
    # Draws with explicit parameters.
    cls.add_method('GetValue', 'double',
                   [param('uint32_t', 'k'), param('double', 'lambda')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'k'), param('uint32_t', 'lambda')])
    # Virtual draws using the attribute-configured parameters.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3EventImpl_methods(root_module, cls):
    """Register Python bindings for ns3::EventImpl (event-impl.h, module 'core')."""
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])  # copy constructor
    cls.add_constructor([])
    # Public event-control interface.
    cls.add_method('Cancel', 'void', [])
    cls.add_method('Invoke', 'void', [])
    cls.add_method('IsCancelled', 'bool', [])
    # Protected pure-virtual hook invoked when the event fires.
    cls.add_method('Notify', 'void', [],
                   is_pure_virtual=True, visibility='protected', is_virtual=True)
    return
def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ExponentialRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const getters for the configured distribution parameters.
    cls.add_method('GetMean', 'double', [], is_const=True)
    cls.add_method('GetBound', 'double', [], is_const=True)
    # Draws with explicit parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'mean'), param('double', 'bound')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'bound')])
    # Virtual draws using the attribute-configured parameters.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3GammaRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::GammaRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const getters for the configured distribution parameters.
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    cls.add_method('GetBeta', 'double', [], is_const=True)
    # Draws with explicit parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'alpha'), param('double', 'beta')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
    # Virtual draws using the attribute-configured parameters.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3IpL4Protocol_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::IpL4Protocol (ip-l4-protocol.h, module 'internet')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::IpL4Protocol const &', 'arg0')])  # copy constructor
    # Pure-virtual accessors for the IPv4/IPv6 down-target callbacks.
    cls.add_method('GetDownTarget',
                   'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetDownTarget6',
                   'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetProtocolNumber', 'int', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Packet reception entry points, one overload per IP version.
    cls.add_method('Receive', 'ns3::IpL4Protocol::RxStatus',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::Ipv4Interface >', 'incomingInterface')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('Receive', 'ns3::IpL4Protocol::RxStatus',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6Header const &', 'header'), param('ns3::Ptr< ns3::Ipv6Interface >', 'incomingInterface')],
                   is_pure_virtual=True, is_virtual=True)
    # ICMP notification hooks, one overload per IP version.
    cls.add_method('ReceiveIcmp', 'void',
                   [param('ns3::Ipv4Address', 'icmpSource'), param('uint8_t', 'icmpTtl'), param('uint8_t', 'icmpType'), param('uint8_t', 'icmpCode'), param('uint32_t', 'icmpInfo'), param('ns3::Ipv4Address', 'payloadSource'), param('ns3::Ipv4Address', 'payloadDestination'), param('uint8_t const *', 'payload')],
                   is_virtual=True)
    cls.add_method('ReceiveIcmp', 'void',
                   [param('ns3::Ipv6Address', 'icmpSource'), param('uint8_t', 'icmpTtl'), param('uint8_t', 'icmpType'), param('uint8_t', 'icmpCode'), param('uint32_t', 'icmpInfo'), param('ns3::Ipv6Address', 'payloadSource'), param('ns3::Ipv6Address', 'payloadDestination'), param('uint8_t const *', 'payload')],
                   is_virtual=True)
    # Pure-virtual setters for the down-target callbacks.
    cls.add_method('SetDownTarget', 'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetDownTarget6', 'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Ipv4_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Ipv4 API (ipv4.h, module 'internet')."""
    cls.add_constructor([param('ns3::Ipv4 const &', 'arg0')])  # copy constructor
    cls.add_constructor([])
    # Interface and address management (pure virtual).
    cls.add_method('AddAddress', 'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('AddInterface', 'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_pure_virtual=True, is_virtual=True)
    # Raw socket lifecycle.
    cls.add_method('CreateRawSocket', 'ns3::Ptr< ns3::Socket >', [],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('DeleteRawSocket', 'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_pure_virtual=True, is_virtual=True)
    # Queries (pure virtual, const).
    cls.add_method('GetAddress', 'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetInterfaceForAddress', 'int32_t',
                   [param('ns3::Ipv4Address', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetInterfaceForDevice', 'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetInterfaceForPrefix', 'int32_t',
                   [param('ns3::Ipv4Address', 'address'), param('ns3::Ipv4Mask', 'mask')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMetric', 'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetMtu', 'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetNAddresses', 'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetNInterfaces', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetNetDevice', 'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetProtocol', 'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetRoutingProtocol', 'ns3::Ptr< ns3::Ipv4RoutingProtocol >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Insert', 'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('IsDestinationAddress', 'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('IsForwarding', 'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('IsUp', 'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('RemoveAddress', 'bool',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SelectSourceAddress', 'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_pure_virtual=True, is_virtual=True)
    # Packet transmission.
    cls.add_method('Send', 'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SendWithHeader', 'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    # State mutation (pure virtual).
    cls.add_method('SetDown', 'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetForwarding', 'void',
                   [param('uint32_t', 'interface'), param('bool', 'val')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetMetric', 'void',
                   [param('uint32_t', 'interface'), param('uint16_t', 'metric')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetRoutingProtocol', 'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetUp', 'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    # Class constant.
    cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
    # Private virtual accessors backing the IpForward / WeakEsModel attributes.
    cls.add_method('GetIpForward', 'bool', [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    cls.add_method('GetWeakEsModel', 'bool', [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    cls.add_method('SetIpForward', 'void',
                   [param('bool', 'forward')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    cls.add_method('SetWeakEsModel', 'void',
                   [param('bool', 'model')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Bind ns3::Ipv4AddressChecker (ipv4-address.h, 'network' module).

    The checker type exposes only its default and copy constructors.
    """
    # ns3::Ipv4AddressChecker() [default constructor]
    cls.add_constructor([])
    # ns3::Ipv4AddressChecker(Ipv4AddressChecker const &) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Bind ns3::Ipv4AddressValue (ipv4-address.h, 'network' module).

    Registers the three constructors plus the AttributeValue interface
    (Copy/SerializeToString/DeserializeFromString) and the typed
    Get/Set accessors.
    """
    # Constructors: default, copy, and from an Ipv4Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    # AttributeValue virtual interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # Typed value accessors.
    cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')])
def register_Ns3Ipv4Interface_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Interface on *cls*.

    NOTE(review): this registration code appears machine-generated
    (pybindgen style) — prefer regenerating over hand-editing. Signatures
    mirror ipv4-interface.h in the 'internet' module; the ``##`` comment
    above each call records the scanned C++ declaration.
    """
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface(ns3::Ipv4Interface const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Interface const &', 'arg0')])
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface() [constructor]
    cls.add_constructor([])
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::AddAddress(ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress', 
                   'bool', 
                   [param('ns3::Ipv4InterfaceAddress', 'address')])
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::GetAddress(uint32_t index) const [member function]
    cls.add_method('GetAddress', 
                   'ns3::Ipv4InterfaceAddress', 
                   [param('uint32_t', 'index')], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::ArpCache> ns3::Ipv4Interface::GetArpCache() const [member function]
    cls.add_method('GetArpCache', 
                   'ns3::Ptr< ns3::ArpCache >', 
                   [], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4Interface::GetDevice() const [member function]
    cls.add_method('GetDevice', 
                   'ns3::Ptr< ns3::NetDevice >', 
                   [], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): uint16_t ns3::Ipv4Interface::GetMetric() const [member function]
    cls.add_method('GetMetric', 
                   'uint16_t', 
                   [], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): uint32_t ns3::Ipv4Interface::GetNAddresses() const [member function]
    cls.add_method('GetNAddresses', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): static ns3::TypeId ns3::Ipv4Interface::GetTypeId() [member function]
    cls.add_method('GetTypeId', 
                   'ns3::TypeId', 
                   [], 
                   is_static=True)
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsDown() const [member function]
    cls.add_method('IsDown', 
                   'bool', 
                   [], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsForwarding() const [member function]
    cls.add_method('IsForwarding', 
                   'bool', 
                   [], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsUp() const [member function]
    cls.add_method('IsUp', 
                   'bool', 
                   [], 
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::RemoveAddress(uint32_t index) [member function]
    cls.add_method('RemoveAddress', 
                   'ns3::Ipv4InterfaceAddress', 
                   [param('uint32_t', 'index')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::Send(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Address dest) [member function]
    cls.add_method('Send', 
                   'void', 
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Address', 'dest')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetArpCache(ns3::Ptr<ns3::ArpCache> arg0) [member function]
    cls.add_method('SetArpCache', 
                   'void', 
                   [param('ns3::Ptr< ns3::ArpCache >', 'arg0')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('SetDevice', 
                   'void', 
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDown() [member function]
    cls.add_method('SetDown', 
                   'void', 
                   [])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetForwarding(bool val) [member function]
    cls.add_method('SetForwarding', 
                   'void', 
                   [param('bool', 'val')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetMetric(uint16_t metric) [member function]
    cls.add_method('SetMetric', 
                   'void', 
                   [param('uint16_t', 'metric')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode', 
                   'void', 
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetUp() [member function]
    cls.add_method('SetUp', 
                   'void', 
                   [])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::DoDispose() [member function]
    cls.add_method('DoDispose', 
                   'void', 
                   [], 
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv4L3Protocol_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4L3Protocol on *cls*.

    NOTE(review): this registration code appears machine-generated
    (pybindgen style) — prefer regenerating over hand-editing. Signatures
    mirror ipv4-l3-protocol.h in the 'internet' module; the ``##`` comment
    above each call records the scanned C++ declaration. The trailing
    private virtual Get/Set pairs implement the ns3::Ipv4 base-class
    attribute hooks (IpForward / WeakEsModel).
    """
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::Ipv4L3Protocol() [constructor]
    cls.add_constructor([])
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::AddAddress(uint32_t i, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress', 
                   'bool', 
                   [param('uint32_t', 'i'), param('ns3::Ipv4InterfaceAddress', 'address')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface', 
                   'uint32_t', 
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4L3Protocol::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket', 
                   'ns3::Ptr< ns3::Socket >', 
                   [], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket', 
                   'void', 
                   [param('ns3::Ptr< ns3::Socket >', 'socket')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress', 
                   'ns3::Ipv4InterfaceAddress', 
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Interface> ns3::Ipv4L3Protocol::GetInterface(uint32_t i) const [member function]
    cls.add_method('GetInterface', 
                   'ns3::Ptr< ns3::Ipv4Interface >', 
                   [param('uint32_t', 'i')], 
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForAddress(ns3::Ipv4Address addr) const [member function]
    cls.add_method('GetInterfaceForAddress', 
                   'int32_t', 
                   [param('ns3::Ipv4Address', 'addr')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice', 
                   'int32_t', 
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForPrefix(ns3::Ipv4Address addr, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix', 
                   'int32_t', 
                   [param('ns3::Ipv4Address', 'addr'), param('ns3::Ipv4Mask', 'mask')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMetric(uint32_t i) const [member function]
    cls.add_method('GetMetric', 
                   'uint16_t', 
                   [param('uint32_t', 'i')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMtu(uint32_t i) const [member function]
    cls.add_method('GetMtu', 
                   'uint16_t', 
                   [param('uint32_t', 'i')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses', 
                   'uint32_t', 
                   [param('uint32_t', 'interface')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces', 
                   'uint32_t', 
                   [], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4L3Protocol::GetNetDevice(uint32_t i) [member function]
    cls.add_method('GetNetDevice', 
                   'ns3::Ptr< ns3::NetDevice >', 
                   [param('uint32_t', 'i')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol', 
                   'ns3::Ptr< ns3::IpL4Protocol >', 
                   [param('int', 'protocolNumber')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4L3Protocol::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol', 
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >', 
                   [], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4L3Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId', 
                   'ns3::TypeId', 
                   [], 
                   is_static=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert', 
                   'void', 
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsForwarding(uint32_t i) const [member function]
    cls.add_method('IsForwarding', 
                   'bool', 
                   [param('uint32_t', 'i')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUp(uint32_t i) const [member function]
    cls.add_method('IsUp', 
                   'bool', 
                   [param('uint32_t', 'i')], 
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<ns3::Packet const> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive', 
                   'void', 
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove', 
                   'void', 
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')])
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress', 
                   'bool', 
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress', 
                   'ns3::Ipv4Address', 
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send', 
                   'void', 
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader', 
                   'void', 
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
    cls.add_method('SetDefaultTtl', 
                   'void', 
                   [param('uint8_t', 'ttl')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDown(uint32_t i) [member function]
    cls.add_method('SetDown', 
                   'void', 
                   [param('uint32_t', 'i')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
    cls.add_method('SetForwarding', 
                   'void', 
                   [param('uint32_t', 'i'), param('bool', 'val')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
    cls.add_method('SetMetric', 
                   'void', 
                   [param('uint32_t', 'i'), param('uint16_t', 'metric')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode', 
                   'void', 
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol', 
                   'void', 
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetUp(uint32_t i) [member function]
    cls.add_method('SetUp', 
                   'void', 
                   [param('uint32_t', 'i')], 
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::PROT_NUMBER [variable]
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DoDispose() [member function]
    cls.add_method('DoDispose', 
                   'void', 
                   [], 
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate', 
                   'void', 
                   [], 
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetIpForward() const [member function]
    cls.add_method('GetIpForward', 
                   'bool', 
                   [], 
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel', 
                   'bool', 
                   [], 
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward', 
                   'void', 
                   [param('bool', 'forward')], 
                   visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel', 
                   'void', 
                   [param('bool', 'model')], 
                   visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Bind ns3::Ipv4MaskChecker (ipv4-address.h, 'network' module).

    The checker type exposes only its default and copy constructors.
    """
    # ns3::Ipv4MaskChecker() [default constructor]
    cls.add_constructor([])
    # ns3::Ipv4MaskChecker(Ipv4MaskChecker const &) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Bind ns3::Ipv4MaskValue (ipv4-address.h, 'network' module).

    Registers the three constructors plus the AttributeValue interface
    (Copy/SerializeToString/DeserializeFromString) and the typed
    Get/Set accessors.
    """
    # Constructors: default, copy, and from an Ipv4Mask.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    # AttributeValue virtual interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # Typed value accessors.
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
    """Bind ns3::Ipv4MulticastRoute (ipv4-route.h, 'internet' module)."""
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::Ipv4MulticastRoute const &', 'arg0')])
    cls.add_constructor([])
    # Read accessors.
    cls.add_method('GetGroup', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetOrigin', 'ns3::Ipv4Address', [], is_const=True)
    # GetOutputTtl is flagged deprecated upstream; GetOutputTtlMap replaces it.
    cls.add_method('GetOutputTtl', 'uint32_t', [param('uint32_t', 'oif')],
                   deprecated=True)
    cls.add_method('GetOutputTtlMap', 'std::map< unsigned int, unsigned int >', [],
                   is_const=True)
    cls.add_method('GetParent', 'uint32_t', [], is_const=True)
    # Mutators.
    cls.add_method('SetGroup', 'void', [param('ns3::Ipv4Address const', 'group')])
    cls.add_method('SetOrigin', 'void', [param('ns3::Ipv4Address const', 'origin')])
    cls.add_method('SetOutputTtl', 'void',
                   [param('uint32_t', 'oif'), param('uint32_t', 'ttl')])
    cls.add_method('SetParent', 'void', [param('uint32_t', 'iif')])
    # Static class constants.
    cls.add_static_attribute('MAX_INTERFACES', 'uint32_t const', is_const=True)
    cls.add_static_attribute('MAX_TTL', 'uint32_t const', is_const=True)
def register_Ns3Ipv4Route_methods(root_module, cls):
    """Bind ns3::Ipv4Route (ipv4-route.h, 'internet' module).

    Registers operator<< support, the two constructors, and the
    destination/gateway/source/output-device accessor pairs.
    """
    cls.add_output_stream_operator()
    # Constructors: copy first, then default (registration order preserved).
    cls.add_constructor([param('ns3::Ipv4Route const &', 'arg0')])
    cls.add_constructor([])
    # Getters (all const).
    cls.add_method('GetDestination', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetGateway', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetOutputDevice', 'ns3::Ptr< ns3::NetDevice >', [], is_const=True)
    cls.add_method('GetSource', 'ns3::Ipv4Address', [], is_const=True)
    # Setters.
    cls.add_method('SetDestination', 'void', [param('ns3::Ipv4Address', 'dest')])
    cls.add_method('SetGateway', 'void', [param('ns3::Ipv4Address', 'gw')])
    cls.add_method('SetOutputDevice', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'outputDevice')])
    cls.add_method('SetSource', 'void', [param('ns3::Ipv4Address', 'src')])
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Ipv4RoutingProtocol on *cls*.

    NOTE(review): this registration code appears machine-generated
    (pybindgen style) — prefer regenerating over hand-editing. All
    interface methods are registered pure virtual; the long Callback<...>
    template strings in RouteInput must stay byte-exact to match the
    scanned C++ signature.
    """
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol() [constructor]
    cls.add_constructor([])
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol::Ipv4RoutingProtocol(ns3::Ipv4RoutingProtocol const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4RoutingProtocol const &', 'arg0')])
    ## ipv4-routing-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4RoutingProtocol::GetTypeId() [member function]
    cls.add_method('GetTypeId', 
                   'ns3::TypeId', 
                   [], 
                   is_static=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyAddAddress', 
                   'void', 
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')], 
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceDown', 
                   'void', 
                   [param('uint32_t', 'interface')], 
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceUp', 
                   'void', 
                   [param('uint32_t', 'interface')], 
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyRemoveAddress', 
                   'void', 
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')], 
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTable', 
                   'void', 
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')], 
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): bool ns3::Ipv4RoutingProtocol::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
    cls.add_method('RouteInput', 
                   'bool', 
                   [param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')], 
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
    cls.add_method('RouteOutput', 
                   'ns3::Ptr< ns3::Ipv4Route >', 
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')], 
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4-routing-protocol.h (module 'internet'): void ns3::Ipv4RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
    cls.add_method('SetIpv4', 
                   'void', 
                   [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')], 
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Bind ns3::Ipv6AddressChecker (ipv6-address.h, 'network' module).

    The checker type exposes only its default and copy constructors.
    """
    # ns3::Ipv6AddressChecker() [default constructor]
    cls.add_constructor([])
    # ns3::Ipv6AddressChecker(Ipv6AddressChecker const &) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Bind ns3::Ipv6AddressValue (ipv6-address.h, 'network' module).

    Registers the three constructors plus the AttributeValue interface
    (Copy/SerializeToString/DeserializeFromString) and the typed
    Get/Set accessors.
    """
    # Constructors: default, copy, and from an Ipv6Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    # AttributeValue virtual interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # Typed value accessors.
    cls.add_method('Get', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Address const &', 'value')])
def register_Ns3Ipv6Interface_methods(root_module, cls):
    """Register bindings for ns3::Ipv6Interface (ipv6-interface.h, module 'internet').

    Adds the constructors and member-function wrappers to the generated
    wrapper class ``cls``; each ``##`` comment below carries the original
    C++ declaration being bound.  Do not hand-edit the call sequence —
    this block mirrors generator output (presumably PyBindGen; the API
    matches — confirm against the bindings scanner).
    """
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6Interface::Ipv6Interface(ns3::Ipv6Interface const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6Interface const &', 'arg0')])
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6Interface::Ipv6Interface() [constructor]
    cls.add_constructor([])
    ## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::AddAddress(ns3::Ipv6InterfaceAddress iface) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('ns3::Ipv6InterfaceAddress', 'iface')])
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::GetAddress(uint32_t index) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::GetAddressMatchingDestination(ns3::Ipv6Address dst) [member function]
    cls.add_method('GetAddressMatchingDestination',
                   'ns3::Ipv6InterfaceAddress',
                   [param('ns3::Ipv6Address', 'dst')])
    ## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetBaseReachableTime() const [member function]
    cls.add_method('GetBaseReachableTime',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): uint8_t ns3::Ipv6Interface::GetCurHopLimit() const [member function]
    cls.add_method('GetCurHopLimit',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv6Interface::GetDevice() const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::GetLinkLocalAddress() const [member function]
    cls.add_method('GetLinkLocalAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetMetric() const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): uint32_t ns3::Ipv6Interface::GetNAddresses() const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetReachableTime() const [member function]
    cls.add_method('GetReachableTime',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): uint16_t ns3::Ipv6Interface::GetRetransTimer() const [member function]
    cls.add_method('GetRetransTimer',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): static ns3::TypeId ns3::Ipv6Interface::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::IsDown() const [member function]
    cls.add_method('IsDown',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::IsForwarding() const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): bool ns3::Ipv6Interface::IsUp() const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6InterfaceAddress ns3::Ipv6Interface::RemoveAddress(uint32_t index) [member function]
    cls.add_method('RemoveAddress',
                   'ns3::Ipv6InterfaceAddress',
                   [param('uint32_t', 'index')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::Send(ns3::Ptr<ns3::Packet> p, ns3::Ipv6Address dest) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6Address', 'dest')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetBaseReachableTime(uint16_t baseReachableTime) [member function]
    cls.add_method('SetBaseReachableTime',
                   'void',
                   [param('uint16_t', 'baseReachableTime')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetCurHopLimit(uint8_t curHopLimit) [member function]
    cls.add_method('SetCurHopLimit',
                   'void',
                   [param('uint8_t', 'curHopLimit')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('SetDevice',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetDown() [member function]
    cls.add_method('SetDown',
                   'void',
                   [])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetForwarding(bool forward) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('bool', 'forward')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetMetric(uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint16_t', 'metric')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetNsDadUid(ns3::Ipv6Address address, uint32_t uid) [member function]
    cls.add_method('SetNsDadUid',
                   'void',
                   [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'uid')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetReachableTime(uint16_t reachableTime) [member function]
    cls.add_method('SetReachableTime',
                   'void',
                   [param('uint16_t', 'reachableTime')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetRetransTimer(uint16_t retransTimer) [member function]
    cls.add_method('SetRetransTimer',
                   'void',
                   [param('uint16_t', 'retransTimer')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetState(ns3::Ipv6Address address, ns3::Ipv6InterfaceAddress::State_e state) [member function]
    cls.add_method('SetState',
                   'void',
                   [param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6InterfaceAddress::State_e', 'state')])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::SetUp() [member function]
    cls.add_method('SetUp',
                   'void',
                   [])
    ## ipv6-interface.h (module 'internet'): void ns3::Ipv6Interface::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network')."""
    # Only the default and copy constructors are exposed for this checker.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixValue (ipv6-address.h, module 'network').

    Adds the constructors and member-function wrappers to the generated
    wrapper class ``cls``.
    """
    # Constructors: default, copy, and construction from an Ipv6Prefix.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    # AttributeValue interface: Copy plus string (de)serialization overrides.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Plain accessors for the wrapped Ipv6Prefix.
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::LogNormalRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter accessors.
    cls.add_method('GetMu', 'double', [], is_const=True)
    cls.add_method('GetSigma', 'double', [], is_const=True)
    # Draws taking explicit distribution parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'mu'), param('double', 'sigma')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
    # Parameterless virtual overloads.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register bindings for the abstract base ns3::NetDevice (net-device.h, module 'network').

    Adds the constructors and member-function wrappers to the generated
    wrapper class ``cls``; most methods are registered with
    ``is_pure_virtual=True``, mirroring the C++ declarations quoted in
    the ``##`` comments.  Generated block — keep the call sequence intact.
    """
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register bindings for ns3::NixVector (nix-vector.h, module 'network').

    Adds the output-stream operator, constructors, and member-function
    wrappers to the generated wrapper class ``cls``.
    """
    cls.add_output_stream_operator()
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    cls.add_method('AddNeighborIndex', 'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    cls.add_method('BitCount', 'uint32_t',
                   [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    cls.add_method('Copy', 'ns3::Ptr< ns3::NixVector >', [], is_const=True)
    cls.add_method('Deserialize', 'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('ExtractNeighborIndex', 'uint32_t',
                   [param('uint32_t', 'numberOfBits')])
    cls.add_method('GetRemainingBits', 'uint32_t', [])
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('Serialize', 'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register bindings for ns3::Node (node.h, module 'network').

    Adds the constructors and member-function wrappers to the generated
    wrapper class ``cls``; each ``##`` comment below carries the original
    C++ declaration being bound.  Generated block — keep the call
    sequence intact.
    """
    ## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    ## node.h (module 'network'): ns3::Node::Node() [constructor]
    cls.add_constructor([])
    ## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
    cls.add_constructor([param('uint32_t', 'systemId')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
    cls.add_method('AddApplication',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddDevice',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
    cls.add_method('ChecksumEnabled',
                   'bool',
                   [],
                   is_static=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
    cls.add_method('GetApplication',
                   'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
    cls.add_method('GetId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
    cls.add_method('GetNApplications',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('RegisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
    cls.add_method('RegisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    ## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('UnregisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
    cls.add_method('UnregisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
    ## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3NormalRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::NormalRandomVariable (random-variable-stream.h, module 'core')."""
    # Static constant; also used as the default for the 'bound' argument below.
    cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter accessors.
    cls.add_method('GetMean', 'double', [], is_const=True)
    cls.add_method('GetVariance', 'double', [], is_const=True)
    cls.add_method('GetBound', 'double', [], is_const=True)
    # Draws taking explicit distribution parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'mean'), param('double', 'variance'),
                    param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'variance'),
                    param('uint32_t', 'bound')])
    # Parameterless virtual overloads.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Register bindings for ns3::ObjectFactoryChecker (object-factory.h, module 'core')."""
    # Only the default and copy constructors are exposed for this checker.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Register bindings for ns3::ObjectFactoryValue (object-factory.h, module 'core').

    Adds the constructors and member-function wrappers to the generated
    wrapper class ``cls``.
    """
    # Constructors: default, copy, and construction from an ObjectFactory.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    # AttributeValue interface: Copy plus string (de)serialization overrides.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # Plain accessors for the wrapped ObjectFactory.
    cls.add_method('Get', 'ns3::ObjectFactory', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::ObjectFactory const &', 'value')])
    return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
    """Register bindings for ns3::OutputStreamWrapper (output-stream-wrapper.h, module 'network')."""
    # Constructors: copy, (filename, open-mode), and wrap an existing ostream.
    cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
    cls.add_constructor([param('std::string', 'filename'),
                         param('std::_Ios_Openmode', 'filemode')])
    cls.add_constructor([param('std::ostream *', 'os')])
    # Accessor for the wrapped std::ostream pointer.
    cls.add_method('GetStream', 'std::ostream *', [])
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register ns3::Packet bindings (packet.h, network module).

    All registrations are kept in the generator's original order so that
    pybindgen overload resolution is unchanged.
    """
    cls.add_output_stream_operator()
    # Constructors: default, copy, sized, and two raw-buffer forms.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    cls.add_constructor([param('uint32_t', 'size')])
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Mutators that grow the packet or attach headers/trailers/tags.
    cls.add_method('AddAtEnd', 'void', [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    cls.add_method('AddByteTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header')])
    cls.add_method('AddPacketTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'size')])
    cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer')])
    # Inspection and copying.
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [], is_const=True)
    cls.add_method('Copy', 'ns3::Ptr< ns3::Packet >', [], is_const=True)
    cls.add_method('CopyData', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CopyData', 'void', [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::Ptr< ns3::Packet >', [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True)
    # Static configuration switches.
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('EnablePrinting', 'void', [], is_static=True)
    # Tag, metadata, and size accessors.
    cls.add_method('FindFirstMatchingByteTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetByteTagIterator', 'ns3::ByteTagIterator', [], is_const=True)
    cls.add_method('GetNixVector', 'ns3::Ptr< ns3::NixVector >', [], is_const=True)
    cls.add_method('GetPacketTagIterator', 'ns3::PacketTagIterator', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # PeekData is flagged deprecated in the original registration.
    cls.add_method('PeekData', 'uint8_t const *', [], deprecated=True, is_const=True)
    cls.add_method('PeekHeader', 'uint32_t', [param('ns3::Header &', 'header')], is_const=True)
    cls.add_method('PeekPacketTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('PeekTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')])
    # Printing helpers.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('PrintByteTags', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('PrintPacketTags', 'void', [param('std::ostream &', 'os')], is_const=True)
    # Mutators that shrink the packet or strip headers/trailers/tags.
    cls.add_method('RemoveAllByteTags', 'void', [])
    cls.add_method('RemoveAllPacketTags', 'void', [])
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'size')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'size')])
    cls.add_method('RemoveHeader', 'uint32_t', [param('ns3::Header &', 'header')])
    cls.add_method('RemovePacketTag', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')])
    # Serialization to/from a caller-supplied buffer, plus nix-vector routing state.
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    cls.add_method('SetNixVector', 'void', [param('ns3::Ptr< ns3::NixVector >', 'arg0')])
    return
def register_Ns3ParetoRandomVariable_methods(root_module, cls):
    """Register ns3::ParetoRandomVariable bindings (random-variable-stream.h, core module)."""
    # Static TypeId accessor, then the default constructor.
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const getters for the three distribution parameters, in the
    # generator's original order.
    for getter in ('GetMean', 'GetShape', 'GetBound'):
        cls.add_method(getter, 'double', [], is_const=True)
    # Draws that take explicit (mean, shape, bound) parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'mean'), param('double', 'shape'), param('double', 'bound')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    # Zero-argument virtual draws.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3TimeChecker_methods(root_module, cls):
    """Register ns3::TimeChecker constructors (nstime.h, core module)."""
    # Default constructor first, then the copy constructor.
    for ctor_args in ([], [param('ns3::TimeChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register ns3::TimeValue bindings: constructors plus the AttributeValue
    interface (nstime.h, core module)."""
    checker_t = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Default, copy, and from-ns3::Time constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Time const &', 'value')])
    # Virtual AttributeValue interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_t, 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Time', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_t, 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register ns3::TypeIdChecker constructors (type-id.h, core module)."""
    # Default constructor first, then the copy constructor.
    for ctor_args in ([], [param('ns3::TypeIdChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register ns3::TypeIdValue bindings: constructors plus the AttributeValue
    interface (type-id.h, core module)."""
    checker_t = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Default, copy, and from-ns3::TypeId constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    # Virtual AttributeValue interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_t, 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::TypeId', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_t, 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register ns3::AddressChecker constructors (address.h, network module)."""
    # Default constructor first, then the copy constructor.
    for ctor_args in ([], [param('ns3::AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register ns3::AddressValue bindings: constructors plus the AttributeValue
    interface (address.h, network module)."""
    checker_t = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Default, copy, and from-ns3::Address constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Address const &', 'value')])
    # Virtual AttributeValue interface.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_t, 'checker')],
                   is_virtual=True)
    cls.add_method('Get', 'ns3::Address', [], is_const=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_t, 'checker')],
                   is_const=True, is_virtual=True)
    cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')])
    return
def register_Ns3DsdvDsdvHeader_methods(root_module, cls):
    """Register ns3::dsdv::DsdvHeader bindings (dsdv-packet.h, dsdv module)."""
    cls.add_output_stream_operator()
    # Copy constructor, then the (dst, hopcount, dstSeqNo) constructor
    # whose parameters all carry defaults.
    cls.add_constructor([param('ns3::dsdv::DsdvHeader const &', 'arg0')])
    cls.add_constructor([
        param('ns3::Ipv4Address', 'dst', default_value='ns3::Ipv4Address()'),
        param('uint32_t', 'hopcount', default_value='0'),
        param('uint32_t', 'dstSeqNo', default_value='0'),
    ])
    # Header (de)serialization override.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_virtual=True)
    # Const field getters.
    cls.add_method('GetDst', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetDstSeqno', 'uint32_t', [], is_const=True)
    cls.add_method('GetHopCount', 'uint32_t', [], is_const=True)
    # TypeId and size accessors.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Printing and serialization overrides.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')],
                   is_const=True, is_virtual=True)
    # Field setters.
    cls.add_method('SetDst', 'void', [param('ns3::Ipv4Address', 'destination')])
    cls.add_method('SetDstSeqno', 'void', [param('uint32_t', 'sequenceNumber')])
    cls.add_method('SetHopCount', 'void', [param('uint32_t', 'hopCount')])
    return
def register_Ns3DsdvPacketQueue_methods(root_module, cls):
    """Register ns3::dsdv::PacketQueue bindings (dsdv-packet-queue.h, dsdv module)."""
    # Copy constructor first, then the default constructor (generator order).
    cls.add_constructor([param('ns3::dsdv::PacketQueue const &', 'arg0')])
    cls.add_constructor([])
    # Queue manipulation and lookup.
    cls.add_method('Dequeue', 'bool',
                   [param('ns3::Ipv4Address', 'dst'),
                    param('ns3::dsdv::QueueEntry &', 'entry')])
    cls.add_method('DropPacketWithDst', 'void', [param('ns3::Ipv4Address', 'dst')])
    cls.add_method('Enqueue', 'bool', [param('ns3::dsdv::QueueEntry &', 'entry')])
    cls.add_method('Find', 'bool', [param('ns3::Ipv4Address', 'dst')])
    cls.add_method('GetCountForPacketsWithDst', 'uint32_t',
                   [param('ns3::Ipv4Address', 'dst')])
    # Const getters for the queue configuration.
    cls.add_method('GetMaxPacketsPerDst', 'uint32_t', [], is_const=True)
    cls.add_method('GetMaxQueueLen', 'uint32_t', [], is_const=True)
    cls.add_method('GetQueueTimeout', 'ns3::Time', [], is_const=True)
    # Non-const in the C++ API, hence no is_const here.
    cls.add_method('GetSize', 'uint32_t', [])
    # Setters for the queue configuration.
    cls.add_method('SetMaxPacketsPerDst', 'void', [param('uint32_t', 'len')])
    cls.add_method('SetMaxQueueLen', 'void', [param('uint32_t', 'len')])
    cls.add_method('SetQueueTimeout', 'void', [param('ns3::Time', 't')])
    return
def register_Ns3DsdvQueueEntry_methods(root_module, cls):
    """Register ns3::dsdv::QueueEntry bindings (dsdv-packet-queue.h, dsdv module).

    NOTE: the very long 'ns3::Callback< ... >' strings below are the fully
    expanded C++ template signatures emitted by the bindings generator; they
    must match the C++ types exactly and should not be hand-edited.
    """
    cls.add_binary_comparison_operator('==')
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry::QueueEntry(ns3::dsdv::QueueEntry const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::dsdv::QueueEntry const &', 'arg0')])
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry::QueueEntry(ns3::Ptr<ns3::Packet const> pa=0, ns3::Ipv4Header const & h=ns3::Ipv4Header(), ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb=ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>(), ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb=ns3::Callback<void, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet const >', 'pa', default_value='0'), param('ns3::Ipv4Header const &', 'h', default_value='ns3::Ipv4Header()'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb', default_value='ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb', default_value='ns3::Callback<void, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()')])
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::dsdv::QueueEntry::GetErrorCallback() const [member function]
    cls.add_method('GetErrorCallback',
                   'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Time ns3::dsdv::QueueEntry::GetExpireTime() const [member function]
    cls.add_method('GetExpireTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Ipv4Header ns3::dsdv::QueueEntry::GetIpv4Header() const [member function]
    cls.add_method('GetIpv4Header',
                   'ns3::Ipv4Header',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Ptr<ns3::Packet const> ns3::dsdv::QueueEntry::GetPacket() const [member function]
    cls.add_method('GetPacket',
                   'ns3::Ptr< ns3::Packet const >',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::dsdv::QueueEntry::GetUnicastForwardCallback() const [member function]
    cls.add_method('GetUnicastForwardCallback',
                   'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetErrorCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
    cls.add_method('SetErrorCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetExpireTime(ns3::Time exp) [member function]
    cls.add_method('SetExpireTime',
                   'void',
                   [param('ns3::Time', 'exp')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetIpv4Header(ns3::Ipv4Header h) [member function]
    cls.add_method('SetIpv4Header',
                   'void',
                   [param('ns3::Ipv4Header', 'h')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetPacket(ns3::Ptr<ns3::Packet const> p) [member function]
    cls.add_method('SetPacket',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'p')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetUnicastForwardCallback(ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb) [member function]
    cls.add_method('SetUnicastForwardCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb')])
    return
def register_Ns3DsdvRoutingProtocol_methods(root_module, cls):
    """Register ns3::dsdv::RoutingProtocol bindings (dsdv-routing-protocol.h, dsdv module).

    Includes the Ipv4RoutingProtocol virtual interface (RouteInput/RouteOutput,
    Notify*, PrintRoutingTable, SetIpv4) plus DSDV-specific flag accessors.
    The expanded 'ns3::Callback< ... >' strings are generator-emitted C++
    template signatures and must not be hand-edited.
    """
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::RoutingProtocol(ns3::dsdv::RoutingProtocol const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::dsdv::RoutingProtocol const &', 'arg0')])
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::RoutingProtocol() [constructor]
    cls.add_constructor([])
    ## dsdv-routing-protocol.h (module 'dsdv'): int64_t ns3::dsdv::RoutingProtocol::AssignStreams(int64_t stream) [member function]
    cls.add_method('AssignStreams',
                   'int64_t',
                   [param('int64_t', 'stream')])
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetEnableBufferFlag() const [member function]
    cls.add_method('GetEnableBufferFlag',
                   'bool',
                   [],
                   is_const=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetEnableRAFlag() const [member function]
    cls.add_method('GetEnableRAFlag',
                   'bool',
                   [],
                   is_const=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): static ns3::TypeId ns3::dsdv::RoutingProtocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetWSTFlag() const [member function]
    cls.add_method('GetWSTFlag',
                   'bool',
                   [],
                   is_const=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyAddAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyRemoveAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTable',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True, is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
    cls.add_method('RouteInput',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::Ptr<ns3::Ipv4Route> ns3::dsdv::RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
    cls.add_method('RouteOutput',
                   'ns3::Ptr< ns3::Ipv4Route >',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetEnableBufferFlag(bool f) [member function]
    cls.add_method('SetEnableBufferFlag',
                   'void',
                   [param('bool', 'f')])
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetEnableRAFlag(bool f) [member function]
    cls.add_method('SetEnableRAFlag',
                   'void',
                   [param('bool', 'f')])
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
    cls.add_method('SetIpv4',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetWSTFlag(bool f) [member function]
    cls.add_method('SetWSTFlag',
                   'void',
                   [param('bool', 'f')])
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::DSDV_PORT [variable]
    cls.add_static_attribute('DSDV_PORT', 'uint32_t const', is_const=True)
    return
def register_Ns3DsdvRoutingTable_methods(root_module, cls):
    """Register Python bindings for ns3::dsdv::RoutingTable (dsdv-rtable.h).

    Auto-generated pybindgen registrations: one cls.add_constructor /
    cls.add_method call per C++ constructor or member function.  The C++
    type strings must match the headers exactly, so do not edit by hand.
    """
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTable::RoutingTable(ns3::dsdv::RoutingTable const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::dsdv::RoutingTable const &', 'arg0')])
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTable::RoutingTable() [constructor]
    cls.add_constructor([])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::AddIpv4Event(ns3::Ipv4Address address, ns3::EventId id) [member function]
    cls.add_method('AddIpv4Event', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'address'), param('ns3::EventId', 'id')])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::AddRoute(ns3::dsdv::RoutingTableEntry & r) [member function]
    cls.add_method('AddRoute', 
                   'bool', 
                   [param('ns3::dsdv::RoutingTableEntry &', 'r')])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::AnyRunningEvent(ns3::Ipv4Address address) [member function]
    cls.add_method('AnyRunningEvent', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'address')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Clear() [member function]
    cls.add_method('Clear', 
                   'void', 
                   [])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::DeleteAllRoutesFromInterface(ns3::Ipv4InterfaceAddress iface) [member function]
    cls.add_method('DeleteAllRoutesFromInterface', 
                   'void', 
                   [param('ns3::Ipv4InterfaceAddress', 'iface')])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::DeleteIpv4Event(ns3::Ipv4Address address) [member function]
    cls.add_method('DeleteIpv4Event', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'address')])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::DeleteRoute(ns3::Ipv4Address dst) [member function]
    cls.add_method('DeleteRoute', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'dst')])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::ForceDeleteIpv4Event(ns3::Ipv4Address address) [member function]
    cls.add_method('ForceDeleteIpv4Event', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'address')])
    ## dsdv-rtable.h (module 'dsdv'): ns3::EventId ns3::dsdv::RoutingTable::GetEventId(ns3::Ipv4Address address) [member function]
    cls.add_method('GetEventId', 
                   'ns3::EventId', 
                   [param('ns3::Ipv4Address', 'address')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::GetListOfAllRoutes(std::map<ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry, std::less<ns3::Ipv4Address>, std::allocator<std::pair<ns3::Ipv4Address const, ns3::dsdv::RoutingTableEntry> > > & allRoutes) [member function]
    cls.add_method('GetListOfAllRoutes', 
                   'void', 
                   [param('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry > &', 'allRoutes')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::GetListOfDestinationWithNextHop(ns3::Ipv4Address nxtHp, std::map<ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry, std::less<ns3::Ipv4Address>, std::allocator<std::pair<ns3::Ipv4Address const, ns3::dsdv::RoutingTableEntry> > > & dstList) [member function]
    cls.add_method('GetListOfDestinationWithNextHop', 
                   'void', 
                   [param('ns3::Ipv4Address', 'nxtHp'), param('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry > &', 'dstList')])
    ## dsdv-rtable.h (module 'dsdv'): ns3::Time ns3::dsdv::RoutingTable::Getholddowntime() const [member function]
    cls.add_method('Getholddowntime', 
                   'ns3::Time', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::LookupRoute(ns3::Ipv4Address dst, ns3::dsdv::RoutingTableEntry & rt) [member function]
    cls.add_method('LookupRoute', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'dst'), param('ns3::dsdv::RoutingTableEntry &', 'rt')])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::LookupRoute(ns3::Ipv4Address id, ns3::dsdv::RoutingTableEntry & rt, bool forRouteInput) [member function]
    cls.add_method('LookupRoute', 
                   'bool', 
                   [param('ns3::Ipv4Address', 'id'), param('ns3::dsdv::RoutingTableEntry &', 'rt'), param('bool', 'forRouteInput')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Print(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('Print', 
                   'void', 
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Purge(std::map<ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry, std::less<ns3::Ipv4Address>, std::allocator<std::pair<ns3::Ipv4Address const, ns3::dsdv::RoutingTableEntry> > > & removedAddresses) [member function]
    cls.add_method('Purge', 
                   'void', 
                   [param('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry > &', 'removedAddresses')])
    ## dsdv-rtable.h (module 'dsdv'): uint32_t ns3::dsdv::RoutingTable::RoutingTableSize() [member function]
    cls.add_method('RoutingTableSize', 
                   'uint32_t', 
                   [])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTable::Setholddowntime(ns3::Time t) [member function]
    cls.add_method('Setholddowntime', 
                   'void', 
                   [param('ns3::Time', 't')])
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTable::Update(ns3::dsdv::RoutingTableEntry & rt) [member function]
    cls.add_method('Update', 
                   'bool', 
                   [param('ns3::dsdv::RoutingTableEntry &', 'rt')])
    return
def register_Ns3DsdvRoutingTableEntry_methods(root_module, cls):
    """Register Python bindings for ns3::dsdv::RoutingTableEntry (dsdv-rtable.h).

    Auto-generated pybindgen registrations mirroring the C++ accessors and
    mutators of a single DSDV routing-table entry.  Do not edit by hand.
    """
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTableEntry::RoutingTableEntry(ns3::dsdv::RoutingTableEntry const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::dsdv::RoutingTableEntry const &', 'arg0')])
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTableEntry::RoutingTableEntry(ns3::Ptr<ns3::NetDevice> dev=0, ns3::Ipv4Address dst=ns3::Ipv4Address(), u_int32_t m_seqNo=0, ns3::Ipv4InterfaceAddress iface=ns3::Ipv4InterfaceAddress(), u_int32_t hops=0, ns3::Ipv4Address nextHop=ns3::Ipv4Address(), ns3::Time lifetime=ns3::Simulator::Now( ), ns3::Time SettlingTime=ns3::Simulator::Now( ), bool changedEntries=false) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev', default_value='0'), param('ns3::Ipv4Address', 'dst', default_value='ns3::Ipv4Address()'), param('u_int32_t', 'm_seqNo', default_value='0'), param('ns3::Ipv4InterfaceAddress', 'iface', default_value='ns3::Ipv4InterfaceAddress()'), param('u_int32_t', 'hops', default_value='0'), param('ns3::Ipv4Address', 'nextHop', default_value='ns3::Ipv4Address()'), param('ns3::Time', 'lifetime', default_value='ns3::Simulator::Now( )'), param('ns3::Time', 'SettlingTime', default_value='ns3::Simulator::Now( )'), param('bool', 'changedEntries', default_value='false')])
    ## dsdv-rtable.h (module 'dsdv'): ns3::Ipv4Address ns3::dsdv::RoutingTableEntry::GetDestination() const [member function]
    cls.add_method('GetDestination', 
                   'ns3::Ipv4Address', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): bool ns3::dsdv::RoutingTableEntry::GetEntriesChanged() const [member function]
    cls.add_method('GetEntriesChanged', 
                   'bool', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RouteFlags ns3::dsdv::RoutingTableEntry::GetFlag() const [member function]
    cls.add_method('GetFlag', 
                   'ns3::dsdv::RouteFlags', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): uint32_t ns3::dsdv::RoutingTableEntry::GetHop() const [member function]
    cls.add_method('GetHop', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): ns3::Ipv4InterfaceAddress ns3::dsdv::RoutingTableEntry::GetInterface() const [member function]
    cls.add_method('GetInterface', 
                   'ns3::Ipv4InterfaceAddress', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): ns3::Time ns3::dsdv::RoutingTableEntry::GetLifeTime() const [member function]
    cls.add_method('GetLifeTime', 
                   'ns3::Time', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): ns3::Ipv4Address ns3::dsdv::RoutingTableEntry::GetNextHop() const [member function]
    cls.add_method('GetNextHop', 
                   'ns3::Ipv4Address', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): ns3::Ptr<ns3::NetDevice> ns3::dsdv::RoutingTableEntry::GetOutputDevice() const [member function]
    cls.add_method('GetOutputDevice', 
                   'ns3::Ptr< ns3::NetDevice >', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): ns3::Ptr<ns3::Ipv4Route> ns3::dsdv::RoutingTableEntry::GetRoute() const [member function]
    cls.add_method('GetRoute', 
                   'ns3::Ptr< ns3::Ipv4Route >', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): uint32_t ns3::dsdv::RoutingTableEntry::GetSeqNo() const [member function]
    cls.add_method('GetSeqNo', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): ns3::Time ns3::dsdv::RoutingTableEntry::GetSettlingTime() const [member function]
    cls.add_method('GetSettlingTime', 
                   'ns3::Time', 
                   [], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::Print(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('Print', 
                   'void', 
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')], 
                   is_const=True)
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetEntriesChanged(bool entriesChanged) [member function]
    cls.add_method('SetEntriesChanged', 
                   'void', 
                   [param('bool', 'entriesChanged')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetFlag(ns3::dsdv::RouteFlags flag) [member function]
    cls.add_method('SetFlag', 
                   'void', 
                   [param('ns3::dsdv::RouteFlags', 'flag')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetHop(uint32_t hopCount) [member function]
    cls.add_method('SetHop', 
                   'void', 
                   [param('uint32_t', 'hopCount')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetInterface(ns3::Ipv4InterfaceAddress iface) [member function]
    cls.add_method('SetInterface', 
                   'void', 
                   [param('ns3::Ipv4InterfaceAddress', 'iface')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetLifeTime(ns3::Time lifeTime) [member function]
    cls.add_method('SetLifeTime', 
                   'void', 
                   [param('ns3::Time', 'lifeTime')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetNextHop(ns3::Ipv4Address nextHop) [member function]
    cls.add_method('SetNextHop', 
                   'void', 
                   [param('ns3::Ipv4Address', 'nextHop')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetOutputDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('SetOutputDevice', 
                   'void', 
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetRoute(ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SetRoute', 
                   'void', 
                   [param('ns3::Ptr< ns3::Ipv4Route >', 'route')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetSeqNo(uint32_t sequenceNumber) [member function]
    cls.add_method('SetSeqNo', 
                   'void', 
                   [param('uint32_t', 'sequenceNumber')])
    ## dsdv-rtable.h (module 'dsdv'): void ns3::dsdv::RoutingTableEntry::SetSettlingTime(ns3::Time settlingTime) [member function]
    cls.add_method('SetSettlingTime', 
                   'void', 
                   [param('ns3::Time', 'settlingTime')])
    return
def register_functions(root_module):
    """Register free functions on the root module and its sub-namespaces."""
    register_functions_ns3_FatalImpl(root_module.get_submodule('FatalImpl'),
                                     root_module)
    register_functions_ns3_dsdv(root_module.get_submodule('dsdv'), root_module)
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register for the ns3::FatalImpl namespace."""
    pass
def register_functions_ns3_dsdv(module, root_module):
    """No free functions to register for the ns3::dsdv namespace."""
    pass
def main():
    """Generate the C++ binding code for the dsdv module onto stdout."""
    sink = FileCodeSink(sys.stdout)
    root_module = module_init()
    # Registration order matters: types first, then their methods, then
    # free functions, before emitting the generated code.
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(sink)
# Run the binding generator when executed as a script.
if __name__ == '__main__':
    main()
|
{
"content_hash": "0435d2cb7a3fce83409f25201a7cad39",
"timestamp": "",
"source": "github",
"line_count": 6891,
"max_line_length": 934,
"avg_line_length": 63.97155710346829,
"alnum_prop": 0.60408367889517,
"repo_name": "Chiru/NVE_Simulation",
"id": "bafc41d9800f06c694af324c2cb3971882f78ac4",
"size": "440828",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "NS3/src/dsdv/bindings/modulegen__gcc_LP64.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "587430"
},
{
"name": "C++",
"bytes": "15139819"
},
{
"name": "DOT",
"bytes": "2792"
},
{
"name": "M",
"bytes": "5446"
},
{
"name": "Matlab",
"bytes": "18438"
},
{
"name": "Objective-C",
"bytes": "15035"
},
{
"name": "Perl",
"bytes": "302841"
},
{
"name": "Prolog",
"bytes": "2793"
},
{
"name": "Python",
"bytes": "32484684"
},
{
"name": "Scala",
"bytes": "51"
},
{
"name": "Shell",
"bytes": "7282"
}
],
"symlink_target": ""
}
|
import collections
import copy
import netaddr
from neutron_lib import constants
from neutron_lib import exceptions
from oslo_config import cfg
from oslo_log import log as logging
from oslo_policy import policy as oslo_policy
from oslo_utils import excutils
import six
import webob.exc
from neutron._i18n import _, _LE, _LI
from neutron.api import api_common
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.api.v2 import resource as wsgi_resource
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.db import api as db_api
from neutron import policy
from neutron import quota
from neutron.quota import resource_registry
LOG = logging.getLogger(__name__)

# Map exception types raised by plugins/libraries onto the webob HTTP
# error classes returned to API clients.
FAULT_MAP = {exceptions.NotFound: webob.exc.HTTPNotFound,
             exceptions.Conflict: webob.exc.HTTPConflict,
             exceptions.InUse: webob.exc.HTTPConflict,
             exceptions.BadRequest: webob.exc.HTTPBadRequest,
             exceptions.ServiceUnavailable: webob.exc.HTTPServiceUnavailable,
             exceptions.NotAuthorized: webob.exc.HTTPForbidden,
             netaddr.AddrFormatError: webob.exc.HTTPBadRequest,
             oslo_policy.PolicyNotAuthorized: webob.exc.HTTPForbidden
             }
class Controller(object):
    """WSGI controller mapping REST API requests onto plugin CRUD calls."""

    # CRUD action names; used to build plugin handler method names
    # such as 'get_networks' or 'create_port'.
    LIST = 'list'
    SHOW = 'show'
    CREATE = 'create'
    UPDATE = 'update'
    DELETE = 'delete'
    @property
    def plugin(self):
        """The plugin instance this controller dispatches requests to."""
        return self._plugin
    @property
    def resource(self):
        """The singular resource name handled by this controller."""
        return self._resource
    def __init__(self, plugin, collection, resource, attr_info,
                 allow_bulk=False, member_actions=None, parent=None,
                 allow_pagination=False, allow_sorting=False):
        """Build a controller for one API collection.

        :param plugin: plugin instance the CRUD calls are dispatched to
        :param collection: plural resource name (dashes become underscores)
        :param resource: singular resource name (dashes become underscores)
        :param attr_info: attribute map describing the resource's API schema
        :param allow_bulk: whether bulk create requests are accepted
        :param member_actions: extra per-member actions served by __getattr__
        :param parent: optional parent-resource descriptor for sub-resources
        :param allow_pagination: whether paginated listing is accepted
        :param allow_sorting: whether sorted listing is accepted
        """
        if member_actions is None:
            member_actions = []
        self._plugin = plugin
        self._collection = collection.replace('-', '_')
        self._resource = resource.replace('-', '_')
        self._attr_info = attr_info
        self._allow_bulk = allow_bulk
        self._allow_pagination = allow_pagination
        self._allow_sorting = allow_sorting
        # Probe the plugin for native bulk/pagination/sorting support;
        # missing capabilities are emulated in the API layer.
        self._native_bulk = self._is_native_bulk_supported()
        self._native_pagination = self._is_native_pagination_supported()
        self._native_sorting = self._is_native_sorting_supported()
        # Attributes the policy engine needs even when the user's 'fields'
        # query parameter does not request them.
        self._policy_attrs = [name for (name, info) in self._attr_info.items()
                              if info.get('required_by_policy')]
        self._notifier = n_rpc.get_notifier('network')
        # use plugin's dhcp notifier, if this is already instantiated
        agent_notifiers = getattr(plugin, 'agent_notifiers', {})
        self._dhcp_agent_notifier = (
            agent_notifiers.get(constants.AGENT_TYPE_DHCP) or
            dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
        )
        if cfg.CONF.notify_nova_on_port_data_changes:
            # Imported lazily so nova notifier code is only loaded when the
            # feature is enabled.
            from neutron.notifiers import nova
            self._nova_notifier = nova.Notifier()
        self._member_actions = member_actions
        self._primary_key = self._get_primary_key()
        if self._allow_pagination and self._native_pagination:
            # Native pagination need native sorting support
            if not self._native_sorting:
                raise exceptions.Invalid(
                    _("Native pagination depend on native sorting")
                )
            if not self._allow_sorting:
                LOG.info(_LI("Allow sorting is enabled because native "
                             "pagination requires native sorting"))
                self._allow_sorting = True
        if parent:
            self._parent_id_name = '%s_id' % parent['member_name']
            parent_part = '_%s' % parent['member_name']
        else:
            self._parent_id_name = None
            parent_part = ''
        # Map each CRUD action onto the plugin method implementing it,
        # e.g. 'get_networks' or 'create_router_interface'.
        self._plugin_handlers = {
            self.LIST: 'get%s_%s' % (parent_part, self._collection),
            self.SHOW: 'get%s_%s' % (parent_part, self._resource)
        }
        for action in [self.CREATE, self.UPDATE, self.DELETE]:
            self._plugin_handlers[action] = '%s%s_%s' % (action, parent_part,
                                                         self._resource)
def _get_primary_key(self, default_primary_key='id'):
for key, value in six.iteritems(self._attr_info):
if value.get('primary_key', False):
return key
return default_primary_key
def _is_native_bulk_supported(self):
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_bulk_attr_name, False)
def _is_native_pagination_supported(self):
native_pagination_attr_name = ("_%s__native_pagination_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_pagination_attr_name, False)
def _is_native_sorting_supported(self):
native_sorting_attr_name = ("_%s__native_sorting_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_sorting_attr_name, False)
def _exclude_attributes_by_policy(self, context, data):
"""Identifies attributes to exclude according to authZ policies.
Return a list of attribute names which should be stripped from the
response returned to the user because the user is not authorized
to see them.
"""
attributes_to_exclude = []
for attr_name in data.keys():
attr_data = self._attr_info.get(attr_name)
if attr_data and attr_data['is_visible']:
if policy.check(
context,
'%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
data,
might_not_exist=True,
pluralized=self._collection):
# this attribute is visible, check next one
continue
# if the code reaches this point then either the policy check
# failed or the attribute was not visible in the first place
attributes_to_exclude.append(attr_name)
return attributes_to_exclude
def _view(self, context, data, fields_to_strip=None):
"""Build a view of an API resource.
:param context: the neutron context
:param data: the object for which a view is being created
:param fields_to_strip: attributes to remove from the view
:returns: a view of the object which includes only attributes
visible according to API resource declaration and authZ policies.
"""
fields_to_strip = ((fields_to_strip or []) +
self._exclude_attributes_by_policy(context, data))
return self._filter_attributes(context, data, fields_to_strip)
def _filter_attributes(self, context, data, fields_to_strip=None):
if not fields_to_strip:
return data
return dict(item for item in six.iteritems(data)
if (item[0] not in fields_to_strip))
def _do_field_list(self, original_fields):
fields_to_add = None
# don't do anything if fields were not specified in the request
if original_fields:
fields_to_add = [attr for attr in self._policy_attrs
if attr not in original_fields]
original_fields.extend(self._policy_attrs)
return original_fields, fields_to_add
    def __getattr__(self, name):
        """Serve registered member actions as dynamically-built handlers.

        Only names listed in ``member_actions`` are handled; any other
        attribute lookup raises AttributeError as usual.
        """
        if name in self._member_actions:
            @db_api.retry_db_errors
            def _handle_action(request, id, **kwargs):
                arg_list = [request.context, id]
                # Ensure policy engine is initialized
                policy.init()
                # Fetch the resource and verify if the user can access it
                try:
                    parent_id = kwargs.get(self._parent_id_name)
                    resource = self._item(request,
                                          id,
                                          do_authz=True,
                                          field_list=None,
                                          parent_id=parent_id)
                except oslo_policy.PolicyNotAuthorized:
                    msg = _('The resource could not be found.')
                    raise webob.exc.HTTPNotFound(msg)
                body = copy.deepcopy(kwargs.pop('body', None))
                # Explicit comparison with None to distinguish from {}
                if body is not None:
                    arg_list.append(body)
                # It is ok to raise a 403 because accessibility to the
                # object was checked earlier in this method
                policy.enforce(request.context,
                               name,
                               resource,
                               pluralized=self._collection)
                ret_value = getattr(self._plugin, name)(*arg_list, **kwargs)
                # It is simply impossible to predict whether one of this
                # actions alters resource usage. For instance a tenant port
                # is created when a router interface is added. Therefore it is
                # important to mark as dirty resources whose counters have
                # been altered by this operation
                resource_registry.set_resources_dirty(request.context)
                return ret_value
            return _handle_action
        else:
            raise AttributeError()
def _get_pagination_helper(self, request):
if self._allow_pagination and self._native_pagination:
return api_common.PaginationNativeHelper(request,
self._primary_key)
elif self._allow_pagination:
return api_common.PaginationEmulatedHelper(request,
self._primary_key)
return api_common.NoPaginationHelper(request, self._primary_key)
def _get_sorting_helper(self, request):
if self._allow_sorting and self._native_sorting:
return api_common.SortingNativeHelper(request, self._attr_info)
elif self._allow_sorting:
return api_common.SortingEmulatedHelper(request, self._attr_info)
return api_common.NoSortingHelper(request, self._attr_info)
def _items(self, request, do_authz=False, parent_id=None):
"""Retrieves and formats a list of elements of the requested entity."""
# NOTE(salvatore-orlando): The following ensures that fields which
# are needed for authZ policy validation are not stripped away by the
# plugin before returning.
original_fields, fields_to_add = self._do_field_list(
api_common.list_args(request, 'fields'))
filters = api_common.get_filters(request, self._attr_info,
['fields', 'sort_key', 'sort_dir',
'limit', 'marker', 'page_reverse'])
kwargs = {'filters': filters,
'fields': original_fields}
sorting_helper = self._get_sorting_helper(request)
pagination_helper = self._get_pagination_helper(request)
sorting_helper.update_args(kwargs)
sorting_helper.update_fields(original_fields, fields_to_add)
pagination_helper.update_args(kwargs)
pagination_helper.update_fields(original_fields, fields_to_add)
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, self._plugin_handlers[self.LIST])
obj_list = obj_getter(request.context, **kwargs)
obj_list = sorting_helper.sort(obj_list)
obj_list = pagination_helper.paginate(obj_list)
# Check authz
if do_authz:
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
# Omit items from list that should not be visible
obj_list = [obj for obj in obj_list
if policy.check(request.context,
self._plugin_handlers[self.SHOW],
obj,
plugin=self._plugin,
pluralized=self._collection)]
# Use the first element in the list for discriminating which attributes
# should be filtered out because of authZ policies
# fields_to_add contains a list of attributes added for request policy
# checks but that were not required by the user. They should be
# therefore stripped
fields_to_strip = fields_to_add or []
if obj_list:
fields_to_strip += self._exclude_attributes_by_policy(
request.context, obj_list[0])
collection = {self._collection:
[self._filter_attributes(
request.context, obj,
fields_to_strip=fields_to_strip)
for obj in obj_list]}
pagination_links = pagination_helper.get_links(obj_list)
if pagination_links:
collection[self._collection + "_links"] = pagination_links
# Synchronize usage trackers, if needed
resource_registry.resync_resource(
request.context, self._resource, request.context.tenant_id)
return collection
def _item(self, request, id, do_authz=False, field_list=None,
parent_id=None):
"""Retrieves and formats a single element of the requested entity."""
kwargs = {'fields': field_list}
action = self._plugin_handlers[self.SHOW]
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, action)
obj = obj_getter(request.context, id, **kwargs)
# Check authz
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
if do_authz:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
return obj
def _send_dhcp_notification(self, context, data, methodname):
if cfg.CONF.dhcp_agent_notification:
if self._collection in data:
for body in data[self._collection]:
item = {self._resource: body}
self._dhcp_agent_notifier.notify(context, item, methodname)
else:
self._dhcp_agent_notifier.notify(context, data, methodname)
def _send_nova_notification(self, action, orig, returned):
if hasattr(self, '_nova_notifier'):
self._nova_notifier.send_network_change(action, orig, returned)
@db_api.retry_db_errors
def index(self, request, **kwargs):
"""Returns a list of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return self._items(request, True, parent_id)
@db_api.retry_db_errors
def show(self, request, id, **kwargs):
"""Returns detailed information about the requested entity."""
try:
# NOTE(salvatore-orlando): The following ensures that fields
# which are needed for authZ policy validation are not stripped
# away by the plugin before returning.
field_list, added_fields = self._do_field_list(
api_common.list_args(request, "fields"))
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return {self._resource:
self._view(request.context,
self._item(request,
id,
do_authz=True,
field_list=field_list,
parent_id=parent_id),
fields_to_strip=added_fields)}
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
    def _emulate_bulk_create(self, obj_creator, request, body, parent_id=None):
        """Emulate an atomic bulk create by creating items one at a time.

        On failure, every object created so far is deleted again so the
        operation appears all-or-nothing to the caller; the original
        exception is then re-raised.
        """
        objs = []
        try:
            for item in body[self._collection]:
                kwargs = {self._resource: item}
                if parent_id:
                    kwargs[self._parent_id_name] = parent_id
                fields_to_strip = self._exclude_attributes_by_policy(
                    request.context, item)
                objs.append(self._filter_attributes(
                    request.context,
                    obj_creator(request.context, **kwargs),
                    fields_to_strip=fields_to_strip))
            return objs
        # Note(salvatore-orlando): broad catch as in theory a plugin
        # could raise any kind of exception
        except Exception:
            with excutils.save_and_reraise_exception():
                for obj in objs:
                    obj_deleter = getattr(self._plugin,
                                          self._plugin_handlers[self.DELETE])
                    try:
                        kwargs = ({self._parent_id_name: parent_id}
                                  if parent_id else {})
                        obj_deleter(request.context, obj['id'], **kwargs)
                    except Exception:
                        # broad catch as our only purpose is to log the
                        # exception
                        LOG.exception(_LE("Unable to undo add for "
                                          "%(resource)s %(id)s"),
                                      {'resource': self._resource,
                                       'id': obj['id']})
                # TODO(salvatore-orlando): The object being processed when the
                # plugin raised might have been created or not in the db.
                # We need a way for ensuring that if it has been created,
                # it is then deleted
def create(self, request, body=None, **kwargs):
self._notifier.info(request.context,
self._resource + '.create.start',
body)
return self._create(request, body, **kwargs)
    @db_api.retry_db_errors
    def _create(self, request, body, **kwargs):
        """Creates a new instance of the requested entity.

        Validates the request body, enforces authZ policy and quota
        reservations per tenant, dispatches to the plugin (natively bulk,
        emulated bulk, or single create) and emits the '.create.end'
        notification with the response payload.
        """
        parent_id = kwargs.get(self._parent_id_name)
        body = Controller.prepare_request_body(request.context,
                                               copy.deepcopy(body), True,
                                               self._resource, self._attr_info,
                                               allow_bulk=self._allow_bulk)
        action = self._plugin_handlers[self.CREATE]
        # Check authz
        if self._collection in body:
            # Have to account for bulk create
            items = body[self._collection]
        else:
            items = [body]
        # Ensure policy engine is initialized
        policy.init()
        # Store requested resource amounts grouping them by tenant
        # This won't work with multiple resources. However because of the
        # current structure of this controller there will hardly be more than
        # one resource for which reservations are being made
        request_deltas = collections.defaultdict(int)
        for item in items:
            self._validate_network_tenant_ownership(request,
                                                    item[self._resource])
            policy.enforce(request.context,
                           action,
                           item[self._resource],
                           pluralized=self._collection)
            if 'tenant_id' not in item[self._resource]:
                # no tenant_id - no quota check
                continue
            tenant_id = item[self._resource]['tenant_id']
            request_deltas[tenant_id] += 1
        # Quota enforcement
        reservations = []
        try:
            for (tenant, delta) in request_deltas.items():
                reservation = quota.QUOTAS.make_reservation(
                    request.context,
                    tenant,
                    {self._resource: delta},
                    self._plugin)
                reservations.append(reservation)
        except n_exc.QuotaResourceUnknown as e:
            # We don't want to quota this resource
            LOG.debug(e)

        def notify(create_result):
            # Ensure usage trackers for all resources affected by this API
            # operation are marked as dirty
            with request.context.session.begin():
                # Commit the reservation(s)
                for reservation in reservations:
                    quota.QUOTAS.commit_reservation(
                        request.context, reservation.reservation_id)
                resource_registry.set_resources_dirty(request.context)
            notifier_method = self._resource + '.create.end'
            self._notifier.info(request.context,
                                notifier_method,
                                create_result)
            self._send_dhcp_notification(request.context,
                                         create_result,
                                         notifier_method)
            return create_result

        def do_create(body, bulk=False, emulated=False):
            # Dispatch to the proper plugin entry point; cancel the quota
            # reservations if the plugin raises.
            kwargs = {self._parent_id_name: parent_id} if parent_id else {}
            if bulk and not emulated:
                obj_creator = getattr(self._plugin, "%s_bulk" % action)
            else:
                obj_creator = getattr(self._plugin, action)
            try:
                if emulated:
                    return self._emulate_bulk_create(obj_creator, request,
                                                     body, parent_id)
                else:
                    if self._collection in body:
                        # This is weird but fixing it requires changes to the
                        # plugin interface
                        kwargs.update({self._collection: body})
                    else:
                        kwargs.update({self._resource: body})
                    return obj_creator(request.context, **kwargs)
            except Exception:
                # In case of failure the plugin will always raise an
                # exception. Cancel the reservation
                with excutils.save_and_reraise_exception():
                    for reservation in reservations:
                        quota.QUOTAS.cancel_reservation(
                            request.context, reservation.reservation_id)
        if self._collection in body and self._native_bulk:
            # plugin does atomic bulk create operations
            objs = do_create(body, bulk=True)
            # Use first element of list to discriminate attributes which
            # should be removed because of authZ policies
            fields_to_strip = self._exclude_attributes_by_policy(
                request.context, objs[0])
            return notify({self._collection: [self._filter_attributes(
                request.context, obj, fields_to_strip=fields_to_strip)
                for obj in objs]})
        else:
            if self._collection in body:
                # Emulate atomic bulk behavior
                objs = do_create(body, bulk=True, emulated=True)
                return notify({self._collection: objs})
            else:
                obj = do_create(body)
                self._send_nova_notification(action, {},
                                             {self._resource: obj})
                return notify({self._resource: self._view(request.context,
                                                          obj)})
def delete(self, request, id, **kwargs):
"""Deletes the specified entity."""
if request.body:
msg = _('Request body is not supported in DELETE.')
raise webob.exc.HTTPBadRequest(msg)
self._notifier.info(request.context,
self._resource + '.delete.start',
{self._resource + '_id': id})
return self._delete(request, id, **kwargs)
    @db_api.retry_db_errors
    def _delete(self, request, id, **kwargs):
        """Load the object, enforce authZ policy, delete it and notify.

        No value is returned: DELETE responses carry no body.
        """
        action = self._plugin_handlers[self.DELETE]
        # Check authz
        policy.init()
        parent_id = kwargs.get(self._parent_id_name)
        # The object is fetched before deletion so notifications can carry
        # its last known view.
        obj = self._item(request, id, parent_id=parent_id)
        try:
            policy.enforce(request.context,
                           action,
                           obj,
                           pluralized=self._collection)
        except oslo_policy.PolicyNotAuthorized:
            # To avoid giving away information, pretend that it
            # doesn't exist
            msg = _('The resource could not be found.')
            raise webob.exc.HTTPNotFound(msg)
        obj_deleter = getattr(self._plugin, action)
        obj_deleter(request.context, id, **kwargs)
        # A delete operation usually alters resource usage, so mark affected
        # usage trackers as dirty
        resource_registry.set_resources_dirty(request.context)
        notifier_method = self._resource + '.delete.end'
        self._notifier.info(request.context,
                            notifier_method,
                            {self._resource + '_id': id})
        result = {self._resource: self._view(request.context, obj)}
        self._send_nova_notification(action, {}, result)
        self._send_dhcp_notification(request.context,
                                     result,
                                     notifier_method)
def update(self, request, id, body=None, **kwargs):
"""Updates the specified entity's attributes."""
try:
payload = body.copy()
except AttributeError:
msg = _("Invalid format: %s") % request.body
raise exceptions.BadRequest(resource='body', msg=msg)
payload['id'] = id
self._notifier.info(request.context,
self._resource + '.update.start',
payload)
return self._update(request, id, body, **kwargs)
    @db_api.retry_db_errors
    def _update(self, request, id, body, **kwargs):
        """Validate the body, enforce policy on the merged object, update.

        Retried on transient DB errors (see the decorator).  Policy is
        enforced against the stored object overlaid with the requested
        changes; a denied update on a foreign-tenant object is reported as
        HTTPNotFound so the resource's existence is not revealed.
        """
        body = Controller.prepare_request_body(request.context,
                                               copy.deepcopy(body), False,
                                               self._resource, self._attr_info,
                                               allow_bulk=self._allow_bulk)
        action = self._plugin_handlers[self.UPDATE]
        # Load object to check authz
        # but pass only attributes in the original body and required
        # by the policy engine to the policy 'brain'
        field_list = [name for (name, value) in six.iteritems(self._attr_info)
                      if (value.get('required_by_policy') or
                          value.get('primary_key') or
                          'default' not in value)]
        # Ensure policy engine is initialized
        policy.init()
        parent_id = kwargs.get(self._parent_id_name)
        orig_obj = self._item(request, id, field_list=field_list,
                              parent_id=parent_id)
        # Keep a pristine copy for the nova notification emitted at the end.
        orig_object_copy = copy.copy(orig_obj)
        orig_obj.update(body[self._resource])
        # Make a list of attributes to be updated to inform the policy engine
        # which attributes are set explicitly so that it can distinguish them
        # from the ones that are set to their default values.
        orig_obj[n_const.ATTRIBUTES_TO_UPDATE] = body[self._resource].keys()
        try:
            policy.enforce(request.context,
                           action,
                           orig_obj,
                           pluralized=self._collection)
        except oslo_policy.PolicyNotAuthorized:
            with excutils.save_and_reraise_exception() as ctxt:
                # If a tenant is modifying it's own object, it's safe to return
                # a 403. Otherwise, pretend that it doesn't exist to avoid
                # giving away information.
                if request.context.tenant_id != orig_obj['tenant_id']:
                    ctxt.reraise = False
            msg = _('The resource could not be found.')
            raise webob.exc.HTTPNotFound(msg)
        obj_updater = getattr(self._plugin, action)
        # NOTE: kwargs is deliberately rebound here; the original request
        # kwargs (minus parent_id, re-added below) are no longer needed.
        kwargs = {self._resource: body}
        if parent_id:
            kwargs[self._parent_id_name] = parent_id
        obj = obj_updater(request.context, id, **kwargs)
        # Usually an update operation does not alter resource usage, but as
        # there might be side effects it might be worth checking for changes
        # in resource usage here as well (e.g: a tenant port is created when a
        # router interface is added)
        resource_registry.set_resources_dirty(request.context)
        result = {self._resource: self._view(request.context, obj)}
        notifier_method = self._resource + '.update.end'
        self._notifier.info(request.context, notifier_method, result)
        self._send_dhcp_notification(request.context,
                                     result,
                                     notifier_method)
        self._send_nova_notification(action, orig_object_copy, result)
        return result
    @staticmethod
    def prepare_request_body(context, body, is_create, resource, attr_info,
                             allow_bulk=False):
        """Verifies required attributes are in request body.

        Also checking that an attribute is only specified if it is allowed
        for the given operation (create/update).

        Attribute with default values are considered to be optional.

        body argument must be the deserialized body.

        Returns the validated body; bulk bodies are validated item by item
        via recursion.  Raises HTTPBadRequest on any validation failure.
        """
        collection = resource + "s"
        if not body:
            raise webob.exc.HTTPBadRequest(_("Resource body required"))
        LOG.debug("Request body: %(body)s", {'body': body})
        try:
            if collection in body:
                if not allow_bulk:
                    raise webob.exc.HTTPBadRequest(_("Bulk operation "
                                                     "not supported"))
                if not body[collection]:
                    raise webob.exc.HTTPBadRequest(_("Resources required"))
                # Validate each bulk item by recursing with the singular
                # resource form; items may be given bare or already wrapped.
                bulk_body = [
                    Controller.prepare_request_body(
                        context, item if resource in item
                        else {resource: item}, is_create, resource, attr_info,
                        allow_bulk) for item in body[collection]
                ]
                return {collection: bulk_body}
            res_dict = body.get(resource)
        except (AttributeError, TypeError):
            # body was not dict-like (e.g. a list or scalar).
            msg = _("Body contains invalid data")
            raise webob.exc.HTTPBadRequest(msg)
        if res_dict is None:
            msg = _("Unable to find '%s' in request body") % resource
            raise webob.exc.HTTPBadRequest(msg)
        attributes.populate_tenant_id(context, res_dict, attr_info, is_create)
        attributes.verify_attributes(res_dict, attr_info)
        if is_create:  # POST
            attributes.fill_default_value(attr_info, res_dict,
                                          webob.exc.HTTPBadRequest)
        else:  # PUT
            # Reject attempts to set attributes not updatable via PUT.
            for attr, attr_vals in six.iteritems(attr_info):
                if attr in res_dict and not attr_vals['allow_put']:
                    msg = _("Cannot update read-only attribute %s") % attr
                    raise webob.exc.HTTPBadRequest(msg)
        attributes.convert_value(attr_info, res_dict, webob.exc.HTTPBadRequest)
        return body
def _validate_network_tenant_ownership(self, request, resource_item):
# TODO(salvatore-orlando): consider whether this check can be folded
# in the policy engine
if (request.context.is_admin or request.context.is_advsvc or
self._resource not in ('port', 'subnet')):
return
network = self._plugin.get_network(
request.context,
resource_item['network_id'])
# do not perform the check on shared networks
if network.get('shared'):
return
network_owner = network['tenant_id']
if network_owner != resource_item['tenant_id']:
# NOTE(kevinbenton): we raise a 404 to hide the existence of the
# network from the tenant since they don't have access to it.
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def create_resource(collection, resource, plugin, params, allow_bulk=False,
                    member_actions=None, parent=None, allow_pagination=False,
                    allow_sorting=False):
    """Build a WSGI resource wrapping a Controller for *resource*.

    The returned resource translates plugin exceptions through FAULT_MAP.
    """
    ctrl = Controller(plugin, collection, resource, params, allow_bulk,
                      member_actions=member_actions, parent=parent,
                      allow_pagination=allow_pagination,
                      allow_sorting=allow_sorting)
    return wsgi_resource.Resource(ctrl, FAULT_MAP)
|
{
"content_hash": "b158a9cec3759c3551415f7e4ac04979",
"timestamp": "",
"source": "github",
"line_count": 723,
"max_line_length": 79,
"avg_line_length": 46.66113416320885,
"alnum_prop": 0.553385107896609,
"repo_name": "bigswitch/neutron",
"id": "84f9cb05ed72a752b0db650e14981d7275aaac3f",
"size": "34377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/api/v2/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "8468247"
},
{
"name": "Shell",
"bytes": "14648"
}
],
"symlink_target": ""
}
|
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import (
AbstractBaseUser,
python_2_unicode_compatible,
UserManager,
)
from django.core.exceptions import ValidationError
from django.core.mail import send_mail
from django.utils.http import urlquote
from django.db import models
from django.utils import timezone, six
from django.utils.translation import ugettext_lazy as _
# DJANGAE
from djangae.fields import CharOrNoneField, ComputedCharField
from .validators import validate_google_user_id
class GaeUserManager(UserManager):

    def pre_create_google_user(self, email, **extra_fields):
        """ Pre-create a User object for a user who will later log in via Google Accounts. """
        # Overridable default(s) first...
        values = {'is_active': True}
        values.update(extra_fields)
        # ...then values callers may not override.
        values['email'] = self.normalize_email(email)  # lowercase the domain only
        values['username'] = None
        values['password'] = make_password(None)  # unusable password
        # Stupidly, last_login is not nullable, so we can't set it to None.
        return self.create(**values)
def _get_email_lower(user):
""" Computer function for the computed lowercase email field. """
# Note that the `email` field is not nullable, but the `email_lower` one is nullable and must
# not contain empty strings because it is unique
return user.email and user.email.lower() or None
@python_2_unicode_compatible
class GaeAbstractBaseUser(AbstractBaseUser):
    """ Abstract base class for creating a User model which works with the App
    Engine users API. """

    username = CharOrNoneField(
        # This stores the Google user_id, or custom username for non-Google-based users.
        # We allow it to be null so that Google-based users can be pre-created before they log in.
        _('User ID'), max_length=21, unique=True, blank=True, null=True, default=None,
        validators=[validate_google_user_id]
    )
    first_name = models.CharField(_('first name'), max_length=30, blank=True)
    last_name = models.CharField(_('last name'), max_length=30, blank=True)
    # Email addresses are case sensitive, but many email systems and many people treat them as if
    # they're not. We must store the case-preserved email address to ensure that sending emails
    # always works, but we must be able to query for them case insensitively and therefore we must
    # enforce uniqueness on a case insensitive basis, hence these 2 fields
    email = models.EmailField(_('email address'))
    # The null-able-ness of the email_lower is only to deal with when an email address moves between
    # Google Accounts and therefore we need to wipe it without breaking the unique constraint.
    email_lower = ComputedCharField(
        _get_email_lower, max_length=email.max_length, unique=True, null=True
    )
    is_staff = models.BooleanField(
        _('staff status'), default=False,
        help_text=_('Designates whether the user can log into this admin site.')
    )
    is_active = models.BooleanField(
        _('active'), default=True,
        help_text=_(
            'Designates whether this user should be treated as '
            'active. Unselect this instead of deleting accounts.'
        )
    )
    date_joined = models.DateTimeField(_('date joined'), default=timezone.now)

    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['email']

    objects = GaeUserManager()

    class Meta:
        abstract = True

    def clean(self):
        # Only call up if username is not none. Parent's clean() stringifies
        # username blindly
        if self.get_username() is not None:
            super(GaeAbstractBaseUser, self).clean()

    def get_absolute_url(self):
        """Return the canonical URL for this user, keyed on username."""
        return "/users/%s/" % urlquote(self.username)

    def get_full_name(self):
        """
        Returns the first_name plus the last_name, with a space in between.
        """
        full_name = '%s %s' % (self.first_name, self.last_name)
        return full_name.strip()

    def get_short_name(self):
        "Returns the short name for the user."
        return self.first_name

    def email_user(self, subject, message, from_email=None):
        """
        Sends an email to this User.
        """
        send_mail(subject, message, from_email, [self.email])

    def __str__(self):
        """
        We have to override this as username is nullable. We either return the email
        address, or if there is a username, we return "email (username)".
        """
        username = self.get_username()
        if username:
            return "{} ({})".format(six.text_type(self.email), six.text_type(username))
        return six.text_type(self.email)

    def validate_unique(self, exclude=None):
        """ Check that the email address does not already exist by querying on email_lower. """
        exclude = exclude or []
        if "email_lower" not in exclude:
            # We do our own check using the email_lower field, so we don't need Django to query
            # on it as well
            exclude.append("email_lower")
        try:
            super(GaeAbstractBaseUser, self).validate_unique(exclude=exclude)
        except ValidationError as super_error:
            pass
        else:
            super_error = None
        if self.email and "email" not in exclude:
            existing = self.__class__.objects.filter(email_lower=self.email.lower())
            if not self._state.adding:
                existing = existing.exclude(pk=self.pk)
            if existing.exists():
                model_name = self._meta.verbose_name
                field_name = self._meta.get_field("email").verbose_name
                message = "%s with this %s already exists" % (model_name, field_name)
                error_dict = {"email": [message]}
                if super_error:
                    super_error.update_error_dict(error_dict)
                    raise super_error
                else:
                    raise ValidationError(error_dict)
        elif super_error:
            # BUGFIX: a bare `raise` here is not inside an active `except`
            # block; on Python 3 it raises RuntimeError ("No active
            # exception to re-raise") instead of re-raising the captured
            # ValidationError.  Raising the saved exception object is
            # correct on both Python 2 and 3.
            raise super_error
|
{
"content_hash": "f58ad129640aca76dcf458ff57d8b295",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 100,
"avg_line_length": 39.48407643312102,
"alnum_prop": 0.6371995483142442,
"repo_name": "grzes/djangae",
"id": "07bbc95141670ae7ced299ce6f79dcaa37927c2e",
"size": "6213",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "djangae/contrib/gauth/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2305"
},
{
"name": "Python",
"bytes": "1074740"
},
{
"name": "Shell",
"bytes": "1052"
}
],
"symlink_target": ""
}
|
# Prefer setuptools (richer command set, e.g. develop/wheel support); fall
# back to the stdlib distutils when setuptools is not installed.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# Package metadata for gwas_wrapper.
setup(
    name='gwas_wrapper',
    version='0.1',
    description='Python wrapper for interacting with the NHGRI-EBI GWAS Catalog',
    license='MIT',
    keywords=['gwas', 'genomics', 'snp', 'bioinformatics'],
    classifiers=[
        'Programming Language :: Python :: 2.7',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
    ],
    author='Kevin Arvai',
    author_email='arvkevi@gmail.com',
    download_url='https://github.com/arvkevi/gwas_wrapper/tarball/0.1',
    url='https://github.com/arvkevi/gwas_wrapper',
    packages=['gwas_wrapper'],
    zip_safe=False,
)
|
{
"content_hash": "7b3fabc252eddc0cc4ea85412ced74f1",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 80,
"avg_line_length": 32.45,
"alnum_prop": 0.6949152542372882,
"repo_name": "arvkevi/gwas_wrapper",
"id": "ec4773d721abb12a38953daeb8d501dcfc47ea34",
"size": "674",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9705"
}
],
"symlink_target": ""
}
|
""" GlusterFS native protocol (glusterfs) driver for shares.
Test cases for GlusterFS native protocol driver.
"""
import ddt
import mock
from oslo_config import cfg
from manila.common import constants
from manila import context
from manila import exception
from manila.share import configuration as config
from manila.share.drivers.glusterfs import common
from manila.share.drivers import glusterfs_native
from manila import test
from manila.tests import fake_utils
# Module-level alias to the global oslo.config registry used by the tests.
CONF = cfg.CONF
def new_share(**kwargs):
    """Build a fake share dict; keyword arguments override the defaults."""
    defaults = dict(id='fakeid', name='fakename', size=1,
                    share_proto='glusterfs')
    defaults.update(kwargs)
    return defaults
@ddt.ddt
class GlusterfsNativeShareDriverTestCase(test.TestCase):
    """Tests GlusterfsNativeShareDriver."""

    def setUp(self):
        super(GlusterfsNativeShareDriverTestCase, self).setUp()
        # Stub out process execution so no real gluster CLI is invoked.
        fake_utils.stub_out_utils_execute(self)
        self._execute = fake_utils.fake_execute
        self._context = context.get_admin_context()
        # Two fake gluster backends (server + volume) and matching shares.
        self.glusterfs_target1 = 'root@host1:/gv1'
        self.glusterfs_target2 = 'root@host2:/gv2'
        self.glusterfs_server1 = 'root@host1'
        self.glusterfs_server2 = 'root@host2'
        self.glusterfs_server1_volumes = 'manila-share-1-1G\nshare1'
        self.glusterfs_server2_volumes = 'manila-share-2-2G\nshare2'
        self.share1 = new_share(
            export_location=self.glusterfs_target1,
            status=constants.STATUS_AVAILABLE)
        self.share2 = new_share(
            export_location=self.glusterfs_target2,
            status=constants.STATUS_AVAILABLE)
        self.gmgr1 = common.GlusterManager(self.glusterfs_server1,
                                           self._execute, None, None,
                                           requires={'volume': False})
        self.gmgr2 = common.GlusterManager(self.glusterfs_server2,
                                           self._execute, None, None,
                                           requires={'volume': False})
        self.glusterfs_volumes_dict = (
            {'root@host1:/manila-share-1-1G': {'size': 1},
             'root@host2:/manila-share-2-2G': {'size': 2}})
        self.glusterfs_used_vols = set([
            'root@host1:/manila-share-1-1G',
            'root@host2:/manila-share-2-2G'])
        CONF.set_default('glusterfs_volume_pattern',
                         'manila-share-\d+-#{size}G$')
        CONF.set_default('driver_handles_share_servers', False)
        self.fake_conf = config.Configuration(None)
        self.mock_object(common.GlusterManager, 'make_gluster_call')
        self._driver = glusterfs_native.GlusterfsNativeShareDriver(
            execute=self._execute,
            configuration=self.fake_conf)
        self.addCleanup(fake_utils.fake_execute_set_repliers, [])
        self.addCleanup(fake_utils.fake_execute_clear_log)

    def test_supported_protocols(self):
        self.assertEqual(('GLUSTERFS', ),
                         self._driver.supported_protocols)

    @ddt.data(True, False)
    def test_setup_via_manager(self, has_dynauth):
        """Volume setup issues the expected gluster calls.

        Parameterized on whether 'server.dynamic-auth' is supported (the
        unsupported path raises ProcessExecutionError with exit code 1).
        """
        gmgr = mock.Mock()
        if has_dynauth:
            _gluster_call = lambda *args, **kwargs: None
        else:
            def _gluster_call(*args, **kwargs):
                if kwargs.get('raw_error'):
                    raise exception.ProcessExecutionError(exit_code=1)
        gmgr.gluster_call = mock.Mock(side_effect=_gluster_call)
        gmgr.volume = 'fakevol'
        gmgr.export = 'fakehost:/fakevol'
        gmgr.get_gluster_vol_option = mock.Mock(
            return_value='glusterfs-server-name,some-other-name')
        share = mock.Mock()

        ret = self._driver._setup_via_manager({'manager': gmgr,
                                               'share': share})
        gmgr.get_gluster_vol_option.assert_called_once_with('auth.ssl-allow')
        args = (
            ('volume', 'set', 'fakevol', 'nfs.export-volumes', 'off',
             {'log': mock.ANY}),
            ('volume', 'set', 'fakevol', 'client.ssl', 'on',
             {'log': mock.ANY}),
            ('volume', 'set', 'fakevol', 'server.ssl', 'on',
             {'log': mock.ANY}),
            ('volume', 'set', 'fakevol', 'server.dynamic-auth', 'on',
             {'raw_error': True}),
            ('volume', 'stop', 'fakevol', '--mode=script', {'log': mock.ANY}),
            ('volume', 'start', 'fakevol', {'log': mock.ANY}))
        gmgr.gluster_call.assert_has_calls(
            [mock.call(*a[:-1], **a[-1]) for a in args])
        self.assertEqual(ret, gmgr.export)

    def test_setup_via_manager_with_parent(self):
        """Setting up from a parent (snapshot source) copies its ssl-allow."""
        gmgr = mock.Mock()
        gmgr.gluster_call = mock.Mock()
        gmgr.volume = 'fakevol'
        gmgr.export = 'fakehost:/fakevol'
        gmgr_parent = mock.Mock()
        gmgr_parent.get_gluster_vol_option = mock.Mock(
            return_value=(
                'glusterfs-server-name,some-other-name,manila-host.com'))
        share = mock.Mock()
        share_parent = mock.Mock()

        ret = self._driver._setup_via_manager(
            {'manager': gmgr, 'share': share},
            {'manager': gmgr_parent, 'share': share_parent})
        gmgr_parent.get_gluster_vol_option.assert_called_once_with(
            'auth.ssl-allow')
        args = (
            ('volume', 'set', 'fakevol', 'auth.ssl-allow',
             'glusterfs-server-name,manila-host.com', {'log': mock.ANY}),
            ('volume', 'set', 'fakevol', 'nfs.export-volumes', 'off',
             {'log': mock.ANY}),
            ('volume', 'set', 'fakevol', 'client.ssl', 'on',
             {'log': mock.ANY}),
            ('volume', 'set', 'fakevol', 'server.ssl', 'on',
             {'log': mock.ANY}),
            ('volume', 'set', 'fakevol', 'server.dynamic-auth', 'on',
             {'raw_error': True}))
        gmgr.gluster_call.assert_has_calls(
            [mock.call(*a[:-1], **a[-1]) for a in args])
        self.assertEqual(ret, gmgr.export)

    @ddt.data(True, False)
    def test_setup_via_manager_no_option_data(self, has_parent):
        """Empty 'auth.ssl-allow' on the queried manager is an error."""
        share = mock.Mock()
        gmgr = mock.Mock()
        if has_parent:
            share_parent = mock.Mock()
            gmgr_parent = mock.Mock()
            share_mgr_parent = {'share': share_parent, 'manager': gmgr_parent}
            gmgr_queried = gmgr_parent
        else:
            share_mgr_parent = None
            gmgr_queried = gmgr
        gmgr_queried.get_gluster_vol_option = mock.Mock(return_value='')

        self.assertRaises(exception.GlusterfsException,
                          self._driver._setup_via_manager,
                          {'share': share, 'manager': gmgr},
                          share_mgr_parent=share_mgr_parent)
        gmgr_queried.get_gluster_vol_option.assert_called_once_with(
            'auth.ssl-allow')

    @ddt.data({'trouble': exception.ProcessExecutionError,
               'trouble_kw': {'exit_code': 2},
               '_exception': exception.GlusterfsException},
              {'trouble': RuntimeError, 'trouble_kw': {},
               '_exception': RuntimeError})
    @ddt.unpack
    def test_setup_via_manager_exception(self, trouble, trouble_kw,
                                         _exception):
        """Failures of the raw 'server.dynamic-auth' call are propagated.

        ProcessExecutionError with exit code != 1 is wrapped as
        GlusterfsException; other exceptions pass through unchanged.
        """
        share = mock.Mock()
        gmgr = mock.Mock()

        def raise_exception(*args, **kwargs):
            if kwargs.get('raw_error'):
                raise trouble(**trouble_kw)

        gmgr.gluster_call = mock.Mock(side_effect=raise_exception)
        gmgr.get_gluster_vol_option = mock.Mock()

        self.assertRaises(
            _exception, self._driver._setup_via_manager,
            {'share': share, 'manager': gmgr})

    def test_snapshots_are_supported(self):
        self.assertTrue(self._driver.snapshots_are_supported)

    def test_allow_access_via_manager(self):
        """Granting cert access appends the CN to 'auth.ssl-allow'."""
        access = {'access_type': 'cert', 'access_to': 'client.example.com'}
        gmgr1 = common.GlusterManager(self.glusterfs_target1, self._execute,
                                      None, None)
        self.mock_object(gmgr1, 'get_gluster_vol_option',
                         mock.Mock(return_value='some.common.name'))
        test_args = ('volume', 'set', 'gv1', 'auth.ssl-allow',
                     'some.common.name,' + access['access_to'])

        self._driver.layout.gluster_used_vols = set([self.glusterfs_target1])

        self._driver._allow_access_via_manager(gmgr1, self._context,
                                               self.share1, access)
        gmgr1.get_gluster_vol_option.assert_called_once_with('auth.ssl-allow')
        gmgr1.gluster_call.assert_called_once_with(*test_args, log=mock.ANY)

    def test_allow_access_via_manager_with_share_having_access(self):
        """Granting access that is already present is a no-op."""
        access = {'access_type': 'cert', 'access_to': 'client.example.com'}
        gmgr1 = common.GlusterManager(self.glusterfs_target1, self._execute,
                                      None, None)
        self.mock_object(
            gmgr1, 'get_gluster_vol_option',
            mock.Mock(return_value='some.common.name,' + access['access_to']))
        self._driver.layout.gluster_used_vols = set([self.glusterfs_target1])

        self._driver._allow_access_via_manager(gmgr1, self._context,
                                               self.share1, access)
        gmgr1.get_gluster_vol_option.assert_called_once_with('auth.ssl-allow')
        self.assertFalse(gmgr1.gluster_call.called)

    def test_allow_access_via_manager_invalid_access_type(self):
        access = {'access_type': 'invalid', 'access_to': 'client.example.com'}
        expected_exec = []

        self.assertRaises(exception.InvalidShareAccess,
                          self._driver._allow_access_via_manager,
                          self.gmgr1, self._context, self.share1, access)
        self.assertEqual(expected_exec, fake_utils.fake_execute_get_log())

    @ddt.data('on', '1', 'Yes', 'TRUE', 'enable')
    def test_deny_access_via_manager(self, trueish):
        """With dynamic-auth enabled no volume restart is needed."""
        self.mock_object(common, '_restart_gluster_vol', mock.Mock())
        access = {'access_type': 'cert', 'access_to': 'client.example.com'}
        gmgr1 = common.GlusterManager(self.glusterfs_target1, self._execute,
                                      None, None)

        def _get_gluster_vol_option(opt):
            if opt == 'auth.ssl-allow':
                return('some.common.name,' + access['access_to'])
            elif opt == 'server.dynamic-auth':
                return trueish

        self.mock_object(
            gmgr1, 'get_gluster_vol_option',
            mock.Mock(side_effect=_get_gluster_vol_option))
        self._driver.layout.gluster_used_vols = set([self.glusterfs_target1])

        self._driver._deny_access_via_manager(gmgr1, self._context,
                                              self.share1, access)

        gmgr1.get_gluster_vol_option.assert_has_calls(
            [mock.call(a) for a in ('auth.ssl-allow', 'server.dynamic-auth')])
        test_args = ('volume', 'set', 'gv1', 'auth.ssl-allow',
                     'some.common.name')
        gmgr1.gluster_call.assert_called_once_with(*test_args, log=mock.ANY)
        self.assertFalse(common._restart_gluster_vol.called)

    @ddt.data('off', None, 'strangelove')
    def test_deny_access_via_manager_no_dyn_auth(self, falseish):
        """Without dynamic-auth the volume must be restarted."""
        self.mock_object(common, '_restart_gluster_vol', mock.Mock())
        access = {'access_type': 'cert', 'access_to': 'client.example.com'}
        gmgr1 = common.GlusterManager(self.glusterfs_target1, self._execute,
                                      None, None)

        def _get_gluster_vol_option(opt):
            if opt == 'auth.ssl-allow':
                return('some.common.name,' + access['access_to'])
            elif opt == 'server.dynamic-auth':
                return falseish

        self.mock_object(
            gmgr1, 'get_gluster_vol_option',
            mock.Mock(side_effect=_get_gluster_vol_option))
        self._driver.layout.gluster_used_vols = set([self.glusterfs_target1])

        self._driver._deny_access_via_manager(gmgr1, self._context,
                                              self.share1, access)

        gmgr1.get_gluster_vol_option.assert_has_calls(
            [mock.call(a) for a in ('auth.ssl-allow', 'server.dynamic-auth')])
        test_args = ('volume', 'set', 'gv1', 'auth.ssl-allow',
                     'some.common.name')
        gmgr1.gluster_call.assert_called_once_with(*test_args, log=mock.ANY)
        common._restart_gluster_vol.assert_called_once_with(gmgr1)

    def test_deny_access_via_manager_with_share_having_no_access(self):
        """Revoking access that is not present is a no-op."""
        self.mock_object(common, '_restart_gluster_vol', mock.Mock())
        access = {'access_type': 'cert', 'access_to': 'client.example.com'}
        gmgr1 = common.GlusterManager(self.glusterfs_target1, self._execute,
                                      None, None)
        self.mock_object(gmgr1, 'get_gluster_vol_option',
                         mock.Mock(return_value='some.common.name'))
        self._driver.layout.gluster_used_vols = set([self.glusterfs_target1])

        self._driver._deny_access_via_manager(gmgr1, self._context,
                                              self.share1, access)

        gmgr1.get_gluster_vol_option.assert_called_once_with('auth.ssl-allow')
        self.assertFalse(gmgr1.gluster_call.called)
        self.assertFalse(common._restart_gluster_vol.called)

    def test_deny_access_via_manager_invalid_access_type(self):
        self.mock_object(common, '_restart_gluster_vol', mock.Mock())
        access = {'access_type': 'invalid', 'access_to': 'NotApplicable'}

        self.assertRaises(exception.InvalidShareAccess,
                          self._driver._deny_access_via_manager, self.gmgr1,
                          self._context, self.share1, access)
        self.assertFalse(common._restart_gluster_vol.called)

    def test_update_share_stats(self):
        self._driver._update_share_stats()

        test_data = {
            'share_backend_name': 'GlusterFS-Native',
            'driver_handles_share_servers': False,
            'vendor_name': 'Red Hat',
            'driver_version': '1.1',
            'storage_protocol': 'glusterfs',
            'reserved_percentage': 0,
            'qos': False,
            'total_capacity_gb': 'unknown',
            'free_capacity_gb': 'unknown',
            'pools': None,
            'snapshot_support': True,
        }
        self.assertEqual(test_data, self._driver._stats)

    def test_get_network_allocations_number(self):
        self.assertEqual(0, self._driver.get_network_allocations_number())
|
{
"content_hash": "a823ab8083b128846eff105d5191b3a7",
"timestamp": "",
"source": "github",
"line_count": 345,
"max_line_length": 78,
"avg_line_length": 42.44347826086957,
"alnum_prop": 0.5702383391381548,
"repo_name": "scality/manila",
"id": "bb0f27eb878626a5bd5798cf47aab678d7cf75d2",
"size": "15276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila/tests/share/drivers/test_glusterfs_native.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "5912966"
},
{
"name": "Shell",
"bytes": "46081"
}
],
"symlink_target": ""
}
|
from django.shortcuts import render_to_response
from django.template import RequestContext
def index(request):
    """Render the demo landing page, applying the request's context processors."""
    return render_to_response('demo/index.html', {}, context_instance=RequestContext(request))
|
{
"content_hash": "c121358afe34912ae5778806298a679c",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 94,
"avg_line_length": 34.666666666666664,
"alnum_prop": 0.7932692307692307,
"repo_name": "QLGu/django-salted",
"id": "e0136c2d80365843863a8eb464e920663fb1a1c1",
"size": "208",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "demo_project/demo_project/demo/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "130"
},
{
"name": "Nginx",
"bytes": "1299"
},
{
"name": "Python",
"bytes": "8931"
},
{
"name": "SaltStack",
"bytes": "7583"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from collections import namedtuple
import six
import tensorflow as tf
from tensorflow.python.ops import data_flow_ops
import pdb
def _all_reduce_using_copy(tensors_across_devices, use_mean):
  """Sum (or average) a list of tensors after copying them to this device.

  The tensors are copied to the current device and then reduced.

  Args:
    tensors_across_devices: A list of tensors, each on a different device.
    use_mean: If True, divide the sum by the number of tensors.

  Returns:
    The reduced tensor, placed on the current device.
  """
  total = tf.add_n(tensors_across_devices)
  if not use_mean:
    return total
  return total * (1 / len(tensors_across_devices))
@six.add_metaclass(abc.ABCMeta)
class BatchAllReduceAlgorithm(object):
  """Represents an algorithm for performing a batch all-reduce operation."""

  def batch_all_reduce(self, all_device_tensors):
    """Performs a batch all-reduce.

    The reduction done is a sum.

    `all_device_tensors` is a list of list of tensors that will be batch
    all-reduced. All tensors within a single inner list must be on the same
    device. The nth element in each list, for any n, will be reduced together.
    The return value is in the same form as `all_device_tensors`, except that
    each tensor is reduced.

    For example, if `all_device_tensors` is:
    [[ A,  B  ],     # A and B are on GPU 0
     [ C,  D  ]]     # C and D are on GPU 1

    Then the return value will be:
    [[ A+C,  B+D ],  # These two tensors are on GPU 0
     [ A+C,  B+D ]]  # These two tensors are on GPU 1

    Arguments:
      all_device_tensors: A list of list of tensors. `all_device_tensors[i][j]`
        is a tensor where `i` is the device index and `j` is the tensor index.

    Returns:
      reduced_all_device_tensors: A list in the same form as
        `all_device_tensors`, except each tensor has been reduced.
      warmup_ops: A list of ops needed to be run once before the all-reduce can
        occur.
    """
    # No warmup is required by the concrete algorithm used here; the empty
    # list preserves the (tensors, warmup_ops) return contract.
    warmup_ops = []
    all_device_tensors = self._do_batch_all_reduce(all_device_tensors)
    return all_device_tensors, warmup_ops

  @abc.abstractmethod
  def _do_batch_all_reduce(self, all_device_tensors):
    """Performs a batch all-reduce.

    Unlike `self.batch_all_reduce`, this does not do any preprocessing of the
    tensors.

    Args:
      all_device_tensors: A list of list of tensors. `all_device_tensors[i][j]`
        is a tensor where `i` is the device index and `j` is the tensor index.

    Returns:
      reduced_all_device_tensors: A list in the same form as
        `all_device_tensors`, except each tensor has been reduced.
    """
    pass
class CopyToDeviceAlgorithm(BatchAllReduceAlgorithm):
  """An algorithm that copies tensors to be reduced to a specific device."""

  def __init__(self, devices_to_reduce_on, use_mean=True):
    # devices_to_reduce_on: list of device strings; reductions are assigned
    # to them round-robin, one per tensor index.
    self._devices = devices_to_reduce_on
    self._use_mean = use_mean

  def _do_batch_all_reduce(self, all_device_tensors):
    """Reduce each tensor group on one of the configured devices."""
    reduced_tensors = []
    # zip(*all_device_tensors) groups the j-th tensor from every device;
    # group j is reduced on self._devices[j % len(self._devices)].
    for i, tensors_across_devices in enumerate(zip(*all_device_tensors)):
      with tf.device(self._devices[i % len(self._devices)]):
        reduced_tensor = _all_reduce_using_copy(tensors_across_devices,
                                                self._use_mean)
        reduced_tensors.append(reduced_tensor)
    # The tensors will be brought back to each device once they are used.
    return [reduced_tensors] * len(all_device_tensors)
def algorithm_from_params(devices_to_reduce_on):
  """Returns a BatchAllReduceAlgorithm reducing on the given devices.

  Args:
    devices_to_reduce_on: A list of device strings; each reduced tensor is
      placed on one of these, assigned round-robin.

  Returns:
    A CopyToDeviceAlgorithm instance.
  """
  return CopyToDeviceAlgorithm(devices_to_reduce_on)
|
{
"content_hash": "4a3ba248ae17e937704bd018e6ca0ff1",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 79,
"avg_line_length": 35.424528301886795,
"alnum_prop": 0.6836218375499334,
"repo_name": "neuroailab/tfutils",
"id": "cade0ff18cc1a484a677f0e6e7d2b8c7db65a455",
"size": "3755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfutils/multi_gpu/easy_batch_allreduce.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "405509"
}
],
"symlink_target": ""
}
|
# Re-export the mixin at package level so callers can do
# `from closable_admin_filter import AdminClosableFilterMixin`.
from closable_admin_filter.mixins import AdminClosableFilterMixin

# Package version string.
VERSION = "0.1.1"
|
{
"content_hash": "34bf33c9461440fd9ef11d08cb646ee6",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 65,
"avg_line_length": 28,
"alnum_prop": 0.8214285714285714,
"repo_name": "eternalfame/django-closable_admin_filter",
"id": "b2d78562db9ec64c12ba25da3936e31cc4fb068b",
"size": "84",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "closable_admin_filter/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "386"
},
{
"name": "JavaScript",
"bytes": "669"
},
{
"name": "Python",
"bytes": "1544"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.