| repo_name | ref | path | copies | content |
|---|---|---|---|---|
| Ashaba/rms | refs/heads/master | rmslocalenv/lib/python2.7/site-packages/django/template/defaulttags.py | 31 |
"""Default tags used by the template system, available to all templates."""
from __future__ import unicode_literals
import os
import re
import sys
import warnings
from datetime import datetime
from itertools import cycle as itertools_cycle, groupby
from django.conf import settings
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text, smart_text
from django.utils.html import conditional_escape, format_html
from django.utils.lorem_ipsum import paragraphs, words
from django.utils.safestring import mark_safe
from .base import (
BLOCK_TAG_END, BLOCK_TAG_START, COMMENT_TAG_END, COMMENT_TAG_START,
SINGLE_BRACE_END, SINGLE_BRACE_START, VARIABLE_ATTRIBUTE_SEPARATOR,
VARIABLE_TAG_END, VARIABLE_TAG_START, Context, Node, NodeList, Template,
TemplateSyntaxError, VariableDoesNotExist, kwarg_re,
render_value_in_context, token_kwargs,
)
from .defaultfilters import date
from .library import Library
from .smartif import IfParser, Literal
register = Library()
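# All tags below attach to this module-level Library instance via the
# @register.tag decorator; the template engine loads them as its default tags.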
class AutoEscapeControlNode(Node):
"""Implements the actions of the autoescape tag."""
def __init__(self, setting, nodelist):
self.setting, self.nodelist = setting, nodelist
def render(self, context):
old_setting = context.autoescape
context.autoescape = self.setting
output = self.nodelist.render(context)
context.autoescape = old_setting
if self.setting:
return mark_safe(output)
else:
return output
class CommentNode(Node):
def render(self, context):
return ''
class CsrfTokenNode(Node):
def render(self, context):
csrf_token = context.get('csrf_token')
if csrf_token:
if csrf_token == 'NOTPROVIDED':
return format_html("")
else:
return format_html("<input type='hidden' name='csrfmiddlewaretoken' value='{}' />", csrf_token)
else:
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
if settings.DEBUG:
warnings.warn(
"A {% csrf_token %} was used in a template, but the context "
"did not provide the value. This is usually caused by not "
"using RequestContext."
)
return ''
class CycleNode(Node):
def __init__(self, cyclevars, variable_name=None, silent=False):
self.cyclevars = cyclevars
self.variable_name = variable_name
self.silent = silent
def render(self, context):
if self not in context.render_context:
# First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self]
value = next(cycle_iter).resolve(context)
if self.variable_name:
context[self.variable_name] = value
if self.silent:
return ''
return render_value_in_context(value, context)
class DebugNode(Node):
def render(self, context):
from pprint import pformat
output = [force_text(pformat(val)) for val in context]
output.append('\n\n')
output.append(force_text(pformat(sys.modules)))
return ''.join(output)
class FilterNode(Node):
def __init__(self, filter_expr, nodelist):
self.filter_expr, self.nodelist = filter_expr, nodelist
def render(self, context):
output = self.nodelist.render(context)
# Apply filters.
with context.push(var=output):
return self.filter_expr.resolve(context)
class FirstOfNode(Node):
def __init__(self, variables, asvar=None):
self.vars = variables
self.asvar = asvar
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
first = render_value_in_context(value, context)
if self.asvar:
context[self.asvar] = first
return ''
return first
return ''
class ForNode(Node):
child_nodelists = ('nodelist_loop', 'nodelist_empty')
def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
self.loopvars, self.sequence = loopvars, sequence
self.is_reversed = is_reversed
self.nodelist_loop = nodelist_loop
if nodelist_empty is None:
self.nodelist_empty = NodeList()
else:
self.nodelist_empty = nodelist_empty
def __repr__(self):
reversed_text = ' reversed' if self.is_reversed else ''
return "<For Node: for %s in %s, tail_len: %d%s>" % \
(', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
reversed_text)
def __iter__(self):
for node in self.nodelist_loop:
yield node
for node in self.nodelist_empty:
yield node
def render(self, context):
if 'forloop' in context:
parentloop = context['forloop']
else:
parentloop = {}
with context.push():
try:
values = self.sequence.resolve(context, True)
except VariableDoesNotExist:
values = []
if values is None:
values = []
if not hasattr(values, '__len__'):
values = list(values)
len_values = len(values)
if len_values < 1:
return self.nodelist_empty.render(context)
nodelist = []
if self.is_reversed:
values = reversed(values)
num_loopvars = len(self.loopvars)
unpack = num_loopvars > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context['forloop'] = {'parentloop': parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict['counter0'] = i
loop_dict['counter'] = i + 1
# Reverse counter iteration numbers.
loop_dict['revcounter'] = len_values - i
loop_dict['revcounter0'] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict['first'] = (i == 0)
loop_dict['last'] = (i == len_values - 1)
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
# To complete this deprecation, remove from here to the
# try/except block as well as the try/except itself,
# leaving `unpacked_vars = ...` and the "else" statements.
if not isinstance(item, (list, tuple)):
len_item = 1
else:
len_item = len(item)
# Check loop variable count before unpacking
if num_loopvars != len_item:
warnings.warn(
"Need {} values to unpack in for loop; got {}. "
"This will raise an exception in Django 1.10."
.format(num_loopvars, len_item),
RemovedInDjango110Warning)
try:
unpacked_vars = dict(zip(self.loopvars, item))
except TypeError:
pass
else:
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
for node in self.nodelist_loop:
nodelist.append(node.render_annotated(context))
if pop_context:
# The loop variables were pushed on to the context so pop them
# off again. This is necessary because the tag lets the length
# of loopvars differ to the length of each set of items and we
# don't want to leave any vars from the previous loop on the
# context.
context.pop()
return mark_safe(''.join(force_text(n) for n in nodelist))
class IfChangedNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, nodelist_true, nodelist_false, *varlist):
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self._varlist = varlist
def render(self, context):
# Init state storage
state_frame = self._get_context_stack_frame(context)
if self not in state_frame:
state_frame[self] = None
nodelist_true_output = None
try:
if self._varlist:
# Consider multiple parameters. This automatically behaves
# like an OR evaluation of the multiple variables.
compare_to = [var.resolve(context, True) for var in self._varlist]
else:
# The "{% ifchanged %}" syntax (without any variables) compares the rendered output.
compare_to = nodelist_true_output = self.nodelist_true.render(context)
except VariableDoesNotExist:
compare_to = None
if compare_to != state_frame[self]:
state_frame[self] = compare_to
# render true block if not already rendered
return nodelist_true_output or self.nodelist_true.render(context)
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ''
def _get_context_stack_frame(self, context):
# The Context object behaves like a stack where each template tag can create a new scope.
# Find the place where to store the state to detect changes.
if 'forloop' in context:
# Ifchanged is bound to the local for loop.
# When there is a loop-in-loop, the state is bound to the inner loop,
# so it resets when the outer loop continues.
return context['forloop']
else:
# Using ifchanged outside loops. Effectively this is a no-op because the state is associated with 'self'.
return context.render_context
class IfEqualNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
self.var1, self.var2 = var1, var2
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self.negate = negate
def __repr__(self):
return "<IfEqualNode>"
def render(self, context):
val1 = self.var1.resolve(context, True)
val2 = self.var2.resolve(context, True)
if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
return self.nodelist_true.render(context)
return self.nodelist_false.render(context)
class IfNode(Node):
def __init__(self, conditions_nodelists):
self.conditions_nodelists = conditions_nodelists
def __repr__(self):
return "<IfNode>"
def __iter__(self):
for _, nodelist in self.conditions_nodelists:
for node in nodelist:
yield node
@property
def nodelist(self):
return NodeList(node for _, nodelist in self.conditions_nodelists for node in nodelist)
def render(self, context):
for condition, nodelist in self.conditions_nodelists:
if condition is not None: # if / elif clause
try:
match = condition.eval(context)
except VariableDoesNotExist:
match = None
else: # else clause
match = True
if match:
return nodelist.render(context)
return ''
class LoremNode(Node):
def __init__(self, count, method, common):
self.count, self.method, self.common = count, method, common
def render(self, context):
try:
count = int(self.count.resolve(context))
except (ValueError, TypeError):
count = 1
if self.method == 'w':
return words(count, common=self.common)
else:
paras = paragraphs(count, common=self.common)
if self.method == 'p':
paras = ['<p>%s</p>' % p for p in paras]
return '\n\n'.join(paras)
class RegroupNode(Node):
def __init__(self, target, expression, var_name):
self.target, self.expression = target, expression
self.var_name = var_name
def resolve_expression(self, obj, context):
# This method is called for each object in self.target. See regroup()
# for the reason why we temporarily put the object in the context.
context[self.var_name] = obj
return self.expression.resolve(context, True)
def render(self, context):
obj_list = self.target.resolve(context, True)
if obj_list is None:
# target variable wasn't found in context; fail silently.
context[self.var_name] = []
return ''
# List of dictionaries in the format:
# {'grouper': 'key', 'list': [list of contents]}.
context[self.var_name] = [
{'grouper': key, 'list': list(val)}
for key, val in
groupby(obj_list, lambda obj: self.resolve_expression(obj, context))
]
return ''
def include_is_allowed(filepath, allowed_include_roots):
filepath = os.path.abspath(filepath)
for root in allowed_include_roots:
if filepath.startswith(root):
return True
return False
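# Note: startswith() is a plain string prefix test, so a root such as
# '/var/www' would also match '/var/www-private/...'; allowed include
# roots should therefore end with a path separator.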
class SsiNode(Node):
def __init__(self, filepath, parsed):
self.filepath = filepath
self.parsed = parsed
def render(self, context):
filepath = self.filepath.resolve(context)
if not include_is_allowed(filepath, context.template.engine.allowed_include_roots):
if settings.DEBUG:
return "[Didn't have permission to include file]"
else:
return '' # Fail silently for invalid includes.
try:
with open(filepath, 'r') as fp:
output = fp.read()
except IOError:
output = ''
if self.parsed:
try:
t = Template(output, name=filepath, engine=context.template.engine)
return t.render(context)
except TemplateSyntaxError as e:
if settings.DEBUG:
return "[Included template had syntax error: %s]" % e
else:
return '' # Fail silently for invalid included templates.
return output
class LoadNode(Node):
def render(self, context):
return ''
class NowNode(Node):
def __init__(self, format_string, asvar=None):
self.format_string = format_string
self.asvar = asvar
def render(self, context):
tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
formatted = date(datetime.now(tz=tzinfo), self.format_string)
if self.asvar:
context[self.asvar] = formatted
return ''
else:
return formatted
class SpacelessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
return strip_spaces_between_tags(self.nodelist.render(context).strip())
class TemplateTagNode(Node):
mapping = {'openblock': BLOCK_TAG_START,
'closeblock': BLOCK_TAG_END,
'openvariable': VARIABLE_TAG_START,
'closevariable': VARIABLE_TAG_END,
'openbrace': SINGLE_BRACE_START,
'closebrace': SINGLE_BRACE_END,
'opencomment': COMMENT_TAG_START,
'closecomment': COMMENT_TAG_END,
}
def __init__(self, tagtype):
self.tagtype = tagtype
def render(self, context):
return self.mapping.get(self.tagtype, '')
class URLNode(Node):
def __init__(self, view_name, args, kwargs, asvar):
self.view_name = view_name
self.args = args
self.kwargs = kwargs
self.asvar = asvar
def render(self, context):
from django.core.urlresolvers import reverse, NoReverseMatch
args = [arg.resolve(context) for arg in self.args]
kwargs = {
smart_text(k, 'ascii'): v.resolve(context)
for k, v in self.kwargs.items()
}
view_name = self.view_name.resolve(context)
try:
current_app = context.request.current_app
except AttributeError:
# Leave only the else block when the deprecation path for
# Context.current_app completes in Django 1.10.
# Can also remove the Context.is_current_app_set property.
if context.is_current_app_set:
current_app = context.current_app
else:
try:
current_app = context.request.resolver_match.namespace
except AttributeError:
current_app = None
# Try to look up the URL twice: once given the view name, and again
# relative to what we guess is the "main" app. If they both fail,
# re-raise the NoReverseMatch unless we're using the
# {% url ... as var %} construct in which case return nothing.
url = ''
try:
url = reverse(view_name, args=args, kwargs=kwargs, current_app=current_app)
except NoReverseMatch:
exc_info = sys.exc_info()
if settings.SETTINGS_MODULE:
project_name = settings.SETTINGS_MODULE.split('.')[0]
try:
url = reverse(project_name + '.' + view_name,
args=args, kwargs=kwargs,
current_app=current_app)
except NoReverseMatch:
if self.asvar is None:
# Re-raise the original exception, not the one with
# the path relative to the project. This makes a
# better error message.
six.reraise(*exc_info)
else:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ''
else:
if context.autoescape:
url = conditional_escape(url)
return url
class VerbatimNode(Node):
def __init__(self, content):
self.content = content
def render(self, context):
return self.content
class WidthRatioNode(Node):
def __init__(self, val_expr, max_expr, max_width, asvar=None):
self.val_expr = val_expr
self.max_expr = max_expr
self.max_width = max_width
self.asvar = asvar
def render(self, context):
try:
value = self.val_expr.resolve(context)
max_value = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ''
except (ValueError, TypeError):
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
max_value = float(max_value)
ratio = (value / max_value) * max_width
result = str(int(round(ratio)))
except ZeroDivisionError:
return '0'
except (ValueError, TypeError, OverflowError):
return ''
if self.asvar:
context[self.asvar] = result
return ''
else:
return result
class WithNode(Node):
def __init__(self, var, name, nodelist, extra_context=None):
self.nodelist = nodelist
# var and name are legacy attributes, being left in case they are used
# by third-party subclasses of this Node.
self.extra_context = extra_context or {}
if name:
self.extra_context[name] = var
def __repr__(self):
return "<WithNode>"
def render(self, context):
values = {key: val.resolve(context) for key, val in
six.iteritems(self.extra_context)}
with context.push(**values):
return self.nodelist.render(context)
@register.tag
def autoescape(parser, token):
"""
Force autoescape behavior for this block.
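Sample usage::
{% autoescape off %}{{ somevar }}{% endautoescape %}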
"""
# token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
args = token.contents.split()
if len(args) != 2:
raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
arg = args[1]
if arg not in ('on', 'off'):
raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
nodelist = parser.parse(('endautoescape',))
parser.delete_first_token()
return AutoEscapeControlNode((arg == 'on'), nodelist)
@register.tag
def comment(parser, token):
"""
Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
"""
parser.skip_past('endcomment')
return CommentNode()
@register.tag
def cycle(parser, token):
"""
Cycles among the given strings each time this tag is encountered.
Within a loop, cycles among the given strings each time through
the loop::
{% for o in some_list %}
<tr class="{% cycle 'row1' 'row2' %}">
...
</tr>
{% endfor %}
Outside of a loop, give the values a unique name the first time you call
it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
You can use any number of values, separated by spaces. Commas can also
be used to separate values; if a comma is used, the cycle values are
interpreted as literal strings.
The optional flag "silent" can be used to prevent the cycle declaration
from returning any value::
{% for o in some_list %}
{% cycle 'row1' 'row2' as rowcolors silent %}
<tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
{% endfor %}
"""
# Note: This returns the exact same node on each {% cycle name %} call;
# that is, the node object returned from {% cycle a b c as name %} and the
# one returned from {% cycle name %} are the exact same object. This
# shouldn't cause problems (heh), but if it does, now you know.
#
# Ugly hack warning: This stuffs the named template dict into parser so
# that names are only unique within each template (as opposed to using
# a global variable, which would make cycle names have to be unique across
# *all* templates).
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
if ',' in args[1]:
warnings.warn(
"The old {% cycle %} syntax with comma-separated arguments is deprecated.",
RemovedInDjango110Warning,
)
# Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
# case.
args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
if len(args) == 2:
# {% cycle foo %} case.
name = args[1]
if not hasattr(parser, '_namedCycleNodes'):
raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
if name not in parser._namedCycleNodes:
raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
return parser._namedCycleNodes[name]
as_form = False
if len(args) > 4:
# {% cycle ... as foo [silent] %} case.
if args[-3] == "as":
if args[-1] != "silent":
raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
as_form = True
silent = True
args = args[:-1]
elif args[-2] == "as":
as_form = True
silent = False
if as_form:
name = args[-1]
values = [parser.compile_filter(arg) for arg in args[1:-2]]
node = CycleNode(values, name, silent=silent)
if not hasattr(parser, '_namedCycleNodes'):
parser._namedCycleNodes = {}
parser._namedCycleNodes[name] = node
else:
values = [parser.compile_filter(arg) for arg in args[1:]]
node = CycleNode(values)
return node
@register.tag
def csrf_token(parser, token):
return CsrfTokenNode()
@register.tag
def debug(parser, token):
"""
Outputs a whole load of debugging information, including the current
context and imported modules.
Sample usage::
<pre>
{% debug %}
</pre>
"""
return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
"""
Filters the contents of the block through variable filters.
Filters can also be piped through each other, and they can have
arguments -- just like in variable syntax.
Sample usage::
{% filter force_escape|lower %}
This text will be HTML-escaped, and will appear in lowercase.
{% endfilter %}
Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
template code.
"""
# token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
_, rest = token.contents.split(None, 1)
filter_expr = parser.compile_filter("var|%s" % (rest))
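# The block's rendered output is later bound to the context variable
# 'var' (see FilterNode.render), so the chain evaluates as "var|<filters>".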
for func, unused in filter_expr.filters:
filter_name = getattr(func, '_filter_name', None)
if filter_name in ('escape', 'safe'):
raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_name)
nodelist = parser.parse(('endfilter',))
parser.delete_first_token()
return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token):
"""
Outputs the first variable passed that is not False, without escaping.
Outputs nothing if all the passed variables are False.
Sample usage::
{% firstof var1 var2 var3 as myvar %}
This is equivalent to::
{% if var1 %}
{{ var1|safe }}
{% elif var2 %}
{{ var2|safe }}
{% elif var3 %}
{{ var3|safe }}
{% endif %}
but obviously much cleaner!
You can also use a literal string as a fallback value in case all
passed variables are False::
{% firstof var1 var2 var3 "fallback value" %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
{% endautoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
bits = token.split_contents()[1:]
asvar = None
if len(bits) < 1:
raise TemplateSyntaxError("'firstof' statement requires at least one argument")
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
return FirstOfNode([parser.compile_filter(bit) for bit in bits], asvar)
@register.tag('for')
def do_for(parser, token):
"""
Loops over each item in an array.
For example, to display a list of athletes given ``athlete_list``::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
</ul>
You can loop over a list in reverse by using
``{% for obj in list reversed %}``.
You can also unpack multiple values from a two-dimensional array::
{% for key,value in dict.items %}
{{ key }}: {{ value }}
{% endfor %}
The ``for`` tag can take an optional ``{% empty %}`` clause that will
be displayed if the given array is empty or could not be found::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% empty %}
<li>Sorry, no athletes in this list.</li>
{% endfor %}
</ul>
The above is equivalent to -- but shorter, cleaner, and possibly faster
than -- the following::
<ul>
{% if athlete_list %}
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
{% else %}
<li>Sorry, no athletes in this list.</li>
{% endif %}
</ul>
The for loop sets a number of variables available within the loop:
========================== ================================================
Variable Description
========================== ================================================
``forloop.counter`` The current iteration of the loop (1-indexed)
``forloop.counter0`` The current iteration of the loop (0-indexed)
``forloop.revcounter`` The number of iterations from the end of the
loop (1-indexed)
``forloop.revcounter0`` The number of iterations from the end of the
loop (0-indexed)
``forloop.first`` True if this is the first time through the loop
``forloop.last`` True if this is the last time through the loop
``forloop.parentloop`` For nested loops, this is the loop "above" the
current one
========================== ================================================
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four"
" words: %s" % token.contents)
is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2
if bits[in_index] != 'in':
raise TemplateSyntaxError("'for' statements should use the format"
" 'for x in y': %s" % token.contents)
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars:
if not var or ' ' in var:
raise TemplateSyntaxError("'for' tag received an invalid argument:"
" %s" % token.contents)
sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(('empty', 'endfor',))
token = parser.next_token()
if token.contents == 'empty':
nodelist_empty = parser.parse(('endfor',))
parser.delete_first_token()
else:
nodelist_empty = None
return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
def do_ifequal(parser, token, negate):
bits = list(token.split_contents())
if len(bits) != 3:
raise TemplateSyntaxError("%r takes two arguments" % bits[0])
end_tag = 'end' + bits[0]
nodelist_true = parser.parse(('else', end_tag))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse((end_tag,))
parser.delete_first_token()
else:
nodelist_false = NodeList()
val1 = parser.compile_filter(bits[1])
val2 = parser.compile_filter(bits[2])
return IfEqualNode(val1, val2, nodelist_true, nodelist_false, negate)
@register.tag
def ifequal(parser, token):
"""
Outputs the contents of the block if the two arguments equal each other.
Examples::
{% ifequal user.id comment.user_id %}
...
{% endifequal %}
{% ifnotequal user.id comment.user_id %}
...
{% else %}
...
{% endifnotequal %}
"""
return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
"""
Outputs the contents of the block if the two arguments are not equal.
See ifequal.
"""
return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
def __init__(self, value, text):
self.value = value
self.text = text # for better error messages
def display(self):
return self.text
def eval(self, context):
return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
error_class = TemplateSyntaxError
def __init__(self, parser, *args, **kwargs):
self.template_parser = parser
super(TemplateIfParser, self).__init__(*args, **kwargs)
def create_var(self, value):
return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
"""
The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
(i.e., exists, is not empty, and is not a false boolean value), the
contents of the block are output:
::
{% if athlete_list %}
Number of athletes: {{ athlete_list|count }}
{% elif athlete_in_locker_room_list %}
Athletes should be out of the locker room soon!
{% else %}
No athletes.
{% endif %}
In the above, if ``athlete_list`` is not empty, the number of athletes will
be displayed by the ``{{ athlete_list|count }}`` variable.
As you can see, the ``if`` tag may take one or several ``{% elif %}``
clauses, as well as an ``{% else %}`` clause that will be displayed if all
previous conditions fail. These clauses are optional.
``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
variables or to negate a given variable::
{% if not athlete_list %}
There are no athletes.
{% endif %}
{% if athlete_list or coach_list %}
There are some athletes or some coaches.
{% endif %}
{% if athlete_list and coach_list %}
Both athletes and coaches are available.
{% endif %}
{% if not athlete_list or coach_list %}
There are no athletes, or there are some coaches.
{% endif %}
{% if athlete_list and not coach_list %}
There are some athletes and absolutely no coaches.
{% endif %}
Comparison operators are also available, and the use of filters is also
allowed, for example::
{% if articles|length >= 5 %}...{% endif %}
Arguments and operators _must_ have a space between them, so
``{% if 1>2 %}`` is not a valid if tag.
All supported operators are: ``or``, ``and``, ``in``, ``not in``,
``==``, ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
Operator precedence follows Python.
"""
# {% if ... %}
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists = [(condition, nodelist)]
token = parser.next_token()
# {% elif ... %} (repeatable)
while token.contents.startswith('elif'):
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists.append((condition, nodelist))
token = parser.next_token()
# {% else %} (optional)
if token.contents == 'else':
nodelist = parser.parse(('endif',))
conditions_nodelists.append((None, nodelist))
token = parser.next_token()
# {% endif %}
assert token.contents == 'endif'
return IfNode(conditions_nodelists)
@register.tag
def ifchanged(parser, token):
"""
Checks if a value has changed from the last iteration of a loop.
The ``{% ifchanged %}`` block tag is used within a loop. It has two
possible uses.
1. Checks its own rendered contents against its previous state and only
displays the content if it has changed. For example, this displays a
list of days, only displaying the month if it changes::
<h1>Archive for {{ year }}</h1>
{% for date in days %}
{% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
<a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
{% endfor %}
2. If given one or more variables, check whether any variable has changed.
For example, the following shows the date every time it changes, while
showing the hour if either the hour or the date has changed::
{% for date in days %}
{% ifchanged date.date %} {{ date.date }} {% endifchanged %}
{% ifchanged date.hour date.date %}
{{ date.hour }}
{% endifchanged %}
{% endfor %}
"""
bits = token.split_contents()
nodelist_true = parser.parse(('else', 'endifchanged'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endifchanged',))
parser.delete_first_token()
else:
nodelist_false = NodeList()
values = [parser.compile_filter(bit) for bit in bits[1:]]
return IfChangedNode(nodelist_true, nodelist_false, *values)
@register.tag
def ssi(parser, token):
"""
Outputs the contents of a given file into the page.
Like a simple "include" tag, the ``ssi`` tag includes the contents
of another file -- which must be specified using an absolute path --
in the current page::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" %}
If the optional "parsed" parameter is given, the contents of the included
file are evaluated as template code, with the current context::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" parsed %}
"""
warnings.warn(
"The {% ssi %} tag is deprecated. Use the {% include %} tag instead.",
RemovedInDjango110Warning,
)
bits = token.split_contents()
parsed = False
if len(bits) not in (2, 3):
raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
" the file to be included")
if len(bits) == 3:
if bits[2] == 'parsed':
parsed = True
else:
raise TemplateSyntaxError("Second (optional) argument to %s tag"
" must be 'parsed'" % bits[0])
filepath = parser.compile_filter(bits[1])
return SsiNode(filepath, parsed)
def find_library(parser, name):
try:
return parser.libraries[name]
except KeyError:
raise TemplateSyntaxError(
"'%s' is not a registered tag library. Must be one of:\n%s" % (
name, "\n".join(sorted(parser.libraries.keys())),
),
)
def load_from_library(library, label, names):
"""
Return a subset of tags and filters from a library.
"""
subset = Library()
for name in names:
found = False
if name in library.tags:
found = True
subset.tags[name] = library.tags[name]
if name in library.filters:
found = True
subset.filters[name] = library.filters[name]
if found is False:
raise TemplateSyntaxError(
"'%s' is not a valid tag or filter in tag library '%s'" % (
name, label,
),
)
return subset
@register.tag
def load(parser, token):
"""
Loads a custom template tag library into the parser.
For example, to load the template tags in
``django/templatetags/news/photos.py``::
{% load news.photos %}
Can also be used to load an individual tag/filter from
a library::
{% load byline from news %}
"""
# token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) >= 4 and bits[-2] == "from":
# from syntax is used; load individual tags from the library
name = bits[-1]
lib = find_library(parser, name)
subset = load_from_library(lib, name, bits[1:-2])
parser.add_library(subset)
else:
# one or more libraries are specified; load and add them to the parser
for name in bits[1:]:
lib = find_library(parser, name)
parser.add_library(lib)
return LoadNode()
@register.tag
def lorem(parser, token):
"""
Creates random Latin text useful for providing test data in templates.
Usage format::
{% lorem [count] [method] [random] %}
``count`` is a number (or variable) containing the number of paragraphs or
words to generate (default is 1).
``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
plain-text paragraph blocks (default is ``b``).
``random`` is the word ``random``, which if given, does not use the common
paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").
Examples:
* ``{% lorem %}`` will output the common "lorem ipsum" paragraph
* ``{% lorem 3 p %}`` will output the common "lorem ipsum" paragraph
and two random paragraphs each wrapped in HTML ``<p>`` tags
* ``{% lorem 2 w random %}`` will output two random Latin words
"""
bits = list(token.split_contents())
tagname = bits[0]
# Random bit
common = bits[-1] != 'random'
if not common:
bits.pop()
# Method bit
if bits[-1] in ('w', 'p', 'b'):
method = bits.pop()
else:
method = 'b'
# Count bit
if len(bits) > 1:
count = bits.pop()
else:
count = '1'
count = parser.compile_filter(count)
if len(bits) != 1:
raise TemplateSyntaxError("Incorrect format for %r tag" % tagname)
return LoremNode(count, method, common)
@register.tag
def now(parser, token):
"""
Displays the date, formatted according to the given string.
Uses the same format as PHP's ``date()`` function; see http://php.net/date
for all the possible values.
Sample usage::
It is {% now "jS F Y H:i" %}
"""
bits = token.split_contents()
asvar = None
if len(bits) == 4 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits) != 2:
raise TemplateSyntaxError("'now' statement takes one argument")
format_string = bits[1][1:-1]
return NowNode(format_string, asvar)
@register.tag
def regroup(parser, token):
"""
Regroups a list of alike objects by a common attribute.
This complex tag is best illustrated by use of an example: say that
``people`` is a list of ``Person`` objects that have ``first_name``,
``last_name``, and ``gender`` attributes, and you'd like to display a list
that looks like:
* Male:
* George Bush
* Bill Clinton
* Female:
* Margaret Thatcher
* Condoleezza Rice
* Unknown:
* Pat Smith
The following snippet of template code would accomplish this dubious task::
{% regroup people by gender as grouped %}
<ul>
{% for group in grouped %}
<li>{{ group.grouper }}
<ul>
{% for item in group.list %}
<li>{{ item }}</li>
{% endfor %}
</ul>
{% endfor %}
</ul>
As you can see, ``{% regroup %}`` populates a variable with a list of
objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
item that was grouped by; ``list`` contains the list of objects that share
that ``grouper``. In this case, ``grouper`` would be ``Male``, ``Female``
and ``Unknown``, and ``list`` is the list of people with those genders.
Note that ``{% regroup %}`` does not work when the list to be grouped is not
sorted by the key you are grouping by! This means that if your list of
people was not sorted by gender, you'd need to make sure it is sorted
before using it, i.e.::
{% regroup people|dictsort:"gender" by gender as grouped %}
"""
bits = token.split_contents()
if len(bits) != 6:
raise TemplateSyntaxError("'regroup' tag takes five arguments")
target = parser.compile_filter(bits[1])
if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
" be 'as'")
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
# group by the resulting value. After all items are processed, it will
# save the final result in the context under 'var_name', thus clearing the
# temporary values. This hack is necessary because the template engine
# doesn't provide a context-aware equivalent of Python's getattr.
expression = parser.compile_filter(var_name +
VARIABLE_ATTRIBUTE_SEPARATOR +
bits[3])
return RegroupNode(target, expression, var_name)
@register.tag
def spaceless(parser, token):
"""
Removes whitespace between HTML tags, including tab and newline characters.
Example usage::
{% spaceless %}
<p>
<a href="foo/">Foo</a>
</p>
{% endspaceless %}
This example would return this HTML::
<p><a href="foo/">Foo</a></p>
Only space between *tags* is normalized -- not space between tags and text.
In this example, the space around ``Hello`` won't be stripped::
{% spaceless %}
<strong>
Hello
</strong>
{% endspaceless %}
"""
nodelist = parser.parse(('endspaceless',))
parser.delete_first_token()
return SpacelessNode(nodelist)
@register.tag
def templatetag(parser, token):
"""
Outputs one of the bits used to compose template tags.
Since the template system has no concept of "escaping", to display one of
the bits used in template tags, you must use the ``{% templatetag %}`` tag.
The argument tells which template bit to output:
================== =======
Argument Outputs
================== =======
``openblock`` ``{%``
``closeblock`` ``%}``
``openvariable`` ``{{``
``closevariable`` ``}}``
``openbrace`` ``{``
``closebrace`` ``}``
``opencomment`` ``{#``
``closecomment`` ``#}``
================== =======
"""
# token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'templatetag' statement takes one argument")
tag = bits[1]
if tag not in TemplateTagNode.mapping:
raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
" Must be one of: %s" %
(tag, list(TemplateTagNode.mapping)))
return TemplateTagNode(tag)
@register.tag
def url(parser, token):
"""
Returns an absolute URL matching given view with its parameters.
This is a way to define links that aren't tied to a particular URL
configuration::
{% url "path.to.some_view" arg1 arg2 %}
or
{% url "path.to.some_view" name1=value1 name2=value2 %}
The first argument is a path to a view. It can be an absolute Python path
or just ``app_name.view_name`` without the project name if the view is
located inside the project.
Other arguments are space-separated values that will be filled in place of
positional and keyword arguments in the URL. Don't mix positional and
keyword arguments.
All arguments for the URL should be present.
For example if you have a view ``app_name.client`` taking client's id and
the corresponding line in a URLconf looks like this::
('^client/(\d+)/$', 'app_name.client')
and this app's URLconf is included into the project's URLconf under some
path::
('^clients/', include('project_name.app_name.urls'))
then in a template you can create a link for a certain client like this::
{% url "app_name.client" client.id %}
The URL will look like ``/clients/client/123/``.
The first argument can also be a named URL instead of the Python path to
the view callable. For example if the URLconf entry looks like this::
url('^client/(\d+)/$', name='client-detail-view')
then in the template you can use::
{% url "client-detail-view" client.id %}
There is even another possible value type for the first argument. It can be
the name of a template variable that will be evaluated to obtain the view
name or the URL name, e.g.::
{% with view_path="app_name.client" %}
{% url view_path client.id %}
{% endwith %}
or,
{% with url_name="client-detail-view" %}
{% url url_name client.id %}
{% endwith %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'%s' takes at least one argument"
" (path to a view)" % bits[0])
viewname = parser.compile_filter(bits[1])
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits):
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLNode(viewname, args, kwargs, asvar)
@register.tag
def verbatim(parser, token):
"""
Stops the template engine from rendering the contents of this block tag.
Usage::
{% verbatim %}
{% don't process this %}
{% endverbatim %}
You can also designate a specific closing tag block (allowing the
unrendered use of ``{% endverbatim %}``)::
{% verbatim myblock %}
...
{% endverbatim myblock %}
"""
nodelist = parser.parse(('endverbatim',))
parser.delete_first_token()
return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
"""
For creating bar charts and such, this tag calculates the ratio of a given
value to a maximum value, and then applies that ratio to a constant.
For example::
<img src="bar.png" alt="Bar"
height="10" width="{% widthratio this_value max_value max_width %}" />
If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
the image in the above example will be 88 pixels wide
(because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).
In some cases you might want to capture the result of widthratio in a
variable. It can be useful for instance in a blocktrans like this::
{% widthratio this_value max_value max_width as width %}
{% blocktrans %}The width is: {{ width }}{% endblocktrans %}
"""
bits = token.split_contents()
if len(bits) == 4:
tag, this_value_expr, max_value_expr, max_width = bits
asvar = None
elif len(bits) == 6:
tag, this_value_expr, max_value_expr, max_width, as_, asvar = bits
if as_ != 'as':
raise TemplateSyntaxError("Invalid syntax in widthratio tag. Expecting 'as' keyword")
else:
raise TemplateSyntaxError("widthratio takes at least three arguments")
return WidthRatioNode(parser.compile_filter(this_value_expr),
parser.compile_filter(max_value_expr),
parser.compile_filter(max_width),
asvar=asvar)
@register.tag('with')
def do_with(parser, token):
"""
Adds one or more values to the context (inside of this block) for caching
and easy access.
For example::
{% with total=person.some_sql_method %}
{{ total }} object{{ total|pluralize }}
{% endwith %}
Multiple values can be added to the context::
{% with foo=1 bar=2 %}
...
{% endwith %}
The legacy format of ``{% with person.some_sql_method as total %}`` is
still accepted.
"""
bits = token.split_contents()
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable "
"assignment" % bits[0])
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))
nodelist = parser.parse(('endwith',))
parser.delete_first_token()
return WithNode(None, None, nodelist, extra_context=extra_context)
|
| hlzz/dotfiles | refs/heads/master | graphics/VTK-7.0.0/Filters/General/Testing/Python/cursor2D.py | 2 |
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# create four cursors configured differently
cursor = vtk.vtkCursor2D()
cursor.SetModelBounds(15, 45, 15, 45, 0, 0)
cursor.SetFocalPoint(30, 30, 0)
cursor.AllOff()
cursor.AxesOn()
cursor.OutlineOn()
cursorMapper = vtk.vtkPolyDataMapper2D()
cursorMapper.SetInputConnection(cursor.GetOutputPort())
cursorActor = vtk.vtkActor2D()
cursorActor.SetMapper(cursorMapper)
cursorActor.GetProperty().SetColor(1, 0, 0)
cursor2 = vtk.vtkCursor2D()
cursor2.SetModelBounds(75, 105, 15, 45, 0, 0)
cursor2.SetFocalPoint(90, 30, 0)
cursor2.AllOff()
cursor2.AxesOn()
cursor2.OutlineOn()
cursor2.PointOn()
cursor2Mapper = vtk.vtkPolyDataMapper2D()
cursor2Mapper.SetInputConnection(cursor2.GetOutputPort())
cursor2Actor = vtk.vtkActor2D()
cursor2Actor.SetMapper(cursor2Mapper)
cursor2Actor.GetProperty().SetColor(0, 1, 0)
cursor3 = vtk.vtkCursor2D()
cursor3.SetModelBounds(15, 45, 75, 105, 0, 0)
cursor3.SetFocalPoint(30, 90, 0)
cursor3.AllOff()
cursor3.AxesOn()
cursor3.OutlineOff()
cursor3.PointOn()
cursor3.SetRadius(3)
cursor3Mapper = vtk.vtkPolyDataMapper2D()
cursor3Mapper.SetInputConnection(cursor3.GetOutputPort())
cursor3Actor = vtk.vtkActor2D()
cursor3Actor.SetMapper(cursor3Mapper)
cursor3Actor.GetProperty().SetColor(0, 1, 0)
cursor4 = vtk.vtkCursor2D()
cursor4.SetModelBounds(75, 105, 75, 105, 0, 0)
cursor4.SetFocalPoint(90, 90, 0)
cursor4.AllOff()
cursor4.AxesOn()
cursor4.SetRadius(0.0)
cursor4Mapper = vtk.vtkPolyDataMapper2D()
cursor4Mapper.SetInputConnection(cursor4.GetOutputPort())
cursor4Actor = vtk.vtkActor2D()
cursor4Actor.SetMapper(cursor4Mapper)
cursor4Actor.GetProperty().SetColor(1, 0, 0)
# rendering support
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# add the cursor actors to the renderer
ren1.AddActor(cursorActor)
ren1.AddActor(cursor2Actor)
ren1.AddActor(cursor3Actor)
ren1.AddActor(cursor4Actor)
ren1.SetBackground(0, 0, 0)
renWin.SetSize(150, 150)
renWin.SetMultiSamples(0)
renWin.Render()
iren.Initialize()
#iren.Start()
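# iren.Start() is left commented out so the script runs non-interactively
# under the VTK test harness; uncomment it to inspect the cursors manually.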
|
| Dino0631/RedRain-Bot | refs/heads/develop | cogs/lib/youtube_dl/extractor/fivemin.py | 79 |
from __future__ import unicode_literals
from .common import InfoExtractor
class FiveMinIE(InfoExtractor):
IE_NAME = '5min'
_VALID_URL = r'(?:5min:|https?://(?:[^/]*?5min\.com/|delivery\.vidible\.tv/aol)(?:(?:Scripts/PlayerSeed\.js|playerseed/?)?\?.*?playList=)?)(?P<id>\d+)'
_TESTS = [
{
# From http://www.engadget.com/2013/11/15/ipad-mini-retina-display-review/
'url': 'http://pshared.5min.com/Scripts/PlayerSeed.js?sid=281&width=560&height=345&playList=518013791',
'md5': '4f7b0b79bf1a470e5004f7112385941d',
'info_dict': {
'id': '518013791',
'ext': 'mp4',
'title': 'iPad Mini with Retina Display Review',
'description': 'iPad mini with Retina Display review',
'duration': 177,
'uploader': 'engadget',
'upload_date': '20131115',
'timestamp': 1384515288,
},
'params': {
# m3u8 download
'skip_download': True,
}
},
{
# From http://on.aol.com/video/how-to-make-a-next-level-fruit-salad-518086247
'url': '5min:518086247',
'md5': 'e539a9dd682c288ef5a498898009f69e',
'info_dict': {
'id': '518086247',
'ext': 'mp4',
'title': 'How to Make a Next-Level Fruit Salad',
'duration': 184,
},
'skip': 'no longer available',
},
{
'url': 'http://embed.5min.com/518726732/',
'only_matching': True,
},
{
'url': 'http://delivery.vidible.tv/aol?playList=518013791',
'only_matching': True,
}
]
def _real_extract(self, url):
video_id = self._match_id(url)
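# Extraction is delegated to the extractor that matches the
# 'aol-video:' scheme; 5min videos are served through AOL On.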
return self.url_result('aol-video:%s' % video_id)
|
| christi3k/zulip | refs/heads/master | zerver/management/commands/import.py | 6 |
from __future__ import absolute_import
from __future__ import print_function
from optparse import make_option
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandParser
from django.db import connection
from django.conf import settings
from zerver.models import Realm, Stream, UserProfile, Recipient, Subscription, \
Message, UserMessage, Huddle, DefaultStream, RealmDomain, RealmFilter, Client
from zerver.lib.export import do_import_realm
import os
import subprocess
import sys
import ujson
from typing import Any
Model = Any # TODO: make this mypy type more specific
class Command(BaseCommand):
help = """Import Zulip database dump files into a fresh Zulip instance.
This command should be used only on a newly created, empty Zulip instance to
import a database dump from one or more JSON files.
Usage: ./manage.py import [--destroy-rebuild-database] [--import-into-nonempty] <export path name> [<export path name>...]"""
def add_arguments(self, parser):
# type: (CommandParser) -> None
parser.add_argument('--destroy-rebuild-database',
dest='destroy_rebuild_database',
default=False,
action="store_true",
help='Destroys and rebuilds the databases prior to import.')
parser.add_argument('--import-into-nonempty',
dest='import_into_nonempty',
default=False,
action="store_true",
help='Import into an existing nonempty database.')
def new_instance_check(self, model):
# type: (Model) -> None
count = model.objects.count()
if count:
print("Zulip instance is not empty, found %d rows in %s table. "
% (count, model._meta.db_table))
print("You may use --destroy-rebuild-database to destroy and rebuild the database prior to import.")
exit(1)
def do_destroy_and_rebuild_database(self, db_name):
# type: (str) -> None
call_command('flush', verbosity=0, interactive=False)
subprocess.check_call([os.path.join(settings.DEPLOY_ROOT, "scripts/setup/flush-memcached")])
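# Django's 'flush' empties every table while keeping the schema; flushing
# memcached as well ensures no stale cached data survives the rebuild.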
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
models_to_import = [Realm, Stream, UserProfile, Recipient, Subscription,
Client, Message, UserMessage, Huddle, DefaultStream, RealmDomain,
RealmFilter]
if len(args) == 0:
print("Please provide at least one realm dump to import.")
exit(1)
if options["destroy_rebuild_database"]:
print("Rebuilding the database!")
db_name = settings.DATABASES['default']['NAME']
self.do_destroy_and_rebuild_database(db_name)
elif not options["import_into_nonempty"]:
for model in models_to_import:
self.new_instance_check(model)
for path in args:
if not os.path.exists(path):
print("Directory not found: '%s'" % (path,))
exit(1)
print("Processing dump: %s ..." % (path,))
do_import_realm(path)
|
| erickt/hue | refs/heads/master | desktop/core/ext-py/simplejson/simplejson/tool.py | 291 |
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import sys
import simplejson
def main():
if len(sys.argv) == 1:
infile = sys.stdin
outfile = sys.stdout
elif len(sys.argv) == 2:
infile = open(sys.argv[1], 'rb')
outfile = sys.stdout
elif len(sys.argv) == 3:
infile = open(sys.argv[1], 'rb')
outfile = open(sys.argv[2], 'wb')
else:
raise SystemExit(sys.argv[0] + " [infile [outfile]]")
try:
obj = simplejson.load(infile)
except ValueError as e:
raise SystemExit(e)
simplejson.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
if __name__ == '__main__':
main()
|
| beastbikes/django-adminlte | refs/heads/master | setup.py | 1 |
import os
from setuptools import find_packages, setup
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-adminlte-admin',
version='0.6.1',
packages=find_packages(),
include_package_data=True,
license='MIT License',
description='Django AdminLTE is a smart admin based on adminLTE and django',
url='https://github.com/beastbikes/django-adminlte',
author='Bohan',
author_email='bohan.zhang@speedx.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
| BeDjango/intef-openedx | refs/heads/master | common/lib/xmodule/xmodule/modulestore/tests/test_store_utilities.py | 194 |
"""
Tests for store_utilities.py
"""
import unittest
from mock import Mock
import ddt
from xmodule.modulestore.store_utilities import (
get_draft_subtree_roots, draft_node_constructor
)
@ddt.ddt
class TestUtils(unittest.TestCase):
"""
Tests for store_utilities
ASCII trees for ONLY_ROOTS and SOME_TREES:
ONLY_ROOTS:
1)
vertical (not draft)
|
url1
2)
sequential (not draft)
|
url2
SOME_TREES:
1)
sequential_1 (not draft)
|
vertical_1
/ \
/ \
child_1 child_2
2)
great_grandparent_vertical (not draft)
|
grandparent_vertical
|
vertical_2
/ \
/ \
child_3 child_4
"""
ONLY_ROOTS = [
draft_node_constructor(Mock(), 'url1', 'vertical'),
draft_node_constructor(Mock(), 'url2', 'sequential'),
]
ONLY_ROOTS_URLS = ['url1', 'url2']
SOME_TREES = [
draft_node_constructor(Mock(), 'child_1', 'vertical_1'),
draft_node_constructor(Mock(), 'child_2', 'vertical_1'),
draft_node_constructor(Mock(), 'vertical_1', 'sequential_1'),
draft_node_constructor(Mock(), 'child_3', 'vertical_2'),
draft_node_constructor(Mock(), 'child_4', 'vertical_2'),
draft_node_constructor(Mock(), 'vertical_2', 'grandparent_vertical'),
draft_node_constructor(Mock(), 'grandparent_vertical', 'great_grandparent_vertical'),
]
SOME_TREES_ROOTS_URLS = ['vertical_1', 'grandparent_vertical']
@ddt.data(
(ONLY_ROOTS, ONLY_ROOTS_URLS),
(SOME_TREES, SOME_TREES_ROOTS_URLS),
)
@ddt.unpack
def test_get_draft_subtree_roots(self, module_nodes, expected_roots_urls):
"""tests for get_draft_subtree_roots"""
subtree_roots_urls = [root.url for root in get_draft_subtree_roots(module_nodes)]
# check that we return the expected urls
self.assertEqual(set(subtree_roots_urls), set(expected_roots_urls))
|
| azunite/chrome_build | refs/heads/master | third_party/boto/s3/website.py | 51 |
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
def tag(key, value):
start = '<%s>' % key
end = '</%s>' % key
return '%s%s%s' % (start, value, end)
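# For example, tag('Suffix', 'index.html') returns '<Suffix>index.html</Suffix>'.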
class WebsiteConfiguration(object):
"""
Website configuration for a bucket.
:ivar suffix: Suffix that is appended to a request that is for a
"directory" on the website endpoint (e.g. if the suffix is
index.html and you make a request to samplebucket/images/
the data that is returned will be for the object with the
key name images/index.html). The suffix must not be empty
and must not include a slash character.
:ivar error_key: The object key name to use when a 4xx class error
occurs. This key identifies the page that is returned when
such an error occurs.
:ivar redirect_all_requests_to: Describes the redirect behavior for every
request to this bucket's website endpoint. If this value is non None,
no other values are considered when configuring the website
configuration for the bucket. This is an instance of
``RedirectLocation``.
:ivar routing_rules: ``RoutingRules`` object which specifies conditions
and redirects that apply when the conditions are met.
"""
WEBSITE_SKELETON = """<?xml version="1.0" encoding="UTF-8"?>
<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
%(body)s
</WebsiteConfiguration>"""
def __init__(self, suffix=None, error_key=None,
redirect_all_requests_to=None, routing_rules=None):
self.suffix = suffix
self.error_key = error_key
self.redirect_all_requests_to = redirect_all_requests_to
self.routing_rules = routing_rules
def to_xml(self):
body_parts = []
if self.suffix is not None:
body_parts.append(tag('IndexDocument', tag('Suffix', self.suffix)))
if self.error_key is not None:
body_parts.append(tag('ErrorDocument', tag('Key', self.error_key)))
if self.redirect_all_requests_to is not None:
body_parts.append(self.redirect_all_requests_to.to_xml())
if self.routing_rules is not None:
body_parts.append(self.routing_rules.to_xml())
body = '\n'.join(body_parts)
return self.WEBSITE_SKELETON % {'body': body}
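# A minimal configuration such as WebsiteConfiguration(suffix='index.html')
# serializes to the skeleton wrapping a single
# <IndexDocument><Suffix>index.html</Suffix></IndexDocument> element.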
class RedirectLocation(object):
"""Specify redirect behavior for every request to a bucket's endpoint.
:ivar hostname: Name of the host where requests will be redirected.
:ivar protocol: Protocol to use (http, https) when redirecting requests.
The default is the protocol that is used in the original request.
"""
def __init__(self, hostname, protocol=None):
self.hostname = hostname
self.protocol = protocol
def to_xml(self):
inner_text = []
if self.hostname is not None:
inner_text.append(tag('HostName', self.hostname))
if self.protocol is not None:
inner_text.append(tag('Protocol', self.protocol))
return tag('RedirectAllRequestsTo', '\n'.join(inner_text))
class RoutingRules(object):
def __init__(self):
self._rules = []
def add_rule(self, rule):
"""
:type rule: :class:`boto.s3.website.RoutingRule`
:param rule: A routing rule.
:return: This ``RoutingRules`` object is returned,
so that it can chain subsequent calls.
"""
self._rules.append(rule)
return self
def to_xml(self):
inner_text = []
for rule in self._rules:
inner_text.append(rule.to_xml())
return tag('RoutingRules', '\n'.join(inner_text))
class RoutingRule(object):
"""Represents a single routing rule.
    There are convenience methods to make creating rules
    more concise::
rule = RoutingRule.when(key_prefix='foo/').then_redirect('example.com')
:ivar condition: Describes condition that must be met for the
specified redirect to apply.
:ivar redirect: Specifies redirect behavior. You can redirect requests to
another host, to another page, or with another protocol. In the event
        of an error, you can specify a different error code to return.
"""
def __init__(self, condition, redirect):
self.condition = condition
self.redirect = redirect
def to_xml(self):
return tag('RoutingRule',
self.condition.to_xml() + self.redirect.to_xml())
@classmethod
def when(cls, key_prefix=None, http_error_code=None):
return cls(Condition(key_prefix=key_prefix,
http_error_code=http_error_code), None)
def then_redirect(self, hostname=None, protocol=None, replace_key=None,
replace_key_prefix=None, http_redirect_code=None):
self.redirect = Redirect(
hostname=hostname, protocol=protocol,
replace_key=replace_key,
replace_key_prefix=replace_key_prefix,
http_redirect_code=http_redirect_code)
return self
class Condition(object):
"""
:ivar key_prefix: The object key name prefix when the redirect is applied.
For example, to redirect requests for ExamplePage.html, the key prefix
        will be ExamplePage.html. To redirect requests for all pages with the
        prefix docs/, the key prefix will be docs/, which identifies all
objects in the docs/ folder.
:ivar http_error_code: The HTTP error code when the redirect is applied. In
the event of an error, if the error code equals this value, then the
specified redirect is applied.
"""
def __init__(self, key_prefix=None, http_error_code=None):
self.key_prefix = key_prefix
self.http_error_code = http_error_code
def to_xml(self):
inner_text = []
if self.key_prefix is not None:
inner_text.append(tag('KeyPrefixEquals', self.key_prefix))
if self.http_error_code is not None:
inner_text.append(
tag('HttpErrorCodeReturnedEquals',
self.http_error_code))
return tag('Condition', '\n'.join(inner_text))
class Redirect(object):
"""
:ivar hostname: The host name to use in the redirect request.
:ivar protocol: The protocol to use in the redirect request. Can be either
'http' or 'https'.
:ivar replace_key: The specific object key to use in the redirect request.
For example, redirect request to error.html.
:ivar replace_key_prefix: The object key prefix to use in the redirect
request. For example, to redirect requests for all pages with prefix
docs/ (objects in the docs/ folder) to documents/, you can set a
condition block with KeyPrefixEquals set to docs/ and in the Redirect
set ReplaceKeyPrefixWith to /documents.
:ivar http_redirect_code: The HTTP redirect code to use on the response.
"""
def __init__(self, hostname=None, protocol=None, replace_key=None,
replace_key_prefix=None, http_redirect_code=None):
self.hostname = hostname
self.protocol = protocol
self.replace_key = replace_key
self.replace_key_prefix = replace_key_prefix
self.http_redirect_code = http_redirect_code
def to_xml(self):
inner_text = []
if self.hostname is not None:
inner_text.append(tag('HostName', self.hostname))
if self.protocol is not None:
inner_text.append(tag('Protocol', self.protocol))
if self.replace_key is not None:
inner_text.append(tag('ReplaceKeyWith', self.replace_key))
if self.replace_key_prefix is not None:
inner_text.append(tag('ReplaceKeyPrefixWith',
self.replace_key_prefix))
if self.http_redirect_code is not None:
inner_text.append(tag('HttpRedirectCode', self.http_redirect_code))
return tag('Redirect', '\n'.join(inner_text))
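
# --- Usage sketch (not part of the original module; hostname and key values
# below are illustrative). Builds a website configuration with one routing
# rule and prints the XML that would be sent to S3, using only the classes
# defined above.
if __name__ == '__main__':
    rule = RoutingRule.when(key_prefix='docs/').then_redirect(
        hostname='example.com', replace_key_prefix='documents/')
    config = WebsiteConfiguration(
        suffix='index.html',
        error_key='error.html',
        routing_rules=RoutingRules().add_rule(rule))
    print(config.to_xml())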
|
sociogenetics/xendor
|
refs/heads/master
|
xendor/adminx.py
|
1
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
import xadmin
from xendor.forms import PageAdminForm
from xendor.tree_admin import XDP17TreeModelAdmin
from xendor.models import Page, Fragment, Setting
class PageAdmin(object):
    admin_label = u'Content management'
fieldsets = (
('', {
'classes': ('closed',),
'fields': ('title', 'menu_title', 'content', 'in_menu'),
}),
        ('Metadata', {
            'classes': ('collapse closed',),
            'description': 'Used by search engines for better page indexing',
            'fields': ('meta_title', 'meta_description', 'meta_keywords'),
}),
        ('Settings', {
            'classes': ('collapse closed',),
            'description': 'Do not touch these unless you are absolutely sure',
'fields': ('slug', 'visible', 'parameters', 'template', 'app_extension', 'menu_url', 'is_main'), # 'template',
}),
        ('In the site structure...', {
'classes': ('hidden',),
'fields': ('parent',),
})
)
list_display = ['__unicode__', 'app_extension']
list_filter = ('visible', )
form = PageAdminForm
class ChunkAdmin(object):
"""Текстовые блоки (чанки)"""
admin_label = u'Управление контентом'
list_display = ['__unicode__', 'is_html', 'content']
list_editable = 'is_html',
def get_form(self, request, obj=None, **kwargs):
if hasattr(obj, 'id'):
self.exclude = ['title']
else:
self.exclude = []
return super(ChunkAdmin, self).get_form(request, obj, **kwargs)
xadmin.site.register(Page, PageAdmin)
xadmin.site.register(Fragment)
class SettingAdmin(object):
    admin_label = u'Content management'
list_display = ('__unicode__', 'value', )
list_editable = ('value', )
list_resolve_foreign_keys = ['setting']
xadmin.site.register(Setting, SettingAdmin)
|
endlessm/chromium-browser
|
refs/heads/master
|
native_client/build/package_version/package_locations.py
|
2
|
#!/usr/bin/python
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This library keeps track of all the standard locations for package files"""
import os
import posixpath
import sys
SHARED_FOLDER = 'shared'
ARCHIVE_DIR = 'package_archives'
def GetRemotePackageKey(is_shared, rev_num, package_target, package_name):
"""Returns key for package files in the google storage cloud.
Args:
is_shared: Whether or not the package is marked as shared.
rev_num: The revision identifier of when the package was built.
package_target: The package target which this package belongs to.
package_name: The name of the package.
Returns:
The google cloud storage key where the package file should be found.
"""
if is_shared:
intermediate_dir = SHARED_FOLDER
else:
intermediate_dir = package_target
return posixpath.join('builds',
str(rev_num),
intermediate_dir,
package_name + '.json')
def GetRemotePackageArchiveKey(archive_name, hash_value):
"""Returns key for package archive files in the google storage cloud.
Args:
archive_name: The name of the archive.
hash_value: The hash of the archive.
Returns:
The google cloud storage key where the package archive should be found.
"""
return posixpath.join('archives',
archive_name,
hash_value)
def GetLocalPackageFile(tar_dir, package_target, package_name):
"""Returns the local package file location.
Args:
tar_dir: The tar directory for where package archives would be found.
package_target: The package target of the package.
package_name: The name of the package.
Returns:
The standard location where local package file is found.
"""
return os.path.join(tar_dir,
package_target,
package_name + '.json')
def GetArchiveExtension(archive_name):
"""Returns the extension of an archive.
Note that the archive extension is different from how os.path.splitext splits
extensions. The standard python one splits on the last period, while this one
will split on the first period.
Args:
archive_name: The name of the archive.
Returns:
The extension of the archive.
"""
name_split = archive_name.split('.', 1)
if len(name_split) == 2:
return '.' + name_split[1]
return ''
def GetLocalPackageArchiveDir(tar_dir, archive_name):
"""Returns directory where local package archive files live.
Args:
tar_dir: The tar root directory for where package archives would be found.
archive_name: The name of the archive contained within the package.
Returns:
The standard location where local package archive files are found.
"""
return os.path.join(tar_dir,
ARCHIVE_DIR,
archive_name)
def GetLocalPackageArchiveFile(tar_dir, archive_name, archive_hash):
"""Returns the local package archive file location.
Args:
tar_dir: The tar root directory for where package archives would be found.
archive_name: The name of the archive contained within the package.
archive_hash: The hash of the archive, which will be part of the final name.
Returns:
The standard location where local package archive file is found.
"""
if isinstance(archive_hash, int) or (sys.version_info.major < 3 and
isinstance(archive_hash, long)):
archive_hash = '%040x' % archive_hash
archive_directory = GetLocalPackageArchiveDir(tar_dir, archive_name)
# Have the file keep the extension so that extractions know the file type.
archive_filename = archive_hash + GetArchiveExtension(archive_name)
return os.path.join(archive_directory, archive_filename)
def GetLocalPackageArchiveLogFile(archive_file):
"""Returns the local package archive log file location.
Args:
archive_file: Location of the local archive file location.
Returns:
The standard location where local package archive log file is found.
"""
return os.path.splitext(archive_file)[0] + '.log'
def GetRevisionFile(revision_dir, package_name):
"""Returns the local revision file location.
Args:
revision_dir: Revision directory where revision files should be found.
package_name: The name of the package revision file represents.
Returns:
The standard location where the revision file is found.
"""
return os.path.join(revision_dir,
package_name + '.json')
def GetFullDestDir(dest_dir, package_target, package_name):
"""Returns the destination directory for a package archive.
Args:
dest_dir: Destination directory root.
package_target: Package target of the package to extract.
package_name: The package name of the package to extract.
Returns:
The package directory within the destination directory.
"""
return os.path.join(dest_dir, package_target, package_name)
def GetDestPackageFile(dest_dir, package_target, package_name):
"""Returns the package file stored in the destination directory.
The package file is normally stored inside of the tar directory, but upon
extraction a copy of the package file is also made into the extraction
  directory for bookkeeping purposes.
Args:
dest_dir: Destination directory root.
package_target: Package target of the package to extract.
package_name: The package name of the package to extract.
Returns:
The location of the package file within the destination directory.
"""
return os.path.join(GetFullDestDir(dest_dir, package_target, package_name),
package_name + '.json')
def WalkPackages(tar_dir):
"""Generator for local package target packages within a root tar directory.
Use this generator to walk through the list of package targets and their
respective packages found within a local tar directory. This function does
not guarantee that these are valid package targets or packages, so it could
yield invalid names for malformed tar directories.
Args:
tar_dir: The tar root directory where package archives would be found.
Yields:
Tuple containing (package_target, [list-of package_names]).
"""
if os.path.isdir(tar_dir):
for package_target_dir in os.listdir(tar_dir):
# Skip the package archive directory
if package_target_dir == ARCHIVE_DIR:
continue
full_package_target_dir = os.path.join(tar_dir, package_target_dir)
if os.path.isdir(full_package_target_dir):
packages = [os.path.splitext(package_name)[0]
for package_name in os.listdir(full_package_target_dir)
if package_name.endswith('.json') and
os.path.isfile(os.path.join(full_package_target_dir,
package_name))]
yield (package_target_dir, packages)
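
# --- Usage sketch (illustrative values, not part of the original library):
# shows the key/path conventions implemented above.
if __name__ == '__main__':
    # Shared packages live under builds/<rev>/shared/<name>.json.
    print(GetRemotePackageKey(True, 1234, 'win_x86', 'nacl_x86_glibc'))
    # Extensions split on the first period, so '.tar.bz2' stays intact.
    print(GetArchiveExtension('toolchain.tar.bz2'))
    # Integer hashes are formatted as 40 hex digits, like a SHA-1.
    print(GetLocalPackageArchiveFile('tar', 'toolchain.tar.bz2', 0xabc))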
|
20tab/upy
|
refs/heads/master
|
upy/bin/upy_project_set/project/project/django_wsgi.py
|
1
|
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
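# Note (added for context; not in the original template): on Django >= 1.4
# the equivalent, now-preferred entry point is
# django.core.wsgi.get_wsgi_application().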
|
SBRG/ome
|
refs/heads/master
|
cobradb/model_dumping.py
|
2
|
# -*- coding: utf-8 -*-
from cobradb.models import *
from cobradb.util import increment_id, make_reaction_copy_id, timing
from sqlalchemy import and_
import cobra.core
import logging
from itertools import repeat
from collections import defaultdict
from copy import copy
import six
try:
from ipy_progressbar import ProgressBar
except ImportError:
pass
def _none_to_str(val):
return '' if val is None else val
def _make_annotation_lookup(db_links):
"""Make a lookup dictionary from a list of flat external DB links"""
lookup = defaultdict(lambda: defaultdict(set))
for res in db_links:
# skip old_bigg_id because it will be in notes
if res[0] not in ['old_bigg_id', 'deprecated']:
lookup[res[2]][res[0]].add(res[1])
# return lists instead of sets
return {
bigg_id: {source: list(vals) for source, vals in links.items()}
for bigg_id, links in lookup.items()
}
@timing
def dump_model(bigg_id):
session = Session()
# find the model
model_db = (session
.query(Model)
.filter(Model.bigg_id == bigg_id)
.first())
if model_db is None:
session.commit()
session.close()
raise Exception('Could not find model %s' % bigg_id)
model = cobra.core.Model(bigg_id)
# genes
logging.debug('Dumping genes')
# get genes and original bigg ids (might be multiple)
genes_db = (session
.query(Gene.bigg_id, Gene.name, Synonym.synonym)
.join(ModelGene, ModelGene.gene_id == Gene.id)
.join(OldIDSynonym, OldIDSynonym.ome_id == ModelGene.id)
.filter(OldIDSynonym.type == 'model_gene')
.join(Synonym, Synonym.id == OldIDSynonym.synonym_id)
.filter(ModelGene.model_id == model_db.id))
gene_names = []
old_gene_ids_dict = defaultdict(list)
for gene_id, gene_name, old_id in genes_db:
if gene_id not in old_gene_ids_dict:
gene_names.append((gene_id, gene_name))
old_gene_ids_dict[gene_id].append(old_id)
# get gene annotations
gene_db_links = _make_annotation_lookup(
session
.query(DataSource.bigg_id, Synonym.synonym, Gene.bigg_id)
.join(Synonym)
.join(Gene, Gene.id == Synonym.ome_id)
.filter(Synonym.type == 'gene')
)
for gene_id, gene_name in gene_names:
gene = cobra.core.Gene(gene_id)
gene.name = _none_to_str(gene_name)
gene.notes = {'original_bigg_ids': old_gene_ids_dict[gene_id]}
gene.annotation = gene_db_links.get(gene_id, {})
# add SBO terms
gene.annotation['sbo'] = 'SBO:0000243'
model.genes.append(gene)
# reactions
logging.debug('Dumping reactions')
# get original bigg ids (might be multiple)
reactions_db = (session
.query(ModelReaction, Reaction, Synonym.synonym)
.join(Reaction)
.outerjoin(OldIDSynonym,
and_(OldIDSynonym.ome_id == ModelReaction.id,
OldIDSynonym.type == 'model_reaction'))
.outerjoin(Synonym,
and_(Synonym.id == OldIDSynonym.synonym_id,
Synonym.type == 'reaction'))
.filter(ModelReaction.model_id == model_db.id))
reactions_model_reactions = []
found_model_reactions = set()
old_reaction_ids_dict = defaultdict(list)
for model_reaction, reaction, old_id in reactions_db:
# there may be multiple model reactions for a given bigg_id
if model_reaction.id not in found_model_reactions:
reactions_model_reactions.append((model_reaction, reaction))
found_model_reactions.add(model_reaction.id)
if old_id is not None:
old_reaction_ids_dict[reaction.bigg_id].append(old_id)
# get reaction annotations
reaction_db_links = _make_annotation_lookup(
session
.query(DataSource.bigg_id, Synonym.synonym, Reaction.bigg_id)
.join(Synonym)
.join(Reaction, Reaction.id == Synonym.ome_id)
.filter(Synonym.type == 'reaction')
)
# make dictionaries and cast results
result_dicts = []
for mr_db, r_db in reactions_model_reactions:
d = {}
d['bigg_id'] = r_db.bigg_id
d['name'] = r_db.name
d['gene_reaction_rule'] = mr_db.gene_reaction_rule
d['lower_bound'] = mr_db.lower_bound
d['upper_bound'] = mr_db.upper_bound
d['objective_coefficient'] = mr_db.objective_coefficient
d['original_bigg_ids'] = old_reaction_ids_dict[r_db.bigg_id]
d['subsystem'] = mr_db.subsystem
d['annotation'] = reaction_db_links.get(r_db.bigg_id, {})
# add SBO terms
if r_db.bigg_id.startswith('BIOMASS_'):
d['annotation']['sbo'] = 'SBO:0000629'
elif r_db.bigg_id.startswith('EX_'):
d['annotation']['sbo'] = 'SBO:0000627'
elif r_db.bigg_id.startswith('DM_'):
d['annotation']['sbo'] = 'SBO:0000628'
elif r_db.bigg_id.startswith('SK_'):
d['annotation']['sbo'] = 'SBO:0000632'
else:
# assume non-transport. will update for transporters later
d['annotation']['sbo'] = 'SBO:0000176'
# specify bigg id
d['annotation']['bigg.reaction'] = [r_db.bigg_id]
d['copy_number'] = mr_db.copy_number
result_dicts.append(d)
def filter_duplicates(result_dicts):
"""Find the reactions with multiple ModelReactions and increment names."""
tups_by_bigg_id = defaultdict(list)
# for each ModelReaction
for d in result_dicts:
# add to duplicates
tups_by_bigg_id[d['bigg_id']].append(d)
# duplicates have multiple ModelReactions
duplicates = {k: v for k, v in six.iteritems(tups_by_bigg_id) if len(v) > 1}
for bigg_id, dup_dicts in six.iteritems(duplicates):
            # add _copy1, _copy2, etc. to the bigg ids for the duplicates
for d in dup_dicts:
d['bigg_id'] = make_reaction_copy_id(bigg_id, d['copy_number'])
return result_dicts
# fix duplicates
result_filtered = filter_duplicates(result_dicts)
reactions = []
objectives = {}
for result_dict in result_filtered:
r = cobra.core.Reaction(result_dict['bigg_id'])
r.name = _none_to_str(result_dict['name'])
r.gene_reaction_rule = result_dict['gene_reaction_rule']
r.lower_bound = result_dict['lower_bound']
r.upper_bound = result_dict['upper_bound']
r.notes = {'original_bigg_ids': result_dict['original_bigg_ids']}
r.subsystem = result_dict['subsystem']
r.annotation = result_dict['annotation']
reactions.append(r)
objectives[r.id] = result_dict['objective_coefficient']
model.add_reactions(reactions)
for k, v in six.iteritems(objectives):
model.reactions.get_by_id(k).objective_coefficient = v
# metabolites
logging.debug('Dumping metabolites')
# get original bigg ids (might be multiple)
metabolites_db = (session
.query(Component.bigg_id,
Component.name,
ModelCompartmentalizedComponent.formula,
ModelCompartmentalizedComponent.charge,
Compartment.bigg_id,
Synonym.synonym)
.join(CompartmentalizedComponent,
CompartmentalizedComponent.component_id == Component.id) # noqa
.join(Compartment,
Compartment.id == CompartmentalizedComponent.compartment_id) # noqa
.join(ModelCompartmentalizedComponent,
ModelCompartmentalizedComponent.compartmentalized_component_id == CompartmentalizedComponent.id) # noqa
.join(OldIDSynonym, OldIDSynonym.ome_id == ModelCompartmentalizedComponent.id) # noqa
.filter(OldIDSynonym.type == 'model_compartmentalized_component') # noqa
.filter(Synonym.type == 'compartmentalized_component')
.join(Synonym)
.filter(ModelCompartmentalizedComponent.model_id == model_db.id)) # noqa
metabolite_names = []
old_metabolite_ids_dict = defaultdict(list)
for metabolite_id, metabolite_name, formula, charge, compartment_id, old_id in metabolites_db:
if metabolite_id + '_' + compartment_id not in old_metabolite_ids_dict:
metabolite_names.append((metabolite_id, metabolite_name, formula, charge, compartment_id))
old_metabolite_ids_dict[metabolite_id + '_' + compartment_id].append(old_id)
# get metabolite annotations
metabolite_db_links = _make_annotation_lookup(
session
.query(DataSource.bigg_id, Synonym.synonym, Component.bigg_id)
.join(Synonym)
.join(Component, Component.id == Synonym.ome_id)
.filter(Synonym.type == 'component')
)
metabolites = []
compartments = set()
for component_id, component_name, formula, charge, compartment_id in metabolite_names:
if component_id is not None and compartment_id is not None:
m = cobra.core.Metabolite(id=component_id + '_' + compartment_id,
compartment=compartment_id,
formula=formula)
m.charge = charge
m.name = _none_to_str(component_name)
m.notes = {'original_bigg_ids': old_metabolite_ids_dict[component_id + '_' + compartment_id]}
m.annotation = metabolite_db_links.get(component_id, {})
# specify bigg id
m.annotation['bigg.metabolite'] = [component_id]
m.annotation['sbo'] = 'SBO:0000247'
compartments.add(compartment_id)
metabolites.append(m)
model.add_metabolites(metabolites)
# compartments
compartment_db = (session.query(Compartment)
.filter(Compartment.bigg_id.in_(compartments)))
model.compartments = {i.bigg_id: i.name for i in compartment_db}
# reaction matrix
logging.debug('Dumping reaction matrix')
matrix_db = (session
.query(ReactionMatrix.stoichiometry, Reaction.bigg_id,
Component.bigg_id, Compartment.bigg_id)
# component, compartment
.join(CompartmentalizedComponent,
CompartmentalizedComponent.id == ReactionMatrix.compartmentalized_component_id) # noqa
.join(Component,
Component.id == CompartmentalizedComponent.component_id)
.join(Compartment,
Compartment.id == CompartmentalizedComponent.compartment_id) # noqa
# reaction
.join(Reaction, Reaction.id == ReactionMatrix.reaction_id)
.join(ModelReaction, ModelReaction.reaction_id == Reaction.id)
.filter(ModelReaction.model_id == model_db.id)
.distinct()) # make sure we don't duplicate
# load metabolites
compartments_for_reaction = defaultdict(set)
for stoich, reaction_id, component_id, compartment_id in matrix_db:
try:
m = model.metabolites.get_by_id(component_id + '_' + compartment_id)
except KeyError:
logging.warning('Metabolite not found %s in compartment %s for reaction %s' %
(component_id, compartment_id, reaction_id))
continue
# add to reactions
if reaction_id in model.reactions:
# check again that we don't duplicate
r = model.reactions.get_by_id(reaction_id)
if m not in r.metabolites:
r.add_metabolites({m: float(stoich)})
# check for transporters and update sbo term
compartments_for_reaction[reaction_id].add(m.compartment)
if len(compartments_for_reaction[reaction_id]) > 1 and r.annotation['sbo'] == 'SBO:0000176': # noqa
r.annotation['sbo'] = 'SBO:0000185'
else:
# try incremented ids
while True:
reaction_id = increment_id(reaction_id, 'copy')
try:
# check again that we don't duplicate
r = model.reactions.get_by_id(reaction_id)
if m not in r.metabolites:
r.add_metabolites({m: float(stoich)})
except KeyError:
break
session.commit()
session.close()
cobra.manipulation.annotate.add_SBO(model)
return model
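
# --- Illustrative check (hypothetical rows, not from the original module):
# _make_annotation_lookup flattens (data_source, synonym, bigg_id) rows into
# a per-object annotation dict, dropping old_bigg_id/deprecated entries.
if __name__ == '__main__':
    rows = [('ncbigene', '945771', 'b0720'),
            ('old_bigg_id', 'GLTA', 'b0720')]  # second row is filtered out
    assert _make_annotation_lookup(rows) == {'b0720': {'ncbigene': ['945771']}}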
|
munk/pywumpus
|
refs/heads/master
|
test_player.py
|
2
|
from player import Player
def test_create_player():
p = Player()
assert p.ammo == 5
assert p.location == 0
|
jsilter/scipy
|
refs/heads/master
|
scipy/io/matlab/benchmarks/bench_structarr.py
|
21
|
from __future__ import division, print_function, absolute_import
from numpy.testing import *
from io import BytesIO
import numpy as np
import scipy.io as sio
def make_structarr(n_vars, n_fields, n_structs):
var_dict = {}
for vno in range(n_vars):
vname = 'var%00d' % vno
end_dtype = [('f%d' % d, 'i4', 10) for d in range(n_fields)]
s_arrs = np.zeros((n_structs,), dtype=end_dtype)
var_dict[vname] = s_arrs
return var_dict
def bench_run():
str_io = BytesIO()
print()
print('Read / writing matlab structs')
print('='*60)
print(' write | read | vars | fields | structs | compressed')
print('-'*60)
print()
for n_vars, n_fields, n_structs in (
(10, 10, 20), (20, 20, 40), (30, 30, 50)):
var_dict = make_structarr(n_vars, n_fields, n_structs)
for compression in (False, True):
str_io = BytesIO()
write_time = measure('sio.savemat(str_io, var_dict, do_compression=%r)' % compression)
read_time = measure('sio.loadmat(str_io)')
print('%.5f | %.5f | %5d | %5d | %5d | %r' % (
write_time,
read_time,
n_vars,
n_fields,
n_structs,
compression))
if __name__ == '__main__':
bench_run()
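
# --- Shape sketch (illustrative, not part of the benchmark):
# make_structarr(1, 2, 3) returns
# {'var0': np.zeros((3,), dtype=[('f0', 'i4', 10), ('f1', 'i4', 10)])},
# i.e. n_structs records, each holding n_fields int32 sub-arrays of length 10.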
|
luchesar/emibg
|
refs/heads/master
|
node_modules/pangyp/gyp/pylib/gyp/easy_xml_test.py
|
2698
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(['test']),
'<?xml version="1.0" encoding="utf-8"?><test/>')
self.assertEqual(
easy_xml.XmlToString(['test'], encoding='Windows-1252'),
'<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
def test_EasyXml_escaping(self):
original = '<test>\'"\r&\nfoo'
    converted = '&lt;test&gt;\'"&#xD;&amp;&#xA;foo'
    converted_apos = converted.replace("'", '&apos;')
self.assertEqual(
easy_xml.XmlToString(['test3', {'a': original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
(converted, converted_apos))
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
['test3',
['GrandParent',
['Parent1',
['Child']
],
['Parent2']
]
],
pretty=True),
'<?xml version="1.0" encoding="utf-8"?>\n'
'<test3>\n'
' <GrandParent>\n'
' <Parent1>\n'
' <Child/>\n'
' </Parent1>\n'
' <Parent2/>\n'
' </GrandParent>\n'
'</test3>\n')
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup '
'Condition="\'$(Configuration)|$(Platform)\'=='
'\'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.XmlToString(
['Project',
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(xml, target)
if __name__ == '__main__':
unittest.main()
|
errx/django
|
refs/heads/master
|
tests/inline_formsets/models.py
|
19
|
# coding: utf-8
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class School(models.Model):
name = models.CharField(max_length=100)
class Parent(models.Model):
name = models.CharField(max_length=100)
class Child(models.Model):
mother = models.ForeignKey(Parent, related_name='mothers_children')
father = models.ForeignKey(Parent, related_name='fathers_children')
school = models.ForeignKey(School)
name = models.CharField(max_length=100)
@python_2_unicode_compatible
class Poet(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Poem(models.Model):
poet = models.ForeignKey(Poet)
name = models.CharField(max_length=100)
def __str__(self):
return self.name
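
# --- Usage sketch (not part of the original test models): these models back
# Django's inline formset tests; a typical formset over them would be
# something like:
#   from django.forms.models import inlineformset_factory
#   PoemFormSet = inlineformset_factory(Poet, Poem, fields=('name',))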
|
fergalbyrne/nupic
|
refs/heads/master
|
tests/integration/nupic/engine/network_twonode_test.py
|
35
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This test demonstrates building and running
a two-node network. Some features demonstrated include:
- Can add regions to network and set dimensions
- Linking induces dimensions correctly
- Network computation happens in correct order
- Direct (zero-copy) access to outputs
- Linking correctly maps outputs to inputs
"""
import logging
import unittest2 as unittest
from nupic.engine import Network, Dimensions
LOGGER = logging.getLogger(__name__)
class NetworkTwoNodeTest(unittest.TestCase):
def testTwoNode(self):
# =====================================================
# Build and run the network
# =====================================================
net = Network()
level1 = net.addRegion("level1", "TestNode", "{int32Param: 15}")
dims = Dimensions([6, 4])
level1.setDimensions(dims)
level2 = net.addRegion("level2", "TestNode", "{real64Param: 128.23}")
net.link("level1", "level2", "TestFanIn2", "")
# Could call initialize here, but not necessary as net.run()
# initializes implicitly.
# net.initialize()
net.run(1)
LOGGER.info("Successfully created network and ran for one iteration")
# =====================================================
# Check everything
# =====================================================
dims = level1.getDimensions()
self.assertEquals(len(dims), 2)
self.assertEquals(dims[0], 6)
self.assertEquals(dims[1], 4)
dims = level2.getDimensions()
self.assertEquals(len(dims), 2)
self.assertEquals(dims[0], 3)
self.assertEquals(dims[1], 2)
    # Check L1 output. getOutputData gives direct (zero-copy) access,
    # i.e. a view onto the actual output buffer.
# Actual output values are determined by the TestNode
# compute() behavior.
l1output = level1.getOutputData("bottomUpOut")
self.assertEquals(len(l1output), 48) # 24 nodes; 2 values per node
for i in xrange(24):
self.assertEquals(l1output[2*i], 0) # size of input to each node is 0
self.assertEquals(l1output[2*i+1], i) # node number
    # Check L2 output.
    l2output = level2.getOutputData("bottomUpOut")
self.assertEquals(len(l2output), 12) # 6 nodes; 2 values per node
# Output val = node number + sum(inputs)
# Can compute from knowing L1 layout
#
# 00 01 | 02 03 | 04 05
# 06 07 | 08 09 | 10 11
# ---------------------
# 12 13 | 14 15 | 16 17
# 18 19 | 20 21 | 22 23
outputVals = []
outputVals.append(0 + (0 + 1 + 6 + 7))
outputVals.append(1 + (2 + 3 + 8 + 9))
outputVals.append(2 + (4 + 5 + 10 + 11))
outputVals.append(3 + (12 + 13 + 18 + 19))
outputVals.append(4 + (14 + 15 + 20 + 21))
outputVals.append(5 + (16 + 17 + 22 + 23))
for i in xrange(6):
self.assertEquals(l2output[2*i], 8) # size of input for each node is 8
self.assertEquals(l2output[2*i+1], outputVals[i])
# =====================================================
# Run for one more iteration
# =====================================================
LOGGER.info("Running for a second iteration")
net.run(1)
# =====================================================
# Check everything again
# =====================================================
# Outputs are all the same except that the first output is
# incremented by the iteration number
for i in xrange(24):
self.assertEquals(l1output[2*i], 1)
self.assertEquals(l1output[2*i+1], i)
for i in xrange(6):
self.assertEquals(l2output[2*i], 9)
self.assertEquals(l2output[2*i+1], outputVals[i] + 4)
def testLinkingDownwardDimensions(self):
#
# Linking can induce dimensions downward
#
net = Network()
level1 = net.addRegion("level1", "TestNode", "")
level2 = net.addRegion("level2", "TestNode", "")
dims = Dimensions([3, 2])
level2.setDimensions(dims)
net.link("level1", "level2", "TestFanIn2", "")
net.initialize()
# Level1 should now have dimensions [6, 4]
self.assertEquals(level1.getDimensions()[0], 6)
self.assertEquals(level1.getDimensions()[1], 4)
#
# We get nice error messages when network can't be initialized
#
LOGGER.info("=====")
LOGGER.info("Creating a 3 level network in which levels 1 and 2 have")
LOGGER.info("dimensions but network initialization will fail because")
LOGGER.info("level3 does not have dimensions")
LOGGER.info("Error message follows:")
net = Network()
level1 = net.addRegion("level1", "TestNode", "")
level2 = net.addRegion("level2", "TestNode", "")
_level3 = net.addRegion("level3", "TestNode", "")
dims = Dimensions([6, 4])
level1.setDimensions(dims)
net.link("level1", "level2", "TestFanIn2", "")
self.assertRaises(RuntimeError, net.initialize)
LOGGER.info("=====")
LOGGER.info("======")
LOGGER.info("Creating a link with incompatible dimensions. \
Error message follows")
net.link("level2", "level3", "TestFanIn2", "")
self.assertRaises(RuntimeError, net.initialize)
if __name__ == "__main__":
unittest.main()
|
40223245/2015cd_midterm
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/browser/ajax.py
|
735
|
from _ajax import *
|
romanpitak/torrentsearch
|
refs/heads/master
|
setup.py
|
1
|
# -*- coding: utf-8 -*-
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='torrentsearch',
version='0.0.0',
description='Torrent search library.',
long_description=readme(),
url='https://github.com/romanpitak/torrentsearch',
author='Roman Piták',
author_email='roman@pitak.net',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
],
keywords='torrent',
packages=['torrentsearch'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False,
)
|
enitihas/SAC-Website
|
refs/heads/master
|
helpers.py
|
1
|
from glob import glob
from binascii import hexlify, unhexlify
societies = {
"res": "Research Society",
"spo": "Sports Society",
"lit": "Literary Society",
"tec": "Technical Society",
"cul": "Cultural Society",
"aca": "Academic Society",
"gen": "SAC",
"nss": "NSS IIT-Mandi",
}
months = [
"-", "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"
]
def get_entries_raw():
return glob("posts/*")
def get_entries(filter_soc="", filter_clb=""):
"""Collect entries from /posts.
Entries in posts are named yyyymmdd.soc.clb.md"""
entries_raw = get_entries_raw()
entries = []
# We need the latest entries to be displayed first.
for entry_raw in reversed(sorted(entries_raw)):
# Collect meta-data from filename of entry.
date, society, club = get_entry_mdata(entry_raw)
if filter_soc != "" and filter_soc != society:
continue
if filter_clb != "" and filter_clb != club:
continue
with open(entry_raw) as doc:
entries.append([hexlify(entry_raw),
doc.readline(),
societies[society],
date])
return entries
def get_date(entry_raw):
date_raw = entry_raw[6:14]
year, month, day = date_raw[:4], months[int(date_raw[4:6])], date_raw[6:8]
return month + " " + day + ", " + year
def get_society(entry_raw):
return entry_raw[15:18]
def get_club(entry_raw):
return entry_raw[19:22]
def get_entry_mdata(entry_raw):
return [get_date(entry_raw), get_society(entry_raw), get_club(entry_raw)]
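
# --- Worked example (hypothetical filename, not part of the original module):
# for 'posts/20150412.res.phy.md', entry_raw[6:14] is '20150412', so the
# fixed-offset slices above decode as follows:
if __name__ == '__main__':
    sample = 'posts/20150412.res.phy.md'
    assert get_date(sample) == 'April 12, 2015'
    assert get_society(sample) == 'res'
    assert get_club(sample) == 'phy'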
|
khushboo9293/mailman3
|
refs/heads/develop
|
src/mailman/model/listmanager.py
|
4
|
# Copyright (C) 2007-2015 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""A mailing list manager."""
__all__ = [
'ListManager',
]
from mailman.database.transaction import dbconnection
from mailman.interfaces.address import InvalidEmailAddressError
from mailman.interfaces.listmanager import (
IListManager, ListAlreadyExistsError, ListCreatedEvent, ListCreatingEvent,
ListDeletedEvent, ListDeletingEvent)
from mailman.model.mailinglist import IAcceptableAliasSet, MailingList
from mailman.model.mime import ContentFilter
from mailman.utilities.datetime import now
from zope.event import notify
from zope.interface import implementer
@implementer(IListManager)
class ListManager:
"""An implementation of the `IListManager` interface."""
@dbconnection
def create(self, store, fqdn_listname):
"""See `IListManager`."""
fqdn_listname = fqdn_listname.lower()
listname, at, hostname = fqdn_listname.partition('@')
if len(hostname) == 0:
raise InvalidEmailAddressError(fqdn_listname)
list_id = '{0}.{1}'.format(listname, hostname)
notify(ListCreatingEvent(fqdn_listname))
mlist = store.query(MailingList).filter_by(_list_id=list_id).first()
if mlist:
raise ListAlreadyExistsError(fqdn_listname)
mlist = MailingList(fqdn_listname)
mlist.created_at = now()
store.add(mlist)
notify(ListCreatedEvent(mlist))
return mlist
@dbconnection
def get(self, store, fqdn_listname):
"""See `IListManager`."""
listname, at, hostname = fqdn_listname.partition('@')
list_id = '{0}.{1}'.format(listname, hostname)
return store.query(MailingList).filter_by(_list_id=list_id).first()
@dbconnection
def get_by_list_id(self, store, list_id):
"""See `IListManager`."""
return store.query(MailingList).filter_by(_list_id=list_id).first()
@dbconnection
def delete(self, store, mlist):
"""See `IListManager`."""
fqdn_listname = mlist.fqdn_listname
notify(ListDeletingEvent(mlist))
# First delete information associated with the mailing list.
IAcceptableAliasSet(mlist).clear()
store.query(ContentFilter).filter_by(mailing_list=mlist).delete()
store.delete(mlist)
notify(ListDeletedEvent(fqdn_listname))
@property
@dbconnection
def mailing_lists(self, store):
"""See `IListManager`."""
for mlist in store.query(MailingList).order_by(
MailingList._list_id).all():
yield mlist
@dbconnection
def __iter__(self, store):
"""See `IListManager`."""
for mlist in store.query(MailingList).all():
yield mlist
@property
@dbconnection
def names(self, store):
"""See `IListManager`."""
result_set = store.query(MailingList)
for mail_host, list_name in result_set.values(MailingList.mail_host,
MailingList.list_name):
yield '{0}@{1}'.format(list_name, mail_host)
@property
@dbconnection
def list_ids(self, store):
"""See `IListManager`."""
result_set = store.query(MailingList)
for list_id in result_set.values(MailingList._list_id):
assert isinstance(list_id, tuple) and len(list_id) == 1
yield list_id[0]
@property
@dbconnection
def name_components(self, store):
"""See `IListManager`."""
result_set = store.query(MailingList)
for mail_host, list_name in result_set.values(MailingList.mail_host,
MailingList.list_name):
yield list_name, mail_host
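
# --- Usage sketch (assumes a configured Mailman database layer; the list
# manager is normally obtained through the component architecture rather
# than instantiated directly):
#   manager = ListManager()
#   mlist = manager.create('ant@example.com')  # fires ListCreating/CreatedEvent
#   assert manager.get_by_list_id('ant.example.com') is not None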
|
joney000/Online-Code-Checker-and-Compiler-in-Java-CodeOj-codeoj.com-
|
refs/heads/master
|
build/web/env/Main1156/Main1156.py
|
192
|
while True:
    n = input()
    if n == 42:
        break
    print n
|
Taapat/enigma2-openpli-fulan
|
refs/heads/master
|
lib/python/Components/Ipkg.py
|
1
|
import os
from enigma import eConsoleAppContainer
from Components.Harddisk import harddiskmanager
opkgDestinations = []
opkgStatusPath = ''
def opkgExtraDestinations():
global opkgDestinations
return ''.join([" --dest %s:%s" % (i,i) for i in opkgDestinations])
def opkgAddDestination(mountpoint):
pass
#global opkgDestinations
#if mountpoint not in opkgDestinations:
#opkgDestinations.append(mountpoint)
#print "[Ipkg] Added to OPKG destinations:", mountpoint
def onPartitionChange(why, part):
global opkgDestinations
global opkgStatusPath
mountpoint = os.path.normpath(part.mountpoint)
if mountpoint and mountpoint != '/':
if why == 'add':
if opkgStatusPath == '':
# older opkg versions
opkgStatusPath = 'usr/lib/opkg/status'
if not os.path.exists(os.path.join('/', opkgStatusPath)):
# recent opkg versions
opkgStatusPath = 'var/lib/opkg/status'
if os.path.exists(os.path.join(mountpoint, opkgStatusPath)):
opkgAddDestination(mountpoint)
elif why == 'remove':
try:
opkgDestinations.remove(mountpoint)
print "[Ipkg] Removed from OPKG destinations:", mountpoint
except:
pass
harddiskmanager.on_partition_list_change.append(onPartitionChange)
for part in harddiskmanager.getMountedPartitions():
onPartitionChange('add', part)
class IpkgComponent:
EVENT_INSTALL = 0
EVENT_DOWNLOAD = 1
EVENT_INFLATING = 2
EVENT_CONFIGURING = 3
EVENT_REMOVE = 4
EVENT_UPGRADE = 5
EVENT_LISTITEM = 9
EVENT_DONE = 10
EVENT_ERROR = 11
EVENT_MODIFIED = 12
CMD_INSTALL = 0
CMD_LIST = 1
CMD_REMOVE = 2
CMD_UPDATE = 3
CMD_UPGRADE = 4
CMD_UPGRADE_LIST = 5
def __init__(self, ipkg = 'opkg'):
self.ipkg = ipkg
self.cmd = eConsoleAppContainer()
self.cache = None
self.callbackList = []
self.setCurrentCommand()
def setCurrentCommand(self, command = None):
self.currentCommand = command
def runCmdEx(self, cmd):
self.runCmd(opkgExtraDestinations() + ' ' + cmd)
def runCmd(self, cmd):
print "executing", self.ipkg, cmd
self.cmd.appClosed.append(self.cmdFinished)
self.cmd.dataAvail.append(self.cmdData)
if self.cmd.execute(self.ipkg + " " + cmd):
self.cmdFinished(-1)
def startCmd(self, cmd, args = None):
if cmd is self.CMD_UPDATE:
self.runCmdEx("update")
elif cmd is self.CMD_UPGRADE:
append = ""
if args["test_only"]:
append = " -test"
self.runCmdEx("upgrade" + append)
elif cmd is self.CMD_LIST:
self.fetchedList = []
if args['installed_only']:
self.runCmdEx("list_installed")
else:
self.runCmd("list")
elif cmd is self.CMD_INSTALL:
self.runCmd("install " + args['package'])
elif cmd is self.CMD_REMOVE:
self.runCmd("remove " + args['package'])
elif cmd is self.CMD_UPGRADE_LIST:
self.fetchedList = []
self.runCmdEx("list_upgradable")
self.setCurrentCommand(cmd)
def cmdFinished(self, retval):
self.callCallbacks(self.EVENT_DONE)
self.cmd.appClosed.remove(self.cmdFinished)
self.cmd.dataAvail.remove(self.cmdData)
def cmdData(self, data):
print "data:", data
if self.cache is None:
self.cache = data
else:
self.cache += data
if '\n' in data:
splitcache = self.cache.split('\n')
if self.cache[-1] == '\n':
iteration = splitcache
self.cache = None
else:
iteration = splitcache[:-1]
self.cache = splitcache[-1]
for mydata in iteration:
if mydata != '':
self.parseLine(mydata)
def parseLine(self, data):
try:
if data.startswith('Not selecting'):
return
if self.currentCommand in (self.CMD_LIST, self.CMD_UPGRADE_LIST):
item = data.split(' - ', 2)
if len(item) < 3:
self.callCallbacks(self.EVENT_ERROR, None)
return
self.fetchedList.append(item)
self.callCallbacks(self.EVENT_LISTITEM, item)
return
if data[:11] == 'Downloading':
self.callCallbacks(self.EVENT_DOWNLOAD, data.split(' ', 5)[1].strip())
elif data[:9] == 'Upgrading':
self.callCallbacks(self.EVENT_UPGRADE, data.split(' ', 2)[1])
elif data[:10] == 'Installing':
self.callCallbacks(self.EVENT_INSTALL, data.split(' ', 2)[1])
elif data[:8] == 'Removing':
self.callCallbacks(self.EVENT_REMOVE, data.split(' ', 3)[2])
elif data[:11] == 'Configuring':
self.callCallbacks(self.EVENT_CONFIGURING, data.split(' ', 2)[1])
elif data[:17] == 'An error occurred':
self.callCallbacks(self.EVENT_ERROR, None)
elif data[:18] == 'Failed to download':
self.callCallbacks(self.EVENT_ERROR, None)
elif data[:21] == 'ipkg_download: ERROR:':
self.callCallbacks(self.EVENT_ERROR, None)
elif 'Configuration file \'' in data:
# Note: the config file update question doesn't end with a newline, so
# if we get multiple config file update questions, the next ones
# don't necessarily start at the beginning of a line
self.callCallbacks(self.EVENT_MODIFIED, data.split(' \'', 3)[1][:-1])
except Exception, ex:
print "[Ipkg] Failed to parse: '%s'" % data
print "[Ipkg]", ex
def callCallbacks(self, event, param = None):
for callback in self.callbackList:
callback(event, param)
def addCallback(self, callback):
self.callbackList.append(callback)
def removeCallback(self, callback):
self.callbackList.remove(callback)
def getFetchedList(self):
return self.fetchedList
def stop(self):
self.cmd.kill()
def isRunning(self):
return self.cmd.running()
def write(self, what):
if what:
            # We expect unterminated commands
what += "\n"
self.cmd.write(what, len(what))
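
# --- Usage sketch (assumes a running Enigma2 main loop, which drives the
# eConsoleAppContainer callbacks; not part of the original module):
#   def on_ipkg_event(event, param):
#       if event == IpkgComponent.EVENT_DONE:
#           print "list fetched:", len(ipkg.getFetchedList())
#   ipkg = IpkgComponent()
#   ipkg.addCallback(on_ipkg_event)
#   ipkg.startCmd(IpkgComponent.CMD_LIST, {'installed_only': True})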
|
JerzySpendel/python-social-auth
|
refs/heads/master
|
social/backends/wunderlist.py
|
72
|
from social.backends.oauth import BaseOAuth2
class WunderlistOAuth2(BaseOAuth2):
"""Wunderlist OAuth2 authentication backend"""
name = 'wunderlist'
AUTHORIZATION_URL = 'https://www.wunderlist.com/oauth/authorize'
ACCESS_TOKEN_URL = 'https://www.wunderlist.com/oauth/access_token'
ACCESS_TOKEN_METHOD = 'POST'
REDIRECT_STATE = False
def get_user_details(self, response):
"""Return user details from Wunderlist account"""
fullname, first_name, last_name = self.get_user_names(
response.get('name')
)
return {'username': str(response.get('id')),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
headers = {
'X-Access-Token': access_token,
'X-Client-ID': self.setting('KEY')}
return self.get_json(
'https://a.wunderlist.com/api/v1/user', headers=headers)
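
# --- Configuration sketch (hypothetical credentials): enabling this backend
# in a python-social-auth settings module follows the usual
# SOCIAL_AUTH_<NAME>_KEY / _SECRET convention (see self.setting('KEY') above):
#   AUTHENTICATION_BACKENDS = (
#       'social.backends.wunderlist.WunderlistOAuth2',
#   )
#   SOCIAL_AUTH_WUNDERLIST_KEY = '...'
#   SOCIAL_AUTH_WUNDERLIST_SECRET = '...'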
|
thaim/ansible
|
refs/heads/fix-broken-link
|
lib/ansible/modules/network/check_point/cp_mgmt_service_sctp.py
|
20
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage Check Point Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: cp_mgmt_service_sctp
short_description: Manages service-sctp objects on Check Point over Web Services API
description:
- Manages service-sctp objects on Check Point devices including creating, updating and removing objects.
- All operations are performed over Web Services API.
version_added: "2.9"
author: "Or Soffer (@chkp-orso)"
options:
name:
description:
- Object name.
type: str
required: True
port:
description:
- Port number. To specify a port range add a hyphen between the lowest and the highest port numbers, for example 44-45.
type: str
aggressive_aging:
description:
- Sets short (aggressive) timeouts for idle connections.
type: dict
suboptions:
default_timeout:
description:
- Default aggressive aging timeout in seconds.
type: int
enable:
description:
- N/A
type: bool
timeout:
description:
- Aggressive aging timeout in seconds.
type: int
use_default_timeout:
description:
- N/A
type: bool
keep_connections_open_after_policy_installation:
description:
- Keep connections open after policy has been installed even if they are not allowed under the new policy. This overrides the settings in the
Connection Persistence page. If you change this property, the change will not affect open connections, but only future connections.
type: bool
match_for_any:
description:
- Indicates whether this service is used when 'Any' is set as the rule's service and there are several service objects with the same source port
and protocol.
type: bool
session_timeout:
description:
- Time (in seconds) before the session times out.
type: int
source_port:
description:
- Source port number. To specify a port range add a hyphen between the lowest and the highest port numbers, for example 44-45.
type: str
sync_connections_on_cluster:
description:
- Enables state-synchronized High Availability or Load Sharing on a ClusterXL or OPSEC-certified cluster.
type: bool
tags:
description:
- Collection of tag identifiers.
type: list
use_default_session_timeout:
description:
- Use default virtual session timeout.
type: bool
color:
description:
- Color of the object. Should be one of existing colors.
type: str
choices: ['aquamarine', 'black', 'blue', 'crete blue', 'burlywood', 'cyan', 'dark green', 'khaki', 'orchid', 'dark orange', 'dark sea green',
'pink', 'turquoise', 'dark blue', 'firebrick', 'brown', 'forest green', 'gold', 'dark gold', 'gray', 'dark gray', 'light green', 'lemon chiffon',
'coral', 'sea green', 'sky blue', 'magenta', 'purple', 'slate blue', 'violet red', 'navy blue', 'olive', 'orange', 'red', 'sienna', 'yellow']
comments:
description:
- Comments string.
type: str
details_level:
description:
- The level of detail for some of the fields in the response can vary from showing only the UID value of the object to a fully detailed
representation of the object.
type: str
choices: ['uid', 'standard', 'full']
groups:
description:
- Collection of group identifiers.
type: list
ignore_warnings:
description:
- Apply changes ignoring warnings.
type: bool
ignore_errors:
description:
      - Apply changes ignoring errors. You won't be able to publish such changes. If the ignore-warnings flag was omitted, warnings will also be ignored.
type: bool
extends_documentation_fragment: checkpoint_objects
"""
EXAMPLES = """
- name: add-service-sctp
cp_mgmt_service_sctp:
aggressive_aging:
enable: true
timeout: 360
use_default_timeout: false
keep_connections_open_after_policy_installation: false
match_for_any: true
name: New_SCTP_Service_1
port: 5669
session_timeout: 0
state: present
sync_connections_on_cluster: true
- name: set-service-sctp
cp_mgmt_service_sctp:
aggressive_aging:
default_timeout: 3600
color: green
name: New_SCTP_Service_1
port: 5656
state: present
- name: delete-service-sctp
cp_mgmt_service_sctp:
name: New_SCTP_Service_2
state: absent
"""
RETURN = """
cp_mgmt_service_sctp:
description: The checkpoint object created or updated.
returned: always, except when deleting the object.
type: dict
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec_for_objects, api_call
def main():
argument_spec = dict(
name=dict(type='str', required=True),
port=dict(type='str'),
aggressive_aging=dict(type='dict', options=dict(
default_timeout=dict(type='int'),
enable=dict(type='bool'),
timeout=dict(type='int'),
use_default_timeout=dict(type='bool')
)),
keep_connections_open_after_policy_installation=dict(type='bool'),
match_for_any=dict(type='bool'),
session_timeout=dict(type='int'),
source_port=dict(type='str'),
sync_connections_on_cluster=dict(type='bool'),
tags=dict(type='list'),
use_default_session_timeout=dict(type='bool'),
color=dict(type='str', choices=['aquamarine', 'black', 'blue', 'crete blue', 'burlywood', 'cyan', 'dark green',
'khaki', 'orchid', 'dark orange', 'dark sea green', 'pink', 'turquoise', 'dark blue', 'firebrick', 'brown',
'forest green', 'gold', 'dark gold', 'gray', 'dark gray', 'light green', 'lemon chiffon', 'coral', 'sea green',
'sky blue', 'magenta', 'purple', 'slate blue', 'violet red', 'navy blue', 'olive', 'orange', 'red', 'sienna',
'yellow']),
comments=dict(type='str'),
details_level=dict(type='str', choices=['uid', 'standard', 'full']),
groups=dict(type='list'),
ignore_warnings=dict(type='bool'),
ignore_errors=dict(type='bool')
)
argument_spec.update(checkpoint_argument_spec_for_objects)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
api_call_object = 'service-sctp'
result = api_call(module, api_call_object)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
blondegeek/pymatgen
|
refs/heads/master
|
pymatgen/symmetry/tests/__init__.py
|
161
|
# coding=utf-8
|
atandon/Ghost
|
refs/heads/master
|
node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/lexers/jvm.py
|
193
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.jvm
~~~~~~~~~~~~~~~~~~~
Pygments lexers for JVM languages.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
this
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
from pygments.util import get_choice_opt
from pygments import unistring as uni
__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'KotlinLexer',
'XtendLexer', 'AspectJLexer', 'CeylonLexer']
class JavaLexer(RegexLexer):
"""
For `Java <http://www.sun.com/java/>`_ source code.
"""
name = 'Java'
aliases = ['java']
filenames = ['*.java']
mimetypes = ['text/x-java']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]<>]*\s+)+?)' # return arguments
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
Keyword),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
(r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
(r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import': [
(r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
],
}
class AspectJLexer(JavaLexer):
"""
For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
*New in Pygments 1.6.*
"""
name = 'AspectJ'
aliases = ['aspectj']
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
aj_keywords = [
'aspect', 'pointcut', 'privileged', 'call', 'execution',
'initialization', 'preinitialization', 'handler', 'get', 'set',
'staticinitialization', 'target', 'args', 'within', 'withincode',
'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
]
aj_inter_type = ['parents:', 'warning:', 'error:', 'soft:', 'precedence:']
aj_inter_type_annotation = ['@type', '@method', '@constructor', '@field']
def get_tokens_unprocessed(self, text):
for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
if token is Name and value in self.aj_keywords:
yield index, Keyword, value
elif token is Name.Label and value in self.aj_inter_type:
yield index, Keyword, value[:-1]
yield index, Operator, value[-1]
elif token is Name.Decorator and value in self.aj_inter_type_annotation:
yield index, Keyword, value
else:
yield index, token, value
class ScalaLexer(RegexLexer):
"""
For `Scala <http://www.scala-lang.org>`_ source code.
"""
name = 'Scala'
aliases = ['scala']
filenames = ['*.scala']
mimetypes = ['text/x-scala']
flags = re.MULTILINE | re.DOTALL
# don't use raw unicode strings!
op = (u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1'
u'\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9'
u'\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2'
u'\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38'
u'\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940'
u'\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c'
u'\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118'
u'\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144'
u'\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767'
u'\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb'
u'\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020'
u'\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3'
u'\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff'
u'\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66'
u'\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+')
letter = (u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6'
u'\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386'
u'\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2'
u'\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5'
u'\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5'
u'\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961'
u'\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1'
u'\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd'
u'\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9'
u'\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1'
u'\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30'
u'\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4'
u'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f'
u'\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070'
u'\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f'
u'\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711'
u'\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc'
u'\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7'
u'\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf'
u'\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77'
u'\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb'
u'\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113'
u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139'
u'\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c'
u'\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029'
u'\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e'
u'\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c'
u'\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f'
u'\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822'
u'\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28'
u'\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28'
u'\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a'
u'\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]')
upper = (u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108'
u'\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c'
u'\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130'
u'\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145'
u'\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a'
u'\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e'
u'\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182'
u'\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194'
u'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7'
u'\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc'
u'\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9'
u'\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee'
u'\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204'
u'\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218'
u'\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c'
u'\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246'
u'\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f'
u'\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0'
u'\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7'
u'\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a'
u'\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e'
u'\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a'
u'\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae'
u'\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1'
u'\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6'
u'\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea'
u'\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe'
u'\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512'
u'\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556'
u'\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e'
u'\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22'
u'\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36'
u'\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a'
u'\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e'
u'\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72'
u'\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86'
u'\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2'
u'\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6'
u'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca'
u'\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede'
u'\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2'
u'\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d'
u'\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f'
u'\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb'
u'\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112'
u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133'
u'\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67'
u'\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86'
u'\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a'
u'\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae'
u'\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2'
u'\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6'
u'\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646'
u'\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a'
u'\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682'
u'\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696'
u'\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736'
u'\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a'
u'\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e'
u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
idrest = ur'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
tokens = {
'root': [
# method names
(r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
(ur"'%s" % idrest, Text.Symbol),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(ur'@%s' % idrest, Name.Decorator),
(ur'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
ur'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
ur'lazy|match|new|override|pr(?:ivate|otected)'
ur'|re(?:quires|turn)|s(?:ealed|uper)|'
ur't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|'
u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
(ur':(?!%s)' % op, Keyword, 'type'),
(ur'%s%s\b' % (upper, idrest), Name.Class),
(r'(true|false|null)\b', Keyword.Constant),
(r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
(r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
(r'""".*?"""(?!")', String),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
# (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
# Name.Attribute)),
(idrest, Name),
(r'`[^`]+`', Name),
(r'\[', Operator, 'typeparam'),
(r'[\(\)\{\};,.#]', Operator),
(op, Operator),
(r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(ur'(%s|%s|`[^`]+`)(\s*)(\[)' % (idrest, op),
bygroups(Name.Class, Text, Operator), 'typeparam'),
(r'\s+', Text),
(r'{', Operator, '#pop'),
(r'\(', Operator, '#pop'),
(r'//.*?\n', Comment.Single, '#pop'),
(ur'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
],
'type': [
(r'\s+', Text),
(u'<[%:]|>:|[#_\u21D2]|forSome|type', Keyword),
(r'([,\);}]|=>|=)(\s*)', bygroups(Operator, Text), '#pop'),
(r'[\(\{]', Operator, '#push'),
(ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)(\[)' %
(idrest, op, idrest, op),
bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
(ur'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)$' %
(idrest, op, idrest, op),
bygroups(Keyword.Type, Text), '#pop'),
(r'//.*?\n', Comment.Single, '#pop'),
(ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'typeparam': [
(r'[\s,]+', Text),
(u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword),
(r'([\]\)\}])', Operator, '#pop'),
(r'[\(\[\{]', Operator, '#push'),
(ur'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'comment': [
(r'[^/\*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
'import': [
(ur'(%s|\.)+' % idrest, Name.Namespace, '#pop')
],
}
class GosuLexer(RegexLexer):
"""
For Gosu source code.
*New in Pygments 1.5.*
"""
name = 'Gosu'
aliases = ['gosu']
filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
mimetypes = ['text/x-gosu']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # modifiers etc.
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
(r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
r'index|while|do|continue|break|return|try|catch|finally|this|'
r'throw|new|switch|case|default|eval|super|outer|classpath|'
r'using)\b', Keyword),
(r'(var|delegate|construct|function|private|internal|protected|'
r'public|abstract|override|final|static|extends|transient|'
r'implements|represents|readonly)\b', Keyword.Declaration),
(r'(property\s+)(get|set)?', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
(r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Keyword.Declaration, Text, Name.Class)),
(r'(uses)(\s+)([a-zA-Z0-9_.]+\*?)',
bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'"', String, 'string'),
(r'(\??[\.#])([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Operator, Name.Attribute)),
(r'(:)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
(r'and|or|not|[\\~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
(r'\n', Text)
],
'templateText': [
(r'(\\<)|(\\\$)', String),
(r'(<%@\s+)(extends|params)',
bygroups(Operator, Name.Decorator), 'stringTemplate'),
(r'<%!--.*?--%>', Comment.Multiline),
(r'(<%)|(<%=)', Operator, 'stringTemplate'),
(r'\$\{', Operator, 'stringTemplateShorthand'),
(r'.', String)
],
'string': [
(r'"', String, '#pop'),
include('templateText')
],
'stringTemplate': [
(r'"', String, 'string'),
(r'%>', Operator, '#pop'),
include('root')
],
'stringTemplateShorthand': [
(r'"', String, 'string'),
(r'\{', Operator, 'stringTemplateShorthand'),
(r'\}', Operator, '#pop'),
include('root')
],
}
class GosuTemplateLexer(Lexer):
"""
For Gosu templates.
*New in Pygments 1.5.*
"""
name = 'Gosu Template'
aliases = ['gst']
filenames = ['*.gst']
mimetypes = ['text/x-gosu-template']
lexer = GosuLexer()
def get_tokens_unprocessed(self, text):
stack = ['templateText']
for item in self.lexer.get_tokens_unprocessed(text, stack):
yield item
class GroovyLexer(RegexLexer):
"""
For `Groovy <http://groovy.codehaus.org/>`_ source code.
*New in Pygments 1.5.*
"""
name = 'Groovy'
aliases = ['groovy']
filenames = ['*.groovy']
mimetypes = ['text/x-groovy']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
Keyword),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'\$/((?!/\$).)*/\$', String),
(r'/(\\\\|\\"|[^/])*/', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
(r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
(r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import': [
(r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
],
}
class IokeLexer(RegexLexer):
"""
For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
prototype-based programming language) source.
*New in Pygments 1.4.*
"""
name = 'Ioke'
filenames = ['*.ik']
aliases = ['ioke', 'ik']
mimetypes = ['text/x-iokesrc']
tokens = {
'interpolatableText': [
(r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
(r'#{', Punctuation, 'textInterpolationRoot')
],
'text': [
(r'(?<!\\)"', String, '#pop'),
include('interpolatableText'),
(r'[^"]', String)
],
'documentation': [
(r'(?<!\\)"', String.Doc, '#pop'),
include('interpolatableText'),
(r'[^"]', String.Doc)
],
'textInterpolationRoot': [
(r'}', Punctuation, '#pop'),
include('root')
],
'slashRegexp': [
(r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\/', String.Regex),
(r'[^/]', String.Regex)
],
'squareRegexp': [
(r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\]', String.Regex),
(r'[^\]]', String.Regex)
],
'squareText': [
(r'(?<!\\)]', String, '#pop'),
include('interpolatableText'),
(r'[^\]]', String)
],
'root': [
(r'\n', Text),
(r'\s+', Text),
# Comments
(r';(.*?)\n', Comment),
(r'\A#!(.*?)\n', Comment),
#Regexps
(r'#/', String.Regex, 'slashRegexp'),
(r'#r\[', String.Regex, 'squareRegexp'),
#Symbols
(r':[a-zA-Z0-9_!:?]+', String.Symbol),
(r'[a-zA-Z0-9_!:?]+:(?![a-zA-Z0-9_!?])', String.Other),
(r':"(\\\\|\\"|[^"])*"', String.Symbol),
#Documentation
(r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
#Text
(r'"', String, 'text'),
(r'#\[', String, 'squareText'),
#Mimic
(r'[a-zA-Z0-9_][a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
#Assignment
(r'[a-zA-Z_][a-zA-Z0-9_!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
Name.Variable),
# keywords
(r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
r'with)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
# Origin
(r'(eval|mimic|print|println)(?![a-zA-Z0-9!:_?])', Keyword),
# Base
(r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
r'(?![a-zA-Z0-9!:_?])', Keyword),
# Ground
(r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
#DefaultBehaviour Literals
(r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
#DefaultBehaviour Case
(r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
#DefaultBehaviour Reflection
(r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
r'(?![a-zA-Z0-9!:_?])', Keyword),
#DefaultBehaviour Aspects
(r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
# DefaultBehaviour
(r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
r'(?![a-zA-Z0-9!:_?])', Keyword),
(r'(use|destructuring)', Keyword.Reserved),
#DefaultBehavior BaseBehavior
(r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
r'documentation|identity|removeCell!|undefineCell)'
r'(?![a-zA-Z0-9!:_?])', Keyword),
#DefaultBehavior Internal
(r'(internal:compositeRegexp|internal:concatenateText|'
r'internal:createDecimal|internal:createNumber|'
r'internal:createRegexp|internal:createText)'
r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
#DefaultBehaviour Conditions
(r'(availableRestarts|bind|error\!|findRestart|handle|'
r'invokeRestart|rescue|restart|signal\!|warn\!)'
r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
# constants
(r'(nil|false|true)(?![a-zA-Z0-9!:_?])', Name.Constant),
# names
(r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
r'Conditions|Definitions|FlowControl|Internal|Literals|'
r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
r'System|Text|Tuple)(?![a-zA-Z0-9!:_?])', Name.Builtin),
# functions
(ur'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
ur'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
ur'(?![a-zA-Z0-9!:_?])', Name.Function),
# Numbers
(r'-?0[xX][0-9a-fA-F]+', Number.Hex),
(r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
(r'-?\d+', Number.Integer),
(r'#\(', Punctuation),
# Operators
(ur'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
ur'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
ur'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
ur'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
ur'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
ur'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
ur'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
(r'(and|nand|or|xor|nor|return|import)(?![a-zA-Z0-9_!?])',
Operator),
# Punctuation
(r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|{|})', Punctuation),
#kinds
(r'[A-Z][a-zA-Z0-9_!:?]*', Name.Class),
#default cellnames
(r'[a-z_][a-zA-Z0-9_!:?]*', Name)
]
}
class ClojureLexer(RegexLexer):
"""
Lexer for `Clojure <http://clojure.org/>`_ source code.
*New in Pygments 0.11.*
"""
name = 'Clojure'
aliases = ['clojure', 'clj']
filenames = ['*.clj']
mimetypes = ['text/x-clojure', 'application/x-clojure']
special_forms = [
'.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
]
# It's safe to consider 'ns' a declaration because it defines a new
# namespace.
declarations = [
'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
'defstruct', 'defonce', 'declare', 'definline', 'definterface',
'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
]
builtins = [
'*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
'butlast', 'byte', 'cast', 'char', 'children', 'class',
'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
'vector?', 'when', 'when-first', 'when-let', 'when-not',
'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper']
# valid names for identifiers
# well, names just can't consist entirely of numbers,
# but this should be good enough for now
# TODO / should divide keywords/symbols into namespace/rest
# but that's hard, so just pretend / is part of the name
valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
def _multi_escape(entries):
return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries))
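# A sketch of the helper above: _multi_escape(['def', 'fn']) yields
# '(def |fn )', so each form is matched only when followed by a space.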
tokens = {
'root': [
# the comments - always starting with semicolon
# and going to the end of the line
(r';.*$', Comment.Single),
# whitespaces - usually not relevant
(r'[,\s]+', Text),
# numbers
(r'-?\d+\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
(r'0x-?[abcdef\d]+', Number.Hex),
# strings, symbols and characters
(r'"(\\\\|\\"|[^"])*"', String),
(r"'" + valid_name, String.Symbol),
(r"\\(.|[a-z]+)", String.Char),
# keywords
(r'::?' + valid_name, String.Symbol),
# special operators
(r'~@|[`\'#^~&@]', Operator),
# highlight the special forms
(_multi_escape(special_forms), Keyword),
# Technically, only the special forms are 'keywords'. The problem
# is that only treating them as keywords means that things like
# 'defn' and 'ns' need to be highlighted as builtins. This is ugly
# and weird for most styles. So, as a compromise we're going to
# highlight them as Keyword.Declarations.
(_multi_escape(declarations), Keyword.Declaration),
# highlight the builtins
(_multi_escape(builtins), Name.Builtin),
# the remaining functions
(r'(?<=\()' + valid_name, Name.Function),
# find the remaining variables
(valid_name, Name.Variable),
# Clojure accepts vector notation
(r'(\[|\])', Punctuation),
# Clojure accepts map notation
(r'(\{|\})', Punctuation),
# the famous parentheses!
(r'(\(|\))', Punctuation),
],
}
class TeaLangLexer(RegexLexer):
"""
For `Tea <http://teatrove.org/>`_ source code. Only used within a
TeaTemplateLexer.
*New in Pygments 1.5.*
"""
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
(r'(and|break|else|foreach|if|in|not|or|reverse)\b',
Keyword),
(r'(as|call|define)\b', Keyword.Declaration),
(r'(true|false|null)\b', Keyword.Constant),
(r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r'\'(\\\\|\\\'|[^\'])*\'', String),
(r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
(r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
(r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'template': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import': [
(r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
],
}
class CeylonLexer(RegexLexer):
"""
For `Ceylon <http://ceylon-lang.org/>`_ source code.
*New in Pygments 1.6.*
"""
name = 'Ceylon'
aliases = ['ceylon']
filenames = ['*.ceylon']
mimetypes = ['text/x-ceylon']
flags = re.MULTILINE | re.DOTALL
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_][a-zA-Z0-9_]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'(variable|shared|abstract|doc|by|formal|actual|late|native)',
Name.Decorator),
(r'(break|case|catch|continue|default|else|finally|for|in|'
r'variable|if|return|switch|this|throw|try|while|is|exists|dynamic|'
r'nonempty|then|outer|assert)\b', Keyword),
(r'(abstracts|extends|satisfies|adapts|'
r'super|given|of|out|assign|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(function|value|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface|object|alias)(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
(r'".*``.*``.*"', String.Interpol),
(r'(\.)([a-z_][a-zA-Z0-9_]*)',
bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
(r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
(r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
Number.Float),
(r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
(r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
Number.Float),
(r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
(r'#[0-9a-fA-F]+', Number.Hex),
(r'\$([01]{4})(_[01]{4})+', Number.Integer),
(r'\$[01]+', Number.Integer),
(r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
(r'[0-9]+[kMGTP]?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[A-Za-z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import': [
(r'[a-z][a-zA-Z0-9_.]*',
Name.Namespace, '#pop')
],
}
class KotlinLexer(RegexLexer):
"""
For `Kotlin <http://confluence.jetbrains.net/display/Kotlin/>`_
source code.
Additional options accepted:
`unicodelevel`
Determines which Unicode characters this lexer allows for identifiers.
The possible values are:
* ``none`` -- only the ASCII letters and numbers are allowed. This
is the fastest selection.
* ``basic`` -- all Unicode characters from the specification except
category ``Lo`` are allowed.
* ``full`` -- all Unicode characters as specified in the C# specs
are allowed. Note that this means a considerable slowdown since the
``Lo`` category has more than 40,000 characters in it!
The default value is ``basic``.
*New in Pygments 1.5.*
"""
name = 'Kotlin'
aliases = ['kotlin']
filenames = ['*.kt']
mimetypes = ['text/x-kotlin'] # inferred
flags = re.MULTILINE | re.DOTALL | re.UNICODE
# for the range of allowed unicode characters in identifiers,
# see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
levels = {
'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
'[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
'full': ('@?(?:_|[^' +
uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
}
tokens = {}
token_variants = True
for levelname, cs_ident in levels.items():
tokens[levelname] = {
'root': [
# method names
(r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
r'(' + cs_ident + ')' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
(r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'\n', Text),
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
(r'@"(""|[^"])*"', String),
(r'"(\\\\|\\"|[^"\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'#[ \t]*(if|endif|else|elif|define|undef|'
r'line|error|warning|region|endregion|pragma)\b.*?\n',
Comment.Preproc),
(r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
Keyword)),
(r'(abstract|as|break|catch|'
r'fun|continue|default|delegate|'
r'do|else|enum|extern|false|finally|'
r'fixed|for|goto|if|implicit|in|interface|'
r'internal|is|lock|null|'
r'out|override|private|protected|public|readonly|'
r'ref|return|sealed|sizeof|'
r'when|this|throw|true|try|typeof|'
r'unchecked|unsafe|virtual|void|while|'
r'get|set|new|partial|yield|val|var)\b', Keyword),
(r'(global)(::)', bygroups(Keyword, Punctuation)),
(r'(bool|byte|char|decimal|double|dynamic|float|int|long|'
r'short)\b\??', Keyword.Type),
(r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(package|using)(\s+)', bygroups(Keyword, Text), 'package'),
(cs_ident, Name),
],
'class': [
(cs_ident, Name.Class, '#pop')
],
'package': [
(r'(?=\()', Text, '#pop'), # using (resource)
('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
]
}
def __init__(self, **options):
level = get_choice_opt(options, 'unicodelevel', self.tokens.keys(),
'basic')
if level not in self._all_tokens:
# compile the regexes now
self._tokens = self.__class__.process_tokendef(level)
else:
self._tokens = self._all_tokens[level]
RegexLexer.__init__(self, **options)
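# A minimal usage sketch of the `unicodelevel` option documented above
# (standard Pygments API; the formatter choice is illustrative):
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     code = u'fun main(args : Array<String>) { }'
#     print highlight(code, KotlinLexer(unicodelevel='none'),
#                     TerminalFormatter())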
class XtendLexer(RegexLexer):
"""
For `Xtend <http://xtend-lang.org/>`_ source code.
*New in Pygments 1.6.*
"""
name = 'Xtend'
aliases = ['xtend']
filenames = ['*.xtend']
mimetypes = ['text/x-xtend']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_$][a-zA-Z0-9_$]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
Keyword),
(r'(def|abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r"(''')", String, 'template'),
(ur"(\u00BB)", String, 'template'),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'(\\\\|\\'|[^'])*'", String),
(r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
(r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
(r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import': [
(r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
],
'template': [
(r"'''", String, '#pop'),
(ur"\u00AB", String, '#pop'),
(r'.', String)
],
}
|
aehlke/manabi
|
refs/heads/master
|
manabi/apps/flashcards/migrations/0046_drop_deck_priorities.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11a1 on 2017-02-10 02:43
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('flashcards', '0045_manabi_4k_deck_creation_migration'),
]
operations = [
migrations.RemoveField(
model_name='deck',
name='priority',
),
]
|
blueboxgroup/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/identity/projects/panel.py
|
44
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.identity import dashboard
class Tenants(horizon.Panel):
name = _("Projects")
slug = 'projects'
policy_rules = (("identity", "identity:list_projects"),
("identity", "identity:list_user_projects"))
dashboard.Identity.register(Tenants)
|
OCA/social
|
refs/heads/12.0
|
mass_mailing_partner/models/__init__.py
|
1
|
# Copyright 2015 Pedro M. Baeza <pedro.baeza@tecnativa.com>
# Copyright 2015 Antonio Espinosa <antonio.espinosa@tecnativa.com>
# Copyright 2015 Javier Iniesta <javieria@antiun.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import mail_mass_mailing_contact
from . import res_partner
from . import mail_mass_mailing
from . import mail_mail_statistics
from . import mail_mass_mailing_list_contact_rel
|
sysbot/trigger
|
refs/heads/master
|
tests/acceptance/trigger_acceptance_tests.py
|
13
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
tests/trigger_acceptance_test.py - Acceptance test suite that briefly
verifies trigger functionality.
"""
__author__ = 'Murat Ezbiderli'
__maintainer__ = 'Salesforce'
__copyright__ = 'Copyright 2012-2013 Salesforce Inc.'
__version__ = '2.1'
import os
import unittest
from trigger.netdevices import NetDevices
class NetDevicesTest(unittest.TestCase):
def setUp(self):
self.nd = NetDevices(with_acls=False)
print self.nd.values()
self.nodename = self.nd.keys()[0]
self.nodeobj = self.nd.values()[0]
def testBasics(self):
"""Basic test of NetDevices functionality."""
self.assertEqual(len(self.nd), 3)
self.assertEqual(self.nodeobj.nodeName, self.nodename)
self.assertEqual(self.nodeobj.manufacturer, 'JUNIPER')
def testFind(self):
"""Test the find() method."""
self.assertEqual(self.nd.find(self.nodename), self.nodeobj)
nodebasename = self.nodename[:self.nodename.index('.')]
self.assertEqual(self.nd.find(nodebasename), self.nodeobj)
self.assertRaises(KeyError, lambda: self.nd.find(self.nodename[0:3]))
if __name__ == "__main__":
unittest.main()
|
mdietrichc2c/OCB
|
refs/heads/8.0
|
addons/crm_mass_mailing/__init__.py
|
333
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import mass_mailing
|
mattuuh7/incubator-airflow
|
refs/heads/master
|
tests/contrib/sensors/test_emr_job_flow_sensor.py
|
56
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import datetime
from dateutil.tz import tzlocal
from mock import MagicMock, patch
from airflow import configuration
from airflow.contrib.sensors.emr_job_flow_sensor import EmrJobFlowSensor
DESCRIBE_CLUSTER_RUNNING_RETURN = {
'Cluster': {
'Applications': [
{'Name': 'Spark', 'Version': '1.6.1'}
],
'AutoTerminate': True,
'Configurations': [],
'Ec2InstanceAttributes': {'IamInstanceProfile': 'EMR_EC2_DefaultRole'},
'Id': 'j-27ZY9GBEEU2GU',
'LogUri': 's3n://some-location/',
'Name': 'PiCalc',
'NormalizedInstanceHours': 0,
'ReleaseLabel': 'emr-4.6.0',
'ServiceRole': 'EMR_DefaultRole',
'Status': {
'State': 'STARTING',
'StateChangeReason': {},
'Timeline': {'CreationDateTime': datetime.datetime(2016, 6, 27, 21, 5, 2, 348000, tzinfo=tzlocal())}
},
'Tags': [
{'Key': 'app', 'Value': 'analytics'},
{'Key': 'environment', 'Value': 'development'}
],
'TerminationProtected': False,
'VisibleToAllUsers': True
},
'ResponseMetadata': {
'HTTPStatusCode': 200,
'RequestId': 'd5456308-3caa-11e6-9d46-951401f04e0e'
}
}
DESCRIBE_CLUSTER_TERMINATED_RETURN = {
'Cluster': {
'Applications': [
{'Name': 'Spark', 'Version': '1.6.1'}
],
'AutoTerminate': True,
'Configurations': [],
'Ec2InstanceAttributes': {'IamInstanceProfile': 'EMR_EC2_DefaultRole'},
'Id': 'j-27ZY9GBEEU2GU',
'LogUri': 's3n://some-location/',
'Name': 'PiCalc',
'NormalizedInstanceHours': 0,
'ReleaseLabel': 'emr-4.6.0',
'ServiceRole': 'EMR_DefaultRole',
'Status': {
'State': 'TERMINATED',
'StateChangeReason': {},
'Timeline': {'CreationDateTime': datetime.datetime(2016, 6, 27, 21, 5, 2, 348000, tzinfo=tzlocal())}
},
'Tags': [
{'Key': 'app', 'Value': 'analytics'},
{'Key': 'environment', 'Value': 'development'}
],
'TerminationProtected': False,
'VisibleToAllUsers': True
},
'ResponseMetadata': {
'HTTPStatusCode': 200,
'RequestId': 'd5456308-3caa-11e6-9d46-951401f04e0e'
}
}
class TestEmrJobFlowSensor(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
# Mock out the emr_client (moto has incorrect response)
self.mock_emr_client = MagicMock()
self.mock_emr_client.describe_cluster.side_effect = [
DESCRIBE_CLUSTER_RUNNING_RETURN,
DESCRIBE_CLUSTER_TERMINATED_RETURN
]
# Mock out the emr_client creator
self.boto3_client_mock = MagicMock(return_value=self.mock_emr_client)
def test_execute_calls_with_the_job_flow_id_until_it_reaches_a_terminal_state(self):
with patch('boto3.client', self.boto3_client_mock):
operator = EmrJobFlowSensor(
task_id='test_task',
poke_interval=2,
job_flow_id='j-8989898989',
aws_conn_id='aws_default'
)
operator.execute(None)
# make sure we called twice
self.assertEqual(self.mock_emr_client.describe_cluster.call_count, 2)
# make sure it was called with the job_flow_id
self.mock_emr_client.describe_cluster.assert_called_with(ClusterId='j-8989898989')
if __name__ == '__main__':
unittest.main()
|
mseaborn/rowhammer-test
|
refs/heads/master
|
dram_physaddr_mapping/analyse_addrs.py
|
6
|
# Copyright 2015, Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script examines the physical aggressor/victim addresses
# outputted by rowhammer_test_ext, and it checks whether these
# addresses match a model of how physical addresses are mapped to DRAM
# row/bank/column numbers.
#
# This script explains the resulting addresses from a laptop with a
# Sandy Bridge CPU which has 2 * 4GB SO-DIMMs.
#
# For this laptop, decode-dimms reports:
# Size 4096 MB
# Banks x Rows x Columns x Bits 8 x 15 x 10 x 64
# Ranks 2
def GetResultAddrs(log_filename):
for line in open(log_filename):
if line.startswith('RESULT '):
parts = line[len('RESULT '):].strip('\n').split(',')
if parts[0] == 'PAIR':
yield [int(part, 16) for part in parts[1:]]
def FormatBits(val, bits):
got = []
for bit in xrange(bits - 1, -1, -1):
got.append(str((val >> bit) & 1))
return ''.join(got)
def ExtractBits(val, offset_in_bits, size_in_bits):
return [(val >> offset_in_bits) & ((1 << size_in_bits) - 1),
size_in_bits]
def Convert(phys):
fields = [
('col_lo', ExtractBits(phys, 0, 6)),
('channel', ExtractBits(phys, 6, 1)),
('col_hi', ExtractBits(phys, 7, 7)),
('bank', ExtractBits(phys, 14, 3)),
('rank', ExtractBits(phys, 17, 1)),
('row', ExtractBits(phys, 18, 14)),
]
d = dict(fields)
# The bottom 3 bits of the row number are XOR'd into the bank number.
d['bank'][0] ^= d['row'][0] & 7
return fields
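# Worked example of the field layout above (a sketch; the values can be
# checked against ExtractBits): bit 6 selects the channel, bits 18 and up
# select the row, and the bank number absorbs the low three row bits via
# the XOR in Convert():
#
#     >>> dict(Convert(0x40))['channel']
#     [1, 1]
#     >>> dict(Convert(1 << 18))['row']
#     [1, 14]
#     >>> dict(Convert(1 << 18))['bank']   # 0 XOR (row & 7) == 1
#     [1, 3]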
def Format(fields):
return ' '.join('%s=%s' % (name, FormatBits(val, size))
for name, (val, size) in reversed(fields))
def Main():
count = 0
count_fits = 0
for addrs in GetResultAddrs('bitflip_addrs'):
aggs = addrs[0:2]
victim = addrs[2]
# Sort aggressor addresses by closeness to victim. We assume
# the closest one is the one that causes the victim's bit flip.
aggs.sort(key=lambda agg: abs(victim - agg))
def FormatAddr(name, val):
fmt = Format(Convert(val))
print '\taddr=0x%09x -> %s (%s)' % (val, fmt, name)
print 'result:'
print '\tdiff=%x' % (victim - aggs[0])
FormatAddr('victim', victim)
FormatAddr('aggressor1', aggs[0])
FormatAddr('aggressor2', aggs[1])
# Test hypotheses.
agg1_dict = dict(Convert(aggs[0]))
agg2_dict = dict(Convert(aggs[1]))
victim_dict = dict(Convert(victim))
row_diff = abs(agg1_dict['row'][0] - victim_dict['row'][0])
fits = (agg1_dict['bank'] == victim_dict['bank'] and
agg2_dict['bank'] == victim_dict['bank'] and
row_diff == 1)  # row_diff is an abs() value, so it can never be -1
print '\t' + 'fits=%s' % fits
if agg1_dict['row'][0] & 2 != victim_dict['row'][0] & 2:
print '\t' + 'unusual?'
count += 1
if fits:
count_fits += 1
print "\nSummary: of %i results, %i fit and %i don't fit" % (
count, count_fits, count - count_fits)
if __name__ == '__main__':
Main()
|
dudepare/django
|
refs/heads/master
|
tests/contenttypes_tests/models.py
|
172
|
from __future__ import unicode_literals
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.http import urlquote
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
def get_absolute_url(self):
return '/authors/%s/' % self.id
@python_2_unicode_compatible
class Article(models.Model):
title = models.CharField(max_length=100)
slug = models.SlugField()
author = models.ForeignKey(Author, models.CASCADE)
date_created = models.DateTimeField()
def __str__(self):
return self.title
@python_2_unicode_compatible
class SchemeIncludedURL(models.Model):
url = models.URLField(max_length=100)
def __str__(self):
return self.url
def get_absolute_url(self):
return self.url
class ConcreteModel(models.Model):
name = models.CharField(max_length=10)
class ProxyModel(ConcreteModel):
class Meta:
proxy = True
@python_2_unicode_compatible
class FooWithoutUrl(models.Model):
"""
Fake model not defining ``get_absolute_url`` for
ContentTypesTests.test_shortcut_view_without_get_absolute_url()
"""
name = models.CharField(max_length=30, unique=True)
def __str__(self):
return self.name
class FooWithUrl(FooWithoutUrl):
"""
Fake model defining ``get_absolute_url`` for
ContentTypesTests.test_shortcut_view().
"""
def get_absolute_url(self):
return "/users/%s/" % urlquote(self.name)
class FooWithBrokenAbsoluteUrl(FooWithoutUrl):
"""
Fake model defining a ``get_absolute_url`` method containing an error
"""
def get_absolute_url(self):
return "/users/%s/" % self.unknown_field
class Question(models.Model):
text = models.CharField(max_length=200)
answer_set = GenericRelation('Answer')
@python_2_unicode_compatible
class Answer(models.Model):
text = models.CharField(max_length=200)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
question = GenericForeignKey()
class Meta:
order_with_respect_to = 'question'
def __str__(self):
return self.text
@python_2_unicode_compatible
class Post(models.Model):
"""An ordered tag on an item."""
title = models.CharField(max_length=200)
content_type = models.ForeignKey(ContentType, models.CASCADE, null=True)
object_id = models.PositiveIntegerField(null=True)
parent = GenericForeignKey()
children = GenericRelation('Post')
class Meta:
order_with_respect_to = 'parent'
def __str__(self):
return self.title
|
Gaia3D/QGIS
|
refs/heads/master
|
python/ext-libs/pygments/style.py
|
270
|
# -*- coding: utf-8 -*-
"""
pygments.style
~~~~~~~~~~~~~~
Basic style object.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.token import Token, STANDARD_TYPES
class StyleMeta(type):
def __new__(mcs, name, bases, dct):
obj = type.__new__(mcs, name, bases, dct)
for token in STANDARD_TYPES:
if token not in obj.styles:
obj.styles[token] = ''
def colorformat(text):
if text[0:1] == '#':
col = text[1:]
if len(col) == 6:
return col
elif len(col) == 3:
return col[0]*2 + col[1]*2 + col[2]*2
elif text == '':
return ''
assert False, "wrong color format %r" % text
_styles = obj._styles = {}
for ttype in obj.styles:
for token in ttype.split():
if token in _styles:
continue
ndef = _styles.get(token.parent, None)
styledefs = obj.styles.get(token, '').split()
if not ndef or token is None:
ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
elif 'noinherit' in styledefs and token is not Token:
ndef = _styles[Token][:]
else:
ndef = ndef[:]
_styles[token] = ndef
for styledef in obj.styles.get(token, '').split():
if styledef == 'noinherit':
pass
elif styledef == 'bold':
ndef[1] = 1
elif styledef == 'nobold':
ndef[1] = 0
elif styledef == 'italic':
ndef[2] = 1
elif styledef == 'noitalic':
ndef[2] = 0
elif styledef == 'underline':
ndef[3] = 1
elif styledef == 'nounderline':
ndef[3] = 0
elif styledef[:3] == 'bg:':
ndef[4] = colorformat(styledef[3:])
elif styledef[:7] == 'border:':
ndef[5] = colorformat(styledef[7:])
elif styledef == 'roman':
ndef[6] = 1
elif styledef == 'sans':
ndef[7] = 1
elif styledef == 'mono':
ndef[8] = 1
else:
ndef[0] = colorformat(styledef)
return obj
def style_for_token(cls, token):
t = cls._styles[token]
return {
'color': t[0] or None,
'bold': bool(t[1]),
'italic': bool(t[2]),
'underline': bool(t[3]),
'bgcolor': t[4] or None,
'border': t[5] or None,
'roman': bool(t[6]) or None,
'sans': bool(t[7]) or None,
'mono': bool(t[8]) or None,
}
def list_styles(cls):
return list(cls)
def styles_token(cls, ttype):
return ttype in cls._styles
def __iter__(cls):
for token in cls._styles:
yield token, cls.style_for_token(token)
def __len__(cls):
return len(cls._styles)
class Style(object):
__metaclass__ = StyleMeta
#: overall background color (``None`` means transparent)
background_color = '#ffffff'
#: highlight background color
highlight_color = '#ffffcc'
#: Style definitions for individual token types.
styles = {}
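# A minimal sketch of a concrete style built on this class (token names come
# from pygments.token; the shorthands are the ones parsed by StyleMeta above):
#
#     class ExampleStyle(Style):
#         background_color = '#f8f8f8'
#         styles = {
#             Token.Comment: 'italic #888',
#             Token.Keyword: 'bold #005',
#             Token.String:  'bg:#eee #d14',
#         }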
|
orekyuu/intellij-community
|
refs/heads/master
|
python/testData/refactoring/extractmethod/Parameter.before.py
|
83
|
def foo(a):
<selection>print(a)</selection>
|
paksu/pytelegraf
|
refs/heads/master
|
telegraf/protocol.py
|
1
|
from telegraf.utils import format_string, format_value
class Line(object):
def __init__(self, measurement, values, tags=None, timestamp=None):
assert measurement, "Must have measurement"
assert values not in (None, {}), "Must have values"
# Name of the actual measurement
self.measurement = measurement
# Single value or a dict of value_name: value pairs
self.values = values
# Dict of tags if any
self.tags = tags or {}
# User provided timestamp if any
self.timestamp = timestamp
def get_output_measurement(self):
"""
Formats and escapes measurement name that can be rendered to line protocol
"""
return format_string(self.measurement)
def get_output_values(self):
"""
Return an escaped string of comma separated value_name: value pairs
"""
# Handle primitive values here and implicitly convert them to a dict because
# it allows the API to be simpler.
# Also, InfluxDB mandates that each value has a name, so the default name
# for any non-dict value is "value"
if not isinstance(self.values, dict):
metric_values = {'value': self.values}
else:
metric_values = self.values
# Sort the values lexicographically by value name
sorted_values = sorted(metric_values.items())
# Remove None values
sorted_values = [(k, v) for k, v in sorted_values if v is not None]
return u",".join(u"{0}={1}".format(format_string(k), format_value(v)) for k, v in sorted_values)
def get_output_tags(self):
"""
Return an escaped string of comma separated tag_name: tag_value pairs
Tags should be sorted by key before being sent for best performance. The sort
should match the one performed by the Go bytes.Compare function
(http://golang.org/pkg/bytes/#Compare).
"""
# Sort the tags lexicographically by tag name
sorted_tags = sorted(self.tags.items())
# Finally render, escape and return the tag string
return u",".join(u"{0}={1}".format(format_string(k), format_string(v)) for k, v in sorted_tags)
def get_output_timestamp(self):
"""
Formats timestamp so it can be rendered to line protocol
"""
return " {0}".format(self.timestamp) if self.timestamp else ""
def to_line_protocol(self):
"""
Converts the given metrics as a single line of InfluxDB line protocol
"""
tags = self.get_output_tags()
return u"{0}{1} {2}{3}".format(
self.get_output_measurement(),
"," + tags if tags else '',
self.get_output_values(),
self.get_output_timestamp()
)
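# A minimal usage sketch (the rendered string is illustrative; exact escaping
# is delegated to format_string/format_value from telegraf.utils):
#
#     line = Line('cpu', {'usage_idle': 99.5}, tags={'host': 'server01'},
#                 timestamp=1465839830100400200)
#     line.to_line_protocol()
#     # -> u'cpu,host=server01 usage_idle=99.5 1465839830100400200'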
|
dNG-git/pas_upnp
|
refs/heads/master
|
src/dNG/net/upnp/ssdp_listener_ipv4_multicast.py
|
1
|
# -*- coding: utf-8 -*-
"""
direct PAS
Python Application Services
----------------------------------------------------------------------------
(C) direct Netware Group - All rights reserved
https://www.direct-netware.de/redirect?pas;upnp
The following license agreement remains valid unless any additions or
changes are being made by direct Netware Group in a written form.
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
----------------------------------------------------------------------------
https://www.direct-netware.de/redirect?licenses;gpl
----------------------------------------------------------------------------
#echo(pasUPnPVersion)#
#echo(__FILEPATH__)#
"""
import socket
from dNG.net.server.dispatcher import Dispatcher
from dNG.net.udp_ne_ipv4_socket import UdpNeIpv4Socket
from .ssdp_request import SsdpRequest
class SsdpListenerIpv4Multicast(Dispatcher):
"""
Listener instance receiving IPv4 multicast SSDP messages.
:author: direct Netware Group et al.
:copyright: (C) direct Netware Group - All rights reserved
:package: pas
:subpackage: upnp
:since: v0.2.00
:license: https://www.direct-netware.de/redirect?licenses;gpl
GNU General Public License 2
"""
def __init__(self, ip):
"""
Constructor __init__(SsdpListenerIpv4Multicast)
:param ip: IPv4 address
:since: v0.2.00
"""
self.listener_active = False
"""
True if multicast listener is active
"""
self.listener_ip = ip
"""
Listener IPv4 address
"""
listener_socket = UdpNeIpv4Socket(( "", 1900 ))
Dispatcher.__init__(self, listener_socket, SsdpRequest, 1)
#
def is_listening(self):
"""
Returns true if the listener is active.
:return: (bool) Listener state
:since: v0.2.00
"""
return self.listener_active
#
def run(self):
"""
Run the main loop for this server instance.
:since: v0.2.00
"""
# pylint: disable=broad-except
if (not self.listener_active):
try:
mreq = (socket.inet_pton(socket.AF_INET, "239.255.255.250") + socket.inet_pton(socket.AF_INET, self.listener_ip)
if (hasattr(socket, "inet_pton")) else
socket.inet_aton("239.255.255.250") + socket.inet_aton(self.listener_ip)
)
self.listener_socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
self.listener_active = True
if (self.log_handler is not None): self.log_handler.debug("#echo(__FILEPATH__)# -{0!r}.run()- reporting: Started listening on '{1}'", self, self.listener_ip, context = "pas_upnp")
except Exception as handled_exception:
if (self.log_handler is not None): self.log_handler.debug(handled_exception, context = "pas_upnp")
#
#
Dispatcher.run(self)
#
def stop(self):
"""
Stops the listener and unqueues all running sockets.
:since: v0.2.00
"""
# pylint: disable=broad-except
if (self.listener_active):
try: self.listener_socket.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, socket.inet_pton(socket.AF_INET, "239.255.255.250") + socket.inet_pton(socket.AF_INET, self.listener_ip))
except Exception as handled_exception:
if (self.log_handler is not None): self.log_handler.debug(handled_exception, context = "pas_upnp")
#
self.listener_active = False
#
Dispatcher.stop(self)
#
#
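# A standalone sketch (not part of the pas API) of the socket-level work that
# run() and stop() perform: joining and then leaving the SSDP multicast group
# 239.255.255.250 on the default interface, using only the standard library.
if __name__ == "__main__":
    demo_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    demo_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    demo_socket.bind(( "", 1900 ))
    demo_mreq = socket.inet_aton("239.255.255.250") + socket.inet_aton("0.0.0.0")
    demo_socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, demo_mreq)
    demo_socket.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, demo_mreq)
    demo_socket.close()
#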
|
CentechMTL/TableauDeBord
|
refs/heads/master
|
app/valuePropositionCanvas/models.py
|
2
|
# coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse_lazy
from app.company.models import Company
from django.utils import timezone
# TODO Delete foreign key
VALUE_PROPOSITION_CANVAS_TYPE_CHOICES = (
('Gain', 'Gain'),
('Pain', 'Pain'),
('customerJob', 'customerJob'),
('GainCreator', 'GainCreator'),
('PainReliever', 'PainReliever'),
('ProductAndService', 'ProductAndService'),
)
# Type (e.g. pains, gains)
class ValuePropositionCanvasType(models.Model):
class Meta:
verbose_name = _('Value proposition canvas type')
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
# Elements of canvas
class ValuePropositionCanvasElement(models.Model):
class Meta:
verbose_name = _('Value proposition canvas element')
title = models.CharField(max_length=200)
comment = models.TextField(
blank=True,
max_length=2000
)
date = models.DateTimeField(
auto_now_add=True,
auto_now=False
)
type = models.ForeignKey(
ValuePropositionCanvasType,
verbose_name=_('Type')
)
company = models.ForeignKey(Company)
def __unicode__(self):
return self.title
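# A hedged usage sketch (illustrative only, not part of the original app):
# fetch one canvas column, e.g. every 'Pain' element of a company, newest
# first. Assumes a ValuePropositionCanvasType row named 'Pain' exists.
def example_pain_elements(company):
    return (ValuePropositionCanvasElement.objects
            .filter(company=company, type__name='Pain')
            .order_by('-date'))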
|
cristian-frincu/twitter-bot
|
refs/heads/master
|
twitter_follow_bot.py
|
1
|
# -*- coding: utf-8 -*-
from twitter import Twitter, OAuth, TwitterHTTPError
import os
import random
import time
from creditentials import (
OAUTH_TOKEN,
OAUTH_SECRET,
CONSUMER_KEY,
CONSUMER_SECRET,
TWITTER_HANDLE,
)
# full path and file name of the file used to store the "already followed" user IDs
ALREADY_FOLLOWED_FILE = "already-followed.csv"
MIN_MINUTES_BETWEEN_ROUNDS = 0.2
MAX_MINUTES_BETWEEN_ROUNDS = 10
MIN_TO_FOLLOW_PER_ROUND = 3
MAX_TO_FOLLOW_PER_ROUND = 10
HASHTAGS_TO_FOLLOW = 'followback'
MIN_TO_UNFOLLOW_PER_ROUND = 4
MAX_TO_UNFOLLOW_PER_ROUND = 6  # VALUE * numOfRounds
ROUNDS_TO_START_UNFOLLOW_AFTER = random.randint(1, 5)
t = Twitter(auth=OAuth(OAUTH_TOKEN, OAUTH_SECRET, CONSUMER_KEY, CONSUMER_SECRET), retry=True)
def search_tweets(q, count=100, result_type="recent"):
"""
Returns a list of tweets matching a certain phrase (hashtag, word, etc.)
"""
return t.search.tweets(q=q, result_type=result_type, count=count)
def auto_fav(q, count=100, result_type="recent"):
"""
Favorites tweets that match a certain phrase (hashtag, word, etc.)
"""
result = search_tweets(q, count, result_type)
for tweet in result["statuses"]:
try:
# don't favorite your own tweets
if tweet["user"]["screen_name"] == TWITTER_HANDLE:
continue
result = t.favorites.create(_id=tweet["id"])
print("favorited: %s" % (result["text"].encode("utf-8")))
# when you have already favorited a tweet, this error is thrown
except TwitterHTTPError as e:
print("error: %s" % (str(e)))
def auto_rt(q, count=100, result_type="recent"):
"""
Retweets tweets that match a certain phrase (hashtag, word, etc.)
"""
result = search_tweets(q, count, result_type)
for tweet in result["statuses"]:
try:
# don't retweet your own tweets
if tweet["user"]["screen_name"] == TWITTER_HANDLE:
continue
result = t.statuses.retweet(id=tweet["id"])
print("retweeted: %s" % (result["text"].encode("utf-8")))
# when you have already retweeted a tweet, this error is thrown
except TwitterHTTPError as e:
print("error: %s" % (str(e)))
def get_do_not_follow_list():
"""
Returns list of users the bot has already followed.
"""
# make sure the "already followed" file exists
if not os.path.isfile(ALREADY_FOLLOWED_FILE):
with open(ALREADY_FOLLOWED_FILE, "w") as out_file:
out_file.write("")
# read in the list of user IDs that the bot has already followed in the
# past
do_not_follow = set()
dnf_list = []
with open(ALREADY_FOLLOWED_FILE) as in_file:
for line in in_file:
dnf_list.append(int(line))
do_not_follow.update(set(dnf_list))
del dnf_list
return do_not_follow
def auto_follow(q, count=100, result_type="recent"):
"""
Follows anyone who tweets about a specific phrase (hashtag, word, etc.)
"""
result = search_tweets(q, count, result_type)
following = set(t.friends.ids(screen_name=TWITTER_HANDLE)["ids"])
do_not_follow = get_do_not_follow_list()
for tweet in result["statuses"]:
try:
if (tweet["user"]["screen_name"] != TWITTER_HANDLE and
tweet["user"]["id"] not in following and
tweet["user"]["id"] not in do_not_follow):
t.friendships.create(user_id=tweet["user"]["id"], follow=False)
following.update(set([tweet["user"]["id"]]))
print("followed %s" % (tweet["user"]["screen_name"]))
except TwitterHTTPError as e:
print("error: %s" % (str(e)))
# quit on error unless it's because someone blocked me
if "blocked" not in str(e).lower():
quit()
def auto_follow_followers_for_user(user_screen_name, count=100):
"""
Follows the followers of a user
"""
following = set(t.friends.ids(screen_name=TWITTER_HANDLE)["ids"])
    followers_for_user = set(t.followers.ids(screen_name=user_screen_name)["ids"][:count])
do_not_follow = get_do_not_follow_list()
for user_id in followers_for_user:
try:
if (user_id not in following and
user_id not in do_not_follow):
t.friendships.create(user_id=user_id, follow=False)
print("followed %s" % user_id)
except TwitterHTTPError as e:
print("error: %s" % (str(e)))
def auto_follow_followers():
"""
Follows back everyone who's followed you
"""
following = set(t.friends.ids(screen_name=TWITTER_HANDLE)["ids"])
followers = set(t.followers.ids(screen_name=TWITTER_HANDLE)["ids"])
not_following_back = followers - following
for user_id in not_following_back:
try:
t.friendships.create(user_id=user_id, follow=False)
except Exception as e:
print("error: %s" % (str(e)))
def auto_unfollow_nonfollowers(numberToUnfollow=10):
"""
Unfollows everyone who hasn't followed you back
"""
following = set(t.friends.ids(screen_name=TWITTER_HANDLE)["ids"])
followers = set(t.followers.ids(screen_name=TWITTER_HANDLE)["ids"])
# put user IDs here that you want to keep following even if they don't
# follow you back
users_keep_following = set([])
not_following_back = following - followers
# make sure the "already followed" file exists
if not os.path.isfile(ALREADY_FOLLOWED_FILE):
with open(ALREADY_FOLLOWED_FILE, "w") as out_file:
out_file.write("")
# update the "already followed" file with users who didn't follow back
already_followed = set(not_following_back)
af_list = []
with open(ALREADY_FOLLOWED_FILE) as in_file:
for line in in_file:
af_list.append(int(line))
already_followed.update(set(af_list))
del af_list
with open(ALREADY_FOLLOWED_FILE, "w") as out_file:
for val in already_followed:
out_file.write(str(val) + "\n")
    unfollowed = 0
    for user_id in not_following_back:
        if unfollowed == numberToUnfollow:
            break
        if user_id not in users_keep_following:
            unfollowed += 1
            t.friendships.destroy(user_id=user_id)
            print("unfollowed %d" % (user_id))
def auto_mute_following():
"""
Mutes everyone that you are following
"""
following = set(t.friends.ids(screen_name=TWITTER_HANDLE)["ids"])
muted = set(t.mutes.users.ids(screen_name=TWITTER_HANDLE)["ids"])
not_muted = following - muted
# put user IDs of people you do not want to mute here
users_keep_unmuted = set([])
# mute all
for user_id in not_muted:
if user_id not in users_keep_unmuted:
t.mutes.users.create(user_id=user_id)
print("muted %d" % (user_id))
def auto_unmute():
"""
Unmutes everyone that you have muted
"""
muted = set(t.mutes.users.ids(screen_name=TWITTER_HANDLE)["ids"])
# put user IDs of people you want to remain muted here
users_keep_muted = set([])
# mute all
for user_id in muted:
if user_id not in users_keep_muted:
t.mutes.users.destroy(user_id=user_id)
print("unmuted %d" % (user_id))
# timesRunning = 0
# while True:
#     numberToFollow = random.randint(MIN_TO_FOLLOW_PER_ROUND, MAX_TO_FOLLOW_PER_ROUND)
#     secondsUntilNextWave = random.randint(int(MIN_MINUTES_BETWEEN_ROUNDS * 60), int(MAX_MINUTES_BETWEEN_ROUNDS * 60))
#     print("---Number to follow:", numberToFollow)
#     print("---Wait until next wave:", secondsUntilNextWave)
#     auto_follow(HASHTAGS_TO_FOLLOW, numberToFollow)
#     time.sleep(secondsUntilNextWave)
#     timesRunning += 1
#     print("-------------Rounds Running:" + str(timesRunning) + "-------------------")
#     if timesRunning > ROUNDS_TO_START_UNFOLLOW_AFTER:
#         howManyToUnfollow = random.randint(MIN_TO_UNFOLLOW_PER_ROUND, timesRunning * MIN_TO_UNFOLLOW_PER_ROUND)
#         print("---Start unfollow sequence, unfollowing:", howManyToUnfollow)
#         timesRunning = 0  # reset to zero, or it would keep unfollowing every round past the threshold
#         time.sleep(random.randint(50, 100))
#         auto_unfollow_nonfollowers(howManyToUnfollow)
#         time.sleep(random.randint(10, 45))
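# A minimal guarded entry point (a sketch, not in the original script): run a
# single follow round so that importing this module stays side-effect free.
# Re-enable the loop above for continuous rounds.
if __name__ == "__main__":
    numberToFollow = random.randint(MIN_TO_FOLLOW_PER_ROUND, MAX_TO_FOLLOW_PER_ROUND)
    print("---Number to follow:", numberToFollow)
    auto_follow(HASHTAGS_TO_FOLLOW, numberToFollow)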
|
gurneyalex/purchase-workflow
|
refs/heads/8.0
|
purchase_discount/__init__.py
|
29
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import models
|
bvamanan/ns3
|
refs/heads/master
|
src/topology-read/bindings/modulegen__gcc_LP64.py
|
22
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.topology_read', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## topology-reader-helper.h (module 'topology-read'): ns3::TopologyReaderHelper [class]
module.add_class('TopologyReaderHelper')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## topology-reader.h (module 'topology-read'): ns3::TopologyReader [class]
module.add_class('TopologyReader', parent=root_module['ns3::Object'])
## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link [class]
module.add_class('Link', outer_class=root_module['ns3::TopologyReader'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## inet-topology-reader.h (module 'topology-read'): ns3::InetTopologyReader [class]
module.add_class('InetTopologyReader', parent=root_module['ns3::TopologyReader'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
## orbis-topology-reader.h (module 'topology-read'): ns3::OrbisTopologyReader [class]
module.add_class('OrbisTopologyReader', parent=root_module['ns3::TopologyReader'])
## rocketfuel-topology-reader.h (module 'topology-read'): ns3::RocketfuelTopologyReader [class]
module.add_class('RocketfuelTopologyReader', parent=root_module['ns3::TopologyReader'])
## nstime.h (module 'core'): ns3::TimeChecker [class]
module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_container('std::map< std::string, std::string >', ('std::string', 'std::string'), container_type='map')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TopologyReaderHelper_methods(root_module, root_module['ns3::TopologyReaderHelper'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TopologyReader_methods(root_module, root_module['ns3::TopologyReader'])
register_Ns3TopologyReaderLink_methods(root_module, root_module['ns3::TopologyReader::Link'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3InetTopologyReader_methods(root_module, root_module['ns3::InetTopologyReader'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3Node_methods(root_module, root_module['ns3::Node'])
register_Ns3OrbisTopologyReader_methods(root_module, root_module['ns3::OrbisTopologyReader'])
register_Ns3RocketfuelTopologyReader_methods(root_module, root_module['ns3::RocketfuelTopologyReader'])
register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
return
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
cls.add_method('GetIpv4MappedAddress',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() [member function]
cls.add_method('IsIpv4MappedAddress',
'bool',
[])
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
cls.add_method('MakeIpv4MappedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv4Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3NodeContainer_methods(root_module, cls):
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer() [constructor]
cls.add_constructor([])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::Ptr<ns3::Node> node) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(std::string nodeName) [constructor]
cls.add_constructor([param('std::string', 'nodeName')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d, ns3::NodeContainer const & e) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::NodeContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NodeContainer', 'other')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(std::string nodeName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'nodeName')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n')])
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n, uint32_t systemId) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n'), param('uint32_t', 'systemId')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NodeContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::Node >',
[param('uint32_t', 'i')],
is_const=True)
## node-container.h (module 'network'): static ns3::NodeContainer ns3::NodeContainer::GetGlobal() [member function]
cls.add_method('GetGlobal',
'ns3::NodeContainer',
[],
is_static=True)
## node-container.h (module 'network'): uint32_t ns3::NodeContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
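## Editorial usage sketch (hedged): once these bindings are compiled, the
## NodeContainer API registered above is reachable from Python roughly as
## follows; the 'ns.network' module path is an assumption of the build layout.
##   import ns.network
##   nodes = ns.network.NodeContainer()
##   nodes.Create(4)            # construct four fresh ns3::Node objects
##   first = nodes.Get(0)       # -> ns3::Ptr<ns3::Node>
##   more = ns.network.NodeContainer()
##   more.Create(2)
##   nodes.Add(more)            # concatenate the two containers
##   assert nodes.GetN() == 6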
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
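## Editorial usage sketch (hedged): the accessors registered above are
## inherited by every ns3::Object subclass; the attribute and trace-source
## names below are placeholders, not guaranteed to exist on any given type.
##   obj.SetAttribute("SomeAttribute", ns.core.StringValue("value"))
##   ok = obj.SetAttributeFailSafe("SomeAttribute", ns.core.StringValue("v"))
##   obj.TraceConnectWithoutContext("SomeTraceSource", python_callback)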
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
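## Editorial usage sketch (hedged): TagBuffer reads must mirror the writes in
## order and width; buffers are normally handed to a tag's Serialize and
## Deserialize by the packet machinery rather than constructed by hand.
##   # inside MyTag.Serialize(buf):
##   buf.WriteU32(1234)
##   buf.WriteDouble(0.5)
##   # inside MyTag.Deserialize(buf), over the same bytes:
##   v = buf.ReadU32()          # -> 1234
##   d = buf.ReadDouble()       # -> 0.5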
def register_Ns3TopologyReaderHelper_methods(root_module, cls):
## topology-reader-helper.h (module 'topology-read'): ns3::TopologyReaderHelper::TopologyReaderHelper(ns3::TopologyReaderHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TopologyReaderHelper const &', 'arg0')])
## topology-reader-helper.h (module 'topology-read'): ns3::TopologyReaderHelper::TopologyReaderHelper() [constructor]
cls.add_constructor([])
## topology-reader-helper.h (module 'topology-read'): ns3::Ptr<ns3::TopologyReader> ns3::TopologyReaderHelper::GetTopologyReader() [member function]
cls.add_method('GetTopologyReader',
'ns3::Ptr< ns3::TopologyReader >',
[])
## topology-reader-helper.h (module 'topology-read'): void ns3::TopologyReaderHelper::SetFileName(std::string const fileName) [member function]
cls.add_method('SetFileName',
'void',
[param('std::string const', 'fileName')])
## topology-reader-helper.h (module 'topology-read'): void ns3::TopologyReaderHelper::SetFileType(std::string const fileType) [member function]
cls.add_method('SetFileType',
'void',
[param('std::string const', 'fileType')])
return
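## Editorial usage sketch (hedged): the helper picks a concrete reader from a
## file-type string; 'ns.topology_read' and "topo.txt" are illustrative.
##   import ns.topology_read
##   helper = ns.topology_read.TopologyReaderHelper()
##   helper.SetFileName("topo.txt")
##   helper.SetFileType("Inet")     # supported types: Inet, Orbis, Rocketfuel
##   reader = helper.GetTopologyReader()
##   nodes = reader.Read()          # -> ns3::NodeContainer of created nodes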
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
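## Editorial usage sketch (hedged): TypeId carries the run-time metadata used
## by the attribute system; "ns3::Node" is merely an example type name.
##   tid = ns.core.TypeId.LookupByName("ns3::Node")
##   for i in range(tid.GetAttributeN()):
##       print(tid.GetAttributeFullName(i))   # e.g. "ns3::Node::Id"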
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
return
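## Editorial usage sketch (hedged): int64x64_t is ns-3's high-precision scalar
## (backed by a double in this int64x64-double.h build); the operators above
## make it usable with ordinary arithmetic. Exposed Python name assumed as-is.
##   a = ns.core.int64x64_t(1.5)
##   b = a * 2                      # binary '*' against a plain integer
##   b.GetDouble()                  # -> 3.0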
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Start() [member function]
cls.add_method('Start',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
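## Editorial usage sketch (hedged): AggregateObject links objects so each can
## be reached from the others; the aggregate is walked with the iterator
## registered next.
##   a.AggregateObject(b)
##   it = a.GetAggregateIterator()
##   while it.HasNext():
##       member = it.Next()         # -> ns3::Ptr<ns3::Object const>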
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static void ns3::Time::FreezeResolution() [member function]
cls.add_method('FreezeResolution',
'void',
[],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
return
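## Editorial usage sketch (hedged): Time parses unit-suffixed strings via the
## std::string constructor registered above and converts between units.
##   t = ns.core.Time("2ms")
##   t.GetMicroSeconds()            # -> 2000
##   t.IsStrictlyPositive()         # -> True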
def register_Ns3TopologyReader_methods(root_module, cls):
## topology-reader.h (module 'topology-read'): ns3::TopologyReader::TopologyReader() [constructor]
cls.add_constructor([])
## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::AddLink(ns3::TopologyReader::Link link) [member function]
cls.add_method('AddLink',
'void',
[param('ns3::TopologyReader::Link', 'link')])
## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::GetFileName() const [member function]
cls.add_method('GetFileName',
'std::string',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::TopologyReader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## topology-reader.h (module 'topology-read'): std::_List_const_iterator<ns3::TopologyReader::Link> ns3::TopologyReader::LinksBegin() const [member function]
cls.add_method('LinksBegin',
'std::_List_const_iterator< ns3::TopologyReader::Link >',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): bool ns3::TopologyReader::LinksEmpty() const [member function]
cls.add_method('LinksEmpty',
'bool',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): std::_List_const_iterator<ns3::TopologyReader::Link> ns3::TopologyReader::LinksEnd() const [member function]
cls.add_method('LinksEnd',
'std::_List_const_iterator< ns3::TopologyReader::Link >',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): int ns3::TopologyReader::LinksSize() const [member function]
cls.add_method('LinksSize',
'int',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::TopologyReader::Read() [member function]
cls.add_method('Read',
'ns3::NodeContainer',
[],
is_pure_virtual=True, is_virtual=True)
## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::SetFileName(std::string const & fileName) [member function]
cls.add_method('SetFileName',
'void',
[param('std::string const &', 'fileName')])
return
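## Editorial usage sketch (hedged): TopologyReader is abstract (Read() is pure
## virtual); a concrete reader parses the file, creates the nodes, and records
## one Link per edge.
##   nodes = reader.Read()
##   print(reader.LinksSize())      # number of parsed Link records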
def register_Ns3TopologyReaderLink_methods(root_module, cls):
## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::Link(ns3::TopologyReader::Link const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TopologyReader::Link const &', 'arg0')])
## topology-reader.h (module 'topology-read'): ns3::TopologyReader::Link::Link(ns3::Ptr<ns3::Node> fromPtr, std::string const & fromName, ns3::Ptr<ns3::Node> toPtr, std::string const & toName) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'fromPtr'), param('std::string const &', 'fromName'), param('ns3::Ptr< ns3::Node >', 'toPtr'), param('std::string const &', 'toName')])
## topology-reader.h (module 'topology-read'): std::_Rb_tree_const_iterator<std::pair<const std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > > > ns3::TopologyReader::Link::AttributesBegin() [member function]
cls.add_method('AttributesBegin',
'std::_Rb_tree_const_iterator< std::pair< std::basic_string< char, std::char_traits< char >, std::allocator< char > > const, std::basic_string< char, std::char_traits< char >, std::allocator< char > > > >',
[])
## topology-reader.h (module 'topology-read'): std::_Rb_tree_const_iterator<std::pair<const std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > > > ns3::TopologyReader::Link::AttributesEnd() [member function]
cls.add_method('AttributesEnd',
'std::_Rb_tree_const_iterator< std::pair< std::basic_string< char, std::char_traits< char >, std::allocator< char > > const, std::basic_string< char, std::char_traits< char >, std::allocator< char > > > >',
[])
## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetAttribute(std::string const & name) const [member function]
cls.add_method('GetAttribute',
'std::string',
[param('std::string const &', 'name')],
is_const=True)
## topology-reader.h (module 'topology-read'): bool ns3::TopologyReader::Link::GetAttributeFailSafe(std::string const & name, std::string & value) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string const &', 'name'), param('std::string &', 'value')],
is_const=True)
## topology-reader.h (module 'topology-read'): ns3::Ptr<ns3::Node> ns3::TopologyReader::Link::GetFromNode() const [member function]
cls.add_method('GetFromNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetFromNodeName() const [member function]
cls.add_method('GetFromNodeName',
'std::string',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): ns3::Ptr<ns3::Node> ns3::TopologyReader::Link::GetToNode() const [member function]
cls.add_method('GetToNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): std::string ns3::TopologyReader::Link::GetToNodeName() const [member function]
cls.add_method('GetToNodeName',
'std::string',
[],
is_const=True)
## topology-reader.h (module 'topology-read'): void ns3::TopologyReader::Link::SetAttribute(std::string const & name, std::string const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string const &', 'name'), param('std::string const &', 'value')])
return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3InetTopologyReader_methods(root_module, cls):
## inet-topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::InetTopologyReader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## inet-topology-reader.h (module 'topology-read'): ns3::InetTopologyReader::InetTopologyReader() [constructor]
cls.add_constructor([])
## inet-topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::InetTopologyReader::Read() [member function]
cls.add_method('Read',
'ns3::NodeContainer',
[],
is_virtual=True)
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Node_methods(root_module, cls):
## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Node const &', 'arg0')])
## node.h (module 'network'): ns3::Node::Node() [constructor]
cls.add_constructor([])
## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
cls.add_constructor([param('uint32_t', 'systemId')])
## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
cls.add_method('AddApplication',
'uint32_t',
[param('ns3::Ptr< ns3::Application >', 'application')])
## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddDevice',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
cls.add_method('ChecksumEnabled',
'bool',
[],
is_static=True)
## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
cls.add_method('GetApplication',
'ns3::Ptr< ns3::Application >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
cls.add_method('GetNApplications',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('RegisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
cls.add_method('RegisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('UnregisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
cls.add_method('UnregisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## node.h (module 'network'): void ns3::Node::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3OrbisTopologyReader_methods(root_module, cls):
## orbis-topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::OrbisTopologyReader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## orbis-topology-reader.h (module 'topology-read'): ns3::OrbisTopologyReader::OrbisTopologyReader() [constructor]
cls.add_constructor([])
## orbis-topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::OrbisTopologyReader::Read() [member function]
cls.add_method('Read',
'ns3::NodeContainer',
[],
is_virtual=True)
return
def register_Ns3RocketfuelTopologyReader_methods(root_module, cls):
## rocketfuel-topology-reader.h (module 'topology-read'): static ns3::TypeId ns3::RocketfuelTopologyReader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## rocketfuel-topology-reader.h (module 'topology-read'): ns3::RocketfuelTopologyReader::RocketfuelTopologyReader() [constructor]
cls.add_constructor([])
## rocketfuel-topology-reader.h (module 'topology-read'): ns3::NodeContainer ns3::RocketfuelTopologyReader::Read() [member function]
cls.add_method('Read',
'ns3::NodeContainer',
[],
is_virtual=True)
return
def register_Ns3TimeChecker_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker(ns3::TimeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')])
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
def register_functions(root_module):
module = root_module
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
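# Entry point: module_init() builds the root module description, the
# register_* functions above populate it with types, methods and free
# functions, and generate() writes the resulting binding code to stdout.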
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
|
nathangeffen/tbonline-old
|
refs/heads/master
|
tbonlineproject/tweets/templatetags/recover_tweets.py
|
2
|
from django import template
from django.template import Context
import django.template.loader  # imported for its side effect: makes template.loader available below
import tweepy
from django.conf import settings
from tweets.models import TwitterUsername
register = template.Library()
def do_recover_tweets(parser, token):
    # split_contents() never raises ValueError by itself, so check the
    # number of tokens explicitly.
    bits = token.split_contents()
    if len(bits) != 1:
        raise template.TemplateSyntaxError('%r tag requires zero arguments' % bits[0])
    return RecoverTweetsNode()
class RecoverTweetsNode(template.Node):
def render(self, context):
try:
if settings.TWEETS_ACTIVATED:
t = template.loader.get_template('tweets_div.html')
accounts = TwitterUsername.objects.all()
tweets = []
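                # For each account, build a Twitter search query of the form
                # "from:<username> AND #tag1 OR #tag2 ..."; an account with an
                # empty hashtag falls back to the plain "from:" query.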
for account in accounts:
base_query_string = 'from:%s' % (account.username)
hashtags = account.hashtag_set.all()
if len(hashtags) > 0:
query_string = '%s AND ' % (base_query_string)
for index, hashtag in enumerate(hashtags):
if hashtag.tag == '':
query_string = base_query_string
break
if index != len(hashtags) - 1:
query_string = '%s #%s OR ' % (query_string, hashtag.tag)
else:
query_string = '%s #%s' % (query_string, hashtag.tag)
                        base_query_string = query_string  # final query string
tweets_from_user = tweepy.api.search(q=base_query_string, rpp=100)
tweets.extend(tweets_from_user)
tweets = sorted(tweets, key=lambda tweet: tweet.created_at, reverse=True)
return t.render(Context({'tweets': tweets}, autoescape=context.autoescape))
else:
return ''
except template.VariableDoesNotExist:
return ''
register.tag('recover_tweets', do_recover_tweets)
|
MonamAgarwal/final
|
refs/heads/master
|
GTG/gtk/backends_dialog/addpanel.py
|
3
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from gi.repository import Gtk
from GTG.gtk.backends_dialog.backendscombo import BackendsCombo
from GTG.backends import BackendFactory
from GTG import _, ngettext
from functools import reduce
class AddPanel(Gtk.Box):
'''
A vertical Box filled with gtk widgets to let the user choose a new
backend.
'''
def __init__(self, backends_dialog):
'''
Constructor, just initializes the gtk widgets
@param backends_dialog: a reference to the dialog in which this is
loaded
'''
super(AddPanel, self).__init__(orientation=Gtk.Orientation.VERTICAL)
self.dialog = backends_dialog
self._create_widgets()
def _create_widgets(self):
'''
gtk widgets initialization
'''
# Division of the available space in three segments:
# top, middle and bottom.
top = Gtk.Box()
top.set_spacing(6)
middle = Gtk.Box()
bottom = Gtk.Box()
self._fill_top_box(top)
self._fill_middle_box(middle)
self._fill_bottom_box(bottom)
self.pack_start(top, False, True, 0)
self.pack_start(middle, True, True, 0)
self.pack_start(bottom, True, True, 0)
self.set_border_width(12)
def _fill_top_box(self, box):
'''
        Helper function to fill a box with a combobox that lists the
        available backends, plus a Gtk.Label.
@param box: the Gtk.Box to fill
'''
label = Gtk.Label(label=_("Select synchronization service:"))
label.set_alignment(0, 0.5)
self.combo_types = BackendsCombo(self.dialog)
#FIXME
#self.combo_types.get_child().connect('changed', self.on_combo_changed)
self.combo_types.connect('changed', self.on_combo_changed)
box.pack_start(label, False, True, 0)
box.pack_start(self.combo_types, False, True, 0)
def _fill_middle_box(self, box):
'''
        Helper function to fill a box with a label describing the backend
and a Gtk.Image (that loads the backend image)
@param box: the Gtk.Box to fill
'''
self.label_name = Gtk.Label(label="name")
self.label_name.set_alignment(xalign=0.5, yalign=1)
self.label_description = Gtk.Label()
self.label_description.set_justify(Gtk.Justification.FILL)
self.label_description.set_line_wrap(True)
self.label_description.set_size_request(300, -1)
self.label_description.set_alignment(xalign=0, yalign=0.5)
self.label_author = Gtk.Label(label="")
self.label_author.set_line_wrap(True)
self.label_author.set_alignment(xalign=0, yalign=0)
self.label_modules = Gtk.Label(label="")
self.label_modules.set_line_wrap(True)
self.label_modules.set_alignment(xalign=0, yalign=0)
self.image_icon = Gtk.Image()
self.image_icon.set_size_request(128, 128)
align_image = Gtk.Alignment.new(1, 0, 0, 0)
align_image.add(self.image_icon)
labels_vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
labels_vbox.pack_start(self.label_description, True, True, 10)
labels_vbox.pack_start(self.label_author, True, True, 0)
labels_vbox.pack_start(self.label_modules, True, True, 0)
low_box = Gtk.Box()
low_box.pack_start(labels_vbox, True, True, 0)
low_box.pack_start(align_image, True, True, 0)
vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
vbox.pack_start(self.label_name, True, True, 0)
vbox.pack_start(low_box, True, True, 0)
box.pack_start(vbox, True, True, 0)
def _fill_bottom_box(self, box):
'''
        Helper function to fill a box with a button box featuring OK and
        cancel buttons.
@param box: the Gtk.Box to fill
'''
cancel_button = Gtk.Button(stock=Gtk.STOCK_CANCEL)
cancel_button.connect('clicked', self.on_cancel)
self.ok_button = Gtk.Button(stock=Gtk.STOCK_OK)
self.ok_button.connect('clicked', self.on_confirm)
align = Gtk.Alignment.new(0.5, 1, 1, 0)
align.set_padding(0, 10, 0, 0)
buttonbox = Gtk.ButtonBox()
buttonbox.set_layout(Gtk.ButtonBoxStyle.EDGE)
buttonbox.add(cancel_button)
buttonbox.set_child_secondary(cancel_button, False)
buttonbox.add(self.ok_button)
align.add(buttonbox)
box.pack_start(align, True, True, 0)
def refresh_backends(self):
'''Populates the combo box containing the available backends'''
self.combo_types.refresh()
def on_confirm(self, widget=None):
'''
Notifies the dialog holding this Box that a backend has been
chosen
@param widget: just to make this function usable as a signal callback.
Not used.
'''
backend_name = self.combo_types.get_selected()
self.dialog.on_backend_added(backend_name)
def on_cancel(self, widget=None):
'''
Aborts the addition of a new backend. Shows the configuration panel
previously loaded.
@param widget: just to make this function usable as a signal callback.
Not used.
'''
self.dialog.show_config_for_backend(None)
def on_combo_changed(self, widget=None):
'''
Updates the backend description and icon.
@param widget: just to make this function usable as a signal callback.
Not used.
'''
backend_name = self.combo_types.get_selected()
if backend_name is None:
return
backend = BackendFactory().get_backend(backend_name)
self.label_description.set_markup(backend.Backend.get_description())
markup = '<big><big><big><b>%s</b></big></big></big>' % \
backend.Backend.get_human_default_name()
self.label_name.set_markup(markup)
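        # Render the author list as "Author(s):" followed by one
        # " - name" entry per line.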
authors = backend.Backend.get_authors()
author_txt = '<b>%s</b>:\n - %s' % \
(ngettext("Author", "Authors", len(authors)),
reduce(lambda a, b: a + "\n" + " - " + b, authors))
self.label_author.set_markup(author_txt)
pixbuf = self.dialog.get_pixbuf_from_icon_name(backend_name, 128)
self.image_icon.set_from_pixbuf(pixbuf)
self.show_all()
|
nwchandler/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/aos/aos_logical_device_map.py
|
78
|
#!/usr/bin/python
#
# (c) 2017 Apstra Inc, <community@apstra.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aos_logical_device_map
author: Damien Garros (@dgarros)
version_added: "2.3"
short_description: Manage AOS Logical Device Map
description:
  - Apstra AOS Logical Device Map module lets you manage your Logical Device Maps easily.
    You can create and delete Logical Device Maps by name, by ID or by using a JSON file.
    This module is idempotent and supports the I(check) mode. It uses the AOS REST API.
requirements:
- "aos-pyez >= 0.6.0"
options:
session:
description:
- An existing AOS session as obtained by M(aos_login) module.
required: true
name:
description:
- Name of the Logical Device Map to manage.
Only one of I(name), I(id) or I(content) can be set.
id:
description:
- AOS Id of the Logical Device Map to manage (can't be used to create a new Logical Device Map),
Only one of I(name), I(id) or I(content) can be set.
content:
description:
- Datastructure of the Logical Device Map to manage. The data can be in YAML / JSON or
directly a variable. It's the same datastructure that is returned
on success in I(value). Only one of I(name), I(id) or I(content) can be set.
state:
description:
      - Indicates the expected state of the Logical Device Map (present or absent).
default: present
choices: ['present', 'absent']
'''
EXAMPLES = '''
- name: "Create an Logical Device Map with one subnet"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: present
- name: "Create an Logical Device Map with multiple subnets"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-other-logical-device-map"
state: present
- name: "Check if an Logical Device Map exist with same subnets by ID"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "45ab26fc-c2ed-4307-b330-0870488fa13e"
state: present
- name: "Delete an Logical Device Map by name"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: absent
- name: "Delete an Logical Device Map by id"
aos_logical_device_map:
session: "{{ aos_session }}"
id: "45ab26fc-c2ed-4307-b330-0870488fa13e"
state: absent
# Save a Logical Device Map to a file
- name: "Access Logical Device Map 1/3"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: present
register: logical_device_map
- name: "Save Logical Device Map into a file in JSON 2/3"
copy:
content: "{{ logical_device_map.value | to_nice_json }}"
dest: logical_device_map_saved.json
- name: "Save Logical Device Map into a file in YAML 3/3"
copy:
content: "{{ logical_device_map.value | to_nice_yaml }}"
dest: logical_device_map_saved.yaml
- name: "Load Logical Device Map from a JSON file"
aos_logical_device_map:
session: "{{ aos_session }}"
content: "{{ lookup('file', 'resources/logical_device_map_saved.json') }}"
state: present
- name: "Load Logical Device Map from a YAML file"
aos_logical_device_map:
session: "{{ aos_session }}"
content: "{{ lookup('file', 'resources/logical_device_map_saved.yaml') }}"
state: present
'''
RETURN = '''
name:
description: Name of the Logical Device Map
returned: always
type: str
sample: Server-IpAddrs
id:
description: AOS unique ID assigned to the Logical Device Map
returned: always
type: str
sample: fcc4ac1c-e249-4fe7-b458-2138bfb44c06
value:
description: Value of the object as returned by the AOS Server
returned: always
type: dict
sample: {'...'}
'''
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.aos import get_aos_session, find_collection_item, do_load_resource, check_aos_version, content_to_dict
#########################################################
# State Processing
#########################################################
def logical_device_map_absent(module, aos, my_log_dev_map):
margs = module.params
    # If the Logical Device Map does not exist, return immediately
if my_log_dev_map.exists is False:
module.exit_json(changed=False, name=margs['name'], id='', value={})
# If not in check mode, delete Logical Device Map
if not module.check_mode:
try:
# Need to wait for 1sec before a delete to workaround a current
# limitation in AOS
time.sleep(1)
my_log_dev_map.delete()
        except Exception:
            module.fail_json(msg="An error occurred while trying to delete the Logical Device Map")
module.exit_json( changed=True,
name=my_log_dev_map.name,
id=my_log_dev_map.id,
value={} )
def logical_device_map_present(module, aos, my_log_dev_map):
margs = module.params
# if content is defined, create object from Content
if margs['content'] is not None:
if 'display_name' in module.params['content'].keys():
do_load_resource(module, aos.LogicalDeviceMaps, module.params['content']['display_name'])
else:
module.fail_json(msg="Unable to find display_name in 'content', Mandatory")
# if my_log_dev_map doesn't exist already, create a new one
if my_log_dev_map.exists is False and 'content' not in margs.keys():
module.fail_json(msg="'Content' is mandatory for module that don't exist currently")
module.exit_json( changed=False,
name=my_log_dev_map.name,
id=my_log_dev_map.id,
value=my_log_dev_map.value )
#########################################################
# Main Function
#########################################################
def logical_device_map(module):
margs = module.params
try:
aos = get_aos_session(module, margs['session'])
    except Exception:
module.fail_json(msg="Unable to login to the AOS server")
item_name = False
item_id = False
if margs['content'] is not None:
content = content_to_dict(module, margs['content'] )
if 'display_name' in content.keys():
item_name = content['display_name']
else:
module.fail_json(msg="Unable to extract 'display_name' from 'content'")
elif margs['name'] is not None:
item_name = margs['name']
elif margs['id'] is not None:
item_id = margs['id']
#----------------------------------------------------
# Find Object if available based on ID or Name
#----------------------------------------------------
try:
my_log_dev_map = find_collection_item(aos.LogicalDeviceMaps,
item_name=item_name,
item_id=item_id)
    except Exception:
module.fail_json(msg="Unable to find the Logical Device Map based on name or ID, something went wrong")
#----------------------------------------------------
# Proceed based on State value
#----------------------------------------------------
if margs['state'] == 'absent':
logical_device_map_absent(module, aos, my_log_dev_map)
elif margs['state'] == 'present':
logical_device_map_present(module, aos, my_log_dev_map)
def main():
module = AnsibleModule(
argument_spec=dict(
session=dict(required=True, type="dict"),
name=dict(required=False ),
id=dict(required=False ),
content=dict(required=False, type="json"),
state=dict( required=False,
choices=['present', 'absent'],
default="present")
),
        mutually_exclusive=[('name', 'id', 'content')],
required_one_of=[('name', 'id', 'content')],
supports_check_mode=True
)
# Check if aos-pyez is present and match the minimum version
check_aos_version(module, '0.6.0')
logical_device_map(module)
if __name__ == "__main__":
main()
|
ivanbusthomi/inasafe
|
refs/heads/develop
|
safe/definitions/styles.py
|
2
|
# coding=utf-8
"""Styles and colors which are used in InaSAFE."""
from PyQt4.QtGui import QColor
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
transparent = QColor(0, 0, 0, 0)
# Hazard classes as specified in reporting standards:
# https://github.com/inasafe/inasafe/issues/2920#issuecomment-229874044
grey = QColor('#8C8C8C') # Used for not exposed.
green = QColor('#1A9641')
light_green = QColor('#A6D96A')
yellow = QColor('#FFFFB2')
orange = QColor('#FEB24C')
red = QColor('#F03B20')
dark_red = QColor('#BD0026')
very_dark_red = QColor('#710017')
# Colors for each MMI level.
MMI_1 = QColor('#ffffff')
MMI_2 = QColor('#209fff')
MMI_3 = QColor('#00cfff')
MMI_4 = QColor('#55ffff')
MMI_5 = QColor('#aaffff')
MMI_6 = QColor('#fff000')
MMI_7 = QColor('#ffa800')
MMI_8 = QColor('#ff7000')
MMI_9 = QColor('#ff0000')
MMI_10 = QColor('#dd0000')
MMI_11 = QColor('#880000')
MMI_12 = QColor('#440000')
# Colors used in reporting.
affected_column_background = QColor('#fff8e9')
charcoal_black = QColor('#36454F')
# Map legend templates according to standards :
# https://github.com/inasafe/inasafe/issues/3653#issuecomment-275011957
# The space between count and exposure_unit will be automatically added if
# the unit is not empty.
template_without_thresholds = u'{name} ({count}{exposure_unit})'
template_with_maximum_thresholds = (
u'{name} <= {max} {hazard_unit} ({count}{exposure_unit})')
template_with_minimum_thresholds = (
u'{name} > {min} {hazard_unit} ({count}{exposure_unit})')
template_with_range_thresholds = (
u'{name} > {min} - {max} {hazard_unit} ({count}{exposure_unit})')
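# As an illustration (values hypothetical), with name='High', min=0.7,
# max=1.5, hazard_unit='m', count=124 and exposure_unit='buildings',
# template_with_range_thresholds would render as
# u'High > 0.7 - 1.5 m (124 buildings)'.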
# Default line width for exposure
line_width_exposure = 0.66
# Rubber bands
user_analysis_color = QColor(0, 0, 255, 100) # Blue
user_analysis_width = 2
next_analysis_color = QColor(0, 255, 0, 100) # Green
next_analysis_width = 10
last_analysis_color = QColor(255, 0, 0, 100) # Red
last_analysis_width = 5
# Availability option colors in the wizard
available_option_color = QColor(120, 255, 120)
unavailable_option_color = QColor(220, 220, 220)
# Aggregation layer
aggregation_width = '1.0'
aggregation_color = QColor(255, 125, 125)
# Analysis layer
analysis_width = '2.0'
analysis_color = QColor(255, 0, 0)
|
awduda/awduda.github.io
|
refs/heads/master
|
venv/lib/python2.7/site-packages/wheel/test/test_tool.py
|
327
|
from .. import tool
def test_keygen():
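    # Exercise tool.keygen against in-memory stand-ins for WheelKeys and the
    # keyring backend, so the test neither touches the user's key store nor
    # writes a key file to disk.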
def get_keyring():
WheelKeys, keyring = tool.get_keyring()
class WheelKeysTest(WheelKeys):
def save(self):
pass
class keyringTest:
@classmethod
def get_keyring(cls):
class keyringTest2:
pw = None
def set_password(self, a, b, c):
self.pw = c
def get_password(self, a, b):
return self.pw
return keyringTest2()
return WheelKeysTest, keyringTest
tool.keygen(get_keyring=get_keyring)
|
shipci/sympy
|
refs/heads/master
|
sympy/utilities/tests/test_enumerative.py
|
20
|
from sympy.core.compatibility import xrange, zip_longest
from sympy.utilities.enumerative import (
factoring_visitor,
list_visitor,
MultisetPartitionTraverser,
multiset_partitions_taocp,
PartComponent,
part_key
)
from sympy.utilities.iterables import multiset_partitions, _set_partitions
from sympy.utilities.pytest import slow
# First, some functions only useful as test scaffolding -- these provide
# straightforward, but slow, reference implementations against which to
# compare the real versions, and also a comparison to verify that
# different versions are giving identical results.
def part_range_filter(partition_iterator, lb, ub):
"""
Filters (on the number of parts) a multiset partition enumeration
Arguments
=========
    lb and ub give a range (in the Python slice sense) on the lpart
variable returned from a multiset partition enumeration. Recall
that lpart is 0-based (it points to the topmost part on the part
stack), so if you want to return parts of sizes 2,3,4,5 you would
use lb=1 and ub=5.
"""
for state in partition_iterator:
f, lpart, pstack = state
        if lb <= lpart < ub:
yield state
def multiset_partitions_baseline(multiplicities, components):
"""Enumerates partitions of a multiset
Parameters
==========
multiplicities
list of integer multiplicities of the components of the multiset.
components
the components (elements) themselves
Returns
=======
Set of partitions. Each partition is tuple of parts, and each
part is a tuple of components (with repeats to indicate
multiplicity)
Notes
=====
Multiset partitions can be created as equivalence classes of set
partitions, and this function does just that. This approach is
slow and memory intensive compared to the more advanced algorithms
available, but the code is simple and easy to understand. Hence
this routine is strictly for testing -- to provide a
straightforward baseline against which to regress the production
versions. (This code is a simplified version of an earlier
production implementation.)
"""
canon = [] # list of components with repeats
for ct, elem in zip(multiplicities, components):
canon.extend([elem]*ct)
# accumulate the multiset partitions in a set to eliminate dups
cache = set()
n = len(canon)
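    # Each set partition of the n positions induces a multiset partition of
    # canon; sorting the parts canonicalizes each one so duplicates collapse
    # in the set.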
for nc, q in _set_partitions(n):
rv = [[] for i in xrange(nc)]
for i in xrange(n):
rv[q[i]].append(canon[i])
canonical = tuple(
sorted([tuple(p) for p in rv]))
cache.add(canonical)
return cache
def compare_multiset_w_baseline(multiplicities):
"""
Enumerates the partitions of multiset with AOCP algorithm and
baseline implementation, and compare the results.
"""
letters = "abcdefghijklmnopqrstuvwxyz"
bl_partitions = multiset_partitions_baseline(multiplicities, letters)
# The partitions returned by the different algorithms may have
# their parts in different orders. Also, they generate partitions
# in different orders. Hence the sorting, and set comparison.
aocp_partitions = set()
for state in multiset_partitions_taocp(multiplicities):
p1 = tuple(sorted(
[tuple(p) for p in list_visitor(state, letters)]))
aocp_partitions.add(p1)
assert bl_partitions == aocp_partitions
def compare_multiset_states(s1, s2):
"""compare for equality two instances of multiset partition states
This is useful for comparing different versions of the algorithm
to verify correctness."""
# Comparison is physical, the only use of semantics is to ignore
# trash off the top of the stack.
f1, lpart1, pstack1 = s1
f2, lpart2, pstack2 = s2
if (lpart1 == lpart2) and (f1[0:lpart1+1] == f2[0:lpart2+1]):
if pstack1[0:f1[lpart1+1]] == pstack2[0:f2[lpart2+1]]:
return True
return False
def test_multiset_partitions_taocp():
"""Compares the output of multiset_partitions_taocp with a baseline
(set partition based) implementation."""
# Test cases should not be too large, since the baseline
# implementation is fairly slow.
    multiplicities = [2, 2]
    compare_multiset_w_baseline(multiplicities)
    multiplicities = [4, 3, 1]
    compare_multiset_w_baseline(multiplicities)
def test_multiset_partitions_versions():
"""Compares Knuth-based versions of multiset_partitions"""
    multiplicities = [5, 2, 2, 1]
m = MultisetPartitionTraverser()
for s1, s2 in zip_longest(m.enum_all(multiplicities),
multiset_partitions_taocp(multiplicities)):
assert compare_multiset_states(s1, s2)
def subrange_exercise(mult, lb, ub):
"""Compare filter-based and more optimized subrange implementations
Helper for tests, called with both small and larger multisets.
"""
m = MultisetPartitionTraverser()
assert m.count_partitions(mult) == \
m.count_partitions_slow(mult)
# Note - multiple traversals from the same
# MultisetPartitionTraverser object cannot execute at the same
# time, hence make several instances here.
ma = MultisetPartitionTraverser()
mc = MultisetPartitionTraverser()
md = MultisetPartitionTraverser()
# Several paths to compute just the size two partitions
a_it = ma.enum_range(mult, lb, ub)
b_it = part_range_filter(multiset_partitions_taocp(mult), lb, ub)
c_it = part_range_filter(mc.enum_small(mult, ub), lb, sum(mult))
d_it = part_range_filter(md.enum_large(mult, lb), 0, ub)
for sa, sb, sc, sd in zip_longest(a_it, b_it, c_it, d_it):
assert compare_multiset_states(sa, sb)
assert compare_multiset_states(sa, sc)
assert compare_multiset_states(sa, sd)
def test_subrange():
# Quick, but doesn't hit some of the corner cases
    mult = [4, 4, 2, 1]  # mississippi
lb = 1
ub = 2
subrange_exercise(mult, lb, ub)
@slow
def test_subrange_large():
# takes a second or so, depending on cpu, Python version, etc.
    mult = [6, 3, 2, 1]
lb = 4
ub = 7
subrange_exercise(mult, lb, ub)
|
lamblin/pylearn2
|
refs/heads/master
|
pylearn2/models/tests/test_maxout.py
|
44
|
"""
Tests of the maxout functionality.
So far these don't test correctness, just that you can
run the objects.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2013, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
import numpy as np
import unittest
# Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest
from theano import config
from theano import function
from theano.sandbox import cuda
from theano import tensor as T
from pylearn2.config import yaml_parse
from pylearn2.datasets.exc import NoDataPathError
from pylearn2.models.mlp import MLP
from pylearn2.models.maxout import Maxout
from pylearn2.space import VectorSpace
def test_min_zero():
"""
This test guards against a bug where the size of the zero buffer used with
the min_zero flag was specified to have the wrong size. The bug only
manifested when compiled with optimizations off, because the optimizations
discard information about the size of the zero buffer.
"""
    mlp = MLP(input_space=VectorSpace(1),
              layers=[Maxout(layer_name="test_layer", num_units=1,
                             num_pieces=2,
                             irange=.05, min_zero=True)])
X = T.matrix()
output = mlp.fprop(X)
# Compile in debug mode so we don't optimize out the size of the buffer
# of zeros
f = function([X], output, mode="DEBUG_MODE")
f(np.zeros((1, 1)).astype(X.dtype))
def test_maxout_basic():
# Tests that we can load a densely connected maxout model
# and train it for a few epochs (without saving) on a dummy
# dataset-- tiny model and dataset
yaml_string = """
!obj:pylearn2.train.Train {
dataset: &train !obj:pylearn2.testing.datasets.random_one_hot_dense_d\
esign_matrix {
rng: !obj:numpy.random.RandomState { seed: [2013, 3, 16] },
num_examples: 12,
dim: 2,
num_classes: 10
},
model: !obj:pylearn2.models.mlp.MLP {
layers: [
!obj:pylearn2.models.maxout.Maxout {
layer_name: 'h0',
num_units: 3,
num_pieces: 2,
irange: .005,
max_col_norm: 1.9365,
},
!obj:pylearn2.models.maxout.Maxout {
layer_name: 'h1',
num_units: 2,
num_pieces: 3,
irange: .005,
max_col_norm: 1.9365,
},
!obj:pylearn2.models.mlp.Softmax {
max_col_norm: 1.9365,
layer_name: 'y',
n_classes: 10,
irange: .005
}
],
nvis: 2,
},
algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
learning_rule: !obj:pylearn2.training_algorithms.learning_rule.Mo\
mentum { init_momentum: 0.5 },
batch_size: 6,
learning_rate: .1,
monitoring_dataset:
{
'train' : *train
},
cost: !obj:pylearn2.costs.mlp.dropout.Dropout {
input_include_probs: { 'h0' : .8 },
input_scales: { 'h0': 1. }
},
termination_criterion: !obj:pylearn2.termination_criteria.EpochCo\
unter {
max_epochs: 3,
},
update_callbacks: !obj:pylearn2.training_algorithms.sgd.Exponenti\
alDecay {
decay_factor: 1.000004,
min_lr: .000001
}
},
extensions: [
!obj:pylearn2.training_algorithms.learning_rule.MomentumAdjustor {
start: 1,
saturate: 250,
final_momentum: .7
}
],
}
"""
train = yaml_parse.load(yaml_string)
train.main_loop()
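# For reference, a hedged sketch of the same dense maxout stack built directly
# in Python rather than via YAML (not exercised by this test; keyword names
# mirror the YAML above):
def _build_maxout_mlp_sketch():
    from pylearn2.models.mlp import Softmax
    return MLP(nvis=2, layers=[
        Maxout(layer_name='h0', num_units=3, num_pieces=2,
               irange=.005, max_col_norm=1.9365),
        Maxout(layer_name='h1', num_units=2, num_pieces=3,
               irange=.005, max_col_norm=1.9365),
        Softmax(layer_name='y', n_classes=10, irange=.005,
                max_col_norm=1.9365)])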
yaml_string_maxout_conv_c01b_basic = """
!obj:pylearn2.train.Train {
dataset: &train !obj:pylearn2.testing.datasets.random_one_hot_topolog\
ical_dense_design_matrix {
rng: !obj:numpy.random.RandomState { seed: [2013, 3, 16] },
shape: &input_shape [10, 10],
channels: 1,
axes: ['c', 0, 1, 'b'],
num_examples: 12,
num_classes: 10
},
model: !obj:pylearn2.models.mlp.MLP {
batch_size: 2,
layers: [
!obj:pylearn2.models.maxout.MaxoutConvC01B {
layer_name: 'h0',
pad: 0,
num_channels: 8,
num_pieces: 2,
kernel_shape: [2, 2],
pool_shape: [2, 2],
pool_stride: [2, 2],
irange: .005,
max_kernel_norm: .9,
},
# The following layers are commented out to make this
# test pass on a GTX 285.
# cuda-convnet isn't really meant to run on such an old
# graphics card but that's what we use for the buildbot.
# In the long run, we should move the buildbot to a newer
# graphics card and uncomment the remaining layers.
# !obj:pylearn2.models.maxout.MaxoutConvC01B {
# layer_name: 'h1',
# pad: 3,
# num_channels: 4,
# num_pieces: 4,
# kernel_shape: [3, 3],
# pool_shape: [2, 2],
# pool_stride: [2, 2],
# irange: .005,
# max_kernel_norm: 1.9365,
# },
#!obj:pylearn2.models.maxout.MaxoutConvC01B {
# pad: 3,
# layer_name: 'h2',
# num_channels: 16,
# num_pieces: 2,
# kernel_shape: [2, 2],
# pool_shape: [2, 2],
# pool_stride: [2, 2],
# irange: .005,
# max_kernel_norm: 1.9365,
# },
!obj:pylearn2.models.mlp.Softmax {
max_col_norm: 1.9365,
layer_name: 'y',
n_classes: 10,
irange: .005
}
],
input_space: !obj:pylearn2.space.Conv2DSpace {
shape: *input_shape,
num_channels: 1,
axes: ['c', 0, 1, 'b'],
},
},
algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
learning_rate: .05,
learning_rule: !obj:pylearn2.training_algorithms.learning_rule.Mo\
mentum { init_momentum: 0.9 },
monitoring_dataset:
{
'train': *train
},
cost: !obj:pylearn2.costs.mlp.dropout.Dropout {
input_include_probs: { 'h0' : .8 },
input_scales: { 'h0': 1. }
},
termination_criterion: !obj:pylearn2.termination_criteria.EpochCo\
unter {
max_epochs: 3
},
update_callbacks: !obj:pylearn2.training_algorithms.sgd.Exponenti\
alDecay {
decay_factor: 1.00004,
min_lr: .000001
}
},
extensions: [
!obj:pylearn2.training_algorithms.learning_rule.MomentumAdjustor {
start: 1,
saturate: 250,
final_momentum: .7
}
]
}
"""
yaml_string_maxout_conv_c01b_cifar10 = """
!obj:pylearn2.train.Train {
dataset: &train !obj:pylearn2.datasets.cifar10.CIFAR10 {
toronto_prepro: True,
which_set: 'train',
axes: ['c', 0, 1, 'b'],
start: 0,
stop: 50000
},
model: !obj:pylearn2.models.mlp.MLP {
batch_size: 100,
input_space: !obj:pylearn2.space.Conv2DSpace {
shape: [32, 32],
num_channels: 3,
axes: ['c', 0, 1, 'b'],
},
layers: [
!obj:pylearn2.models.maxout.MaxoutConvC01B {
layer_name: 'conv1',
pad: 0,
num_channels: 32,
num_pieces: 1,
kernel_shape: [5, 5],
pool_shape: [3, 3],
pool_stride: [2, 2],
irange: .01,
min_zero: True,
W_lr_scale: 1.,
b_lr_scale: 2.,
tied_b: True,
max_kernel_norm: 9.9,
},
!obj:pylearn2.models.mlp.Softmax {
layer_name: 'y',
n_classes: 10,
istdev: .01,
W_lr_scale: 1.,
b_lr_scale: 2.,
max_col_norm: 9.9365
}
],
},
algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
learning_rule: !obj:pylearn2.training_algorithms.learning_rule.Mo\
mentum { init_momentum: 0.9 },
batch_size: 100,
learning_rate: .01,
monitoring_dataset:
{
'valid' : !obj:pylearn2.datasets.cifar10.CIFAR10 {
toronto_prepro: True,
axes: ['c', 0, 1, 'b'],
which_set: 'train',
start: 40000,
stop: 50000
},
'test' : !obj:pylearn2.datasets.cifar10.CIFAR10 {
toronto_prepro: True,
axes: ['c', 0, 1, 'b'],
which_set: 'test',
}
},
termination_criterion: !obj:pylearn2.termination_criteria.EpochCo\
unter {
max_epochs: 5
}
}
}
"""
yaml_string_maxout_conv_c01b_cifar10_fast = """
!obj:pylearn2.train.Train {
dataset: &train !obj:pylearn2.datasets.cifar10.CIFAR10 {
toronto_prepro: True,
which_set: 'train',
axes: ['c', 0, 1, 'b'],
start: 0,
stop: 100
},
model: !obj:pylearn2.models.mlp.MLP {
batch_size: 100,
input_space: !obj:pylearn2.space.Conv2DSpace {
shape: [32, 32],
num_channels: 3,
axes: ['c', 0, 1, 'b'],
},
layers: [
!obj:pylearn2.models.maxout.MaxoutConvC01B {
layer_name: 'conv1',
pad: 0,
num_channels: 16,
num_pieces: 1,
kernel_shape: [5, 5],
pool_shape: [3, 3],
pool_stride: [2, 2],
irange: .01,
min_zero: False,
W_lr_scale: 1.,
b_lr_scale: 2.,
tied_b: True,
max_kernel_norm: 9.9,
},
!obj:pylearn2.models.mlp.Softmax {
layer_name: 'y',
n_classes: 10,
istdev: .03,
W_lr_scale: 1.,
b_lr_scale: 2.,
max_col_norm: 8.5
}
],
},
algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
learning_rule: !obj:pylearn2.training_algorithms.learning_rule.Mo\
mentum { init_momentum: 0.9 },
batch_size: 100,
learning_rate: .01,
monitoring_dataset:
{
'valid' : !obj:pylearn2.datasets.cifar10.CIFAR10 {
toronto_prepro: True,
axes: ['c', 0, 1, 'b'],
which_set: 'train',
start: 40000,
stop: 40100
},
'test' : !obj:pylearn2.datasets.cifar10.CIFAR10 {
toronto_prepro: True,
axes: ['c', 0, 1, 'b'],
which_set: 'test',
}
},
termination_criterion: !obj:pylearn2.termination_criteria.EpochCo\
unter {
max_epochs: 5
}
}
}
"""
class TestMaxout(unittest.TestCase):
def test_maxout_conv_c01b_basic_err(self):
assert cuda.cuda_enabled is False
self.assertRaises(RuntimeError,
yaml_parse.load,
yaml_string_maxout_conv_c01b_basic)
def test_maxout_conv_c01b_basic(self):
if cuda.cuda_available is False:
raise SkipTest('Optional package cuda disabled')
if not hasattr(cuda, 'unuse'):
raise Exception("Theano version too old to run this test!")
        # Tests that we can run a small convolutional model on GPU.
        assert cuda.cuda_enabled is False
        # Even if there is a GPU, if the user didn't specify device=gpu
        # we still want to run this test.
try:
old_floatX = config.floatX
cuda.use('gpu')
config.floatX = 'float32'
train = yaml_parse.load(yaml_string_maxout_conv_c01b_basic)
train.main_loop()
finally:
config.floatX = old_floatX
cuda.unuse()
assert cuda.cuda_enabled is False
def test_maxout_conv_c01b_cifar10(self):
if cuda.cuda_available is False:
raise SkipTest('Optional package cuda disabled')
if not hasattr(cuda, 'unuse'):
raise Exception("Theano version too old to run this test!")
        # Tests that we can run a small convolutional model on GPU.
        assert cuda.cuda_enabled is False
        # Even if there is a GPU, if the user didn't specify device=gpu
        # we still want to run this test.
try:
old_floatX = config.floatX
cuda.use('gpu')
config.floatX = 'float32'
try:
if config.mode in ['DEBUG_MODE', 'DebugMode']:
train = yaml_parse.load(yaml_string_maxout_conv_c01b_cifar10_fast)
else:
train = yaml_parse.load(yaml_string_maxout_conv_c01b_cifar10)
except NoDataPathError:
raise SkipTest("PYLEARN2_DATA_PATH environment variable "
"not defined")
train.main_loop()
# Check that the performance is close to the expected one:
# test_y_misclass: 0.3777000308036804
misclass_chan = train.algorithm.monitor.channels['test_y_misclass']
            if config.mode not in ['DEBUG_MODE', 'DebugMode']:
assert misclass_chan.val_record[-1] < 0.38, \
("misclass_chan.val_record[-1] = %g" %
misclass_chan.val_record[-1])
# test_y_nll: 1.0978516340255737
nll_chan = train.algorithm.monitor.channels['test_y_nll']
            if config.mode not in ['DEBUG_MODE', 'DebugMode']:
assert nll_chan.val_record[-1] < 1.1
finally:
config.floatX = old_floatX
cuda.unuse()
assert cuda.cuda_enabled is False
if __name__ == '__main__':
t = TestMaxout('setUp')
t.setUp()
t.test_maxout_conv_c01b_basic()
if 0:
unittest.main()
|
tomtor/QGIS
|
refs/heads/master
|
python/plugins/MetaSearch/dialogs/xmldialog.py
|
30
|
# -*- coding: utf-8 -*-
###############################################################################
#
# CSW Client
# ---------------------------------------------------------
# QGIS Catalog Service client.
#
# Copyright (C) 2014 Tom Kralidis (tomkralidis@gmail.com)
#
# This source is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This code is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
###############################################################################
from qgis.PyQt.QtWidgets import QDialog
from MetaSearch.util import get_ui_class
BASE_CLASS = get_ui_class('xmldialog.ui')
class XMLDialog(QDialog, BASE_CLASS):
"""Raw XML Dialogue"""
def __init__(self):
"""init"""
QDialog.__init__(self)
self.setupUi(self)
|
cloudera/hue
|
refs/heads/master
|
desktop/libs/liboauth/src/liboauth/views.py
|
2
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from future import standard_library
standard_library.install_aliases()
import logging
import sys
LOG = logging.getLogger(__name__)
import urllib.request, urllib.parse, urllib.error
try:
import httplib2
except ImportError:
LOG.warning('httplib2 module not found')
import django.contrib.auth.views
from django.core.exceptions import SuspiciousOperation
from django.contrib.auth import login, get_backends, authenticate
from django.contrib.sessions.models import Session
from django.http import HttpResponseRedirect
from django.urls import reverse
from hadoop.fs.exceptions import WebHdfsException
from useradmin.models import User
from useradmin.views import ensure_home_directory
from desktop.auth.backend import AllowFirstUserDjangoBackend
from desktop.auth.forms import UserCreationForm, AuthenticationForm
from desktop.lib.django_util import render
from desktop.lib.django_util import login_notrequired
from desktop.log.access import access_warn, last_access_map
import liboauth.conf
from liboauth.backend import OAuthBackend
if sys.version_info[0] > 2:
from django.utils.translation import gettext as _
else:
from django.utils.translation import ugettext as _
@login_notrequired
def show_login_page(request, login_errors=False):
"""Used by the non-jframe login"""
redirect_to = request.GET.get('next', '/')
is_first_login_ever = OAuthBackend.is_first_login_ever()
request.session.set_test_cookie()
return render('oauth-login.mako', request, {
'action': reverse('oauth_login'),
'next': redirect_to,
'first_login_ever': is_first_login_ever,
'login_errors': request.method == 'POST' or login_errors,
'socialGoogle': liboauth.conf.CONSUMER_KEY_GOOGLE.get() != "" and liboauth.conf.CONSUMER_SECRET_GOOGLE.get() != "",
'socialFacebook': liboauth.conf.CONSUMER_KEY_FACEBOOK.get() != "" and liboauth.conf.CONSUMER_SECRET_FACEBOOK.get() != "",
'socialLinkedin': liboauth.conf.CONSUMER_KEY_LINKEDIN.get() != "" and liboauth.conf.CONSUMER_SECRET_LINKEDIN.get() != "",
'socialTwitter': liboauth.conf.CONSUMER_KEY_TWITTER.get() != "" and liboauth.conf.CONSUMER_SECRET_TWITTER.get() != ""
})
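# Hedged refactoring sketch (not part of this module): the four social*
# flags above all follow one pattern, which could be expressed as:
def _provider_configured(key_conf, secret_conf):
    return key_conf.get() != "" and secret_conf.get() != ""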
@login_notrequired
def oauth_login(request):
if 'social' not in request.GET:
raise Exception(_("Invalid request: %s") % resp)
else:
url = OAuthBackend.handleLoginRequest(request)
return HttpResponseRedirect(url)
@login_notrequired
def oauth_authenticated(request):
access_token, next = OAuthBackend.handleAuthenticationRequest(request)
if access_token == "":
return show_login_page(request, True)
user = authenticate(access_token=access_token)
login(request, user)
return HttpResponseRedirect(next)
|
ishay2b/tensorflow
|
refs/heads/segnet
|
tensorflow/contrib/losses/python/losses/__init__.py
|
94
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for building neural network losses.
See @{$python/contrib.losses}.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.losses.python.losses.loss_ops import *
# pylint: enable=wildcard-import
|
Administrate/surveymonkey
|
refs/heads/master
|
surveymonkey/tests/mocks/utils.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division
import math
from datetime import datetime
from random import randint
from furl import furl
from delorean import Delorean
def create_quota_headers(qpd=None, qps=None, reset=None):
    qpd_allotted, qpd_current = qpd if qpd else (10000, randint(1, 9000))
    qps_allotted, qps_current = qps if qps else (8, randint(1, 5))
    if reset is None:
        # Evaluate the default lazily; datetime.now() in the signature
        # would be frozen at import time.
        reset = datetime.now()
    reset = Delorean(datetime=reset, timezone='UTC').datetime
return {
'X-Plan-QPS-Allotted': qps_allotted,
'X-Plan-QPS-Current': qps_current,
'X-Plan-Quota-Allotted': qpd_allotted,
'X-Plan-Quota-Current': qpd_current,
'X-Plan-Quota-Reset': reset.strftime("%A, %B %d, %Y %I:%M:%S %p %Z")
}
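# Hedged usage sketch:
# headers = create_quota_headers(qpd=(10000, 123), qps=(8, 2))
# headers['X-Plan-Quota-Current']  # -> 123
# headers['X-Plan-QPS-Allotted']   # -> 8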
class BaseListMock(object):
def __init__(self, total=125, base_url=None):
self.total = total
self.base_url = base_url
def get_links(self, per_page, current_page, pages, folder_id=None):
last_page = pages
def _clean_base_url(url): # Prevent duplicate qs params
return furl(url).remove(['per_page', 'current_page', 'pages', 'folder_id']).copy()
links = dict()
default_link_values = {
"per_page": per_page,
"page": current_page
}
if folder_id is not None:
default_link_values['folder_id'] = folder_id
links["self"] = _clean_base_url(self.base_url).add(default_link_values).url
if last_page > 1:
if current_page != last_page:
links["last"] = _clean_base_url(self.base_url).add(default_link_values).url
if current_page < last_page:
                next_link_values = dict(default_link_values)  # copy, don't alias
next_link_values['page'] = current_page + 1
links["next"] = _clean_base_url(self.base_url).add(next_link_values).url
if current_page != 1:
                first_link_values = dict(default_link_values)
first_link_values['page'] = 1
links["first"] = _clean_base_url(self.base_url).add(first_link_values).url
                prev_link_values = dict(default_link_values)
prev_link_values['page'] = current_page - 1
links["prev"] = _clean_base_url(self.base_url).add(prev_link_values).url
return links
def calculate_number_remaining(self, per_page, current_page):
if current_page == 1:
return self.total
else:
total_done = (current_page - 1) * per_page
total_remaining = self.total - total_done
return 0 if total_remaining <= 0 else total_remaining
def parse_url(self, url):
url = furl(url.geturl())
per_page = int(url.args.get("per_page", 50))
current_page = int(url.args.get("page", 1))
pages = math.ceil(self.total / per_page)
return per_page, current_page, pages
def parse_url_with_folder(self, url):
per_page, current_page, pages = self.parse_url(url)
url = furl(url.geturl())
folder_id = url.args.get("folder_id", None)
return per_page, current_page, pages, folder_id
def create_item(self):
raise NotImplementedError("Implemented in subclass")
def create_items(self, per_page, current_page, pages):
items = []
remaining = self.calculate_number_remaining(per_page, current_page)
if remaining > 0:
remaining = remaining if remaining < per_page else per_page
for x in range(0, remaining):
item = self.create_item()
items.append(item)
return items
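# A hedged usage sketch (hypothetical subclass, illustrative only):
# create_item() is the single hook concrete mocks must implement.
class _SurveyListMockSketch(BaseListMock):
    def create_item(self):
        return {'id': randint(1, 10 ** 6)}
# With total=125 and per_page=50, page 3 is the last page:
# calculate_number_remaining(50, 3) == 25, so create_items(50, 3, 3)
# returns 25 items there and a full 50 on earlier pages.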
|
BhallaLab/moose
|
refs/heads/master
|
moose-examples/traub_2005/py/testutils.py
|
2
|
# test_utils.py ---
#
# Filename: test_utils.py
# Description:
# Author:
# Maintainer:
# Created: Sat May 26 10:41:37 2012 (+0530)
# Version:
# Last-Updated: Sat Aug 6 15:45:51 2016 (-0400)
# By: subha
# Update #: 414
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# Code:
import os
os.environ['NUMPTHREADS'] = '1'
import sys
sys.path.append('../../../python')
import uuid
import numpy as np
from matplotlib import pyplot as plt
import unittest
import moose
from moose import utils as mutils
import config
import channelbase
INITCLOCK = 0
ELECCLOCK = 1
CHANCLOCK = 2
POOLCLOCK = 3
LOOKUPCLOCK = 6
STIMCLOCK = 7
PLOTCLOCK = 8
SIMDT = 5e-6
PLOTDT = 0.25e-3
lib = moose.Neutral(config.modelSettings.libpath)
def setup_clocks(simdt, plotdt):
print( 'Setting up clocks: simdt', simdt, 'plotdt', plotdt)
moose.setClock(INITCLOCK, simdt)
moose.setClock(ELECCLOCK, simdt)
moose.setClock(CHANCLOCK, simdt)
moose.setClock(POOLCLOCK, simdt)
moose.setClock(LOOKUPCLOCK, simdt)
moose.setClock(STIMCLOCK, simdt)
moose.setClock(PLOTCLOCK, plotdt)
moose.le('/clock')
def assign_clocks(model_container, data_container, solver='euler'):
"""Assign clockticks to elements.
Parameters
----------
model_container: element
All model components are under this element. The model elements
are assigned clocks as required to maintain the right update
sequence.
INITCLOCK = 0 calls `init` method in Compartments
ELECCLOCK = 1 calls `process` method of Compartments
CHANCLOCK = 2 calls `process` method for HHChannels
POOLCLOCK = 3 is not used in electrical simulation
LOOKUPCLOCK = 6 is not used in these simulations.
STIMCLOCK = 7 calls `process` method in stimulus objects like
PulseGen.
data_container: element
All data recording tables are under this element. They are
assigned PLOTCLOCK, the clock whose update interval is plotdt.
PLOTCLOCK = 8 calls `process` method of Table elements under
data_container
"""
moose.useClock(STIMCLOCK,
model_container.path+'/##[TYPE=PulseGen]',
'process')
moose.useClock(PLOTCLOCK,
data_container.path+'/##[TYPE=Table]',
'process')
if solver == 'hsolve':
        for neuron in moose.wildcardFind('%s/##[TYPE=Neuron]' % model_container.path):
solver = moose.HSolve(neuron.path+'/solve')
solver.dt = moose.element('/clock/tick[0]').dt
solver.target = neuron.path
moose.useClock(INITCLOCK,
model_container.path+'/##[TYPE=HSolve]',
'process')
else:
moose.useClock(INITCLOCK,
model_container.path+'/##[TYPE=Compartment]',
'init')
moose.useClock(ELECCLOCK,
model_container.path+'/##[TYPE=Compartment]',
'process')
moose.useClock(CHANCLOCK,
model_container.path+'/##[TYPE=HHChannel]',
'process')
moose.useClock(POOLCLOCK,
model_container.path+'/##[TYPE=CaConc]',
'process')
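# Hedged usage sketch (container paths are illustrative):
# model = moose.Neutral('/model')
# data = moose.Neutral('/data')
# setup_clocks(SIMDT, PLOTDT)
# assign_clocks(model, data)
# step_run(0.5, 0.05)  # simulate 0.5 s in 50 ms chunks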
def step_run(simtime, steptime, verbose=True):
"""Run the simulation in steps of `steptime` for `simtime`."""
clock = moose.Clock('/clock')
if verbose:
print( 'Starting simulation for', simtime)
while clock.currentTime < simtime - steptime:
moose.start(steptime)
if verbose:
print( 'Simulated till', clock.currentTime, 's')
remaining = simtime - clock.currentTime
if remaining > 0:
if verbose:
print( 'Running the remaining', remaining, 's')
moose.start(remaining)
if verbose:
print( 'Finished simulation')
def make_testcomp(containerpath):
comp = moose.Compartment('%s/testcomp' % (containerpath))
comp.Em = -65e-3
comp.initVm = -65e-3
comp.Cm = 1e-12
comp.Rm = 1e9
comp.Ra = 1e5
return comp
def make_pulsegen(containerpath):
pulsegen = moose.PulseGen('%s/testpulse' % (containerpath))
pulsegen.firstLevel = 1e-12
pulsegen.firstDelay = 50e-3
pulsegen.firstWidth = 100e-3
pulsegen.secondLevel = -1e-12
pulsegen.secondDelay = 150e-3
pulsegen.secondWidth = 100e-3
pulsegen.count = 3
pulsegen.delay[2] = 1e9
return pulsegen
def setup_single_compartment(model_container, data_container, channel_proto, Gbar):
"""Setup a single compartment with a channel
Parameters
----------
model_container: element
The model compartment is created under this element
data_container: element
The tables to record data are created under this
channel_proto: element
Channel prototype in library
Gbar: float
Maximum conductance density of the channel
"""
comp = make_testcomp(model_container.path)
channel = moose.copy(channel_proto, comp, channel_proto.name)[0]
moose.connect(channel, 'channel', comp, 'channel')
channel.Gbar = Gbar
pulsegen = make_pulsegen(model_container.path)
moose.connect(pulsegen, 'output', comp, 'injectMsg')
vm_table = moose.Table('%s/Vm' % (data_container.path))
moose.connect(vm_table, 'requestOut', comp, 'getVm')
gk_table = moose.Table('%s/Gk' % (data_container.path))
moose.connect(gk_table, 'requestOut', channel, 'getGk')
ik_table = moose.Table('%s/Ik' % (data_container.path))
moose.connect(ik_table, 'requestOut', channel, 'getIk')
return {'compartment': comp,
'stimulus': pulsegen,
'channel': channel,
'Vm': vm_table,
'Gk': gk_table,
'Ik': ik_table}
def insert_hhchannel(compartment, channelclass, gbar):
channel = moose.copy(channelclass.prototype, compartment)
channel[0].Gbar = gbar
moose.connect(channel, 'channel', compartment, 'channel')
return channel[0]
def compare_data_arrays(left, right, relative='maxw', plot=False, x_range=None):
"""Compare two data arrays and return some measure of the
error.
The arrays must have the same number of dimensions (1 or 2) and
represent the same range of x values. In case they are 1
dimensional, we take x values as relative position of that data
point in the total x-range.
We interpolate the y values for the x-values of the series with
lower resolution using the heigher resolution series as the
interpolation table.
The error is calculated as the maximum difference between the
interpolated values and the actual values in the lower resolution
array divided by the difference between the maximum and minimum y
values of both the series.
If plot is True, left, right and their difference at common points
are plotted.
relative: `rms` - return root mean square of the error values
`taxicab` - mean of the absolute error values
`maxw` - max(abs(error))/(max(y) - min(y))
`meany` - rms(error)/mean(y)
x_range : (minx, maxx) range of X values to consider for comparison
"""
if len(left.shape) != len(right.shape):
print( left.shape, right.shape)
raise ValueError('Arrays to be compared must have same dimensions.')
    # y is the interpolation result for the x array using xp and fp when xp and x do not match.
# xp and fp are interpolation table's independent and dependent variables
# yp is a view of the original y values
x = None
y = None
xp = None
fp = None
yp = None
    # arbitrarily keep the series with more data points as `left`
    if left.shape[0] < right.shape[0]:
        left, right = right, left
if len(right.shape) == 1:
x = np.arange(right.shape[0]) * 1.0 / right.shape[0]
yp = right
xp = np.arange(left.shape[0]) * 1.0 / left.shape[0]
fp = left
elif len(right.shape) == 2:
x = right[:,0]
yp = right[:,1]
xp = left[:,0]
fp = left[:,1]
else:
raise ValueError('Cannot handle more than 2 dimensional arrays.')
if left.shape[0] != right.shape[0]:
print( 'Array sizes not matching: (%d <> %d) - interpolating' % (left.shape[0], right.shape[0]))
y = np.interp(x, xp, fp)
else: # assume we have the same X values when sizes are the same
y = np.array(fp)
if x_range:
indices = np.nonzero((x > x_range[0]) & (x <= x_range[1]))[0]
y = np.array(y[indices])
yp = np.array(yp[indices])
x = np.array(x[indices])
# We update xp and fp to have the same plotting x-range
indices = np.nonzero((xp > x_range[0]) & (xp <= x_range[1]))[0]
xp = xp[indices]
fp = fp[indices]
err = y - yp
print( min(err), max(err), min(y), max(y), min(yp), max(yp))
# I measure a conservative relative error as maximum of all the
# errors between pairs of points with
all_y = np.r_[y, yp]
if plot:
plt.plot(x, yp, 'b-.', label='right')
plt.plot(xp, fp, 'g--', label='left')
plt.plot(x, err, 'r:', label='error')
plt.legend()
plt.show()
if relative == 'rms':
return np.sqrt(np.mean(err**2))
elif relative == 'taxicab':
return np.mean(np.abs(err))
elif relative == 'maxw':
return max(np.abs(err))/(max(all_y) - min(all_y))
elif relative == 'meany':
return np.sqrt(np.mean(err**2)) / np.mean(all_y)
else:
return err
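def _compare_data_arrays_demo():
    # Hedged usage sketch (not called anywhere): compare a finely and a
    # coarsely sampled sine wave. The finer series becomes the
    # interpolation table, and 'maxw' reports max(|error|) normalized by
    # the y-range, which should be tiny here.
    t_fine = np.linspace(0, 1, 1000)
    t_coarse = np.linspace(0, 1, 100)
    left = np.c_[t_fine, np.sin(2 * np.pi * t_fine)]
    right = np.c_[t_coarse, np.sin(2 * np.pi * t_coarse)]
    return compare_data_arrays(left, right, relative='maxw')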
import csv
def compare_cell_dump(left, right, rtol=1e-3, atol=1e-8, row_header=True, col_header=True):
"""This is a utility function to compare various compartment
parameters for a single cell model dumped in csv format using
NEURON and MOOSE."""
print( 'Comparing:', left, 'with', right)
ret = True
left_file = open(left, 'rb')
right_file = open(right, 'rb')
left_reader = csv.DictReader(left_file, delimiter=',')
right_reader = csv.DictReader(right_file, delimiter=',')
lheader = list(left_reader.fieldnames)
lheader.remove('comp')
lheader = sorted(lheader)
rheader = list(right_reader.fieldnames)
rheader.remove('comp')
rheader = sorted(rheader)
if len(lheader) != len(rheader):
print( 'Column number mismatch: left %d <-> right %d' % (len(lheader), len(rheader)))
return False
for ii in range(len(lheader)):
if lheader[ii] != rheader[ii]:
print( ii, '-th column name mismatch:', lheader[ii], '<->', rheader[ii])
return False
index = 2
left_end = False
right_end = False
while True:
try:
left_row = next(left_reader)
except StopIteration:
left_end = True
try:
right_row = next(right_reader)
except StopIteration:
right_end = True
if left_end and not right_end:
            print( left, 'ran out of lines after', index, 'rows')
return False
if right_end and not left_end:
            print( right, 'ran out of lines after', index, 'rows')
return False
if left_end and right_end:
return ret
if len(left_row) != len(right_row):
print( 'No. of columns differ: left - ', len(left_row), 'right -', len(right_row))
ret = False
break
for key in lheader:
try:
                # use fresh names; rebinding `left`/`right` here would clobber
                # the file names used in the messages above
                lvalue = float(left_row[key])
                rvalue = float(right_row[key])
                if not np.allclose(lvalue, rvalue, rtol=rtol, atol=atol):
                    print(('Mismatch in row:%s, column:%s. Values: %g <> %g' % (index, key, lvalue, rvalue)))
                    ret = False
except ValueError as e:
print(e)
print(('Row:', index, 'Key:', key, left_row[key], right_row[key]))
index = index + 1
return ret
#
# test_utils.py ends here
|
Grirrane/odoo
|
refs/heads/master
|
addons/report_webkit/__init__.py
|
8
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import header
import company
import report_helper
import webkit_report
import ir_report
import wizard
import convert
import report
|
rhelmer/socorro
|
refs/heads/master
|
socorro/external/postgresql/crash_data.py
|
15
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from socorro.external.crash_data_base import CrashDataBase
class CrashData(CrashDataBase):
"""
Implement the /crash_data service with PostgreSQL.
"""
def get_storage(self):
return self.config.database.crashstorage_class(self.config.database)
|
fotinakis/sentry
|
refs/heads/master
|
src/sentry/nodestore/multi/backend.py
|
23
|
"""
sentry.nodestore.multi.backend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import random
import six
from sentry.nodestore.base import NodeStorage
from sentry.utils.imports import import_string
class MultiNodeStorage(NodeStorage):
"""
A backend which will write to multiple backends, and read from a random
choice.
This is not intended for consistency, but is instead designed to allow you
to dual-write for purposes of migrations.
>>> MultiNodeStorage(backends=[
>>> ('sentry.nodestore.django.backend.DjangoNodeStorage', {}),
>>> ('sentry.nodestore.riak.backend.RiakNodeStorage', {}),
>>> ], read_selector=lambda backends: backends[0])
"""
def __init__(self, backends, read_selector=random.choice, **kwargs):
assert backends, "you should provide at least one backend"
self.backends = []
for backend, backend_options in backends:
if isinstance(backend, six.string_types):
backend = import_string(backend)
self.backends.append(backend(**backend_options))
self.read_selector = read_selector
super(MultiNodeStorage, self).__init__(**kwargs)
def get(self, id):
# just fetch it from a random backend, we're not aiming for consistency
backend = self.read_selector(self.backends)
return backend.get(id)
def get_multi(self, id_list):
backend = self.read_selector(self.backends)
return backend.get_multi(id_list=id_list)
def set(self, id, data):
should_raise = False
for backend in self.backends:
try:
backend.set(id, data)
except Exception:
should_raise = True
if should_raise:
raise
def set_multi(self, values):
should_raise = False
for backend in self.backends:
try:
backend.set_multi(values)
except Exception:
should_raise = True
if should_raise:
raise
def delete(self, id):
should_raise = False
for backend in self.backends:
try:
backend.delete(id)
except Exception:
should_raise = True
if should_raise:
raise
def cleanup(self, cutoff_timestamp):
should_raise = False
for backend in self.backends:
try:
backend.cleanup(cutoff_timestamp)
except Exception:
should_raise = True
if should_raise:
raise
|
priyatransbit/linux
|
refs/heads/master
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py
|
4653
|
# EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyzing the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have good way to tell the event type, but by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
if (len(raw_buf) == 144):
event = PebsEvent(name, comm, dso, symbol, raw_buf)
elif (len(raw_buf) == 176):
event = PebsNHM(name, comm, dso, symbol, raw_buf)
else:
event = PerfEvent(name, comm, dso, symbol, raw_buf)
return event
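# Hedged demo (not part of perf's scripting API): a raw buffer whose size
# matches neither PEBS layout falls through to the generic class, e.g.
#   ev = create_event("cycles", "bash", "[kernel]", "sys_open", "\x00" * 16)
# would yield a PerfEvent with ev_type == EVTYPE_GENERIC.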
class PerfEvent(object):
event_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
self.name = name
self.comm = comm
self.dso = dso
self.symbol = symbol
self.raw_buf = raw_buf
self.ev_type = ev_type
PerfEvent.event_num += 1
def show(self):
print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
pebs_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
tmp_buf=raw_buf[0:80]
flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
self.flags = flags
self.ip = ip
self.ax = ax
self.bx = bx
self.cx = cx
self.dx = dx
self.si = si
self.di = di
self.bp = bp
self.sp = sp
PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsEvent.pebs_num += 1
del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
pebs_nhm_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
tmp_buf=raw_buf[144:176]
status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
self.status = status
self.dla = dla
self.dse = dse
self.lat = lat
PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsNHM.pebs_nhm_num += 1
del tmp_buf
|
OptiPop/external_chromium_org
|
refs/heads/opti-5.1
|
tools/perf/page_sets/__init__.py
|
63
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import inspect
import os
import sys
from telemetry.core import discover
from telemetry.page import page_set
# Import all submodules' PageSet classes.
start_dir = os.path.dirname(os.path.abspath(__file__))
top_level_dir = os.path.dirname(start_dir)
base_class = page_set.PageSet
for cls in discover.DiscoverClasses(
start_dir, top_level_dir, base_class).values():
setattr(sys.modules[__name__], cls.__name__, cls)
|
bgroff/kala-app
|
refs/heads/master
|
django_kala/organizations/tasks/delete_organization.py
|
1
|
from celery.task import Task
from django.conf import settings
from django.contrib.auth import get_user_model
from organizations.models import Organization
User = get_user_model()
class DeleteOrganizationTask(Task):
def run(self, *args, **kwargs):
self.organization = Organization.objects.get(pk=args[0])
user = User.objects.get(pk=args[1])
if not self.organization.can_manage(user):
# TODO: Log this
return
for project in self.organization.project_set.all():
for document in project.document_set.all():
manager = settings.PLATFORM_MANAGER()
manager.delete_document(document)
def on_success(self, retval, task_id, args, kwargs):
self.organization.delete()
def on_failure(self, exc, task_id, args, kwargs, einfo):
# TODO: Log this
return
|
psawaya/Mental-Ginger
|
refs/heads/master
|
django/contrib/localflavor/au/au_states.py
|
544
|
"""
An alphabetical list of states for use as `choices` in a formfield.
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
STATE_CHOICES = (
('ACT', 'Australian Capital Territory'),
('NSW', 'New South Wales'),
('NT', 'Northern Territory'),
('QLD', 'Queensland'),
('SA', 'South Australia'),
('TAS', 'Tasmania'),
('VIC', 'Victoria'),
('WA', 'Western Australia'),
)
|
KureFM/Xenon
|
refs/heads/master
|
src/packages/bs4/builder/_htmlparser.py
|
412
|
"""Use the HTMLParser library to parse HTML files that aren't too bad."""
__all__ = [
'HTMLParserTreeBuilder',
]
from HTMLParser import (
HTMLParser,
HTMLParseError,
)
import sys
import warnings
# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
# argument, which we'd like to set to False. Unfortunately,
# http://bugs.python.org/issue13273 makes strict=True a better bet
# before Python 3.2.3.
#
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
major, minor, release = sys.version_info[:3]
CONSTRUCTOR_TAKES_STRICT = (
major > 3
or (major == 3 and minor > 2)
or (major == 3 and minor == 2 and release >= 3))
from bs4.element import (
CData,
Comment,
Declaration,
Doctype,
ProcessingInstruction,
)
from bs4.dammit import EntitySubstitution, UnicodeDammit
from bs4.builder import (
HTML,
HTMLTreeBuilder,
STRICT,
)
HTMLPARSER = 'html.parser'
class BeautifulSoupHTMLParser(HTMLParser):
def handle_starttag(self, name, attrs):
# XXX namespace
attr_dict = {}
for key, value in attrs:
# Change None attribute values to the empty string
# for consistency with the other tree builders.
if value is None:
value = ''
attr_dict[key] = value
attrvalue = '""'
self.soup.handle_starttag(name, None, None, attr_dict)
def handle_endtag(self, name):
self.soup.handle_endtag(name)
def handle_data(self, data):
self.soup.handle_data(data)
def handle_charref(self, name):
# XXX workaround for a bug in HTMLParser. Remove this once
# it's fixed.
if name.startswith('x'):
real_name = int(name.lstrip('x'), 16)
elif name.startswith('X'):
real_name = int(name.lstrip('X'), 16)
else:
real_name = int(name)
try:
data = unichr(real_name)
except (ValueError, OverflowError), e:
data = u"\N{REPLACEMENT CHARACTER}"
self.handle_data(data)
def handle_entityref(self, name):
character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
if character is not None:
data = character
else:
data = "&%s;" % name
self.handle_data(data)
def handle_comment(self, data):
self.soup.endData()
self.soup.handle_data(data)
self.soup.endData(Comment)
def handle_decl(self, data):
self.soup.endData()
if data.startswith("DOCTYPE "):
data = data[len("DOCTYPE "):]
elif data == 'DOCTYPE':
# i.e. "<!DOCTYPE>"
data = ''
self.soup.handle_data(data)
self.soup.endData(Doctype)
def unknown_decl(self, data):
if data.upper().startswith('CDATA['):
cls = CData
data = data[len('CDATA['):]
else:
cls = Declaration
self.soup.endData()
self.soup.handle_data(data)
self.soup.endData(cls)
def handle_pi(self, data):
self.soup.endData()
if data.endswith("?") and data.lower().startswith("xml"):
# "An XHTML processing instruction using the trailing '?'
# will cause the '?' to be included in data." - HTMLParser
# docs.
#
# Strip the question mark so we don't end up with two
# question marks.
data = data[:-1]
self.soup.handle_data(data)
self.soup.endData(ProcessingInstruction)
class HTMLParserTreeBuilder(HTMLTreeBuilder):
is_xml = False
features = [HTML, STRICT, HTMLPARSER]
def __init__(self, *args, **kwargs):
if CONSTRUCTOR_TAKES_STRICT:
kwargs['strict'] = False
self.parser_args = (args, kwargs)
def prepare_markup(self, markup, user_specified_encoding=None,
document_declared_encoding=None):
"""
:return: A 4-tuple (markup, original encoding, encoding
declared within markup, whether any characters had to be
replaced with REPLACEMENT CHARACTER).
"""
if isinstance(markup, unicode):
yield (markup, None, None, False)
return
try_encodings = [user_specified_encoding, document_declared_encoding]
dammit = UnicodeDammit(markup, try_encodings, is_html=True)
yield (dammit.markup, dammit.original_encoding,
dammit.declared_html_encoding,
dammit.contains_replacement_characters)
def feed(self, markup):
args, kwargs = self.parser_args
parser = BeautifulSoupHTMLParser(*args, **kwargs)
parser.soup = self.soup
try:
parser.feed(markup)
except HTMLParseError, e:
warnings.warn(RuntimeWarning(
"Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
raise e
# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
# string.
#
# XXX This code can be removed once most Python 3 users are on 3.2.3.
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
import re
attrfind_tolerant = re.compile(
r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant
locatestarttagend = re.compile(r"""
<[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
(?:\s+ # whitespace before attribute name
(?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
(?:\s*=\s* # value indicator
(?:'[^']*' # LITA-enclosed value
|\"[^\"]*\" # LIT-enclosed value
|[^'\">\s]+ # bare value
)
)?
)
)*
\s* # trailing whitespace
""", re.VERBOSE)
BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend
from html.parser import tagfind, attrfind
def parse_starttag(self, i):
self.__starttag_text = None
endpos = self.check_for_whole_start_tag(i)
if endpos < 0:
return endpos
rawdata = self.rawdata
self.__starttag_text = rawdata[i:endpos]
# Now parse the data between i+1 and j into a tag and attrs
attrs = []
match = tagfind.match(rawdata, i+1)
assert match, 'unexpected call to parse_starttag()'
k = match.end()
self.lasttag = tag = rawdata[i+1:k].lower()
while k < endpos:
if self.strict:
m = attrfind.match(rawdata, k)
else:
m = attrfind_tolerant.match(rawdata, k)
if not m:
break
attrname, rest, attrvalue = m.group(1, 2, 3)
if not rest:
attrvalue = None
elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
attrvalue[:1] == '"' == attrvalue[-1:]:
attrvalue = attrvalue[1:-1]
if attrvalue:
attrvalue = self.unescape(attrvalue)
attrs.append((attrname.lower(), attrvalue))
k = m.end()
end = rawdata[k:endpos].strip()
if end not in (">", "/>"):
lineno, offset = self.getpos()
if "\n" in self.__starttag_text:
lineno = lineno + self.__starttag_text.count("\n")
offset = len(self.__starttag_text) \
- self.__starttag_text.rfind("\n")
else:
offset = offset + len(self.__starttag_text)
if self.strict:
self.error("junk characters in start tag: %r"
% (rawdata[k:endpos][:20],))
self.handle_data(rawdata[i:endpos])
return endpos
if end.endswith('/>'):
# XHTML-style empty tag: <span attr="value" />
self.handle_startendtag(tag, attrs)
else:
self.handle_starttag(tag, attrs)
if tag in self.CDATA_CONTENT_ELEMENTS:
self.set_cdata_mode(tag)
return endpos
def set_cdata_mode(self, elem):
self.cdata_elem = elem.lower()
self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
BeautifulSoupHTMLParser.parse_starttag = parse_starttag
BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode
CONSTRUCTOR_TAKES_STRICT = True
|
Si-elegans/Web-based_GUI_Tools
|
refs/heads/master
|
wiki/core/permissions.py
|
5
|
from wiki.conf import settings
###############################
# ARTICLE PERMISSION HANDLING #
###############################
#
# All functions are:
# can_something(article, user)
# => True/False
#
# All functions can be replaced by pointing their relevant
# settings variable in wiki.conf.settings to a callable(article, user)
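# A hedged example of the override hook described above (illustrative only,
# not part of this module): pointing settings.CAN_READ at a callable like
# this one would replace the default policy implemented below.
def _example_can_read_override(article, user):
    # e.g. restrict all reading to staff accounts
    return not user.is_anonymous() and user.is_staff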
def can_read(article, user):
if callable(settings.CAN_READ):
return settings.CAN_READ(article, user)
else:
# Deny reading access to deleted articles if user has no delete access
article_is_deleted = article.current_revision and article.current_revision.deleted
if article_is_deleted and not article.can_delete(user):
return False
# Check access for other users...
if user.is_anonymous() and not settings.ANONYMOUS:
return False
elif article.other_read:
return True
elif user.is_anonymous():
return False
if user == article.owner:
return True
if article.group_read:
if article.group and user.groups.filter(id=article.group.id).exists():
return True
if article.can_moderate(user):
return True
return False
def can_write(article, user):
if callable(settings.CAN_WRITE):
return settings.CAN_WRITE(article, user)
# Check access for other users...
if user.is_anonymous() and not settings.ANONYMOUS_WRITE:
return False
elif article.other_write:
return True
elif user.is_anonymous():
return False
if user == article.owner:
return True
if article.group_write:
if article.group and user and user.groups.filter(id=article.group.id).exists():
return True
if article.can_moderate(user):
return True
return False
def can_assign(article, user):
if callable(settings.CAN_ASSIGN):
return settings.CAN_ASSIGN(article, user)
return not user.is_anonymous() and user.has_perm('wiki.assign')
def can_assign_owner(article, user):
if callable(settings.CAN_ASSIGN_OWNER):
return settings.CAN_ASSIGN_OWNER(article, user)
return False
def can_change_permissions(article, user):
if callable(settings.CAN_CHANGE_PERMISSIONS):
return settings.CAN_CHANGE_PERMISSIONS(article, user)
return (
not user.is_anonymous() and (
article.owner == user or
user.has_perm('wiki.assign')
)
)
def can_delete(article, user):
if callable(settings.CAN_DELETE):
return settings.CAN_DELETE(article, user)
return not user.is_anonymous() and article.can_write(user)
def can_moderate(article, user):
if callable(settings.CAN_MODERATE):
return settings.CAN_MODERATE(article, user)
return not user.is_anonymous() and user.has_perm('wiki.moderate')
def can_admin(article, user):
if callable(settings.CAN_ADMIN):
return settings.CAN_ADMIN(article, user)
return not user.is_anonymous() and user.has_perm('wiki.admin')
|
Bleno/teste_api
|
refs/heads/master
|
meu_ambiente/lib/python2.7/ntpath.py
|
4
|
/usr/lib/python2.7/ntpath.py
|
delving/nave
|
refs/heads/develop
|
setup.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup for the nave-public Django Application."""
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need fine tuning.
build_exe_options = {
'packages': [
'os', 'encodings', 'asyncio', 'requests', 'idna', 'django', 'eventlet',
'raven'
],
'include_files': ['/lib64/libc.so.6'],
'excludes': ['tkinter'],
}
# GUI applications require a different base on Windows (the default is for a
# console application).
base = None
if sys.platform == "win32":
base = "Win32GUI"
executables = [
Executable(
'server.py',
base=base,
targetName='hub3_nave.exe'
)
]
from nave.common import version
version = version.__version__
if sys.argv[-1] == 'publish':
try:
import wheel
print('Wheel version: ', wheel.__version__)
except ImportError:
print('Wheel library missing. Please run `pip install wheel`')
sys.exit()
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
if sys.argv[-1] == 'tag':
print('Tagging the version on git:')
os.system('git tag -a %s -m "version %s"' % (version, version))
os.system('git push --tags')
sys.exit()
readme = open('README.md').read()
# history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='nave-public',
version=version,
description="""Nave LoD platform""",
long_description=readme, # + '\n\n' + history,
author='Sjoerd Siebinga',
author_email='sjoerd@delving.eu',
url='https://github.com/delving/nave',
packages=[
'nave',
],
include_package_data=True,
executables=executables,
install_requires=[],
license='BSD 3-Clause License',
zip_safe=False,
keywords='nave-public',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Flask',
'Framework :: flask :: 0.12',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
emedinaa/contentbox
|
refs/heads/master
|
third_party/django/contrib/messages/tests/base.py
|
104
|
from django import http
from django.conf import settings, global_settings
from django.contrib.messages import constants, utils, get_level, set_level
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.storage import default_storage, base
from django.contrib.messages.storage.base import Message
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.utils.translation import ugettext_lazy
from django.utils.unittest import skipIf
def skipUnlessAuthIsInstalled(func):
return skipIf(
'django.contrib.auth' not in settings.INSTALLED_APPS,
"django.contrib.auth isn't installed")(func)
def add_level_messages(storage):
"""
Adds 6 messages from different levels (including a custom one) to a storage
instance.
"""
storage.add(constants.INFO, 'A generic info message')
storage.add(29, 'Some custom level')
storage.add(constants.DEBUG, 'A debugging message', extra_tags='extra-tag')
storage.add(constants.WARNING, 'A warning')
storage.add(constants.ERROR, 'An error')
storage.add(constants.SUCCESS, 'This was a triumph.')
class override_settings_tags(override_settings):
def enable(self):
super(override_settings_tags, self).enable()
# LEVEL_TAGS is a constant defined in the
# django.contrib.messages.storage.base module, so after changing
# settings.MESSAGE_TAGS, we need to update that constant too.
self.old_level_tags = base.LEVEL_TAGS
base.LEVEL_TAGS = utils.get_level_tags()
def disable(self):
super(override_settings_tags, self).disable()
base.LEVEL_TAGS = self.old_level_tags
class BaseTests(object):
storage_class = default_storage
urls = 'django.contrib.messages.tests.urls'
levels = {
'debug': constants.DEBUG,
'info': constants.INFO,
'success': constants.SUCCESS,
'warning': constants.WARNING,
'error': constants.ERROR,
}
def setUp(self):
self.settings_override = override_settings_tags(
TEMPLATE_DIRS = (),
TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS,
MESSAGE_TAGS = '',
MESSAGE_STORAGE = '%s.%s' % (self.storage_class.__module__,
self.storage_class.__name__),
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer',
)
self.settings_override.enable()
def tearDown(self):
self.settings_override.disable()
def get_request(self):
return http.HttpRequest()
def get_response(self):
return http.HttpResponse()
def get_storage(self, data=None):
"""
Returns the storage backend, setting its loaded data to the ``data``
argument.
This method avoids the storage ``_get`` method from getting called so
that other parts of the storage backend can be tested independent of
the message retrieval logic.
"""
storage = self.storage_class(self.get_request())
storage._loaded_data = data or []
return storage
def test_add(self):
storage = self.get_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 1')
self.assertTrue(storage.added_new)
storage.add(constants.INFO, 'Test message 2', extra_tags='tag')
self.assertEqual(len(storage), 2)
def test_add_lazy_translation(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, ugettext_lazy('lazy message'))
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
def test_no_update(self):
storage = self.get_storage()
response = self.get_response()
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_add_update(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 1')
storage.add(constants.INFO, 'Test message 1', extra_tags='tag')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 2)
def test_existing_add_read_update(self):
storage = self.get_existing_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 3')
list(storage) # Simulates a read
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_existing_read_add_update(self):
storage = self.get_existing_storage()
response = self.get_response()
list(storage) # Simulates a read
storage.add(constants.INFO, 'Test message 3')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_full_request_response_cycle(self):
"""
With the message middleware enabled, tests that messages are properly
stored and then retrieved across the full request/redirect/response
cycle.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('django.contrib.messages.tests.urls.show')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertTrue('messages' in response.context)
messages = [Message(self.levels[level], msg) for msg in
data['messages']]
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_with_template_response(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('django.contrib.messages.tests.urls.show_template_response')
for level in self.levels.keys():
add_url = reverse('django.contrib.messages.tests.urls.add_template_response',
args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertTrue('messages' in response.context)
for msg in data['messages']:
self.assertContains(response, msg)
# there shouldn't be any messages on second GET request
response = self.client.get(show_url)
for msg in data['messages']:
self.assertNotContains(response, msg)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_multiple_posts(self):
"""
Tests that messages persist properly when multiple POSTs are made
before a GET.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('django.contrib.messages.tests.urls.show')
messages = []
for level in ('debug', 'info', 'success', 'warning', 'error'):
messages.extend([Message(self.levels[level], msg) for msg in
data['messages']])
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
self.client.post(add_url, data)
response = self.client.get(show_url)
self.assertTrue('messages' in response.context)
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@override_settings(
        INSTALLED_APPS=filter(
            lambda app: app != 'django.contrib.messages', settings.INSTALLED_APPS),
        MIDDLEWARE_CLASSES=filter(
            lambda m: 'MessageMiddleware' not in m, settings.MIDDLEWARE_CLASSES),
        TEMPLATE_CONTEXT_PROCESSORS=filter(
            lambda p: 'context_processors.messages' not in p,
            settings.TEMPLATE_CONTEXT_PROCESSORS),
MESSAGE_LEVEL=constants.DEBUG
)
def test_middleware_disabled(self):
"""
Tests that, when the middleware is disabled, an exception is raised
when one attempts to store a message.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('django.contrib.messages.tests.urls.show')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
self.assertRaises(MessageFailure, self.client.post, add_url,
data, follow=True)
@override_settings(
        INSTALLED_APPS=filter(
            lambda app: app != 'django.contrib.messages', settings.INSTALLED_APPS),
        MIDDLEWARE_CLASSES=filter(
            lambda m: 'MessageMiddleware' not in m, settings.MIDDLEWARE_CLASSES),
        TEMPLATE_CONTEXT_PROCESSORS=filter(
            lambda p: 'context_processors.messages' not in p,
            settings.TEMPLATE_CONTEXT_PROCESSORS),
MESSAGE_LEVEL=constants.DEBUG
)
def test_middleware_disabled_fail_silently(self):
"""
Tests that, when the middleware is disabled, an exception is not
raised if 'fail_silently' = True
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
'fail_silently': True,
}
show_url = reverse('django.contrib.messages.tests.urls.show')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('django.contrib.messages.tests.urls.add',
args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertFalse('messages' in response.context)
def stored_messages_count(self, storage, response):
"""
Returns the number of messages being stored after a
``storage.update()`` call.
"""
raise NotImplementedError('This method must be set by a subclass.')
def test_get(self):
raise NotImplementedError('This method must be set by a subclass.')
def get_existing_storage(self):
return self.get_storage([Message(constants.INFO, 'Test message 1'),
Message(constants.INFO, 'Test message 2',
extra_tags='tag')])
def test_existing_read(self):
"""
Tests that reading the existing storage doesn't cause the data to be
lost.
"""
storage = self.get_existing_storage()
self.assertFalse(storage.used)
# After iterating the storage engine directly, the used flag is set.
data = list(storage)
self.assertTrue(storage.used)
# The data does not disappear because it has been iterated.
self.assertEqual(data, list(storage))
def test_existing_add(self):
storage = self.get_existing_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 3')
self.assertTrue(storage.added_new)
def test_default_level(self):
# get_level works even with no storage on the request.
request = self.get_request()
self.assertEqual(get_level(request), constants.INFO)
# get_level returns the default level if it hasn't been set.
storage = self.get_storage()
request._messages = storage
self.assertEqual(get_level(request), constants.INFO)
# Only messages of sufficient level get recorded.
add_level_messages(storage)
self.assertEqual(len(storage), 5)
def test_low_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 5))
self.assertEqual(get_level(request), 5)
add_level_messages(storage)
self.assertEqual(len(storage), 6)
def test_high_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 30))
self.assertEqual(get_level(request), 30)
add_level_messages(storage)
self.assertEqual(len(storage), 2)
@override_settings(MESSAGE_LEVEL=29)
def test_settings_level(self):
request = self.get_request()
storage = self.storage_class(request)
self.assertEqual(get_level(request), 29)
add_level_messages(storage)
self.assertEqual(len(storage), 3)
def test_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags,
['info', '', 'extra-tag debug', 'warning', 'error',
'success'])
@override_settings_tags(MESSAGE_TAGS={
constants.INFO: 'info',
constants.DEBUG: '',
constants.WARNING: '',
constants.ERROR: 'bad',
29: 'custom',
}
)
def test_custom_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags,
['info', 'custom', 'extra-tag', '', 'bad', 'success'])
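# Illustrative sketch (hypothetical names, not part of the original file): the
# abstract hooks above (storage_class, stored_messages_count, test_get) are
# filled in by one concrete subclass per storage backend, along these lines:
#
#   class CookieTest(BaseTests, TestCase):
#       storage_class = CookieStorage
#
#       def stored_messages_count(self, storage, response):
#           # count the messages serialized into the response cookie
#           ...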
|
andras-tim/sqlalchemy-migrate
|
refs/heads/master
|
migrate/tests/changeset/test_constraint.py
|
46
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sqlalchemy import *
from sqlalchemy.util import *
from sqlalchemy.exc import *
from migrate.changeset.util import fk_column_names
from migrate.exceptions import *
from migrate.changeset import *
from migrate.tests import fixture
class CommonTestConstraint(fixture.DB):
"""helper functions to test constraints.
we just create a fresh new table and make sure everything is
as required.
"""
def _setup(self, url):
super(CommonTestConstraint, self)._setup(url)
self._create_table()
def _teardown(self):
if hasattr(self, 'table') and self.engine.has_table(self.table.name):
self.table.drop()
super(CommonTestConstraint, self)._teardown()
def _create_table(self):
self._connect(self.url)
self.meta = MetaData(self.engine)
self.tablename = 'mytable'
self.table = Table(self.tablename, self.meta,
Column(u'id', Integer, nullable=False),
Column(u'fkey', Integer, nullable=False),
mysql_engine='InnoDB')
if self.engine.has_table(self.table.name):
self.table.drop()
self.table.create()
# make sure we start at zero
self.assertEqual(len(self.table.primary_key), 0)
self.assertTrue(isinstance(self.table.primary_key,
schema.PrimaryKeyConstraint), self.table.primary_key.__class__)
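# Illustrative sketch (hypothetical table; not part of the original tests): the
# changeset constraint classes exercised below share a create()/drop() protocol
# that emits the matching ALTER TABLE statements, e.g.
#
#   cons = UniqueConstraint(table.c.fkey, name='uq_fkey')
#   cons.create()   # ALTER TABLE ... ADD CONSTRAINT
#   cons.drop()     # ALTER TABLE ... DROP CONSTRAINT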
class TestConstraint(CommonTestConstraint):
level = fixture.DB.CONNECT
def _define_pk(self, *cols):
# Add a pk by creating a PK constraint
        if self.engine.name in ('oracle', 'firebird'):
            # Can't drop Oracle PKs without an explicit name
pk = PrimaryKeyConstraint(table=self.table, name='temp_pk_key', *cols)
else:
pk = PrimaryKeyConstraint(table=self.table, *cols)
self.compare_columns_equal(pk.columns, cols)
pk.create()
self.refresh_table()
if not self.url.startswith('sqlite'):
self.compare_columns_equal(self.table.primary_key, cols, ['type', 'autoincrement'])
# Drop the PK constraint
#if (self.engine.name in ('oracle', 'firebird')):
# # Apparently Oracle PK names aren't introspected
# pk.name = self.table.primary_key.name
pk.drop()
self.refresh_table()
self.assertEqual(len(self.table.primary_key), 0)
self.assertTrue(isinstance(self.table.primary_key, schema.PrimaryKeyConstraint))
return pk
@fixture.usedb()
def test_define_fk(self):
"""FK constraints can be defined, created, and dropped"""
# FK target must be unique
pk = PrimaryKeyConstraint(self.table.c.id, table=self.table, name="pkid")
pk.create()
# Add a FK by creating a FK constraint
if SQLA_07:
self.assertEqual(list(self.table.c.fkey.foreign_keys), [])
else:
self.assertEqual(self.table.c.fkey.foreign_keys._list, [])
fk = ForeignKeyConstraint([self.table.c.fkey],
[self.table.c.id],
name="fk_id_fkey",
ondelete="CASCADE")
        if SQLA_07:
            self.assertNotEqual(list(self.table.c.fkey.foreign_keys), [])
        else:
            self.assertNotEqual(self.table.c.fkey.foreign_keys._list, [])
for key in fk_column_names(fk):
self.assertEqual(key, self.table.c.fkey.name)
self.assertEqual([e.column for e in fk.elements], [self.table.c.id])
self.assertEqual(list(fk.referenced), [self.table.c.id])
if self.url.startswith('mysql'):
# MySQL FKs need an index
index = Index('index_name', self.table.c.fkey)
index.create()
fk.create()
# test for ondelete/onupdate
if SQLA_07:
fkey = list(self.table.c.fkey.foreign_keys)[0]
else:
fkey = self.table.c.fkey.foreign_keys._list[0]
self.assertEqual(fkey.ondelete, "CASCADE")
# TODO: test on real db if it was set
self.refresh_table()
        if SQLA_07:
            self.assertNotEqual(list(self.table.c.fkey.foreign_keys), [])
        else:
            self.assertNotEqual(self.table.c.fkey.foreign_keys._list, [])
fk.drop()
self.refresh_table()
if SQLA_07:
self.assertEqual(list(self.table.c.fkey.foreign_keys), [])
else:
self.assertEqual(self.table.c.fkey.foreign_keys._list, [])
@fixture.usedb()
def test_define_pk(self):
"""PK constraints can be defined, created, and dropped"""
self._define_pk(self.table.c.fkey)
@fixture.usedb()
def test_define_pk_multi(self):
"""Multicolumn PK constraints can be defined, created, and dropped"""
self._define_pk(self.table.c.id, self.table.c.fkey)
@fixture.usedb(not_supported=['firebird'])
def test_drop_cascade(self):
"""Drop constraint cascaded"""
pk = PrimaryKeyConstraint('fkey', table=self.table, name="id_pkey")
pk.create()
self.refresh_table()
# Drop the PK constraint forcing cascade
pk.drop(cascade=True)
# TODO: add real assertion if it was added
@fixture.usedb(supported=['mysql'])
def test_fail_mysql_check_constraints(self):
"""Check constraints raise NotSupported for mysql on drop"""
cons = CheckConstraint('id > 3', name="id_check", table=self.table)
cons.create()
self.refresh_table()
try:
cons.drop()
except NotSupportedError:
pass
else:
self.fail()
@fixture.usedb(not_supported=['sqlite', 'mysql'])
def test_named_check_constraints(self):
"""Check constraints can be defined, created, and dropped"""
self.assertRaises(InvalidConstraintError, CheckConstraint, 'id > 3')
cons = CheckConstraint('id > 3', name="id_check", table=self.table)
cons.create()
self.refresh_table()
self.table.insert(values={'id': 4, 'fkey': 1}).execute()
try:
self.table.insert(values={'id': 1, 'fkey': 1}).execute()
except (IntegrityError, ProgrammingError):
pass
else:
self.fail()
        # Drop the constraint; it should succeed
cons.drop()
self.refresh_table()
self.table.insert(values={'id': 2, 'fkey': 2}).execute()
self.table.insert(values={'id': 1, 'fkey': 2}).execute()
class TestAutoname(CommonTestConstraint):
"""Every method tests for a type of constraint wether it can autoname
itself and if you can pass object instance and names to classes.
"""
level = fixture.DB.CONNECT
@fixture.usedb(not_supported=['oracle', 'firebird'])
def test_autoname_pk(self):
"""PrimaryKeyConstraints can guess their name if None is given"""
# Don't supply a name; it should create one
cons = PrimaryKeyConstraint(self.table.c.id)
cons.create()
self.refresh_table()
if not self.url.startswith('sqlite'):
# TODO: test for index for sqlite
self.compare_columns_equal(cons.columns, self.table.primary_key, ['autoincrement', 'type'])
# Remove the name, drop the constraint; it should succeed
cons.name = None
cons.drop()
self.refresh_table()
self.assertEqual(list(), list(self.table.primary_key))
# test string names
cons = PrimaryKeyConstraint('id', table=self.table)
cons.create()
self.refresh_table()
if not self.url.startswith('sqlite'):
# TODO: test for index for sqlite
self.compare_columns_equal(cons.columns, self.table.primary_key)
cons.name = None
cons.drop()
@fixture.usedb(not_supported=['oracle', 'sqlite', 'firebird'])
def test_autoname_fk(self):
"""ForeignKeyConstraints can guess their name if None is given"""
cons = PrimaryKeyConstraint(self.table.c.id)
cons.create()
cons = ForeignKeyConstraint([self.table.c.fkey], [self.table.c.id])
cons.create()
self.refresh_table()
        if SQLA_07:
            self.assertTrue(list(self.table.c.fkey.foreign_keys)[0].column is self.table.c.id)
        else:
            self.assertTrue(self.table.c.fkey.foreign_keys[0].column is self.table.c.id)
# Remove the name, drop the constraint; it should succeed
cons.name = None
cons.drop()
self.refresh_table()
if SQLA_07:
self.assertEqual(list(self.table.c.fkey.foreign_keys), list())
else:
self.assertEqual(self.table.c.fkey.foreign_keys._list, list())
# test string names
cons = ForeignKeyConstraint(['fkey'], ['%s.id' % self.tablename], table=self.table)
cons.create()
self.refresh_table()
        if SQLA_07:
            self.assertTrue(list(self.table.c.fkey.foreign_keys)[0].column is self.table.c.id)
        else:
            self.assertTrue(self.table.c.fkey.foreign_keys[0].column is self.table.c.id)
# Remove the name, drop the constraint; it should succeed
cons.name = None
cons.drop()
@fixture.usedb(not_supported=['oracle', 'sqlite', 'mysql'])
def test_autoname_check(self):
"""CheckConstraints can guess their name if None is given"""
cons = CheckConstraint('id > 3', columns=[self.table.c.id])
cons.create()
self.refresh_table()
        if self.engine.name != 'mysql':
self.table.insert(values={'id': 4, 'fkey': 1}).execute()
try:
self.table.insert(values={'id': 1, 'fkey': 2}).execute()
except (IntegrityError, ProgrammingError):
pass
else:
self.fail()
# Remove the name, drop the constraint; it should succeed
cons.name = None
cons.drop()
self.refresh_table()
self.table.insert(values={'id': 2, 'fkey': 2}).execute()
self.table.insert(values={'id': 1, 'fkey': 3}).execute()
@fixture.usedb(not_supported=['oracle'])
def test_autoname_unique(self):
"""UniqueConstraints can guess their name if None is given"""
cons = UniqueConstraint(self.table.c.fkey)
cons.create()
self.refresh_table()
self.table.insert(values={'fkey': 4, 'id': 1}).execute()
try:
self.table.insert(values={'fkey': 4, 'id': 2}).execute()
except (sqlalchemy.exc.IntegrityError,
sqlalchemy.exc.ProgrammingError):
pass
else:
self.fail()
# Remove the name, drop the constraint; it should succeed
cons.name = None
cons.drop()
self.refresh_table()
self.table.insert(values={'fkey': 4, 'id': 2}).execute()
self.table.insert(values={'fkey': 4, 'id': 1}).execute()
|
UManPychron/pychron
|
refs/heads/develop
|
pychron/mdd/tasks/mdd_figure_editor.py
|
2
|
# ===============================================================================
# Copyright 2016 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import csv
import os
from traits.api import Instance, List, HasTraits, Str, Event, Any, Bool, Color
from traitsui.api import View, UItem, VGroup
# from traitsui.editors import TreeEditor
from traitsui.tree_node import TreeNode
from pychron.core.ui.tree_editor import TreeEditor
from pychron.envisage.resources import icon
from pychron.envisage.tasks.base_editor import BaseTraitsEditor
from pychron.graph.diffusion_graph import DiffusionGraph
visible = icon('eye')
class MDDItemNode(TreeNode):
def get_icon(self, obj, is_expanded):
icon = visible if obj.visible else ''
return icon
class BaseItem(HasTraits):
name = Str
enabled = Bool(True)
class MDDItem(BaseItem):
plots = List
background = Color
visible = Bool(True)
def toggle_visible(self):
for p in self.plots:
p.visible = not p.visible
p.request_redraw()
self.visible = p.visible
self.background = 'white' if p.visible else 'lightgrey'
class MDDGraph(BaseItem):
items = List(MDDItem)
class MDDTree(HasTraits):
graphs = List(MDDGraph)
def add_node(self, tag, name, plots):
if plots is None:
plots = []
graph = self._get_graph(tag)
if graph is None:
graph = MDDGraph(name=tag)
self.graphs.append(graph)
graph.items.append(MDDItem(name=name, plots=plots))
def _get_graph(self, tag):
return next((g for g in self.graphs if g.name == tag), None)
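# Illustrative sketch (hypothetical data, not part of the original module):
# MDDTree groups items per graph tag, creating the graph node on first use, so
# repeated tags share one MDDGraph.
#
#   tree = MDDTree()
#   tree.add_node('Spectrum', 'sample01', plots=[])
#   tree.add_node('Spectrum', 'sample02', plots=[])
#   # len(tree.graphs) == 1 and len(tree.graphs[0].items) == 2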
class EditorOptions(HasTraits):
tree = Instance(MDDTree, ())
selected = Any
dclicked = Event
refresh_needed = Event
def _dclicked_fired(self):
self.selected.toggle_visible()
self.refresh_needed = True
def traits_view(self):
nodes = [TreeNode(node_for=[MDDTree],
icon_open='',
children='graphs'),
TreeNode(node_for=[MDDGraph],
children='items',
label='name'),
MDDItemNode(node_for=[MDDItem],
label='name',
background='background')]
v = View(UItem('tree', editor=TreeEditor(nodes=nodes,
hide_root=True,
editable=False,
refresh_icons='refresh_needed',
refresh='refresh_needed',
selected='selected',
dclick='dclicked')))
return v
class MDDFigureEditor(BaseTraitsEditor):
refresh_needed = Event
graph = Instance(DiffusionGraph)
roots = List
editor_options = Instance(EditorOptions, ())
plotter_options = Any
def _refresh_needed_fired(self):
self.replot(force=True)
def _get_data(self, root, path, msg, func, delimiter=' '):
path = os.path.join(root, path)
if os.path.isfile(path):
with open(path, 'r') as rfile:
reader = csv.reader(rfile, delimiter=delimiter)
return func(reader)
else:
self.warning('Cannot load {}. No file at {}'.format(msg, path))
def _get_model_arrhenius_data(self, root):
return self._get_arrhenius(root, 'arr.dat', 'Modeled Arrhenius')
def _get_arrhenius_data(self, root):
return self._get_arrhenius(root, 'arr.samp', 'Measured Arrhenius')
def _get_arrhenius(self, root, name, tag):
def func(reader):
inv_temp = []
log_d = []
for row in reader:
try:
inv_temp.append(float(row[0]))
log_d.append(float(row[1]))
except ValueError:
continue
return inv_temp, log_d
return self._get_data(root, name, tag, func, '\t')
def _get_cooling_history_data(self, root, name='confmed.dat'):
def func(reader):
age, low_conf, high_conf = [], [], []
for row in reader:
try:
age.append(float(row[0]))
low_conf.append(float(row[1]))
high_conf.append(float(row[2]))
except ValueError:
continue
return age, low_conf, high_conf
return self._get_data(root, name, 'Cooling History', func, '\t')
def _get_logr_ro_data(self, root, name='logr.samp'):
def func(reader):
logr = []
logr39 = []
for row in reader:
if '&' not in row:
try:
logr.append(float(row[0]))
logr39.append(float(row[1]))
except ValueError:
continue
return logr, logr39
return self._get_data(root, name, 'Log R/Ro', func, '\t')
def _get_spectrum_data(self, root):
return self._get_spectrum(root, 'age.in', 'Spectrum')
def _get_spectrum(self, root, name, tag):
def func(reader):
a = []
e = []
for row in reader:
row = [r for r in row if r != '']
try:
e.append(float(row[0]))
a.append(float(row[1]))
e.append(float(row[0]))
a.append(float(row[1]))
except ValueError:
pass
return a, e
def func2(reader):
a = []
e = []
for row in reader:
row = [r for r in row if r != '']
try:
e.append(float(row[0]))
a.append(float(row[1]))
except ValueError:
pass
return a, e
age, ar39 = self._get_data(root, name, tag, func)
age_err, ar39_err = self._get_data(root, 'age-sd.smp', 'Spectrum Errors', func2)
return ar39[:-1], age[1:], ar39_err, age_err
def _get_model_spectrum_data(self, root, name='ages-me.dat'):
def func(reader):
age = []
ar39 = []
for row in reader:
try:
ar39.append(float(row[0]))
age.append(float(row[1]))
except ValueError:
continue
return ar39, age
return self._get_data(root, name, 'Spectrum', func, '\t')
def replot(self, force=False):
graph = self.graph
if force:
self.editor_options = EditorOptions()
graph = None
if graph is None:
opt = self.plotter_options
k, n, r, c = self.plotter_options.rc()
graph = DiffusionGraph(container_dict=dict(kind=k,
bgcolor=opt.bgcolor,
# padding=[10, 10, 40, 10],
shape=(r, c)))
ps = opt.panels()
graph.new_graph(n, bgcolor=opt.plot_bgcolor, padding=opt.get_paddings())
opt = self.editor_options
for root in self.roots:
for i, tags in ps:
for tag in tags:
ltag = tag.lower().replace(' ', '_')
data = getattr(self, '_get_{}_data'.format(ltag))(root)
if data is not None:
plots = getattr(graph, 'build_{}'.format(ltag))(*data, pid=i)
opt.tree.add_node(tag, os.path.basename(root), plots)
else:
graph.clear()
graph.refresh()
self.graph = graph
def traits_view(self):
graph_grp = VGroup(UItem('graph', style='custom'))
v = View(VGroup(graph_grp))
return v
# ============= EOF =============================================
|
xsteadfastx/pelican-plugins
|
refs/heads/master
|
md_inline_extension/inline.py
|
17
|
# -*- coding: utf-8 -*-
"""
Markdown Inline Extension For Pelican
=====================================
Extends Pelican's Markdown module
and allows for customized inline HTML
"""
import os
import sys
from pelican import signals
try:
    from .pelican_inline_markdown_extension import PelicanInlineMarkdownExtension
except ImportError as e:
PelicanInlineMarkdownExtension = None
print("\nMarkdown is not installed - inline Markdown extension disabled\n")
def process_settings(pelicanobj):
"""Sets user specified settings (see README for more details)"""
# Default settings
inline_settings = {}
inline_settings['config'] = {'[]':('', 'pelican-inline')}
# Get the user specified settings
    try:
        settings = pelicanobj.settings['MD_INLINE']
    except KeyError:
        settings = None
# If settings have been specified, add them to the config
if isinstance(settings, dict):
inline_settings['config'].update(settings)
return inline_settings
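# Illustrative settings sketch (hypothetical values, not part of the original
# module): MD_INLINE maps an inline delimiter to a (style, css_class) tuple and
# is merged over the default '[]' entry above, e.g. in pelicanconf.py:
#
#   MD_INLINE = {
#       '[del]': ('text-decoration: line-through;', 'pelican-inline'),
#   }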
def inline_markdown_extension(pelicanobj, config):
"""Instantiates a customized Markdown extension"""
# Instantiate Markdown extension and append it to the current extensions
try:
if isinstance(pelicanobj.settings.get('MD_EXTENSIONS'), list): # pelican 3.6.3 and earlier
pelicanobj.settings['MD_EXTENSIONS'].append(PelicanInlineMarkdownExtension(config))
else:
pelicanobj.settings['MARKDOWN'].setdefault('extensions', []).append(PelicanInlineMarkdownExtension(config))
    except Exception:
sys.excepthook(*sys.exc_info())
sys.stderr.write("\nError - the pelican Markdown extension failed to configure. Inline Markdown extension is non-functional.\n")
sys.stderr.flush()
def pelican_init(pelicanobj):
"""Loads settings and instantiates the Python Markdown extension"""
# If there was an error loading Markdown, then do not process any further
if not PelicanInlineMarkdownExtension:
return
# Process settings
config = process_settings(pelicanobj)
# Configure Markdown Extension
inline_markdown_extension(pelicanobj, config)
def register():
"""Plugin registration"""
signals.initialized.connect(pelican_init)
|
mesonbuild/meson
|
refs/heads/master
|
mesonbuild/munstable_coredata.py
|
2
|
# Copyright 2019 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import coredata as cdata
from .mesonlib import MachineChoice, OptionKey
import os.path
import pprint
import textwrap
def add_arguments(parser):
parser.add_argument('--all', action='store_true', dest='all', default=False,
help='Show data not used by current backend.')
parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
def dump_compilers(compilers):
for lang, compiler in compilers.items():
print(' ' + lang + ':')
print(' Id: ' + compiler.id)
print(' Command: ' + ' '.join(compiler.exelist))
if compiler.full_version:
print(' Full version: ' + compiler.full_version)
if compiler.version:
print(' Detected version: ' + compiler.version)
def dump_guids(d):
for name, value in d.items():
print(' ' + name + ': ' + value)
def run(options):
datadir = 'meson-private'
if options.builddir is not None:
datadir = os.path.join(options.builddir, datadir)
if not os.path.isdir(datadir):
print('Current directory is not a build dir. Please specify it or '
'change the working directory to it.')
return 1
all_backends = options.all
print('This is a dump of the internal unstable cache of meson. This is for debugging only.')
print('Do NOT parse, this will change from version to version in incompatible ways')
print('')
coredata = cdata.load(options.builddir)
backend = coredata.get_option(OptionKey('backend'))
for k, v in sorted(coredata.__dict__.items()):
if k in ('backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'):
# use `meson configure` to view these
pass
elif k in ['install_guid', 'test_guid', 'regen_guid']:
if all_backends or backend.startswith('vs'):
print(k + ': ' + v)
elif k == 'target_guids':
if all_backends or backend.startswith('vs'):
print(k + ':')
dump_guids(v)
elif k in ['lang_guids']:
if all_backends or backend.startswith('vs') or backend == 'xcode':
print(k + ':')
dump_guids(v)
elif k == 'meson_command':
if all_backends or backend.startswith('vs'):
print('Meson command used in build file regeneration: ' + ' '.join(v))
elif k == 'pkgconf_envvar':
print('Last seen PKGCONFIG environment variable value: ' + v)
elif k == 'version':
print('Meson version: ' + v)
elif k == 'cross_files':
if v:
print('Cross File: ' + ' '.join(v))
elif k == 'config_files':
if v:
print('Native File: ' + ' '.join(v))
elif k == 'compilers':
for for_machine in MachineChoice:
print('Cached {} machine compilers:'.format(
for_machine.get_lower_case_name()))
dump_compilers(v[for_machine])
elif k == 'deps':
def print_dep(dep_key, dep):
print(' ' + dep_key[0] + ": ")
print(' compile args: ' + repr(dep.get_compile_args()))
print(' link args: ' + repr(dep.get_link_args()))
if dep.get_sources():
print(' sources: ' + repr(dep.get_sources()))
print(' version: ' + repr(dep.get_version()))
for for_machine in iter(MachineChoice):
items_list = list(sorted(v[for_machine].items()))
if items_list:
                    print('Cached dependencies for {} machine'.format(
                        for_machine.get_lower_case_name()))
for dep_key, deps in items_list:
for dep in deps:
print_dep(dep_key, dep)
else:
print(k + ':')
print(textwrap.indent(pprint.pformat(v), ' '))
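# Illustrative usage sketch (assumption about the CLI wiring, not part of the
# original module): this module backs the `meson unstable-coredata`
# subcommand, e.g.
#
#   $ meson unstable-coredata --all builddir
#
# which parses the arguments registered in add_arguments() and calls run().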
|
falkolab/titanium_mobile
|
refs/heads/master
|
node_modules/node-ios-device/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
|
1534
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import os
import gyp
import gyp.common
import gyp.msvs_emulation
import json
import sys
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = False
generator_filelist_paths = {
}
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
'LIB_DIR', 'SHARED_LIB_DIR']:
# Some gyp steps fail if these are empty(!).
generator_default_variables[dirname] = 'dir'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
'CONFIGURATION_NAME']:
generator_default_variables[unused] = ''
def CalculateVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
default_variables.setdefault('OS', gyp.common.GetFlavor(params))
flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get('generator_flags', {})
if generator_flags.get('adjust_static_libraries', False):
global generator_wants_static_library_dependencies_adjusted
generator_wants_static_library_dependencies_adjusted = True
toplevel = params['options'].toplevel_dir
generator_dir = os.path.relpath(params['options'].generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get('output_dir', 'out')
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, generator_dir, output_dir, 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': toplevel,
'qualified_out_dir': qualified_out_dir,
}
def GenerateOutput(target_list, target_dicts, data, params):
# Map of target -> list of targets it depends on.
edges = {}
# Queue of targets to visit.
targets_to_visit = target_list[:]
while len(targets_to_visit) > 0:
target = targets_to_visit.pop()
if target in edges:
continue
edges[target] = []
for dep in target_dicts[target].get('dependencies', []):
edges[target].append(dep)
targets_to_visit.append(dep)
try:
filepath = params['generator_flags']['output_dir']
except KeyError:
filepath = '.'
filename = os.path.join(filepath, 'dump.json')
  with open(filename, 'w') as f:
    json.dump(edges, f)
print 'Wrote json to %s.' % filename
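# Illustrative output sketch (hypothetical targets, not part of the original
# module): dump.json maps each fully-qualified target to the targets it
# directly depends on, e.g.
#
#   {"app.gyp:app#target": ["lib.gyp:lib#target"], "lib.gyp:lib#target": []}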
|
turbokongen/home-assistant
|
refs/heads/dev
|
tests/components/pushbullet/__init__.py
|
36
|
"""Tests for the pushbullet component."""
|
MalloyPower/parsing-python
|
refs/heads/master
|
front-end/testsuite-python-lib/Python-3.2/Lib/test/test_sort.py
|
9
|
from test import support
import random
import sys
import unittest
verbose = support.verbose
nerrors = 0
def CmpToKey(mycmp):
'Convert a cmp= function into a key= function'
class K(object):
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) == -1
return K
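# Illustrative sketch (not part of the original suite): CmpToKey mirrors
# functools.cmp_to_key, adapting an old-style three-way cmp function for use
# as a key= callable.
def _cmp_to_key_example():
    data = [3, 1, 2]
    data.sort(key=CmpToKey(lambda a, b: (a > b) - (a < b)))
    assert data == [1, 2, 3]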
def check(tag, expected, raw, compare=None):
global nerrors
if verbose:
print(" checking", tag)
orig = raw[:] # save input in case of error
if compare:
raw.sort(key=CmpToKey(compare))
else:
raw.sort()
if len(expected) != len(raw):
print("error in", tag)
print("length mismatch;", len(expected), len(raw))
print(expected)
print(orig)
print(raw)
nerrors += 1
return
for i, good in enumerate(expected):
maybe = raw[i]
if good is not maybe:
print("error in", tag)
print("out of order at index", i, good, maybe)
print(expected)
print(orig)
print(raw)
nerrors += 1
return
class TestBase(unittest.TestCase):
def testStressfully(self):
# Try a variety of sizes at and around powers of 2, and at powers of 10.
sizes = [0]
for power in range(1, 10):
n = 2 ** power
sizes.extend(range(n-1, n+2))
sizes.extend([10, 100, 1000])
class Complains(object):
maybe_complain = True
def __init__(self, i):
self.i = i
def __lt__(self, other):
if Complains.maybe_complain and random.random() < 0.001:
if verbose:
print(" complaining at", self, other)
raise RuntimeError
return self.i < other.i
def __repr__(self):
return "Complains(%d)" % self.i
class Stable(object):
def __init__(self, key, i):
self.key = key
self.index = i
def __lt__(self, other):
return self.key < other.key
def __repr__(self):
return "Stable(%d, %d)" % (self.key, self.index)
for n in sizes:
x = list(range(n))
if verbose:
print("Testing size", n)
s = x[:]
check("identity", x, s)
s = x[:]
s.reverse()
check("reversed", x, s)
s = x[:]
random.shuffle(s)
check("random permutation", x, s)
y = x[:]
y.reverse()
s = x[:]
check("reversed via function", y, s, lambda a, b: (b>a)-(b<a))
if verbose:
print(" Checking against an insane comparison function.")
print(" If the implementation isn't careful, this may segfault.")
s = x[:]
s.sort(key=CmpToKey(lambda a, b: int(random.random() * 3) - 1))
check("an insane function left some permutation", x, s)
x = [Complains(i) for i in x]
s = x[:]
random.shuffle(s)
Complains.maybe_complain = True
it_complained = False
try:
s.sort()
except RuntimeError:
it_complained = True
if it_complained:
Complains.maybe_complain = False
check("exception during sort left some permutation", x, s)
s = [Stable(random.randrange(10), i) for i in range(n)]
augmented = [(e, e.index) for e in s]
augmented.sort() # forced stable because ties broken by index
x = [e for e, i in augmented] # a stable sort of s
check("stability", x, s)
#==============================================================================
class TestBugs(unittest.TestCase):
def test_bug453523(self):
# bug 453523 -- list.sort() crasher.
# If this fails, the most likely outcome is a core dump.
# Mutations during a list sort should raise a ValueError.
class C:
def __lt__(self, other):
if L and random.random() < 0.75:
L.pop()
else:
L.append(3)
return random.random() < 0.5
L = [C() for i in range(50)]
self.assertRaises(ValueError, L.sort)
def test_undetected_mutation(self):
# Python 2.4a1 did not always detect mutation
memorywaster = []
for i in range(20):
def mutating_cmp(x, y):
L.append(3)
L.pop()
return (x > y) - (x < y)
L = [1,2]
self.assertRaises(ValueError, L.sort, key=CmpToKey(mutating_cmp))
def mutating_cmp(x, y):
L.append(3)
del L[:]
return (x > y) - (x < y)
self.assertRaises(ValueError, L.sort, key=CmpToKey(mutating_cmp))
memorywaster = [memorywaster]
#==============================================================================
class TestDecorateSortUndecorate(unittest.TestCase):
def test_decorated(self):
data = 'The quick Brown fox Jumped over The lazy Dog'.split()
copy = data[:]
random.shuffle(data)
data.sort(key=str.lower)
def my_cmp(x, y):
xlower, ylower = x.lower(), y.lower()
return (xlower > ylower) - (xlower < ylower)
copy.sort(key=CmpToKey(my_cmp))
def test_baddecorator(self):
data = 'The quick Brown fox Jumped over The lazy Dog'.split()
self.assertRaises(TypeError, data.sort, key=lambda x,y: 0)
def test_stability(self):
data = [(random.randrange(100), i) for i in range(200)]
copy = data[:]
data.sort(key=lambda t: t[0]) # sort on the random first field
copy.sort() # sort using both fields
self.assertEqual(data, copy) # should get the same result
def test_key_with_exception(self):
# Verify that the wrapper has been removed
data = list(range(-2, 2))
dup = data[:]
self.assertRaises(ZeroDivisionError, data.sort, key=lambda x: 1/x)
self.assertEqual(data, dup)
def test_key_with_mutation(self):
data = list(range(10))
def k(x):
del data[:]
data[:] = range(20)
return x
self.assertRaises(ValueError, data.sort, key=k)
def test_key_with_mutating_del(self):
data = list(range(10))
class SortKiller(object):
def __init__(self, x):
pass
def __del__(self):
del data[:]
data[:] = range(20)
def __lt__(self, other):
return id(self) < id(other)
self.assertRaises(ValueError, data.sort, key=SortKiller)
def test_key_with_mutating_del_and_exception(self):
data = list(range(10))
## dup = data[:]
class SortKiller(object):
def __init__(self, x):
if x > 2:
raise RuntimeError
def __del__(self):
del data[:]
data[:] = list(range(20))
self.assertRaises(RuntimeError, data.sort, key=SortKiller)
## major honking subtlety: we *can't* do:
##
## self.assertEqual(data, dup)
##
## because there is a reference to a SortKiller in the
## traceback and by the time it dies we're outside the call to
## .sort() and so the list protection gimmicks are out of
## date (this cost some brain cells to figure out...).
def test_reverse(self):
data = list(range(100))
random.shuffle(data)
data.sort(reverse=True)
self.assertEqual(data, list(range(99,-1,-1)))
def test_reverse_stability(self):
data = [(random.randrange(100), i) for i in range(200)]
copy1 = data[:]
copy2 = data[:]
def my_cmp(x, y):
x0, y0 = x[0], y[0]
return (x0 > y0) - (x0 < y0)
def my_cmp_reversed(x, y):
x0, y0 = x[0], y[0]
return (y0 > x0) - (y0 < x0)
data.sort(key=CmpToKey(my_cmp), reverse=True)
copy1.sort(key=CmpToKey(my_cmp_reversed))
self.assertEqual(data, copy1)
copy2.sort(key=lambda x: x[0], reverse=True)
self.assertEqual(data, copy2)
#==============================================================================
def test_main(verbose=None):
test_classes = (
TestBase,
TestDecorateSortUndecorate,
TestBugs,
)
support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
if __name__ == "__main__":
test_main(verbose=True)
|
unistra/django-rest-framework-fine-permissions
|
refs/heads/master
|
tests/urls.py
|
1
|
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework_fine_permissions.urls import urlpatterns as drffp_urls
urlpatterns = drffp_urls
urlpatterns += [
url(r'^admin/', admin.site.urls),
]
|
google/llvm-propeller
|
refs/heads/bb-clusters
|
lldb/test/API/python_api/default-constructor/sb_communication.py
|
13
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import lldb
def fuzz_obj(obj):
broadcaster = obj.GetBroadcaster()
# Do fuzz testing on the broadcaster obj, it should not crash lldb.
import sb_broadcaster
sb_broadcaster.fuzz_obj(broadcaster)
obj.AdoptFileDesriptor(0, False)
obj.AdoptFileDesriptor(1, False)
obj.AdoptFileDesriptor(2, False)
obj.Connect("file:/tmp/myfile")
obj.Connect(None)
obj.Disconnect()
obj.IsConnected()
obj.GetCloseOnEOF()
obj.SetCloseOnEOF(True)
obj.SetCloseOnEOF(False)
#obj.Write(None, sys.maxint, None)
#obj.Read(None, sys.maxint, 0xffffffff, None)
obj.ReadThreadStart()
obj.ReadThreadStop()
obj.ReadThreadIsRunning()
obj.SetReadThreadBytesReceivedCallback(None, None)
|
gauribhoite/personfinder
|
refs/heads/master
|
env/google_appengine/lib/django-1.4/django/db/backends/oracle/creation.py
|
29
|
import sys
import time
from django.db.backends.creation import BaseDatabaseCreation
TEST_DATABASE_PREFIX = 'test_'
PASSWORD = 'Im_a_lumberjack'
class DatabaseCreation(BaseDatabaseCreation):
# This dictionary maps Field objects to their associated Oracle column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
#
# Any format strings starting with "qn_" are quoted before being used in the
# output (the "qn_" prefix is stripped before the lookup is performed.
data_types = {
'AutoField': 'NUMBER(11)',
'BooleanField': 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))',
'CharField': 'NVARCHAR2(%(max_length)s)',
'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)',
'DateField': 'DATE',
'DateTimeField': 'TIMESTAMP',
'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
'FileField': 'NVARCHAR2(%(max_length)s)',
'FilePathField': 'NVARCHAR2(%(max_length)s)',
'FloatField': 'DOUBLE PRECISION',
'IntegerField': 'NUMBER(11)',
'BigIntegerField': 'NUMBER(19)',
'IPAddressField': 'VARCHAR2(15)',
'GenericIPAddressField': 'VARCHAR2(39)',
'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL))',
'OneToOneField': 'NUMBER(11)',
'PositiveIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
'SlugField': 'NVARCHAR2(%(max_length)s)',
'SmallIntegerField': 'NUMBER(11)',
'TextField': 'NCLOB',
'TimeField': 'TIMESTAMP',
'URLField': 'VARCHAR2(%(max_length)s)',
}
def __init__(self, connection):
super(DatabaseCreation, self).__init__(connection)
def _create_test_db(self, verbosity=1, autoclobber=False):
TEST_NAME = self._test_database_name()
TEST_USER = self._test_database_user()
TEST_PASSWD = self._test_database_passwd()
TEST_TBLSPACE = self._test_database_tblspace()
TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
parameters = {
'dbname': TEST_NAME,
'user': TEST_USER,
'password': TEST_PASSWD,
'tblspace': TEST_TBLSPACE,
'tblspace_temp': TEST_TBLSPACE_TMP,
}
cursor = self.connection.cursor()
if self._test_database_create():
try:
self._execute_test_db_creation(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test database '%s'..." % self.connection.alias
self._execute_test_db_destruction(cursor, parameters, verbosity)
self._execute_test_db_creation(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
if self._test_user_create():
if verbosity >= 1:
print "Creating test user..."
try:
self._create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test user: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_USER)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test user..."
self._destroy_test_user(cursor, parameters, verbosity)
if verbosity >= 1:
print "Creating test user..."
self._create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test user: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
self.connection.settings_dict['SAVED_USER'] = self.connection.settings_dict['USER']
self.connection.settings_dict['SAVED_PASSWORD'] = self.connection.settings_dict['PASSWORD']
self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict['USER'] = TEST_USER
self.connection.settings_dict['PASSWORD'] = TEST_PASSWD
return self.connection.settings_dict['NAME']
def _destroy_test_db(self, test_database_name, verbosity=1):
"""
Destroy a test database, prompting the user for confirmation if the
database already exists. Returns the name of the test database created.
"""
TEST_NAME = self._test_database_name()
TEST_USER = self._test_database_user()
TEST_PASSWD = self._test_database_passwd()
TEST_TBLSPACE = self._test_database_tblspace()
TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
self.connection.settings_dict['USER'] = self.connection.settings_dict['SAVED_USER']
self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD']
parameters = {
'dbname': TEST_NAME,
'user': TEST_USER,
'password': TEST_PASSWD,
'tblspace': TEST_TBLSPACE,
'tblspace_temp': TEST_TBLSPACE_TMP,
}
cursor = self.connection.cursor()
time.sleep(1) # To avoid "database is being accessed by other users" errors.
if self._test_user_create():
if verbosity >= 1:
print 'Destroying test user...'
self._destroy_test_user(cursor, parameters, verbosity)
if self._test_database_create():
if verbosity >= 1:
print 'Destroying test database tables...'
self._execute_test_db_destruction(cursor, parameters, verbosity)
self.connection.close()
def _execute_test_db_creation(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_db(): dbname = %s" % parameters['dbname']
statements = [
"""CREATE TABLESPACE %(tblspace)s
DATAFILE '%(tblspace)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 200M
""",
"""CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
TEMPFILE '%(tblspace_temp)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M
""",
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _create_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_user(): username = %s" % parameters['user']
statements = [
"""CREATE USER %(user)s
IDENTIFIED BY %(password)s
DEFAULT TABLESPACE %(tblspace)s
TEMPORARY TABLESPACE %(tblspace_temp)s
QUOTA UNLIMITED ON %(tblspace)s
""",
"""GRANT CONNECT, RESOURCE TO %(user)s""",
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_test_db_destruction(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_execute_test_db_destruction(): dbname=%s" % parameters['dbname']
statements = [
'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _destroy_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_destroy_test_user(): user=%s" % parameters['user']
print "Be patient. This can take some time..."
statements = [
'DROP USER %(user)s CASCADE',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_statements(self, cursor, statements, parameters, verbosity):
for template in statements:
stmt = template % parameters
if verbosity >= 2:
print stmt
try:
cursor.execute(stmt)
except Exception, err:
sys.stderr.write("Failed (%s)\n" % (err))
raise
def _test_database_name(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
try:
if self.connection.settings_dict['TEST_NAME']:
name = self.connection.settings_dict['TEST_NAME']
except AttributeError:
pass
return name
def _test_database_create(self):
return self.connection.settings_dict.get('TEST_CREATE', True)
def _test_user_create(self):
return self.connection.settings_dict.get('TEST_USER_CREATE', True)
def _test_database_user(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['USER']
try:
if self.connection.settings_dict['TEST_USER']:
name = self.connection.settings_dict['TEST_USER']
except KeyError:
pass
return name
def _test_database_passwd(self):
name = PASSWORD
try:
if self.connection.settings_dict['TEST_PASSWD']:
name = self.connection.settings_dict['TEST_PASSWD']
except KeyError:
pass
return name
def _test_database_tblspace(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
try:
if self.connection.settings_dict['TEST_TBLSPACE']:
name = self.connection.settings_dict['TEST_TBLSPACE']
except KeyError:
pass
return name
def _test_database_tblspace_tmp(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME'] + '_temp'
try:
if self.connection.settings_dict['TEST_TBLSPACE_TMP']:
name = self.connection.settings_dict['TEST_TBLSPACE_TMP']
except KeyError:
pass
return name
def _get_test_db_name(self):
"""
We need to return the 'production' DB name to get the test DB creation
        machinery to work. This isn't a big deal in this case because DB
        names as handled by Django don't have real counterparts in Oracle.
        """
return self.connection.settings_dict['NAME']
def test_db_signature(self):
settings_dict = self.connection.settings_dict
return (
settings_dict['HOST'],
settings_dict['PORT'],
settings_dict['ENGINE'],
settings_dict['NAME'],
self._test_database_user(),
)
def set_autocommit(self):
self.connection.connection.autocommit = True
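# Illustrative settings sketch (hypothetical values, not part of the original
# module): the TEST_* keys consulted above may be set per-database to override
# the generated defaults.
#
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.oracle',
#         'NAME': 'xe',
#         'USER': 'django',
#         'PASSWORD': 'secret',
#         'TEST_USER': 'test_django',
#         'TEST_TBLSPACE': 'test_xe',
#         'TEST_TBLSPACE_TMP': 'test_xe_temp',
#     }
# }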
|
nicholasserra/sentry
|
refs/heads/master
|
src/sentry/models/groupsnooze.py
|
4
|
from __future__ import absolute_import
from django.db import models
from sentry.db.models import Model, FlexibleForeignKey, sane_repr
class GroupSnooze(Model):
__core__ = False
group = FlexibleForeignKey('sentry.Group', unique=True)
until = models.DateTimeField()
class Meta:
db_table = 'sentry_groupsnooze'
app_label = 'sentry'
__repr__ = sane_repr('group_id')
|
CiscoSystems/horizon
|
refs/heads/master
|
horizon/test/test_dashboards/cats/kittens/panel.py
|
102
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import horizon
from horizon.test.test_dashboards.cats import dashboard
class Kittens(horizon.Panel):
name = "Kittens"
slug = "kittens"
permissions = ("horizon.test",)
dashboard.Cats.register(Kittens)
|
catapult-project/catapult-csm
|
refs/heads/master
|
tracing/tracing/value/convert_chart_json.py
|
7
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import tracing_project
import vinn
def ConvertChartJson(chart_json):
"""Convert chart_json to Histograms.
Args:
chart_json: path to a file containing chart-json
Returns:
a Vinn result object whose 'returncode' indicates whether there was an
exception, and whose 'stdout' contains HistogramSet json.
"""
return vinn.RunFile(
os.path.join(os.path.dirname(__file__),
'convert_chart_json_cmdline.html'),
source_paths=tracing_project.TracingProject().source_paths,
js_args=[os.path.abspath(chart_json)])
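if __name__ == '__main__':
  # Minimal usage sketch (not part of the original module): convert the
  # chart-json file named on the command line and echo the HistogramSet json.
  import sys
  result = ConvertChartJson(sys.argv[1])
  sys.stdout.write(result.stdout)
  sys.exit(result.returncode)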
|
codingforentrepreneurs/marketplace
|
refs/heads/master
|
src/cart/views.py
|
1
|
from django.contrib import messages
from django.shortcuts import render_to_response, Http404, RequestContext, HttpResponseRedirect
from django.core.urlresolvers import reverse
from products.models import Product
from products.views import check_product #takes user and product as args
from .models import Cart, CartItem
def cart(request):
    try:
        cart_id = request.session['cart_id']
    except KeyError:
        cart_id = False
if cart_id:
cart = Cart.objects.get(id=cart_id)
else:
cart = False
items = CartItem.objects.filter(cart=cart)
if len(items) > 0:
exists = True
else:
exists = False
return render_to_response('cart/view_cart.html', locals(), context_instance=RequestContext(request))
def update_cart(request, id):
    try:
        product = Product.objects.get(id=id)
    except Product.DoesNotExist:
        product = False
    try:
        cart_id = request.session['cart_id']
    except KeyError:
        cart_id = False
try:
cart = Cart.objects.get(id=cart_id)
except Cart.DoesNotExist:
cart = Cart()
cart.save()
request.session['cart_id'] = cart.id
if product:
new_item, created = CartItem.objects.get_or_create(cart=cart, product=product)
if created:
new_item.cart = cart
new_item.save()
messages.success(request, 'cart item added')
else:
new_item.delete()
messages.success(request, 'cart item removed')
return HttpResponseRedirect(reverse('cart'))
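# Illustrative urlconf sketch (hypothetical patterns, not part of the original
# module): the reverse() calls above assume named urls along these lines:
#
#   url(r'^cart/$', 'cart.views.cart', name='cart'),
#   url(r'^cart/update/(?P<id>\d+)/$', 'cart.views.update_cart', name='update_cart'),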
|
danmergens/mi-instrument
|
refs/heads/master
|
mi/core/instrument/dataset_data_particle.py
|
4
|
#!/usr/bin/env python
"""
@package mi.core.instrument.data_particle_generator Base data particle generator
@file mi/core/instrument/data_particle_generator.py
@author Steve Foley
@brief Contains logic to generate data particles to be exchanged between
the driver and agent. This involves a JSON interchange format
"""
import time
import ntplib
import base64
import json
from mi.core.common import BaseEnum
from mi.core.exceptions import SampleException, ReadOnlyException, NotImplementedException, InstrumentParameterException
from mi.core.log import get_logger
log = get_logger()
__author__ = 'Steve Foley'
__license__ = 'Apache 2.0'
class CommonDataParticleType(BaseEnum):
"""
This enum defines all the common particle types defined in the modules. Currently there is only one, but by
using an enum here we have the opportunity to define more common data particles.
"""
RAW = "raw"
class DataParticleKey(BaseEnum):
PKT_FORMAT_ID = "pkt_format_id"
PKT_VERSION = "pkt_version"
STREAM_NAME = "stream_name"
INTERNAL_TIMESTAMP = "internal_timestamp"
PORT_TIMESTAMP = "port_timestamp"
DRIVER_TIMESTAMP = "driver_timestamp"
PREFERRED_TIMESTAMP = "preferred_timestamp"
QUALITY_FLAG = "quality_flag"
VALUES = "values"
VALUE_ID = "value_id"
VALUE = "value"
BINARY = "binary"
NEW_SEQUENCE = "new_sequence"
class DataParticleValue(BaseEnum):
JSON_DATA = "JSON_Data"
ENG = "eng"
OK = "ok"
CHECKSUM_FAILED = "checksum_failed"
OUT_OF_RANGE = "out_of_range"
INVALID = "invalid"
QUESTIONABLE = "questionable"
class DataParticle(object):
"""
This class is responsible for storing and ultimately generating data
particles in the designated format from the associated inputs. It
fills in fields as necessary, and is a valid Data Particle
that can be sent up to the InstrumentAgent.
It is the intent that this class is subclassed as needed if an instrument must
modify fields in the outgoing packet. The hope is to have most of the superclass
code be called by the child class with just values overridden as needed.
"""
# data particle type is intended to be defined in each derived data particle class. This value should be unique
# for all data particles. Best practice is to access this variable using the accessor method:
# data_particle_type()
_data_particle_type = None
def __init__(self, raw_data,
port_timestamp=None,
internal_timestamp=None,
preferred_timestamp=None,
quality_flag=DataParticleValue.OK,
new_sequence=None):
""" Build a particle seeded with appropriate information
@param raw_data The raw data used in the particle
"""
if new_sequence is not None and not isinstance(new_sequence, bool):
raise TypeError("new_sequence is not a bool")
self.contents = {
DataParticleKey.PKT_FORMAT_ID: DataParticleValue.JSON_DATA,
DataParticleKey.PKT_VERSION: 1,
DataParticleKey.PORT_TIMESTAMP: port_timestamp,
DataParticleKey.INTERNAL_TIMESTAMP: internal_timestamp,
DataParticleKey.DRIVER_TIMESTAMP: ntplib.system_to_ntp_time(time.time()),
DataParticleKey.PREFERRED_TIMESTAMP: preferred_timestamp,
DataParticleKey.QUALITY_FLAG: quality_flag,
}
self._encoding_errors = []
if new_sequence is not None:
self.contents[DataParticleKey.NEW_SEQUENCE] = new_sequence
self.raw_data = raw_data
self._values = None
def __eq__(self, arg):
"""
Quick equality check for testing purposes. If they have the same raw
data, timestamp, they are the same enough for this particle
"""
allowed_diff = .000001
if self._data_particle_type != arg._data_particle_type:
log.debug('Data particle type does not match: %s %s', self._data_particle_type, arg._data_particle_type)
return False
if self.raw_data != arg.raw_data:
log.debug('Raw data does not match')
return False
t1 = self.contents[DataParticleKey.INTERNAL_TIMESTAMP]
t2 = arg.contents[DataParticleKey.INTERNAL_TIMESTAMP]
if (t1 is None) or (t2 is None):
tdiff = allowed_diff
else:
tdiff = abs(t1 - t2)
if tdiff > allowed_diff:
log.debug('Timestamp %s does not match %s', t1, t2)
return False
generated1 = json.loads(self.generate())
generated2 = json.loads(arg.generate())
missing, differing = self._compare(generated1, generated2, ignore_keys=[DataParticleKey.DRIVER_TIMESTAMP,
DataParticleKey.PREFERRED_TIMESTAMP])
if missing:
log.error('Key mismatch between particle dictionaries: %r', missing)
return False
        if differing:
            log.error('Value mismatch between particle dictionaries: %r', differing)
            return False
        return True
@staticmethod
def _compare(d1, d2, ignore_keys=None):
ignore_keys = ignore_keys if ignore_keys else []
missing = set(d1).symmetric_difference(d2)
differing = {}
for k in d1:
if k in ignore_keys or k in missing:
continue
if d1[k] != d2[k]:
differing[k] = (d1[k], d2[k])
return missing, differing
def set_internal_timestamp(self, timestamp=None, unix_time=None):
"""
Set the internal timestamp
@param timestamp: NTP timestamp to set
        @param unix_time: Unix time as returned from time.time()
@raise InstrumentParameterException if timestamp or unix_time not supplied
"""
if timestamp is None and unix_time is None:
raise InstrumentParameterException("timestamp or unix_time required")
if unix_time is not None:
timestamp = ntplib.system_to_ntp_time(unix_time)
# Do we want this to happen here or in down stream processes?
# if(not self._check_timestamp(timestamp)):
# raise InstrumentParameterException("invalid timestamp")
self.contents[DataParticleKey.INTERNAL_TIMESTAMP] = float(timestamp)
def set_port_timestamp(self, timestamp=None, unix_time=None):
"""
Set the port timestamp
@param timestamp: NTP timestamp to set
@param unix_time: Unix time as returned from time.time()
@raise InstrumentParameterException if timestamp or unix_time not supplied
"""
if timestamp is None and unix_time is None:
raise InstrumentParameterException("timestamp or unix_time required")
if unix_time is not None:
timestamp = ntplib.system_to_ntp_time(unix_time)
# Do we want this to happen here or in down stream processes?
if not self._check_timestamp(timestamp):
raise InstrumentParameterException("invalid timestamp")
self.contents[DataParticleKey.PORT_TIMESTAMP] = float(timestamp)
def set_value(self, id, value):
"""
Set a content value, restricted as necessary
@param id The ID of the value to set, should be from DataParticleKey
@param value The value to set
@raises ReadOnlyException If the parameter cannot be set
"""
if (id == DataParticleKey.INTERNAL_TIMESTAMP) and (self._check_timestamp(value)):
self.contents[DataParticleKey.INTERNAL_TIMESTAMP] = value
else:
raise ReadOnlyException("Parameter %s not able to be set to %s after object creation!" %
(id, value))
def get_value(self, id):
""" Return a stored value
@param id The ID (from DataParticleKey) for the parameter to return
@raises NotImplementedException If there is an invalid id
"""
if DataParticleKey.has(id):
return self.contents[id]
else:
raise NotImplementedException("Value %s not available in particle!", id)
def data_particle_type(self):
"""
Return the data particle type (aka stream name)
@raise: NotImplementedException if _data_particle_type is not set
"""
if self._data_particle_type is None:
raise NotImplementedException("_data_particle_type not initialized")
return self._data_particle_type
def generate_dict(self):
"""
Generate a simple dictionary of sensor data and timestamps, without
going to JSON. This is useful for the times when JSON is not needed to
go across an interface. There are times when particles are used
internally to a component/process/module/etc.
@retval A python dictionary with the proper timestamps and data values
        @throws InstrumentDriverException if there is a problem with the inputs
"""
# verify preferred timestamp exists in the structure...
if not self._check_preferred_timestamps():
raise SampleException("Preferred timestamp not in particle!")
# build response structure
self._encoding_errors = []
if self._values is None:
self._values = self._build_parsed_values()
result = self._build_base_structure()
result[DataParticleKey.STREAM_NAME] = self.data_particle_type()
result[DataParticleKey.VALUES] = self._values
return result
def generate(self, sorted=False):
"""
Generates a JSON_parsed packet from a sample dictionary of sensor data and
associates a timestamp with it
        @param sorted Return a sorted json dict; useful for testing, but slow,
        so don't do it unless it is important
@return A JSON_raw string, properly structured with port agent time stamp
and driver timestamp
@throws InstrumentDriverException If there is a problem with the inputs
"""
json_result = json.dumps(self.generate_dict(), sort_keys=sorted)
return json_result
def _build_parsed_values(self):
"""
        Build values of a parsed structure. Just the values are built so
        that a child class can override this method, calling it with
        super() to get the base structure before modification
@return the values tag for this data structure ready to JSONify
@raises SampleException when parsed values can not be properly returned
"""
raise SampleException("Parsed values block not overridden")
def _build_base_structure(self):
"""
Build the base/header information for an output structure.
Follow on methods can then modify it by adding or editing values.
@return A fresh copy of a core structure to be exported
"""
result = dict(self.contents)
# clean out optional fields that were missing
if not self.contents[DataParticleKey.PORT_TIMESTAMP]:
del result[DataParticleKey.PORT_TIMESTAMP]
if not self.contents[DataParticleKey.INTERNAL_TIMESTAMP]:
del result[DataParticleKey.INTERNAL_TIMESTAMP]
return result
def _check_timestamp(self, timestamp):
"""
Check to make sure the timestamp is reasonable
@param timestamp An NTP4 formatted timestamp (64bit)
        @return True if the timestamp is reasonable or None, False otherwise
"""
if timestamp is None:
return True
if not isinstance(timestamp, float):
return False
# is it sufficiently in the future to be unreasonable?
if timestamp > ntplib.system_to_ntp_time(time.time() + (86400 * 365)):
return False
else:
return True
def _check_preferred_timestamps(self):
"""
        Check to make sure the preferred timestamp indicated in the
        particle is actually set.
@throws SampleException When there is a problem with the preferred
timestamp in the sample.
"""
        if self.contents[DataParticleKey.PREFERRED_TIMESTAMP] is None:
            raise SampleException("Missing preferred timestamp, %s, in particle" %
                                  DataParticleKey.PREFERRED_TIMESTAMP)
# This should be handled downstream. Don't want to not publish data because
# the port agent stopped putting out timestamps
# if self.contents[self.contents[DataParticleKey.PREFERRED_TIMESTAMP]] == None:
# raise SampleException("Preferred timestamp, %s, is not defined" %
# self.contents[DataParticleKey.PREFERRED_TIMESTAMP])
return True
def _encode_value(self, name, value, encoding_function):
"""
Encode a value using the encoding function, if it fails store the error in a queue
"""
encoded_val = None
try:
encoded_val = encoding_function(value)
        except Exception as e:
            log.error("Data particle error encoding. Name:%s Value:%s Error:%s",
                      name, value, e)
            self._encoding_errors.append({name: value})
return {DataParticleKey.VALUE_ID: name,
DataParticleKey.VALUE: encoded_val}
def get_encoding_errors(self):
"""
Return the encoding errors list
"""
return self._encoding_errors
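# --- Illustrative sketch (editor addition, not part of the original driver):
# a minimal DataParticle subclass assuming raw_data is a dict like
# {'temperature': '21.5'}. It shows the intended override pattern for
# _build_parsed_values(), using _encode_value() so encoding failures are
# queued in _encoding_errors rather than aborting generation.
class ExampleTemperatureParticle(DataParticle):
    _data_particle_type = 'example_temperature'
    def _build_parsed_values(self):
        # raw_data is assumed to carry a single hypothetical field.
        return [self._encode_value('temperature',
                                   self.raw_data.get('temperature'),
                                   float)]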
class RawDataParticleKey(BaseEnum):
PAYLOAD = "raw"
LENGTH = "length"
TYPE = "type"
CHECKSUM = "checksum"
class RawDataParticle(DataParticle):
"""
    This class is a common data particle for generating data particles of raw
data.
It essentially is a translation of the port agent packet
"""
_data_particle_type = CommonDataParticleType.RAW
def _build_parsed_values(self):
"""
Build a particle out of a port agent packet.
@returns A list that is ready to be added to the "values" tag before
the structure is JSONified
"""
port_agent_packet = self.raw_data
if not isinstance(port_agent_packet, dict):
raise SampleException("raw data not a dictionary")
for param in ["raw", "length", "type", "checksum"]:
if param not in port_agent_packet:
raise SampleException("raw data not a complete port agent packet. missing %s" % param)
payload = None
length = None
type = None
checksum = None
# Attempt to convert values
try:
payload = base64.b64encode(port_agent_packet.get("raw"))
except TypeError:
pass
try:
length = int(port_agent_packet.get("length"))
except TypeError:
pass
try:
type = int(port_agent_packet.get("type"))
except TypeError:
pass
try:
checksum = int(port_agent_packet.get("checksum"))
except TypeError:
pass
result = [{
DataParticleKey.VALUE_ID: RawDataParticleKey.PAYLOAD,
DataParticleKey.VALUE: payload,
DataParticleKey.BINARY: True},
{
DataParticleKey.VALUE_ID: RawDataParticleKey.LENGTH,
DataParticleKey.VALUE: length},
{
DataParticleKey.VALUE_ID: RawDataParticleKey.TYPE,
DataParticleKey.VALUE: type},
{
DataParticleKey.VALUE_ID: RawDataParticleKey.CHECKSUM,
DataParticleKey.VALUE: checksum},
]
return result
|
macks22/dblp
|
refs/heads/master
|
api/db.py
|
2
|
import os
import sys
import logging
import argparse
import sqlalchemy as sa
from sqlalchemy import (
Table, Column,
Integer, String, TEXT,
ForeignKey, PrimaryKeyConstraint,
MetaData
)
import config
# engine = sa.create_engine('sqlite:///dblp.sql', echo=False)
connection_string = 'postgresql://%s:%s@%s/%s' % (
config.username,
config.password,
config.hostname,
config.dbname
)
logging.info('db connect using connection string: %s', connection_string)
engine = sa.create_engine(connection_string)
metadata = MetaData()
papers = Table('papers', metadata,
Column('id', Integer, primary_key=True),
Column('title', String(255), nullable=False),
Column('abstract', TEXT),
Column('venue', String(255)),
Column('year', Integer)
)
person = Table('person', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(255), unique=True)
)
authors = Table('authors', metadata,
Column('person', Integer,
ForeignKey('person.id', onupdate='CASCADE', ondelete='CASCADE')),
Column('paper', Integer,
ForeignKey('papers.id', onupdate='CASCADE', ondelete='CASCADE')),
PrimaryKeyConstraint('person', 'paper', name='authors_pk')
)
refs = Table('refs', metadata,
Column('paper', Integer,
ForeignKey('papers.id', onupdate='CASCADE', ondelete='CASCADE')),
Column('ref', Integer),
# Column('ref', Integer,
# ForeignKey('papers.id', onupdate='CASCADE', ondelete='CASCADE')),
PrimaryKeyConstraint('paper', 'ref', name='refs_pk')
)
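# --- Illustrative sketch (editor addition, not in the original module): once
# metadata.create_all(engine) has run, rows can be inserted and joined with
# SQLAlchemy Core. The sample values below are hypothetical.
def _example_usage():
    conn = engine.connect()
    conn.execute(papers.insert().values(
        id=1, title='A Sample Paper', venue='SIGMOD', year=2014))
    conn.execute(person.insert().values(id=1, name='Ada Lovelace'))
    conn.execute(authors.insert().values(person=1, paper=1))
    query = sa.select([papers.c.title]).select_from(
        papers.join(authors, papers.c.id == authors.c.paper))
    for row in conn.execute(query):
        print(row)
    conn.close()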
def make_parser():
parser = argparse.ArgumentParser(
description='create/update dblp database')
parser.add_argument(
'-v', '--verbose', action='store_true',
help='enable verbose logging output')
return parser
if __name__ == "__main__":
parser = make_parser()
args = parser.parse_args()
if args.verbose:
logger = logging.getLogger()
logger.setLevel(logging.INFO)
        engine.echo = True
    logging.info('dropping all tables')
    metadata.drop_all(engine)
    logging.info('creating database from schema')
metadata.create_all(engine)
sys.exit(0)
|
mrbox/django
|
refs/heads/master
|
tests/reverse_lookup/tests.py
|
326
|
from __future__ import unicode_literals
from django.core.exceptions import FieldError
from django.test import TestCase
from .models import Choice, Poll, User
class ReverseLookupTests(TestCase):
def setUp(self):
john = User.objects.create(name="John Doe")
jim = User.objects.create(name="Jim Bo")
first_poll = Poll.objects.create(
question="What's the first question?",
creator=john
)
second_poll = Poll.objects.create(
question="What's the second question?",
creator=jim
)
Choice.objects.create(
poll=first_poll,
related_poll=second_poll,
name="This is the answer."
)
def test_reverse_by_field(self):
u1 = User.objects.get(
poll__question__exact="What's the first question?"
)
self.assertEqual(u1.name, "John Doe")
u2 = User.objects.get(
poll__question__exact="What's the second question?"
)
self.assertEqual(u2.name, "Jim Bo")
def test_reverse_by_related_name(self):
p1 = Poll.objects.get(poll_choice__name__exact="This is the answer.")
self.assertEqual(p1.question, "What's the first question?")
p2 = Poll.objects.get(
related_choice__name__exact="This is the answer.")
self.assertEqual(p2.question, "What's the second question?")
def test_reverse_field_name_disallowed(self):
"""
        If a related_name is given, you can't use the field name instead.
"""
self.assertRaises(FieldError, Poll.objects.get,
choice__name__exact="This is the answer")
|
zhukaixy/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/tkinter/scrolledtext.py
|
164
|
"""A ScrolledText widget feels like a text widget but also has a
vertical scroll bar on its right. (Later, options may be added to
add a horizontal bar as well, to make the bars disappear
automatically when not needed, to move them to the other side of the
window, etc.)
Configuration options are passed to the Text widget.
A Frame widget is inserted between the master and the text, to hold
the Scrollbar widget.
Most method calls are inherited from the Text widget; Pack, Grid and
Place methods are redirected to the Frame widget however.
"""
__all__ = ['ScrolledText']
from tkinter import Frame, Text, Scrollbar, Pack, Grid, Place
from tkinter.constants import RIGHT, LEFT, Y, BOTH
class ScrolledText(Text):
def __init__(self, master=None, **kw):
self.frame = Frame(master)
self.vbar = Scrollbar(self.frame)
self.vbar.pack(side=RIGHT, fill=Y)
kw.update({'yscrollcommand': self.vbar.set})
Text.__init__(self, self.frame, **kw)
self.pack(side=LEFT, fill=BOTH, expand=True)
self.vbar['command'] = self.yview
# Copy geometry methods of self.frame without overriding Text
# methods -- hack!
text_meths = vars(Text).keys()
methods = vars(Pack).keys() | vars(Grid).keys() | vars(Place).keys()
methods = methods.difference(text_meths)
for m in methods:
if m[0] != '_' and m != 'config' and m != 'configure':
setattr(self, m, getattr(self.frame, m))
def __str__(self):
return str(self.frame)
def example():
from tkinter.constants import END
stext = ScrolledText(bg='white', height=10)
stext.insert(END, __doc__)
stext.pack(fill=BOTH, side=LEFT, expand=True)
stext.focus_set()
stext.mainloop()
if __name__ == "__main__":
example()
|
suyashphadtare/sajil-final-frappe
|
refs/heads/develop
|
frappe/core/doctype/page/__init__.py
|
2292
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
|
egelmex/ajenti
|
refs/heads/master
|
ajenti/plugins/plugins/main.py
|
5
|
from ajenti.api import *
from ajenti.ui import *
from ajenti.plugmgr import PluginLoader, RepositoryManager
class PluginManager(CategoryPlugin, URLHandler):
text = 'Plugins'
icon = '/dl/plugins/icon.png'
folder = 'bottom'
def on_session_start(self):
self._mgr = RepositoryManager(self.app.config)
def on_init(self):
self._mgr.refresh()
def get_counter(self):
return len(self._mgr.upgradable) or None
def get_ui(self):
ui = self.app.inflate('plugins:main')
inst = self._mgr.installed
for k in inst:
row = self.app.inflate('plugins:item')
row.find('name').set('text', k.name)
row.find('desc').set('text', k.desc)
row.find('icon').set('file', k.icon)
row.find('version').set('text', k.version)
row.find('author').set('text', k.author)
row.find('author').set('url', k.homepage)
row.append('buttons', UI.TipIcon(
icon='/dl/core/ui/stock/delete.png',
text='Uninstall',
id='remove/'+k.id,
warning='Completely remove plugin "%s"'%k.name,
))
if k.problem:
row.find('status').set('file', '/dl/plugins/broken.png')
row.append('reqs', UI.Icon(icon='/dl/core/ui/stock/warning.png', text=k.problem))
else:
row.find('status').set('file', '/dl/plugins/good.png')
ui.append('list', row)
lst = self._mgr.available
btn = UI.Button(text='Check for updates', id='update')
if len(lst) == 0:
btn['text'] = 'Download plugin list'
for k in lst:
row = self.app.inflate('plugins:item')
row.find('name').set('text', k.name)
row.find('desc').set('text', k.description)
row.find('icon').set('file', k.icon)
row.find('version').set('text', k.version)
row.find('author').set('text', k.author)
row.find('author').set('url', k.homepage)
row.find('status').set('file', '/dl/plugins/none.png')
for p in inst:
if k.id == p.id and not p.problem:
row.find('status').set('file', '/dl/plugins/upgrade.png')
reqs = k.str_req()
if reqs == '':
row.append('buttons', UI.TipIcon(
icon='/dl/core/ui/stock/download.png',
text='Download and install',
id='install/'+k.id,
))
else:
row.append('reqs', UI.Icon(icon='/dl/core/ui/stock/warning.png', text=reqs))
ui.append('avail', row)
return ui
def get_ui_upload(self):
return UI.Uploader(
url='/upload_plugin',
text='Install'
)
@url('^/upload_plugin$')
def upload(self, req, sr):
vars = get_environment_vars(req)
f = vars.getvalue('file', None)
try:
self._mgr.install_stream(f)
        except Exception:
pass
sr('301 Moved Permanently', [('Location', '/')])
return ''
@event('button/click')
def on_click(self, event, params, vars=None):
if params[0] == 'update':
self._mgr.update_list()
self.put_message('info', 'Plugin list updated')
if params[0] == 'remove':
self._mgr.remove(params[1])
self.put_message('info', 'Plugin removed. Refresh page for changes to take effect.')
if params[0] == 'reload':
try:
PluginLoader.unload(params[1])
            except Exception:
pass
try:
PluginLoader.load(params[1])
            except Exception:
pass
self.put_message('info', 'Plugin reloaded. Refresh page for changes to take effect.')
if params[0] == 'restart':
self.app.restart()
if params[0] == 'install':
self._mgr.install(params[1], load=True)
self.put_message('info', 'Plugin installed. Refresh page for changes to take effect.')
ComponentManager.get().rescan()
|
TheWardoctor/Wardoctors-repo
|
refs/heads/master
|
script.module.schism.common/lib/requests/packages/chardet/charsetprober.py
|
3126
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import re
class CharSetProber:
def __init__(self):
pass
def reset(self):
self._mState = constants.eDetecting
def get_charset_name(self):
return None
def feed(self, aBuf):
pass
def get_state(self):
return self._mState
def get_confidence(self):
return 0.0
def filter_high_bit_only(self, aBuf):
aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
return aBuf
def filter_without_english_letters(self, aBuf):
aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
return aBuf
def filter_with_english_letters(self, aBuf):
# TODO
return aBuf
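# --- Illustrative sketch (editor addition, not part of chardet): a trivial
# prober wired to the interface above. It reports ASCII with token
# confidence and bails out (eNotMe) as soon as a high-bit byte is seen;
# real probers drive self._mState from feed() in the same way.
class _ExampleAsciiProber(CharSetProber):
    def __init__(self):
        CharSetProber.__init__(self)
        self.reset()
    def get_charset_name(self):
        return 'ascii'
    def feed(self, aBuf):
        # filter_high_bit_only() collapses ASCII runs to b' ', so anything
        # left after strip() must contain high-bit bytes.
        if self.filter_high_bit_only(aBuf).strip():
            self._mState = constants.eNotMe
        return self.get_state()
    def get_confidence(self):
        return 0.01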
|
merutak/python-social-auth
|
refs/heads/master
|
social/backends/xing.py
|
83
|
"""
XING OAuth1 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/xing.html
"""
from social.backends.oauth import BaseOAuth1
class XingOAuth(BaseOAuth1):
"""Xing OAuth authentication backend"""
name = 'xing'
AUTHORIZATION_URL = 'https://api.xing.com/v1/authorize'
REQUEST_TOKEN_URL = 'https://api.xing.com/v1/request_token'
ACCESS_TOKEN_URL = 'https://api.xing.com/v1/access_token'
SCOPE_SEPARATOR = '+'
EXTRA_DATA = [
('id', 'id'),
('user_id', 'user_id')
]
def get_user_details(self, response):
"""Return user details from Xing account"""
email = response.get('email', '')
fullname, first_name, last_name = self.get_user_names(
first_name=response['first_name'],
last_name=response['last_name']
)
return {'username': first_name + last_name,
'fullname': fullname,
'first_name': first_name,
'last_name': last_name,
'email': email}
def user_data(self, access_token, *args, **kwargs):
"""Return user data provided"""
profile = self.get_json(
'https://api.xing.com/v1/users/me.json',
auth=self.oauth_auth(access_token)
)['users'][0]
return {
'user_id': profile['id'],
'id': profile['id'],
'first_name': profile['first_name'],
'last_name': profile['last_name'],
'email': profile['active_email']
}
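# --- Illustrative sketch (editor addition): wiring this backend into a
# Django project typically means listing it in AUTHENTICATION_BACKENDS and
# supplying the OAuth1 consumer credentials. The setting names follow the
# python-social-auth convention; the values are placeholders.
# AUTHENTICATION_BACKENDS = (
#     'social.backends.xing.XingOAuth',
#     'django.contrib.auth.backends.ModelBackend',
# )
# SOCIAL_AUTH_XING_KEY = '<consumer-key>'
# SOCIAL_AUTH_XING_SECRET = '<consumer-secret>'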
|
sdague/home-assistant
|
refs/heads/dev
|
tests/components/humidifier/test_intent.py
|
12
|
"""Tests for the humidifier intents."""
from homeassistant.components.humidifier import (
ATTR_AVAILABLE_MODES,
ATTR_HUMIDITY,
ATTR_MODE,
DOMAIN,
SERVICE_SET_HUMIDITY,
SERVICE_SET_MODE,
intent,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers.intent import IntentHandleError
from tests.common import async_mock_service
async def test_intent_set_humidity(hass):
"""Test the set humidity intent."""
hass.states.async_set(
"humidifier.bedroom_humidifier", STATE_ON, {ATTR_HUMIDITY: 40}
)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
await intent.async_setup_intents(hass)
result = await hass.helpers.intent.async_handle(
"test",
intent.INTENT_HUMIDITY,
{"name": {"value": "Bedroom humidifier"}, "humidity": {"value": "50"}},
)
await hass.async_block_till_done()
assert result.speech["plain"]["speech"] == "The bedroom humidifier is set to 50%"
assert len(turn_on_calls) == 0
assert len(humidity_calls) == 1
call = humidity_calls[0]
assert call.domain == DOMAIN
assert call.service == SERVICE_SET_HUMIDITY
assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
assert call.data.get(ATTR_HUMIDITY) == 50
async def test_intent_set_humidity_and_turn_on(hass):
"""Test the set humidity intent for turned off humidifier."""
hass.states.async_set(
"humidifier.bedroom_humidifier", STATE_OFF, {ATTR_HUMIDITY: 40}
)
humidity_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY)
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
await intent.async_setup_intents(hass)
result = await hass.helpers.intent.async_handle(
"test",
intent.INTENT_HUMIDITY,
{"name": {"value": "Bedroom humidifier"}, "humidity": {"value": "50"}},
)
await hass.async_block_till_done()
assert (
result.speech["plain"]["speech"]
== "Turned bedroom humidifier on and set humidity to 50%"
)
assert len(turn_on_calls) == 1
call = turn_on_calls[0]
assert call.domain == DOMAIN
assert call.service == SERVICE_TURN_ON
assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
assert len(humidity_calls) == 1
call = humidity_calls[0]
assert call.domain == DOMAIN
assert call.service == SERVICE_SET_HUMIDITY
assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
assert call.data.get(ATTR_HUMIDITY) == 50
async def test_intent_set_mode(hass):
"""Test the set mode intent."""
hass.states.async_set(
"humidifier.bedroom_humidifier",
STATE_ON,
{
ATTR_HUMIDITY: 40,
ATTR_SUPPORTED_FEATURES: 1,
ATTR_AVAILABLE_MODES: ["home", "away"],
ATTR_MODE: "home",
},
)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
await intent.async_setup_intents(hass)
result = await hass.helpers.intent.async_handle(
"test",
intent.INTENT_MODE,
{"name": {"value": "Bedroom humidifier"}, "mode": {"value": "away"}},
)
await hass.async_block_till_done()
assert (
result.speech["plain"]["speech"]
== "The mode for bedroom humidifier is set to away"
)
assert len(turn_on_calls) == 0
assert len(mode_calls) == 1
call = mode_calls[0]
assert call.domain == DOMAIN
assert call.service == SERVICE_SET_MODE
assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
assert call.data.get(ATTR_MODE) == "away"
async def test_intent_set_mode_and_turn_on(hass):
"""Test the set mode intent."""
hass.states.async_set(
"humidifier.bedroom_humidifier",
STATE_OFF,
{
ATTR_HUMIDITY: 40,
ATTR_SUPPORTED_FEATURES: 1,
ATTR_AVAILABLE_MODES: ["home", "away"],
ATTR_MODE: "home",
},
)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
turn_on_calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
await intent.async_setup_intents(hass)
result = await hass.helpers.intent.async_handle(
"test",
intent.INTENT_MODE,
{"name": {"value": "Bedroom humidifier"}, "mode": {"value": "away"}},
)
await hass.async_block_till_done()
assert (
result.speech["plain"]["speech"]
== "Turned bedroom humidifier on and set away mode"
)
assert len(turn_on_calls) == 1
call = turn_on_calls[0]
assert call.domain == DOMAIN
assert call.service == SERVICE_TURN_ON
assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
assert len(mode_calls) == 1
call = mode_calls[0]
assert call.domain == DOMAIN
assert call.service == SERVICE_SET_MODE
assert call.data.get(ATTR_ENTITY_ID) == "humidifier.bedroom_humidifier"
assert call.data.get(ATTR_MODE) == "away"
async def test_intent_set_mode_tests_feature(hass):
"""Test the set mode intent where modes are not supported."""
hass.states.async_set(
"humidifier.bedroom_humidifier", STATE_ON, {ATTR_HUMIDITY: 40}
)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
await intent.async_setup_intents(hass)
try:
await hass.helpers.intent.async_handle(
"test",
intent.INTENT_MODE,
{"name": {"value": "Bedroom humidifier"}, "mode": {"value": "away"}},
)
assert False, "handling intent should have raised"
except IntentHandleError as err:
assert str(err) == "Entity bedroom humidifier does not support modes"
assert len(mode_calls) == 0
async def test_intent_set_unknown_mode(hass):
"""Test the set mode intent for unsupported mode."""
hass.states.async_set(
"humidifier.bedroom_humidifier",
STATE_ON,
{
ATTR_HUMIDITY: 40,
ATTR_SUPPORTED_FEATURES: 1,
ATTR_AVAILABLE_MODES: ["home", "away"],
ATTR_MODE: "home",
},
)
mode_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_MODE)
await intent.async_setup_intents(hass)
try:
await hass.helpers.intent.async_handle(
"test",
intent.INTENT_MODE,
{"name": {"value": "Bedroom humidifier"}, "mode": {"value": "eco"}},
)
assert False, "handling intent should have raised"
except IntentHandleError as err:
assert str(err) == "Entity bedroom humidifier does not support eco mode"
assert len(mode_calls) == 0
|
marcoantoniooliveira/labweb
|
refs/heads/master
|
oscar/lib/python2.7/site-packages/django_extensions/management/commands/runscript.py
|
6
|
from django.core.management.base import BaseCommand
from django.conf import settings
from optparse import make_option
import imp
def vararg_callback(option, opt_str, opt_value, parser):
parser.rargs.insert(0, opt_value)
value = []
for arg in parser.rargs:
# stop on --foo like options
if arg[:2] == "--" and len(arg) > 2:
break
# stop on -a like options
if arg[:1] == "-":
break
value.append(arg)
del parser.rargs[:len(value)]
setattr(parser.values, option.dest, value)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--fixtures', action='store_true', dest='infixtures', default=False,
help='Only look in app.fixtures subdir'),
        make_option('--noscripts', action='store_true', dest='noscripts', default=False,
                    help="Don't look in app.scripts subdir"),
make_option('-s', '--silent', action='store_true', dest='silent', default=False,
help='Run silently, do not show errors and tracebacks'),
make_option('--no-traceback', action='store_true', dest='no_traceback', default=False,
help='Do not show tracebacks'),
make_option('--script-args', action='callback', callback=vararg_callback, type='string',
help='Space-separated argument list to be passed to the scripts. Note that the '
'same arguments will be passed to all named scripts.'),
)
help = 'Runs a script in django context.'
args = "script [script ...]"
def handle(self, *scripts, **options):
NOTICE = self.style.SQL_TABLE
NOTICE2 = self.style.SQL_FIELD
ERROR = self.style.ERROR
ERROR2 = self.style.NOTICE
subdirs = []
if not options.get('noscripts'):
subdirs.append('scripts')
if options.get('infixtures'):
subdirs.append('fixtures')
verbosity = int(options.get('verbosity', 1))
show_traceback = options.get('traceback', True)
if show_traceback is None:
# XXX: traceback is set to None from Django ?
show_traceback = True
no_traceback = options.get('no_traceback', False)
if no_traceback:
show_traceback = False
silent = options.get('silent', False)
if silent:
verbosity = 0
if len(subdirs) < 1:
print(NOTICE("No subdirs to run left."))
return
if len(scripts) < 1:
print(ERROR("Script name required."))
return
def run_script(mod, *script_args):
try:
mod.run(*script_args)
except Exception:
if silent:
return
if verbosity > 0:
print(ERROR("Exception while running run() in '%s'" % mod.__name__))
if show_traceback:
raise
def my_import(mod):
if verbosity > 1:
print(NOTICE("Check for %s" % mod))
# check if module exists before importing
try:
path = None
for package in mod.split('.')[:-1]:
module_tuple = imp.find_module(package, path)
path = imp.load_module(package, *module_tuple).__path__
imp.find_module(mod.split('.')[-1], path)
t = __import__(mod, [], [], [" "])
except (ImportError, AttributeError):
return False
#if verbosity > 1:
# print(NOTICE("Found script %s ..." % mod))
if hasattr(t, "run"):
if verbosity > 1:
print(NOTICE2("Found script '%s' ..." % mod))
#if verbosity > 1:
# print(NOTICE("found run() in %s. executing..." % mod))
return t
else:
if verbosity > 1:
print(ERROR2("Find script '%s' but no run() function found." % mod))
def find_modules_for_script(script):
""" find script module which contains 'run' attribute """
modules = []
# first look in apps
for app in settings.INSTALLED_APPS:
for subdir in subdirs:
mod = my_import("%s.%s.%s" % (app, subdir, script))
if mod:
modules.append(mod)
# try app.DIR.script import
sa = script.split(".")
for subdir in subdirs:
nn = ".".join(sa[:-1] + [subdir, sa[-1]])
mod = my_import(nn)
if mod:
modules.append(mod)
# try direct import
if script.find(".") != -1:
mod = my_import(script)
if mod:
modules.append(mod)
return modules
if options.get('script_args'):
script_args = options['script_args']
else:
script_args = []
for script in scripts:
modules = find_modules_for_script(script)
if not modules:
if verbosity > 0 and not silent:
print(ERROR("No module for script '%s' found" % script))
for mod in modules:
if verbosity > 1:
print(NOTICE2("Running script '%s' ..." % mod.__name__))
run_script(mod, *script_args)
|
miyosuda/intro-to-dl-android
|
refs/heads/master
|
HandWriting/jni-build/jni/include/tensorflow/python/kernel_tests/xent_op_test.py
|
3
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SoftmaxCrossEntropyWithLogits op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
import tensorflow as tf
class XentTest(tf.test.TestCase):
def _npXent(self, features, labels):
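    # Editor note: this is a NumPy reference implementation -- numerically
    # stable softmax (max-shifted exponentials), cross-entropy loss l, and
    # its gradient bp = probs - labels, mirroring the op's two outputs.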
batch_dim = 0
class_dim = 1
batch_size = features.shape[batch_dim]
e = np.exp(features -
np.reshape(np.amax(features, axis=class_dim), [batch_size, 1]))
probs = e / np.reshape(np.sum(e, axis=class_dim), [batch_size, 1])
bp = (probs - labels)
l = -np.sum(labels * np.log(probs + 1.0e-20), axis=1)
return l, bp
def _testXent(self, np_features, np_labels, use_gpu=False):
np_loss, np_backprop = self._npXent(np_features, np_labels)
with self.test_session(use_gpu=use_gpu) as sess:
loss = tf.nn.softmax_cross_entropy_with_logits(np_features, np_labels)
backprop = loss.op.outputs[1]
tf_loss, tf_backprop = sess.run([loss, backprop])
self.assertAllClose(np_loss, tf_loss)
self.assertAllClose(np_backprop, tf_backprop)
def _testAll(self, features, labels):
self._testXent(features, labels, use_gpu=False)
self._testXent(features, labels, use_gpu=True)
def _testSingleClass(self, use_gpu=False):
with self.test_session(use_gpu=use_gpu) as sess:
loss = tf.nn.softmax_cross_entropy_with_logits(
np.array([[1.], [-1.], [0.]]).astype(np.float32),
np.array([[-1.], [0.], [1.]]).astype(np.float32))
backprop = loss.op.outputs[1]
tf_loss, tf_backprop = sess.run([loss, backprop])
self.assertAllClose([0.0, 0.0, 0.0], tf_loss)
self.assertAllClose([[2.0], [1.0], [0.0]], tf_backprop)
def testSingleClass(self):
self._testSingleClass(True)
self._testSingleClass(False)
def testRankTooLarge(self):
np_features = np.array(
[[[1., 1., 1., 1.]], [[1., 2., 3., 4.]]]).astype(np.float32)
np_labels = np.array(
[[[0., 0., 0., 1.]], [[0., .5, .5, 0.]]]).astype(np.float32)
self.assertRaisesRegexp(
ValueError, "must have the same rank",
tf.nn.softmax_cross_entropy_with_logits, np_features, np_labels)
def testNpXent(self):
# We create 2 batches of logits for testing.
# batch 0 is the boring uniform distribution: 1, 1, 1, 1, with target 3.
# batch 1 has a bit of difference: 1, 2, 3, 4, with soft targets (1, 2).
features = [[1., 1., 1., 1.], [1., 2., 3., 4.]]
labels = [[0., 0., 0., 1.], [0., .5, .5, 0.]]
# For batch 0, we expect the uniform distribution: 0.25, 0.25, 0.25, 0.25
# With a hard target 3, the backprop is [0.25, 0.25, 0.25, -0.75]
# The loss for this batch is -log(0.25) = 1.386
#
# For batch 1, we have:
# exp(0) = 1
# exp(1) = 2.718
# exp(2) = 7.389
# exp(3) = 20.085
# SUM = 31.192
# So we have as probabilities:
# exp(0) / SUM = 0.032
# exp(1) / SUM = 0.087
# exp(2) / SUM = 0.237
# exp(3) / SUM = 0.644
# With a soft target (1, 2), the backprop is
# [0.032, 0.087 - 0.5 = -0.413, 0.237 - 0.5 = -0.263, 0.644]
    # The loss for this batch is 0.5 * -log(0.087) + 0.5 * -log(0.237)
    # = 1.9401, so np_loss across the two batches is [1.3862, 1.9401].
np_loss, np_backprop = self._npXent(np.array(features), np.array(labels))
self.assertAllClose(np.array([[0.25, 0.25, 0.25, -0.75],
[0.0321, -0.4129, -0.2632, 0.6439]]),
np_backprop,
rtol=1.e-3, atol=1.e-3)
self.assertAllClose(np.array([1.3862, 1.9401]), np_loss,
rtol=1.e-3, atol=1.e-3)
def testShapeMismatch(self):
with self.test_session():
with self.assertRaises(ValueError):
tf.nn.softmax_cross_entropy_with_logits(
[[0., 1.], [2., 3.]], [[0., 1., 0.], [1., 0., 0.]])
def testNotMatrix(self):
with self.test_session():
with self.assertRaises(ValueError):
tf.nn.softmax_cross_entropy_with_logits([0., 1., 2., 3.],
[0., 1., 0., 1.])
def testFloat(self):
self._testAll(
np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float32),
np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float32))
def testDouble(self):
self._testXent(
np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float64),
np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float64),
use_gpu=False)
def testGradient(self):
with self.test_session():
l = tf.constant([0.0, 0.0, 1.0, 0.0,
1.0, 0.0, 0.0, 0.0,
0.0, 0.5, 0.0, 0.5], shape=[3, 4],
dtype=tf.float64, name="l")
f = tf.constant([0.1, 0.2, 0.3, 0.4,
0.1, 0.4, 0.9, 1.6,
0.1, 0.8, 2.7, 6.4], shape=[3, 4],
dtype=tf.float64, name="f")
x = tf.nn.softmax_cross_entropy_with_logits(f, l, name="xent")
err = tf.test.compute_gradient_error(f, [3, 4], x, [3])
print("cross entropy gradient err = ", err)
self.assertLess(err, 5e-8)
if __name__ == "__main__":
tf.test.main()
|
yjhjstz/gyp
|
refs/heads/master
|
pylib/gyp/msvs_emulation.py
|
1407
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
# works more or less because most programs (including the compiler, etc.)
# use that function to handle command line arguments.
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
# %'s also need to be doubled otherwise they're interpreted as batch
# positional arguments. Also make sure to escape the % so that they're
# passed literally through escaping so they can be singled to just the
# original %. Otherwise, trying to pass the literal representation that
# looks like an environment variable to the shell (e.g. %PATH%) would fail.
arg = arg.replace('%', '%%')
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
# Finally, wrap the whole thing in quotes so that the above quote rule
# applies and whitespace isn't a word break.
return '"' + arg + '"'
def EncodeRspFileList(args):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
# Take care to normpath only the path in the case of 'call ../x.bat' because
# otherwise the whole thing is incorrectly interpreted as a path and not
# normalized correctly.
if not args: return ''
if args[0].startswith('call '):
call, program = args[0].split(' ', 1)
program = call + ' ' + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
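# Illustrative example (editor addition), on Windows where normpath yields
# backslashes:
#   EncodeRspFileList(['call ../x.bat', 'a b']) -> 'call ..\\x.bat "a b"'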
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
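# Illustrative examples (editor addition):
#   _GenericRetrieve({'a': {'b': 1}}, 0, ['a', 'b'])  -> 1
#   _GenericRetrieve({'a': {'b': 1}}, 0, ['a', 'x'])  -> 0  (the default)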
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
return _FindDirectXInstallation.dxsdk_dir
dxsdk_dir = os.environ.get('DXSDK_DIR')
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in p.communicate()[0].splitlines():
if 'InstallPath' in line:
dxsdk_dir = line.split(' ')[3] + "\\"
# Cache return value
_FindDirectXInstallation.dxsdk_dir = dxsdk_dir
return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
if vs_version.Path():
env['$(VSInstallDir)'] = vs_version.Path()
env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
# set. This happens when the SDK is sync'd via src-internal, rather than
# by typical end-user installation of the SDK. If it's not set, we don't
# want to leave the unexpanded variable in the path, so simply strip it.
dxsdk_dir = _FindDirectXInstallation()
env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
# Try to find an installation location for the Windows DDK by checking
# the WDK_DIR environment variable, may be None.
env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(
configs[0].get('msvs_system_include_dirs', []))
for config in configs[1:]:
system_includes = config.get('msvs_system_include_dirs', [])
all_system_includes = all_system_includes & OrderedSet(system_includes)
if not all_system_includes:
return None
# Expand macros in all_system_includes.
env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
expanded_system_includes = OrderedSet([ExpandMacros(include, env)
for include in all_system_includes])
if any(['$' in include for include in expanded_system_includes]):
# Some path relies on target-specific variables, bail.
return None
# Remove system includes shared by all targets from the targets.
for config in configs:
includes = config.get('msvs_system_include_dirs', [])
if includes: # Don't insert a msvs_system_include_dirs key if not needed.
# This must check the unexpanded includes list:
new_includes = [i for i in includes if i not in all_system_includes]
config['msvs_system_include_dirs'] = new_includes
return expanded_system_includes
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
supported_fields = [
('msvs_configuration_attributes', dict),
('msvs_settings', dict),
('msvs_system_include_dirs', list),
('msvs_disabled_warnings', list),
('msvs_precompiled_header', str),
('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str),
]
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.iteritems():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
unsupported_fields = [
'msvs_prebuild',
'msvs_postbuild',
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += ["%s not supported (target %s)." %
(field, spec['target_name'])]
if unsupported:
raise Exception('\n'.join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get('product_extension', None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
target_ext = '.' + self.GetExtension()
target_file_name = target_name + target_ext
replacements = {
'$(InputName)': '${root}',
'$(InputPath)': '${source}',
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(OutDir)\\': target_dir,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
'$(ProjectName)': self.spec['target_name'],
'$(TargetDir)\\': target_dir,
'$(TargetExt)': target_ext,
'$(TargetFileName)': target_file_name,
'$(TargetName)': target_name,
'$(TargetPath)': os.path.join(target_dir, target_file_name),
}
replacements.update(GetGlobalVSMacroEnv(self.vs_version))
return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
    # There are two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(('NoImportLibrary',), config)
return noimplib == 'true'
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
if safeseh == 'true':
asmflags.append('/safeseh')
return asmflags
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
cl('FloatingPointModel',
map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
default='0')
cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
cl('CallingConvention',
map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
cl('MinimalRebuild', map={'true': '/Gm'})
cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
    cl('ExceptionHandling', map={'1': 'sc', '2': 'a'}, prefix='/EH')
cl('DefaultCharIsUnsigned', map={'true': '/J'})
cl('TreatWChar_tAsBuiltInType',
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e', '2015'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = os.path.split(self.msvs_precompiled_header[config])[1]
return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags)
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, output_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
stack_reserve_size = self._Setting(
('VCLinkerTool', 'StackReserveSize'), config, default='')
if stack_reserve_size:
stack_commit_size = self._Setting(
('VCLinkerTool', 'StackCommitSize'), config, default='')
if stack_commit_size:
stack_commit_size = ',' + stack_commit_size
ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
if self.GetArch(config) == 'x86':
safeseh_default = 'true'
else:
safeseh_default = None
ld('ImageHasSafeExceptionHandlers',
map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
default=safeseh_default)
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
ldflags)
if not base_flags:
ldflags.append('/DYNAMICBASE')
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest, manifest_files = \
self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
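  # Illustrative sketch (not part of gyp): the ld() wrapper above applies
  # simple map/prefix rules to raw MSVS settings; values are hypothetical:
  #   'GenerateDebugInformation' == 'true' -> '/DEBUG'
  #   'TargetMachine' == '17'              -> '/MACHINE:X64'
  #   'SubSystem' == '1' (no min. version) -> '/SUBSYSTEM:CONSOLE'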
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation, build_dir):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
if generate_manifest != 'true':
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append('/MANIFESTUAC:NO')
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
manifest_files = []
generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
execution_level_map = {
'0': 'asInvoker',
'1': 'highestAvailable',
'2': 'requireAdministrator'
}
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
</requestedPrivileges>
</security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else:
inner = ''
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + '.generated.manifest'
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation:
flags.append('/ALLOWISOLATION')
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags)
rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
rcflags.append('/I' + gyp_to_ninja_path('.'))
rc('PreprocessorDefinitions', prefix='/d')
# /l arg must be in hex without leading '0x'
rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
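  # Illustrative example (paths hypothetical): for args=['echo', 'hi'] and
  # path_to_base='..\\..', the returned command line resembles:
  #   call "..\..\<cygwin_dir>\setup_env.bat" && set CYGWIN=nontsec &&
  #     bash -c "cd ../.. ; 'echo' 'hi'"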
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
  unset, or set to 1, we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any([action.get('explicit_idl_action', 0)
for action in spec.get('actions', [])])
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
return (self._HasExplicitRuleForExtension(spec, 'idl') or
self._HasExplicitIdlActions(spec))
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default),
config=config)
tlb = midl('TypeLibraryName', default='${root}.tlb')
header = midl('HeaderFileName', default='${root}.h')
dlldata = midl('DLLDataFileName', default='dlldata.c')
iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
proxy = midl('ProxyFileName', default='${root}_p.c')
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl('OutputDirectory', default='')
output = [header, dlldata, iid, proxy]
variables = [('tlb', tlb),
('h', header),
('dlldata', dlldata),
('iid', iid),
('proxy', proxy)]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
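# Illustrative checks of the matching rule above: C pairs only with C, and the
# C++ extensions pair with each other:
#   _LanguageMatchesForPch('.c', '.c')    -> True
#   _LanguageMatchesForPch('.cc', '.cpp') -> True
#   _LanguageMatchesForPch('.c', '.cpp')  -> False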
class PrecompiledHeader(object):
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
self.settings = settings
self.config = config
pch_source = self.settings.msvs_precompiled_source[self.config]
self.pch_source = gyp_to_build_path(pch_source)
filename, _ = os.path.splitext(pch_source)
self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(self, input, output, implicit, command,
cflags_c, cflags_cc, expand_special):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
if input == self.pch_source:
pch_output = ['/Yc' + self._PchHeader()]
if command == 'cxx':
return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
self.output_obj, [])
elif command == 'cc':
return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
self.output_obj, [])
return [], output, implicit
vs_version = None
def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'),
allow_fallback=False)
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
vs = GetVSVersion(generator_flags)
return vs.SetupScript()
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
for old, new in expansions.iteritems():
assert '$(' not in new, new
string = string.replace(old, new)
return string
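# Illustrative example (expansion dict hypothetical):
#   ExpandMacros(r'$(OutDir)\foo.dll', {'$(OutDir)': r'out\Release'})
#   returns r'out\Release\foo.dll'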
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
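# Illustrative example: _FormatAsEnvironmentBlock({'PATH': r'C:\bin'})
# returns 'PATH=C:\\bin\x00\x00' -- each key=value pair is NUL-terminated,
# and one extra NUL closes the block.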
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
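# Illustrative example (path hypothetical): given an output line such as
#   'LOC:C:\\VC\\bin\\cl.exe'
# _ExtractCLPath returns 'C:\\VC\\bin\\cl.exe'.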
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
system_includes, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
  If the following procedure for generating environment files does not meet
  your requirements (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to gyp to suppress file generation
  and use environment files you have prepared yourself."""
archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
cl_paths = {}
for arch in archs:
cl_paths[arch] = 'cl.exe'
return cl_paths
vs = GetVSVersion(generator_flags)
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
if system_includes:
system_includes = system_includes | OrderedSet(
env.get('INCLUDE', '').split(';'))
env['INCLUDE'] = ';'.join(system_includes)
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(('&&',
'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
no_specials = filter(lambda x: '$' not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = filter(lambda x: not os.path.exists(x), relative)
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running through WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
|
ArVID220u/judge
|
refs/heads/master
|
test_generator_model.py
|
2
|
#!/usr/bin/env python3
import random, os
from subprocess import check_output
# the correct (reference) program used to generate the expected outputs
correct_program = "solution"
in_file = "test_gen.in"
for index in range(1,20):
in_str = ""
n = random.randint(2,10)
in_str += str(n) + "\n"
for j in range(n):
l = random.randint(0,1000)
in_str += str(l)
if j != n-1:
in_str += " "
in_str += "\n"
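    # one generated input then looks like (illustrative; values are random):
    #   4
    #   12 907 444 3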
correct_out = ""
with open(in_file, "w") as infile:
infile.write(in_str)
with open(in_file, "rb") as infile:
correct_out = check_output([os.path.abspath(correct_program)], stdin=infile).decode("utf-8")
os.remove(in_file)
with open("testdata/secret" + str(index) + ".in", "w") as infile:
infile.write(in_str)
with open("testdata/secret" + str(index) + ".out", "w") as outfile:
outfile.write(correct_out)
|
necozay/tulip-control
|
refs/heads/master
|
tulip/graphics.py
|
1
|
# Copyright (c) 2013-2014 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
"""
Convenience functions for plotting
WARNING: The public functions dimension, newax, dom2vec, quiver will
eventually be removed. Their use in new applications is discouraged.
They come from https://github.com/johnyf/pyvectorized
"""
from __future__ import division
import logging
logger = logging.getLogger(__name__)
from warnings import warn
from itertools import izip_longest
import numpy as np
try:
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import axes3d
except Exception as e:
logger.error(e)
# from mayavi import mlab
def dimension(ndarray):
"""dimension of ndarray (DEPRECATED)
- ndim == 1:
dimension = 1
- ndim == 2:
dimension = shape[0]
"""
if ndarray.ndim < 2:
return ndarray.ndim
return ndarray.shape[0]
def newax(subplots=(1, 1), fig=None,
mode='list', dim=2):
"""Create (possibly multiple) new axes handles. (DEPRECATED)
@param fig: attach axes to this figure
@type fig: figure object,
should be consistent with C{dim}
@param subplots: number or layout of subplots
@type subplots: int or
2-tuple of subplot layout
@param mode: return the axes shaped as a
vector or as a matrix.
This is a convenience for later iterations
over the axes.
@type mode: 'matrix' | ['list']
@param dim: plot dimension:
        - if dim == 2, then use a standard matplotlib axes
        - if dim == 3, then use a matplotlib 3d-projection axes
        So the axes type depends on dim.
@return: C{(ax, fig)} where:
- C{ax}: axes created
- C{fig}: parent of ax
@rtype: list or list of lists,
depending on C{mode} above
"""
# layout or number of axes ?
try:
subplot_layout = tuple(subplots)
    except TypeError:
subplot_layout = (1, subplots)
# reasonable layout ?
if len(subplot_layout) != 2:
raise Exception('newax:'
'subplot layout should be 2-tuple or int.')
# which figure ?
if fig is None:
fig = plt.figure()
# create subplot(s)
(nv, nh) = subplot_layout
n = np.prod(subplot_layout)
try:
dim = tuple(dim)
    except TypeError:
# all same dim
dim = [dim] * n
    # standard (2d) or 3d-projection axes ?
ax = []
for (i, curdim) in enumerate(dim):
if curdim == 2:
curax = fig.add_subplot(nv, nh, i + 1)
ax.append(curax)
else:
curax = fig.add_subplot(nv, nh, i + 1, projection='3d')
ax.append(curax)
if curdim > 3:
warn('ndim > 3, but plot limited to 3.')
    if mode == 'matrix':
ax = list(_grouper(nh, ax))
# single axes ?
if subplot_layout == (1, 1):
ax = ax[0]
return (ax, fig)
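# Illustrative usage of newax (deprecated; see docstring above):
#   ax, fig = newax()                                  # a single 2d axes
#   rows, fig = newax(subplots=(2, 2), mode='matrix')  # 2x2 grid, row lists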
def dom2vec(domain, resolution):
"""Matrix of column vectors for meshgrid points. (DEPRECATED)
Returns a matrix of column vectors for the meshgrid
point coordinates over a parallelepiped domain
with the given resolution.
Example
=======
    >>> domain = [0, 1, 0, 2]
    >>> resolution = [4, 5]
    >>> q = dom2vec(domain, resolution)
@param domain: extremal values of parallelepiped
@type domain: [xmin, xmax, ymin, ymax, ...]
@param resolution: # points /dimension
@type resolution: [nx, ny, ...]
@return: q = matrix of column vectors (meshgrid point coordinates)
@rtype: [#dim x #points]
See also vec2meshgrid, domain2meshgrid, meshgrid2vec.
"""
domain = _grouper(2, domain)
lambda_linspace = lambda (dom, res): np.linspace(dom[0], dom[1], res)
axis_grids = map(lambda_linspace, zip(domain, resolution))
pnt_coor = np.meshgrid(*axis_grids)
q = np.vstack(map(np.ravel, pnt_coor))
return q
def quiver(x, v, ax=None, **kwargs):
"""Multi-dimensional quiver. (DEPRECATED)
Plot v columns at points in columns of x
in axes ax with plot formatting options in kwargs.
    >>> import numpy as np
    >>> from matplotlib import pyplot as plt
    >>> from pyvectorized import quiver, dom2vec
    >>> x = dom2vec([0, 10, 0, 11], [20, 20])
    >>> v = np.vstack((np.sin(x[0, :]), np.cos(x[1, :])))
    >>> quiver(x, v, plt.gca())
see also
matplotlib.quiver, mayavi.quiver3
    @param x: matrix of points at which the vectors are based;
        each column is a coordinate tuple
    @type x: [#dim x #points], 2d lil | numpy.ndarray
    @param v: matrix of column vectors to plot at the points in x
    @type v: [#dim x #points], 2d lil | numpy.ndarray
    @param ax: axes handle, e.g., ax = gca()
@param kwargs: plot formatting
@return: handle to plotted object(s)
"""
# multiple axes ?
try:
fields = [quiver(x, v, i, **kwargs) for i in ax]
return fields
    except TypeError:
pass
if not ax:
ax = plt.gca()
dim = dimension(x)
if dim < 2:
raise Exception('ndim < 2')
elif dim < 3:
h = ax.quiver(x[0, :], x[1, :],
v[0, :], v[1, :], **kwargs)
else:
        raise NotImplementedError
        # NOTE: the mayavi code below is unreachable because of the raise
        # above; it is retained from the original pyvectorized source.
        from mayavi.mlab import quiver3d
if ax:
print('axes arg ignored, mayavi used')
h = quiver3d(x[0, :], x[1, :], x[2, :],
v[0, :], v[1, :], v[2, :], **kwargs)
if dim > 3:
        warn('quiver: #dimensions > 3, '
             'plotting only 3D component.')
return h
def _grouper(n, iterable, fillvalue=None):
"""grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx
"""
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
|
glneo/gnuradio-davisaf
|
refs/heads/master
|
grc/examples/xmlrpc/xmlrpc_client_script.py
|
22
|
#!/usr/bin/env python
import time
import random
import xmlrpclib
# create server object
s = xmlrpclib.Server("http://localhost:1234")
# randomly change parameters of the sinusoid
for i in range(10):
    # generate random values
new_freq = random.uniform(0, 5000)
new_ampl = random.uniform(0, 2)
new_offset = random.uniform(-1, 1)
    # set new values
time.sleep(1)
s.set_freq(new_freq)
time.sleep(1)
s.set_ampl(new_ampl)
time.sleep(1)
s.set_offset(new_offset)
|
ageron/tensorflow
|
refs/heads/master
|
tensorflow/python/autograph/pyct/static_analysis/reaching_definitions.py
|
4
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Reaching definition analysis.
This analysis attaches a set of Definition objects to each symbol, one
for each distinct definition that may reach it. The Definition objects are
mutable and may be used by subsequent analyses to further annotate data like
static type and value information.
The analysis also attaches the set of the symbols defined at the entry of
control flow statements.
Requires activity analysis.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import weakref
import gast
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import cfg
from tensorflow.python.autograph.pyct import transformer
from tensorflow.python.autograph.pyct.static_analysis import annos
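# For example (illustrative), in:
#
#   x = 1        # definition D1 of x
#   if c:
#     x = 2      # definition D2 of x
#   print(x)     # reaching definitions of x here: {D1, D2}
#
# either assignment may have executed last, so this analysis annotates the
# final read of x with both D1 and D2.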
class Definition(object):
"""Definition objects describe a unique definition of a variable.
Subclasses of this may be used by passing an appropriate factory function to
resolve.
Attributes:
param_of: Optional[ast.AST]
"""
def __init__(self):
self.param_of = None
def __repr__(self):
return '%s[%d]' % (self.__class__.__name__, id(self))
class _NodeState(object):
"""Abstraction for the state of the CFG walk for reaching definition analysis.
This is a value type. Only implements the strictly necessary operators.
Attributes:
value: Dict[qual_names.QN, Set[Definition, ...]], the defined symbols and
their possible definitions
"""
def __init__(self, init_from=None):
if init_from:
if isinstance(init_from, _NodeState):
self.value = {
s: set(other_infos) for s, other_infos in init_from.value.items()
}
elif isinstance(init_from, dict):
self.value = {s: set((init_from[s],)) for s in init_from}
else:
assert False, init_from
else:
self.value = {}
def __eq__(self, other):
if frozenset(self.value.keys()) != frozenset(other.value.keys()):
return False
ret = all(self.value[s] == other.value[s] for s in self.value)
return ret
def __ne__(self, other):
return not self.__eq__(other)
def __or__(self, other):
assert isinstance(other, _NodeState)
result = _NodeState(self)
for s, other_infos in other.value.items():
if s in result.value:
result.value[s].update(other_infos)
else:
result.value[s] = set(other_infos)
return result
def __sub__(self, other):
assert isinstance(other, set)
result = _NodeState(self)
for s in other:
result.value.pop(s, None)
return result
def __repr__(self):
return 'NodeState[%s]=%s' % (id(self), repr(self.value))
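# Illustrative sketch of _NodeState's set semantics (names hypothetical):
#   joined = state_a | state_b   # per-symbol union of definition sets
#   killed = state_a - {qn_x}    # drop all definitions of symbol qn_x
# Analyzer.visit_node below combines these as: defs_out = gen | (defs_in - kill).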
class Analyzer(cfg.GraphVisitor):
"""CFG visitor that determines reaching definitions at statement level."""
def __init__(self, graph, definition_factory):
self._definition_factory = definition_factory
super(Analyzer, self).__init__(graph)
# This allows communicating that nodes have extra reaching definitions,
# e.g. those that a function closes over.
self.extra_in = {}
self.gen_map = {}
def init_state(self, _):
return _NodeState()
def visit_node(self, node):
prev_defs_out = self.out[node]
defs_in = _NodeState(self.extra_in.get(node.ast_node, None))
for n in node.prev:
defs_in |= self.out[n]
if anno.hasanno(node.ast_node, anno.Static.SCOPE):
node_scope = anno.getanno(node.ast_node, anno.Static.SCOPE)
# The definition objects created by each node must be singletons because
# their ids are used in equality checks.
if node not in self.gen_map:
node_symbols = {}
for s in node_scope.modified:
def_ = self._definition_factory()
if s in node_scope.params:
def_.param_of = weakref.ref(node_scope.params[s])
node_symbols[s] = def_
self.gen_map[node] = _NodeState(node_symbols)
gen = self.gen_map[node]
kill = node_scope.modified | node_scope.deleted
defs_out = gen | (defs_in - kill)
else:
# Nodes that don't have a scope annotation are assumed not to touch any
# symbols.
# This Name node below is a literal name, e.g. False
# This can also happen if activity.py forgot to annotate the node with a
# scope object.
assert isinstance(
node.ast_node,
(gast.Name, gast.Break, gast.Continue, gast.Raise)), (node.ast_node,
node)
defs_out = defs_in
self.in_[node] = defs_in
self.out[node] = defs_out
# TODO(mdan): Move this to the superclass?
return prev_defs_out != defs_out
class TreeAnnotator(transformer.Base):
"""AST visitor that annotates each symbol name with its reaching definitions.
Simultaneously, the visitor runs the dataflow analysis on each function node,
accounting for the effect of closures. For example:
def foo():
bar = 1
def baz():
# bar = 1 reaches here
"""
def __init__(self, source_info, graphs, definition_factory):
super(TreeAnnotator, self).__init__(source_info)
self.definition_factory = definition_factory
self.graphs = graphs
self.current_analyzer = None
self.current_cfg_node = None
def visit_FunctionDef(self, node):
parent_analyzer = self.current_analyzer
subgraph = self.graphs[node]
# Preorder tree processing:
# 1. if this is a child function, the parent was already analyzed and it
# has the proper state value for the subgraph's entry
# 2. analyze the current function body
    # 3. recursively walk the subtree; child functions will be processed
analyzer = Analyzer(subgraph, self.definition_factory)
if parent_analyzer is not None:
# Wire the state between the two subgraphs' analyzers.
parent_out_state = parent_analyzer.out[parent_analyzer.graph.index[node]]
# Exception: symbols modified in the child function are local to it
body_scope = anno.getanno(node, annos.NodeAnno.BODY_SCOPE)
parent_out_state -= body_scope.modified
analyzer.extra_in[node.args] = parent_out_state
# Complete the analysis for the local function and annotate its body.
analyzer.visit_forward()
# Recursively process any remaining subfunctions.
self.current_analyzer = analyzer
# Note: not visiting name, decorator_list and returns because they don't
    # apply to this analysis.
# TODO(mdan): Should we still process the function name?
node.args = self.visit(node.args)
node.body = self.visit_block(node.body)
self.current_analyzer = parent_analyzer
return node
def visit_Nonlocal(self, node):
raise NotImplementedError()
def visit_Global(self, node):
raise NotImplementedError()
def visit_ExceptHandler(self, node):
# TODO(b/123995141) Add Exception Handlers to the CFG
return node
def visit_Name(self, node):
if self.current_analyzer is None:
# Names may appear outside function defs - for example in class
# definitions.
return node
analyzer = self.current_analyzer
cfg_node = self.current_cfg_node
assert cfg_node is not None, ('name node, %s, outside of any statement?'
% node.id)
qn = anno.getanno(node, anno.Basic.QN)
if isinstance(node.ctx, gast.Load):
anno.setanno(node, anno.Static.DEFINITIONS,
tuple(analyzer.in_[cfg_node].value.get(qn, ())))
else:
anno.setanno(node, anno.Static.DEFINITIONS,
tuple(analyzer.out[cfg_node].value.get(qn, ())))
return node
def _aggregate_predecessors_defined_in(self, node):
preds = self.current_analyzer.graph.stmt_prev[node]
node_defined_in = set()
for p in preds:
node_defined_in |= set(self.current_analyzer.out[p].value.keys())
anno.setanno(node, anno.Static.DEFINED_VARS_IN, frozenset(node_defined_in))
def visit_If(self, node):
self._aggregate_predecessors_defined_in(node)
return self.generic_visit(node)
def visit_For(self, node):
self._aggregate_predecessors_defined_in(node)
# Manually accounting for the shortcoming described in
# cfg.AstToCfg.visit_For.
parent = self.current_cfg_node
self.current_cfg_node = self.current_analyzer.graph.index[node.iter]
node.target = self.visit(node.target)
self.current_cfg_node = parent
node.iter = self.visit(node.iter)
node.body = self.visit_block(node.body)
node.orelse = self.visit_block(node.orelse)
return node
def visit_While(self, node):
self._aggregate_predecessors_defined_in(node)
return self.generic_visit(node)
def visit(self, node):
parent = self.current_cfg_node
if (self.current_analyzer is not None and
node in self.current_analyzer.graph.index):
self.current_cfg_node = self.current_analyzer.graph.index[node]
node = super(TreeAnnotator, self).visit(node)
self.current_cfg_node = parent
return node
def resolve(node, source_info, graphs, definition_factory):
"""Resolves reaching definitions for each symbol.
Args:
node: ast.AST
source_info: transformer.SourceInfo
graphs: Dict[ast.FunctionDef, cfg.Graph]
definition_factory: Callable[[], Definition]
Returns:
ast.AST
"""
visitor = TreeAnnotator(source_info, graphs, definition_factory)
node = visitor.visit(node)
return node
|
tanmaythakur/django
|
refs/heads/master
|
django/utils/_os.py
|
502
|
from __future__ import unicode_literals
import os
import sys
import tempfile
from os.path import abspath, dirname, isabs, join, normcase, normpath, sep
from django.core.exceptions import SuspiciousFileOperation
from django.utils import six
from django.utils.encoding import force_text
if six.PY2:
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
# Under Python 2, define our own abspath function that can handle joining
# unicode paths to a current working directory that has non-ASCII characters
# in it. This isn't necessary on Windows since the Windows version of abspath
# handles this correctly. It also handles drive letters differently than the
# pure Python implementation, so it's best not to replace it.
if six.PY3 or os.name == 'nt':
abspathu = abspath
else:
def abspathu(path):
"""
Version of os.path.abspath that uses the unicode representation
of the current working directory, thus avoiding a UnicodeDecodeError
in join when the cwd has non-ASCII characters.
"""
if not isabs(path):
path = join(os.getcwdu(), path)
return normpath(path)
def upath(path):
"""
Always return a unicode path.
"""
if six.PY2 and not isinstance(path, six.text_type):
return path.decode(fs_encoding)
return path
def npath(path):
"""
Always return a native path, that is unicode on Python 3 and bytestring on
Python 2.
"""
if six.PY2 and not isinstance(path, bytes):
return path.encode(fs_encoding)
return path
def safe_join(base, *paths):
"""
Joins one or more path components to the base path component intelligently.
Returns a normalized, absolute version of the final path.
The final path must be located inside of the base path component (otherwise
a ValueError is raised).
"""
base = force_text(base)
paths = [force_text(p) for p in paths]
final_path = abspathu(join(base, *paths))
base_path = abspathu(base)
# Ensure final_path starts with base_path (using normcase to ensure we
# don't false-negative on case insensitive operating systems like Windows),
# further, one of the following conditions must be true:
# a) The next character is the path separator (to prevent conditions like
# safe_join("/dir", "/../d"))
# b) The final path must be the same as the base path.
# c) The base path must be the most root path (meaning either "/" or "C:\\")
if (not normcase(final_path).startswith(normcase(base_path + sep)) and
normcase(final_path) != normcase(base_path) and
dirname(normcase(base_path)) != normcase(base_path)):
raise SuspiciousFileOperation(
'The joined path ({}) is located outside of the base path '
'component ({})'.format(final_path, base_path))
return final_path
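# Illustrative examples (paths hypothetical):
#   safe_join('/home/app', 'media', 'x.txt')  -> '/home/app/media/x.txt'
#   safe_join('/home/app', '../etc/passwd')   raises SuspiciousFileOperation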
def symlinks_supported():
"""
    Check whether creating symlinks is supported on the host platform and
    whether they are allowed to be created (e.g. on Windows it requires
    admin permissions).
"""
tmpdir = tempfile.mkdtemp()
original_path = os.path.join(tmpdir, 'original')
symlink_path = os.path.join(tmpdir, 'symlink')
os.makedirs(original_path)
try:
os.symlink(original_path, symlink_path)
supported = True
except (OSError, NotImplementedError, AttributeError):
supported = False
else:
os.remove(symlink_path)
finally:
os.rmdir(original_path)
os.rmdir(tmpdir)
return supported
|
zhaodelong/django
|
refs/heads/master
|
tests/model_meta/models.py
|
192
|
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Relation(models.Model):
pass
class AbstractPerson(models.Model):
# DATA fields
data_abstract = models.CharField(max_length=10)
fk_abstract = models.ForeignKey(Relation, models.CASCADE, related_name='fk_abstract_rel')
# M2M fields
m2m_abstract = models.ManyToManyField(Relation, related_name='m2m_abstract_rel')
friends_abstract = models.ManyToManyField('self', related_name='friends_abstract', symmetrical=True)
following_abstract = models.ManyToManyField('self', related_name='followers_abstract', symmetrical=False)
# VIRTUAL fields
data_not_concrete_abstract = models.ForeignObject(
Relation,
on_delete=models.CASCADE,
from_fields=['abstract_non_concrete_id'],
to_fields=['id'],
related_name='fo_abstract_rel',
)
# GFK fields
content_type_abstract = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
object_id_abstract = models.PositiveIntegerField()
content_object_abstract = GenericForeignKey('content_type_abstract', 'object_id_abstract')
# GR fields
generic_relation_abstract = GenericRelation(Relation)
class Meta:
abstract = True
class BasePerson(AbstractPerson):
# DATA fields
data_base = models.CharField(max_length=10)
fk_base = models.ForeignKey(Relation, models.CASCADE, related_name='fk_base_rel')
# M2M fields
m2m_base = models.ManyToManyField(Relation, related_name='m2m_base_rel')
friends_base = models.ManyToManyField('self', related_name='friends_base', symmetrical=True)
following_base = models.ManyToManyField('self', related_name='followers_base', symmetrical=False)
# VIRTUAL fields
data_not_concrete_base = models.ForeignObject(
Relation,
on_delete=models.CASCADE,
from_fields=['base_non_concrete_id'],
to_fields=['id'],
related_name='fo_base_rel',
)
# GFK fields
content_type_base = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
object_id_base = models.PositiveIntegerField()
content_object_base = GenericForeignKey('content_type_base', 'object_id_base')
# GR fields
generic_relation_base = GenericRelation(Relation)
class Person(BasePerson):
# DATA fields
data_inherited = models.CharField(max_length=10)
fk_inherited = models.ForeignKey(Relation, models.CASCADE, related_name='fk_concrete_rel')
# M2M Fields
m2m_inherited = models.ManyToManyField(Relation, related_name='m2m_concrete_rel')
friends_inherited = models.ManyToManyField('self', related_name='friends_concrete', symmetrical=True)
following_inherited = models.ManyToManyField('self', related_name='followers_concrete', symmetrical=False)
# VIRTUAL fields
data_not_concrete_inherited = models.ForeignObject(
Relation,
on_delete=models.CASCADE,
from_fields=['model_non_concrete_id'],
to_fields=['id'],
related_name='fo_concrete_rel',
)
# GFK fields
content_type_concrete = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
object_id_concrete = models.PositiveIntegerField()
content_object_concrete = GenericForeignKey('content_type_concrete', 'object_id_concrete')
# GR fields
generic_relation_concrete = GenericRelation(Relation)
class ProxyPerson(Person):
class Meta:
proxy = True
class Relating(models.Model):
# ForeignKey to BasePerson
baseperson = models.ForeignKey(BasePerson, models.CASCADE, related_name='relating_baseperson')
baseperson_hidden = models.ForeignKey(BasePerson, models.CASCADE, related_name='+')
# ForeignKey to Person
person = models.ForeignKey(Person, models.CASCADE, related_name='relating_person')
person_hidden = models.ForeignKey(Person, models.CASCADE, related_name='+')
# ForeignKey to ProxyPerson
proxyperson = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='relating_proxyperson')
proxyperson_hidden = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='+')
# ManyToManyField to BasePerson
basepeople = models.ManyToManyField(BasePerson, related_name='relating_basepeople')
basepeople_hidden = models.ManyToManyField(BasePerson, related_name='+')
# ManyToManyField to Person
people = models.ManyToManyField(Person, related_name='relating_people')
people_hidden = models.ManyToManyField(Person, related_name='+')
# ParentListTests models
class CommonAncestor(models.Model):
pass
class FirstParent(CommonAncestor):
first_ancestor = models.OneToOneField(CommonAncestor, models.SET_NULL, primary_key=True, parent_link=True)
class SecondParent(CommonAncestor):
second_ancestor = models.OneToOneField(CommonAncestor, models.SET_NULL, primary_key=True, parent_link=True)
class Child(FirstParent, SecondParent):
pass
|
Lujeni/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/network/fortios/argspec/facts/facts.py
|
20
|
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
"""
The arg spec for the fortios monitor module.
"""
class FactsArgs(object):
""" The arg spec for the fortios monitor module
"""
def __init__(self, **kwargs):
pass
argument_spec = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": False},
"gather_subset": {
"required": True, "type": "list", "elements": "dict",
"options": {
"fact": {"required": True, "type": "str"},
"filters": {"required": False, "type": "list", "elements": "dict"}
}
}
}
|