| repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, ⌀ = null) |
|---|---|---|---|---|
ycaihua/redis-py
|
refs/heads/master
|
tests/test_pubsub.py
|
43
|
from __future__ import with_statement
import pytest
import time
import redis
from redis.exceptions import ConnectionError
from redis._compat import basestring, u, unichr
from .conftest import r as _redis_client
def wait_for_message(pubsub, timeout=0.1, ignore_subscribe_messages=False):
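"""Poll pubsub.get_message() until a message arrives or `timeout`
seconds have elapsed; return the message dict, or None on timeout."""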
now = time.time()
timeout = now + timeout
while now < timeout:
message = pubsub.get_message(
ignore_subscribe_messages=ignore_subscribe_messages)
if message is not None:
return message
time.sleep(0.01)
now = time.time()
return None
def make_message(type, channel, data, pattern=None):
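"""Build the message dict that PubSub.get_message() is expected to
return, so tests can compare against it directly."""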
return {
'type': type,
'pattern': pattern and pattern.encode('utf-8') or None,
'channel': channel.encode('utf-8'),
'data': data.encode('utf-8') if isinstance(data, basestring) else data
}
def make_subscribe_test_data(pubsub, type):
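"""Bundle a pubsub object with the expected message types, the
(un)subscribe callables, and sample keys for either 'channel' or
'pattern' mode."""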
if type == 'channel':
return {
'p': pubsub,
'sub_type': 'subscribe',
'unsub_type': 'unsubscribe',
'sub_func': pubsub.subscribe,
'unsub_func': pubsub.unsubscribe,
'keys': ['foo', 'bar', u('uni') + unichr(4456) + u('code')]
}
elif type == 'pattern':
return {
'p': pubsub,
'sub_type': 'psubscribe',
'unsub_type': 'punsubscribe',
'sub_func': pubsub.psubscribe,
'unsub_func': pubsub.punsubscribe,
'keys': ['f*', 'b*', u('uni') + unichr(4456) + u('*')]
}
assert False, 'invalid subscribe type: %s' % type
class TestPubSubSubscribeUnsubscribe(object):
def _test_subscribe_unsubscribe(self, p, sub_type, unsub_type, sub_func,
unsub_func, keys):
for key in keys:
assert sub_func(key) is None
# should be a message for each channel/pattern we just subscribed to
for i, key in enumerate(keys):
assert wait_for_message(p) == make_message(sub_type, key, i + 1)
for key in keys:
assert unsub_func(key) is None
# should be a message for each channel/pattern we just unsubscribed
# from
for i, key in enumerate(keys):
i = len(keys) - 1 - i
assert wait_for_message(p) == make_message(unsub_type, key, i)
def test_channel_subscribe_unsubscribe(self, r):
kwargs = make_subscribe_test_data(r.pubsub(), 'channel')
self._test_subscribe_unsubscribe(**kwargs)
def test_pattern_subscribe_unsubscribe(self, r):
kwargs = make_subscribe_test_data(r.pubsub(), 'pattern')
self._test_subscribe_unsubscribe(**kwargs)
def _test_resubscribe_on_reconnection(self, p, sub_type, unsub_type,
sub_func, unsub_func, keys):
for key in keys:
assert sub_func(key) is None
# should be a message for each channel/pattern we just subscribed to
for i, key in enumerate(keys):
assert wait_for_message(p) == make_message(sub_type, key, i + 1)
# manually disconnect
p.connection.disconnect()
# calling get_message again reconnects and resubscribes
# note, we may not re-subscribe to channels in exactly the same order
# so we have to do some extra checks to make sure we got them all
messages = []
for i in range(len(keys)):
messages.append(wait_for_message(p))
unique_channels = set()
assert len(messages) == len(keys)
for i, message in enumerate(messages):
assert message['type'] == sub_type
assert message['data'] == i + 1
assert isinstance(message['channel'], bytes)
channel = message['channel'].decode('utf-8')
unique_channels.add(channel)
assert len(unique_channels) == len(keys)
for channel in unique_channels:
assert channel in keys
def test_resubscribe_to_channels_on_reconnection(self, r):
kwargs = make_subscribe_test_data(r.pubsub(), 'channel')
self._test_resubscribe_on_reconnection(**kwargs)
def test_resubscribe_to_patterns_on_reconnection(self, r):
kwargs = make_subscribe_test_data(r.pubsub(), 'pattern')
self._test_resubscribe_on_reconnection(**kwargs)
def _test_subscribed_property(self, p, sub_type, unsub_type, sub_func,
unsub_func, keys):
assert p.subscribed is False
sub_func(keys[0])
# we're now subscribed even though we haven't processed the
# reply from the server just yet
assert p.subscribed is True
assert wait_for_message(p) == make_message(sub_type, keys[0], 1)
# we're still subscribed
assert p.subscribed is True
# unsubscribe from all channels
unsub_func()
# we're still technically subscribed until we process the
# response messages from the server
assert p.subscribed is True
assert wait_for_message(p) == make_message(unsub_type, keys[0], 0)
# now we're no longer subscribed as no more messages can be delivered
# to any channels we were listening to
assert p.subscribed is False
# subscribing again flips the flag back
sub_func(keys[0])
assert p.subscribed is True
assert wait_for_message(p) == make_message(sub_type, keys[0], 1)
# unsubscribe again
unsub_func()
assert p.subscribed is True
# subscribe to another channel before reading the unsubscribe response
sub_func(keys[1])
assert p.subscribed is True
# read the unsubscribe for key1
assert wait_for_message(p) == make_message(unsub_type, keys[0], 0)
# we're still subscribed to key2, so subscribed should still be True
assert p.subscribed is True
# read the key2 subscribe message
assert wait_for_message(p) == make_message(sub_type, keys[1], 1)
unsub_func()
# haven't read the message yet, so we're still subscribed
assert p.subscribed is True
assert wait_for_message(p) == make_message(unsub_type, keys[1], 0)
# now we're finally unsubscribed
assert p.subscribed is False
def test_subscribe_property_with_channels(self, r):
kwargs = make_subscribe_test_data(r.pubsub(), 'channel')
self._test_subscribed_property(**kwargs)
def test_subscribe_property_with_patterns(self, r):
kwargs = make_subscribe_test_data(r.pubsub(), 'pattern')
self._test_subscribed_property(**kwargs)
def test_ignore_all_subscribe_messages(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
checks = (
(p.subscribe, 'foo'),
(p.unsubscribe, 'foo'),
(p.psubscribe, 'f*'),
(p.punsubscribe, 'f*'),
)
assert p.subscribed is False
for func, channel in checks:
assert func(channel) is None
assert p.subscribed is True
assert wait_for_message(p) is None
assert p.subscribed is False
def test_ignore_individual_subscribe_messages(self, r):
p = r.pubsub()
checks = (
(p.subscribe, 'foo'),
(p.unsubscribe, 'foo'),
(p.psubscribe, 'f*'),
(p.punsubscribe, 'f*'),
)
assert p.subscribed is False
for func, channel in checks:
assert func(channel) is None
assert p.subscribed is True
message = wait_for_message(p, ignore_subscribe_messages=True)
assert message is None
assert p.subscribed is False
class TestPubSubMessages(object):
def setup_method(self, method):
self.message = None
def message_handler(self, message):
self.message = message
def test_published_message_to_channel(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.subscribe('foo')
assert r.publish('foo', 'test message') == 1
message = wait_for_message(p)
assert isinstance(message, dict)
assert message == make_message('message', 'foo', 'test message')
def test_published_message_to_pattern(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.subscribe('foo')
p.psubscribe('f*')
# 1 to pattern, 1 to channel
assert r.publish('foo', 'test message') == 2
message1 = wait_for_message(p)
message2 = wait_for_message(p)
assert isinstance(message1, dict)
assert isinstance(message2, dict)
expected = [
make_message('message', 'foo', 'test message'),
make_message('pmessage', 'foo', 'test message', pattern='f*')
]
assert message1 in expected
assert message2 in expected
assert message1 != message2
def test_channel_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.subscribe(foo=self.message_handler)
assert r.publish('foo', 'test message') == 1
assert wait_for_message(p) is None
assert self.message == make_message('message', 'foo', 'test message')
def test_pattern_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.psubscribe(**{'f*': self.message_handler})
assert r.publish('foo', 'test message') == 1
assert wait_for_message(p) is None
assert self.message == make_message('pmessage', 'foo', 'test message',
pattern='f*')
def test_unicode_channel_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
channel = u('uni') + unichr(4456) + u('code')
channels = {channel: self.message_handler}
p.subscribe(**channels)
assert r.publish(channel, 'test message') == 1
assert wait_for_message(p) is None
assert self.message == make_message('message', channel, 'test message')
def test_unicode_pattern_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
pattern = u('uni') + unichr(4456) + u('*')
channel = u('uni') + unichr(4456) + u('code')
p.psubscribe(**{pattern: self.message_handler})
assert r.publish(channel, 'test message') == 1
assert wait_for_message(p) is None
assert self.message == make_message('pmessage', channel,
'test message', pattern=pattern)
class TestPubSubAutoDecoding(object):
"These tests only validate that we get unicode values back"
channel = u('uni') + unichr(4456) + u('code')
pattern = u('uni') + unichr(4456) + u('*')
data = u('abc') + unichr(4458) + u('123')
def make_message(self, type, channel, data, pattern=None):
return {
'type': type,
'channel': channel,
'pattern': pattern,
'data': data
}
def setup_method(self, method):
self.message = None
def message_handler(self, message):
self.message = message
@pytest.fixture()
def r(self, request):
return _redis_client(request=request, decode_responses=True)
def test_channel_subscribe_unsubscribe(self, r):
p = r.pubsub()
p.subscribe(self.channel)
assert wait_for_message(p) == self.make_message('subscribe',
self.channel, 1)
p.unsubscribe(self.channel)
assert wait_for_message(p) == self.make_message('unsubscribe',
self.channel, 0)
def test_pattern_subscribe_unsubscribe(self, r):
p = r.pubsub()
p.psubscribe(self.pattern)
assert wait_for_message(p) == self.make_message('psubscribe',
self.pattern, 1)
p.punsubscribe(self.pattern)
assert wait_for_message(p) == self.make_message('punsubscribe',
self.pattern, 0)
def test_channel_publish(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.subscribe(self.channel)
r.publish(self.channel, self.data)
assert wait_for_message(p) == self.make_message('message',
self.channel,
self.data)
def test_pattern_publish(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.psubscribe(self.pattern)
r.publish(self.channel, self.data)
assert wait_for_message(p) == self.make_message('pmessage',
self.channel,
self.data,
pattern=self.pattern)
def test_channel_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.subscribe(**{self.channel: self.message_handler})
r.publish(self.channel, self.data)
assert wait_for_message(p) is None
assert self.message == self.make_message('message', self.channel,
self.data)
# test that we reconnected to the correct channel
p.connection.disconnect()
assert wait_for_message(p) is None # should reconnect
new_data = self.data + u('new data')
r.publish(self.channel, new_data)
assert wait_for_message(p) is None
assert self.message == self.make_message('message', self.channel,
new_data)
def test_pattern_message_handler(self, r):
p = r.pubsub(ignore_subscribe_messages=True)
p.psubscribe(**{self.pattern: self.message_handler})
r.publish(self.channel, self.data)
assert wait_for_message(p) is None
assert self.message == self.make_message('pmessage', self.channel,
self.data,
pattern=self.pattern)
# test that we reconnected to the correct pattern
p.connection.disconnect()
assert wait_for_message(p) is None # should reconnect
new_data = self.data + u('new data')
r.publish(self.channel, new_data)
assert wait_for_message(p) is None
assert self.message == self.make_message('pmessage', self.channel,
new_data,
pattern=self.pattern)
class TestPubSubRedisDown(object):
def test_channel_subscribe(self, r):
r = redis.Redis(host='localhost', port=6390)
p = r.pubsub()
with pytest.raises(ConnectionError):
p.subscribe('foo')
|
pythonbyexample/PBE
|
refs/heads/master
|
dbe/businesstest/migrations/0002_auto__del_x__add_entry__add_set__add_task.py
|
1
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'x'
db.delete_table('businesstest_x')
# Adding model 'Entry'
db.create_table('businesstest_entry', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('task', self.gf('django.db.models.fields.related.ForeignKey')(related_name='entries', to=orm['businesstest.Task'])),
('eset', self.gf('django.db.models.fields.related.ForeignKey')(related_name='entries', to=orm['businesstest.Set'])),
('created', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
('time_taken', self.gf('django.db.models.fields.CharField')(max_length=8, null=True, blank=True)),
))
db.send_create_signal('businesstest', ['Entry'])
# Adding model 'Set'
db.create_table('businesstest_set', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('number', self.gf('django.db.models.fields.IntegerField')()),
('created', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
('finished', self.gf('django.db.models.fields.BooleanField')(default=False)),
('time_taken', self.gf('django.db.models.fields.CharField')(max_length=8, null=True, blank=True)),
))
db.send_create_signal('businesstest', ['Set'])
# Adding model 'Task'
db.create_table('businesstest_task', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('number', self.gf('django.db.models.fields.FloatField')()),
('question', self.gf('django.db.models.fields.TextField')(max_length=2000)),
('attachment', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('created', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
))
db.send_create_signal('businesstest', ['Task'])
def backwards(self, orm):
# Adding model 'x'
db.create_table('businesstest_x', (
('description', self.gf('django.db.models.fields.TextField')(default='', max_length=2000, blank=True)),
('tm_target', self.gf('django.db.models.fields.FloatField')(default=-1, blank=True)),
('menu', self.gf('django.db.models.fields.CharField')(max_length=30)),
('creation_date', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('businesstest', ['x'])
# Deleting model 'Entry'
db.delete_table('businesstest_entry')
# Deleting model 'Set'
db.delete_table('businesstest_set')
# Deleting model 'Task'
db.delete_table('businesstest_task')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'businesstest.entry': {
'Meta': {'object_name': 'Entry'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'eset': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entries'", 'to': "orm['businesstest.Set']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entries'", 'to': "orm['businesstest.Task']"}),
'time_taken': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'})
},
'businesstest.set': {
'Meta': {'object_name': 'Set'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.IntegerField', [], {}),
'time_taken': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'businesstest.task': {
'Meta': {'object_name': 'Task'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'attachment': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.FloatField', [], {}),
'question': ('django.db.models.fields.TextField', [], {'max_length': '2000'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['businesstest']
|
greyhwndz/rethinkdb
|
refs/heads/next
|
test/rql_test/connections/http_support/jinja2/__init__.py
|
256
|
# -*- coding: utf-8 -*-
"""
jinja2
~~~~~~
Jinja2 is a template engine written in pure Python. It provides a
Django-inspired non-XML syntax but supports inline expressions and
an optional sandboxed environment.
Nutshell
--------
Here is a small example of a Jinja2 template::
{% extends 'base.html' %}
{% block title %}Memberlist{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext en'
__version__ = '2.7.3'
# high level interface
from jinja2.environment import Environment, Template
# loaders
from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
ModuleLoader
# bytecode caches
from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
MemcachedBytecodeCache
# undefined types
from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined
# exceptions
from jinja2.exceptions import TemplateError, UndefinedError, \
TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
TemplateAssertionError
# decorators and public utilities
from jinja2.filters import environmentfilter, contextfilter, \
evalcontextfilter
from jinja2.utils import Markup, escape, clear_caches, \
environmentfunction, evalcontextfunction, contextfunction, \
is_undefined
__all__ = [
'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
'evalcontextfilter', 'evalcontextfunction'
]
|
Veek/Python
|
refs/heads/master
|
IRC/Hexchat/fake_ctcp.py
|
1
|
import re, random, hexchat
from subprocess import Popen, PIPE
__module_name__ = 'Fake CTCP'
__module_version__ = '0.1'
__module_description__ = 'Fakes unessential CTCP requests: VERSION PING TIME'
FAKE_VERSION = 'pinoyChat v1.3.3.4 - Windows XP SP2,'\
' @400MHz Celeron Mendocino, Administrator:password'
def debug(msg):
hexchat.prnt('DEBUG: {}'.format(msg))
def get_mangled_date():
date_s = Popen(['date', '+"%a %b %d"'], stdout=PIPE).communicate()[0]
date_s = date_s.decode().rstrip()[1:-1]  # decode the bytes from date(1), strip the quotes the format adds
hour = random.randint(0, 23)
minute = random.randint(0, 59)
sec = random.randint(0, 59)
date = date_s + ' ' + str(hour) + ':' + str(minute) + ':' + str(sec)
return date
def extract_sender(word):
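# An IRC prefix looks like ":nick!user@host"; the sender nick is the
# text between the leading ':' and the first '!'.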
pat = '^:(.+?)!'
m = re.search(pat, word[0])
if m:
name = m.groups(1)[0]
return name
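# HexChat's NCTCP command sends a CTCP reply wrapped in a NOTICE,
# rather than opening a new CTCP request.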
def ctcp_reply(nick, cmd, msg):
hexchat.command('nctcp {} {} {}'.format(nick, cmd, msg))
def ctcp_callback(word, word_eol, userdata):
# Grab the PRIVMSG IRC command
recv_cmd = word_eol[0]
sending_nick = extract_sender(word) # Grab sender of cmd
# Get the start of the PRIVMSG and copy till end into ..frag
idx = recv_cmd.index('PRIVMSG')
nic_cmd_frag = recv_cmd[idx:]
# Extract the nick and cmd. If nick is me, then handle dangerous
# cmds
try:
nick, cmd = nic_cmd_frag.split(':', 1)
except ValueError:
debug("ERROR fake_ctcp.py! PRIVMSG - problem with :")
debug(word[0])
debug(word_eol[0])
return hexchat.EAT_ALL
# Obtain current nickname from hexchat cfg
mynick = hexchat.get_info('nick')
if mynick in nick:
if 'VERSION' in cmd:
ctcp_reply(sending_nick, 'VERSION', FAKE_VERSION)
debug(word_eol)
return hexchat.EAT_ALL
elif 'TIME' in cmd:
ctcp_reply(sending_nick, 'TIME', get_mangled_date())
debug(word_eol)
return hexchat.EAT_ALL
elif 'PING' in cmd:
ctcp_reply(sending_nick, 'PING', 10)
debug(word_eol)
return hexchat.EAT_ALL
else:
debug(word_eol)
return hexchat.EAT_ALL
return hexchat.EAT_NONE
#-------------------------------------------------------------
hexchat.prnt('CTCP script loaded')
hexchat.hook_server('PRIVMSG', ctcp_callback)
|
mlavin/django
|
refs/heads/master
|
tests/template_tests/filter_tests/test_yesno.py
|
430
|
from django.template.defaultfilters import yesno
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
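"""Tests for the `yesno` filter, which maps True/False/None to the
first/second/third item of its comma-separated argument (defaulting
to 'yes,no,maybe'); with only two items, None falls back to the
False value."""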
def test_true(self):
self.assertEqual(yesno(True), 'yes')
def test_false(self):
self.assertEqual(yesno(False), 'no')
def test_none(self):
self.assertEqual(yesno(None), 'maybe')
def test_true_arguments(self):
self.assertEqual(yesno(True, 'certainly,get out of town,perhaps'), 'certainly')
def test_false_arguments(self):
self.assertEqual(yesno(False, 'certainly,get out of town,perhaps'), 'get out of town')
def test_none_two_arguments(self):
self.assertEqual(yesno(None, 'certainly,get out of town'), 'get out of town')
def test_none_three_arguments(self):
self.assertEqual(yesno(None, 'certainly,get out of town,perhaps'), 'perhaps')
|
robbi/pyload
|
refs/heads/stable
|
module/plugins/hoster/AndroidfilehostCom.py
|
4
|
# -*- coding: utf-8 -*-
#
# Test links:
# https://www.androidfilehost.com/?fid=95916177934518197
import re
from ..internal.SimpleHoster import SimpleHoster
class AndroidfilehostCom(SimpleHoster):
__name__ = "AndroidfilehostCom"
__type__ = "hoster"
__version__ = "0.06"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?androidfilehost\.com/\?fid=\d+'
__config__ = [("activated", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("fallback", "bool",
"Fallback to free download if premium fails", True),
("chk_filesize", "bool", "Check file size", True),
("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10)]
__description__ = """Androidfilehost.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zapp-brannigan", "fuerst.reinje@web.de")]
NAME_PATTERN = r'<br />(?P<N>.*?)</h1>'
SIZE_PATTERN = r'<h4>size</h4>\s*<p>(?P<S>[\d.,]+)(?P<U>[\w^_]+)</p>'
HASHSUM_PATTERN = r'<h4>(?P<H>.*?)</h4>\s*<p><code>(?P<D>.*?)</code></p>'
OFFLINE_PATTERN = r'404 not found'
WAIT_PATTERN = r'users must wait <strong>(\d+) secs'
def setup(self):
self.multiDL = True
self.resume_download = True
self.chunk_limit = 1
def handle_free(self, pyfile):
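# Free-download flow: read the wait time and file id out of the page,
# ask the mirrors endpoint for a download URL, then hit the stats
# endpoint (apparently so the download is registered).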
wait = re.search(self.WAIT_PATTERN, self.data)
if wait:
self.log_debug("Waiting time: %s seconds" % wait.group(1))
fid = re.search(r'id="fid" value="(\d+)" />', self.data).group(1)
self.log_debug("FID: %s" % fid)
html = self.load("https://www.androidfilehost.com/libs/otf/mirrors.otf.php",
post={'submit': 'submit',
'action': 'getdownloadmirrors',
'fid': fid})
self.link = re.findall('"url":"(.*?)"', html)[0].replace("\\", "")
mirror_host = self.link.split("/")[2]
self.log_debug("Mirror Host: %s" % mirror_host)
html = self.load("https://www.androidfilehost.com/libs/otf/stats.otf.php",
get={'fid': fid,
'w': 'download',
'mirror': mirror_host})
|
stevekuznetsov/tito
|
refs/heads/master
|
src/tito/tar.py
|
6
|
# Copyright (c) 2008-2009 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
import re
import struct
import sys
from tito.compat import decode_bytes, encode_bytes
RECORD_SIZE = 512
# Git writes its tarballs to be a multiple of 10240. I'm not sure why: the
# implementation in archive-tar.c doesn't have any comments on the matter.
GIT_BLOCK_SIZE = RECORD_SIZE * 20
class TarFixer(object):
"""Code for updating a tar header's mtime. For details on the tar format
see http://www.gnu.org/software/tar/manual/html_node/Standard.html and
http://en.wikipedia.org/wiki/Tar_%28computing%29
Tito passes "git archive" a tree ID. The "git archive" man page states:
git archive behaves differently when given a tree ID versus when given
a commit ID or tag ID. In the first case the current time is used as
the modification time of each file in the archive.
Using the current time means that every time we build the source tarball,
the file fingerprint will change since the metadata in the tarball changes.
We don't want that since build systems track the fingerprint to see if
the actual source has changed.
The resultant tarball will be in this format:
- Global header (512 bytes)
- Extended header block with git ref (512 bytes)
- [File header (512 bytes) + File data padded to multiple of 512] * number of files
- 1024 NUL bytes
- However many NUL bytes are necessary to pad the file to a multiple of GIT_BLOCK_SIZE
The block after the global header with the git ref is called an "extended header".
We are technically writing a "pax" archive because of the use of extensions. According
to the comments in git's archive-tar.c
pax extended header records have the format "%u %s=%s\n". %u contains
the size of the whole string (including the %u), the first %s is the
keyword, the second one is the value.
PAX (also known as POSIX.1-2001) always encodes everything in UTF-8.
"""
def __init__(self, fh, out, timestamp, gitref, maven_built=False):
self.maven_built = maven_built
# As defined in tar.h
# A collections.OrderedDict would be more appropriate here but I'm trying to
# maintain Python 2.6 compatibility.
self.tar_struct = [
('name', '100s'),
('mode', '8s'),
('uid', '8s'),
('gid', '8s'),
('size', '12s'),
('mtime', '12s'),
('checksum', '8s'),
('typeflag', '1s'),
('linkname', '100s'),
('magic', '6s'),
('version', '2s'),
('uname', '32s'),
('gname', '32s'),
('devmajor', '8s'),
('devminor', '8s'),
('prefix', '155s'),
]
# The items in the list below are zero-padded octal numbers in ASCII.
# All other fields are null-terminated character strings. Each numeric
# field of width w contains w minus 1 digits, and a null.
#
# The checksum is technically an octal_member but we handle it specially.
self.octal_members = [
'mode',
'uid',
'gid',
'size',
'mtime',
'devmajor',
'devminor',
]
# Add an '=' to use native byte order with standard sizes
self.struct_template = "=" + "".join(map(lambda x: x[1], self.tar_struct))
self.struct_members = list(map(lambda x: x[0], self.tar_struct))
self.struct_hash = dict(self.tar_struct)
# The tarballs created by git archive from tree IDs don't have a global
# header for some reason.
self.need_header = True
self.done = False
# process_chunk() reads this flag before any chunk has set it, so it
# must start out False.
self.last_chunk_was_nulls = False
# We need to track the total number of bytes we've written so we can
# pad out the final tarball to be a multiple of GIT_BLOCK_SIZE
self.total_length = 0
self.fh = fh
self.out = out
self.timestamp = int(timestamp)
self.gitref = gitref
def full_read(self, read_size):
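"""Read exactly `read_size` bytes, looping over short reads; raise
IOError if the stream ends before enough bytes arrive."""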
read = self.fh.read(read_size)
amount_read = len(read)
while (amount_read < read_size):
left_to_read = read_size - amount_read
next_read = self.fh.read(left_to_read)
if not next_read:
raise IOError("Buffer underflow when reading")
amount_read += len(next_read)
read = read + next_read
return read
def write(self, data):
"""Write the data correctly depending on the mode of the file. While binary mode
is preferred, we support text mode for streams like stdout."""
if hasattr(self.out, 'mode') and 'b' in self.out.mode:
data = bytearray(data)
else:
data = decode_bytes(data, "utf8")
self.out.write(data)
def chunk_to_hash(self, chunk):
# Our struct template is only 500 bytes, but the last 12 bytes are NUL
# I elected to ignore them completely instead of including them in the
# template as '12x'. The unpack_from method will read the bytes our
# template defines from chunk and discard the rest.
unpacked = struct.unpack_from(self.struct_template, chunk)
unpacked = list(map(lambda x: decode_bytes(x, 'utf8'), unpacked))
# Zip what we read together with the member names and create a dictionary
chunk_props = dict(zip(self.struct_members, unpacked))
return chunk_props
def padded_size(self, length, pad_size=RECORD_SIZE):
"""Function to pad out a length to the nearest multiple of pad_size
that can contain it."""
blocks = length // pad_size
if length % pad_size != 0:
blocks += 1
return blocks * pad_size
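# For example: padded_size(1) == 512, padded_size(513) == 1024, and
# padded_size(600, GIT_BLOCK_SIZE) == 10240.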
def create_global_header(self):
header_props = {
'name': u'pax_global_header',
'mode': 0o666,
'uid': 0,
'gid': 0,
'size': 52, # The size of the extended header with the gitref
'mtime': self.timestamp,
'typeflag': u'g',
'linkname': u'',
'magic': u'ustar',
'version': u'00',
'uname': u'root',
'gname': u'root',
'devmajor': 0,
'devminor': 0,
'prefix': u'',
}
self.process_header(header_props)
def encode_header(self, chunk_props, encode_order=None):
pack_values = []
if encode_order is None:
encode_order = self.struct_members
for member in encode_order:
if member in self.octal_members:
# Pad out the octal value to the right length
member_template = self.struct_hash[member]
field_size = int(re.match(r'(\d+)', member_template).group(1)) - 1
fmt = "%0" + str(field_size) + "o\x00"
as_string = fmt % chunk_props[member]
pack_values.append(as_string.encode("utf8"))
else:
pack_values.append(chunk_props[member].encode("utf8"))
return pack_values
def process_header(self, chunk_props):
"""There is a header before every file and a global header at the top."""
chunk_props['checksum'] = self.calculate_checksum(chunk_props)
pack_values = self.encode_header(chunk_props)
# The struct itself is only 500 bytes so we have to pad it to 512
data_out = struct.pack(self.struct_template + "12x", *pack_values)
self.write(data_out)
self.total_length += len(data_out)
def process_extended_header(self):
# Trash the original comment
self.full_read(RECORD_SIZE)
self.create_extended_header()
def create_extended_header(self):
# pax extended header records have the format "%u %s=%s\n". %u contains
# the size of the whole string (including the %u), the first %s is the
# keyword, the second one is the value.
#
# Since the git ref is always 40 characters we can
# pre-compute the length to put in the extended header
comment = "52 comment=%s\n" % self.gitref
data_out = struct.pack("=52s460x", encode_bytes(comment, "ascii"))
self.write(data_out)
self.total_length += len(data_out)
def process_file_data(self, size):
data_out = self.full_read(self.padded_size(size))
self.write(data_out)
self.total_length += len(data_out)
def calculate_checksum(self, chunk_props):
"""The checksum field is the ASCII representation of the octal value of the simple
sum of all bytes in the header block. Each 8-bit byte in the header is added
to an unsigned integer, initialized to zero, the precision of which shall be
no less than seventeen bits. When calculating the checksum, the checksum field is
treated as if it were all spaces.
"""
chunk_props['checksum'] = " " * 8
values = self.encode_header(chunk_props)
new_chksum = 0
for val in values:
val_bytes = val.decode("utf8")
for b in val_bytes:
new_chksum += ord(b)
return "%07o\x00" % new_chksum
def process_chunk(self, chunk):
# Tar archives end with two 512 byte blocks of zeroes
if chunk == b"\x00" * 512:
self.write(b"\x00" * 512)
self.total_length += len(chunk)
if self.last_chunk_was_nulls:
final_padding = b"\x00" * (self.padded_size(self.total_length, GIT_BLOCK_SIZE) - self.total_length)
self.write(final_padding)
self.done = True
self.last_chunk_was_nulls = True
return
self.last_chunk_was_nulls = False
chunk_props = self.chunk_to_hash(chunk)
# Delete the old checksum since it's now invalid and we don't want even
# an inadvertent reference to it.
del(chunk_props['checksum'])
# Remove the trailing NUL byte(s) on the end of members
for k, v in chunk_props.items():
chunk_props[k] = v.rstrip("\x00")
# This line is the whole purpose of this class!
chunk_props['mtime'] = "%o" % self.timestamp
if self.maven_built:
# Maven does all sorts of horrible things in the tarfile it creates.
# Everything is padded out with spaces instead of NUL bytes and the uid
# and gid fields are left empty.
#
# Plus it sets the uname and gname to the current user resulting in
# the checksum changing from person to person.
# See https://jira.codehaus.org/browse/PLXCOMP-233
chunk_props['uname'] = 'root'
chunk_props['gname'] = 'root'
chunk_props['uid'] = '0'
chunk_props['gid'] = '0'
# In a tar file, the highest 3 bits in the mode represent if the tarfile
# should be extracted with the GID or UID set. Maven adds these but we don't
# want them, so we just take the last 4 which are the ones that matter to us.
chunk_props['mode'] = str(chunk_props['mode'])[-4:-1]
chunk_props['version'] = '00'
for x in ['size', 'devmajor', 'devminor']:
chunk_props[x] = chunk_props[x].strip()
for member in self.octal_members:
# Convert octals to decimal
chunk_props[member] = int(chunk_props[member], 8)
# If there is no global header, we need to create one
if self.need_header:
# When run against a tree ID, git archive doesn't create
# a global header. The first block is just the header for
# the first file.
if chunk_props['typeflag'] != 'g':
self.create_global_header()
self.create_extended_header()
self.process_header(chunk_props)
else:
self.process_header(chunk_props)
self.process_extended_header()
self.need_header = False
else:
self.process_header(chunk_props)
self.process_file_data(chunk_props['size'])
def fix(self):
# The gzip file object has its mode as an integer. We have to
# access the underlying file object to get the real mode.
if hasattr(self.fh, "myfileobj"):
mode = self.fh.myfileobj.mode
else:
mode = self.fh.mode
if 'b' not in mode:
raise IOError("The input file must be opened in binary mode!")
try:
chunk = self.full_read(RECORD_SIZE)
while chunk and not self.done:
self.process_chunk(chunk)
if not self.done:
chunk = self.full_read(RECORD_SIZE)
finally:
self.fh.close()
if __name__ == '__main__':
if len(sys.argv) != 4:
sys.exit("Usage: %s UNIX_TIMESTAMP GIT_HASH TAR_FILE" % sys.argv[0])
try:
timestamp = int(sys.argv[1])
except ValueError:
sys.exit("UNIX_TIMESTAMP must be an integer")
gitref = sys.argv[2]
tar_file = sys.argv[3]
try:
fh = open(tar_file, 'rb')
except IOError:
sys.exit("Could not read %s" % tar_file)
reader = TarFixer(fh, sys.stdout, timestamp, gitref)
reader.fix()
|
virtualrobotix/MissionPlanner
|
refs/heads/master
|
ExtLibs/Mavlink/pymavlink/generator/lib/minixsv/__init__.py
|
82
|
#
# minixsv, Release 0.9.0
# file: __init__.py
#
# minixsv package file
#
# history:
# 2004-10-26 rl created
#
# Copyright (c) 2004-2008 by Roland Leuthe. All rights reserved.
#
# --------------------------------------------------------------------
# The minixsv XML schema validator is
#
# Copyright (c) 2004-2008 by Roland Leuthe
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
######################################################################
# PUBLIC DEFINITIONS
######################################################################
# supported XML interfaces
XMLIF_MINIDOM = "XMLIF_MINIDOM"
XMLIF_4DOM = "XMLIF_4DOM"
XMLIF_ELEMENTTREE = "XMLIF_ELEMENTTREE"
# namespace definitions
EMPTY_PREFIX = None
EMPTY_NAMESPACE = None
XML_NAMESPACE = "http://www.w3.org/XML/1998/namespace"
XMLNS_NAMESPACE = "http://www.w3.org/2000/xmlns/"
XSD_NAMESPACE = "http://www.w3.org/2001/XMLSchema"
XSI_NAMESPACE = "http://www.w3.org/2001/XMLSchema-instance"
# definition of minixsv path
import os
MINIXSV_DIR = os.path.dirname(__file__)
# error handling definitions
from xsvalErrorHandler import IGNORE_WARNINGS, PRINT_WARNINGS, STOP_ON_WARNINGS
from xsvalErrorHandler import XsvalError
|
jkbrzt/httpie
|
refs/heads/master
|
tests/utils.py
|
1
|
# coding=utf-8
"""Utilities for HTTPie test suite."""
import sys
import time
import json
import tempfile
from io import BytesIO
from pathlib import Path
from typing import Optional, Union
from httpie.status import ExitStatus
from httpie.config import Config
from httpie.context import Environment
from httpie.core import main
# pytest-httpbin currently does not support chunked requests:
# <https://github.com/kevin1024/pytest-httpbin/issues/33>
# <https://github.com/kevin1024/pytest-httpbin/issues/28>
HTTPBIN_WITH_CHUNKED_SUPPORT = 'http://httpbin.org'
TESTS_ROOT = Path(__file__).parent
CRLF = '\r\n'
COLOR = '\x1b['
HTTP_OK = '200 OK'
# noinspection GrazieInspection
HTTP_OK_COLOR = (
'HTTP\x1b[39m\x1b[38;5;245m/\x1b[39m\x1b'
'[38;5;37m1.1\x1b[39m\x1b[38;5;245m \x1b[39m\x1b[38;5;37m200'
'\x1b[39m\x1b[38;5;245m \x1b[39m\x1b[38;5;136mOK'
)
def mk_config_dir() -> Path:
dirname = tempfile.mkdtemp(prefix='httpie_config_')
return Path(dirname)
def add_auth(url, auth):
proto, rest = url.split('://', 1)
return proto + '://' + auth + '@' + rest
class StdinBytesIO(BytesIO):
"""To be used for `MockEnvironment.stdin`"""
len = 0 # See `prepare_request_body()`
class MockEnvironment(Environment):
"""Environment subclass with reasonable defaults for testing."""
colors = 0
stdin_isatty = True
stdout_isatty = True
is_windows = False
def __init__(self, create_temp_config_dir=True, **kwargs):
if 'stdout' not in kwargs:
kwargs['stdout'] = tempfile.TemporaryFile(
mode='w+b',
prefix='httpie_stdout'
)
if 'stderr' not in kwargs:
kwargs['stderr'] = tempfile.TemporaryFile(
mode='w+t',
prefix='httpie_stderr'
)
super().__init__(**kwargs)
self._create_temp_config_dir = create_temp_config_dir
self._delete_config_dir = False
self._temp_dir = Path(tempfile.gettempdir())
@property
def config(self) -> Config:
if (self._create_temp_config_dir
and self._temp_dir not in self.config_dir.parents):
self.create_temp_config_dir()
return super().config
def create_temp_config_dir(self):
self.config_dir = mk_config_dir()
self._delete_config_dir = True
def cleanup(self):
self.stdout.close()
self.stderr.close()
if self._delete_config_dir:
assert self._temp_dir in self.config_dir.parents
from shutil import rmtree
rmtree(self.config_dir, ignore_errors=True)
def __del__(self):
# noinspection PyBroadException
try:
self.cleanup()
except Exception:
pass
class BaseCLIResponse:
"""
Represents the result of a simulated `$ http` invocation via `http()`.
Holds and provides access to:
- stdout output: print(self)
- stderr output: print(self.stderr)
- devnull output: print(self.devnull)
- exit_status output: print(self.exit_status)
"""
stderr: str = None
devnull: str = None
json: dict = None
exit_status: ExitStatus = None
class BytesCLIResponse(bytes, BaseCLIResponse):
"""
Used as a fallback when a StrCLIResponse cannot be used.
E.g. when the output contains binary data or when it is colorized.
`.json` will always be None.
"""
class StrCLIResponse(str, BaseCLIResponse):
@property
def json(self) -> Optional[dict]:
"""
Return the deserialized request or response JSON body,
if one (and only one) is included in the output and is parsable.
"""
if not hasattr(self, '_json'):
self._json = None
# De-serialize JSON body if possible.
if COLOR in self:
# Colorized output cannot be parsed.
pass
elif self.strip().startswith('{'):
# Looks like JSON body.
self._json = json.loads(self)
elif self.count('Content-Type:') == 1:
# Looks like an HTTP message,
# try to extract JSON from its body.
try:
j = self.strip()[self.strip().rindex('\r\n\r\n'):]
except ValueError:
pass
else:
try:
# noinspection PyAttributeOutsideInit
self._json = json.loads(j)
except ValueError:
pass
return self._json
class ExitStatusError(Exception):
pass
def http(
*args,
program_name='http',
tolerate_error_exit_status=False,
**kwargs,
) -> Union[StrCLIResponse, BytesCLIResponse]:
# noinspection PyUnresolvedReferences
"""
Run HTTPie and capture stderr/out and exit status.
Content written to devnull will be captured only if
env.devnull is set manually.
Invoke `httpie.core.main()` with `args` and `kwargs`,
and return a `CLIResponse` subclass instance.
The return value is either a `StrCLIResponse`, or `BytesCLIResponse`
if unable to decode the output. The devnull output is a string when
possible, bytes otherwise.
The response has the following attributes:
`stdout` is represented by the instance itself (print r)
`stderr`: text written to stderr
`devnull`: text written to devnull
`exit_status`: the exit status
`json`: decoded JSON (if possible) or `None`
Exceptions are propagated.
If you pass ``tolerate_error_exit_status=True``, then error exit statuses
won't result in an exception.
Example:
$ http --auth=user:password GET httpbin.org/basic-auth/user/password
>>> httpbin = getfixture('httpbin')
>>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw')
>>> type(r) == StrCLIResponse
True
>>> r.exit_status
<ExitStatus.SUCCESS: 0>
>>> r.stderr
''
>>> 'HTTP/1.1 200 OK' in r
True
>>> r.json == {'authenticated': True, 'user': 'user'}
True
"""
env = kwargs.get('env')
if not env:
env = kwargs['env'] = MockEnvironment()
stdout = env.stdout
stderr = env.stderr
devnull = env.devnull
args = list(args)
args_with_config_defaults = args + env.config.default_options
add_to_args = []
if '--debug' not in args_with_config_defaults:
if (not tolerate_error_exit_status
and '--traceback' not in args_with_config_defaults):
add_to_args.append('--traceback')
if not any('--timeout' in arg for arg in args_with_config_defaults):
add_to_args.append('--timeout=3')
complete_args = [program_name, *add_to_args, *args]
# print(' '.join(complete_args))
def dump_stderr():
stderr.seek(0)
sys.stderr.write(stderr.read())
try:
try:
exit_status = main(args=complete_args, **kwargs)
if '--download' in args:
# Let the progress reporter thread finish.
time.sleep(.5)
except SystemExit:
if tolerate_error_exit_status:
exit_status = ExitStatus.ERROR
else:
dump_stderr()
raise
except Exception:
stderr.seek(0)
sys.stderr.write(stderr.read())
raise
else:
if (not tolerate_error_exit_status
and exit_status != ExitStatus.SUCCESS):
dump_stderr()
raise ExitStatusError(
'httpie.core.main() unexpectedly returned'
f' a non-zero exit status: {exit_status}'
)
stdout.seek(0)
stderr.seek(0)
devnull.seek(0)
output = stdout.read()
devnull_output = devnull.read()
try:
output = output.decode('utf8')
except UnicodeDecodeError:
r = BytesCLIResponse(output)
else:
r = StrCLIResponse(output)
try:
devnull_output = devnull_output.decode('utf8')
except Exception:
pass
r.devnull = devnull_output
r.stderr = stderr.read()
r.exit_status = exit_status
if r.exit_status != ExitStatus.SUCCESS:
sys.stderr.write(r.stderr)
return r
finally:
devnull.close()
stdout.close()
stderr.close()
env.cleanup()
|
ZHAW-INES/rioxo-uClinux-dist
|
refs/heads/rtsp
|
user/python/python-2.4.4/Lib/encodings/cp1257.py
|
15
|
""" Python Character Mapping Codec generated from 'CP1257.TXT' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
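# Start from an identity mapping for 0-255, then override only the
# positions where CP1257 differs from Latin-1.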
decoding_map.update({
0x0080: 0x20ac, # EURO SIGN
0x0081: None, # UNDEFINED
0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
0x0083: None, # UNDEFINED
0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x0085: 0x2026, # HORIZONTAL ELLIPSIS
0x0086: 0x2020, # DAGGER
0x0087: 0x2021, # DOUBLE DAGGER
0x0088: None, # UNDEFINED
0x0089: 0x2030, # PER MILLE SIGN
0x008a: None, # UNDEFINED
0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
0x008c: None, # UNDEFINED
0x008d: 0x00a8, # DIAERESIS
0x008e: 0x02c7, # CARON
0x008f: 0x00b8, # CEDILLA
0x0090: None, # UNDEFINED
0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x0095: 0x2022, # BULLET
0x0096: 0x2013, # EN DASH
0x0097: 0x2014, # EM DASH
0x0098: None, # UNDEFINED
0x0099: 0x2122, # TRADE MARK SIGN
0x009a: None, # UNDEFINED
0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
0x009c: None, # UNDEFINED
0x009d: 0x00af, # MACRON
0x009e: 0x02db, # OGONEK
0x009f: None, # UNDEFINED
0x00a1: None, # UNDEFINED
0x00a5: None, # UNDEFINED
0x00a8: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
0x00aa: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
0x00af: 0x00c6, # LATIN CAPITAL LETTER AE
0x00b8: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
0x00ba: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
0x00bf: 0x00e6, # LATIN SMALL LETTER AE
0x00c0: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x00c1: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
0x00c2: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
0x00c3: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x00c6: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00c7: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
0x00c8: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x00ca: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x00cb: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
0x00cc: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
0x00cd: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
0x00ce: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
0x00cf: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
0x00d0: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00d1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00d2: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
0x00d4: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
0x00d8: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
0x00d9: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x00da: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x00db: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
0x00dd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00de: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00e0: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x00e1: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
0x00e2: 0x0101, # LATIN SMALL LETTER A WITH MACRON
0x00e3: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x00e6: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00e7: 0x0113, # LATIN SMALL LETTER E WITH MACRON
0x00e8: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x00ea: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x00eb: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
0x00ec: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
0x00ed: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
0x00ee: 0x012b, # LATIN SMALL LETTER I WITH MACRON
0x00ef: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
0x00f0: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00f1: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00f2: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
0x00f4: 0x014d, # LATIN SMALL LETTER O WITH MACRON
0x00f8: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
0x00f9: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x00fa: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x00fb: 0x016b, # LATIN SMALL LETTER U WITH MACRON
0x00fd: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00fe: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00ff: 0x02d9, # DOT ABOVE
})
### Encoding Map
encoding_map = codecs.make_encoding_map(decoding_map)
|
karyon/django
|
refs/heads/master
|
tests/generic_inline_admin/urls.py
|
407
|
from django.conf.urls import url
from . import admin
urlpatterns = [
url(r'^generic_inline_admin/admin/', admin.site.urls),
]
|
kljensen/viff
|
refs/heads/master
|
viff/test/bedoza/util.py
|
2
|
# Copyright 2010 VIFF Development Team.
#
# This file is part of VIFF, the Virtual Ideal Functionality Framework.
#
# VIFF is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License (LGPL) as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# VIFF is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General
# Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with VIFF. If not, see <http://www.gnu.org/licenses/>.
from viff.config import generate_configs
from viff.test.util import RuntimeTestCase
from viff.bedoza.bedoza import BeDOZaRuntime
from viff.bedoza.util import _convolute
from viff.bedoza.add_macs import add_macs
from viff.bedoza.shares import PartialShare, PartialShareContents
# HACK: The paillier keys that are available as standard in VIFF tests
# are not suited for use with pypaillier. Hence, we use NaClPaillier
# to generate test keys. This confusion will disappear when pypaillier
# replaces the current Python-based paillier implementation.
from viff.paillierutil import NaClPaillier
# HACK^2: Currently, the NaClPaillier hack only works when triple is
# imported. It should ideally work without the triple package.
try:
import tripple
except ImportError:
tripple = None
# The PyPaillier and commitment packages are not standard parts of VIFF so we
# skip them instead of letting them fail if the packages are not available.
try:
import pypaillier
except ImportError:
pypaillier = None
def log(rt, msg):
print "player%d ------> %s" % (rt.id, msg)
class BeDOZaTestCase(RuntimeTestCase):
num_players = 3
runtime_class = BeDOZaRuntime
# In production, paillier keys should be something like 2000
# bit. For test purposes, it is ok to use small keys.
# TODO: paillier freezes if key size is too small, e.g. 13.
paillier_key_size = 250
def setUp(self):
RuntimeTestCase.setUp(self)
self.security_parameter = 32
# TODO: During test, we would like generation of Paillier keys to
# be deterministic. How do we obtain that?
def generate_configs(self, *args):
return generate_configs(
paillier=NaClPaillier(self.paillier_key_size), *args)
def skip_if_missing_packages(*test_cases):
"""Skipts the given list of test cases if some of the required
external viff packages (tripple, pypaillier) is not available.
"""
missing = []
if not pypaillier:
missing.append("pypaillier")
if not tripple:
missing.append("tripple")
if missing:
for test_case in test_cases:
test_case.skip = "Skipped due to missing packages: %s" % missing
class TestPartialShareGenerator(object):
"""Class for quick generation of partial shares with no
security. Suited only for use when partial shares are needed as
input to a test.
"""
def __init__(self, Zp, runtime, random, paillier):
self.paillier = paillier
self.Zp = Zp
self.runtime = runtime
self.random = random
def generate_share(self, value):
self.runtime.increment_pc()
# TODO: Exclusive?
r = [self.Zp(self.random.randint(0, self.Zp.modulus - 1))
for _ in range(self.runtime.num_players - 1)]
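# Additive sharing: player 1 takes value minus the sum of the other
# players' random elements, so all shares sum to `value`.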
if self.runtime.id == 1:
share = value - sum(r)
else:
share = r[self.runtime.id - 2]
enc_share = self.paillier.encrypt(share.value)
enc_shares = _convolute(self.runtime, enc_share)
def create_partial_share(enc_shares, share):
return PartialShare(self.runtime, self.Zp, share, enc_shares)
self.runtime.schedule_callback(enc_shares, create_partial_share, share)
return enc_shares
def generate_random_shares(self, n):
self.runtime.increment_pc()
N_squared_list = [self.runtime.players[player_id].pubkey['n_square']
for player_id in self.runtime.players]
shares = [PartialShare(self.runtime, self.Zp) for _ in xrange(n)]
for inx in xrange(n):
r = self.random.randint(0, self.Zp.modulus - 1)
ri = self.Zp(r)
enc_share = self.paillier.encrypt(ri.value)
enc_shares = _convolute(self.runtime, enc_share)
def create_partial_share(enc_shares, ri, s, N_squared_list):
s.callback(PartialShareContents(ri, enc_shares,
N_squared_list))
self.runtime.schedule_callback(enc_shares,
create_partial_share,
ri,
shares[inx],
N_squared_list)
return shares
class TestShareGenerator(TestPartialShareGenerator):
"""Class for quick generation of shares with no security. Suited
only for use when shares are needed as input to a test.
"""
def __init__(self, Zp, runtime, random, paillier, u_bound, alpha):
self.u_bound = u_bound
self.alpha = alpha
TestPartialShareGenerator.__init__(self, Zp, runtime, random, paillier)
def generate_share(self, value):
self.runtime.increment_pc()
partial_share = TestPartialShareGenerator.generate_share(self, value)
full_share = add_macs(self.runtime, self.Zp, self.u_bound, self.alpha,
self.random, self.paillier, [partial_share])
return full_share[0]
def generate_random_shares(self, n):
self.runtime.increment_pc()
partial_shares = TestPartialShareGenerator.generate_random_shares(self, n)
return add_macs(self.runtime, self.Zp, self.u_bound, self.alpha,
self.random, self.paillier, partial_shares)
|
MakeHer/edx-platform
|
refs/heads/dashboard.2
|
lms/djangoapps/notes/migrations/0001_initial.py
|
50
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import xmodule_django.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('course_id', xmodule_django.models.CourseKeyField(max_length=255, db_index=True)),
('uri', models.CharField(max_length=255, db_index=True)),
('text', models.TextField(default=b'')),
('quote', models.TextField(default=b'')),
('range_start', models.CharField(max_length=2048)),
('range_start_offset', models.IntegerField()),
('range_end', models.CharField(max_length=2048)),
('range_end_offset', models.IntegerField()),
('tags', models.TextField(default=b'')),
('created', models.DateTimeField(db_index=True, auto_now_add=True, null=True)),
('updated', models.DateTimeField(auto_now=True, db_index=True)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
]
|
marmyshev/bug_1117098
|
refs/heads/master
|
openlp/plugins/songs/lib/__init__.py
|
1
|
# -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2013 Raoul Snyman #
# Portions copyright (c) 2008-2013 Tim Bentley, Gerald Britton, Jonathan #
# Corwin, Samuel Findlay, Michael Gorven, Scott Guerrieri, Matthias Hub, #
# Meinert Jordan, Armin Köhler, Erik Lundin, Edwin Lunando, Brian T. Meyer. #
# Joshua Miller, Stevan Pettit, Andreas Preikschat, Mattias Põldaru, #
# Christian Richter, Philip Ridout, Simon Scudder, Jeffrey Smith, #
# Maikel Stuivenberg, Martin Thompson, Jon Tibble, Dave Warnock, #
# Frode Woldsund, Martin Zibricky, Patrick Zimmermann #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
"""
The :mod:`~openlp.plugins.songs.lib` module contains a number of library functions and classes used in the Songs plugin.
"""
import logging
import os
import re
from PyQt4 import QtGui
from openlp.core.lib import translate
from openlp.core.utils import AppLocation, CONTROL_CHARS
from openlp.plugins.songs.lib.db import MediaFile, Song
from .db import Author
from .ui import SongStrings
log = logging.getLogger(__name__)
WHITESPACE = re.compile(r'[\W_]+', re.UNICODE)
APOSTROPHE = re.compile('[\'`’ʻ′]', re.UNICODE)
# PATTERN will look for the next occurrence of one of these symbols:
# \controlword - optionally preceded by \*, optionally followed by a number
# \'## - where ## is a pair of hex digits, representing a single character
# \# - where # is a single non-alpha character, representing a special symbol
# { or } - marking the beginning/end of a group
# a run of characters without any \ { } or end-of-line
PATTERN = re.compile(r"(\\\*)?\\([a-z]{1,32})(-?\d{1,10})?[ ]?|\\'([0-9a-f]{2})|\\([^a-z*])|([{}])|[\r\n]+|([^\\{}\r\n]+)", re.I)
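# A hedged illustration (not part of the original module) of how PATTERN
# tokenizes a tiny RTF fragment; each match exposes the groups
# (iinu, word, arg, hex, char, brace, tchar) consumed by strip_rtf() below:
#
#     >>> [m.group() for m in PATTERN.finditer(r"{\rtf1\ansi Hello \'e9!}")]
#     ['{', '\\rtf1', '\\ansi ', 'Hello ', "\\'e9", '!', '}']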
# RTF control words which specify a "destination" to be ignored.
DESTINATIONS = frozenset((
'aftncn', 'aftnsep', 'aftnsepc', 'annotation', 'atnauthor',
'atndate', 'atnicn', 'atnid', 'atnparent', 'atnref', 'atntime',
'atrfend', 'atrfstart', 'author', 'background', 'bkmkend',
'bkmkstart', 'blipuid', 'buptim', 'category',
'colorschememapping', 'colortbl', 'comment', 'company', 'creatim',
'datafield', 'datastore', 'defchp', 'defpap', 'do', 'doccomm',
'docvar', 'dptxbxtext', 'ebcend', 'ebcstart', 'factoidname',
'falt', 'fchars', 'ffdeftext', 'ffentrymcr', 'ffexitmcr',
'ffformat', 'ffhelptext', 'ffl', 'ffname', 'ffstattext',
'file', 'filetbl', 'fldinst', 'fldtype', 'fname',
'fontemb', 'fontfile', 'footer', 'footerf', 'footerl', 'footerr',
'footnote', 'formfield', 'ftncn', 'ftnsep', 'ftnsepc', 'g',
'generator', 'gridtbl', 'header', 'headerf', 'headerl',
'headerr', 'hl', 'hlfr', 'hlinkbase', 'hlloc', 'hlsrc', 'hsv',
'htmltag', 'info', 'keycode', 'keywords', 'latentstyles',
'lchars', 'levelnumbers', 'leveltext', 'lfolevel', 'linkval',
'list', 'listlevel', 'listname', 'listoverride',
'listoverridetable', 'listpicture', 'liststylename', 'listtable',
'listtext', 'lsdlockedexcept', 'macc', 'maccPr', 'mailmerge',
'maln', 'malnScr', 'manager', 'margPr', 'mbar', 'mbarPr',
'mbaseJc', 'mbegChr', 'mborderBox', 'mborderBoxPr', 'mbox',
'mboxPr', 'mchr', 'mcount', 'mctrlPr', 'md', 'mdeg', 'mdegHide',
'mden', 'mdiff', 'mdPr', 'me', 'mendChr', 'meqArr', 'meqArrPr',
'mf', 'mfName', 'mfPr', 'mfunc', 'mfuncPr', 'mgroupChr',
'mgroupChrPr', 'mgrow', 'mhideBot', 'mhideLeft', 'mhideRight',
'mhideTop', 'mhtmltag', 'mlim', 'mlimloc', 'mlimlow',
'mlimlowPr', 'mlimupp', 'mlimuppPr', 'mm', 'mmaddfieldname',
'mmath', 'mmathPict', 'mmathPr', 'mmaxdist', 'mmc', 'mmcJc',
'mmconnectstr', 'mmconnectstrdata', 'mmcPr', 'mmcs',
'mmdatasource', 'mmheadersource', 'mmmailsubject', 'mmodso',
'mmodsofilter', 'mmodsofldmpdata', 'mmodsomappedname',
'mmodsoname', 'mmodsorecipdata', 'mmodsosort', 'mmodsosrc',
'mmodsotable', 'mmodsoudl', 'mmodsoudldata', 'mmodsouniquetag',
'mmPr', 'mmquery', 'mmr', 'mnary', 'mnaryPr', 'mnoBreak',
'mnum', 'mobjDist', 'moMath', 'moMathPara', 'moMathParaPr',
'mopEmu', 'mphant', 'mphantPr', 'mplcHide', 'mpos', 'mr',
'mrad', 'mradPr', 'mrPr', 'msepChr', 'mshow', 'mshp', 'msPre',
'msPrePr', 'msSub', 'msSubPr', 'msSubSup', 'msSubSupPr', 'msSup',
'msSupPr', 'mstrikeBLTR', 'mstrikeH', 'mstrikeTLBR', 'mstrikeV',
'msub', 'msubHide', 'msup', 'msupHide', 'mtransp', 'mtype',
'mvertJc', 'mvfmf', 'mvfml', 'mvtof', 'mvtol', 'mzeroAsc',
    'mzeroDesc', 'mzeroWid', 'nesttableprops', 'nextfile',
'nonesttables', 'objalias', 'objclass', 'objdata', 'object',
'objname', 'objsect', 'objtime', 'oldcprops', 'oldpprops',
'oldsprops', 'oldtprops', 'oleclsid', 'operator', 'panose',
'password', 'passwordhash', 'pgp', 'pgptbl', 'picprop', 'pict',
'pn', 'pnseclvl', 'pntext', 'pntxta', 'pntxtb', 'printim',
'private', 'propname', 'protend', 'protstart', 'protusertbl',
'pxe', 'result', 'revtbl', 'revtim', 'rsidtbl', 'rxe', 'shp',
'shpgrp', 'shpinst', 'shppict', 'shprslt', 'shptxt', 'sn', 'sp',
'staticval', 'stylesheet', 'subject', 'sv', 'svb', 'tc',
'template', 'themedata', 'title', 'txe', 'ud', 'upr',
'userprops', 'wgrffmtfilter', 'windowcaption', 'writereservation',
'writereservhash', 'xe', 'xform', 'xmlattrname', 'xmlattrvalue',
'xmlclose', 'xmlname', 'xmlnstbl', 'xmlopen'))
# Translation of some special characters.
SPECIAL_CHARS = {
'\n': '\n',
'\r': '\n',
'~': '\u00A0',
'-': '\u00AD',
'_': '\u2011',
'par': '\n',
'sect': '\n\n',
# Required page and column break.
# Would be good if we could split verse into subverses here.
'page': '\n\n',
'column': '\n\n',
# Soft breaks.
'softpage': '[---]',
'softcol': '[---]',
'line': '\n',
'tab': '\t',
'emdash': '\u2014',
'endash': '\u2013',
'emspace': '\u2003',
'enspace': '\u2002',
'qmspace': '\u2005',
'bullet': '\u2022',
'lquote': '\u2018',
'rquote': '\u2019',
'ldblquote': '\u201C',
'rdblquote': '\u201D',
'ltrmark': '\u200E',
'rtlmark': '\u200F',
'zwj': '\u200D',
'zwnj': '\u200C'}
CHARSET_MAPPING = {
'0': 'cp1252',
'128': 'cp932',
'129': 'cp949',
'134': 'cp936',
'161': 'cp1253',
'162': 'cp1254',
'163': 'cp1258',
'177': 'cp1255',
'178': 'cp1256',
'186': 'cp1257',
'204': 'cp1251',
'222': 'cp874',
'238': 'cp1250'}
class VerseType(object):
"""
VerseType provides an enumeration for the tags that may be associated with verses in songs.
"""
Verse = 0
Chorus = 1
Bridge = 2
PreChorus = 3
Intro = 4
Ending = 5
Other = 6
names = [
'Verse',
'Chorus',
'Bridge',
'Pre-Chorus',
'Intro',
'Ending',
'Other']
tags = [name[0].lower() for name in names]
translated_names = [
translate('SongsPlugin.VerseType', 'Verse'),
translate('SongsPlugin.VerseType', 'Chorus'),
translate('SongsPlugin.VerseType', 'Bridge'),
translate('SongsPlugin.VerseType', 'Pre-Chorus'),
translate('SongsPlugin.VerseType', 'Intro'),
translate('SongsPlugin.VerseType', 'Ending'),
translate('SongsPlugin.VerseType', 'Other')]
translated_tags = [name[0].lower() for name in translated_names]
@staticmethod
def translated_tag(verse_tag, default=Other):
"""
Return the translated UPPERCASE tag for a given tag, used to show translated verse tags in UI
``verse_tag``
The string to return a VerseType for
``default``
Default return value if no matching tag is found
"""
verse_tag = verse_tag[0].lower()
for num, tag in enumerate(VerseType.tags):
if verse_tag == tag:
return VerseType.translated_tags[num].upper()
if len(VerseType.names) > default:
return VerseType.translated_tags[default].upper()
else:
return VerseType.translated_tags[VerseType.Other].upper()
@staticmethod
def translated_name(verse_tag, default=Other):
"""
Return the translated name for a given tag
``verse_tag``
The string to return a VerseType for
``default``
Default return value if no matching tag is found
"""
verse_tag = verse_tag[0].lower()
for num, tag in enumerate(VerseType.tags):
if verse_tag == tag:
return VerseType.translated_names[num]
if len(VerseType.names) > default:
return VerseType.translated_names[default]
else:
return VerseType.translated_names[VerseType.Other]
@staticmethod
def from_tag(verse_tag, default=Other):
"""
Return the VerseType for a given tag
``verse_tag``
The string to return a VerseType for
``default``
Default return value if no matching tag is found
"""
verse_tag = verse_tag[0].lower()
for num, tag in enumerate(VerseType.tags):
if verse_tag == tag:
return num
if len(VerseType.names) > default:
return default
else:
return VerseType.Other
@staticmethod
def from_translated_tag(verse_tag, default=Other):
"""
Return the VerseType for a given tag
``verse_tag``
The string to return a VerseType for
``default``
Default return value if no matching tag is found
"""
verse_tag = verse_tag[0].lower()
for num, tag in enumerate(VerseType.translated_tags):
if verse_tag == tag:
return num
if len(VerseType.names) > default:
return default
else:
return VerseType.Other
@staticmethod
def from_string(verse_name, default=Other):
"""
Return the VerseType for a given string
``verse_name``
The string to return a VerseType for
``default``
Default return value if no matching tag is found
"""
verse_name = verse_name.lower()
for num, name in enumerate(VerseType.names):
if verse_name == name.lower():
return num
return default
@staticmethod
def from_translated_string(verse_name):
"""
Return the VerseType for a given string
``verse_name``
The string to return a VerseType for
"""
verse_name = verse_name.lower()
for num, translation in enumerate(VerseType.translated_names):
if verse_name == translation.lower():
return num
@staticmethod
def from_loose_input(verse_name, default=Other):
"""
Return the VerseType for a given string
``verse_name``
The string to return a VerseType for
``default``
Default return value if no matching tag is found
"""
if len(verse_name) > 1:
verse_index = VerseType.from_translated_string(verse_name)
if verse_index is None:
verse_index = VerseType.from_string(verse_name, default)
elif len(verse_name) == 1:
verse_index = VerseType.from_translated_tag(verse_name, None)
if verse_index is None:
verse_index = VerseType.from_tag(verse_name, default)
else:
return default
return verse_index
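# A hedged illustration (not part of the original module), assuming an English
# locale so that translated names match the untranslated ones:
#
#     VerseType.from_loose_input('chorus')  # -> 1 (VerseType.Chorus)
#     VerseType.from_loose_input('c')       # -> 1
#     VerseType.from_loose_input('xyzzy')   # -> 6 (VerseType.Other, default)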
def retrieve_windows_encoding(recommendation=None):
"""
Determines which encoding to use on an information source. The process uses both automated detection, which is
passed to this method as a recommendation, and user confirmation to return an encoding.
``recommendation``
A recommended encoding discovered programmatically for the user to confirm.
"""
# map chardet result to compatible windows standard code page
codepage_mapping = {'IBM866': 'cp866', 'TIS-620': 'cp874',
'SHIFT_JIS': 'cp932', 'GB2312': 'cp936', 'HZ-GB-2312': 'cp936',
'EUC-KR': 'cp949', 'Big5': 'cp950', 'ISO-8859-2': 'cp1250',
'windows-1250': 'cp1250', 'windows-1251': 'cp1251',
'windows-1252': 'cp1252', 'ISO-8859-7': 'cp1253',
'windows-1253': 'cp1253', 'ISO-8859-8': 'cp1255',
'windows-1255': 'cp1255'}
if recommendation in codepage_mapping:
recommendation = codepage_mapping[recommendation]
# Show dialog for encoding selection
encodings = [('cp1256', translate('SongsPlugin', 'Arabic (CP-1256)')),
('cp1257', translate('SongsPlugin', 'Baltic (CP-1257)')),
('cp1250', translate('SongsPlugin', 'Central European (CP-1250)')),
('cp1251', translate('SongsPlugin', 'Cyrillic (CP-1251)')),
('cp1253', translate('SongsPlugin', 'Greek (CP-1253)')),
('cp1255', translate('SongsPlugin', 'Hebrew (CP-1255)')),
('cp932', translate('SongsPlugin', 'Japanese (CP-932)')),
('cp949', translate('SongsPlugin', 'Korean (CP-949)')),
('cp936', translate('SongsPlugin', 'Simplified Chinese (CP-936)')),
('cp874', translate('SongsPlugin', 'Thai (CP-874)')),
('cp950', translate('SongsPlugin', 'Traditional Chinese (CP-950)')),
('cp1254', translate('SongsPlugin', 'Turkish (CP-1254)')),
('cp1258', translate('SongsPlugin', 'Vietnam (CP-1258)')),
('cp1252', translate('SongsPlugin', 'Western European (CP-1252)'))]
recommended_index = -1
if recommendation:
for index in range(len(encodings)):
if recommendation == encodings[index][0]:
recommended_index = index
break
if recommended_index > -1:
choice = QtGui.QInputDialog.getItem(None,
translate('SongsPlugin', 'Character Encoding'),
translate('SongsPlugin', 'The codepage setting is responsible\n'
'for the correct character representation.\nUsually you are fine with the preselected choice.'),
[pair[1] for pair in encodings], recommended_index, False)
else:
choice = QtGui.QInputDialog.getItem(None,
translate('SongsPlugin', 'Character Encoding'),
translate('SongsPlugin', 'Please choose the character encoding.\n'
'The encoding is responsible for the correct character representation.'),
[pair[1] for pair in encodings], 0, False)
if not choice[1]:
return None
return next(filter(lambda item: item[1] == choice[0], encodings))[0]
def clean_string(string):
"""
Strips punctuation from the passed string to assist searching.
"""
return WHITESPACE.sub(' ', APOSTROPHE.sub('', string)).lower()
def clean_title(title):
"""
Cleans the song title by removing Unicode control chars groups C0 & C1, as well as any trailing spaces.
"""
return CONTROL_CHARS.sub('', title).rstrip()
def clean_song(manager, song):
"""
    Cleans the search title, rebuilds the search lyrics, adds a default author if the song does not have one, and does
    other clean-ups. This should always be called when a new song is added or changed.
``manager``
The song's manager.
``song``
The song object.
"""
from .xml import SongXML
if song.title:
song.title = clean_title(song.title)
else:
song.title = ''
if song.alternate_title:
song.alternate_title = clean_title(song.alternate_title)
else:
song.alternate_title = ''
song.search_title = clean_string(song.title) + '@' + clean_string(song.alternate_title)
    # Only do this if the song is a 1.9.4 song (or older).
if song.lyrics.find('<lyrics language="en">') != -1:
# Remove the old "language" attribute from lyrics tag (prior to 1.9.5). This is not very important, but this
# keeps the database clean. This can be removed when everybody has cleaned his songs.
song.lyrics = song.lyrics.replace('<lyrics language="en">', '<lyrics>')
verses = SongXML().get_verses(song.lyrics)
song.search_lyrics = ' '.join([clean_string(verse[1])
for verse in verses])
# We need a new and clean SongXML instance.
sxml = SongXML()
# Rebuild the song's verses, to remove any wrong verse names (for example translated ones), which might have
# been added prior to 1.9.5.
# List for later comparison.
compare_order = []
for verse in verses:
verse_type = VerseType.tags[VerseType.from_loose_input(verse[0]['type'])]
sxml.add_verse_to_lyrics(
verse_type,
verse[0]['label'],
verse[1],
verse[0].get('lang')
)
compare_order.append(('%s%s' % (verse_type, verse[0]['label'])).upper())
if verse[0]['label'] == '1':
compare_order.append(verse_type.upper())
song.lyrics = str(sxml.extract_xml(), 'utf-8')
# Rebuild the verse order, to convert translated verse tags, which might have been added prior to 1.9.5.
if song.verse_order:
order = CONTROL_CHARS.sub('', song.verse_order).strip().split()
else:
order = []
new_order = []
for verse_def in order:
verse_type = VerseType.tags[
VerseType.from_loose_input(verse_def[0])]
if len(verse_def) > 1:
new_order.append(('%s%s' % (verse_type, verse_def[1:])).upper())
else:
new_order.append(verse_type.upper())
song.verse_order = ' '.join(new_order)
# Check if the verse order contains tags for verses which do not exist.
for order in new_order:
if order not in compare_order:
song.verse_order = ''
break
else:
verses = SongXML().get_verses(song.lyrics)
song.search_lyrics = ' '.join([clean_string(verse[1])
for verse in verses])
# The song does not have any author, add one.
if not song.authors:
name = SongStrings.AuthorUnknown
author = manager.get_object_filtered(Author, Author.display_name == name)
if author is None:
author = Author.populate(display_name=name, last_name='', first_name='')
song.authors.append(author)
if song.copyright:
song.copyright = CONTROL_CHARS.sub('', song.copyright).strip()
def get_encoding(font, font_table, default_encoding, failed=False):
"""
Finds an encoding to use. Asks user, if necessary.
``font``
The number of currently active font.
``font_table``
Dictionary of fonts and respective encodings.
``default_encoding``
The default encoding to use when font_table is empty or no font is used.
``failed``
A boolean indicating whether the previous encoding didn't work.
"""
encoding = None
if font in font_table:
encoding = font_table[font]
if not encoding and default_encoding:
encoding = default_encoding
if not encoding or failed:
encoding = retrieve_windows_encoding()
default_encoding = encoding
font_table[font] = encoding
return encoding, default_encoding
def strip_rtf(text, default_encoding=None):
"""
    This function strips RTF control structures and returns a unicode string.
Thanks to Markus Jarderot (MizardX) for this code, used by permission.
http://stackoverflow.com/questions/188545
``text``
RTF-encoded text, a string.
``default_encoding``
Default encoding to use when no encoding is specified.
"""
# Current font is the font tag we last met.
font = ''
# Character encoding is defined inside fonttable.
    # font_table could contain e.g. u'0': u'cp1252'
font_table = {'': ''}
# Stack of things to keep track of when entering/leaving groups.
stack = []
# Whether this group (and all inside it) are "ignorable".
ignorable = False
    # Number of ASCII characters to skip after a unicode character.
ucskip = 1
# Number of ASCII characters left to skip.
curskip = 0
# Output buffer.
out = []
# Encoded buffer.
ebytes = bytearray()
for match in PATTERN.finditer(text):
iinu, word, arg, hex, char, brace, tchar = match.groups()
# \x (non-alpha character)
if char:
if char in '\\{}':
tchar = char
else:
word = char
# Flush encoded buffer to output buffer
if ebytes and not hex and not tchar:
failed = False
while True:
try:
encoding, default_encoding = get_encoding(font, font_table, default_encoding, failed=failed)
if not encoding:
return None
dbytes = ebytes.decode(encoding)
# Code 5C is a peculiar case with Windows Codepage 932
if encoding == 'cp932' and '\\' in dbytes:
dbytes = dbytes.replace('\\', '\u00A5')
out.append(dbytes)
ebytes.clear()
except UnicodeDecodeError:
failed = True
else:
break
# {}
if brace:
curskip = 0
if brace == '{':
# Push state
stack.append((ucskip, ignorable, font))
elif brace == '}' and len(stack) > 0:
# Pop state
ucskip, ignorable, font = stack.pop()
# \command
elif word:
curskip = 0
if word in DESTINATIONS:
ignorable = True
elif word in SPECIAL_CHARS:
if not ignorable:
out.append(SPECIAL_CHARS[word])
elif word == 'uc':
ucskip = int(arg)
elif word == 'u':
c = int(arg)
if c < 0:
c += 0x10000
if not ignorable:
out.append(chr(c))
curskip = ucskip
elif word == 'fonttbl':
ignorable = True
elif word == 'f':
font = arg
elif word == 'ansicpg':
font_table[font] = 'cp' + arg
elif word == 'fcharset' and font not in font_table and arg in CHARSET_MAPPING:
font_table[font] = CHARSET_MAPPING[arg]
elif word == 'fldrslt':
pass
# \* 'Ignore if not understood' marker
elif iinu:
ignorable = True
# \'xx
elif hex:
if curskip > 0:
curskip -= 1
elif not ignorable:
ebytes.append(int(hex, 16))
elif tchar:
if curskip > 0:
curskip -= 1
elif not ignorable:
ebytes += tchar.encode()
text = ''.join(out)
return text, default_encoding
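# A hedged usage sketch (not part of the original module); with an explicit
# default encoding no encoding-selection dialog is shown:
#
#     >>> strip_rtf(r"{\rtf1\ansi Caf\'e9}", 'cp1252')
#     ('Café', 'cp1252')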
def delete_song(song_id, song_plugin):
"""
Deletes a song from the database. Media files associated to the song
are removed prior to the deletion of the song.
``song_id``
The ID of the song to delete.
``song_plugin``
The song plugin instance.
"""
media_files = song_plugin.manager.get_all_objects(MediaFile, MediaFile.song_id == song_id)
for media_file in media_files:
try:
os.remove(media_file.file_name)
except:
log.exception('Could not remove file: %s', media_file.file_name)
try:
save_path = os.path.join(AppLocation.get_section_data_path(song_plugin.name), 'audio', str(song_id))
if os.path.exists(save_path):
os.rmdir(save_path)
except OSError:
log.exception('Could not remove directory: %s', save_path)
song_plugin.manager.delete_object(Song, song_id)
|
printedheart/h2o-3
|
refs/heads/master
|
h2o-py/tests/testdir_algos/deeplearning/pyunit_checkpoint_new_category_in_predictorDL.py
|
4
|
import sys
sys.path.insert(1,"../../../")
import h2o, tests
def checkpoint_new_category_in_predictor():
sv1 = h2o.upload_file(h2o.locate("smalldata/iris/setosa_versicolor.csv"))
sv2 = h2o.upload_file(h2o.locate("smalldata/iris/setosa_versicolor.csv"))
vir = h2o.upload_file(h2o.locate("smalldata/iris/virginica.csv"))
m1 = h2o.deeplearning(x=sv1[[0,1,2,4]], y=sv1[3], epochs=100)
m2 = h2o.deeplearning(x=sv2[[0,1,2,4]], y=sv2[3], epochs=200, checkpoint=m1.id)
# attempt to continue building model, but with an expanded categorical predictor domain.
# this should fail
try:
m3 = h2o.deeplearning(x=vir[[0,1,2,4]], y=vir[3], epochs=200, checkpoint=m1.id)
assert False, "Expected continued model-building to fail with new categories introduced in predictor"
except EnvironmentError:
pass
# attempt to predict on new model, but with observations that have expanded categorical predictor domain.
predictions = m2.predict(vir)
if __name__ == '__main__':
tests.run_test(sys.argv, checkpoint_new_category_in_predictor)
|
gnu-sandhi/sandhi
|
refs/heads/master
|
modules/gr36/gnuradio-core/src/python/gnuradio/blks2impl/fm_demod.py
|
17
|
#
# Copyright 2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, optfir
from gnuradio.blks2impl.fm_emph import fm_deemph
from math import pi
class fm_demod_cf(gr.hier_block2):
"""
Generalized FM demodulation block with deemphasis and audio
filtering.
    This block demodulates a band-limited, complex down-converted FM
    channel into the original baseband signal, optionally applying
    deemphasis. Low pass filtering is done on the resultant signal. It
    produces an output float stream in the range of [-1.0, +1.0].
@param channel_rate: incoming sample rate of the FM baseband
    @type channel_rate: integer
@param deviation: maximum FM deviation (default = 5000)
@type deviation: float
@param audio_decim: input to output decimation rate
@type audio_decim: integer
@param audio_pass: audio low pass filter passband frequency
@type audio_pass: float
@param audio_stop: audio low pass filter stop frequency
@type audio_stop: float
@param gain: gain applied to audio output (default = 1.0)
@type gain: float
@param tau: deemphasis time constant (default = 75e-6), specify 'None'
to prevent deemphasis
"""
def __init__(self, channel_rate, audio_decim, deviation,
audio_pass, audio_stop, gain=1.0, tau=75e-6):
gr.hier_block2.__init__(self, "fm_demod_cf",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature(1, 1, gr.sizeof_float)) # Output signature
k = channel_rate/(2*pi*deviation)
QUAD = gr.quadrature_demod_cf(k)
audio_taps = optfir.low_pass(gain, # Filter gain
channel_rate, # Sample rate
audio_pass, # Audio passband
audio_stop, # Audio stopband
0.1, # Passband ripple
60) # Stopband attenuation
LPF = gr.fir_filter_fff(audio_decim, audio_taps)
if tau is not None:
DEEMPH = fm_deemph(channel_rate, tau)
self.connect(self, QUAD, DEEMPH, LPF, self)
else:
self.connect(self, QUAD, LPF, self)
class demod_20k0f3e_cf(fm_demod_cf):
"""
NBFM demodulation block, 20 KHz channels
This block demodulates a complex, downconverted, narrowband FM
channel conforming to 20K0F3E emission standards, outputting
floats in the range [-1.0, +1.0].
    @param channel_rate: incoming sample rate of the FM baseband
    @type channel_rate: integer
@param audio_decim: input to output decimation rate
@type audio_decim: integer
"""
def __init__(self, channel_rate, audio_decim):
fm_demod_cf.__init__(self, channel_rate, audio_decim,
5000, # Deviation
3000, # Audio passband frequency
4500) # Audio stopband frequency
class demod_200kf3e_cf(fm_demod_cf):
"""
WFM demodulation block, mono.
This block demodulates a complex, downconverted, wideband FM
channel conforming to 200KF3E emission standards, outputting
floats in the range [-1.0, +1.0].
    @param channel_rate: incoming sample rate of the FM baseband
    @type channel_rate: integer
@param audio_decim: input to output decimation rate
@type audio_decim: integer
"""
def __init__(self, channel_rate, audio_decim):
fm_demod_cf.__init__(self, channel_rate, audio_decim,
75000, # Deviation
15000, # Audio passband
16000, # Audio stopband
20.0) # Audio gain
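# A hedged usage sketch (not part of this module); the source block and audio
# sink here are illustrative assumptions:
#
#     from gnuradio import gr, audio
#     tb = gr.top_block()
#     demod = demod_200kf3e_cf(channel_rate=250000, audio_decim=5)
#     sink = audio.sink(50000)          # 250000 / 5 = 50 kHz output rate
#     tb.connect(src, demod, sink)      # src: a complex baseband source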
|
lihui7115/ChromiumGStreamerBackend
|
refs/heads/master
|
third_party/cython/src/Cython/Compiler/__init__.py
|
1472
|
# empty file
|
fandemonium/code
|
refs/heads/master
|
fix_index_fastqgz_names.py
|
2
|
##fixing index fastq.gz seq names to match RDP assembler's seq names###
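## Example (illustrative Illumina-style read name, an assumption):
##   "M00001:25:000000000-A1B2C:1:1101:15589:1342 1:N:0:1"
##   becomes "M00001:25:000000000-A1B2C:1:1101:15589:1342:0"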
import sys
import gzip
from Bio import SeqIO
if len(sys.argv) != 3:
print "USAGE: python fix_index_fastqgz_names.py XXX_I.fastq.gz FIXED_I.fastq"
sys.exit()
f = gzip.open(sys.argv[1], 'rU')
output = open(sys.argv[2], 'w')
for records in SeqIO.parse(f, 'fastq'):
records.id = records.id.split(' ')[0] + ":0"
records.description = records.description.split(' ')[0] + ":0"
records.name = records.name.strip() + ":0"
# print records
SeqIO.write(records, output, "fastq")
|
oinopion/pipeye
|
refs/heads/master
|
pipeye/views.py
|
3
|
from django.shortcuts import render
def home(request):
return render(request, 'home.html')
|
wathen/PhD
|
refs/heads/master
|
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/TH/MHDmulti.py
|
5
|
import petsc4py
import sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
import MatrixOperations as MO
import numpy as np
class Matrix(object):
def __init__(self):
pass
def create(self, mat):
pass
def destroy(self, mat):
pass
class P(Matrix):
def __init__(self, Fspace,P,Mass,L,F,M):
self.Fspace = Fspace
self.P = P
self.Mass = Mass
self.L = L
self.kspFp = F
self.M = M
# self.N = (n, n, n)
# self.F = zeros([n+2]*3, order='f')
self.IS0 = PETSc.IS().createGeneral(range(self.Fspace[0].dim()))
self.IS1 = PETSc.IS().createGeneral(range(self.Fspace[0].dim(),self.Fspace[0].dim()+self.Fspace[1].dim()))
self.IS2 = PETSc.IS().createGeneral(range(self.Fspace[0].dim()+self.Fspace[1].dim(),self.Fspace[0].dim()+self.Fspace[1].dim()+self.Fspace[2].dim()))
self.IS3 = PETSc.IS().createGeneral(range(self.Fspace[0].dim()+self.Fspace[1].dim()+self.Fspace[2].dim(),self.Fspace[0].dim()+self.Fspace[1].dim()+self.Fspace[2].dim()+self.Fspace[3].dim()))
def create(self, A):
self.IS = MO.IndexSet(self.Fspace)
self.F = self.P.getSubMatrix(self.IS0,self.IS0)
self.Bt = self.P.getSubMatrix(self.IS0,self.IS2)
self.Ct = self.P.getSubMatrix(self.IS0,self.IS1)
self.C = self.P.getSubMatrix(self.IS1,self.IS0)
self.A = self.P.getSubMatrix(self.IS3,self.IS3)
print 13333
def mult(self, A, x, y):
print 'multi apply'
u = x.getSubVector(self.IS0)
p = x.getSubVector(self.IS2)
b = x.getSubVector(self.IS1)
r = x.getSubVector(self.IS3)
FQp = p.duplicate()
uOut = self.F*u+self.Bt*p+self.Ct*b
        Qp = self.Mass*p
self.kspFp.solve(Qp,FQp)
pOut = -self.L*FQp
bOut = self.C*u+self.M*b
rOut = self.A*r
y.array = (np.concatenate([uOut.array, bOut.array, pOut.array, rOut.array]))
# print x.array
def matMult(self, A, x, y):
print 'multi apply'
u = x.getSubVector(self.IS0)
p = x.getSubVector(self.IS2)
b = x.getSubVector(self.IS1)
r = x.getSubVector(self.IS3)
FQp = p.duplicate()
uOut = self.F*u+self.Bt*p+self.Ct*b
        Qp = self.Mass*p
self.kspFp.solve(Qp,FQp)
pOut = -self.L*FQp
bOut = self.C*u+self.M*b
rOut = self.A*r
y.array = (np.concatenate([uOut.array, bOut.array, pOut.array, rOut.array]))
def multTranspose(self, A, x, y):
"y <- A' * x"
        self.mult(A, x, y)
class MHDmat(Matrix):
def __init__(self, Fspace,A):
self.Fspace = Fspace
self.A = A
self.IS = MO.IndexSet(Fspace)
def mult(self, A, x, y):
u = x.getSubVector(self.IS[0])
p = x.getSubVector(self.IS[1])
b = x.getSubVector(self.IS[2])
r = x.getSubVector(self.IS[3])
yu = MO.PETScMultiDuplications(u,3)
uOut = u.duplicate()
self.A[0].mult(u,yu[0])
self.A[2].multTranspose(p,yu[1])
        if self.A[1] is not None:
self.A[1].multTranspose(b,yu[2])
yu[2].scale(-1)
for i in range(3):
uOut.axpy(1,yu[i])
yp = MO.PETScMultiDuplications(p,2)
pOut = p.duplicate()
self.A[2].mult(u,yp[0])
self.A[5].mult(p,yp[1])
for i in range(2):
pOut.axpy(1,yp[i])
yb = MO.PETScMultiDuplications(b,3)
bOut = b.duplicate()
self.A[1].mult(u,yb[0])
self.A[3].mult(b,yb[1])
self.A[4].mult(r,yb[2])
for i in range(3):
bOut.axpy(1,yb[i])
yr = MO.PETScMultiDuplications(r,2)
rOut = r.duplicate()
self.A[4].multTranspose(b,yr[0])
self.A[6].mult(r,yr[1])
for i in range(2):
rOut.axpy(1,yr[i])
y.array = (np.concatenate([uOut.array, pOut.array, bOut.array, rOut.array]))
def getMatrix(self,matrix):
if matrix == 'Ct':
return self.A[1]
elif matrix == 'Bt':
return self.A[2]
elif matrix == 'A':
return self.A[0]
class MatFluid(Matrix):
def __init__(self, Fspace,A):
self.Fspace = Fspace
self.A = A
self.IS = MO.IndexSet(Fspace)
def mult(self, A, x, y):
u = x.getSubVector(self.IS[0])
p = x.getSubVector(self.IS[1])
yu = MO.PETScMultiDuplications(u,2)
uOut = u.duplicate()
self.A[0].mult(u,yu[0])
self.A[2].multTranspose(p,yu[1])
for i in range(2):
uOut.axpy(1,yu[i])
yp = MO.PETScMultiDuplications(p,2)
pOut = p.duplicate()
self.A[2].mult(u,yp[0])
self.A[5].mult(p,yp[1])
for i in range(2):
pOut.axpy(1,yp[i])
y.array = (np.concatenate([uOut.array, pOut.array]))
def getMatrix(self,matrix):
if matrix == 'Bt':
return self.A[2]
class MatMag(Matrix):
def __init__(self, Fspace,A):
self.Fspace = Fspace
self.A = A
self.IS = MO.IndexSet(Fspace)
def mult(self, A, x, y):
b = x.getSubVector(self.IS[0])
r = x.getSubVector(self.IS[1])
yb = MO.PETScMultiDuplications(b,2)
bOut = b.duplicate()
self.A[3].mult(b,yb[0])
self.A[4].mult(r,yb[1])
for i in range(2):
bOut.axpy(1,yb[i])
yr = MO.PETScMultiDuplications(r,2)
rOut = r.duplicate()
self.A[4].multTranspose(b,yr[0])
self.A[6].mult(r,yr[1])
for i in range(2):
rOut.axpy(1,yr[i])
y.array = (np.concatenate([ bOut.array, rOut.array]))
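# A hedged usage sketch (not part of this module): these classes are intended
# as Python contexts for PETSc "shell" matrices, e.g.
#
#     ctx = MHDmat(Fspace, A)            # A: list of assembled sub-blocks
#     n = sum(V.dim() for V in Fspace)   # total system size (an assumption)
#     K = PETSc.Mat().createPython([n, n], ctx)
#     K.setUp()
#     # K.mult(x, y) now dispatches to ctx.mult(K, x, y)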
|
jmgilbert2/energi
|
refs/heads/energi_v0_egihash_integration
|
qa/rpc-tests/forknotify.py
|
66
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test -alertnotify
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ForkNotifyTest(BitcoinTestFramework):
alert_filename = None # Set by setup_network
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
with open(self.alert_filename, 'w') as f:
pass # Just open then close to create zero-length file
self.nodes.append(start_node(0, self.options.tmpdir,
["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
# Node1 mines block.version=211 blocks
self.nodes.append(start_node(1, self.options.tmpdir,
["-blockversion=211"]))
connect_nodes(self.nodes[1], 0)
self.is_network_split = False
self.sync_all()
def run_test(self):
# Mine 51 up-version blocks
self.nodes[1].generate(51)
self.sync_all()
        # -alertnotify should trigger on the 51st,
# but mine and sync another to give
# -alertnotify time to write
self.nodes[1].generate(1)
self.sync_all()
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
if len(alert_text) == 0:
raise AssertionError("-alertnotify did not warn of up-version blocks")
# Mine more up-version blocks, should not get more alerts:
self.nodes[1].generate(1)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
with open(self.alert_filename, 'r') as f:
alert_text2 = f.read()
if alert_text != alert_text2:
raise AssertionError("-alertnotify excessive warning of up-version blocks")
if __name__ == '__main__':
ForkNotifyTest().main()
|
stackforge/monasca-log-api
|
refs/heads/master
|
monasca_log_api/app/base/model.py
|
1
|
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
import six
from monasca_log_api.common.rest import utils as rest_utils
def serialize_envelope(envelope):
"""Returns json representation of an envelope.
:return: json object of envelope
:rtype: six.text_type
"""
json = rest_utils.as_json(envelope, ensure_ascii=False)
if six.PY2:
raw = six.text_type(json.replace(r'\\', r'\\\\'), encoding='utf-8',
errors='replace')
else:
raw = json
return raw
class LogEnvelopeException(Exception):
pass
class Envelope(dict):
def __init__(self, log, meta):
if not log:
error_msg = 'Envelope cannot be created without log'
raise LogEnvelopeException(error_msg)
if 'tenantId' not in meta or not meta.get('tenantId'):
error_msg = 'Envelope cannot be created without tenant'
raise LogEnvelopeException(error_msg)
creation_time = self._get_creation_time()
super(Envelope, self).__init__(
log=log,
creation_time=creation_time,
meta=meta
)
@staticmethod
def _get_creation_time():
return timeutils.utcnow_ts()
@classmethod
def new_envelope(cls, log, tenant_id, region, dimensions=None):
"""Creates new log envelope
        Log envelope is combined out of the following properties
* log - dict
* creation_time - timestamp
* meta - meta block
        Example output json would look like this:
.. code-block:: json
{
"log": {
"message": "Some message",
"dimensions": {
"hostname": "devstack"
}
},
"creation_time": 1447834886,
"meta": {
"tenantId": "e4bd29509eda473092d32aadfee3e7b1",
"region": "pl"
}
}
        :param dict log: original log element (containing message and other
        params)
:param str tenant_id: tenant id to be put in meta field
:param str region: region to be put in meta field
:param dict dimensions: additional dimensions to be appended to log
object dimensions
"""
if dimensions:
log['dimensions'].update(dimensions)
log_meta = {
'region': region,
'tenantId': tenant_id
}
return cls(log, log_meta)
@property
def log(self):
return self.get('log', None)
@property
def creation_time(self):
return self.get('creation_time', None)
@property
def meta(self):
return self.get('meta', None)
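# A hedged usage sketch (not part of this module), with values taken from the
# docstring above:
#
#     envelope = Envelope.new_envelope(
#         log={'message': 'Some message',
#              'dimensions': {'hostname': 'devstack'}},
#         tenant_id='e4bd29509eda473092d32aadfee3e7b1',
#         region='pl')
#     raw = serialize_envelope(envelope)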
|
shiblon/pytour
|
refs/heads/master
|
static/js/pypyjs/pypy-nojit.js-0.3.1/lib/modules/test/test_multibytecodec.py
|
8
|
# test_multibytecodec.py
# Unit test for multibytecodec itself
#
from test import test_support
from test.test_support import TESTFN
import unittest, StringIO, codecs, sys, os
import _multibytecodec
ALL_CJKENCODINGS = [
# _codecs_cn
'gb2312', 'gbk', 'gb18030', 'hz',
# _codecs_hk
'big5hkscs',
# _codecs_jp
'cp932', 'shift_jis', 'euc_jp', 'euc_jisx0213', 'shift_jisx0213',
'euc_jis_2004', 'shift_jis_2004',
# _codecs_kr
'cp949', 'euc_kr', 'johab',
# _codecs_tw
'big5', 'cp950',
# _codecs_iso2022
'iso2022_jp', 'iso2022_jp_1', 'iso2022_jp_2', 'iso2022_jp_2004',
'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr',
]
class Test_MultibyteCodec(unittest.TestCase):
def test_nullcoding(self):
for enc in ALL_CJKENCODINGS:
self.assertEqual(''.decode(enc), u'')
self.assertEqual(unicode('', enc), u'')
self.assertEqual(u''.encode(enc), '')
def test_str_decode(self):
for enc in ALL_CJKENCODINGS:
self.assertEqual('abcd'.encode(enc), 'abcd')
def test_errorcallback_longindex(self):
dec = codecs.getdecoder('euc-kr')
myreplace = lambda exc: (u'', sys.maxint+1)
codecs.register_error('test.cjktest', myreplace)
self.assertRaises((IndexError, OverflowError), dec,
'apple\x92ham\x93spam', 'test.cjktest')
def test_codingspec(self):
for enc in ALL_CJKENCODINGS:
code = '# coding: {}\n'.format(enc)
exec code
def test_init_segfault(self):
# bug #3305: this used to segfault
self.assertRaises(AttributeError,
_multibytecodec.MultibyteStreamReader, None)
self.assertRaises(AttributeError,
_multibytecodec.MultibyteStreamWriter, None)
class Test_IncrementalEncoder(unittest.TestCase):
def test_stateless(self):
# cp949 encoder isn't stateful at all.
encoder = codecs.getincrementalencoder('cp949')()
self.assertEqual(encoder.encode(u'\ud30c\uc774\uc36c \ub9c8\uc744'),
'\xc6\xc4\xc0\xcc\xbd\xe3 \xb8\xb6\xc0\xbb')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode(u'\u2606\u223c\u2606', True),
'\xa1\xd9\xa1\xad\xa1\xd9')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode(u'', True), '')
self.assertEqual(encoder.encode(u'', False), '')
self.assertEqual(encoder.reset(), None)
def test_stateful(self):
# jisx0213 encoder is stateful for a few codepoints. eg)
# U+00E6 => A9DC
# U+00E6 U+0300 => ABC4
# U+0300 => ABDC
encoder = codecs.getincrementalencoder('jisx0213')()
self.assertEqual(encoder.encode(u'\u00e6\u0300'), '\xab\xc4')
self.assertEqual(encoder.encode(u'\u00e6'), '')
self.assertEqual(encoder.encode(u'\u0300'), '\xab\xc4')
self.assertEqual(encoder.encode(u'\u00e6', True), '\xa9\xdc')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode(u'\u0300'), '\xab\xdc')
self.assertEqual(encoder.encode(u'\u00e6'), '')
self.assertEqual(encoder.encode('', True), '\xa9\xdc')
self.assertEqual(encoder.encode('', True), '')
def test_stateful_keep_buffer(self):
encoder = codecs.getincrementalencoder('jisx0213')()
self.assertEqual(encoder.encode(u'\u00e6'), '')
self.assertRaises(UnicodeEncodeError, encoder.encode, u'\u0123')
self.assertEqual(encoder.encode(u'\u0300\u00e6'), '\xab\xc4')
self.assertRaises(UnicodeEncodeError, encoder.encode, u'\u0123')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode(u'\u0300'), '\xab\xdc')
self.assertEqual(encoder.encode(u'\u00e6'), '')
self.assertRaises(UnicodeEncodeError, encoder.encode, u'\u0123')
self.assertEqual(encoder.encode(u'', True), '\xa9\xdc')
def test_issue5640(self):
encoder = codecs.getincrementalencoder('shift-jis')('backslashreplace')
self.assertEqual(encoder.encode(u'\xff'), b'\\xff')
self.assertEqual(encoder.encode(u'\n'), b'\n')
class Test_IncrementalDecoder(unittest.TestCase):
def test_dbcs(self):
# cp949 decoder is simple with only 1 or 2 bytes sequences.
decoder = codecs.getincrementaldecoder('cp949')()
self.assertEqual(decoder.decode('\xc6\xc4\xc0\xcc\xbd'),
u'\ud30c\uc774')
self.assertEqual(decoder.decode('\xe3 \xb8\xb6\xc0\xbb'),
u'\uc36c \ub9c8\uc744')
self.assertEqual(decoder.decode(''), u'')
def test_dbcs_keep_buffer(self):
decoder = codecs.getincrementaldecoder('cp949')()
self.assertEqual(decoder.decode('\xc6\xc4\xc0'), u'\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode, '', True)
self.assertEqual(decoder.decode('\xcc'), u'\uc774')
self.assertEqual(decoder.decode('\xc6\xc4\xc0'), u'\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode, '\xcc\xbd', True)
self.assertEqual(decoder.decode('\xcc'), u'\uc774')
def test_iso2022(self):
decoder = codecs.getincrementaldecoder('iso2022-jp')()
ESC = '\x1b'
self.assertEqual(decoder.decode(ESC + '('), u'')
self.assertEqual(decoder.decode('B', True), u'')
self.assertEqual(decoder.decode(ESC + '$'), u'')
self.assertEqual(decoder.decode('B@$'), u'\u4e16')
self.assertEqual(decoder.decode('@$@'), u'\u4e16')
self.assertEqual(decoder.decode('$', True), u'\u4e16')
self.assertEqual(decoder.reset(), None)
self.assertEqual(decoder.decode('@$'), u'@$')
self.assertEqual(decoder.decode(ESC + '$'), u'')
self.assertRaises(UnicodeDecodeError, decoder.decode, '', True)
self.assertEqual(decoder.decode('B@$'), u'\u4e16')
class Test_StreamReader(unittest.TestCase):
def test_bug1728403(self):
try:
with open(TESTFN, 'w') as f:
f.write('\xa1')
f = codecs.open(TESTFN, encoding='cp949')
self.assertRaises(UnicodeDecodeError, f.read, 2)
finally:
try: f.close()
except: pass
os.unlink(TESTFN)
class Test_StreamWriter(unittest.TestCase):
@unittest.skipUnless(len(u'\U00012345') == 2, 'need a narrow build')
def test_gb18030(self):
s = StringIO.StringIO()
c = codecs.getwriter('gb18030')(s)
c.write(u'123')
self.assertEqual(s.getvalue(), '123')
c.write(u'\U00012345')
self.assertEqual(s.getvalue(), '123\x907\x959')
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(), '123\x907\x959')
c.write(u'\U00012345'[1] + u'\U00012345' + u'\uac00\u00ac')
self.assertEqual(s.getvalue(),
'123\x907\x959\x907\x959\x907\x959\x827\xcf5\x810\x851')
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(),
'123\x907\x959\x907\x959\x907\x959\x827\xcf5\x810\x851')
self.assertRaises(UnicodeError, c.reset)
self.assertEqual(s.getvalue(),
'123\x907\x959\x907\x959\x907\x959\x827\xcf5\x810\x851')
@unittest.skipUnless(len(u'\U00012345') == 2, 'need a narrow build')
def test_utf_8(self):
s= StringIO.StringIO()
c = codecs.getwriter('utf-8')(s)
c.write(u'123')
self.assertEqual(s.getvalue(), '123')
c.write(u'\U00012345')
self.assertEqual(s.getvalue(), '123\xf0\x92\x8d\x85')
# Python utf-8 codec can't buffer surrogate pairs yet.
if 0:
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(), '123\xf0\x92\x8d\x85')
c.write(u'\U00012345'[1] + u'\U00012345' + u'\uac00\u00ac')
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac')
c.write(u'\U00012345'[0])
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac')
c.reset()
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac\xed\xa0\x88')
c.write(u'\U00012345'[1])
self.assertEqual(s.getvalue(),
'123\xf0\x92\x8d\x85\xf0\x92\x8d\x85\xf0\x92\x8d\x85'
'\xea\xb0\x80\xc2\xac\xed\xa0\x88\xed\xbd\x85')
def test_streamwriter_strwrite(self):
s = StringIO.StringIO()
wr = codecs.getwriter('gb18030')(s)
wr.write('abcd')
self.assertEqual(s.getvalue(), 'abcd')
class Test_ISO2022(unittest.TestCase):
def test_g2(self):
iso2022jp2 = '\x1b(B:hu4:unit\x1b.A\x1bNi de famille'
uni = u':hu4:unit\xe9 de famille'
self.assertEqual(iso2022jp2.decode('iso2022-jp-2'), uni)
def test_iso2022_jp_g0(self):
self.assertNotIn('\x0e', u'\N{SOFT HYPHEN}'.encode('iso-2022-jp-2'))
for encoding in ('iso-2022-jp-2004', 'iso-2022-jp-3'):
e = u'\u3406'.encode(encoding)
self.assertFalse(filter(lambda x: x >= '\x80', e))
def test_bug1572832(self):
if sys.maxunicode >= 0x10000:
myunichr = unichr
else:
myunichr = lambda x: unichr(0xD7C0+(x>>10)) + unichr(0xDC00+(x&0x3FF))
for x in xrange(0x10000, 0x110000):
# Any ISO 2022 codec will cause the segfault
myunichr(x).encode('iso_2022_jp', 'ignore')
class TestStateful(unittest.TestCase):
text = u'\u4E16\u4E16'
encoding = 'iso-2022-jp'
expected = b'\x1b$B@$@$'
expected_reset = b'\x1b$B@$@$\x1b(B'
def test_encode(self):
self.assertEqual(self.text.encode(self.encoding), self.expected_reset)
def test_incrementalencoder(self):
encoder = codecs.getincrementalencoder(self.encoding)()
output = b''.join(
encoder.encode(char)
for char in self.text)
self.assertEqual(output, self.expected)
def test_incrementalencoder_final(self):
encoder = codecs.getincrementalencoder(self.encoding)()
last_index = len(self.text) - 1
output = b''.join(
encoder.encode(char, index == last_index)
for index, char in enumerate(self.text))
self.assertEqual(output, self.expected_reset)
class TestHZStateful(TestStateful):
text = u'\u804a\u804a'
encoding = 'hz'
expected = b'~{ADAD'
expected_reset = b'~{ADAD~}'
def test_main():
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
|
FlorianLudwig/odoo
|
refs/heads/8.0
|
addons/account/wizard/__init__.py
|
362
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_report_common
import account_report_common_partner
import account_report_common_journal
import account_report_common_account
import account_automatic_reconcile
import account_move_line_reconcile_select
import account_move_line_unreconcile_select
import account_reconcile_partner_process
import account_reconcile
import account_unreconcile
import account_invoice_refund
import account_journal_select
import account_move_bank_reconcile
import account_subscription_generate
import account_report_aged_partner_balance
import account_report_partner_ledger
import account_report_partner_balance
import account_period_close
import account_fiscalyear_close
import account_fiscalyear_close_state
import account_vat
import account_open_closed_fiscalyear
import account_invoice_state
import account_chart
import account_tax_chart
import account_financial_report
#TODO: remove this file, it is no longer used
# also remove the related view file
import account_validate_account_move
import account_use_model
import account_state_open
import account_report_print_journal
import account_report_central_journal
import account_report_general_journal
import account_report_general_ledger
import account_report_account_balance
import account_change_currency
import pos_box
import account_statement_from_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ultmaster/eoj3
|
refs/heads/master
|
eoj3/local_settings.example.py
|
42
|
DEBUG = True
|
jlebon/kubernetes
|
refs/heads/master
|
examples/celery-rabbitmq/celery-app-add/run_tasks.py
|
471
|
#!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import syslog
import time
from celery_conf import add
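# Dispatch an add(x, y) task roughly every five seconds; the result is fetched
# (and discarded) only if the worker has already finished it by then.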
while True:
x = random.randint(1, 10)
y = random.randint(1, 10)
res = add.delay(x, y)
time.sleep(5)
if res.ready():
res.get()
|
bramalingam/openmicroscopy
|
refs/heads/develop
|
examples/OmeroClients/constructors.py
|
20
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import omero
import omero.clients
image = omero.model.ImageI()
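# DatasetI(id, loaded=False) builds an unloaded proxy: only the ID is set,
# which is enough to link against an existing dataset (a hedged reading of
# the OMERO constructor convention).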
dataset = omero.model.DatasetI(long(1), False)
image.linkDataset(dataset)
|
fivejjs/PTVS
|
refs/heads/master
|
Python/Tests/TestData/DjangoAnalysisTestApp/myapp/urls.py
|
18
|
from django.conf.urls import patterns, url
from django.views.generic import DetailView, ListView
from myapp.models import *
urlpatterns = patterns('',
url(r'^$',
ListView.as_view(
queryset=MyModel.objects.order_by('-pub_date')[:5],
context_object_name='latest_poll_list',
template_name='myapp/index.html'
),
name='index'),
url(r'^(?P<pk>\d+)/$',
DetailView.as_view(
model=MyModel,
template_name='myapp/details.html'
),
name='detail'),
url(r'^(?P<pk>\d+)/$',
DetailView.as_view(model=MyModel2),
name='detail'),
)
|
badock/nova
|
refs/heads/master
|
nova/virt/netutils.py
|
18
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Network-related utilities for supporting libvirt connection code."""
import os
import jinja2
import netaddr
from oslo.config import cfg
from nova.network import model
from nova import paths
CONF = cfg.CONF
netutils_opts = [
cfg.StrOpt('injected_network_template',
default=paths.basedir_def('nova/virt/interfaces.template'),
help='Template file for injected network'),
]
CONF.register_opts(netutils_opts)
CONF.import_opt('use_ipv6', 'nova.netconf')
def get_net_and_mask(cidr):
net = netaddr.IPNetwork(cidr)
return str(net.ip), str(net.netmask)
def get_net_and_prefixlen(cidr):
net = netaddr.IPNetwork(cidr)
return str(net.ip), str(net._prefixlen)
def get_ip_version(cidr):
net = netaddr.IPNetwork(cidr)
return int(net.version)
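# A hedged illustration (not part of this module):
#
#     get_net_and_mask('192.168.1.0/24')   # -> ('192.168.1.0', '255.255.255.0')
#     get_ip_version('2001:db8::/64')      # -> 6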
def _get_first_network(network, version):
    # Use a generator expression with a next() call to get the first
    # matching subnet, since we don't want to evaluate the whole list as
    # there can be a lot of subnets.
try:
return (i for i in network['subnets']
if i['version'] == version).next()
except StopIteration:
pass
def get_injected_network_template(network_info, use_ipv6=None, template=None,
libvirt_virt_type=None):
"""Returns a rendered network template for the given network_info.
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param use_ipv6: If False, do not return IPv6 template information
even if an IPv6 subnet is present in network_info.
:param template: Path to the interfaces template file.
:param libvirt_virt_type: The Libvirt `virt_type`, will be `None` for
        other hypervisors.
"""
if use_ipv6 is None:
use_ipv6 = CONF.use_ipv6
if not template:
template = CONF.injected_network_template
if not (network_info and template):
return
nets = []
ifc_num = -1
ipv6_is_available = False
for vif in network_info:
if not vif['network'] or not vif['network']['subnets']:
continue
network = vif['network']
# NOTE(bnemec): The template only supports a single subnet per
# interface and I'm not sure how/if that can be fixed, so this
# code only takes the first subnet of the appropriate type.
subnet_v4 = _get_first_network(network, 4)
subnet_v6 = _get_first_network(network, 6)
ifc_num += 1
if not network.get_meta('injected'):
continue
address = None
netmask = None
gateway = ''
broadcast = None
dns = None
if subnet_v4:
if subnet_v4.get_meta('dhcp_server') is not None:
continue
if subnet_v4['ips']:
ip = subnet_v4['ips'][0]
address = ip['address']
netmask = model.get_netmask(ip, subnet_v4)
if subnet_v4['gateway']:
gateway = subnet_v4['gateway']['address']
broadcast = str(subnet_v4.as_netaddr().broadcast)
dns = ' '.join([i['address'] for i in subnet_v4['dns']])
address_v6 = None
gateway_v6 = ''
netmask_v6 = None
dns_v6 = None
have_ipv6 = (use_ipv6 and subnet_v6)
if have_ipv6:
if subnet_v6.get_meta('dhcp_server') is not None:
continue
if subnet_v6['ips']:
ipv6_is_available = True
ip_v6 = subnet_v6['ips'][0]
address_v6 = ip_v6['address']
netmask_v6 = model.get_netmask(ip_v6, subnet_v6)
if subnet_v6['gateway']:
gateway_v6 = subnet_v6['gateway']['address']
dns_v6 = ' '.join([i['address'] for i in subnet_v6['dns']])
net_info = {'name': 'eth%d' % ifc_num,
'address': address,
'netmask': netmask,
'gateway': gateway,
'broadcast': broadcast,
'dns': dns,
'address_v6': address_v6,
'gateway_v6': gateway_v6,
'netmask_v6': netmask_v6,
'dns_v6': dns_v6,
}
nets.append(net_info)
if not nets:
return
    tmpl_path, tmpl_file = os.path.split(template)
env = jinja2.Environment(loader=jinja2.FileSystemLoader(tmpl_path),
trim_blocks=True)
template = env.get_template(tmpl_file)
return template.render({'interfaces': nets,
'use_ipv6': ipv6_is_available,
'libvirt_virt_type': libvirt_virt_type})
|
eerimoq/asn1tools
|
refs/heads/master
|
tests/files/3gpp/s1ap_14_4_0.py
|
1
|
EXPECTED = {'S1AP-CommonDataTypes': {'extensibility-implied': False,
'imports': {},
'object-classes': {},
'object-sets': {},
'tags': 'AUTOMATIC',
'types': {'Criticality': {'type': 'ENUMERATED',
'values': [('reject', 0),
('ignore', 1),
('notify', 2)]},
'Presence': {'type': 'ENUMERATED',
'values': [('optional', 0),
('conditional', 1),
('mandatory', 2)]},
'PrivateIE-ID': {'members': [{'name': 'local',
'restricted-to': [(0,
65535)],
'type': 'INTEGER'},
{'name': 'global',
'type': 'OBJECT '
'IDENTIFIER'}],
'type': 'CHOICE'},
'ProcedureCode': {'restricted-to': [(0,
255)],
'type': 'INTEGER'},
'ProtocolExtensionID': {'restricted-to': [(0,
65535)],
'type': 'INTEGER'},
'ProtocolIE-ID': {'restricted-to': [(0,
65535)],
'type': 'INTEGER'},
'TriggeringMessage': {'type': 'ENUMERATED',
'values': [('initiating-message',
0),
('successful-outcome',
1),
('unsuccessfull-outcome',
2)]}},
'values': {}},
'S1AP-Constants': {'extensibility-implied': False,
'imports': {'S1AP-CommonDataTypes': ['ProcedureCode',
'ProtocolIE-ID']},
'object-classes': {},
'object-sets': {},
'tags': 'AUTOMATIC',
'types': {},
'values': {'id-Additional-GUTI': {'type': 'ProtocolIE-ID',
'value': 224},
'id-AdditionalCSFallbackIndicator': {'type': 'ProtocolIE-ID',
'value': 187},
'id-AssistanceDataForPaging': {'type': 'ProtocolIE-ID',
'value': 211},
'id-BearerType': {'type': 'ProtocolIE-ID',
'value': 233},
'id-Bearers-SubjectToStatusTransfer-Item': {'type': 'ProtocolIE-ID',
'value': 89},
'id-BroadcastCancelledAreaList': {'type': 'ProtocolIE-ID',
'value': 141},
'id-BroadcastCompletedAreaList': {'type': 'ProtocolIE-ID',
'value': 120},
'id-CE-mode-B-SupportIndicator': {'type': 'ProtocolIE-ID',
'value': 242},
'id-CNDomain': {'type': 'ProtocolIE-ID',
'value': 109},
'id-CSFallbackIndicator': {'type': 'ProtocolIE-ID',
'value': 108},
'id-CSG-Id': {'type': 'ProtocolIE-ID',
'value': 127},
'id-CSG-IdList': {'type': 'ProtocolIE-ID',
'value': 128},
'id-CSGMembershipInfo': {'type': 'ProtocolIE-ID',
'value': 226},
'id-CSGMembershipStatus': {'type': 'ProtocolIE-ID',
'value': 146},
'id-Cause': {'type': 'ProtocolIE-ID',
'value': 2},
'id-CellAccessMode': {'type': 'ProtocolIE-ID',
'value': 145},
'id-CellIdentifierAndCELevelForCECapableUEs': {'type': 'ProtocolIE-ID',
'value': 212},
'id-CellTrafficTrace': {'type': 'ProcedureCode',
'value': 42},
'id-ConcurrentWarningMessageIndicator': {'type': 'ProtocolIE-ID',
'value': 142},
'id-ConnectionEstablishmentIndication': {'type': 'ProcedureCode',
'value': 54},
'id-Correlation-ID': {'type': 'ProtocolIE-ID',
'value': 156},
'id-Coverage-Level': {'type': 'ProtocolIE-ID',
'value': 250},
'id-CriticalityDiagnostics': {'type': 'ProtocolIE-ID',
'value': 58},
'id-DCN-ID': {'type': 'ProtocolIE-ID',
'value': 246},
'id-DL-CP-SecurityInformation': {'type': 'ProtocolIE-ID',
'value': 253},
'id-DLCOUNTValueExtended': {'type': 'ProtocolIE-ID',
'value': 180},
'id-DLCOUNTValuePDCP-SNlength18': {'type': 'ProtocolIE-ID',
'value': 218},
'id-DLNASPDUDeliveryAckRequest': {'type': 'ProtocolIE-ID',
'value': 249},
'id-Data-Forwarding-Not-Possible': {'type': 'ProtocolIE-ID',
'value': 143},
'id-DataCodingScheme': {'type': 'ProtocolIE-ID',
'value': 118},
'id-DeactivateTrace': {'type': 'ProcedureCode',
'value': 26},
'id-DefaultPagingDRX': {'type': 'ProtocolIE-ID',
'value': 137},
'id-Direct-Forwarding-Path-Availability': {'type': 'ProtocolIE-ID',
'value': 79},
'id-DownlinkS1cdma2000tunnelling': {'type': 'ProcedureCode',
'value': 19},
'id-E-RABAdmittedItem': {'type': 'ProtocolIE-ID',
'value': 20},
'id-E-RABAdmittedList': {'type': 'ProtocolIE-ID',
'value': 18},
'id-E-RABDataForwardingItem': {'type': 'ProtocolIE-ID',
'value': 14},
'id-E-RABFailedToBeReleasedList': {'type': 'ProtocolIE-ID',
'value': 103},
'id-E-RABFailedToModifyList': {'type': 'ProtocolIE-ID',
'value': 32},
'id-E-RABFailedToModifyListBearerModConf': {'type': 'ProtocolIE-ID',
'value': 205},
'id-E-RABFailedToReleaseList': {'type': 'ProtocolIE-ID',
'value': 34},
'id-E-RABFailedToResumeItemResumeReq': {'type': 'ProtocolIE-ID',
'value': 236},
'id-E-RABFailedToResumeItemResumeRes': {'type': 'ProtocolIE-ID',
'value': 238},
'id-E-RABFailedToResumeListResumeReq': {'type': 'ProtocolIE-ID',
'value': 235},
'id-E-RABFailedToResumeListResumeRes': {'type': 'ProtocolIE-ID',
'value': 237},
'id-E-RABFailedToSetupListBearerSURes': {'type': 'ProtocolIE-ID',
'value': 29},
'id-E-RABFailedToSetupListCtxtSURes': {'type': 'ProtocolIE-ID',
'value': 48},
'id-E-RABFailedToSetupListHOReqAck': {'type': 'ProtocolIE-ID',
'value': 19},
'id-E-RABFailedtoSetupItemHOReqAck': {'type': 'ProtocolIE-ID',
'value': 21},
'id-E-RABInformationListItem': {'type': 'ProtocolIE-ID',
'value': 78},
'id-E-RABItem': {'type': 'ProtocolIE-ID',
'value': 35},
'id-E-RABModificationIndication': {'type': 'ProcedureCode',
'value': 50},
'id-E-RABModify': {'type': 'ProcedureCode',
'value': 6},
'id-E-RABModifyItemBearerModConf': {'type': 'ProtocolIE-ID',
'value': 204},
'id-E-RABModifyItemBearerModRes': {'type': 'ProtocolIE-ID',
'value': 37},
'id-E-RABModifyListBearerModConf': {'type': 'ProtocolIE-ID',
'value': 203},
'id-E-RABModifyListBearerModRes': {'type': 'ProtocolIE-ID',
'value': 31},
'id-E-RABNotToBeModifiedItemBearerModInd': {'type': 'ProtocolIE-ID',
'value': 202},
'id-E-RABNotToBeModifiedListBearerModInd': {'type': 'ProtocolIE-ID',
'value': 201},
'id-E-RABRelease': {'type': 'ProcedureCode',
'value': 7},
'id-E-RABReleaseIndication': {'type': 'ProcedureCode',
'value': 8},
'id-E-RABReleaseItem': {'type': 'ProtocolIE-ID',
'value': 38},
'id-E-RABReleaseItemBearerRelComp': {'type': 'ProtocolIE-ID',
'value': 15},
'id-E-RABReleaseItemHOCmd': {'type': 'ProtocolIE-ID',
'value': 49},
'id-E-RABReleaseListBearerRelComp': {'type': 'ProtocolIE-ID',
'value': 69},
'id-E-RABReleasedList': {'type': 'ProtocolIE-ID',
'value': 110},
'id-E-RABSetup': {'type': 'ProcedureCode',
'value': 5},
'id-E-RABSetupItemBearerSURes': {'type': 'ProtocolIE-ID',
'value': 39},
'id-E-RABSetupItemCtxtSURes': {'type': 'ProtocolIE-ID',
'value': 50},
'id-E-RABSetupListBearerSURes': {'type': 'ProtocolIE-ID',
'value': 28},
'id-E-RABSetupListCtxtSURes': {'type': 'ProtocolIE-ID',
'value': 51},
'id-E-RABSubjecttoDataForwardingList': {'type': 'ProtocolIE-ID',
'value': 12},
'id-E-RABToBeModifiedItemBearerModInd': {'type': 'ProtocolIE-ID',
'value': 200},
'id-E-RABToBeModifiedItemBearerModReq': {'type': 'ProtocolIE-ID',
'value': 36},
'id-E-RABToBeModifiedListBearerModInd': {'type': 'ProtocolIE-ID',
'value': 199},
'id-E-RABToBeModifiedListBearerModReq': {'type': 'ProtocolIE-ID',
'value': 30},
'id-E-RABToBeReleasedList': {'type': 'ProtocolIE-ID',
'value': 33},
'id-E-RABToBeReleasedListBearerModConf': {'type': 'ProtocolIE-ID',
'value': 210},
'id-E-RABToBeSetupItemBearerSUReq': {'type': 'ProtocolIE-ID',
'value': 17},
'id-E-RABToBeSetupItemCtxtSUReq': {'type': 'ProtocolIE-ID',
'value': 52},
'id-E-RABToBeSetupItemHOReq': {'type': 'ProtocolIE-ID',
'value': 27},
'id-E-RABToBeSetupListBearerSUReq': {'type': 'ProtocolIE-ID',
'value': 16},
'id-E-RABToBeSetupListCtxtSUReq': {'type': 'ProtocolIE-ID',
'value': 24},
'id-E-RABToBeSetupListHOReq': {'type': 'ProtocolIE-ID',
'value': 53},
'id-E-RABToBeSwitchedDLItem': {'type': 'ProtocolIE-ID',
'value': 23},
'id-E-RABToBeSwitchedDLList': {'type': 'ProtocolIE-ID',
'value': 22},
'id-E-RABToBeSwitchedULItem': {'type': 'ProtocolIE-ID',
'value': 94},
'id-E-RABToBeSwitchedULList': {'type': 'ProtocolIE-ID',
'value': 95},
'id-E-RABtoReleaseListHOCmd': {'type': 'ProtocolIE-ID',
'value': 13},
'id-E-UTRAN-Trace-ID': {'type': 'ProtocolIE-ID',
'value': 86},
'id-ECGIListForRestart': {'type': 'ProtocolIE-ID',
'value': 182},
'id-ENBConfigurationUpdate': {'type': 'ProcedureCode',
'value': 29},
'id-EUTRAN-CGI': {'type': 'ProtocolIE-ID',
'value': 100},
'id-EUTRANRoundTripDelayEstimationInfo': {'type': 'ProtocolIE-ID',
'value': 140},
'id-EmergencyAreaIDListForRestart': {'type': 'ProtocolIE-ID',
'value': 190},
'id-EnhancedCoverageRestricted': {'type': 'ProtocolIE-ID',
'value': 251},
'id-ErrorIndication': {'type': 'ProcedureCode',
'value': 15},
'id-ExpectedUEBehaviour': {'type': 'ProtocolIE-ID',
'value': 196},
'id-ExtendedRepetitionPeriod': {'type': 'ProtocolIE-ID',
'value': 144},
'id-GERANtoLTEHOInformationRes': {'type': 'ProtocolIE-ID',
'value': 55},
'id-GUMMEI-ID': {'type': 'ProtocolIE-ID',
'value': 75},
'id-GUMMEIList': {'type': 'ProtocolIE-ID',
'value': 154},
'id-GUMMEIType': {'type': 'ProtocolIE-ID',
'value': 170},
'id-GW-TransportLayerAddress': {'type': 'ProtocolIE-ID',
'value': 155},
'id-GWContextReleaseIndication': {'type': 'ProtocolIE-ID',
'value': 164},
'id-Global-ENB-ID': {'type': 'ProtocolIE-ID',
'value': 59},
'id-HO-Cause': {'type': 'ProtocolIE-ID',
'value': 168},
'id-HandoverCancel': {'type': 'ProcedureCode',
'value': 4},
'id-HandoverNotification': {'type': 'ProcedureCode',
'value': 2},
'id-HandoverPreparation': {'type': 'ProcedureCode',
'value': 0},
'id-HandoverResourceAllocation': {'type': 'ProcedureCode',
'value': 1},
'id-HandoverRestrictionList': {'type': 'ProtocolIE-ID',
'value': 41},
'id-HandoverType': {'type': 'ProtocolIE-ID',
'value': 1},
'id-InformationOnRecommendedCellsAndENBsForPaging': {'type': 'ProtocolIE-ID',
'value': 213},
'id-InitialContextSetup': {'type': 'ProcedureCode',
'value': 9},
'id-Inter-SystemInformationTransferTypeEDT': {'type': 'ProtocolIE-ID',
'value': 121},
'id-Inter-SystemInformationTransferTypeMDT': {'type': 'ProtocolIE-ID',
'value': 122},
'id-Kill': {'type': 'ProcedureCode',
'value': 43},
'id-KillAllWarningMessages': {'type': 'ProtocolIE-ID',
'value': 191},
'id-LHN-ID': {'type': 'ProtocolIE-ID',
'value': 186},
'id-LPPa-PDU': {'type': 'ProtocolIE-ID',
'value': 147},
'id-LocationReport': {'type': 'ProcedureCode',
'value': 33},
'id-LocationReportingControl': {'type': 'ProcedureCode',
'value': 31},
'id-LocationReportingFailureIndication': {'type': 'ProcedureCode',
'value': 32},
'id-LoggedMBSFNMDT': {'type': 'ProtocolIE-ID',
'value': 197},
'id-M3Configuration': {'type': 'ProtocolIE-ID',
'value': 171},
'id-M4Configuration': {'type': 'ProtocolIE-ID',
'value': 172},
'id-M5Configuration': {'type': 'ProtocolIE-ID',
'value': 173},
'id-M6Configuration': {'type': 'ProtocolIE-ID',
'value': 220},
'id-M7Configuration': {'type': 'ProtocolIE-ID',
'value': 221},
'id-MDT-Location-Info': {'type': 'ProtocolIE-ID',
'value': 174},
'id-MDTConfiguration': {'type': 'ProtocolIE-ID',
'value': 162},
'id-MME-Group-ID': {'type': 'ProtocolIE-ID',
'value': 223},
'id-MME-UE-S1AP-ID': {'type': 'ProtocolIE-ID',
'value': 0},
'id-MME-UE-S1AP-ID-2': {'type': 'ProtocolIE-ID',
'value': 158},
'id-MMECPRelocationIndication': {'type': 'ProcedureCode',
'value': 61},
'id-MMEConfigurationTransfer': {'type': 'ProcedureCode',
'value': 41},
'id-MMEConfigurationUpdate': {'type': 'ProcedureCode',
'value': 30},
'id-MMEDirectInformationTransfer': {'type': 'ProcedureCode',
'value': 38},
'id-MMERelaySupportIndicator': {'type': 'ProtocolIE-ID',
'value': 163},
'id-MMEStatusTransfer': {'type': 'ProcedureCode',
'value': 25},
'id-MMEname': {'type': 'ProtocolIE-ID',
'value': 61},
'id-MSClassmark2': {'type': 'ProtocolIE-ID',
'value': 132},
'id-MSClassmark3': {'type': 'ProtocolIE-ID',
'value': 133},
'id-ManagementBasedMDTAllowed': {'type': 'ProtocolIE-ID',
'value': 165},
'id-ManagementBasedMDTPLMNList': {'type': 'ProtocolIE-ID',
'value': 177},
'id-Masked-IMEISV': {'type': 'ProtocolIE-ID',
'value': 192},
'id-MessageIdentifier': {'type': 'ProtocolIE-ID',
'value': 111},
'id-MobilityInformation': {'type': 'ProtocolIE-ID',
'value': 175},
'id-Muting-Availability-Indication': {'type': 'ProtocolIE-ID',
'value': 207},
'id-Muting-Pattern-Information': {'type': 'ProtocolIE-ID',
'value': 208},
'id-NAS-DownlinkCount': {'type': 'ProtocolIE-ID',
'value': 126},
'id-NAS-PDU': {'type': 'ProtocolIE-ID',
'value': 26},
'id-NASDeliveryIndication': {'type': 'ProcedureCode',
'value': 57},
'id-NASNonDeliveryIndication': {'type': 'ProcedureCode',
'value': 16},
'id-NASSecurityParametersfromE-UTRAN': {'type': 'ProtocolIE-ID',
'value': 135},
'id-NASSecurityParameterstoE-UTRAN': {'type': 'ProtocolIE-ID',
'value': 136},
'id-NB-IoT-DefaultPagingDRX': {'type': 'ProtocolIE-ID',
'value': 234},
'id-NB-IoT-Paging-eDRXInformation': {'type': 'ProtocolIE-ID',
'value': 239},
'id-NB-IoT-UEIdentityIndexValue': {'type': 'ProtocolIE-ID',
'value': 244},
'id-NumberofBroadcastRequest': {'type': 'ProtocolIE-ID',
'value': 115},
'id-OverloadResponse': {'type': 'ProtocolIE-ID',
'value': 101},
'id-OverloadStart': {'type': 'ProcedureCode',
'value': 34},
'id-OverloadStop': {'type': 'ProcedureCode',
'value': 35},
'id-PS-ServiceNotAvailable': {'type': 'ProtocolIE-ID',
'value': 150},
'id-PWSFailureIndication': {'type': 'ProcedureCode',
'value': 51},
'id-PWSRestartIndication': {'type': 'ProcedureCode',
'value': 49},
'id-PWSfailedECGIList': {'type': 'ProtocolIE-ID',
'value': 222},
'id-Paging': {'type': 'ProcedureCode',
'value': 10},
'id-Paging-eDRXInformation': {'type': 'ProtocolIE-ID',
'value': 227},
'id-PagingPriority': {'type': 'ProtocolIE-ID',
'value': 151},
'id-PathSwitchRequest': {'type': 'ProcedureCode',
'value': 3},
'id-PrivacyIndicator': {'type': 'ProtocolIE-ID',
'value': 166},
'id-PrivateMessage': {'type': 'ProcedureCode',
'value': 39},
'id-ProSeAuthorized': {'type': 'ProtocolIE-ID',
'value': 195},
'id-ProSeUEtoNetworkRelaying': {'type': 'ProtocolIE-ID',
'value': 216},
'id-RAT-Type': {'type': 'ProtocolIE-ID',
'value': 232},
'id-RRC-Establishment-Cause': {'type': 'ProtocolIE-ID',
'value': 134},
'id-RRC-Resume-Cause': {'type': 'ProtocolIE-ID',
'value': 245},
'id-ReceiveStatusOfULPDCPSDUsExtended': {'type': 'ProtocolIE-ID',
'value': 181},
'id-ReceiveStatusOfULPDCPSDUsPDCP-SNlength18': {'type': 'ProtocolIE-ID',
'value': 219},
'id-RecommendedCellItem': {'type': 'ProtocolIE-ID',
'value': 214},
'id-RecommendedENBItem': {'type': 'ProtocolIE-ID',
'value': 215},
'id-RegisteredLAI': {'type': 'ProtocolIE-ID',
'value': 159},
'id-RelativeMMECapacity': {'type': 'ProtocolIE-ID',
'value': 87},
'id-RelayNode-Indicator': {'type': 'ProtocolIE-ID',
'value': 160},
'id-RepetitionPeriod': {'type': 'ProtocolIE-ID',
'value': 114},
'id-RequestType': {'type': 'ProtocolIE-ID',
'value': 98},
'id-RerouteNASRequest': {'type': 'ProcedureCode',
'value': 52},
'id-Reset': {'type': 'ProcedureCode',
'value': 14},
'id-ResetType': {'type': 'ProtocolIE-ID',
'value': 92},
'id-RetrieveUEInformation': {'type': 'ProcedureCode',
'value': 58},
'id-Routing-ID': {'type': 'ProtocolIE-ID',
'value': 148},
'id-S-TMSI': {'type': 'ProtocolIE-ID',
'value': 96},
'id-S1-Message': {'type': 'ProtocolIE-ID',
'value': 225},
'id-S1Setup': {'type': 'ProcedureCode',
'value': 17},
'id-SIPTO-Correlation-ID': {'type': 'ProtocolIE-ID',
'value': 183},
'id-SIPTO-L-GW-TransportLayerAddress': {'type': 'ProtocolIE-ID',
'value': 184},
'id-SON-Information-Report': {'type': 'ProtocolIE-ID',
'value': 206},
'id-SONConfigurationTransferECT': {'type': 'ProtocolIE-ID',
'value': 129},
'id-SONConfigurationTransferMCT': {'type': 'ProtocolIE-ID',
'value': 130},
'id-SRVCCHOIndication': {'type': 'ProtocolIE-ID',
'value': 125},
'id-SRVCCOperationNotPossible': {'type': 'ProtocolIE-ID',
'value': 243},
'id-SRVCCOperationPossible': {'type': 'ProtocolIE-ID',
'value': 124},
'id-SecurityContext': {'type': 'ProtocolIE-ID',
'value': 40},
'id-SecurityKey': {'type': 'ProtocolIE-ID',
'value': 73},
'id-SerialNumber': {'type': 'ProtocolIE-ID',
'value': 112},
'id-ServedDCNs': {'type': 'ProtocolIE-ID',
'value': 247},
'id-ServedGUMMEIs': {'type': 'ProtocolIE-ID',
'value': 105},
'id-ServedPLMNs': {'type': 'ProtocolIE-ID',
'value': 63},
'id-SignallingBasedMDTPLMNList': {'type': 'ProtocolIE-ID',
'value': 178},
'id-Source-ToTarget-TransparentContainer': {'type': 'ProtocolIE-ID',
'value': 104},
'id-Source-ToTarget-TransparentContainer-Secondary': {'type': 'ProtocolIE-ID',
'value': 138},
'id-SourceID': {'type': 'ProtocolIE-ID',
'value': 3},
'id-SourceMME-GUMMEI': {'type': 'ProtocolIE-ID',
'value': 157},
'id-SourceMME-UE-S1AP-ID': {'type': 'ProtocolIE-ID',
'value': 88},
'id-SubscriberProfileIDforRFP': {'type': 'ProtocolIE-ID',
'value': 106},
'id-SupportedTAs': {'type': 'ProtocolIE-ID',
'value': 64},
'id-Synchronisation-Information': {'type': 'ProtocolIE-ID',
'value': 209},
'id-TAI': {'type': 'ProtocolIE-ID', 'value': 67},
'id-TAIItem': {'type': 'ProtocolIE-ID',
'value': 47},
'id-TAIList': {'type': 'ProtocolIE-ID',
'value': 46},
'id-TAIListForRestart': {'type': 'ProtocolIE-ID',
'value': 188},
'id-Target-ToSource-TransparentContainer': {'type': 'ProtocolIE-ID',
'value': 123},
'id-Target-ToSource-TransparentContainer-Secondary': {'type': 'ProtocolIE-ID',
'value': 139},
'id-TargetID': {'type': 'ProtocolIE-ID',
'value': 4},
'id-Time-Synchronisation-Info': {'type': 'ProtocolIE-ID',
'value': 149},
'id-Time-UE-StayedInCell-EnhancedGranularity': {'type': 'ProtocolIE-ID',
'value': 167},
'id-TimeToWait': {'type': 'ProtocolIE-ID',
'value': 65},
'id-TraceActivation': {'type': 'ProtocolIE-ID',
'value': 25},
'id-TraceCollectionEntityIPAddress': {'type': 'ProtocolIE-ID',
'value': 131},
'id-TraceFailureIndication': {'type': 'ProcedureCode',
'value': 28},
'id-TraceStart': {'type': 'ProcedureCode',
'value': 27},
'id-TrafficLoadReductionIndication': {'type': 'ProtocolIE-ID',
'value': 161},
'id-TransportInformation': {'type': 'ProtocolIE-ID',
'value': 185},
'id-Tunnel-Information-for-BBF': {'type': 'ProtocolIE-ID',
'value': 176},
'id-UE-Level-QoS-Parameters': {'type': 'ProtocolIE-ID',
'value': 252},
'id-UE-RetentionInformation': {'type': 'ProtocolIE-ID',
'value': 228},
'id-UE-S1AP-IDs': {'type': 'ProtocolIE-ID',
'value': 99},
'id-UE-Usage-Type': {'type': 'ProtocolIE-ID',
'value': 230},
'id-UE-associatedLogicalS1-ConnectionItem': {'type': 'ProtocolIE-ID',
'value': 91},
'id-UE-associatedLogicalS1-ConnectionListResAck': {'type': 'ProtocolIE-ID',
'value': 93},
'id-UECapabilityInfoIndication': {'type': 'ProcedureCode',
'value': 22},
'id-UEContextModification': {'type': 'ProcedureCode',
'value': 21},
'id-UEContextModificationIndication': {'type': 'ProcedureCode',
'value': 53},
'id-UEContextRelease': {'type': 'ProcedureCode',
'value': 23},
'id-UEContextReleaseRequest': {'type': 'ProcedureCode',
'value': 18},
'id-UEContextResume': {'type': 'ProcedureCode',
'value': 56},
'id-UEContextSuspend': {'type': 'ProcedureCode',
'value': 55},
'id-UEIdentityIndexValue': {'type': 'ProtocolIE-ID',
'value': 80},
'id-UEInformationTransfer': {'type': 'ProcedureCode',
'value': 59},
'id-UEPagingID': {'type': 'ProtocolIE-ID',
'value': 43},
'id-UERadioCapability': {'type': 'ProtocolIE-ID',
'value': 74},
'id-UERadioCapabilityForPaging': {'type': 'ProtocolIE-ID',
'value': 198},
'id-UERadioCapabilityMatch': {'type': 'ProcedureCode',
'value': 48},
'id-UESecurityCapabilities': {'type': 'ProtocolIE-ID',
'value': 107},
'id-UESidelinkAggregateMaximumBitrate': {'type': 'ProtocolIE-ID',
'value': 248},
'id-UEUserPlaneCIoTSupportIndicator': {'type': 'ProtocolIE-ID',
'value': 241},
'id-UL-CP-SecurityInformation': {'type': 'ProtocolIE-ID',
'value': 254},
'id-ULCOUNTValueExtended': {'type': 'ProtocolIE-ID',
'value': 179},
'id-ULCOUNTValuePDCP-SNlength18': {'type': 'ProtocolIE-ID',
'value': 217},
'id-UTRANtoLTEHOInformationRes': {'type': 'ProtocolIE-ID',
'value': 57},
'id-UplinkS1cdma2000tunnelling': {'type': 'ProcedureCode',
'value': 20},
'id-UserLocationInformation': {'type': 'ProtocolIE-ID',
'value': 189},
'id-V2XServicesAuthorized': {'type': 'ProtocolIE-ID',
'value': 240},
'id-VoiceSupportMatchIndicator': {'type': 'ProtocolIE-ID',
'value': 169},
'id-WarningAreaList': {'type': 'ProtocolIE-ID',
'value': 113},
'id-WarningMessageContents': {'type': 'ProtocolIE-ID',
'value': 119},
'id-WarningSecurityInfo': {'type': 'ProtocolIE-ID',
'value': 117},
'id-WarningType': {'type': 'ProtocolIE-ID',
'value': 116},
'id-WriteReplaceWarning': {'type': 'ProcedureCode',
'value': 36},
'id-cdma2000HORequiredIndication': {'type': 'ProtocolIE-ID',
'value': 84},
'id-cdma2000HOStatus': {'type': 'ProtocolIE-ID',
'value': 83},
'id-cdma2000OneXRAND': {'type': 'ProtocolIE-ID',
'value': 97},
'id-cdma2000OneXSRVCCInfo': {'type': 'ProtocolIE-ID',
'value': 102},
'id-cdma2000PDU': {'type': 'ProtocolIE-ID',
'value': 70},
'id-cdma2000RATType': {'type': 'ProtocolIE-ID',
'value': 71},
'id-cdma2000SectorID': {'type': 'ProtocolIE-ID',
'value': 72},
'id-downlinkNASTransport': {'type': 'ProcedureCode',
'value': 11},
'id-downlinkNonUEAssociatedLPPaTransport': {'type': 'ProcedureCode',
'value': 46},
'id-downlinkUEAssociatedLPPaTransport': {'type': 'ProcedureCode',
'value': 44},
'id-eNB-StatusTransfer-TransparentContainer': {'type': 'ProtocolIE-ID',
'value': 90},
'id-eNB-UE-S1AP-ID': {'type': 'ProtocolIE-ID',
'value': 8},
'id-eNBCPRelocationIndication': {'type': 'ProcedureCode',
'value': 60},
'id-eNBConfigurationTransfer': {'type': 'ProcedureCode',
'value': 40},
'id-eNBDirectInformationTransfer': {'type': 'ProcedureCode',
'value': 37},
'id-eNBIndirectX2TransportLayerAddresses': {'type': 'ProtocolIE-ID',
'value': 193},
'id-eNBStatusTransfer': {'type': 'ProcedureCode',
'value': 24},
'id-eNBX2ExtendedTransportLayerAddresses': {'type': 'ProtocolIE-ID',
'value': 153},
'id-eNBname': {'type': 'ProtocolIE-ID',
'value': 60},
'id-extended-UEIdentityIndexValue': {'type': 'ProtocolIE-ID',
'value': 231},
'id-initialUEMessage': {'type': 'ProcedureCode',
'value': 12},
'id-pagingDRX': {'type': 'ProtocolIE-ID',
'value': 44},
'id-uE-HistoryInformationFromTheUE': {'type': 'ProtocolIE-ID',
'value': 194},
'id-uEaggregateMaximumBitrate': {'type': 'ProtocolIE-ID',
'value': 66},
'id-uplinkNASTransport': {'type': 'ProcedureCode',
'value': 13},
'id-uplinkNonUEAssociatedLPPaTransport': {'type': 'ProcedureCode',
'value': 47},
'id-uplinkUEAssociatedLPPaTransport': {'type': 'ProcedureCode',
'value': 45},
'id-x2TNLConfigurationInfo': {'type': 'ProtocolIE-ID',
'value': 152},
'maxEARFCN': {'type': 'INTEGER',
'value': 262143},
'maxPrivateIEs': {'type': 'INTEGER',
'value': 65535},
'maxProtocolExtensions': {'type': 'INTEGER',
'value': 65535},
'maxProtocolIEs': {'type': 'INTEGER',
'value': 65535},
'maxnoofBPLMNs': {'type': 'INTEGER', 'value': 6},
'maxnoofCSGs': {'type': 'INTEGER', 'value': 256},
'maxnoofCellID': {'type': 'INTEGER',
'value': 65535},
'maxnoofCellIDforMDT': {'type': 'INTEGER',
'value': 32},
'maxnoofCellinEAI': {'type': 'INTEGER',
'value': 65535},
'maxnoofCellinTAI': {'type': 'INTEGER',
'value': 65535},
'maxnoofCells': {'type': 'INTEGER', 'value': 16},
'maxnoofCellsforRestart': {'type': 'INTEGER',
'value': 256},
'maxnoofCellsineNB': {'type': 'INTEGER',
'value': 256},
'maxnoofDCNs': {'type': 'INTEGER', 'value': 32},
'maxnoofE-RABs': {'type': 'INTEGER',
'value': 256},
'maxnoofEPLMNs': {'type': 'INTEGER',
'value': 15},
'maxnoofEPLMNsPlusOne': {'type': 'INTEGER',
'value': 16},
'maxnoofEmergencyAreaID': {'type': 'INTEGER',
'value': 65535},
'maxnoofErrors': {'type': 'INTEGER',
'value': 256},
'maxnoofForbLACs': {'type': 'INTEGER',
'value': 4096},
'maxnoofForbTACs': {'type': 'INTEGER',
'value': 4096},
'maxnoofGroupIDs': {'type': 'INTEGER',
'value': 65535},
'maxnoofIndividualS1ConnectionsToReset': {'type': 'INTEGER',
'value': 256},
'maxnoofMBSFNAreaMDT': {'type': 'INTEGER',
'value': 8},
'maxnoofMDTPLMNs': {'type': 'INTEGER',
'value': 16},
'maxnoofMMECs': {'type': 'INTEGER',
'value': 256},
'maxnoofPLMNsPerMME': {'type': 'INTEGER',
'value': 32},
'maxnoofRATs': {'type': 'INTEGER', 'value': 8},
'maxnoofRecommendedCells': {'type': 'INTEGER',
'value': 16},
'maxnoofRecommendedENBs': {'type': 'INTEGER',
'value': 16},
'maxnoofRestartEmergencyAreaIDs': {'type': 'INTEGER',
'value': 256},
'maxnoofRestartTAIs': {'type': 'INTEGER',
'value': 2048},
'maxnoofTACs': {'type': 'INTEGER', 'value': 256},
'maxnoofTAIforWarning': {'type': 'INTEGER',
'value': 65535},
'maxnoofTAIs': {'type': 'INTEGER', 'value': 256},
'maxnoofTAforMDT': {'type': 'INTEGER',
'value': 8},
'maxnoofeNBX2ExtTLAs': {'type': 'INTEGER',
'value': 16},
'maxnoofeNBX2GTPTLAs': {'type': 'INTEGER',
'value': 16},
'maxnoofeNBX2TLAs': {'type': 'INTEGER',
'value': 2}}},
'S1AP-Containers': {'extensibility-implied': False,
'imports': {'S1AP-CommonDataTypes': ['Criticality',
'Presence',
'PrivateIE-ID',
'ProtocolExtensionID',
'ProtocolIE-ID'],
'S1AP-Constants': ['maxPrivateIEs',
'maxProtocolExtensions',
'maxProtocolIEs']},
'object-classes': {'S1AP-PRIVATE-IES': {'members': [{'name': '&id',
'type': 'PrivateIE-ID'},
{'name': '&criticality',
'type': 'Criticality'},
{'name': '&Value',
'type': 'OpenType'},
{'name': '&presence',
'type': 'Presence'}]},
'S1AP-PROTOCOL-EXTENSION': {'members': [{'name': '&id',
'type': 'ProtocolExtensionID'},
{'name': '&criticality',
'type': 'Criticality'},
{'name': '&Extension',
'type': 'OpenType'},
{'name': '&presence',
'type': 'Presence'}]},
'S1AP-PROTOCOL-IES': {'members': [{'name': '&id',
'type': 'ProtocolIE-ID'},
{'name': '&criticality',
'type': 'Criticality'},
{'name': '&Value',
'type': 'OpenType'},
{'name': '&presence',
'type': 'Presence'}]},
'S1AP-PROTOCOL-IES-PAIR': {'members': [{'name': '&id',
'type': 'ProtocolIE-ID'},
{'name': '&firstCriticality',
'type': 'Criticality'},
{'name': '&FirstValue',
'type': 'OpenType'},
{'name': '&secondCriticality',
'type': 'Criticality'},
{'name': '&SecondValue',
'type': 'OpenType'},
{'name': '&presence',
'type': 'Presence'}]}},
'object-sets': {},
'tags': 'AUTOMATIC',
'types': {'PrivateIE-Container': {'element': {'actual-parameters': ['{'],
'type': 'PrivateIE-Field'},
'parameters': ['IEsSetParam'],
'size': [(1,
'maxPrivateIEs')],
'type': 'SEQUENCE OF'},
'PrivateIE-Field': {'members': [{'name': 'id',
'table': {'type': 'IEsSetParam'},
'type': 'S1AP-PRIVATE-IES.&id'},
{'name': 'criticality',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PRIVATE-IES.&criticality'},
{'name': 'value',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PRIVATE-IES.&Value'}],
'parameters': ['IEsSetParam'],
'type': 'SEQUENCE'},
'ProtocolExtensionContainer': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolExtensionField'},
'parameters': ['ExtensionSetParam'],
'size': [(1,
'maxProtocolExtensions')],
'type': 'SEQUENCE '
'OF'},
'ProtocolExtensionField': {'members': [{'name': 'id',
'table': {'type': 'ExtensionSetParam'},
'type': 'S1AP-PROTOCOL-EXTENSION.&id'},
{'name': 'criticality',
'table': ['ExtensionSetParam',
['id']],
'type': 'S1AP-PROTOCOL-EXTENSION.&criticality'},
{'name': 'extensionValue',
'table': ['ExtensionSetParam',
['id']],
'type': 'S1AP-PROTOCOL-EXTENSION.&Extension'}],
'parameters': ['ExtensionSetParam'],
'type': 'SEQUENCE'},
'ProtocolIE-Container': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-Field'},
'parameters': ['IEsSetParam'],
'size': [(0,
'maxProtocolIEs')],
'type': 'SEQUENCE OF'},
'ProtocolIE-ContainerList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'parameters': ['lowerBound',
'upperBound',
'IEsSetParam'],
'size': [('lowerBound',
'upperBound')],
'type': 'SEQUENCE '
'OF'},
'ProtocolIE-ContainerPair': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-FieldPair'},
'parameters': ['IEsSetParam'],
'size': [(0,
'maxProtocolIEs')],
'type': 'SEQUENCE '
'OF'},
'ProtocolIE-ContainerPairList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-ContainerPair'},
'parameters': ['lowerBound',
'upperBound',
'IEsSetParam'],
'size': [('lowerBound',
'upperBound')],
'type': 'SEQUENCE '
'OF'},
'ProtocolIE-Field': {'members': [{'name': 'id',
'table': {'type': 'IEsSetParam'},
'type': 'S1AP-PROTOCOL-IES.&id'},
{'name': 'criticality',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PROTOCOL-IES.&criticality'},
{'name': 'value',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PROTOCOL-IES.&Value'}],
'parameters': ['IEsSetParam'],
'type': 'SEQUENCE'},
'ProtocolIE-FieldPair': {'members': [{'name': 'id',
'table': {'type': 'IEsSetParam'},
'type': 'S1AP-PROTOCOL-IES-PAIR.&id'},
{'name': 'firstCriticality',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PROTOCOL-IES-PAIR.&firstCriticality'},
{'name': 'firstValue',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PROTOCOL-IES-PAIR.&FirstValue'},
{'name': 'secondCriticality',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PROTOCOL-IES-PAIR.&secondCriticality'},
{'name': 'secondValue',
'table': ['IEsSetParam',
['id']],
'type': 'S1AP-PROTOCOL-IES-PAIR.&SecondValue'}],
'parameters': ['IEsSetParam'],
'type': 'SEQUENCE'},
'ProtocolIE-SingleContainer': {'actual-parameters': ['{'],
'parameters': ['IEsSetParam'],
'type': 'ProtocolIE-Field'}},
'values': {}},
'S1AP-IEs': {'extensibility-implied': False,
'imports': {'S1AP-CommonDataTypes': ['Criticality',
'ProcedureCode',
'ProtocolIE-ID',
'TriggeringMessage'],
'S1AP-Constants': ['id-Bearers-SubjectToStatusTransfer-Item',
'id-DLCOUNTValueExtended',
'id-DLCOUNTValuePDCP-SNlength18',
'id-E-RABInformationListItem',
'id-E-RABItem',
'id-HO-Cause',
'id-LoggedMBSFNMDT',
'id-M3Configuration',
'id-M4Configuration',
'id-M5Configuration',
'id-M6Configuration',
'id-M7Configuration',
'id-MDT-Location-Info',
'id-MDTConfiguration',
'id-MobilityInformation',
'id-Muting-Availability-Indication',
'id-Muting-Pattern-Information',
'id-ProSeUEtoNetworkRelaying',
'id-RAT-Type',
'id-ReceiveStatusOfULPDCPSDUsExtended',
'id-ReceiveStatusOfULPDCPSDUsPDCP-SNlength18',
'id-RecommendedCellItem',
'id-RecommendedENBItem',
'id-SON-Information-Report',
'id-SignallingBasedMDTPLMNList',
'id-Synchronisation-Information',
'id-Time-Synchronisation-Info',
'id-Time-UE-StayedInCell-EnhancedGranularity',
'id-ULCOUNTValueExtended',
'id-ULCOUNTValuePDCP-SNlength18',
'id-eNBIndirectX2TransportLayerAddresses',
'id-eNBX2ExtendedTransportLayerAddresses',
'id-uE-HistoryInformationFromTheUE',
'id-x2TNLConfigurationInfo',
'maxEARFCN',
'maxnoofBPLMNs',
'maxnoofCSGs',
'maxnoofCellID',
'maxnoofCellIDforMDT',
'maxnoofCellinEAI',
'maxnoofCellinTAI',
'maxnoofCells',
'maxnoofCellsforRestart',
'maxnoofCellsineNB',
'maxnoofDCNs',
'maxnoofE-RABs',
'maxnoofEPLMNs',
'maxnoofEPLMNsPlusOne',
'maxnoofEmergencyAreaID',
'maxnoofErrors',
'maxnoofForbLACs',
'maxnoofForbTACs',
'maxnoofGroupIDs',
'maxnoofMBSFNAreaMDT',
'maxnoofMDTPLMNs',
'maxnoofMMECs',
'maxnoofPLMNsPerMME',
'maxnoofRATs',
'maxnoofRecommendedCells',
'maxnoofRecommendedENBs',
'maxnoofRestartEmergencyAreaIDs',
'maxnoofRestartTAIs',
'maxnoofTACs',
'maxnoofTAIforWarning',
'maxnoofTAforMDT',
'maxnoofeNBX2ExtTLAs',
'maxnoofeNBX2GTPTLAs',
'maxnoofeNBX2TLAs'],
'S1AP-Containers': ['ProtocolExtensionContainer',
'ProtocolIE-SingleContainer',
'S1AP-PROTOCOL-EXTENSION',
'S1AP-PROTOCOL-IES',
'{',
'{',
'}',
'}']},
'object-classes': {},
'object-sets': {'Additional-GUTI-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'AllocationAndRetentionPriority-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'AssistanceDataForPaging-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'AssistanceDataForRecommendedCells-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'Bearers-SubjectToStatusTransfer-ItemExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'Bearers-SubjectToStatusTransfer-ItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'CGI-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'COUNTValueExtended-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'COUNTvalue-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'COUNTvaluePDCP-SNlength18-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CSG-IdList-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CancelledCellinEAI-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CancelledCellinTAI-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'Cdma2000OneXSRVCCInfo-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CellBasedMDT-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CellID-Broadcast-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CellID-Cancelled-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CellIdentifierAndCELevelForCECapableUEs-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CellType-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CompletedCellinEAI-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CompletedCellinTAI-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CriticalityDiagnostics-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CriticalityDiagnostics-IE-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'DL-CP-SecurityInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABInformationListIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABInformationListItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABQoSParameters-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ENB-StatusTransfer-TransparentContainer-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ENBX2ExtTLA-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'EUTRAN-CGI-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'EmergencyAreaID-Broadcast-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'EmergencyAreaID-Cancelled-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ExpectedUEActivityBehaviour-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ExpectedUEBehaviour-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ForbiddenLAs-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ForbiddenTAs-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'GBR-QosInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'GERAN-Cell-ID-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'GUMMEI-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'GlobalENB-ID-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'HandoverRestrictionList-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ImmediateMDT-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'InformationForCECapableUEs-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'InformationOnRecommendedCellsAndENBsForPaging-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'LAI-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'LastVisitedEUTRANCellInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'ListeningSubframePattern-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'LoggedMBSFNMDT-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'LoggedMDT-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'M1PeriodicReporting-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'M1ThresholdEventA2-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'M3Configuration-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'M4Configuration-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'M5Configuration-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'M6Configuration-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'M7Configuration-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'MBSFN-ResultToLogInfo-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'MDT-Configuration-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'MDTMode-ExtensionIE': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MutingPatternInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'NB-IoT-Paging-eDRXInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'Paging-eDRXInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'PagingAttemptInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ProSeAuthorized-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'RIMTransfer-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'RLFReportInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'RecommendedCellItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'RecommendedCellsForPaging-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'RecommendedCellsForPagingItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'RecommendedENBItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'RecommendedENBItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'RecommendedENBsForPaging-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'RequestType-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'S-TMSI-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'SONConfigurationTransfer-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'SONInformation-ExtensionIE': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'SONInformationReply-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'SecurityContext-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ServedDCNsItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'ServedGUMMEIsItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'SourceeNB-ID-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'SourceeNB-ToTargeteNB-TransparentContainer-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'SupportedTAs-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'SynchronisationInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TABasedMDT-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TAI-Broadcast-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TAI-Cancelled-Item-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TAI-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TAIBasedMDT-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TargetRNC-ID-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TargeteNB-ID-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TargeteNB-ToSourceeNB-TransparentContainer-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TimeSynchronisationInfo-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'TraceActivation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'Tunnel-Information-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'UE-S1AP-ID-pair-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'UE-Sidelink-Aggregate-MaximumBitrates-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'UE-associatedLogicalS1-ConnectionItemExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'UEAggregate-MaximumBitrates-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'UESecurityCapabilities-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'UL-CP-SecurityInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'UserLocationInformation-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'V2XServicesAuthorized-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'X2TNLConfigurationInfo-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []}},
'tags': 'AUTOMATIC',
'types': {'Additional-GUTI': {'members': [{'name': 'gUMMEI',
'type': 'GUMMEI'},
{'name': 'm-TMSI',
'type': 'M-TMSI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'AdditionalCSFallbackIndicator': {'type': 'ENUMERATED',
'values': [('no-restriction',
0),
('restriction',
1),
None]},
'AllocationAndRetentionPriority': {'members': [{'name': 'priorityLevel',
'type': 'PriorityLevel'},
{'name': 'pre-emptionCapability',
'type': 'Pre-emptionCapability'},
{'name': 'pre-emptionVulnerability',
'type': 'Pre-emptionVulnerability'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'AreaScopeOfMDT': {'members': [{'name': 'cellBased',
'type': 'CellBasedMDT'},
{'name': 'tABased',
'type': 'TABasedMDT'},
{'name': 'pLMNWide',
'type': 'NULL'},
None,
{'name': 'tAIBased',
'type': 'TAIBasedMDT'}],
'type': 'CHOICE'},
'AssistanceDataForCECapableUEs': {'members': [{'name': 'cellIdentifierAndCELevelForCECapableUEs',
'type': 'CellIdentifierAndCELevelForCECapableUEs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'AssistanceDataForPaging': {'members': [{'name': 'assistanceDataForRecommendedCells',
'optional': True,
'type': 'AssistanceDataForRecommendedCells'},
{'name': 'assistanceDataForCECapableUEs',
'optional': True,
'type': 'AssistanceDataForCECapableUEs'},
{'name': 'pagingAttemptInformation',
'optional': True,
'type': 'PagingAttemptInformation'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'AssistanceDataForRecommendedCells': {'members': [{'name': 'recommendedCellsForPaging',
'type': 'RecommendedCellsForPaging'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'BPLMNs': {'element': {'type': 'PLMNidentity'},
'size': [(1, 'maxnoofBPLMNs')],
'type': 'SEQUENCE OF'},
'BearerType': {'type': 'ENUMERATED',
'values': [('non-IP', 0), None]},
'Bearers-SubjectToStatusTransfer-Item': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'uL-COUNTvalue',
'type': 'COUNTvalue'},
{'name': 'dL-COUNTvalue',
'type': 'COUNTvalue'},
{'name': 'receiveStatusofULPDCPSDUs',
'optional': True,
'type': 'ReceiveStatusofULPDCPSDUs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'Bearers-SubjectToStatusTransferList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'BitRate': {'restricted-to': [(0, 10000000000)],
'type': 'INTEGER'},
'BroadcastCancelledAreaList': {'members': [{'name': 'cellID-Cancelled',
'type': 'CellID-Cancelled'},
{'name': 'tAI-Cancelled',
'type': 'TAI-Cancelled'},
{'name': 'emergencyAreaID-Cancelled',
'type': 'EmergencyAreaID-Cancelled'},
None],
'type': 'CHOICE'},
'BroadcastCompletedAreaList': {'members': [{'name': 'cellID-Broadcast',
'type': 'CellID-Broadcast'},
{'name': 'tAI-Broadcast',
'type': 'TAI-Broadcast'},
{'name': 'emergencyAreaID-Broadcast',
'type': 'EmergencyAreaID-Broadcast'},
None],
'type': 'CHOICE'},
'CE-mode-B-SupportIndicator': {'type': 'ENUMERATED',
'values': [('supported',
0),
None]},
'CELevel': {'type': 'OCTET STRING'},
'CGI': {'members': [{'name': 'pLMNidentity',
'type': 'PLMNidentity'},
{'name': 'lAC', 'type': 'LAC'},
{'name': 'cI', 'type': 'CI'},
{'name': 'rAC',
'optional': True,
'type': 'RAC'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CI': {'size': [2], 'type': 'OCTET STRING'},
'CNDomain': {'type': 'ENUMERATED',
'values': [('ps', 0), ('cs', 1)]},
'COUNTValueExtended': {'members': [{'name': 'pDCP-SNExtended',
'type': 'PDCP-SNExtended'},
{'name': 'hFNModified',
'type': 'HFNModified'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'COUNTvalue': {'members': [{'name': 'pDCP-SN',
'type': 'PDCP-SN'},
{'name': 'hFN',
'type': 'HFN'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'COUNTvaluePDCP-SNlength18': {'members': [{'name': 'pDCP-SNlength18',
'type': 'PDCP-SNlength18'},
{'name': 'hFNforPDCP-SNlength18',
'type': 'HFNforPDCP-SNlength18'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CSFallbackIndicator': {'type': 'ENUMERATED',
'values': [('cs-fallback-required',
0),
None,
('cs-fallback-high-priority',
1)]},
'CSG-Id': {'size': [27], 'type': 'BIT STRING'},
'CSG-IdList': {'element': {'type': 'CSG-IdList-Item'},
'size': [(1, 'maxnoofCSGs')],
'type': 'SEQUENCE OF'},
'CSG-IdList-Item': {'members': [{'name': 'cSG-Id',
'type': 'CSG-Id'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CSGMembershipStatus': {'type': 'ENUMERATED',
'values': [('member', 0),
('not-member', 1)]},
'CancelledCellinEAI': {'element': {'type': 'CancelledCellinEAI-Item'},
'size': [(1,
'maxnoofCellinEAI')],
'type': 'SEQUENCE OF'},
'CancelledCellinEAI-Item': {'members': [{'name': 'eCGI',
'type': 'EUTRAN-CGI'},
{'name': 'numberOfBroadcasts',
'type': 'NumberOfBroadcasts'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CancelledCellinTAI': {'element': {'type': 'CancelledCellinTAI-Item'},
'size': [(1,
'maxnoofCellinTAI')],
'type': 'SEQUENCE OF'},
'CancelledCellinTAI-Item': {'members': [{'name': 'eCGI',
'type': 'EUTRAN-CGI'},
{'name': 'numberOfBroadcasts',
'type': 'NumberOfBroadcasts'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'Cause': {'members': [{'name': 'radioNetwork',
'type': 'CauseRadioNetwork'},
{'name': 'transport',
'type': 'CauseTransport'},
{'name': 'nas',
'type': 'CauseNas'},
{'name': 'protocol',
'type': 'CauseProtocol'},
{'name': 'misc',
'type': 'CauseMisc'},
None],
'type': 'CHOICE'},
'CauseMisc': {'type': 'ENUMERATED',
'values': [('control-processing-overload',
0),
('not-enough-user-plane-processing-resources',
1),
('hardware-failure', 2),
('om-intervention', 3),
('unspecified', 4),
('unknown-PLMN', 5),
None]},
'CauseNas': {'type': 'ENUMERATED',
'values': [('normal-release', 0),
('authentication-failure', 1),
('detach', 2),
('unspecified', 3),
None,
('csg-subscription-expiry',
4)]},
'CauseProtocol': {'type': 'ENUMERATED',
'values': [('transfer-syntax-error',
0),
('abstract-syntax-error-reject',
1),
('abstract-syntax-error-ignore-and-notify',
2),
('message-not-compatible-with-receiver-state',
3),
('semantic-error', 4),
('abstract-syntax-error-falsely-constructed-message',
5),
('unspecified', 6),
None]},
'CauseRadioNetwork': {'type': 'ENUMERATED',
'values': [('unspecified', 0),
('tx2relocoverall-expiry',
1),
('successful-handover',
2),
('release-due-to-eutran-generated-reason',
3),
('handover-cancelled',
4),
('partial-handover',
5),
('ho-failure-in-target-EPC-eNB-or-target-system',
6),
('ho-target-not-allowed',
7),
('tS1relocoverall-expiry',
8),
('tS1relocprep-expiry',
9),
('cell-not-available',
10),
('unknown-targetID',
11),
('no-radio-resources-available-in-target-cell',
12),
('unknown-mme-ue-s1ap-id',
13),
('unknown-enb-ue-s1ap-id',
14),
('unknown-pair-ue-s1ap-id',
15),
('handover-desirable-for-radio-reason',
16),
('time-critical-handover',
17),
('resource-optimisation-handover',
18),
('reduce-load-in-serving-cell',
19),
('user-inactivity',
20),
('radio-connection-with-ue-lost',
21),
('load-balancing-tau-required',
22),
('cs-fallback-triggered',
23),
('ue-not-available-for-ps-service',
24),
('radio-resources-not-available',
25),
('failure-in-radio-interface-procedure',
26),
('invalid-qos-combination',
27),
('interrat-redirection',
28),
('interaction-with-other-procedure',
29),
('unknown-E-RAB-ID',
30),
('multiple-E-RAB-ID-instances',
31),
('encryption-and-or-integrity-protection-algorithms-not-supported',
32),
('s1-intra-system-handover-triggered',
33),
('s1-inter-system-handover-triggered',
34),
('x2-handover-triggered',
35),
None,
('redirection-towards-1xRTT',
36),
('not-supported-QCI-value',
37),
('invalid-CSG-Id', 38),
('release-due-to-pre-emption',
39)]},
'CauseTransport': {'type': 'ENUMERATED',
'values': [('transport-resource-unavailable',
0),
('unspecified', 1),
None]},
'Cdma2000HORequiredIndication': {'type': 'ENUMERATED',
'values': [('true', 0),
None]},
'Cdma2000HOStatus': {'type': 'ENUMERATED',
'values': [('hOSuccess', 0),
('hOFailure', 1),
None]},
'Cdma2000OneXMEID': {'type': 'OCTET STRING'},
'Cdma2000OneXMSI': {'type': 'OCTET STRING'},
'Cdma2000OneXPilot': {'type': 'OCTET STRING'},
'Cdma2000OneXRAND': {'type': 'OCTET STRING'},
'Cdma2000OneXSRVCCInfo': {'members': [{'name': 'cdma2000OneXMEID',
'type': 'Cdma2000OneXMEID'},
{'name': 'cdma2000OneXMSI',
'type': 'Cdma2000OneXMSI'},
{'name': 'cdma2000OneXPilot',
'type': 'Cdma2000OneXPilot'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'Cdma2000PDU': {'type': 'OCTET STRING'},
'Cdma2000RATType': {'type': 'ENUMERATED',
'values': [('hRPD', 0),
('onexRTT', 1),
None]},
'Cdma2000SectorID': {'type': 'OCTET STRING'},
'Cell-Size': {'type': 'ENUMERATED',
'values': [('verysmall', 0),
('small', 1),
('medium', 2),
('large', 3),
None]},
'CellAccessMode': {'type': 'ENUMERATED',
'values': [('hybrid', 0), None]},
'CellBasedMDT': {'members': [{'name': 'cellIdListforMDT',
'type': 'CellIdListforMDT'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CellID-Broadcast': {'element': {'type': 'CellID-Broadcast-Item'},
'size': [(1, 'maxnoofCellID')],
'type': 'SEQUENCE OF'},
'CellID-Broadcast-Item': {'members': [{'name': 'eCGI',
'type': 'EUTRAN-CGI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CellID-Cancelled': {'element': {'type': 'CellID-Cancelled-Item'},
'size': [(1, 'maxnoofCellID')],
'type': 'SEQUENCE OF'},
'CellID-Cancelled-Item': {'members': [{'name': 'eCGI',
'type': 'EUTRAN-CGI'},
{'name': 'numberOfBroadcasts',
'type': 'NumberOfBroadcasts'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CellIdListforMDT': {'element': {'type': 'EUTRAN-CGI'},
'size': [(1,
'maxnoofCellIDforMDT')],
'type': 'SEQUENCE OF'},
'CellIdentifierAndCELevelForCECapableUEs': {'members': [{'name': 'global-Cell-ID',
'type': 'EUTRAN-CGI'},
{'name': 'cELevel',
'type': 'CELevel'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CellIdentity': {'size': [28], 'type': 'BIT STRING'},
'CellType': {'members': [{'name': 'cell-Size',
'type': 'Cell-Size'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CompletedCellinEAI': {'element': {'type': 'CompletedCellinEAI-Item'},
'size': [(1,
'maxnoofCellinEAI')],
'type': 'SEQUENCE OF'},
'CompletedCellinEAI-Item': {'members': [{'name': 'eCGI',
'type': 'EUTRAN-CGI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CompletedCellinTAI': {'element': {'type': 'CompletedCellinTAI-Item'},
'size': [(1,
'maxnoofCellinTAI')],
'type': 'SEQUENCE OF'},
'CompletedCellinTAI-Item': {'members': [{'name': 'eCGI',
'type': 'EUTRAN-CGI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ConcurrentWarningMessageIndicator': {'type': 'ENUMERATED',
'values': [('true',
0)]},
'Correlation-ID': {'size': [4], 'type': 'OCTET STRING'},
'Coverage-Level': {'type': 'ENUMERATED',
'values': [('extendedcoverage', 0),
None]},
'CriticalityDiagnostics': {'members': [{'name': 'procedureCode',
'optional': True,
'type': 'ProcedureCode'},
{'name': 'triggeringMessage',
'optional': True,
'type': 'TriggeringMessage'},
{'name': 'procedureCriticality',
'optional': True,
'type': 'Criticality'},
{'name': 'iEsCriticalityDiagnostics',
'optional': True,
'type': 'CriticalityDiagnostics-IE-List'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CriticalityDiagnostics-IE-Item': {'members': [{'name': 'iECriticality',
'type': 'Criticality'},
{'name': 'iE-ID',
'type': 'ProtocolIE-ID'},
{'name': 'typeOfError',
'type': 'TypeOfError'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CriticalityDiagnostics-IE-List': {'element': {'type': 'CriticalityDiagnostics-IE-Item'},
'size': [(1,
'maxnoofErrors')],
'type': 'SEQUENCE '
'OF'},
'DCN-ID': {'restricted-to': [(0, 65535)],
'type': 'INTEGER'},
'DL-CP-SecurityInformation': {'members': [{'name': 'dl-NAS-MAC',
'type': 'DL-NAS-MAC'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'DL-Forwarding': {'type': 'ENUMERATED',
'values': [('dL-Forwarding-proposed',
0),
None]},
'DL-NAS-MAC': {'size': [16], 'type': 'BIT STRING'},
'DLNASPDUDeliveryAckRequest': {'type': 'ENUMERATED',
'values': [('requested',
0),
None]},
'Data-Forwarding-Not-Possible': {'type': 'ENUMERATED',
'values': [('data-Forwarding-not-Possible',
0),
None]},
'DataCodingScheme': {'size': [8], 'type': 'BIT STRING'},
'Direct-Forwarding-Path-Availability': {'type': 'ENUMERATED',
'values': [('directPathAvailable',
0),
None]},
'E-RAB-ID': {'restricted-to': [(0, 15), None],
'type': 'INTEGER'},
'E-RABInformationList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1, 'maxnoofE-RABs')],
'type': 'SEQUENCE OF'},
'E-RABInformationListItem': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'dL-Forwarding',
'optional': True,
'type': 'DL-Forwarding'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABItem': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'cause',
'type': 'Cause'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABLevelQoSParameters': {'members': [{'name': 'qCI',
'type': 'QCI'},
{'name': 'allocationRetentionPriority',
'type': 'AllocationAndRetentionPriority'},
{'name': 'gbrQosInformation',
'optional': True,
'type': 'GBR-QosInformation'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1, 'maxnoofE-RABs')],
'type': 'SEQUENCE OF'},
'E-UTRAN-Trace-ID': {'size': [8],
'type': 'OCTET STRING'},
'EARFCN': {'restricted-to': [(0, 'maxEARFCN'), None],
'type': 'INTEGER'},
'ECGI-List': {'element': {'type': 'EUTRAN-CGI'},
'size': [(1, 'maxnoofCellsineNB')],
'type': 'SEQUENCE OF'},
'ECGIList': {'element': {'type': 'EUTRAN-CGI'},
'size': [(1, 'maxnoofCellID')],
'type': 'SEQUENCE OF'},
'ECGIListForRestart': {'element': {'type': 'EUTRAN-CGI'},
'size': [(1,
'maxnoofCellsforRestart')],
'type': 'SEQUENCE OF'},
'ENB-ID': {'members': [{'name': 'macroENB-ID',
'size': [20],
'type': 'BIT STRING'},
{'name': 'homeENB-ID',
'size': [28],
'type': 'BIT STRING'},
None,
{'name': 'short-macroENB-ID',
'size': [18],
'type': 'BIT STRING'},
{'name': 'long-macroENB-ID',
'size': [21],
'type': 'BIT STRING'}],
'type': 'CHOICE'},
'ENB-StatusTransfer-TransparentContainer': {'members': [{'name': 'bearers-SubjectToStatusTransferList',
'type': 'Bearers-SubjectToStatusTransferList'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ENB-UE-S1AP-ID': {'restricted-to': [(0, 16777215)],
'type': 'INTEGER'},
'ENBIndirectX2TransportLayerAddresses': {'element': {'type': 'TransportLayerAddress'},
'size': [(1,
'maxnoofeNBX2TLAs')],
'type': 'SEQUENCE '
'OF'},
'ENBX2ExtTLA': {'members': [{'name': 'iPsecTLA',
'optional': True,
'type': 'TransportLayerAddress'},
{'name': 'gTPTLAa',
'optional': True,
'type': 'ENBX2GTPTLAs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ENBX2ExtTLAs': {'element': {'type': 'ENBX2ExtTLA'},
'size': [(1, 'maxnoofeNBX2ExtTLAs')],
'type': 'SEQUENCE OF'},
'ENBX2GTPTLAs': {'element': {'type': 'TransportLayerAddress'},
'size': [(1, 'maxnoofeNBX2GTPTLAs')],
'type': 'SEQUENCE OF'},
'ENBX2TLAs': {'element': {'type': 'TransportLayerAddress'},
'size': [(1, 'maxnoofeNBX2TLAs')],
'type': 'SEQUENCE OF'},
'ENBname': {'size': [(1, 150), None],
'type': 'PrintableString'},
'EPLMNs': {'element': {'type': 'PLMNidentity'},
'size': [(1, 'maxnoofEPLMNs')],
'type': 'SEQUENCE OF'},
'EUTRAN-CGI': {'members': [{'name': 'pLMNidentity',
'type': 'PLMNidentity'},
{'name': 'cell-ID',
'type': 'CellIdentity'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'EUTRANRoundTripDelayEstimationInfo': {'restricted-to': [(0,
2047)],
'type': 'INTEGER'},
'EmergencyAreaID': {'size': [3],
'type': 'OCTET STRING'},
'EmergencyAreaID-Broadcast': {'element': {'type': 'EmergencyAreaID-Broadcast-Item'},
'size': [(1,
'maxnoofEmergencyAreaID')],
'type': 'SEQUENCE OF'},
'EmergencyAreaID-Broadcast-Item': {'members': [{'name': 'emergencyAreaID',
'type': 'EmergencyAreaID'},
{'name': 'completedCellinEAI',
'type': 'CompletedCellinEAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'EmergencyAreaID-Cancelled': {'element': {'type': 'EmergencyAreaID-Cancelled-Item'},
'size': [(1,
'maxnoofEmergencyAreaID')],
'type': 'SEQUENCE OF'},
'EmergencyAreaID-Cancelled-Item': {'members': [{'name': 'emergencyAreaID',
'type': 'EmergencyAreaID'},
{'name': 'cancelledCellinEAI',
'type': 'CancelledCellinEAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'EmergencyAreaIDList': {'element': {'type': 'EmergencyAreaID'},
'size': [(1,
'maxnoofEmergencyAreaID')],
'type': 'SEQUENCE OF'},
'EmergencyAreaIDListForRestart': {'element': {'type': 'EmergencyAreaID'},
'size': [(1,
'maxnoofRestartEmergencyAreaIDs')],
'type': 'SEQUENCE '
'OF'},
'EncryptionAlgorithms': {'size': [16, None],
'type': 'BIT STRING'},
'EnhancedCoverageRestricted': {'type': 'ENUMERATED',
'values': [('restricted',
0),
None]},
'EventType': {'type': 'ENUMERATED',
'values': [('direct', 0),
('change-of-serve-cell', 1),
('stop-change-of-serve-cell',
2),
None]},
'ExpectedActivityPeriod': {'restricted-to': [(1, 30),
40,
50,
60,
80,
100,
120,
150,
180,
181,
None],
'type': 'INTEGER'},
'ExpectedHOInterval': {'type': 'ENUMERATED',
'values': [('sec15', 0),
('sec30', 1),
('sec60', 2),
('sec90', 3),
('sec120', 4),
('sec180', 5),
('long-time', 6),
None]},
'ExpectedIdlePeriod': {'restricted-to': [(1, 30),
40,
50,
60,
80,
100,
120,
150,
180,
181,
None],
'type': 'INTEGER'},
'ExpectedUEActivityBehaviour': {'members': [{'name': 'expectedActivityPeriod',
'optional': True,
'type': 'ExpectedActivityPeriod'},
{'name': 'expectedIdlePeriod',
'optional': True,
'type': 'ExpectedIdlePeriod'},
{'name': 'sourceofUEActivityBehaviourInformation',
'optional': True,
'type': 'SourceOfUEActivityBehaviourInformation'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ExpectedUEBehaviour': {'members': [{'name': 'expectedActivity',
'optional': True,
'type': 'ExpectedUEActivityBehaviour'},
{'name': 'expectedHOInterval',
'optional': True,
'type': 'ExpectedHOInterval'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'Extended-UEIdentityIndexValue': {'size': [14],
'type': 'BIT STRING'},
'ExtendedRNC-ID': {'restricted-to': [(4096, 65535)],
'type': 'INTEGER'},
'ExtendedRepetitionPeriod': {'restricted-to': [(4096,
131071)],
'type': 'INTEGER'},
'ForbiddenInterRATs': {'type': 'ENUMERATED',
'values': [('all', 0),
('geran', 1),
('utran', 2),
('cdma2000', 3),
None,
('geranandutran', 4),
('cdma2000andutran',
5)]},
'ForbiddenLACs': {'element': {'type': 'LAC'},
'size': [(1, 'maxnoofForbLACs')],
'type': 'SEQUENCE OF'},
'ForbiddenLAs': {'element': {'type': 'ForbiddenLAs-Item'},
'size': [(1, 'maxnoofEPLMNsPlusOne')],
'type': 'SEQUENCE OF'},
'ForbiddenLAs-Item': {'members': [{'name': 'pLMN-Identity',
'type': 'PLMNidentity'},
{'name': 'forbiddenLACs',
'type': 'ForbiddenLACs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ForbiddenTACs': {'element': {'type': 'TAC'},
'size': [(1, 'maxnoofForbTACs')],
'type': 'SEQUENCE OF'},
'ForbiddenTAs': {'element': {'type': 'ForbiddenTAs-Item'},
'size': [(1, 'maxnoofEPLMNsPlusOne')],
'type': 'SEQUENCE OF'},
'ForbiddenTAs-Item': {'members': [{'name': 'pLMN-Identity',
'type': 'PLMNidentity'},
{'name': 'forbiddenTACs',
'type': 'ForbiddenTACs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'GBR-QosInformation': {'members': [{'name': 'e-RAB-MaximumBitrateDL',
'type': 'BitRate'},
{'name': 'e-RAB-MaximumBitrateUL',
'type': 'BitRate'},
{'name': 'e-RAB-GuaranteedBitrateDL',
'type': 'BitRate'},
{'name': 'e-RAB-GuaranteedBitrateUL',
'type': 'BitRate'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'GERAN-Cell-ID': {'members': [{'name': 'lAI',
'type': 'LAI'},
{'name': 'rAC',
'type': 'RAC'},
{'name': 'cI',
'type': 'CI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'GTP-TEID': {'size': [4], 'type': 'OCTET STRING'},
'GUMMEI': {'members': [{'name': 'pLMN-Identity',
'type': 'PLMNidentity'},
{'name': 'mME-Group-ID',
'type': 'MME-Group-ID'},
{'name': 'mME-Code',
'type': 'MME-Code'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'GUMMEIList': {'element': {'type': 'GUMMEI'},
'size': [(1, 'maxnoofMMECs')],
'type': 'SEQUENCE OF'},
'GUMMEIType': {'type': 'ENUMERATED',
'values': [('native', 0),
('mapped', 1),
None]},
'GWContextReleaseIndication': {'type': 'ENUMERATED',
'values': [('true', 0),
None]},
'Global-ENB-ID': {'members': [{'name': 'pLMNidentity',
'type': 'PLMNidentity'},
{'name': 'eNB-ID',
'type': 'ENB-ID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'HFN': {'restricted-to': [(0, 1048575)],
'type': 'INTEGER'},
'HFNModified': {'restricted-to': [(0, 131071)],
'type': 'INTEGER'},
'HFNforPDCP-SNlength18': {'restricted-to': [(0, 16383)],
'type': 'INTEGER'},
'HandoverRestrictionList': {'members': [{'name': 'servingPLMN',
'type': 'PLMNidentity'},
{'name': 'equivalentPLMNs',
'optional': True,
'type': 'EPLMNs'},
{'name': 'forbiddenTAs',
'optional': True,
'type': 'ForbiddenTAs'},
{'name': 'forbiddenLAs',
'optional': True,
'type': 'ForbiddenLAs'},
{'name': 'forbiddenInterRATs',
'optional': True,
'type': 'ForbiddenInterRATs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'HandoverType': {'type': 'ENUMERATED',
'values': [('intralte', 0),
('ltetoutran', 1),
('ltetogeran', 2),
('utrantolte', 3),
('gerantolte', 4),
None]},
'IMSI': {'size': [(3, 8)], 'type': 'OCTET STRING'},
'ImmediateMDT': {'members': [{'name': 'measurementsToActivate',
'type': 'MeasurementsToActivate'},
{'name': 'm1reportingTrigger',
'type': 'M1ReportingTrigger'},
{'name': 'm1thresholdeventA2',
'optional': True,
'type': 'M1ThresholdEventA2'},
{'name': 'm1periodicReporting',
'optional': True,
'type': 'M1PeriodicReporting'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'InformationOnRecommendedCellsAndENBsForPaging': {'members': [{'name': 'recommendedCellsForPaging',
'type': 'RecommendedCellsForPaging'},
{'name': 'recommendENBsForPaging',
'type': 'RecommendedENBsForPaging'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'IntegrityProtectionAlgorithms': {'size': [16, None],
'type': 'BIT STRING'},
'IntendedNumberOfPagingAttempts': {'restricted-to': [(1,
16),
None],
'type': 'INTEGER'},
'InterfacesToTrace': {'size': [8],
'type': 'BIT STRING'},
'KillAllWarningMessages': {'type': 'ENUMERATED',
'values': [('true', 0)]},
'L3-Information': {'type': 'OCTET STRING'},
'LAC': {'size': [2], 'type': 'OCTET STRING'},
'LAI': {'members': [{'name': 'pLMNidentity',
'type': 'PLMNidentity'},
{'name': 'lAC', 'type': 'LAC'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'LHN-ID': {'size': [(32, 256)], 'type': 'OCTET STRING'},
'LPPa-PDU': {'type': 'OCTET STRING'},
'LastVisitedCell-Item': {'members': [{'name': 'e-UTRAN-Cell',
'type': 'LastVisitedEUTRANCellInformation'},
{'name': 'uTRAN-Cell',
'type': 'LastVisitedUTRANCellInformation'},
{'name': 'gERAN-Cell',
'type': 'LastVisitedGERANCellInformation'},
None],
'type': 'CHOICE'},
'LastVisitedEUTRANCellInformation': {'members': [{'name': 'global-Cell-ID',
'type': 'EUTRAN-CGI'},
{'name': 'cellType',
'type': 'CellType'},
{'name': 'time-UE-StayedInCell',
'type': 'Time-UE-StayedInCell'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'LastVisitedGERANCellInformation': {'members': [{'name': 'undefined',
'type': 'NULL'},
None],
'type': 'CHOICE'},
'LastVisitedUTRANCellInformation': {'type': 'OCTET STRING'},
'Links-to-log': {'type': 'ENUMERATED',
'values': [('uplink', 0),
('downlink', 1),
('both-uplink-and-downlink',
2),
None]},
'ListeningSubframePattern': {'members': [{'name': 'pattern-period',
'type': 'ENUMERATED',
'values': [('ms1280',
0),
('ms2560',
1),
('ms5120',
2),
('ms10240',
3),
None]},
{'name': 'pattern-offset',
'restricted-to': [(0,
10239),
None],
'type': 'INTEGER'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'LoggedMBSFNMDT': {'members': [{'name': 'loggingInterval',
'type': 'LoggingInterval'},
{'name': 'loggingDuration',
'type': 'LoggingDuration'},
{'name': 'mBSFN-ResultToLog',
'optional': True,
'type': 'MBSFN-ResultToLog'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'LoggedMDT': {'members': [{'name': 'loggingInterval',
'type': 'LoggingInterval'},
{'name': 'loggingDuration',
'type': 'LoggingDuration'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'LoggingDuration': {'type': 'ENUMERATED',
'values': [('m10', 0),
('m20', 1),
('m40', 2),
('m60', 3),
('m90', 4),
('m120', 5)]},
'LoggingInterval': {'type': 'ENUMERATED',
'values': [('ms128', 0),
('ms256', 1),
('ms512', 2),
('ms1024', 3),
('ms2048', 4),
('ms3072', 5),
('ms4096', 6),
('ms6144', 7)]},
'M-TMSI': {'size': [4], 'type': 'OCTET STRING'},
'M1PeriodicReporting': {'members': [{'name': 'reportInterval',
'type': 'ReportIntervalMDT'},
{'name': 'reportAmount',
'type': 'ReportAmountMDT'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'M1ReportingTrigger': {'type': 'ENUMERATED',
'values': [('periodic', 0),
('a2eventtriggered',
1),
None,
('a2eventtriggered-periodic',
2)]},
'M1ThresholdEventA2': {'members': [{'name': 'measurementThreshold',
'type': 'MeasurementThresholdA2'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'M3Configuration': {'members': [{'name': 'm3period',
'type': 'M3period'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'M3period': {'type': 'ENUMERATED',
'values': [('ms100', 0),
('ms1000', 1),
('ms10000', 2),
None]},
'M4Configuration': {'members': [{'name': 'm4period',
'type': 'M4period'},
{'name': 'm4-links-to-log',
'type': 'Links-to-log'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'M4period': {'type': 'ENUMERATED',
'values': [('ms1024', 0),
('ms2048', 1),
('ms5120', 2),
('ms10240', 3),
('min1', 4),
None]},
'M5Configuration': {'members': [{'name': 'm5period',
'type': 'M5period'},
{'name': 'm5-links-to-log',
'type': 'Links-to-log'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'M5period': {'type': 'ENUMERATED',
'values': [('ms1024', 0),
('ms2048', 1),
('ms5120', 2),
('ms10240', 3),
('min1', 4),
None]},
'M6Configuration': {'members': [{'name': 'm6report-Interval',
'type': 'M6report-Interval'},
{'name': 'm6delay-threshold',
'optional': True,
'type': 'M6delay-threshold'},
{'name': 'm6-links-to-log',
'type': 'Links-to-log'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'M6delay-threshold': {'type': 'ENUMERATED',
'values': [('ms30', 0),
('ms40', 1),
('ms50', 2),
('ms60', 3),
('ms70', 4),
('ms80', 5),
('ms90', 6),
('ms100', 7),
('ms150', 8),
('ms300', 9),
('ms500', 10),
('ms750', 11),
None]},
'M6report-Interval': {'type': 'ENUMERATED',
'values': [('ms1024', 0),
('ms2048', 1),
('ms5120', 2),
('ms10240', 3),
None]},
'M7Configuration': {'members': [{'name': 'm7period',
'type': 'M7period'},
{'name': 'm7-links-to-log',
'type': 'Links-to-log'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'M7period': {'restricted-to': [(1, 60), None],
'type': 'INTEGER'},
'MBSFN-ResultToLog': {'element': {'type': 'MBSFN-ResultToLogInfo'},
'size': [(1,
'maxnoofMBSFNAreaMDT')],
'type': 'SEQUENCE OF'},
'MBSFN-ResultToLogInfo': {'members': [{'name': 'mBSFN-AreaId',
'optional': True,
'restricted-to': [(0,
255)],
'type': 'INTEGER'},
{'name': 'carrierFreq',
'type': 'EARFCN'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'MDT-Activation': {'type': 'ENUMERATED',
'values': [('immediate-MDT-only', 0),
('immediate-MDT-and-Trace',
1),
('logged-MDT-only', 2),
None,
('logged-MBSFN-MDT', 3)]},
'MDT-Configuration': {'members': [{'name': 'mdt-Activation',
'type': 'MDT-Activation'},
{'name': 'areaScopeOfMDT',
'type': 'AreaScopeOfMDT'},
{'name': 'mDTMode',
'type': 'MDTMode'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'MDT-Location-Info': {'size': [8],
'type': 'BIT STRING'},
'MDTMode': {'members': [{'name': 'immediateMDT',
'type': 'ImmediateMDT'},
{'name': 'loggedMDT',
'type': 'LoggedMDT'},
None,
{'name': 'mDTMode-Extension',
'type': 'MDTMode-Extension'}],
'type': 'CHOICE'},
'MDTMode-Extension': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'MDTPLMNList': {'element': {'type': 'PLMNidentity'},
'size': [(1, 'maxnoofMDTPLMNs')],
'type': 'SEQUENCE OF'},
'MME-Code': {'size': [1], 'type': 'OCTET STRING'},
'MME-Group-ID': {'size': [2], 'type': 'OCTET STRING'},
'MME-UE-S1AP-ID': {'restricted-to': [(0, 4294967295)],
'type': 'INTEGER'},
'MMEPagingTarget': {'members': [{'name': 'global-ENB-ID',
'type': 'Global-ENB-ID'},
{'name': 'tAI',
'type': 'TAI'},
None],
'type': 'CHOICE'},
'MMERelaySupportIndicator': {'type': 'ENUMERATED',
'values': [('true', 0),
None]},
'MMEname': {'size': [(1, 150), None],
'type': 'PrintableString'},
'MSClassmark2': {'type': 'OCTET STRING'},
'MSClassmark3': {'type': 'OCTET STRING'},
'ManagementBasedMDTAllowed': {'type': 'ENUMERATED',
'values': [('allowed', 0),
None]},
'Masked-IMEISV': {'size': [64], 'type': 'BIT STRING'},
'MeasurementThresholdA2': {'members': [{'name': 'threshold-RSRP',
'type': 'Threshold-RSRP'},
{'name': 'threshold-RSRQ',
'type': 'Threshold-RSRQ'},
None],
'type': 'CHOICE'},
'MeasurementsToActivate': {'size': [8],
'type': 'BIT STRING'},
'MessageIdentifier': {'size': [16],
'type': 'BIT STRING'},
'MobilityInformation': {'size': [32],
'type': 'BIT STRING'},
'MutingAvailabilityIndication': {'type': 'ENUMERATED',
'values': [('available',
0),
('unavailable',
1),
None]},
'MutingPatternInformation': {'members': [{'name': 'muting-pattern-period',
'type': 'ENUMERATED',
'values': [('ms0',
0),
('ms1280',
1),
('ms2560',
2),
('ms5120',
3),
('ms10240',
4),
None]},
{'name': 'muting-pattern-offset',
'optional': True,
'restricted-to': [(0,
10239),
None],
'type': 'INTEGER'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'NAS-PDU': {'type': 'OCTET STRING'},
'NASSecurityParametersfromE-UTRAN': {'type': 'OCTET STRING'},
'NASSecurityParameterstoE-UTRAN': {'type': 'OCTET STRING'},
'NB-IoT-DefaultPagingDRX': {'type': 'ENUMERATED',
'values': [('v128', 0),
('v256', 1),
('v512', 2),
('v1024', 3),
None]},
'NB-IoT-Paging-eDRX-Cycle': {'type': 'ENUMERATED',
'values': [('hf2', 0),
('hf4', 1),
('hf6', 2),
('hf8', 3),
('hf10', 4),
('hf12', 5),
('hf14', 6),
('hf16', 7),
('hf32', 8),
('hf64', 9),
('hf128', 10),
('hf256', 11),
('hf512', 12),
('hf1024', 13),
None]},
'NB-IoT-Paging-eDRXInformation': {'members': [{'name': 'nB-IoT-paging-eDRX-Cycle',
'type': 'NB-IoT-Paging-eDRX-Cycle'},
{'name': 'nB-IoT-pagingTimeWindow',
'optional': True,
'type': 'NB-IoT-PagingTimeWindow'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'NB-IoT-PagingTimeWindow': {'type': 'ENUMERATED',
'values': [('s1', 0),
('s2', 1),
('s3', 2),
('s4', 3),
('s5', 4),
('s6', 5),
('s7', 6),
('s8', 7),
('s9', 8),
('s10', 9),
('s11', 10),
('s12', 11),
('s13', 12),
('s14', 13),
('s15', 14),
('s16', 15),
None]},
'NB-IoT-UEIdentityIndexValue': {'size': [12],
'type': 'BIT STRING'},
'NextPagingAreaScope': {'type': 'ENUMERATED',
'values': [('same', 0),
('changed', 1),
None]},
'NumberOfBroadcasts': {'restricted-to': [(0, 65535)],
'type': 'INTEGER'},
'NumberofBroadcastRequest': {'restricted-to': [(0,
65535)],
'type': 'INTEGER'},
'OldBSS-ToNewBSS-Information': {'type': 'OCTET STRING'},
'OverloadAction': {'type': 'ENUMERATED',
'values': [('reject-non-emergency-mo-dt',
0),
('reject-rrc-cr-signalling',
1),
('permit-emergency-sessions-and-mobile-terminated-services-only',
2),
None,
('permit-high-priority-sessions-and-mobile-terminated-services-only',
3),
('reject-delay-tolerant-access',
4),
('permit-high-priority-sessions-and-exception-reporting-and-mobile-terminated-services-only',
5),
('not-accept-mo-data-or-delay-tolerant-access-from-CP-CIoT',
6)]},
'OverloadResponse': {'members': [{'name': 'overloadAction',
'type': 'OverloadAction'},
None],
'type': 'CHOICE'},
'PDCP-SN': {'restricted-to': [(0, 4095)],
'type': 'INTEGER'},
'PDCP-SNExtended': {'restricted-to': [(0, 32767)],
'type': 'INTEGER'},
'PDCP-SNlength18': {'restricted-to': [(0, 262143)],
'type': 'INTEGER'},
'PLMNidentity': {'type': 'TBCD-STRING'},
'PS-ServiceNotAvailable': {'type': 'ENUMERATED',
'values': [('ps-service-not-available',
0),
None]},
'PWSfailedECGIList': {'element': {'type': 'EUTRAN-CGI'},
'size': [(1,
'maxnoofCellsineNB')],
'type': 'SEQUENCE OF'},
'Paging-eDRX-Cycle': {'type': 'ENUMERATED',
'values': [('hfhalf', 0),
('hf1', 1),
('hf2', 2),
('hf4', 3),
('hf6', 4),
('hf8', 5),
('hf10', 6),
('hf12', 7),
('hf14', 8),
('hf16', 9),
('hf32', 10),
('hf64', 11),
('hf128', 12),
('hf256', 13),
None]},
'Paging-eDRXInformation': {'members': [{'name': 'paging-eDRX-Cycle',
'type': 'Paging-eDRX-Cycle'},
{'name': 'pagingTimeWindow',
'optional': True,
'type': 'PagingTimeWindow'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'PagingAttemptCount': {'restricted-to': [(1, 16), None],
'type': 'INTEGER'},
'PagingAttemptInformation': {'members': [{'name': 'pagingAttemptCount',
'type': 'PagingAttemptCount'},
{'name': 'intendedNumberOfPagingAttempts',
'type': 'IntendedNumberOfPagingAttempts'},
{'name': 'nextPagingAreaScope',
'optional': True,
'type': 'NextPagingAreaScope'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'PagingDRX': {'type': 'ENUMERATED',
'values': [('v32', 0),
('v64', 1),
('v128', 2),
('v256', 3),
None]},
'PagingPriority': {'type': 'ENUMERATED',
'values': [('priolevel1', 0),
('priolevel2', 1),
('priolevel3', 2),
('priolevel4', 3),
('priolevel5', 4),
('priolevel6', 5),
('priolevel7', 6),
('priolevel8', 7),
None]},
'PagingTimeWindow': {'type': 'ENUMERATED',
'values': [('s1', 0),
('s2', 1),
('s3', 2),
('s4', 3),
('s5', 4),
('s6', 5),
('s7', 6),
('s8', 7),
('s9', 8),
('s10', 9),
('s11', 10),
('s12', 11),
('s13', 12),
('s14', 13),
('s15', 14),
('s16', 15),
None]},
'PedestrianUE': {'type': 'ENUMERATED',
'values': [('authorized', 0),
('not-authorized', 1),
None]},
'Port-Number': {'size': [2], 'type': 'OCTET STRING'},
'Pre-emptionCapability': {'type': 'ENUMERATED',
'values': [('shall-not-trigger-pre-emption',
0),
('may-trigger-pre-emption',
1)]},
'Pre-emptionVulnerability': {'type': 'ENUMERATED',
'values': [('not-pre-emptable',
0),
('pre-emptable',
1)]},
'PriorityLevel': {'named-numbers': {'highest': 1,
'lowest': 14,
'no-priority': 15,
'spare': 0},
'restricted-to': [(0, 15)],
'type': 'INTEGER'},
'PrivacyIndicator': {'type': 'ENUMERATED',
'values': [('immediate-MDT', 0),
('logged-MDT', 1),
None]},
'ProSeAuthorized': {'members': [{'name': 'proSeDirectDiscovery',
'optional': True,
'type': 'ProSeDirectDiscovery'},
{'name': 'proSeDirectCommunication',
'optional': True,
'type': 'ProSeDirectCommunication'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ProSeDirectCommunication': {'type': 'ENUMERATED',
'values': [('authorized',
0),
('not-authorized',
1),
None]},
'ProSeDirectDiscovery': {'type': 'ENUMERATED',
'values': [('authorized', 0),
('not-authorized',
1),
None]},
'ProSeUEtoNetworkRelaying': {'type': 'ENUMERATED',
'values': [('authorized',
0),
('not-authorized',
1),
None]},
'QCI': {'restricted-to': [(0, 255)], 'type': 'INTEGER'},
'RAC': {'size': [1], 'type': 'OCTET STRING'},
'RAT-Type': {'type': 'ENUMERATED',
'values': [('nbiot', 0), None]},
'RIMInformation': {'type': 'OCTET STRING'},
'RIMRoutingAddress': {'members': [{'name': 'gERAN-Cell-ID',
'type': 'GERAN-Cell-ID'},
None,
{'name': 'targetRNC-ID',
'type': 'TargetRNC-ID'},
{'name': 'eHRPD-Sector-ID',
'size': [16],
'type': 'OCTET STRING'}],
'type': 'CHOICE'},
'RIMTransfer': {'members': [{'name': 'rIMInformation',
'type': 'RIMInformation'},
{'name': 'rIMRoutingAddress',
'optional': True,
'type': 'RIMRoutingAddress'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'RLFReportInformation': {'members': [{'name': 'uE-RLF-Report-Container',
'type': 'UE-RLF-Report-Container'},
{'name': 'uE-RLF-Report-Container-for-extended-bands',
'optional': True,
'type': 'UE-RLF-Report-Container-for-extended-bands'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'RNC-ID': {'restricted-to': [(0, 4095)],
'type': 'INTEGER'},
'RRC-Container': {'type': 'OCTET STRING'},
'RRC-Establishment-Cause': {'type': 'ENUMERATED',
'values': [('emergency', 0),
('highPriorityAccess',
1),
('mt-Access', 2),
('mo-Signalling',
3),
('mo-Data', 4),
None,
('delay-TolerantAccess',
5),
('mo-VoiceCall',
6),
('mo-ExceptionData',
7)]},
'ReceiveStatusOfULPDCPSDUsExtended': {'size': [(1,
16384)],
'type': 'BIT STRING'},
'ReceiveStatusOfULPDCPSDUsPDCP-SNlength18': {'size': [(1,
131072)],
'type': 'BIT STRING'},
'ReceiveStatusofULPDCPSDUs': {'size': [4096],
'type': 'BIT STRING'},
'RecommendedCellItem': {'members': [{'name': 'eUTRAN-CGI',
'type': 'EUTRAN-CGI'},
{'name': 'timeStayedInCell',
'optional': True,
'restricted-to': [(0,
4095)],
'type': 'INTEGER'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'RecommendedCellList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofRecommendedCells')],
'type': 'SEQUENCE OF'},
'RecommendedCellsForPaging': {'members': [{'name': 'recommendedCellList',
'type': 'RecommendedCellList'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'RecommendedENBItem': {'members': [{'name': 'mMEPagingTarget',
'type': 'MMEPagingTarget'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'RecommendedENBList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofRecommendedENBs')],
'type': 'SEQUENCE OF'},
'RecommendedENBsForPaging': {'members': [{'name': 'recommendedENBList',
'type': 'RecommendedENBList'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'RelativeMMECapacity': {'restricted-to': [(0, 255)],
'type': 'INTEGER'},
'RelayNode-Indicator': {'type': 'ENUMERATED',
'values': [('true', 0), None]},
'RepetitionPeriod': {'restricted-to': [(0, 4095)],
'type': 'INTEGER'},
'ReportAmountMDT': {'type': 'ENUMERATED',
'values': [('r1', 0),
('r2', 1),
('r4', 2),
('r8', 3),
('r16', 4),
('r32', 5),
('r64', 6),
('rinfinity', 7)]},
'ReportArea': {'type': 'ENUMERATED',
'values': [('ecgi', 0), None]},
'ReportIntervalMDT': {'type': 'ENUMERATED',
'values': [('ms120', 0),
('ms240', 1),
('ms480', 2),
('ms640', 3),
('ms1024', 4),
('ms2048', 5),
('ms5120', 6),
('ms10240', 7),
('min1', 8),
('min6', 9),
('min12', 10),
('min30', 11),
('min60', 12)]},
'RequestType': {'members': [{'name': 'eventType',
'type': 'EventType'},
{'name': 'reportArea',
'type': 'ReportArea'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'Routing-ID': {'restricted-to': [(0, 255)],
'type': 'INTEGER'},
'S-TMSI': {'members': [{'name': 'mMEC',
'type': 'MME-Code'},
{'name': 'm-TMSI',
'type': 'M-TMSI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'SONConfigurationTransfer': {'members': [{'name': 'targeteNB-ID',
'type': 'TargeteNB-ID'},
{'name': 'sourceeNB-ID',
'type': 'SourceeNB-ID'},
{'name': 'sONInformation',
'type': 'SONInformation'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'SONInformation': {'members': [{'name': 'sONInformationRequest',
'type': 'SONInformationRequest'},
{'name': 'sONInformationReply',
'type': 'SONInformationReply'},
None,
{'name': 'sONInformation-Extension',
'type': 'SONInformation-Extension'}],
'type': 'CHOICE'},
'SONInformation-Extension': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'SONInformationReply': {'members': [{'name': 'x2TNLConfigurationInfo',
'optional': True,
'type': 'X2TNLConfigurationInfo'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'SONInformationReport': {'members': [{'name': 'rLFReportInformation',
'type': 'RLFReportInformation'},
None],
'type': 'CHOICE'},
'SONInformationRequest': {'type': 'ENUMERATED',
'values': [('x2TNL-Configuration-Info',
0),
None,
('time-Synchronisation-Info',
1),
('activate-Muting',
2),
('deactivate-Muting',
3)]},
'SRVCCHOIndication': {'type': 'ENUMERATED',
'values': [('pSandCS', 0),
('cSonly', 1),
None]},
'SRVCCOperationNotPossible': {'type': 'ENUMERATED',
'values': [('notPossible',
0),
None]},
'SRVCCOperationPossible': {'type': 'ENUMERATED',
'values': [('possible', 0),
None]},
'SecurityContext': {'members': [{'name': 'nextHopChainingCount',
'restricted-to': [(0,
7)],
'type': 'INTEGER'},
{'name': 'nextHopParameter',
'type': 'SecurityKey'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'SecurityKey': {'size': [256], 'type': 'BIT STRING'},
'SerialNumber': {'size': [16], 'type': 'BIT STRING'},
'ServedDCNs': {'element': {'type': 'ServedDCNsItem'},
'size': [(0, 'maxnoofDCNs')],
'type': 'SEQUENCE OF'},
'ServedDCNsItem': {'members': [{'name': 'dCN-ID',
'type': 'DCN-ID'},
{'name': 'relativeDCNCapacity',
'type': 'RelativeMMECapacity'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ServedGUMMEIs': {'element': {'type': 'ServedGUMMEIsItem'},
'size': [(1, 'maxnoofRATs')],
'type': 'SEQUENCE OF'},
'ServedGUMMEIsItem': {'members': [{'name': 'servedPLMNs',
'type': 'ServedPLMNs'},
{'name': 'servedGroupIDs',
'type': 'ServedGroupIDs'},
{'name': 'servedMMECs',
'type': 'ServedMMECs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'ServedGroupIDs': {'element': {'type': 'MME-Group-ID'},
'size': [(1, 'maxnoofGroupIDs')],
'type': 'SEQUENCE OF'},
'ServedMMECs': {'element': {'type': 'MME-Code'},
'size': [(1, 'maxnoofMMECs')],
'type': 'SEQUENCE OF'},
'ServedPLMNs': {'element': {'type': 'PLMNidentity'},
'size': [(1, 'maxnoofPLMNsPerMME')],
'type': 'SEQUENCE OF'},
'Source-ToTarget-TransparentContainer': {'type': 'OCTET STRING'},
'SourceBSS-ToTargetBSS-TransparentContainer': {'type': 'OCTET STRING'},
'SourceOfUEActivityBehaviourInformation': {'type': 'ENUMERATED',
'values': [('subscription-information',
0),
('statistics',
1),
None]},
'SourceRNC-ToTargetRNC-TransparentContainer': {'type': 'OCTET STRING'},
'SourceeNB-ID': {'members': [{'name': 'global-ENB-ID',
'type': 'Global-ENB-ID'},
{'name': 'selected-TAI',
'type': 'TAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'}],
'type': 'SEQUENCE'},
'SourceeNB-ToTargeteNB-TransparentContainer': {'members': [{'name': 'rRC-Container',
'type': 'RRC-Container'},
{'name': 'e-RABInformationList',
'optional': True,
'type': 'E-RABInformationList'},
{'name': 'targetCell-ID',
'type': 'EUTRAN-CGI'},
{'name': 'subscriberProfileIDforRFP',
'optional': True,
'type': 'SubscriberProfileIDforRFP'},
{'name': 'uE-HistoryInformation',
'type': 'UE-HistoryInformation'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'StratumLevel': {'restricted-to': [(0, 3), None],
'type': 'INTEGER'},
'SubscriberProfileIDforRFP': {'restricted-to': [(1,
256)],
'type': 'INTEGER'},
'SupportedTAs': {'element': {'type': 'SupportedTAs-Item'},
'size': [(1, 'maxnoofTACs')],
'type': 'SEQUENCE OF'},
'SupportedTAs-Item': {'members': [{'name': 'tAC',
'type': 'TAC'},
{'name': 'broadcastPLMNs',
'type': 'BPLMNs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'SynchronisationInformation': {'members': [{'name': 'sourceStratumLevel',
'optional': True,
'type': 'StratumLevel'},
{'name': 'listeningSubframePattern',
'optional': True,
'type': 'ListeningSubframePattern'},
{'name': 'aggressoreCGI-List',
'optional': True,
'type': 'ECGI-List'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'SynchronisationStatus': {'type': 'ENUMERATED',
'values': [('synchronous', 0),
('asynchronous',
1),
None]},
'TABasedMDT': {'members': [{'name': 'tAListforMDT',
'type': 'TAListforMDT'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TAC': {'size': [2], 'type': 'OCTET STRING'},
'TAI': {'members': [{'name': 'pLMNidentity',
'type': 'PLMNidentity'},
{'name': 'tAC', 'type': 'TAC'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TAI-Broadcast': {'element': {'type': 'TAI-Broadcast-Item'},
'size': [(1, 'maxnoofTAIforWarning')],
'type': 'SEQUENCE OF'},
'TAI-Broadcast-Item': {'members': [{'name': 'tAI',
'type': 'TAI'},
{'name': 'completedCellinTAI',
'type': 'CompletedCellinTAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TAI-Cancelled': {'element': {'type': 'TAI-Cancelled-Item'},
'size': [(1, 'maxnoofTAIforWarning')],
'type': 'SEQUENCE OF'},
'TAI-Cancelled-Item': {'members': [{'name': 'tAI',
'type': 'TAI'},
{'name': 'cancelledCellinTAI',
'type': 'CancelledCellinTAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TAIBasedMDT': {'members': [{'name': 'tAIListforMDT',
'type': 'TAIListforMDT'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TAIListForRestart': {'element': {'type': 'TAI'},
'size': [(1,
'maxnoofRestartTAIs')],
'type': 'SEQUENCE OF'},
'TAIListforMDT': {'element': {'type': 'TAI'},
'size': [(1, 'maxnoofTAforMDT')],
'type': 'SEQUENCE OF'},
'TAIListforWarning': {'element': {'type': 'TAI'},
'size': [(1,
'maxnoofTAIforWarning')],
'type': 'SEQUENCE OF'},
'TAListforMDT': {'element': {'type': 'TAC'},
'size': [(1, 'maxnoofTAforMDT')],
'type': 'SEQUENCE OF'},
'TBCD-STRING': {'size': [3], 'type': 'OCTET STRING'},
'Target-ToSource-TransparentContainer': {'type': 'OCTET STRING'},
'TargetBSS-ToSourceBSS-TransparentContainer': {'type': 'OCTET STRING'},
'TargetID': {'members': [{'name': 'targeteNB-ID',
'type': 'TargeteNB-ID'},
{'name': 'targetRNC-ID',
'type': 'TargetRNC-ID'},
{'name': 'cGI', 'type': 'CGI'},
None],
'type': 'CHOICE'},
'TargetRNC-ID': {'members': [{'name': 'lAI',
'type': 'LAI'},
{'name': 'rAC',
'optional': True,
'type': 'RAC'},
{'name': 'rNC-ID',
'type': 'RNC-ID'},
{'name': 'extendedRNC-ID',
'optional': True,
'type': 'ExtendedRNC-ID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TargetRNC-ToSourceRNC-TransparentContainer': {'type': 'OCTET STRING'},
'TargeteNB-ID': {'members': [{'name': 'global-ENB-ID',
'type': 'Global-ENB-ID'},
{'name': 'selected-TAI',
'type': 'TAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TargeteNB-ToSourceeNB-TransparentContainer': {'members': [{'name': 'rRC-Container',
'type': 'RRC-Container'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'Threshold-RSRP': {'restricted-to': [(0, 97)],
'type': 'INTEGER'},
'Threshold-RSRQ': {'restricted-to': [(0, 34)],
'type': 'INTEGER'},
'Time-UE-StayedInCell': {'restricted-to': [(0, 4095)],
'type': 'INTEGER'},
'Time-UE-StayedInCell-EnhancedGranularity': {'restricted-to': [(0,
40950)],
'type': 'INTEGER'},
'TimeSynchronisationInfo': {'members': [{'name': 'stratumLevel',
'type': 'StratumLevel'},
{'name': 'synchronisationStatus',
'type': 'SynchronisationStatus'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TimeToWait': {'type': 'ENUMERATED',
'values': [('v1s', 0),
('v2s', 1),
('v5s', 2),
('v10s', 3),
('v20s', 4),
('v60s', 5),
None]},
'TraceActivation': {'members': [{'name': 'e-UTRAN-Trace-ID',
'type': 'E-UTRAN-Trace-ID'},
{'name': 'interfacesToTrace',
'type': 'InterfacesToTrace'},
{'name': 'traceDepth',
'type': 'TraceDepth'},
{'name': 'traceCollectionEntityIPAddress',
'type': 'TransportLayerAddress'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TraceDepth': {'type': 'ENUMERATED',
'values': [('minimum', 0),
('medium', 1),
('maximum', 2),
('minimumWithoutVendorSpecificExtension',
3),
('mediumWithoutVendorSpecificExtension',
4),
('maximumWithoutVendorSpecificExtension',
5),
None]},
'TrafficLoadReductionIndication': {'restricted-to': [(1,
99)],
'type': 'INTEGER'},
'TransportInformation': {'members': [{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'uL-GTP-TEID',
'type': 'GTP-TEID'},
None],
'type': 'SEQUENCE'},
'TransportLayerAddress': {'size': [(1, 160), None],
'type': 'BIT STRING'},
'TunnelInformation': {'members': [{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'uDP-Port-Number',
'optional': True,
'type': 'Port-Number'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TypeOfError': {'type': 'ENUMERATED',
'values': [('not-understood', 0),
('missing', 1),
None]},
'UE-HistoryInformation': {'element': {'type': 'LastVisitedCell-Item'},
'size': [(1, 'maxnoofCells')],
'type': 'SEQUENCE OF'},
'UE-HistoryInformationFromTheUE': {'type': 'OCTET STRING'},
'UE-RLF-Report-Container': {'type': 'OCTET STRING'},
'UE-RLF-Report-Container-for-extended-bands': {'type': 'OCTET STRING'},
'UE-RetentionInformation': {'type': 'ENUMERATED',
'values': [('ues-retained',
0),
None]},
'UE-S1AP-ID-pair': {'members': [{'name': 'mME-UE-S1AP-ID',
'type': 'MME-UE-S1AP-ID'},
{'name': 'eNB-UE-S1AP-ID',
'type': 'ENB-UE-S1AP-ID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'UE-S1AP-IDs': {'members': [{'name': 'uE-S1AP-ID-pair',
'type': 'UE-S1AP-ID-pair'},
{'name': 'mME-UE-S1AP-ID',
'type': 'MME-UE-S1AP-ID'},
None],
'type': 'CHOICE'},
'UE-Usage-Type': {'restricted-to': [(0, 255)],
'type': 'INTEGER'},
'UE-associatedLogicalS1-ConnectionItem': {'members': [{'name': 'mME-UE-S1AP-ID',
'optional': True,
'type': 'MME-UE-S1AP-ID'},
{'name': 'eNB-UE-S1AP-ID',
'optional': True,
'type': 'ENB-UE-S1AP-ID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'UEAggregateMaximumBitrate': {'members': [{'name': 'uEaggregateMaximumBitRateDL',
'type': 'BitRate'},
{'name': 'uEaggregateMaximumBitRateUL',
'type': 'BitRate'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'UEIdentityIndexValue': {'size': [10],
'type': 'BIT STRING'},
'UEPagingID': {'members': [{'name': 's-TMSI',
'type': 'S-TMSI'},
{'name': 'iMSI',
'type': 'IMSI'},
None],
'type': 'CHOICE'},
'UERadioCapability': {'type': 'OCTET STRING'},
'UERadioCapabilityForPaging': {'type': 'OCTET STRING'},
'UESecurityCapabilities': {'members': [{'name': 'encryptionAlgorithms',
'type': 'EncryptionAlgorithms'},
{'name': 'integrityProtectionAlgorithms',
'type': 'IntegrityProtectionAlgorithms'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'UESidelinkAggregateMaximumBitrate': {'members': [{'name': 'uESidelinkAggregateMaximumBitRate',
'type': 'BitRate'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'UEUserPlaneCIoTSupportIndicator': {'type': 'ENUMERATED',
'values': [('supported',
0),
None]},
'UL-CP-SecurityInformation': {'members': [{'name': 'ul-NAS-MAC',
'type': 'UL-NAS-MAC'},
{'name': 'ul-NAS-Count',
'type': 'UL-NAS-Count'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'UL-NAS-Count': {'size': [5], 'type': 'BIT STRING'},
'UL-NAS-MAC': {'size': [16], 'type': 'BIT STRING'},
'UserLocationInformation': {'members': [{'name': 'eutran-cgi',
'type': 'EUTRAN-CGI'},
{'name': 'tai',
'type': 'TAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'V2XServicesAuthorized': {'members': [{'name': 'vehicleUE',
'optional': True,
'type': 'VehicleUE'},
{'name': 'pedestrianUE',
'optional': True,
'type': 'PedestrianUE'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'VehicleUE': {'type': 'ENUMERATED',
'values': [('authorized', 0),
('not-authorized', 1),
None]},
'VoiceSupportMatchIndicator': {'type': 'ENUMERATED',
'values': [('supported',
0),
('not-supported',
1),
None]},
'WarningAreaList': {'members': [{'name': 'cellIDList',
'type': 'ECGIList'},
{'name': 'trackingAreaListforWarning',
'type': 'TAIListforWarning'},
{'name': 'emergencyAreaIDList',
'type': 'EmergencyAreaIDList'},
None],
'type': 'CHOICE'},
'WarningMessageContents': {'size': [(1, 9600)],
'type': 'OCTET STRING'},
'WarningSecurityInfo': {'size': [50],
'type': 'OCTET STRING'},
'WarningType': {'size': [2], 'type': 'OCTET STRING'},
'X2TNLConfigurationInfo': {'members': [{'name': 'eNBX2TransportLayerAddresses',
'type': 'ENBX2TLAs'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'}},
'values': {}},
'S1AP-PDU-Contents': {'extensibility-implied': False,
'imports': {'S1AP-Constants': ['id-Additional-GUTI',
'id-AdditionalCSFallbackIndicator',
'id-AssistanceDataForPaging',
'id-BearerType',
'id-BroadcastCancelledAreaList',
'id-BroadcastCompletedAreaList',
'id-CE-mode-B-SupportIndicator',
'id-CNDomain',
'id-CSFallbackIndicator',
'id-CSG-Id',
'id-CSG-IdList',
'id-CSGMembershipInfo',
'id-CSGMembershipStatus',
'id-Cause',
'id-CellAccessMode',
'id-CellIdentifierAndCELevelForCECapableUEs',
'id-ConcurrentWarningMessageIndicator',
'id-Correlation-ID',
'id-Coverage-Level',
'id-CriticalityDiagnostics',
'id-DCN-ID',
'id-DL-CP-SecurityInformation',
'id-DLNASPDUDeliveryAckRequest',
'id-Data-Forwarding-Not-Possible',
'id-DataCodingScheme',
'id-DefaultPagingDRX',
'id-Direct-Forwarding-Path-Availability',
'id-E-RABAdmittedItem',
'id-E-RABAdmittedList',
'id-E-RABDataForwardingItem',
'id-E-RABFailedToBeReleasedList',
'id-E-RABFailedToModifyList',
'id-E-RABFailedToModifyListBearerModConf',
'id-E-RABFailedToReleaseList',
'id-E-RABFailedToResumeItemResumeReq',
'id-E-RABFailedToResumeItemResumeRes',
'id-E-RABFailedToResumeListResumeReq',
'id-E-RABFailedToResumeListResumeRes',
'id-E-RABFailedToSetupListBearerSURes',
'id-E-RABFailedToSetupListCtxtSURes',
'id-E-RABFailedToSetupListHOReqAck',
'id-E-RABFailedtoSetupItemHOReqAck',
'id-E-RABModify',
'id-E-RABModifyItemBearerModConf',
'id-E-RABModifyItemBearerModRes',
'id-E-RABModifyListBearerModConf',
'id-E-RABModifyListBearerModRes',
'id-E-RABNotToBeModifiedItemBearerModInd',
'id-E-RABNotToBeModifiedListBearerModInd',
'id-E-RABRelease',
'id-E-RABReleaseIndication',
'id-E-RABReleaseItemBearerRelComp',
'id-E-RABReleaseItemHOCmd',
'id-E-RABReleaseListBearerRelComp',
'id-E-RABReleasedList',
'id-E-RABSetup',
'id-E-RABSetupItemBearerSURes',
'id-E-RABSetupItemCtxtSURes',
'id-E-RABSetupListBearerSURes',
'id-E-RABSetupListCtxtSURes',
'id-E-RABSubjecttoDataForwardingList',
'id-E-RABToBeModifiedItemBearerModInd',
'id-E-RABToBeModifiedItemBearerModReq',
'id-E-RABToBeModifiedListBearerModInd',
'id-E-RABToBeModifiedListBearerModReq',
'id-E-RABToBeReleasedList',
'id-E-RABToBeReleasedListBearerModConf',
'id-E-RABToBeSetupItemBearerSUReq',
'id-E-RABToBeSetupItemCtxtSUReq',
'id-E-RABToBeSetupItemHOReq',
'id-E-RABToBeSetupListBearerSUReq',
'id-E-RABToBeSetupListCtxtSUReq',
'id-E-RABToBeSetupListHOReq',
'id-E-RABToBeSwitchedDLItem',
'id-E-RABToBeSwitchedDLList',
'id-E-RABToBeSwitchedULItem',
'id-E-RABToBeSwitchedULList',
'id-E-RABtoReleaseListHOCmd',
'id-E-UTRAN-Trace-ID',
'id-ECGIListForRestart',
'id-EUTRAN-CGI',
'id-EUTRANRoundTripDelayEstimationInfo',
'id-EmergencyAreaIDListForRestart',
'id-EnhancedCoverageRestricted',
'id-ExpectedUEBehaviour',
'id-ExtendedRepetitionPeriod',
'id-GERANtoLTEHOInformationRes',
'id-GUMMEI-ID',
'id-GUMMEIList',
'id-GUMMEIType',
'id-GW-TransportLayerAddress',
'id-GWContextReleaseIndication',
'id-Global-ENB-ID',
'id-HandoverRestrictionList',
'id-HandoverType',
'id-InformationOnRecommendedCellsAndENBsForPaging',
'id-InitialContextSetup',
'id-Inter-SystemInformationTransferTypeEDT',
'id-Inter-SystemInformationTransferTypeMDT',
'id-KillAllWarningMessages',
'id-LHN-ID',
'id-LPPa-PDU',
'id-MME-Group-ID',
'id-MME-UE-S1AP-ID',
'id-MME-UE-S1AP-ID-2',
'id-MMERelaySupportIndicator',
'id-MMEname',
'id-MSClassmark2',
'id-MSClassmark3',
'id-ManagementBasedMDTAllowed',
'id-ManagementBasedMDTPLMNList',
'id-Masked-IMEISV',
'id-MessageIdentifier',
'id-NAS-DownlinkCount',
'id-NAS-PDU',
'id-NASSecurityParametersfromE-UTRAN',
'id-NASSecurityParameterstoE-UTRAN',
'id-NB-IoT-DefaultPagingDRX',
'id-NB-IoT-Paging-eDRXInformation',
'id-NB-IoT-UEIdentityIndexValue',
'id-NumberofBroadcastRequest',
'id-OverloadResponse',
'id-PS-ServiceNotAvailable',
'id-PWSFailureIndication',
'id-PWSfailedECGIList',
'id-Paging-eDRXInformation',
'id-PagingPriority',
'id-PrivacyIndicator',
'id-ProSeAuthorized',
'id-RRC-Establishment-Cause',
'id-RRC-Resume-Cause',
'id-RegisteredLAI',
'id-RelativeMMECapacity',
'id-RelayNode-Indicator',
'id-RepetitionPeriod',
'id-RequestType',
'id-ResetType',
'id-Routing-ID',
'id-S-TMSI',
'id-S1-Message',
'id-SIPTO-Correlation-ID',
'id-SIPTO-L-GW-TransportLayerAddress',
'id-SONConfigurationTransferECT',
'id-SONConfigurationTransferMCT',
'id-SRVCCHOIndication',
'id-SRVCCOperationNotPossible',
'id-SRVCCOperationPossible',
'id-SecurityContext',
'id-SecurityKey',
'id-SerialNumber',
'id-ServedDCNs',
'id-ServedGUMMEIs',
'id-Source-ToTarget-TransparentContainer',
'id-Source-ToTarget-TransparentContainer-Secondary',
'id-SourceMME-GUMMEI',
'id-SourceMME-UE-S1AP-ID',
'id-SubscriberProfileIDforRFP',
'id-SupportedTAs',
'id-TAI',
'id-TAIItem',
'id-TAIList',
'id-TAIListForRestart',
'id-Target-ToSource-TransparentContainer',
'id-Target-ToSource-TransparentContainer-Secondary',
'id-TargetID',
'id-TimeToWait',
'id-TraceActivation',
'id-TraceCollectionEntityIPAddress',
'id-TrafficLoadReductionIndication',
'id-TransportInformation',
'id-Tunnel-Information-for-BBF',
'id-UE-Level-QoS-Parameters',
'id-UE-RetentionInformation',
'id-UE-S1AP-IDs',
'id-UE-Usage-Type',
'id-UE-associatedLogicalS1-ConnectionItem',
'id-UE-associatedLogicalS1-ConnectionListResAck',
'id-UEIdentityIndexValue',
'id-UEPagingID',
'id-UERadioCapability',
'id-UERadioCapabilityForPaging',
'id-UESecurityCapabilities',
'id-UESidelinkAggregateMaximumBitrate',
'id-UEUserPlaneCIoTSupportIndicator',
'id-UL-CP-SecurityInformation',
'id-UTRANtoLTEHOInformationRes',
'id-UserLocationInformation',
'id-V2XServicesAuthorized',
'id-VoiceSupportMatchIndicator',
'id-WarningAreaList',
'id-WarningMessageContents',
'id-WarningSecurityInfo',
'id-WarningType',
'id-cdma2000HORequiredIndication',
'id-cdma2000HOStatus',
'id-cdma2000OneXRAND',
'id-cdma2000OneXSRVCCInfo',
'id-cdma2000PDU',
'id-cdma2000RATType',
'id-cdma2000SectorID',
'id-eNB-StatusTransfer-TransparentContainer',
'id-eNB-UE-S1AP-ID',
'id-eNBname',
'id-extended-UEIdentityIndexValue',
'id-pagingDRX',
'id-uEaggregateMaximumBitrate',
'maxnoofCellID',
'maxnoofCellinEAI',
'maxnoofCellinTAI',
'maxnoofE-RABs',
'maxnoofEmergencyAreaID',
'maxnoofErrors',
'maxnoofIndividualS1ConnectionsToReset',
'maxnoofTAIforWarning',
'maxnoofTAIs'],
'S1AP-Containers': ['PrivateIE-Container',
'ProtocolExtensionContainer',
'ProtocolIE-Container',
'ProtocolIE-ContainerList',
'ProtocolIE-ContainerPair',
'ProtocolIE-ContainerPairList',
'ProtocolIE-SingleContainer',
'S1AP-PRIVATE-IES',
'S1AP-PROTOCOL-EXTENSION',
'S1AP-PROTOCOL-IES',
'S1AP-PROTOCOL-IES-PAIR',
'{',
'{',
'{',
'{',
'{',
'{',
'{',
'}',
'}',
'}',
'}',
'}',
'}',
'}'],
'S1AP-IEs': ['Additional-GUTI',
'AdditionalCSFallbackIndicator',
'AssistanceDataForPaging',
'BearerType',
'BroadcastCancelledAreaList',
'BroadcastCompletedAreaList',
'CE-mode-B-SupportIndicator',
'CNDomain',
'CSFallbackIndicator',
'CSG-Id',
'CSG-IdList',
'CSGMembershipStatus',
'Cause',
'Cdma2000HORequiredIndication',
'Cdma2000HOStatus',
'Cdma2000OneXRAND',
'Cdma2000OneXSRVCCInfo',
'Cdma2000PDU',
'Cdma2000RATType',
'Cdma2000SectorID',
'CellAccessMode',
'CellIdentifierAndCELevelForCECapableUEs',
'ConcurrentWarningMessageIndicator',
'Correlation-ID',
'Coverage-Level',
'CriticalityDiagnostics',
'DCN-ID',
'DL-CP-SecurityInformation',
'DLNASPDUDeliveryAckRequest',
'Data-Forwarding-Not-Possible',
'DataCodingScheme',
'Direct-Forwarding-Path-Availability',
'E-RAB-ID',
'E-RABLevelQoSParameters',
'E-RABList',
'E-UTRAN-Trace-ID',
'ECGIListForRestart',
'ENB-StatusTransfer-TransparentContainer',
'ENB-UE-S1AP-ID',
'ENBname',
'EUTRAN-CGI',
'EUTRANRoundTripDelayEstimationInfo',
'EmergencyAreaIDListForRestart',
'EnhancedCoverageRestricted',
'ExpectedUEBehaviour',
'Extended-UEIdentityIndexValue',
'ExtendedRepetitionPeriod',
'GTP-TEID',
'GUMMEI',
'GUMMEIList',
'GUMMEIType',
'GWContextReleaseIndication',
'Global-ENB-ID',
'HandoverRestrictionList',
'HandoverType',
'InformationOnRecommendedCellsAndENBsForPaging',
'KillAllWarningMessages',
'LAI',
'LHN-ID',
'LPPa-PDU',
'MDTPLMNList',
'MME-Group-ID',
'MME-UE-S1AP-ID',
'MMERelaySupportIndicator',
'MMEname',
'MSClassmark2',
'MSClassmark3',
'ManagementBasedMDTAllowed',
'Masked-IMEISV',
'MessageIdentifier',
'NAS-PDU',
'NASSecurityParametersfromE-UTRAN',
'NASSecurityParameterstoE-UTRAN',
'NB-IoT-DefaultPagingDRX',
'NB-IoT-Paging-eDRXInformation',
'NB-IoT-UEIdentityIndexValue',
'NumberofBroadcastRequest',
'OverloadResponse',
'PLMNidentity',
'PS-ServiceNotAvailable',
'PWSfailedECGIList',
'Paging-eDRXInformation',
'PagingDRX',
'PagingPriority',
'PrivacyIndicator',
'ProSeAuthorized',
'RIMTransfer',
'RRC-Establishment-Cause',
'RelativeMMECapacity',
'RelayNode-Indicator',
'RepetitionPeriod',
'RequestType',
'Routing-ID',
'S-TMSI',
'SONConfigurationTransfer',
'SRVCCHOIndication',
'SRVCCOperationNotPossible',
'SRVCCOperationPossible',
'SecurityContext',
'SecurityKey',
'SerialNumber',
'ServedDCNs',
'ServedGUMMEIs',
'Source-ToTarget-TransparentContainer',
'SourceBSS-ToTargetBSS-TransparentContainer',
'SourceRNC-ToTargetRNC-TransparentContainer',
'SourceeNB-ToTargeteNB-TransparentContainer',
'SubscriberProfileIDforRFP',
'SupportedTAs',
'TAI',
'TAIListForRestart',
'Target-ToSource-TransparentContainer',
'TargetBSS-ToSourceBSS-TransparentContainer',
'TargetID',
'TargetRNC-ToSourceRNC-TransparentContainer',
'TargeteNB-ToSourceeNB-TransparentContainer',
'TimeToWait',
'TraceActivation',
'TrafficLoadReductionIndication',
'TransportInformation',
'TransportLayerAddress',
'TunnelInformation',
'UE-RetentionInformation',
'UE-S1AP-IDs',
'UE-Usage-Type',
'UE-associatedLogicalS1-ConnectionItem',
'UEAggregateMaximumBitrate',
'UEIdentityIndexValue',
'UEPagingID',
'UERadioCapability',
'UERadioCapabilityForPaging',
'UESecurityCapabilities',
'UESidelinkAggregateMaximumBitrate',
'UEUserPlaneCIoTSupportIndicator',
'UL-CP-SecurityInformation',
'UserLocationInformation',
'V2XServicesAuthorized',
'VoiceSupportMatchIndicator',
'WarningAreaList',
'WarningMessageContents',
'WarningSecurityInfo',
'WarningType']},
'object-classes': {},
'object-sets': {'CSGMembershipInfo-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'CellTrafficTraceIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ConnectionEstablishmentIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'DeactivateTraceIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'DownlinkNASTransport-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'DownlinkNonUEAssociatedLPPaTransport-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'DownlinkS1cdma2000tunnellingIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'DownlinkUEAssociatedLPPaTransport-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABAdmittedItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABAdmittedItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABDataForwardingItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABDataForwardingItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABFailedToResumeItemResumeReq-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABFailedToResumeItemResumeReqIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABFailedToResumeItemResumeRes-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABFailedToResumeItemResumeResIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABFailedToSetupItemHOReqAckExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABFailedtoSetupItemHOReqAckIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABModificationConfirmIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABModificationIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABModifyItemBearerModConfExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABModifyItemBearerModConfIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABModifyItemBearerModResExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABModifyItemBearerModResIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABModifyRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABModifyResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABNotToBeModifiedItemBearerModInd-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABNotToBeModifiedItemBearerModIndIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABReleaseCommandIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABReleaseIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABReleaseItemBearerRelCompExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABReleaseItemBearerRelCompIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABReleaseResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABSetupItemBearerSUResExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABSetupItemBearerSUResIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABSetupItemCtxtSUResExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABSetupItemCtxtSUResIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABSetupRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABSetupResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABToBeModifiedItemBearerModInd-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABToBeModifiedItemBearerModIndIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABToBeModifiedItemBearerModReqIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABToBeModifyItemBearerModReqExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'E-RABToBeSetupItemBearerSUReqExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'E-RABToBeSetupItemBearerSUReqIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABToBeSetupItemCtxtSUReqExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'E-RABToBeSetupItemCtxtSUReqIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABToBeSetupItemHOReq-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': []},
'E-RABToBeSetupItemHOReqIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABToBeSwitchedDLItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABToBeSwitchedDLItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'E-RABToBeSwitchedULItem-ExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'E-RABToBeSwitchedULItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ENBCPRelocationIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ENBConfigurationTransferIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ENBConfigurationUpdateAcknowledgeIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ENBConfigurationUpdateFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ENBConfigurationUpdateIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ENBDirectInformationTransferIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ENBStatusTransferIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ErrorIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverCancelAcknowledgeIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverCancelIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverCommandIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverNotifyIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverPreparationFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverRequestAcknowledgeIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'HandoverRequiredIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'InitialContextSetupFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'InitialContextSetupRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'InitialContextSetupResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'InitialUEMessage-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'KillRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'KillResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'LocationReportIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'LocationReportingControlIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'LocationReportingFailureIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MMECPRelocationIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MMEConfigurationTransferIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MMEConfigurationUpdateAcknowledgeIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MMEConfigurationUpdateFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MMEConfigurationUpdateIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MMEDirectInformationTransferIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'MMEStatusTransferIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'NASDeliveryIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': ['.']},
'NASNonDeliveryIndication-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'OverloadStartIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'OverloadStopIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'PWSFailureIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'PWSRestartIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'PagingIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'PathSwitchRequestAcknowledgeIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'PathSwitchRequestFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'PathSwitchRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'PrivateMessageIEs': {'class': 'S1AP-PRIVATE-IES',
'members': ['.']},
'RerouteNASRequest-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ResetAcknowledgeIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'ResetIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'RetrieveUEInformationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'S1SetupFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'S1SetupRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'S1SetupResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'TAIItemExtIEs': {'class': 'S1AP-PROTOCOL-EXTENSION',
'members': ['.']},
'TAIItemIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'TraceFailureIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'TraceStartIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UE-associatedLogicalS1-ConnectionItemRes': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UE-associatedLogicalS1-ConnectionItemResAck': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UECapabilityInfoIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextModificationConfirmIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextModificationFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextModificationIndicationIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextModificationRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextModificationResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextReleaseCommand-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextReleaseComplete-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextReleaseRequest-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextResumeFailureIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextResumeRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextResumeResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextSuspendRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEContextSuspendResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UEInformationTransferIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UERadioCapabilityMatchRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UERadioCapabilityMatchResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UplinkNASTransport-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UplinkNonUEAssociatedLPPaTransport-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UplinkS1cdma2000tunnellingIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'UplinkUEAssociatedLPPaTransport-IEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'WriteReplaceWarningRequestIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []},
'WriteReplaceWarningResponseIEs': {'class': 'S1AP-PROTOCOL-IES',
'members': []}},
'tags': 'AUTOMATIC',
'types': {'CSGMembershipInfo': {'members': [{'name': 'cSGMembershipStatus',
'type': 'CSGMembershipStatus'},
{'name': 'cSG-Id',
'type': 'CSG-Id'},
{'name': 'cellAccessMode',
'optional': True,
'type': 'CellAccessMode'},
{'name': 'pLMNidentity',
'optional': True,
'type': 'PLMNidentity'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'CellTrafficTrace': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ConnectionEstablishmentIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'DeactivateTrace': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'DownlinkNASTransport': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'DownlinkNonUEAssociatedLPPaTransport': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'DownlinkS1cdma2000tunnelling': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'DownlinkUEAssociatedLPPaTransport': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RAB-IE-ContainerList': {'actual-parameters': [1,
'maxnoofE-RABs',
'{'],
'parameters': ['IEsSetParam'],
'type': 'ProtocolIE-ContainerList'},
'E-RAB-IE-ContainerPairList': {'actual-parameters': [1,
'maxnoofE-RABs',
'{'],
'parameters': ['IEsSetParam'],
'type': 'ProtocolIE-ContainerPairList'},
'E-RABAdmittedItem': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'name': 'dL-transportLayerAddress',
'optional': True,
'type': 'TransportLayerAddress'},
{'name': 'dL-gTP-TEID',
'optional': True,
'type': 'GTP-TEID'},
{'name': 'uL-TransportLayerAddress',
'optional': True,
'type': 'TransportLayerAddress'},
{'name': 'uL-GTP-TEID',
'optional': True,
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABAdmittedList': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABDataForwardingItem': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'dL-transportLayerAddress',
'optional': True,
'type': 'TransportLayerAddress'},
{'name': 'dL-gTP-TEID',
'optional': True,
'type': 'GTP-TEID'},
{'name': 'uL-TransportLayerAddress',
'optional': True,
'type': 'TransportLayerAddress'},
{'name': 'uL-GTP-TEID',
'optional': True,
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABFailedToResumeItemResumeReq': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'cause',
'type': 'Cause'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABFailedToResumeItemResumeRes': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'cause',
'type': 'Cause'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABFailedToResumeListResumeReq': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABFailedToResumeListResumeRes': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABFailedToSetupItemHOReqAck': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'cause',
'type': 'Cause'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABFailedtoSetupListHOReqAck': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABModificationConfirm': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABModificationIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABModifyItemBearerModConf': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABModifyItemBearerModRes': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABModifyListBearerModConf': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABModifyListBearerModRes': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABModifyRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABModifyResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABNotToBeModifiedItemBearerModInd': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'dL-GTP-TEID',
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABNotToBeModifiedListBearerModInd': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABReleaseCommand': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABReleaseIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABReleaseItemBearerRelComp': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABReleaseListBearerRelComp': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABReleaseResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABSetupItemBearerSURes': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABSetupItemCtxtSURes': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABSetupListBearerSURes': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABSetupListCtxtSURes': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABSetupRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABSetupResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'E-RABSubjecttoDataForwardingList': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABToBeModifiedItemBearerModInd': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'dL-GTP-TEID',
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABToBeModifiedItemBearerModReq': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'e-RABLevelQoSParameters',
'type': 'E-RABLevelQoSParameters'},
{'name': 'nAS-PDU',
'type': 'NAS-PDU'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABToBeModifiedListBearerModInd': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABToBeModifiedListBearerModReq': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABToBeSetupItemBearerSUReq': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'e-RABlevelQoSParameters',
'type': 'E-RABLevelQoSParameters'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'name': 'nAS-PDU',
'type': 'NAS-PDU'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABToBeSetupItemCtxtSUReq': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'e-RABlevelQoSParameters',
'type': 'E-RABLevelQoSParameters'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'name': 'nAS-PDU',
'optional': True,
'type': 'NAS-PDU'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABToBeSetupItemHOReq': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'name': 'e-RABlevelQosParameters',
'type': 'E-RABLevelQoSParameters'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABToBeSetupListBearerSUReq': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABToBeSetupListCtxtSUReq': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofE-RABs')],
'type': 'SEQUENCE '
'OF'},
'E-RABToBeSetupListHOReq': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABToBeSwitchedDLItem': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABToBeSwitchedDLList': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'E-RABToBeSwitchedULItem': {'members': [{'name': 'e-RAB-ID',
'type': 'E-RAB-ID'},
{'name': 'transportLayerAddress',
'type': 'TransportLayerAddress'},
{'name': 'gTP-TEID',
'type': 'GTP-TEID'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'E-RABToBeSwitchedULList': {'actual-parameters': ['{'],
'type': 'E-RAB-IE-ContainerList'},
'ENBCPRelocationIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ENBConfigurationTransfer': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ENBConfigurationUpdate': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ENBConfigurationUpdateAcknowledge': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ENBConfigurationUpdateFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ENBDirectInformationTransfer': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ENBStatusTransfer': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ErrorIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverCancel': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverCancelAcknowledge': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverCommand': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverNotify': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverPreparationFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverRequestAcknowledge': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'HandoverRequired': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'InitialContextSetupFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'InitialContextSetupRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'InitialContextSetupResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'InitialUEMessage': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'Inter-SystemInformationTransferType': {'members': [{'name': 'rIMTransfer',
'type': 'RIMTransfer'},
None],
'type': 'CHOICE'},
'KillRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'KillResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'LocationReport': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'LocationReportingControl': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'LocationReportingFailureIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'MMECPRelocationIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'MMEConfigurationTransfer': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'MMEConfigurationUpdate': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'MMEConfigurationUpdateAcknowledge': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'MMEConfigurationUpdateFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'MMEDirectInformationTransfer': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'MMEStatusTransfer': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'NASDeliveryIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'NASNonDeliveryIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'OverloadStart': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'OverloadStop': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'PWSFailureIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'PWSRestartIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'Paging': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'PathSwitchRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'PathSwitchRequestAcknowledge': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'PathSwitchRequestFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'PrivateMessage': {'members': [{'actual-parameters': ['{'],
'name': 'privateIEs',
'type': 'PrivateIE-Container'},
None],
'type': 'SEQUENCE'},
'ProtocolError-IE-ContainerList': {'actual-parameters': [1,
'maxnoofE-RABs',
'{'],
'parameters': ['IEsSetParam'],
'type': 'ProtocolIE-ContainerList'},
'RerouteNASRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'Reset': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ResetAcknowledge': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'ResetAll': {'type': 'ENUMERATED',
'values': [('reset-all', 0),
None]},
'ResetType': {'members': [{'name': 's1-Interface',
'type': 'ResetAll'},
{'name': 'partOfS1-Interface',
'type': 'UE-associatedLogicalS1-ConnectionListRes'},
None],
'type': 'CHOICE'},
'RetrieveUEInformation': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'S1SetupFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'S1SetupRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'S1SetupResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'TAIItem': {'members': [{'name': 'tAI',
'type': 'TAI'},
{'actual-parameters': ['{'],
'name': 'iE-Extensions',
'optional': True,
'type': 'ProtocolExtensionContainer'},
None],
'type': 'SEQUENCE'},
'TAIList': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1, 'maxnoofTAIs')],
'type': 'SEQUENCE OF'},
'TraceFailureIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'TraceStart': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UE-associatedLogicalS1-ConnectionListRes': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofIndividualS1ConnectionsToReset')],
'type': 'SEQUENCE '
'OF'},
'UE-associatedLogicalS1-ConnectionListResAck': {'element': {'actual-parameters': ['{'],
'type': 'ProtocolIE-SingleContainer'},
'size': [(1,
'maxnoofIndividualS1ConnectionsToReset')],
'type': 'SEQUENCE '
'OF'},
'UECapabilityInfoIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextModificationConfirm': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextModificationFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextModificationIndication': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextModificationRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextModificationResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextReleaseCommand': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextReleaseComplete': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextReleaseRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextResumeFailure': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextResumeRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextResumeResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextSuspendRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEContextSuspendResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UEInformationTransfer': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UERadioCapabilityMatchRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UERadioCapabilityMatchResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UplinkNASTransport': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UplinkNonUEAssociatedLPPaTransport': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UplinkS1cdma2000tunnelling': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'UplinkUEAssociatedLPPaTransport': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'WriteReplaceWarningRequest': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'},
'WriteReplaceWarningResponse': {'members': [{'actual-parameters': ['{'],
'name': 'protocolIEs',
'type': 'ProtocolIE-Container'},
None],
'type': 'SEQUENCE'}},
'values': {}},
'S1AP-PDU-Descriptions': {'extensibility-implied': False,
'imports': {'S1AP-CommonDataTypes': ['Criticality',
'ProcedureCode'],
'S1AP-Constants': ['id-CellTrafficTrace',
'id-ConnectionEstablishmentIndication',
'id-DeactivateTrace',
'id-DownlinkS1cdma2000tunnelling',
'id-E-RABModificationIndication',
'id-E-RABModify',
'id-E-RABRelease',
'id-E-RABReleaseIndication',
'id-E-RABSetup',
'id-ENBConfigurationUpdate',
'id-ErrorIndication',
'id-HandoverCancel',
'id-HandoverNotification',
'id-HandoverPreparation',
'id-HandoverResourceAllocation',
'id-InitialContextSetup',
'id-Kill',
'id-LocationReport',
'id-LocationReportingControl',
'id-LocationReportingFailureIndication',
'id-MMECPRelocationIndication',
'id-MMEConfigurationTransfer',
'id-MMEConfigurationUpdate',
'id-MMEDirectInformationTransfer',
'id-MMEStatusTransfer',
'id-NASDeliveryIndication',
'id-NASNonDeliveryIndication',
'id-OverloadStart',
'id-OverloadStop',
'id-PWSFailureIndication',
'id-PWSRestartIndication',
'id-Paging',
'id-PathSwitchRequest',
'id-PrivateMessage',
'id-RerouteNASRequest',
'id-Reset',
'id-RetrieveUEInformation',
'id-S1Setup',
'id-TraceFailureIndication',
'id-TraceStart',
'id-UECapabilityInfoIndication',
'id-UEContextModification',
'id-UEContextModificationIndication',
'id-UEContextRelease',
'id-UEContextReleaseRequest',
'id-UEContextResume',
'id-UEContextSuspend',
'id-UEInformationTransfer',
'id-UERadioCapabilityMatch',
'id-UplinkS1cdma2000tunnelling',
'id-WriteReplaceWarning',
'id-downlinkNASTransport',
'id-downlinkNonUEAssociatedLPPaTransport',
'id-downlinkUEAssociatedLPPaTransport',
'id-eNBCPRelocationIndication',
'id-eNBConfigurationTransfer',
'id-eNBDirectInformationTransfer',
'id-eNBStatusTransfer',
'id-initialUEMessage',
'id-uplinkNASTransport',
'id-uplinkNonUEAssociatedLPPaTransport',
'id-uplinkUEAssociatedLPPaTransport'],
'S1AP-PDU-Contents': ['CellTrafficTrace',
'ConnectionEstablishmentIndication',
'DeactivateTrace',
'DownlinkNASTransport',
'DownlinkNonUEAssociatedLPPaTransport',
'DownlinkS1cdma2000tunnelling',
'DownlinkUEAssociatedLPPaTransport',
'E-RABModificationConfirm',
'E-RABModificationIndication',
'E-RABModifyRequest',
'E-RABModifyResponse',
'E-RABReleaseCommand',
'E-RABReleaseIndication',
'E-RABReleaseResponse',
'E-RABSetupRequest',
'E-RABSetupResponse',
'ENBCPRelocationIndication',
'ENBConfigurationTransfer',
'ENBConfigurationUpdate',
'ENBConfigurationUpdateAcknowledge',
'ENBConfigurationUpdateFailure',
'ENBDirectInformationTransfer',
'ENBStatusTransfer',
'ErrorIndication',
'HandoverCancel',
'HandoverCancelAcknowledge',
'HandoverCommand',
'HandoverFailure',
'HandoverNotify',
'HandoverPreparationFailure',
'HandoverRequest',
'HandoverRequestAcknowledge',
'HandoverRequired',
'InitialContextSetupFailure',
'InitialContextSetupRequest',
'InitialContextSetupResponse',
'InitialUEMessage',
'KillRequest',
'KillResponse',
'LocationReport',
'LocationReportingControl',
'LocationReportingFailureIndication',
'MMECPRelocationIndication',
'MMEConfigurationTransfer',
'MMEConfigurationUpdate',
'MMEConfigurationUpdateAcknowledge',
'MMEConfigurationUpdateFailure',
'MMEDirectInformationTransfer',
'MMEStatusTransfer',
'NASDeliveryIndication',
'NASNonDeliveryIndication',
'OverloadStart',
'OverloadStop',
'PWSFailureIndication',
'PWSRestartIndication',
'Paging',
'PathSwitchRequest',
'PathSwitchRequestAcknowledge',
'PathSwitchRequestFailure',
'PrivateMessage',
'RerouteNASRequest',
'Reset',
'ResetAcknowledge',
'RetrieveUEInformation',
'S1SetupFailure',
'S1SetupRequest',
'S1SetupResponse',
'TraceFailureIndication',
'TraceStart',
'UECapabilityInfoIndication',
'UEContextModificationConfirm',
'UEContextModificationFailure',
'UEContextModificationIndication',
'UEContextModificationRequest',
'UEContextModificationResponse',
'UEContextReleaseCommand',
'UEContextReleaseComplete',
'UEContextReleaseRequest',
'UEContextResumeFailure',
'UEContextResumeRequest',
'UEContextResumeResponse',
'UEContextSuspendRequest',
'UEContextSuspendResponse',
'UEInformationTransfer',
'UERadioCapabilityMatchRequest',
'UERadioCapabilityMatchResponse',
'UplinkNASTransport',
'UplinkNonUEAssociatedLPPaTransport',
'UplinkS1cdma2000tunnelling',
'UplinkUEAssociatedLPPaTransport',
'WriteReplaceWarningRequest',
'WriteReplaceWarningResponse']},
'object-classes': {'S1AP-ELEMENTARY-PROCEDURE': {'members': [{'name': '&InitiatingMessage',
'type': 'OpenType'},
{'name': '&SuccessfulOutcome',
'type': 'OpenType'},
{'name': '&UnsuccessfulOutcome',
'type': 'OpenType'},
{'name': '&procedureCode',
'type': 'ProcedureCode'},
{'name': '&criticality',
'type': 'Criticality'}]}},
'object-sets': {'S1AP-ELEMENTARY-PROCEDURES': {'class': 'S1AP-ELEMENTARY-PROCEDURE',
'members': []},
'S1AP-ELEMENTARY-PROCEDURES-CLASS-1': {'class': 'S1AP-ELEMENTARY-PROCEDURE',
'members': ['handoverPreparation',
'handoverResourceAllocation',
'pathSwitchRequest',
'e-RABSetup',
'e-RABModify',
'e-RABRelease',
'initialContextSetup',
'handoverCancel',
'kill',
'reset',
's1Setup',
'uEContextModification',
'uEContextRelease',
'eNBConfigurationUpdate',
'mMEConfigurationUpdate',
'writeReplaceWarning',
',',
'.',
',',
'uERadioCapabilityMatch',
'e-RABModificationIndication',
'uEContextModificationIndication',
'uEContextSuspend',
'uEContextResume']},
'S1AP-ELEMENTARY-PROCEDURES-CLASS-2': {'class': 'S1AP-ELEMENTARY-PROCEDURE',
'members': ['handoverNotification',
'e-RABReleaseIndication',
'paging',
'downlinkNASTransport',
'initialUEMessage',
'uplinkNASTransport',
'errorIndication',
'nASNonDeliveryIndication',
'uEContextReleaseRequest',
'downlinkS1cdma2000tunnelling',
'uplinkS1cdma2000tunnelling',
'uECapabilityInfoIndication',
'eNBStatusTransfer',
'mMEStatusTransfer',
'deactivateTrace',
'traceStart',
'traceFailureIndication',
'cellTrafficTrace',
'locationReportingControl',
'locationReportingFailureIndication',
'locationReport',
'overloadStart',
'overloadStop',
'eNBDirectInformationTransfer',
'mMEDirectInformationTransfer',
'eNBConfigurationTransfer',
'mMEConfigurationTransfer',
'privateMessage',
',',
'.',
',',
'downlinkUEAssociatedLPPaTransport',
'uplinkUEAssociatedLPPaTransport',
'downlinkNonUEAssociatedLPPaTransport',
'uplinkNonUEAssociatedLPPaTransport',
'pWSRestartIndication',
'rerouteNASRequest',
'pWSFailureIndication',
'connectionEstablishmentIndication',
'nASDeliveryIndication',
'retrieveUEInformation',
'uEInformationTransfer',
'eNBCPRelocationIndication',
'mMECPRelocationIndication']}},
'tags': 'AUTOMATIC',
'types': {'InitiatingMessage': {'members': [{'name': 'procedureCode',
'table': {'type': 'S1AP-ELEMENTARY-PROCEDURES'},
'type': 'S1AP-ELEMENTARY-PROCEDURE.&procedureCode'},
{'name': 'criticality',
'table': ['S1AP-ELEMENTARY-PROCEDURES',
['procedureCode']],
'type': 'S1AP-ELEMENTARY-PROCEDURE.&criticality'},
{'name': 'value',
'table': ['S1AP-ELEMENTARY-PROCEDURES',
['procedureCode']],
'type': 'S1AP-ELEMENTARY-PROCEDURE.&InitiatingMessage'}],
'type': 'SEQUENCE'},
'S1AP-PDU': {'members': [{'name': 'initiatingMessage',
'type': 'InitiatingMessage'},
{'name': 'successfulOutcome',
'type': 'SuccessfulOutcome'},
{'name': 'unsuccessfulOutcome',
'type': 'UnsuccessfulOutcome'},
None],
'type': 'CHOICE'},
'SuccessfulOutcome': {'members': [{'name': 'procedureCode',
'table': {'type': 'S1AP-ELEMENTARY-PROCEDURES'},
'type': 'S1AP-ELEMENTARY-PROCEDURE.&procedureCode'},
{'name': 'criticality',
'table': ['S1AP-ELEMENTARY-PROCEDURES',
['procedureCode']],
'type': 'S1AP-ELEMENTARY-PROCEDURE.&criticality'},
{'name': 'value',
'table': ['S1AP-ELEMENTARY-PROCEDURES',
['procedureCode']],
'type': 'S1AP-ELEMENTARY-PROCEDURE.&SuccessfulOutcome'}],
'type': 'SEQUENCE'},
'UnsuccessfulOutcome': {'members': [{'name': 'procedureCode',
'table': {'type': 'S1AP-ELEMENTARY-PROCEDURES'},
'type': 'S1AP-ELEMENTARY-PROCEDURE.&procedureCode'},
{'name': 'criticality',
'table': ['S1AP-ELEMENTARY-PROCEDURES',
['procedureCode']],
'type': 'S1AP-ELEMENTARY-PROCEDURE.&criticality'},
{'name': 'value',
'table': ['S1AP-ELEMENTARY-PROCEDURES',
['procedureCode']],
'type': 'S1AP-ELEMENTARY-PROCEDURE.&UnsuccessfulOutcome'}],
'type': 'SEQUENCE'}},
'values': {'cellTrafficTrace': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'connectionEstablishmentIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'deactivateTrace': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'downlinkNASTransport': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'downlinkNonUEAssociatedLPPaTransport': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'downlinkS1cdma2000tunnelling': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'downlinkUEAssociatedLPPaTransport': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'e-RABModificationIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'e-RABModify': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'e-RABRelease': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'e-RABReleaseIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'e-RABSetup': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'eNBCPRelocationIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'eNBConfigurationTransfer': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'eNBConfigurationUpdate': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'eNBDirectInformationTransfer': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'eNBStatusTransfer': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'errorIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'handoverCancel': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'handoverNotification': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'handoverPreparation': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'handoverResourceAllocation': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'initialContextSetup': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'initialUEMessage': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'kill': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'locationReport': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'locationReportingControl': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'locationReportingFailureIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'mMECPRelocationIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'mMEConfigurationTransfer': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'mMEConfigurationUpdate': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'mMEDirectInformationTransfer': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'mMEStatusTransfer': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'nASDeliveryIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'nASNonDeliveryIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'overloadStart': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'overloadStop': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'pWSFailureIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'pWSRestartIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'paging': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'pathSwitchRequest': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'privateMessage': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'rerouteNASRequest': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'reset': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'retrieveUEInformation': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
's1Setup': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'traceFailureIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'traceStart': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uECapabilityInfoIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uEContextModification': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uEContextModificationIndication': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uEContextRelease': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uEContextReleaseRequest': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uEContextResume': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uEContextSuspend': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uEInformationTransfer': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uERadioCapabilityMatch': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uplinkNASTransport': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uplinkNonUEAssociatedLPPaTransport': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uplinkS1cdma2000tunnelling': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'uplinkUEAssociatedLPPaTransport': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None},
'writeReplaceWarning': {'type': 'S1AP-ELEMENTARY-PROCEDURE',
'value': None}}}}
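# --- Illustrative sketch (not part of the original data above). ---
# The nested dict above has the shape asn1tools produces when parsing ASN.1
# sources: one entry per module, each with 'types', 'values', 'imports',
# 'object-classes', 'object-sets' and 'tags'.  Regenerating such a tree is a
# one-liner; the file name 's1ap.asn' is an assumption.
if __name__ == '__main__':
    from pprint import pprint
    import asn1tools
    parsed = asn1tools.parse_files(['s1ap.asn'])
    # Spot-check one module: list the PDU-level types it defines.
    pprint(sorted(parsed['S1AP-PDU-Descriptions']['types']))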
|
danielvdao/facebookMacBot
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/mbcharsetprober.py
|
2923
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
class MultiByteCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mDistributionAnalyzer = None
self._mCodingSM = None
self._mLastChar = [0, 0]
def reset(self):
CharSetProber.reset(self)
if self._mCodingSM:
self._mCodingSM.reset()
if self._mDistributionAnalyzer:
self._mDistributionAnalyzer.reset()
self._mLastChar = [0, 0]
def get_charset_name(self):
pass
def feed(self, aBuf):
aLen = len(aBuf)
for i in range(0, aLen):
codingState = self._mCodingSM.next_state(aBuf[i])
if codingState == constants.eError:
if constants._debug:
sys.stderr.write(self.get_charset_name()
+ ' prober hit error at byte ' + str(i)
+ '\n')
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
charLen = self._mCodingSM.get_current_charlen()
if i == 0:
self._mLastChar[1] = aBuf[0]
self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
else:
self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
charLen)
self._mLastChar[0] = aBuf[aLen - 1]
if self.get_state() == constants.eDetecting:
if (self._mDistributionAnalyzer.got_enough_data() and
(self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
return self._mDistributionAnalyzer.get_confidence()
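# --- Usage sketch (illustrative, not part of the original module). ---
# MultiByteCharSetProber is effectively abstract: a concrete prober assigns a
# coding state machine and a distribution analyzer, then bytes are pushed
# through feed().  The SJISProber import follows the chardet 2.x package
# layout; treat the module path as an assumption.
if __name__ == '__main__':
    from chardet.sjisprober import SJISProber
    prober = SJISProber()
    prober.feed(b'\x82\xa0\x82\xa2\x82\xa4')  # three hiragana in Shift_JIS
    print(prober.get_charset_name(), prober.get_state(), prober.get_confidence())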
|
cosmoharrigan/opencog
|
refs/heads/master
|
opencog/python/blending/src/chooser/base_chooser.py
|
22
|
from abc import ABCMeta, abstractmethod
from blending.util.blending_config import BlendConfig
from blending.util.blending_error import blending_status
__author__ = 'DongMin Kim'
class BaseChooser(object):
"""Abstract class to provide 'atom_choose()' interface.
The blender will call the method 'atom_choose()', and this method will call
the method 'atom_choose_impl()' in the derived class.
Attributes:
a: An instance of AtomSpace.
        last_status: The last status of the class.
ret: The chosen atoms.
:type a: opencog.atomspace.AtomSpace
:type last_status: int
:type ret: list[Atom]
"""
__metaclass__ = ABCMeta
def __init__(self, a):
self.a = a
self.last_status = blending_status.UNKNOWN_ERROR
self.ret = []
self.make_default_config()
def make_default_config(self):
"""Initialize a default config for this class."""
BlendConfig().update(self.a, "choose-atom-type", "Node")
BlendConfig().update(self.a, "choose-least-count", "2")
@abstractmethod
def atom_choose_impl(self, focus_atoms, config_base):
"""Abstract factory method for derived class.
Args:
focus_atoms: The atoms to blend.
config_base: A Node to save custom config.
:param focus_atoms: list[Atom]
:param config_base: Atom
Raises:
NotImplementedError: Someone tried to call the abstract method.
"""
raise NotImplementedError("Please implement this method.")
def atom_choose(self, focus_atoms, config_base):
"""Wrapper method to control exception in derived class.
Args:
focus_atoms: The atoms to blend.
config_base: A Node to save custom config.
:param focus_atoms: list[Atom]
:param config_base: Atom
Returns:
The chosen atom(s).
Example:
[(ConceptNode "atom-0"),
(ConceptNode "atom-1"),
(ConceptNode "atom-2"),
...]
        If the list is empty, it means the atom chooser couldn't find the
        proper atom(s) with the given condition.
:rtype : list[Atom]
Raises:
UserWarning: An error occurred in choosing the atoms.
"""
self.last_status = blending_status.IN_PROCESS
self.atom_choose_impl(focus_atoms, config_base)
if self.last_status == blending_status.IN_PROCESS:
self.last_status = blending_status.SUCCESS
else:
self.ret = []
raise UserWarning('ERROR_IN_ATOMS_CHOOSER')
return self.ret
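# --- Illustrative sketch (not part of the original module). ---
# A minimal, hypothetical concrete chooser showing the template-method
# contract above: atom_choose_impl() fills self.ret and leaves last_status
# at IN_PROCESS on success; any other status makes atom_choose() raise.
class PassThroughChooser(BaseChooser):
    """Hypothetical chooser that accepts every focus atom unchanged."""
    def atom_choose_impl(self, focus_atoms, config_base):
        if not focus_atoms:
            # Any status other than IN_PROCESS triggers the wrapper's error.
            self.last_status = blending_status.UNKNOWN_ERROR
            return
        self.ret = list(focus_atoms)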
|
davidfather/TizenRT
|
refs/heads/master
|
external/iotivity/iotivity_1.2-rel/extlibs/gtest/gtest-1.7.0/test/gtest_shuffle_test.py
|
3023
|
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
return '--gtest_shuffle'
def RandomSeedFlag(n):
return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
"""Runs the test program and returns a list of test lists.
Args:
extra_env: a map from environment variables to their values
args: command line flags to pass to gtest_shuffle_test_
Returns:
A list where the i-th element is the list of tests run in the i-th
test iteration.
"""
test_iterations = []
for line in RunAndReturnOutput(extra_env, args).split('\n'):
if line.startswith('----'):
tests = []
test_iterations.append(tests)
elif line.strip():
tests.append(line.strip()) # 'TestCaseName.TestName'
return test_iterations
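# Example (illustrative): if the program prints
#   ----
#   A.One
#   A.Two
#   ----
#   B.One
# GetTestsForAllIterations() returns [['A.One', 'A.Two'], ['B.One']].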
def GetTestCases(tests):
"""Returns a list of test cases in the given full test names.
Args:
tests: a list of full test names
Returns:
A list of test cases from 'tests', in their original order.
    Duplicates are removed; each test case appears once, in first-seen order.
"""
test_cases = []
for test in tests:
test_case = test.split('.')[0]
if not test_case in test_cases:
test_cases.append(test_case)
return test_cases
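# Example (illustrative): GetTestCases(['A.One', 'A.Two', 'B.One', 'A.Three'])
# returns ['A', 'B'] -- each test case is kept once, in first-seen order.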
def CalculateTestLists():
"""Calculates the list of tests run under different flags."""
if not ALL_TESTS:
ALL_TESTS.extend(
GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[])[0])
if not SHUFFLED_ALL_TESTS:
SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
{}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_ACTIVE_TESTS:
SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_FILTERED_TESTS:
SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
"""Tests test shuffling."""
def setUp(self):
CalculateTestLists()
def testShufflePreservesNumberOfTests(self):
self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
SHUFFLED_FILTERED_TESTS)
self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
SHUFFLED_SHARDED_TESTS)
def testShuffleChangesTestCaseOrder(self):
self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
GetTestCases(SHUFFLED_ALL_TESTS))
self.assert_(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
GetTestCases(SHUFFLED_ACTIVE_TESTS))
self.assert_(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
GetTestCases(SHUFFLED_FILTERED_TESTS))
self.assert_(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
GetTestCases(SHUFFLED_SHARDED_TESTS))
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
'%s appears more than once' % (test,))
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
for test in SHARDED_TESTS:
self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
self.assert_(not non_death_test_found,
'%s appears after a non-death test' % (test,))
else:
non_death_test_found = True
def _VerifyTestCasesDoNotInterleave(self, tests):
test_cases = []
for test in tests:
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
self.assertEqual(1, test_cases.count(test_case),
'Test case %s is not grouped together in %s' %
(test_case, tests))
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
def testShuffleRestoresOrderAfterEachIteration(self):
# Get the test lists in all 3 iterations, using random seed 1, 2,
# and 3 respectively. Google Test picks a different seed in each
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
[tests_with_seed1] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
[tests_with_seed2] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(2)])
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
[tests_with_seed3] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(3)])
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
self.assert_(tests_in_iteration1 != tests_in_iteration2,
tests_in_iteration1)
self.assert_(tests_in_iteration1 != tests_in_iteration3,
tests_in_iteration1)
self.assert_(tests_in_iteration2 != tests_in_iteration3,
tests_in_iteration2)
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
[tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '0'},
[ShuffleFlag(), RandomSeedFlag(1)])
[tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(20)])
[tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '2'},
[ShuffleFlag(), RandomSeedFlag(25)])
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
sorted_active_tests.extend(ACTIVE_TESTS)
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
gtest_test_utils.Main()
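# A sketch of the sharded, shuffled scenario the last test exercises, using
# Google Test's standard sharding environment variables and flags (the test
# binary name here is hypothetical):
#   GTEST_TOTAL_SHARDS=3 GTEST_SHARD_INDEX=0 ./foo_test --gtest_shuffle --gtest_random_seed=1
#   GTEST_TOTAL_SHARDS=3 GTEST_SHARD_INDEX=1 ./foo_test --gtest_shuffle --gtest_random_seed=20
#   GTEST_TOTAL_SHARDS=3 GTEST_SHARD_INDEX=2 ./foo_test --gtest_shuffle --gtest_random_seed=25
# Regardless of each shard's seed, the three runs together must cover exactly
# the set of active tests.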
|
tanmaykm/edx-platform
|
refs/heads/master
|
lms/djangoapps/lms_xblock/models.py
|
63
|
"""
Models used by LMS XBlock infrastructure.
Includes:
XBlockAsidesConfig: A ConfigurationModel for managing how XBlockAsides are
rendered in the LMS.
"""
from django.db.models import TextField
from config_models.models import ConfigurationModel
from xblock.core import XBlockAside
class XBlockAsidesConfig(ConfigurationModel):
"""
Configuration for XBlockAsides.
"""
class Meta(ConfigurationModel.Meta):
app_label = "lms_xblock"
disabled_blocks = TextField(
default="about course_info static_tab",
help_text="Space-separated list of XBlocks on which XBlockAsides should never render."
)
@classmethod
def possible_asides(cls):
"""
Return a list of all asides that are enabled across all XBlocks.
"""
return [aside_type for aside_type, __ in XBlockAside.load_classes()]
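# A minimal usage sketch (hedged: assumes config_models' usual `current()` and
# `enabled` accessors; `block` is a hypothetical XBlock instance):
#
#   config = XBlockAsidesConfig.current()
#   if config.enabled and block.category not in config.disabled_blocks.split():
#       asides = XBlockAsidesConfig.possible_asides()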
|
TomWerner/AlumniMentoring
|
refs/heads/master
|
mentoring/models.py
|
1
|
import datetime
from django.contrib.auth.models import User
from django.db import models
from mentoring.util import generate_confirmation_token
genders = (
('a', 'Agender'),
('c', 'Cisgender'),
('m', 'Male'),
('n', 'Non-binary'),
('t', 'Transgender'),
('f', 'Female'),
('l', 'Another gender not listed'),
('p', 'Prefer not to answer')
)
feedback_givers = (('1', 'Mentor'), ('2', 'Mentee'))
degree_options = (('ba', 'Bachelor of Arts'),
('bs', 'Bachelor of Sciences'),
('m', 'Masters'),
('d', 'Ph.D'),
('pd', 'MD Ph.D'),
('md', 'MD'),
('jd', 'JD'),
('mp', 'MPhil')
)
mentoring_categories = (
('1', 'Choice of Major'),
('2', 'Academia or Industry'),
('3', 'Resume/CV Critique'),
('4', 'Parenting vs Career'),
('5', 'Work life balance'),
('6', 'Life after Iowa'),
('7', 'Study Abroad'),
('8', 'International Experience'),
('9', 'Fellowships'),
('10', 'Goals'),
('11', 'Shadowing Opportunities'),
('12', 'Grad school applications'),
('13', 'Med school applications'),
('14', 'Job/Internship search'),
('15', 'Networking'),
('16', 'Advanced degrees'),
('17', 'Workplace issues'),
('18', 'Personal Experiences'),
('19', 'Gender specific'),
)
communication_options = (
('1', 'In Person'),
('2', 'Phone'),
('3', 'Email'),
('4', 'Other'),
)
class Mentor(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
gender = models.CharField(max_length=50, null=True)
approved = models.BooleanField(default=False)
confirmed = models.BooleanField(default=False)
confirmation_token = models.CharField(max_length=50, null=True, blank=True)
active_until = models.DateTimeField(null=True, blank=True)
def get_employment(self):
return self.mentoremployment_set.all()
@staticmethod
def get_education_headers_as_tuple():
return 'School', 'Major 1', 'Major 2', 'Minor 1', 'Minor 2', 'Degree', 'Graduation Year', 'Is Latest Degree'
def get_contact_information(self):
return self.mentorcontactinformation
def get_education(self):
return self.mentoreducation_set.all()
def primary_email(self):
return self.mentorcontactinformation.primary_email
def full_name(self):
return self.first_name + " " + self.last_name
def __str__(self):
approved = "Approved" if self.approved else "Not Approved"
return self.full_name() + "(" + approved + ")"
def email(self):
return self.mentorcontactinformation.email()
def phone_number(self):
return self.mentorcontactinformation.phone_number()
def mailing_address(self):
return self.mentorcontactinformation.mailing_address()
def web_contacts(self):
return self.mentorcontactinformation.web_contacts()
def education(self):
return "\n\n".join([x.display_string() for x in self.mentoreducation_set.all()])
def employment(self):
return "\n\n".join([x.display_string() for x in self.mentoremployment_set.all()])
def preferences(self):
return self.mentorpreference.display_string()
class Mentee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
gender = models.CharField(max_length=50, null=True)
active = models.BooleanField(default=True)
approved = models.BooleanField(default=False)
confirmed = models.BooleanField(default=False)
confirmation_token = models.CharField(max_length=50, null=True, blank=True)
active_until = models.DateTimeField(null=True, blank=True)
def get_employment(self):
return []
@staticmethod
def get_education_headers_as_tuple():
return 'School', 'Major 1', 'Major 2', 'Minor 1', 'Minor 2', 'Graduation Year'
def get_contact_information(self):
return self.menteecontactinformation
def get_education(self):
return self.menteeeducation_set.all()
def full_name(self):
return self.first_name + " " + self.last_name
def __str__(self):
approved = "Approved" if self.approved else "Not Approved"
return self.full_name() + "(" + approved + ")"
def primary_email(self):
return self.menteecontactinformation.primary_email
def email(self):
return self.menteecontactinformation.email()
def phone_number(self):
return self.menteecontactinformation.phone_number()
def mailing_address(self):
return self.menteecontactinformation.mailing_address()
def web_contacts(self):
return self.menteecontactinformation.web_contacts()
def education(self):
return "\n\n".join([x.display_string() for x in self.menteeeducation_set.all()])
def employment(self):
return "This application does not record mentee employment at this time."
def preferences(self):
return self.menteepreference.display_string()
def has_no_mentor(self):
pairs = self.mentormenteepairs_set.all()
return len([x for x in pairs if x.is_active()]) == 0
def score_mentor(self, mentor):
score = 0
score += self._score_mentor_preferences(mentor)
score += self._score_mentor_education(mentor)
return score
def _score_mentor_preferences(self, mentor):
if self.menteepreference is None:
return 0
return self.menteepreference.score_mentor(mentor)
def _score_mentor_education(self, mentor):
return sum([x.score_mentor(mentor) for x in self.menteeeducation_set.all()])
class MentorContactInformation(models.Model):
mentor = models.OneToOneField(Mentor, on_delete=models.CASCADE)
primary_phone = models.CharField(max_length=20)
secondary_phone = models.CharField(max_length=20, null=True, blank=True)
primary_email = models.EmailField()
secondary_email = models.EmailField(null=True, blank=True)
linkedin_url = models.CharField(max_length=100, null=True, blank=True)
facebook_url = models.CharField(max_length=100, null=True, blank=True)
personal_url = models.CharField(max_length=100, null=True, blank=True)
street_address = models.CharField(max_length=100)
city = models.CharField(max_length=100)
state = models.CharField(max_length=30)
def email(self):
result = self.primary_email
if self.secondary_email is not None and len(self.secondary_email) > 0:
result += "\n(" + self.secondary_email + ")"
return result
def phone_number(self):
result = self.primary_phone
if self.secondary_phone is not None and len(self.secondary_phone) > 0:
result += "\n(" + self.secondary_phone + ")"
return result
def mailing_address(self):
return self.street_address + "\n" + self.city + " " + self.state
def web_contacts(self):
return "LinkedIn: " + self.linkedin_url + "\n" + \
"Facebook:" + self.facebook_url + "\n" + \
"Personal: " + self.personal_url
class MenteeContactInformation(models.Model):
mentee = models.OneToOneField(Mentee, on_delete=models.CASCADE)
primary_phone = models.CharField(max_length=20)
secondary_phone = models.CharField(max_length=20, null=True, blank=True)
primary_email = models.EmailField()
secondary_email = models.EmailField(null=True, blank=True)
linkedin_url = models.CharField(max_length=100, null=True, blank=True)
facebook_url = models.CharField(max_length=100, null=True, blank=True)
personal_url = models.CharField(max_length=100, null=True, blank=True)
street_address = models.CharField(max_length=100)
city = models.CharField(max_length=100)
state = models.CharField(max_length=30)
def email(self):
result = self.primary_email
if self.secondary_email is not None and len(self.secondary_email) > 0:
result += "\n(" + self.secondary_email + ")"
return result
def phone_number(self):
result = self.primary_phone
if self.secondary_phone is not None and len(self.secondary_phone) > 0:
result += "\n(" + self.secondary_phone + ")"
return result
def mailing_address(self):
return self.street_address + "\n" + self.city + " " + self.state
def web_contacts(self):
return "LinkedIn: " + self.linkedin_url + "\n" + \
"Facebook:" + self.facebook_url + "\n" + \
"Personal: " + self.personal_url
class MentorEducation(models.Model):
mentor = models.ForeignKey(Mentor)
school = models.CharField(max_length=100)
major1 = models.CharField(max_length=100)
major2 = models.CharField(max_length=100, blank=True, null=True)
minor1 = models.CharField(max_length=100, blank=True, null=True)
minor2 = models.CharField(max_length=100, blank=True, null=True)
degree = models.CharField(max_length=3, choices=degree_options)
graduation_year = models.DateField(null=True, blank=True)
def display_string(self):
grad_year = str(self.graduation_year.strftime("%B %Y")) if self.graduation_year else 'Year Unknown'
return self.school + \
" (" + self.get_degree_display() + ", " + grad_year + ")\n" + \
"Major(s): " + ", ".join(x for x in [self.major1, self.major2] if x is not None) + "\n" + \
"Minor(s): " + ", ".join(x for x in [self.minor1, self.minor2] if x is not None) + "\n"
def is_latest_degree(self):
educations = self.mentor.mentoreducation_set.all().order_by('graduation_year')
return self == list(educations)[-1]
def data_as_tuple(self):
return self.school, self.major1, self.major2, self.minor1, self.minor2, self.get_degree_display(), \
str(self.graduation_year.strftime("%B %Y")) if self.graduation_year else 'Year Unknown', \
self.is_latest_degree()
class MenteeEducation(models.Model):
mentee = models.ForeignKey(Mentee)
school = models.CharField(max_length=100)
major1 = models.CharField(max_length=100)
major2 = models.CharField(max_length=100, blank=True, null=True)
minor1 = models.CharField(max_length=100, blank=True, null=True)
minor2 = models.CharField(max_length=100, blank=True, null=True)
graduation_year = models.DateField()
def data_as_tuple(self):
return self.school, self.major1, self.major2, self.minor1, self.minor2,\
str(self.graduation_year.strftime("%B %Y")) if self.graduation_year else 'Not Provided'
def display_string(self):
grad_year = str(self.graduation_year.strftime("%B %Y")) if self.graduation_year else 'Not Provided'
return self.school + \
" (" + grad_year + ")\n" + \
"Major(s): " + ", ".join(x for x in [self.major1, self.major2] if x is not None) + "\n" + \
"Minor(s): " + ", ".join(x for x in [self.minor1, self.minor2] if x is not None) + "\n"
def score_mentor(self, mentor):
score = 0
for education in mentor.mentoreducation_set.all():
majors = [education.major1, education.major2]
minors = [education.minor1, education.minor2]
for major in [self.major1, self.major2]:
if major and major in majors:
score += 100
if major and major in minors:
score += 50
            for minor in [self.minor1, self.minor2]:
                if minor and minor in minors:
                    score += 50
                # a mentee minor matching a mentor major counts for less
                if minor and minor in majors:
                    score += 25
return score
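    # Worked example of the weights above (hypothetical records): a mentee
    # education with major1='Biology' and minor1='Chemistry' scores a mentor
    # education having major1='Biology' (+100) and minor1='Chemistry' (+50)
    # at 150 in total.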
class MentorEmployment(models.Model):
mentor = models.ForeignKey(Mentor)
company = models.CharField(max_length=100)
title = models.CharField(max_length=100)
description = models.TextField()
def display_string(self):
return self.title + " at " + self.company
def data_as_tuple(self):
        return self.company, self.title, self.description
class MentorMenteePairs(models.Model):
mentor = models.ForeignKey(Mentor)
mentee = models.ForeignKey(Mentee)
start_date = models.DateField()
end_date = models.DateField(null=True, blank=True)
def is_active(self):
return self.end_date is None or self.end_date > datetime.date.today()
def __str__(self):
return str(self.mentor) + " and " + str(self.mentee) + " (" + str(self.start_date) + " to " + str(
self.end_date) + ")"
def has_no_filled_out_feedback(self):
return (self.feedback_set.count() == 0 and not self.is_active()) or \
(self.feedback_set.count() > 0 and
len([feedback for feedback in self.feedback_set.all() if feedback.filled_out()]) == 0)
def get_mentor_feedback(self):
return self.feedback_set.filter(giver='1').first()
def get_mentee_feedback(self):
return self.feedback_set.filter(giver='2').first()
class Feedback(models.Model):
pairing = models.ForeignKey(MentorMenteePairs, on_delete=models.CASCADE)
token = models.CharField(max_length=50, null=True, blank=True)
giver = models.CharField(choices=feedback_givers, max_length=1)
went_well = models.TextField(max_length=1000, null=True, blank=True)
went_poorly = models.TextField(max_length=1000, null=True, blank=True)
other = models.TextField(max_length=1000, null=True, blank=True)
def giver_name_with_role(self):
return self.giver_name() + " (" + self.get_giver_display() + ")"
def get_email_recipient(self):
if self.giver == '1':
return self.pairing.mentor.primary_email()
else:
return self.pairing.mentee.primary_email()
def giver_name(self):
if self.giver == '1':
return self.pairing.mentor.full_name()
else:
return self.pairing.mentee.full_name()
def filled_out(self):
return self.went_well or self.went_poorly or self.other
@staticmethod
def create_feedback(mentor_mentee_pair, mentee=True):
if mentee:
mentee_token = generate_confirmation_token(mentor_mentee_pair.mentee.primary_email())
result = Feedback(pairing_id=mentor_mentee_pair.id, token=mentee_token, giver='2')
else:
mentor_token = generate_confirmation_token(mentor_mentee_pair.mentor.primary_email())
result = Feedback(pairing_id=mentor_mentee_pair.id, token=mentor_token, giver='1')
result.save()
return result
class MenteePreference(models.Model):
mentee = models.OneToOneField(Mentee, on_delete=models.CASCADE)
first_choice = models.CharField(max_length=2, choices=mentoring_categories)
second_choice = models.CharField(max_length=2, choices=mentoring_categories, null=True, blank=True)
third_choice = models.CharField(max_length=2, choices=mentoring_categories, null=True, blank=True)
preferred_communication = models.CharField(max_length=1, choices=communication_options)
def display_string(self):
choices = []
if self.first_choice is not None:
choices.append(self.get_first_choice_display())
if self.second_choice is not None:
choices.append(self.get_second_choice_display())
if self.third_choice is not None:
choices.append(self.get_third_choice_display())
return "Would like to get *" + self.get_preferred_communication_display() + "* advice on\n" + \
"\n".join(str(num + 1) + ".) " + choice for num, choice in zip(range(3), choices))
def score_mentor(self, mentor):
mentor_pref = mentor.mentorpreference
score = 0
score += (self.preferred_communication == mentor_pref.preferred_communication) * 100
my_choices = [self.first_choice, self.second_choice, self.third_choice]
their_choices = [mentor_pref.first_choice, mentor_pref.second_choice, mentor_pref.third_choice]
for i, my_choice in enumerate(my_choices):
if my_choice and my_choice in their_choices:
                score += int(100 / (i + 1))  # 100 for first choice, 50 for second, 33 for third
return score
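    # Worked example (hypothetical preferences): a matching
    # preferred_communication contributes 100; if the mentee's first choice
    # also appears among the mentor's three choices, int(100 / 1) adds another
    # 100, for 200 in total. A second- or third-choice match would add 50 or
    # 33 instead.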
class MentorPreference(models.Model):
mentor = models.OneToOneField(Mentor, on_delete=models.CASCADE)
first_choice = models.CharField(max_length=2, choices=mentoring_categories)
second_choice = models.CharField(max_length=2, choices=mentoring_categories, null=True, blank=True)
third_choice = models.CharField(max_length=2, choices=mentoring_categories, null=True, blank=True)
preferred_communication = models.CharField(max_length=1, choices=communication_options)
def display_string(self):
choices = []
if self.first_choice is not None:
choices.append(self.get_first_choice_display())
if self.second_choice is not None:
choices.append(self.get_second_choice_display())
if self.third_choice is not None:
choices.append(self.get_third_choice_display())
return "Would like to give *" + self.get_preferred_communication_display() + "* advice on\n" + \
"\n".join(str(num + 1) + ".) " + choice for num, choice in zip(range(3), choices))
|
patcon/open-cabinet
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/core/management/commands/dumpdata.py
|
305
|
from collections import OrderedDict
from django.apps import apps
from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, router
class Command(BaseCommand):
help = ("Output the contents of the database as a fixture of the given "
"format (using each model's default manager unless --all is "
"specified).")
def add_arguments(self, parser):
parser.add_argument('args', metavar='app_label[.ModelName]', nargs='*',
help='Restricts dumped data to the specified app_label or app_label.ModelName.')
parser.add_argument('--format', default='json', dest='format',
help='Specifies the output serialization format for fixtures.')
parser.add_argument('--indent', default=None, dest='indent', type=int,
help='Specifies the indent level to use when pretty-printing output.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Nominates a specific database to dump fixtures from. '
'Defaults to the "default" database.')
parser.add_argument('-e', '--exclude', dest='exclude', action='append', default=[],
help='An app_label or app_label.ModelName to exclude '
'(use multiple --exclude to exclude multiple apps/models).')
parser.add_argument('--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
help='Use natural foreign keys if they are available.')
parser.add_argument('--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
help='Use natural primary keys if they are available.')
parser.add_argument('-a', '--all', action='store_true', dest='use_base_manager', default=False,
help="Use Django's base manager to dump all models stored in the database, "
"including those that would otherwise be filtered or modified by a custom manager.")
parser.add_argument('--pks', dest='primary_keys',
help="Only dump objects with given primary keys. "
"Accepts a comma separated list of keys. "
"This option will only work when you specify one model.")
parser.add_argument('-o', '--output', default=None, dest='output',
help='Specifies file to which the output is written.')
def handle(self, *app_labels, **options):
format = options.get('format')
indent = options.get('indent')
using = options.get('database')
excludes = options.get('exclude')
output = options.get('output')
show_traceback = options.get('traceback')
use_natural_foreign_keys = options.get('use_natural_foreign_keys')
use_natural_primary_keys = options.get('use_natural_primary_keys')
use_base_manager = options.get('use_base_manager')
pks = options.get('primary_keys')
if pks:
primary_keys = pks.split(',')
else:
primary_keys = []
excluded_apps = set()
excluded_models = set()
for exclude in excludes:
if '.' in exclude:
try:
model = apps.get_model(exclude)
except LookupError:
raise CommandError('Unknown model in excludes: %s' % exclude)
excluded_models.add(model)
else:
try:
app_config = apps.get_app_config(exclude)
except LookupError as e:
raise CommandError(str(e))
excluded_apps.add(app_config)
if len(app_labels) == 0:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict((app_config, None)
for app_config in apps.get_app_configs()
if app_config.models_module is not None and app_config not in excluded_apps)
else:
if len(app_labels) > 1 and primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict()
for label in app_labels:
try:
app_label, model_label = label.split('.')
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
try:
model = app_config.get_model(model_label)
except LookupError:
raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
app_list_value = app_list.setdefault(app_config, [])
                    # We may have previously seen an "all-models" request for
                    # this app (no model qualifier was given). In this case
                    # there is no need to add specific models to the list.
if app_list_value is not None:
if model not in app_list_value:
app_list_value.append(model)
except ValueError:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
# This is just an app - no model qualifier
app_label = label
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
app_list[app_config] = None
# Check that the serialization format exists; this is a shortcut to
# avoid collating all the objects and _then_ failing.
if format not in serializers.get_public_serializer_formats():
try:
serializers.get_serializer(format)
except serializers.SerializerDoesNotExist:
pass
raise CommandError("Unknown serialization format: %s" % format)
def get_objects(count_only=False):
"""
Collate the objects to be serialized. If count_only is True, just
count the number of objects to be serialized.
"""
for model in serializers.sort_dependencies(app_list.items()):
if model in excluded_models:
continue
if not model._meta.proxy and router.allow_migrate_model(using, model):
if use_base_manager:
objects = model._base_manager
else:
objects = model._default_manager
queryset = objects.using(using).order_by(model._meta.pk.name)
if primary_keys:
queryset = queryset.filter(pk__in=primary_keys)
if count_only:
yield queryset.order_by().count()
else:
for obj in queryset.iterator():
yield obj
try:
self.stdout.ending = None
progress_output = None
object_count = 0
# If dumpdata is outputting to stdout, there is no way to display progress
if (output and self.stdout.isatty() and options['verbosity'] > 0):
progress_output = self.stdout
object_count = sum(get_objects(count_only=True))
stream = open(output, 'w') if output else None
try:
serializers.serialize(format, get_objects(), indent=indent,
use_natural_foreign_keys=use_natural_foreign_keys,
use_natural_primary_keys=use_natural_primary_keys,
stream=stream or self.stdout, progress_output=progress_output,
object_count=object_count)
finally:
if stream:
stream.close()
except Exception as e:
if show_traceback:
raise
raise CommandError("Unable to serialize database: %s" % e)
|
humdings/IbPy
|
refs/heads/master
|
ib/opt/connection.py
|
9
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# Defines the Connection class to encapsulate a connection to IB TWS.
#
# Connection instances defer failed attribute lookup to their receiver
# and sender member objects. This makes it easy to access the
# receiver to register functions:
#
# >>> con = ibConnection()
# >>> con.register(my_callable)
#
# And it makes it easy to access the sender functions:
#
# >>> con.reqScannerParameters()
# >>> con.placeOrder(...)
#
##
from ib.opt.dispatcher import Dispatcher
from ib.opt.receiver import Receiver
from ib.opt.sender import Sender
class Connection(object):
""" Encapsulates a connection to TWS.
"""
def __init__(self, host, port, clientId, receiver, sender, dispatcher):
""" Constructor.
@param host name of host for connection; default is localhost
@param port port number for connection; default is 7496
@param clientId client identifier to send when connected
@param receiver instance for reading from the connected socket
@param sender instance for writing to the connected socket
@param dispatcher instance for dispatching socket messages
"""
self.host = host
self.port = port
self.clientId = clientId
self.receiver = receiver
self.sender = sender
self.dispatcher = dispatcher
def __getattr__(self, name):
""" x.__getattr__('name') <==> x.name
@return attribute of instance dispatcher, receiver, or sender
"""
for obj in (self.dispatcher, self.receiver, self.sender):
try:
return getattr(obj, name)
except (AttributeError, ):
pass
err = "'%s' object has no attribute '%s'"
raise AttributeError(err % (self.__class__.__name__, name))
def connect(self):
""" Establish a connection to TWS with instance attributes.
@return True if connected, otherwise raises an exception
"""
return self.sender.connect(self.host, self.port, self.clientId,
self.receiver)
@classmethod
def create(cls, host='localhost', port=7496, clientId=0,
receiver=None, sender=None, dispatcher=None):
""" Creates and returns Connection class (or subclass) instance.
For the receiver, sender, and dispatcher parameters, pass in
an object instance for those duties; leave as None to have new
instances constructed.
@param host name of host for connection; default is localhost
@param port port number for connection; default is 7496
@param clientId client identifier to send when connected
@param receiver=None object for reading messages
@param sender=None object for writing requests
@param dispatcher=None object for dispatching messages
@return Connection (or subclass) instance
"""
dispatcher = Dispatcher() if dispatcher is None else dispatcher
receiver = Receiver(dispatcher) if receiver is None else receiver
sender = Sender(dispatcher) if sender is None else sender
return cls(host, port, clientId, receiver, sender, dispatcher)
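# Minimal usage sketch built on the factory above (hedged: assumes a TWS
# instance listening on the default port; my_handler is a hypothetical
# callable that accepts a message object):
#
#   con = Connection.create(clientId=0)
#   con.register(my_handler)        # resolved on the receiver via __getattr__
#   con.connect()
#   con.reqScannerParameters()      # resolved on the sender via __getattr__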
|
liaorubei/depot_tools
|
refs/heads/master
|
third_party/pylint/checkers/imports.py
|
67
|
# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""imports checkers for Python code"""
import sys
from collections import defaultdict
import six
from six.moves import map # pylint: disable=redefined-builtin
from logilab.common.graph import get_cycles, DotBackend
from logilab.common.ureports import VerbatimText, Paragraph
import astroid
from astroid import are_exclusive
from astroid.modutils import get_module_part, is_standard_module
from pylint.interfaces import IAstroidChecker
from pylint.utils import EmptyReport
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages, is_import_error
def _except_import_error(node):
"""
Check if the try-except node has an ImportError handler.
    Return True if an ImportError handler was inferred, False otherwise.
"""
if not isinstance(node, astroid.TryExcept):
return
return any(map(is_import_error, node.handlers))
def get_first_import(node, context, name, base, level):
"""return the node where [base.]<name> is imported or None if not found
"""
fullname = '%s.%s' % (base, name) if base else name
first = None
found = False
for first in context.body:
if first is node:
continue
if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
continue
if isinstance(first, astroid.Import):
if any(fullname == iname[0] for iname in first.names):
found = True
break
elif isinstance(first, astroid.From):
if level == first.level and any(
fullname == '%s.%s' % (first.modname, iname[0])
for iname in first.names):
found = True
break
if found and not are_exclusive(first, node):
return first
# utilities to represents import dependencies as tree and dot graph ###########
def make_tree_defs(mod_files_list):
"""get a list of 2-uple (module, list_of_files_which_import_this_module),
it will return a dictionary to represent this as a tree
"""
tree_defs = {}
for mod, files in mod_files_list:
node = (tree_defs, ())
for prefix in mod.split('.'):
node = node[0].setdefault(prefix, [{}, []])
node[1] += files
return tree_defs
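# A small illustration of the tree shape built above (hypothetical input):
#   make_tree_defs([('pkg.mod', ['foo.py'])])
#   => {'pkg': [{'mod': [{}, ['foo.py']]}, []]}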
def repr_tree_defs(data, indent_str=None):
"""return a string which represents imports as a tree"""
lines = []
nodes = data.items()
for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
if not files:
files = ''
else:
files = '(%s)' % ','.join(files)
if indent_str is None:
lines.append('%s %s' % (mod, files))
sub_indent_str = ' '
else:
lines.append(r'%s\-%s %s' % (indent_str, mod, files))
if i == len(nodes)-1:
sub_indent_str = '%s ' % indent_str
else:
sub_indent_str = '%s| ' % indent_str
if sub:
lines.append(repr_tree_defs(sub, sub_indent_str))
return '\n'.join(lines)
def dependencies_graph(filename, dep_info):
"""write dependencies as a dot (graphviz) file
"""
done = {}
printer = DotBackend(filename[:-4], rankdir='LR')
printer.emit('URL="." node[shape="box"]')
for modname, dependencies in sorted(six.iteritems(dep_info)):
done[modname] = 1
printer.emit_node(modname)
for modname in dependencies:
if modname not in done:
done[modname] = 1
printer.emit_node(modname)
for depmodname, dependencies in sorted(six.iteritems(dep_info)):
for modname in dependencies:
printer.emit_edge(modname, depmodname)
printer.generate(filename)
def make_graph(filename, dep_info, sect, gtype):
"""generate a dependencies graph and add some information about it in the
report's section
"""
dependencies_graph(filename, dep_info)
sect.append(Paragraph('%simports graph has been written to %s'
% (gtype, filename)))
# the import checker itself ###################################################
MSGS = {
'F0401': ('Unable to import %s',
'import-error',
'Used when pylint has been unable to import a module.'),
'R0401': ('Cyclic import (%s)',
'cyclic-import',
'Used when a cyclic import between two or more modules is \
detected.'),
'W0401': ('Wildcard import %s',
'wildcard-import',
'Used when `from module import *` is detected.'),
'W0402': ('Uses of a deprecated module %r',
'deprecated-module',
              'Used when a module marked as deprecated is imported.'),
'W0403': ('Relative import %r, should be %r',
'relative-import',
'Used when an import relative to the package directory is '
'detected.',
{'maxversion': (3, 0)}),
'W0404': ('Reimport %r (imported line %s)',
'reimported',
'Used when a module is reimported multiple times.'),
'W0406': ('Module import itself',
'import-self',
'Used when a module is importing itself.'),
'W0410': ('__future__ import is not the first non docstring statement',
'misplaced-future',
'Python 2.5 and greater require __future__ import to be the \
first non docstring statement in the module.',
{'maxversion': (3, 0)}),
}
class ImportsChecker(BaseChecker):
"""checks for
* external modules dependencies
* relative / wildcard imports
* cyclic imports
* uses of deprecated modules
"""
__implements__ = IAstroidChecker
name = 'imports'
msgs = MSGS
priority = -2
if sys.version_info < (3,):
deprecated_modules = ('regsub', 'TERMIOS', 'Bastion', 'rexec')
else:
deprecated_modules = ('stringprep', 'optparse')
options = (('deprecated-modules',
{'default' : deprecated_modules,
'type' : 'csv',
'metavar' : '<modules>',
'help' : 'Deprecated modules which should not be used, \
separated by a comma'}
),
('import-graph',
{'default' : '',
'type' : 'string',
'metavar' : '<file.dot>',
                'help' : 'Create a graph of all (i.e. internal and \
external) dependencies in the given file (report RP0402 must not be disabled)'}
),
('ext-import-graph',
{'default' : '',
'type' : 'string',
'metavar' : '<file.dot>',
'help' : 'Create a graph of external dependencies in the \
given file (report RP0402 must not be disabled)'}
),
('int-import-graph',
{'default' : '',
'type' : 'string',
'metavar' : '<file.dot>',
'help' : 'Create a graph of internal dependencies in the \
given file (report RP0402 must not be disabled)'}
),
)
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
self.stats = None
self.import_graph = None
self.__int_dep_info = self.__ext_dep_info = None
self.reports = (('RP0401', 'External dependencies',
self.report_external_dependencies),
('RP0402', 'Modules dependencies graph',
self.report_dependencies_graph),
)
def open(self):
"""called before visiting project (i.e set of modules)"""
self.linter.add_stats(dependencies={})
self.linter.add_stats(cycles=[])
self.stats = self.linter.stats
self.import_graph = defaultdict(set)
def close(self):
"""called before visiting project (i.e set of modules)"""
# don't try to compute cycles if the associated message is disabled
if self.linter.is_message_enabled('cyclic-import'):
vertices = list(self.import_graph)
for cycle in get_cycles(self.import_graph, vertices=vertices):
self.add_message('cyclic-import', args=' -> '.join(cycle))
def visit_import(self, node):
"""triggered when an import statement is seen"""
modnode = node.root()
for name, _ in node.names:
importedmodnode = self.get_imported_module(node, name)
if importedmodnode is None:
continue
self._check_relative_import(modnode, node, importedmodnode, name)
self._add_imported_module(node, importedmodnode.name)
self._check_deprecated_module(node, name)
self._check_reimport(node, name)
# TODO This appears to be the list of all messages of the checker...
# @check_messages('W0410', 'W0401', 'W0403', 'W0402', 'W0404', 'W0406', 'F0401')
@check_messages(*(MSGS.keys()))
def visit_from(self, node):
"""triggered when a from statement is seen"""
basename = node.modname
if basename == '__future__':
# check if this is the first non-docstring statement in the module
prev = node.previous_sibling()
if prev:
# consecutive future statements are possible
if not (isinstance(prev, astroid.From)
and prev.modname == '__future__'):
self.add_message('misplaced-future', node=node)
return
for name, _ in node.names:
if name == '*':
self.add_message('wildcard-import', args=basename, node=node)
modnode = node.root()
importedmodnode = self.get_imported_module(node, basename)
if importedmodnode is None:
return
self._check_relative_import(modnode, node, importedmodnode, basename)
self._check_deprecated_module(node, basename)
for name, _ in node.names:
if name != '*':
self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
self._check_reimport(node, name, basename, node.level)
def get_imported_module(self, importnode, modname):
try:
return importnode.do_import_module(modname)
except astroid.InferenceError as ex:
if str(ex) != modname:
args = '%r (%s)' % (modname, ex)
else:
args = repr(modname)
if not _except_import_error(importnode.parent):
self.add_message("import-error", args=args, node=importnode)
def _check_relative_import(self, modnode, importnode, importedmodnode,
importedasname):
"""check relative import. node is either an Import or From node, modname
the imported module name.
"""
if not self.linter.is_message_enabled('relative-import'):
return
if importedmodnode.file is None:
return False # built-in module
if modnode is importedmodnode:
return False # module importing itself
if modnode.absolute_import_activated() or getattr(importnode, 'level', None):
return False
if importedmodnode.name != importedasname:
# this must be a relative import...
self.add_message('relative-import',
args=(importedasname, importedmodnode.name),
node=importnode)
def _add_imported_module(self, node, importedmodname):
"""notify an imported module, used to analyze dependencies"""
try:
importedmodname = get_module_part(importedmodname)
except ImportError:
pass
context_name = node.root().name
if context_name == importedmodname:
# module importing itself !
self.add_message('import-self', node=node)
elif not is_standard_module(importedmodname):
# handle dependencies
importedmodnames = self.stats['dependencies'].setdefault(
importedmodname, set())
            if context_name not in importedmodnames:
importedmodnames.add(context_name)
# update import graph
mgraph = self.import_graph[context_name]
if importedmodname not in mgraph:
mgraph.add(importedmodname)
def _check_deprecated_module(self, node, mod_path):
"""check if the module is deprecated"""
for mod_name in self.config.deprecated_modules:
if mod_path == mod_name or mod_path.startswith(mod_name + '.'):
self.add_message('deprecated-module', node=node, args=mod_path)
def _check_reimport(self, node, name, basename=None, level=None):
"""check if the import is necessary (i.e. not already done)"""
if not self.linter.is_message_enabled('reimported'):
return
frame = node.frame()
root = node.root()
contexts = [(frame, level)]
if root is not frame:
contexts.append((root, None))
for context, level in contexts:
first = get_first_import(node, context, name, basename, level)
if first is not None:
self.add_message('reimported', node=node,
args=(name, first.fromlineno))
def report_external_dependencies(self, sect, _, dummy):
"""return a verbatim layout for displaying dependencies"""
dep_info = make_tree_defs(six.iteritems(self._external_dependencies_info()))
if not dep_info:
raise EmptyReport()
tree_str = repr_tree_defs(dep_info)
sect.append(VerbatimText(tree_str))
def report_dependencies_graph(self, sect, _, dummy):
"""write dependencies as a dot (graphviz) file"""
dep_info = self.stats['dependencies']
if not dep_info or not (self.config.import_graph
or self.config.ext_import_graph
or self.config.int_import_graph):
raise EmptyReport()
filename = self.config.import_graph
if filename:
make_graph(filename, dep_info, sect, '')
filename = self.config.ext_import_graph
if filename:
make_graph(filename, self._external_dependencies_info(),
sect, 'external ')
filename = self.config.int_import_graph
if filename:
make_graph(filename, self._internal_dependencies_info(),
sect, 'internal ')
def _external_dependencies_info(self):
"""return cached external dependencies information or build and
cache them
"""
if self.__ext_dep_info is None:
package = self.linter.current_name
self.__ext_dep_info = result = {}
for importee, importers in six.iteritems(self.stats['dependencies']):
if not importee.startswith(package):
result[importee] = importers
return self.__ext_dep_info
def _internal_dependencies_info(self):
"""return cached internal dependencies information or build and
cache them
"""
if self.__int_dep_info is None:
package = self.linter.current_name
self.__int_dep_info = result = {}
for importee, importers in six.iteritems(self.stats['dependencies']):
if importee.startswith(package):
result[importee] = importers
return self.__int_dep_info
def register(linter):
"""required method to auto register this checker """
linter.register_checker(ImportsChecker(linter))
|
evlyapin/ansible-modules-core
|
refs/heads/devel
|
utilities/logic/set_fact.py
|
115
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013 Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
author: "Dag Wieers (@dagwieers)"
module: set_fact
short_description: Set host facts from a task
description:
- This module allows setting new variables. Variables are set on a host-by-host basis
just like facts discovered by the setup module.
- These variables will survive between plays.
options:
key_value:
description:
- The C(set_fact) module takes key=value pairs as variables to set
in the playbook scope. Or alternatively, accepts complex arguments
using the C(args:) statement.
required: true
default: null
version_added: "1.2"
'''
EXAMPLES = '''
# Example setting host facts using key=value pairs
- set_fact: one_fact="something" other_fact="{{ local_var }}"
# Example setting host facts using complex arguments
- set_fact:
one_fact: something
other_fact: "{{ local_var * 2 }}"
another_fact: "{{ some_registered_var.results | map(attribute='ansible_facts.some_fact') | list }}"
# As of 1.8, Ansible will convert boolean strings ('true', 'false', 'yes', 'no')
# to proper boolean values when using the key=value syntax, however it is still
# recommended that booleans be set using the complex argument style:
- set_fact:
one_fact: true
other_fact: false
'''
|
Lucasgscruz/harpia
|
refs/heads/master
|
harpia/bpGUI/findColor.py
|
2
|
# -*- coding: utf-8 -*-
# [HARPIA PROJECT]
#
#
# S2i - Intelligent Industrial Systems
# DAS - Automation and Systems Department
# UFSC - Federal University of Santa Catarina
# Copyright: 2006 - 2007 Luis Carlos Dill Junges (lcdjunges@yahoo.com.br), Clovis Peruchi Scotti (scotti@ieee.org),
# Guilherme Augusto Rutzen (rutzen@das.ufsc.br), Mathias Erdtmann (erdtmann@gmail.com) and S2i (www.s2i.das.ufsc.br)
# 2007 - 2009 Clovis Peruchi Scotti (scotti@ieee.org), S2i (www.s2i.das.ufsc.br)
#
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further information, check the COPYING file distributed with this software.
#
# ----------------------------------------------------------------------
import gtk
from harpia.GladeWindow import GladeWindow
from harpia.s2icommonproperties import S2iCommonProperties, APP, DIR
# i18n
import os
from harpia.utils.XMLUtils import XMLParser
import gettext
_ = gettext.gettext
gettext.bindtextdomain(APP, DIR)
gettext.textdomain(APP)
# ----------------------------------------------------------------------
class Properties(GladeWindow, S2iCommonProperties):
# ----------------------------------------------------------------------
def __init__(self, PropertiesXML, S2iBlockProperties):
self.m_sDataDir = os.environ['HARPIA_DATA_DIR']
filename = self.m_sDataDir + 'glade/findColor.ui'
self.m_oPropertiesXML = PropertiesXML
self.m_oS2iBlockProperties = S2iBlockProperties
widget_list = [
'Properties',
'_R',
'_G',
'_B',
'_R_T',
'_G_T',
'_B_T',
'BackgroundColor',
'BorderColor',
'HelpView',
'prop_confirm'
]
handlers = [
'on_cancel_clicked',
'on_prop_confirm_clicked',
'on_BackColorButton_clicked',
'on_BorderColorButton_clicked'
]
top_window = 'Properties'
GladeWindow.__init__(self, filename, top_window, widget_list, handlers)
# load properties values
self.block_properties = self.m_oPropertiesXML.getTag("properties").getTag("block").getChildTags("property")
for Property in self.block_properties:
name = Property.getAttr("name")
value = Property.getAttr("value")
if name == "_R":
self.widgets['_R'].set_value(float(value));
if name == "_G":
self.widgets['_G'].set_value(float(value));
if name == "_B":
self.widgets['_B'].set_value(float(value));
if name == "_R_T":
self.widgets['_R_T'].set_value(float(value));
if name == "_G_T":
self.widgets['_G_T'].set_value(float(value));
if name == "_B_T":
self.widgets['_B_T'].set_value(float(value));
# if Property.name == "isFilling":
# if Property.value == "true":
# self.widgets['isFilling'].set_active( True );
# else:
# self.widgets['isFilling'].set_active( False );
# if Property.name == "isScalling":
# if Property.value == "true":
# self.widgets['isScalling'].set_active( True );
# else:
# self.widgets['isScalling'].set_active( False );
# if Property.name == "isCenter":
# if Property.value == "true":
# self.widgets['isAtCenter'].set_active( True );
# else:
# self.widgets['isAtPoint'].set_active( True );
self.configure()
# load help text
t_oS2iHelp = XMLParser(self.m_sDataDir + 'help/findColor' + _('_en.help'))
t_oTextBuffer = gtk.TextBuffer()
t_oTextBuffer.set_text(unicode(str(t_oS2iHelp.getTag("help").getTag("content").getTagContent())))
self.widgets['HelpView'].set_buffer(t_oTextBuffer)
# ----------------------------------------------------------------------
def __del__(self):
pass
# ----------------------------------------------------------------------
def getHelp(self):
return "Detecta formas circulares na imagem de entrada.\
Saida 1 é a resposta da avaliacao(*) e a saida dois mostra \
os circulos encontrados."
#-----------------------------------------------------------------------
def on_prop_confirm_clicked(self, *args):
self.widgets['prop_confirm'].grab_focus()
for Property in self.block_properties:
name = Property.getAttr("name")
if name == "_R":
Property.setAttr("value", unicode(self.widgets['_R'].get_value()))
if name == "_G":
Property.setAttr("value", unicode(self.widgets['_G'].get_value()))
if name == "_B":
Property.setAttr("value", unicode(self.widgets['_B'].get_value()))
if name == "_R_T":
Property.setAttr("value", unicode(self.widgets['_R_T'].get_value()))
if name == "_G_T":
Property.setAttr("value", unicode(self.widgets['_G_T'].get_value()))
if name == "_B_T":
Property.setAttr("value", unicode(self.widgets['_B_T'].get_value()))
# if Property.name == "isCenter":
# if self.widgets['isAtCenter'].get_active():
# Property.value = u"true"
# else:
# Property.value = u"false"
# if Property.name == "isFilling":
# if self.widgets['isFilling'].get_active():
# Property.value = u"true"
# else:
# Property.value = u"false"
# if Property.name == "isScalling":
# if self.widgets['isScalling'].get_active():
# Property.value = u"true"
# else:
# Property.value = u"false"
self.m_oS2iBlockProperties.SetPropertiesXML(self.m_oPropertiesXML)
self.m_oS2iBlockProperties.SetBorderColor(self.m_oBorderColor)
self.m_oS2iBlockProperties.SetBackColor(self.m_oBackColor)
self.widgets['Properties'].destroy()
# ----------------------------------------------------------------------
# propProperties = Properties()()
# propProperties.show( center=0 )
# ------------------------------------------------------------------------------
# Code generation
# ------------------------------------------------------------------------------
def generate(blockTemplate):
blockTemplate.header += r"""
int GetColor(IplImage * imagem, int x, int y)
{
return (int)(((uchar*)(imagem->imageData + imagem->widthStep*y))[x]);
}
void SetColor(IplImage * imagem, int x, int y, uchar color)
{
((uchar*)(imagem->imageData + imagem->widthStep*y))[x] = color;
}
void CheckImg(IplImage * img, uchar c_value, uchar tolerance)
{
uchar min,max;
int y_It,x_It;
if((int)c_value < (int)tolerance)
tolerance = c_value;
if(((int)c_value+(int)tolerance) > 255)
tolerance = 255 - c_value;
min = c_value - tolerance;
max = c_value + tolerance;
for(y_It=0;y_It<(img->height);y_It++)
for(x_It=0;x_It<(img->width);x_It++)
{
uchar val;
val = GetColor(img,x_It,y_It);
if(val >= min && val <= max)
SetColor(img,x_It,y_It,255);
else
SetColor(img,x_It,y_It,0);
}
}
CvPoint GetCenter(IplImage * src, long int * nOfPts)//, long int * numOfPoints)
{
long int numOfMatchingPoints;
long int posXsum;
long int posYsum;
int x_It, y_It;
CvPoint Center;
posXsum = 0;
posYsum = 0;
numOfMatchingPoints = 0;
for(y_It=0;y_It<(src->height);y_It++)
for(x_It=0;x_It<(src->width);x_It++)
if(GetColor(src,x_It,y_It))
{
posXsum += x_It;
posYsum += y_It;
numOfMatchingPoints++;
}
if(numOfMatchingPoints > 0)
{
Center.x = (int)(posXsum/numOfMatchingPoints);
Center.y = (int)(posYsum/numOfMatchingPoints);
}
else
numOfMatchingPoints = -1;
// (*numOfPoints) = numOfMatchingPoints;
if(nOfPts)
*nOfPts = numOfMatchingPoints;
return Center;
}
double dist22Points(CvPoint a, CvPoint b)
{
int xD,yD;
xD = a.x - b.x;
yD = a.y - b.y;
xD = (xD>0)?xD:-xD;
yD = (yD>0)?yD:-yD;
return (double)(xD*xD + yD*yD);
}
double GetVariance(IplImage * src,CvPoint center)//, long int * numOfPoints)
{
long int numOfMatchingPoints;
double distSquaresSum;
double variance;
int x_It,y_It;
numOfMatchingPoints = 0;
distSquaresSum = 0.0;
for(y_It=0;y_It<(src->height);y_It++)
for(x_It=0;x_It<(src->width);x_It++)
if(GetColor(src,x_It,y_It))
{
numOfMatchingPoints++;
distSquaresSum += dist22Points(center,cvPoint(x_It,y_It));
}
if(numOfMatchingPoints)
variance = distSquaresSum/numOfMatchingPoints;
else
variance = -1;
return variance;
}
long int CheckForColor(IplImage * src, IplImage * dst, uchar * c_value, uchar * tolerance, CvPoint * pointCenter, double * variance)
{
uchar B,B_T,G,G_T,R,R_T;
int i;
long int numOfPoints;
CvPoint centro;
IplImage * m_pChans[3] = {NULL,NULL,NULL};
numOfPoints = 0;
B = c_value[0];
G = c_value[1];
R = c_value[2];
B_T = tolerance[0];
G_T = tolerance[1];
R_T = tolerance[2];
for(i=0;i<3;i++)
m_pChans[i] = cvCreateImage(cvGetSize(src),IPL_DEPTH_8U, 1);
cvSplit(src,m_pChans[0],m_pChans[1],m_pChans[2], NULL);
CheckImg(m_pChans[0],B,B_T);
CheckImg(m_pChans[1],G,G_T);
CheckImg(m_pChans[2],R,R_T);
cvAnd(m_pChans[0], m_pChans[1], dst, NULL );
cvAnd(m_pChans[2], dst, dst, NULL );
centro = GetCenter(dst,&numOfPoints);//,&numOfPoints);
if(numOfPoints != -1)
*variance = GetVariance(dst,centro);
pointCenter->x = centro.x;
pointCenter->y = centro.y;
cvReleaseImage( &m_pChans[0] );
cvReleaseImage( &m_pChans[1] );
cvReleaseImage( &m_pChans[2] );
return numOfPoints;
}
"""
for propIter in blockTemplate.properties:
if propIter[0] == '_B':
c_B = propIter[1]
elif propIter[0] == '_B_T':
c_B_T = propIter[1]
elif propIter[0] == '_G':
c_G = propIter[1]
elif propIter[0] == '_G_T':
c_G_T = propIter[1]
elif propIter[0] == '_R':
c_R = propIter[1]
elif propIter[0] == '_R_T':
c_R_T = propIter[1]
# o1 - pto
# o2 - numOfPoints
# o3 - variance
# o4 - img
blockTemplate.imagesIO = \
'IplImage * block$$_img_i1 = NULL;\n' + \
'IplImage * block$$_img_o4 = NULL;\n' + \
'CvPoint block$$_point_o1;\n' + \
'uchar block$$c_value[3] = {' + c_B + ',' + c_G + ',' + c_R + '};\n' + \
'uchar block$$tolerance[3] = {' + c_B_T + ',' + c_G_T + ',' + c_R_T + '};\n' + \
'double block$$_double_o2;\n' + \
'double block$$_double_o3;\n'
blockTemplate.functionCall = '\nif(block$$_img_i1){\n' + \
' IplImage * block$$_img_t1 = cvCreateImage(cvGetSize(block$$_img_i1),IPL_DEPTH_8U, 1);\n' + \
' if(!block$$_img_o4)\n' + \
' block$$_img_o4 = cvCloneImage(block$$_img_i1);\n' + \
' else\n' + \
' cvCopy(block$$_img_i1,block$$_img_o4,0);\n' + \
' block$$_double_o2 = CheckForColor(block$$_img_i1, block$$_img_t1, block$$c_value, block$$tolerance, &block$$_point_o1, &block$$_double_o3);\n' + \
' cvCircle(block$$_img_o4,block$$_point_o1,8,cvScalarAll(255),4,8,0);\n' + \
' cvReleaseImage(&block$$_img_t1);\n' + \
'}\n'
blockTemplate.dealloc = 'cvReleaseImage(&block$$_img_o4);\n' + \
'cvReleaseImage(&block$$_img_i1);\n'
# ------------------------------------------------------------------------------
# Block Setup
# ------------------------------------------------------------------------------
def getBlock():
return {'Label': _('Find object of a given color'),
'Path': {'Python': 'findColor',
'Glade': 'glade/findColor.ui',
'Xml': 'xml/findColor.xml'},
'Icon': 'images/findColor.png',
'Color': '50:50:200:150',
'InTypes': {0: 'HRP_IMAGE'},
'OutTypes': {0: "HRP_POINT", 1: "HRP_DOUBLE", 2: "HRP_DOUBLE", 3: "HRP_IMAGE"},
'Description': _(
'Find object of a given color and points its center\n Output 1 = Center Point\n Output2 = Number of matching points\n Output3 = Variance \n Output4 = Tagged Image'),
'TreeGroup': _('Feature Detection')
}
|
jasonwee/asus-rt-n14uhp-mrtg
|
refs/heads/master
|
src/lesson_runtime_features/resource_getrusage.py
|
1
|
import resource
RESOURCES = [
('ru_utime', 'User time'),
('ru_stime', 'System time'),
('ru_maxrss', 'Max. Resident Set Size'),
('ru_ixrss', 'Shared Memory Size'),
('ru_idrss', 'Unshared Memory Size'),
('ru_isrss', 'Stack Size'),
('ru_inblock', 'Block inputs'),
('ru_oublock', 'Block outputs'),
]
usage = resource.getrusage(resource.RUSAGE_SELF)
for name, desc in RESOURCES:
print('{:<25} ({:<10}) = {}'.format(
desc, name, getattr(usage, name)))
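# Sample output (illustrative only; values vary by platform and run):
#   User time                 (ru_utime  ) = 0.032
#   System time               (ru_stime  ) = 0.012
#   Max. Resident Set Size    (ru_maxrss ) = 9296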
|
paulopperman/Hyperloop
|
refs/heads/master
|
src/hyperloop/tube_wall_temp.py
|
8
|
"""
tubeModel.py -
Determines the steady state temperature of the hyperloop tube.
Calculates Q released/absorbed by hyperloop tube due to:
Internal Convection, Tube Conduction, Ambient Natural Convection, Solar Flux In, Radiation Out
-original calculations from Jeff Berton, ported and extended by Jeff Chin
Compatible with OpenMDAO v0.8.1
"""
from math import log, pi, sqrt, e
from openmdao.main.api import Assembly, Component
from openmdao.lib.drivers.api import BroydenSolver
from openmdao.lib.datatypes.api import Float, Bool
from openmdao.main.api import convert_units as cu
from pycycle.api import FlowStationVar
class TubeWallTemp(Component):
""" Calculates Q released/absorbed by the hyperloop tube """
#--Inputs--
#Hyperloop Parameters/Design Variables
diameter_outer_tube = Float(2.23, units = 'm', iotype='in', desc='tube outer diameter') #7.3ft
length_tube = Float(482803, units = 'm', iotype='in', desc='Length of entire Hyperloop') #300 miles, 1584000ft
    num_pods = Float(34, iotype='in', desc='Number of Pods in the Tube at a given time') #dimensionless count
temp_boundary = Float(322.0, units = 'K', iotype='in', desc='Average Temperature of the tube wall') #
temp_outside_ambient = Float(305.6, units = 'K', iotype='in', desc='Average Temperature of the outside air') #
nozzle_air = FlowStationVar(iotype="in", desc="air exiting the pod nozzle", copy=None)
bearing_air = FlowStationVar(iotype="in", desc="air exiting the air bearings", copy=None)
#constants
solar_insolation = Float(1000., iotype="in", units = 'W/m**2', desc='solar irradiation at sea level on a clear day') #
nn_incidence_factor = Float(0.7, iotype="in", desc='Non-normal incidence factor') #
surface_reflectance = Float(0.5, desc='Solar Reflectance Index') #
q_per_area_solar = Float(350., units = 'W/m**2', desc='Solar Heat Rate Absorbed per Area') #
q_total_solar = Float(375989751., iotype="in", units = 'W', desc='Solar Heat Absorbed by Tube') #
    emissivity_tube = Float(0.5, iotype="in", desc='Emissivity of the Tube (dimensionless)') #
sb_constant = Float(0.00000005670373, iotype="in", units = 'W/((m**2)*(K**4))', desc='Stefan-Boltzmann Constant') #
#--Outputs--
area_rad = Float(337486.1, units = 'm**2', iotype='out', desc='Tube Radiating Area') #
#Required for Natural Convection Calcs
    GrDelTL3 = Float(1946216.7, units = '1/((ft**3)*F)', iotype='out', desc='Grashof correlation factor, Gr/(deltaT*L**3)') #
Pr = Float(0.707, iotype='out', desc='Prandtl') #
Gr = Float(12730351223., iotype='out', desc='Grashof #') #
Ra = Float(8996312085., iotype='out', desc='Rayleigh #') #
Nu = Float(232.4543713, iotype='out', desc='Nusselt #') #
k = Float(0.02655, units = 'W/(m*K)', iotype='out', desc='Thermal conductivity') #
    h = Float(0.845464094, units = 'W/((m**2)*K)', iotype='out', desc='Natural convection heat transfer coefficient') #
    area_convection = Float(3374876.115, units = 'm**2', iotype='out', desc='Convection Area') #
#Natural Convection
q_per_area_nat_conv = Float(7.9, units = 'W/(m**2)', iotype='out', desc='Heat Radiated per Area to the outside') #
total_q_nat_conv = Float(286900419., units = 'W', iotype='out', desc='Total Heat Radiated to the outside via Natural Convection') #
#Exhausted from Pods
    heat_rate_pod = Float(519763, units = 'W', iotype='out', desc='Heating Due to a Single Pod') #
    total_heat_rate_pods = Float(17671942., units = 'W', iotype='out', desc='Heating Due to All Pods') #
#Radiated Out
q_rad_per_area = Float(31.6, units = 'W/(m**2)', iotype='out', desc='Heat Radiated to the outside') #
q_rad_tot = Float(106761066.5, units = 'W', iotype='out', desc='Heat Radiated to the outside') #
#Radiated In
viewing_angle = Float(1074256, units = 'm**2', iotype='out', desc='Effective Area hit by Sun') #
#Total Heating
q_total_out = Float(286900419., units = 'W', iotype='out', desc='Total Heat Released via Radiation and Natural Convection') #
q_total_in = Float(286900419., units = 'W', iotype='out', desc='Total Heat Absorbed/Added via Pods and Solar Absorption') #
#Residual (for solver)
ss_temp_residual = Float(units = 'K', iotype='out', desc='Residual of T_released - T_absorbed')
def execute(self):
"""Calculate Various Paramters"""
bearing_q = cu(self.bearing_air.W,'lbm/s','kg/s') * cu(self.bearing_air.Cp,'Btu/(lbm*degR)','J/(kg*K)') * (cu(self.bearing_air.Tt,'degR','degK') - self.temp_boundary)
nozzle_q = cu(self.nozzle_air.W,'lbm/s','kg/s') * cu(self.nozzle_air.Cp,'Btu/(lbm*degR)','J/(kg*K)') * (cu(self.nozzle_air.Tt,'degR','degK') - self.temp_boundary)
#Q = mdot * cp * deltaT
self.heat_rate_pod = nozzle_q +bearing_q
#Total Q = Q * (number of pods)
self.total_heat_rate_pods = self.heat_rate_pod*self.num_pods
#Determine thermal resistance of outside via Natural Convection or forced convection
        if (self.temp_outside_ambient < 400):
            self.GrDelTL3 = 4.178e19*((self.temp_outside_ambient)**(-4.639)) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
        else:
            self.GrDelTL3 = 4.985e18*((self.temp_outside_ambient)**(-4.284)) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
#Prandtl Number
        #Pr = viscous diffusion rate / thermal diffusion rate = Cp * dynamic viscosity / thermal conductivity
#Pr << 1 means thermal diffusivity dominates
#Pr >> 1 means momentum diffusivity dominates
if (self.temp_outside_ambient < 400):
self.Pr = 1.23*(self.temp_outside_ambient**(-0.09685)) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
else:
self.Pr = 0.59*(self.temp_outside_ambient**(0.0239))
#Grashof Number
#Relationship between buoyancy and viscosity
#Laminar = Gr < 10^8
#Turbulent = Gr > 10^9
self.Gr = self.GrDelTL3*(self.temp_boundary-self.temp_outside_ambient)*(self.diameter_outer_tube**3)
#Rayleigh Number
#Buoyancy driven flow (natural convection)
self.Ra = self.Pr * self.Gr
#Nusselt Number
        #Nu = convective heat transfer / conductive heat transfer
if (self.Ra<=10**12): #valid in specific flow regime
self.Nu = (0.6 + 0.387*self.Ra**(1./6.)/(1 + (0.559/self.Pr)**(9./16.))**(8./27.))**2 #3rd Ed. of Introduction to Heat Transfer by Incropera and DeWitt, equations (9.33) and (9.34) on page 465
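        #This is the Churchill and Chu correlation for free convection from a
        #horizontal cylinder, valid up to Ra ~ 10**12; note there is no else
        #branch, so for Ra > 10**12 Nu simply retains its previous value.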
if(self.temp_outside_ambient < 400):
self.k = 0.0001423*(self.temp_outside_ambient**(0.9138)) #SI units (https://mdao.grc.nasa.gov/publications/Berton-Thesis.pdf pg51)
else:
self.k = 0.0002494*(self.temp_outside_ambient**(0.8152))
#h = k*Nu/Characteristic Length
self.h = (self.k * self.Nu)/ self.diameter_outer_tube
#Convection Area = Surface Area
self.area_convection = pi * self.length_tube * self.diameter_outer_tube
        #Determine heat convected away per square meter (q)
self.q_per_area_nat_conv = self.h*(self.temp_boundary-self.temp_outside_ambient)
        #Determine total heat convected away over the entire tube (q_total)
self.total_q_nat_conv = self.q_per_area_nat_conv * self.area_convection
#Determine heat incoming via Sun radiation (Incidence Flux)
#Sun hits an effective rectangular cross section
        self.viewing_angle = self.length_tube * self.diameter_outer_tube
self.q_per_area_solar = (1-self.surface_reflectance)* self.nn_incidence_factor * self.solar_insolation
        self.q_total_solar = self.q_per_area_solar * self.viewing_angle
#Determine heat released via radiation
#Radiative area = surface area
self.area_rad = self.area_convection
        #P/A = SB * emissivity * (T^4 - To^4)
self.q_rad_per_area = self.sb_constant*self.emissivity_tube*((self.temp_boundary**4) - (self.temp_outside_ambient**4))
#P = A * (P/A)
self.q_rad_tot = self.area_rad * self.q_rad_per_area
#------------
#Sum Up
self.q_total_out = self.q_rad_tot + self.total_q_nat_conv
self.q_total_in = self.q_total_solar + self.total_heat_rate_pods
self.ss_temp_residual = (self.q_total_out - self.q_total_in)/1e6
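        #Steady state is reached when the solver below drives this residual to zero:
        #  Q_out = Q_rad + Q_nat_conv,  Q_in = Q_solar + Q_pods
        #  residual = (Q_out - Q_in)/1e6  -->  0 at the equilibrium wall temperature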
#run stand-alone component
if __name__ == "__main__":
from openmdao.main.api import set_as_top
class TubeHeatBalance(Assembly):
def configure(self):
tm = self.add('tm', TubeWallTemp())
#tm.bearing_air.setTotalTP()
driver = self.add('driver',BroydenSolver())
driver.add_parameter('tm.temp_boundary',low=0.,high=10000.)
driver.add_constraint('tm.ss_temp_residual=0')
driver.workflow.add(['tm'])
test = TubeHeatBalance()
set_as_top(test)
#set input values
test.tm.nozzle_air.setTotalTP(1710, 0.304434211)
test.tm.nozzle_air.W = 1.08
test.tm.bearing_air.W = 0.
test.tm.diameter_outer_tube = 2.22504#, units = 'm', iotype='in', desc='Tube out diameter') #7.3ft
test.tm.length_tube = 482803.#, units = 'm', iotype='in', desc='Length of entire Hyperloop') #300 miles, 1584000ft
test.tm.num_pods = 34.#, units = 'K', iotype='in', desc='Number of Pods in the Tube at a given time') #
test.tm.temp_boundary = 340#, units = 'K', iotype='in', desc='Average Temperature of the tube') #
test.tm.temp_outside_ambient = 305.6#, units = 'K', iotype='in', desc='Average Temperature of the outside air') #
test.run()
print "-----Completed Tube Heat Flux Model Calculations---"
print ""
print "CompressQ-{} SolarQ-{} RadQ-{} ConvecQ-{}".format(test.tm.total_heat_rate_pods, test.tm.q_total_solar, test.tm.q_rad_tot, test.tm.total_q_nat_conv )
print "Equilibrium Wall Temperature: {} K or {} F".format(test.tm.temp_boundary, cu(test.tm.temp_boundary,'degK','degF'))
print "Ambient Temperature: {} K or {} F".format(test.tm.temp_outside_ambient, cu(test.tm.temp_outside_ambient,'degK','degF'))
print "Q Out = {} W ==> Q In = {} W ==> Error: {}%".format(test.tm.q_total_out,test.tm.q_total_in,((test.tm.q_total_out-test.tm.q_total_in)/test.tm.q_total_out)*100)
|
monetate/sqlalchemy
|
refs/heads/master
|
test/ext/mypy/files/cols_notype_on_fk_col.py
|
3
|
from typing import Optional
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import registry
reg: registry = registry()
@reg.mapped
class User:
__tablename__ = "user"
id = Column(Integer(), primary_key=True)
name = Column(String)
@reg.mapped
class Address:
__tablename__ = "address"
id = Column(Integer, primary_key=True)
user_id: Mapped[int] = Column(ForeignKey("user.id"))
email_address = Column(String)
ad1 = Address()
p: Optional[int] = ad1.user_id
# it's not optional because we called it Mapped[int]
# and not Mapped[Optional[int]]
p2: int = ad1.user_id
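# Hypothetical variant (illustrative, not part of this test fixture): a nullable
# FK column would instead be annotated Mapped[Optional[int]], e.g.
#   user_id: Mapped[Optional[int]] = Column(ForeignKey("user.id"), nullable=True)
# in which case ad1.user_id would type-check as Optional[int].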
# class-level descriptor access
User.name.in_(["x", "y"])
# class-level descriptor access
Address.user_id.in_([1, 2])
|
jasonzzz/ansible
|
refs/heads/devel
|
lib/ansible/utils/color.py
|
13
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from ansible import constants as C
ANSIBLE_COLOR=True
if C.ANSIBLE_NOCOLOR:
ANSIBLE_COLOR=False
elif not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
ANSIBLE_COLOR=False
else:
try:
import curses
curses.setupterm()
if curses.tigetnum('colors') < 0:
ANSIBLE_COLOR=False
except ImportError:
# curses library was not found
pass
except curses.error:
# curses returns an error (e.g. could not find terminal)
ANSIBLE_COLOR=False
if C.ANSIBLE_FORCE_COLOR:
ANSIBLE_COLOR=True
# --- begin "pretty"
#
# pretty - A miniature library that provides a Python print and stdout
# wrapper that makes colored terminal text easier to use (e.g. without
# having to mess around with ANSI escape sequences). This code is public
# domain - there is no license except that you must leave this header.
#
# Copyright (C) 2008 Brian Nez <thedude at bri1 dot com>
#
# http://nezzen.net/2008/06/23/colored-text-in-python-using-ansi-escape-sequences/
codeCodes = {
'black': u'0;30', 'bright gray': u'0;37',
'blue': u'0;34', 'white': u'1;37',
'green': u'0;32', 'bright blue': u'1;34',
'cyan': u'0;36', 'bright green': u'1;32',
'red': u'0;31', 'bright cyan': u'1;36',
'purple': u'0;35', 'bright red': u'1;31',
'yellow': u'0;33', 'bright purple': u'1;35',
'dark gray': u'1;30', 'bright yellow': u'1;33',
'magenta': u'0;35', 'bright magenta': u'1;35',
'normal': u'0' ,
}
def stringc(text, color):
"""String in color."""
if ANSIBLE_COLOR:
return u"\033[%sm%s\033[0m" % (codeCodes[color], text)
else:
return text
# --- end "pretty"
def colorize(lead, num, color):
""" Print 'lead' = 'num' in 'color' """
s = u"%s=%-4s" % (lead, str(num))
if num != 0 and ANSIBLE_COLOR and color is not None:
s = stringc(s, color)
return s
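# Illustrative usage (actual output depends on terminal color support):
#   stringc('ok', 'green')           -> u'\033[0;32mok\033[0m' when ANSIBLE_COLOR is True
#   colorize('changed', 3, 'yellow') -> u'changed=3   ' wrapped in yellow escape codes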
def hostcolor(host, stats, color=True):
if ANSIBLE_COLOR and color:
if stats['failures'] != 0 or stats['unreachable'] != 0:
return u"%-37s" % stringc(host, 'red')
elif stats['changed'] != 0:
return u"%-37s" % stringc(host, 'yellow')
else:
return u"%-37s" % stringc(host, 'green')
return u"%-26s" % host
|
dhp-denero/server-tools
|
refs/heads/8.0
|
base_suspend_security/tests/__init__.py
|
37
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_base_suspend_security
|
ssteo/moviepy
|
refs/heads/master
|
docs/conf.py
|
1
|
# -*- coding: utf-8 -*-
#
# MoviePy documentation build configuration file, created by
# sphinx-quickstart on Sat Jul 13 14:47:48 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo',
'sphinx.ext.viewcode', 'sphinx.ext.autosummary', 'numpydoc']
numpydoc_class_members_toctree = False
numpydoc_show_class_members = False
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MoviePy'
copyright = u'2017, Zulko'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = '0.2'
# The full version, including alpha/beta/rc tags.
# release = '0.2.3.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
sys.path.append(os.path.abspath('_themes'))
#html_theme_path = ['_themes']
html_theme = 'sphinx_rtd_theme' # formerly 'kr'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/logo_small.jpeg'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MoviePydoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'MoviePy.tex', u'MoviePy Documentation',
u'Zulko', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'moviepy', u'MoviePy Documentation',
[u'Zulko'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MoviePy', u'MoviePy Documentation',
u'Zulko', 'MoviePy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'MoviePy'
epub_author = u'Zulko'
epub_publisher = u'Zulko'
epub_copyright = u'2017, Zulko'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# If 'no', URL addresses will not be shown.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
#autodoc_member_order = 'bysource'
|
bukepo/openthread
|
refs/heads/master
|
tools/harness-automation/autothreadharness/exceptions.py
|
10
|
#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
class FailError(Exception):
"""This error class is the base for all errors that cause the current test case fails.
"""
class FatalError(FailError):
"""This error class is the base for all errors that cause the whole test procedure stops.
"""
class GoldenDeviceNotEnoughError(FailError):
"""Raised when no more golden devices are available for trying.
"""
|
shurihell/testasia
|
refs/heads/test1
|
common/djangoapps/util/tests/test_keyword_sub_utils.py
|
130
|
"""
Tests for keyword_substitution.py
"""
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from ddt import ddt, file_data
from mock import patch
from util.date_utils import get_default_time_display
from util import keyword_substitution as Ks
@ddt
class KeywordSubTest(ModuleStoreTestCase):
""" Tests for the keyword substitution feature """
def setUp(self):
super(KeywordSubTest, self).setUp(create_user=False)
self.user = UserFactory.create(
email="testuser@edx.org",
username="testuser",
profile__name="Test User"
)
self.course = CourseFactory.create(
org='edx',
course='999',
display_name='test_course'
)
self.context = {
'user_id': self.user.id,
'course_title': self.course.display_name,
'name': self.user.profile.name,
'course_end_date': get_default_time_display(self.course.end),
}
@file_data('fixtures/test_keyword_coursename_sub.json')
def test_course_name_sub(self, test_info):
""" Tests subbing course name in various scenarios """
course_name = self.course.display_name
result = Ks.substitute_keywords_with_data(
test_info['test_string'], self.context,
)
self.assertIn(course_name, result)
self.assertEqual(result, test_info['expected'])
def test_anonymous_id_sub(self):
"""
Test that anonymous_id is subbed
"""
test_string = "Turn %%USER_ID%% into anonymous id"
anonymous_id = Ks.anonymous_id_from_user_id(self.user.id)
result = Ks.substitute_keywords_with_data(
test_string, self.context,
)
self.assertNotIn('%%USER_ID%%', result)
self.assertIn(anonymous_id, result)
def test_name_sub(self):
"""
Test that the user's full name is correctly subbed
"""
test_string = "This is the test string. subthis: %%USER_FULLNAME%% into user name"
user_name = self.user.profile.name
result = Ks.substitute_keywords_with_data(
test_string, self.context,
)
self.assertNotIn('%%USER_FULLNAME%%', result)
self.assertIn(user_name, result)
def test_illegal_subtag(self):
"""
        Test that sub-ing doesn't occur with illegal tags
"""
test_string = "%%user_id%%"
result = Ks.substitute_keywords_with_data(
test_string, self.context,
)
        self.assertEqual(test_string, result)
def test_should_not_sub(self):
"""
Test that sub-ing doesn't work without subtags
"""
test_string = "this string has no subtags"
result = Ks.substitute_keywords_with_data(
test_string, self.context,
)
        self.assertEqual(test_string, result)
@file_data('fixtures/test_keywordsub_multiple_tags.json')
def test_sub_multiple_tags(self, test_info):
""" Test that subbing works with multiple subtags """
anon_id = '123456789'
with patch('util.keyword_substitution.anonymous_id_from_user_id', lambda user_id: anon_id):
result = Ks.substitute_keywords_with_data(
test_info['test_string'], self.context,
)
self.assertEqual(result, test_info['expected'])
def test_subbing_no_userid_or_courseid(self):
"""
Tests that no subbing occurs if no user_id or no course_id is given.
"""
test_string = 'This string should not be subbed here %%USER_ID%%'
no_course_context = dict(
(key, value) for key, value in self.context.iteritems() if key != 'course_title'
)
result = Ks.substitute_keywords_with_data(test_string, no_course_context)
self.assertEqual(test_string, result)
no_user_id_context = dict(
(key, value) for key, value in self.context.iteritems() if key != 'user_id'
)
result = Ks.substitute_keywords_with_data(test_string, no_user_id_context)
self.assertEqual(test_string, result)
|
yoavfu/xbmc
|
refs/heads/master
|
lib/libUPnP/Platinum/Build/Tools/Scripts/XCodeMake.py
|
262
|
#! /usr/bin/env python
"""
XCode Build Script
$Id: XCodeMake.py 655 2010-09-29 22:40:22Z soothe $
"""
import os
import sys
import getopt
import subprocess
# ------------------------------------------------------------
# usage
# ------------------------------------------------------------
def usage(errMsg):
try:
print 'Error: %s' % (errMsg)
except NameError:
pass
print 'Usage: '
print ' %s -p <path to project> -b [Release|Debug|etc.] -t [All|Platinum|PlatinumFramework|etc.] -s [macosx|iphoneos]' % (sys.argv[0])
print ''
print ' REQUIRED OPTIONS'
print '\t-p <project>'
print '\t-b <configuration>'
print '\t-t <target>'
print '\t-s <sdk>'
print ''
print ' BUILD OPTIONS'
print '\t-c\tMake clean'
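# Example invocation (illustrative values):
#   XCodeMake.py -p Platinum.xcodeproj -b Release -t Platinum -s macosx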
# ------------------------------------------------------------
# main
# ------------------------------------------------------------
try:
opts, args = getopt.getopt(sys.argv[1:], "p:b:t:s:c")
except getopt.GetoptError, (msg, opt):
# print 'Error: invalid argument, %s: %s' % (opt, msg)
usage('invalid argument, %s: %s' % (opt, msg))
sys.exit(2)
# Build options
doingBuild = False
rebuildAll = False
makeClean = False
for opt, arg in opts:
if opt == '-p':
projectFile = arg
doingBuild = True
elif opt == '-b':
buildName = arg
doingBuild = True
elif opt == '-t':
targetName = arg
elif opt == '-s':
sdk = arg
elif opt == '-c':
makeClean = True
try:
buildSwitch = 'build'
if makeClean: buildSwitch = 'clean'
cmd_list = ['xcodebuild', '-project', '%s' % projectFile, '-target', '%s' % targetName, '-sdk', '%s' % sdk, '-configuration', '%s' % buildName, '%s' % buildSwitch]
cmd = " ".join(cmd_list)
print 'Executing:'
print cmd
retVal = subprocess.call(cmd_list)
# only the least sig 8 bits are the real return value
if retVal != 0:
print cmd
print '** BUILD FAILURE **'
sys.exit(retVal)
except NameError, (name):
usage('missing argument %s' % (name))
sys.exit(2)
|
ol-loginov/intellij-community
|
refs/heads/master
|
python/testData/copyPaste/singleLine/Indent41.after.py
|
747
|
class C:
def foo(self):
x = 1
y = 2
|
jhawkesworth/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/f5/bigip_irule.py
|
38
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_irule
short_description: Manage iRules across different modules on a BIG-IP
description:
- Manage iRules across different modules on a BIG-IP.
version_added: 2.2
options:
content:
description:
- When used instead of 'src', sets the contents of an iRule directly to
the specified value. This is for simple values, but can be used with
lookup plugins for anything complex or with formatting. Either one
of C(src) or C(content) must be provided.
type: str
module:
description:
- The BIG-IP module to add the iRule to.
type: str
required: True
choices:
- ltm
- gtm
name:
description:
- The name of the iRule.
type: str
required: True
src:
description:
- The iRule file to interpret and upload to the BIG-IP. Either one
of C(src) or C(content) must be provided.
type: path
state:
description:
- Whether the iRule should exist or not.
type: str
choices:
- present
- absent
default: present
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
version_added: 2.5
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Add the iRule contained in template irule.tcl to the LTM module
bigip_irule:
content: "{{ lookup('template', 'irule.tcl') }}"
module: ltm
name: MyiRule
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
- name: Add the iRule contained in static file irule.tcl to the LTM module
bigip_irule:
module: ltm
name: MyiRule
src: irule.tcl
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
'''
RETURN = r'''
module:
description: The module that the iRule was added to
returned: changed and success
type: str
sample: gtm
src:
description: The filename that included the iRule source
returned: changed and success, when provided
type: str
sample: /opt/src/irules/example1.tcl
content:
description: The content of the iRule that was managed
returned: changed and success
type: str
sample: "when LB_FAILED { set wipHost [LB::server addr] }"
'''
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
class Parameters(AnsibleF5Parameters):
api_map = {
'apiAnonymous': 'content',
}
updatables = [
'content',
]
api_attributes = [
'apiAnonymous',
]
returnables = [
'content', 'src', 'module',
]
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def content(self):
if self._values['content'] is None:
result = self.src_content
else:
result = self._values['content']
return str(result).strip()
@property
def src(self):
if self._values['src'] is None:
return None
return self._values['src']
@property
def src_content(self):
if not os.path.exists(self._values['src']):
raise F5ModuleError(
"The specified 'src' was not found."
)
with open(self._values['src']) as f:
result = f.read()
return result
class Changes(Parameters):
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
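    # compare() yields the desired ('want') value only when it differs from the
    # current ('have') value; a None result is treated as "no change" by
    # BaseManager._update_changed_options().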
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.client = kwargs.get('client', None)
self.module = kwargs.get('module', None)
self.kwargs = kwargs
def exec_module(self):
if self.module.params['module'] == 'ltm':
manager = self.get_manager('ltm')
elif self.module.params['module'] == 'gtm':
manager = self.get_manager('gtm')
else:
raise F5ModuleError(
"An unknown iRule module type was specified"
)
return manager.exec_module()
def get_manager(self, type):
if type == 'ltm':
return LtmManager(**self.kwargs)
elif type == 'gtm':
return GtmManager(**self.kwargs)
class BaseManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.have = None
self.want = ModuleParameters(params=self.module.params)
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state in ["present"]:
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def present(self):
if not self.want.content and not self.want.src:
raise F5ModuleError(
"Either 'content' or 'src' must be provided"
)
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
if not self.exists():
raise F5ModuleError("Failed to create the iRule")
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the iRule")
return True
class LtmManager(BaseManager):
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/rule/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response['selfLink']
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
class GtmManager(BaseManager):
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/gtm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/gtm/rule/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response['selfLink']
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/rule/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
content=dict(),
src=dict(
type='path',
),
name=dict(required=True),
module=dict(
required=True,
choices=['gtm', 'ltm']
),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.mutually_exclusive = [
['content', 'src']
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
mutually_exclusive=spec.mutually_exclusive
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
aurix/lammps-induced-dipole-polarization-pair-style
|
refs/heads/master
|
tools/i-pi/ipi/inputs/thermostats.py
|
33
|
"""Deals with creating the thermostats class.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Chooses between the different possible thermostat options and creates the
appropriate thermostat object, with suitable parameters.
Classes:
InputThermo: Deals with creating the thermostat object from a file, and
writing the checkpoints.
"""
__all__ = ['InputThermo']
import numpy as np
from ipi.utils.depend import *
from ipi.utils.inputvalue import *
from ipi.engine.thermostats import *
class InputThermo(Input):
"""Thermostat input class.
Handles generating the appropriate thermostat class from the xml input file,
    and generating the xml checkpoint tags and data from an instance of the
object.
Attributes:
mode: An optional string giving the type of the thermostat used. Defaults
to 'langevin'.
Fields:
ethermo: An optional float giving the amount of heat energy transferred
to the bath. Defaults to 0.0.
tau: An optional float giving the damping time scale. Defaults to 1.0.
pile_scale: Scaling for the PILE damping relative to the critical damping.
A: An optional array of floats giving the drift matrix. Defaults to 0.0.
C: An optional array of floats giving the static covariance matrix.
Defaults to 0.0.
s: An optional array of floats giving the additional momentum-scaled
momenta in GLE. Defaults to 0.0.
"""
attribs = { "mode": (InputAttribute, { "dtype" : str,
"options" : [ "", "langevin", "svr", "pile_l", "pile_g", "gle", "nm_gle", "nm_gle_g" ],
"help" : "The style of thermostatting. 'langevin' specifies a white noise langevin equation to be attached to the cartesian representation of the momenta. 'svr' attaches a velocity rescaling thermostat to the cartesian representation of the momenta. Both 'pile_l' and 'pile_g' attaches a white noise langevin thermostat to the normal mode representation, with 'pile_l' attaching a local langevin thermostat to the centroid mode and 'pile_g' instead attaching a global velocity rescaling thermostat. 'gle' attaches a coloured noise langevin thermostat to the cartesian representation of the momenta, 'nm_gle' attaches a coloured noise langevin thermostat to the normal mode representation of the momenta and a langevin thermostat to the centroid and 'nm_gle_g' attaches a gle thermostat to the normal modes and a svr thermostat to the centroid."
}) }
fields = { "ethermo" : (InputValue, { "dtype" : float,
"default" : 0.0,
"help" : "The initial value of the thermostat energy. Used when the simulation is restarted to guarantee continuity of the conserved quantity.",
"dimension" : "energy" }),
"tau" : (InputValue, { "dtype" : float,
"default" : 0.0,
"help" : "The friction coefficient for white noise thermostats.",
"dimension" : "time" }),
"pile_scale" : (InputValue, { "dtype" : float,
"default" : 1.0,
"help" : "Scaling for the PILE damping relative to the critical damping."} ),
"A" : (InputArray, { "dtype" : float,
"default" : input_default(factory=np.zeros, args = (0,)),
"help" : "The friction matrix for GLE thermostats.",
"dimension" : "frequency" }),
"C" : (InputArray, { "dtype" : float,
"default" : input_default(factory=np.zeros, args = (0,)),
"help" : "The covariance matrix for GLE thermostats.",
"dimension" : "temperature" }),
"s" : (InputArray, { "dtype" : float,
"default" : input_default(factory=np.zeros, args = (0,)),
"help" : "Input values for the additional momenta in GLE.",
"dimension" : "ms-momentum" })
}
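   # Illustrative XML input (tag names inferred from the attribute/field
   # definitions above; values are placeholders):
   #   <thermostat mode='langevin'>
   #     <tau units='femtosecond'> 100 </tau>
   #   </thermostat>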
default_help = "Simulates an external heat bath to keep the velocity distribution at the correct temperature."
default_label = "THERMOSTATS"
def store(self, thermo):
"""Takes a thermostat instance and stores a minimal representation of it.
Args:
thermo: A thermostat object.
Raises:
TypeError: Raised if the thermostat is not a recognized type.
"""
super(InputThermo,self).store(thermo)
if type(thermo) is ThermoLangevin:
self.mode.store("langevin")
self.tau.store(thermo.tau)
elif type(thermo) is ThermoSVR:
self.mode.store("svr")
self.tau.store(thermo.tau)
elif type(thermo) is ThermoPILE_L:
self.mode.store("pile_l")
self.tau.store(thermo.tau)
self.pile_scale.store(thermo.pilescale)
elif type(thermo) is ThermoPILE_G:
self.mode.store("pile_g")
self.tau.store(thermo.tau)
self.pile_scale.store(thermo.pilescale)
elif type(thermo) is ThermoGLE:
self.mode.store("gle")
self.A.store(thermo.A)
if dget(thermo,"C")._func is None:
self.C.store(thermo.C)
self.s.store(thermo.s)
elif type(thermo) is ThermoNMGLE:
self.mode.store("nm_gle")
self.A.store(thermo.A)
if dget(thermo,"C")._func is None:
self.C.store(thermo.C)
self.s.store(thermo.s)
elif type(thermo) is ThermoNMGLEG:
self.mode.store("nm_gle_g")
self.A.store(thermo.A)
self.tau.store(thermo.tau)
if dget(thermo,"C")._func is None:
self.C.store(thermo.C)
self.s.store(thermo.s)
elif type(thermo) is Thermostat:
self.mode.store("")
else:
raise TypeError("Unknown thermostat mode " + type(thermo).__name__)
self.ethermo.store(thermo.ethermo)
def fetch(self):
"""Creates a thermostat object.
Returns:
A thermostat object of the appropriate type and with the appropriate
parameters given the attributes of the InputThermo object.
Raises:
TypeError: Raised if the thermostat type is not a recognized option.
"""
super(InputThermo,self).fetch()
if self.mode.fetch() == "langevin":
thermo = ThermoLangevin(tau=self.tau.fetch())
elif self.mode.fetch() == "svr":
thermo = ThermoSVR(tau=self.tau.fetch())
elif self.mode.fetch() == "pile_l":
thermo = ThermoPILE_L(tau=self.tau.fetch(), scale=self.pile_scale.fetch())
elif self.mode.fetch() == "pile_g":
thermo = ThermoPILE_G(tau=self.tau.fetch(), scale=self.pile_scale.fetch())
elif self.mode.fetch() == "gle":
rC = self.C.fetch()
if len(rC) == 0:
rC = None
thermo = ThermoGLE(A=self.A.fetch(),C=rC)
thermo.s = self.s.fetch()
elif self.mode.fetch() == "nm_gle":
rC = self.C.fetch()
if len(rC) == 0:
rC = None
thermo = ThermoNMGLE(A=self.A.fetch(),C=rC)
thermo.s = self.s.fetch()
elif self.mode.fetch() == "nm_gle_g":
rC = self.C.fetch()
if len(rC) == 0:
rC = None
thermo = ThermoNMGLEG(A=self.A.fetch(),C=rC, tau=self.tau.fetch())
thermo.s = self.s.fetch()
elif self.mode.fetch() == "" :
thermo=Thermostat()
else:
raise TypeError("Invalid thermostat mode " + self.mode.fetch())
thermo.ethermo = self.ethermo.fetch()
return thermo
def check(self):
"""Checks that the parameter arrays represents a valid thermostat."""
super(InputThermo,self).check()
if self.mode.fetch() in ["langevin", "svr", "pile_l", "pile_g", "nm_gle_g"]:
if self.tau.fetch() <= 0:
raise ValueError("The thermostat friction coefficient must be set to a positive value")
if self.mode.fetch() in ["gle", "nm_gle", "nm_gle_g"]:
pass # PERHAPS DO CHECKS THAT MATRICES SATISFY REASONABLE CONDITIONS (POSITIVE-DEFINITENESS, ETC)
|
ceph/swift
|
refs/heads/master
|
test/unit/common/middleware/test_healthcheck.py
|
5
|
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from webob import Request
from swift.common.middleware import healthcheck
class FakeApp(object):
def __call__(self, env, start_response):
return "FAKE APP"
def start_response(*args):
pass
class TestHealthCheck(unittest.TestCase):
def setUp(self):
self.app = healthcheck.HealthCheckMiddleware(FakeApp())
def test_healthcheck(self):
req = Request.blank('/healthcheck', environ={'REQUEST_METHOD': 'GET'})
resp = self.app(req.environ, start_response)
        self.assertEqual(resp, ['OK'])
    def test_healthcheck_pass(self):
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = self.app(req.environ, start_response)
        self.assertEqual(resp, 'FAKE APP')
if __name__ == '__main__':
unittest.main()
|
peterhinch/micropython-async
|
refs/heads/master
|
v2/gps/astests.py
|
1
|
#!/usr/bin/env python3.5
# -*- coding: utf-8 -*-
# astests.py
# Tests for AS_GPS module (asynchronous GPS device driver)
# Based on tests for MicropyGPS by Michael Calvin McCoy
# https://github.com/inmcm/micropyGPS
# Copyright (c) 2018 Peter Hinch
# Released under the MIT License (MIT) - see LICENSE file
# Run under CPython 3.5+ or MicroPython
import as_GPS
try:
import uasyncio as asyncio
except ImportError:
import asyncio
async def run():
sentence_count = 0
test_RMC = ['$GPRMC,081836,A,3751.65,S,14507.36,E,000.0,360.0,130998,011.3,E*62\n',
'$GPRMC,123519,A,4807.038,N,01131.000,E,022.4,084.4,230394,003.1,W*6A\n',
'$GPRMC,225446,A,4916.45,N,12311.12,W,000.5,054.7,191194,020.3,E*68\n',
'$GPRMC,180041.896,A,3749.1851,N,08338.7891,W,001.9,154.9,240911,,,A*7A\n',
'$GPRMC,180049.896,A,3749.1808,N,08338.7869,W,001.8,156.3,240911,,,A*70\n',
'$GPRMC,092751.000,A,5321.6802,N,00630.3371,W,0.06,31.66,280511,,,A*45\n']
test_VTG = ['$GPVTG,232.9,T,,M,002.3,N,004.3,K,A*01\n']
test_GGA = ['$GPGGA,180050.896,3749.1802,N,08338.7865,W,1,07,1.1,397.4,M,-32.5,M,,0000*6C\n']
test_GSA = ['$GPGSA,A,3,07,11,28,24,26,08,17,,,,,,2.0,1.1,1.7*37\n',
'$GPGSA,A,3,07,02,26,27,09,04,15,,,,,,1.8,1.0,1.5*33\n']
test_GSV = ['$GPGSV,3,1,12,28,72,355,39,01,52,063,33,17,51,272,44,08,46,184,38*74\n',
'$GPGSV,3,2,12,24,42,058,33,11,34,053,33,07,20,171,40,20,15,116,*71\n',
'$GPGSV,3,3,12,04,12,204,34,27,11,324,35,32,11,089,,26,10,264,40*7B\n',
'$GPGSV,3,1,11,03,03,111,00,04,15,270,00,06,01,010,00,13,06,292,00*74\n',
'$GPGSV,3,2,11,14,25,170,00,16,57,208,39,18,67,296,40,19,40,246,00*74\n',
'$GPGSV,3,3,11,22,42,067,42,24,14,311,43,27,05,244,00,,,,*4D\n',
'$GPGSV,4,1,14,22,81,349,25,14,64,296,22,18,54,114,21,51,40,212,*7D\n',
'$GPGSV,4,2,14,24,30,047,22,04,22,312,26,31,22,204,,12,19,088,23*72\n',
'$GPGSV,4,3,14,25,17,127,18,21,16,175,,11,09,315,16,19,05,273,*72\n',
'$GPGSV,4,4,14,32,05,303,,15,02,073,*7A\n']
test_GLL = ['$GPGLL,3711.0942,N,08671.4472,W,000812.000,A,A*46\n',
'$GPGLL,4916.45,N,12311.12,W,225444,A,*1D\n',
'$GPGLL,4250.5589,S,14718.5084,E,092204.999,A*2D\n',
'$GPGLL,0000.0000,N,00000.0000,E,235947.000,V*2D\n']
my_gps = as_GPS.AS_GPS(None)
sentence = ''
for sentence in test_RMC:
my_gps._valid = 0
sentence_count += 1
sentence = await my_gps._update(sentence)
if sentence is None:
print('RMC sentence is invalid.')
else:
print('Parsed a', sentence, 'Sentence')
print('Longitude:', my_gps.longitude())
print('Latitude', my_gps.latitude())
print('UTC Timestamp:', my_gps.utc)
print('Speed:', my_gps.speed())
print('Date Stamp:', my_gps.date)
            print('Course:', my_gps.course)
print('Data is Valid:', bool(my_gps._valid & 1))
print('Compass Direction:', my_gps.compass_direction())
print('')
for sentence in test_GLL:
my_gps._valid = 0
sentence_count += 1
sentence = await my_gps._update(sentence)
if sentence is None:
print('GLL sentence is invalid.')
else:
print('Parsed a', sentence, 'Sentence')
print('Longitude:', my_gps.longitude())
print('Latitude', my_gps.latitude())
print('UTC Timestamp:', my_gps.utc)
print('Data is Valid:', bool(my_gps._valid & 2))
print('')
for sentence in test_VTG:
my_gps._valid = 0
sentence_count += 1
sentence = await my_gps._update(sentence)
if sentence is None:
print('VTG sentence is invalid.')
else:
print('Parsed a', sentence, 'Sentence')
print('Speed:', my_gps.speed())
            print('Course:', my_gps.course)
print('Compass Direction:', my_gps.compass_direction())
print('Data is Valid:', bool(my_gps._valid & 4))
print('')
for sentence in test_GGA:
my_gps._valid = 0
sentence_count += 1
sentence = await my_gps._update(sentence)
if sentence is None:
print('GGA sentence is invalid.')
else:
print('Parsed a', sentence, 'Sentence')
print('Longitude', my_gps.longitude())
print('Latitude', my_gps.latitude())
print('UTC Timestamp:', my_gps.utc)
print('Altitude:', my_gps.altitude)
print('Height Above Geoid:', my_gps.geoid_height)
print('Horizontal Dilution of Precision:', my_gps.hdop)
print('Satellites in Use by Receiver:', my_gps.satellites_in_use)
print('Data is Valid:', bool(my_gps._valid & 8))
print('')
for sentence in test_GSA:
my_gps._valid = 0
sentence_count += 1
sentence = await my_gps._update(sentence)
if sentence is None:
print('GSA sentence is invalid.')
else:
print('Parsed a', sentence, 'Sentence')
print('Satellites Used', my_gps.satellites_used)
print('Horizontal Dilution of Precision:', my_gps.hdop)
print('Vertical Dilution of Precision:', my_gps.vdop)
print('Position Dilution of Precision:', my_gps.pdop)
print('Data is Valid:', bool(my_gps._valid & 16))
print('')
for sentence in test_GSV:
my_gps._valid = 0
sentence_count += 1
sentence = await my_gps._update(sentence)
if sentence is None:
print('GSV sentence is invalid.')
else:
print('Parsed a', sentence, 'Sentence')
print('SV Sentences Parsed', my_gps._last_sv_sentence)
print('SV Sentences in Total', my_gps._total_sv_sentences)
print('# of Satellites in View:', my_gps.satellites_in_view)
print('Data is Valid:', bool(my_gps._valid & 32))
data_valid = my_gps._total_sv_sentences > 0 and my_gps._total_sv_sentences == my_gps._last_sv_sentence
print('Is Satellite Data Valid?:', data_valid)
if data_valid:
print('Satellite Data:', my_gps._satellite_data)
print('Satellites Visible:', list(my_gps._satellite_data.keys()))
print('')
print("Pretty Print Examples:")
print('Latitude (degs):', my_gps.latitude_string(as_GPS.DD))
print('Longitude (degs):', my_gps.longitude_string(as_GPS.DD))
print('Latitude (dms):', my_gps.latitude_string(as_GPS.DMS))
print('Longitude (dms):', my_gps.longitude_string(as_GPS.DMS))
print('Latitude (kml):', my_gps.latitude_string(as_GPS.KML))
print('Longitude (kml):', my_gps.longitude_string(as_GPS.KML))
print('Latitude (degs, mins):', my_gps.latitude_string())
print('Longitude (degs, mins):', my_gps.longitude_string())
print('Speed:', my_gps.speed_string(as_GPS.KPH), 'or',
my_gps.speed_string(as_GPS.MPH), 'or',
my_gps.speed_string(as_GPS.KNOT))
print('Date (Long Format):', my_gps.date_string(as_GPS.LONG))
print('Date (Short D/M/Y Format):', my_gps.date_string(as_GPS.DMY))
print('Date (Short M/D/Y Format):', my_gps.date_string(as_GPS.MDY))
print('Time:', my_gps.time_string())
print()
print('### Final Results ###')
print('Sentences Attempted:', sentence_count)
print('Sentences Found:', my_gps.clean_sentences)
print('Sentences Parsed:', my_gps.parsed_sentences)
print('Unsupported sentences:', my_gps.unsupported_sentences)
print('CRC_Fails:', my_gps.crc_fails)
def run_tests():
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
if __name__ == "__main__":
run_tests()
|
kmoocdev/edx-platform
|
refs/heads/kmooc.rc0
|
common/djangoapps/terrain/stubs/xqueue.py
|
123
|
"""
Stub implementation of XQueue for acceptance tests.
Configuration values:
"default" (dict): Default response to be sent to LMS as a grade for a submission
"<submission>" (dict): Grade response to return for submissions containing the text <submission>
"register_submission_url" (str): URL to send grader payloads when we receive a submission
If no grade response is configured, a default response will be returned.
"""
from .http import StubHttpRequestHandler, StubHttpService, require_params
import json
import copy
from requests import post
from threading import Timer
class StubXQueueHandler(StubHttpRequestHandler):
"""
A handler for XQueue POST requests.
"""
DEFAULT_RESPONSE_DELAY = 2
DEFAULT_GRADE_RESPONSE = {'correct': True, 'score': 1, 'msg': ''}
@require_params('POST', 'xqueue_body', 'xqueue_header')
def do_POST(self):
"""
Handle a POST request from the client
Sends back an immediate success/failure response.
It then POSTs back to the client with grading results.
"""
msg = "XQueue received POST request {0} to path {1}".format(self.post_dict, self.path)
self.log_message(msg)
# Respond only to grading requests
if self._is_grade_request():
# If configured, send the grader payload to other services.
self._register_submission(self.post_dict['xqueue_body'])
try:
xqueue_header = json.loads(self.post_dict['xqueue_header'])
callback_url = xqueue_header['lms_callback_url']
except KeyError:
# If the message doesn't have a header or body,
# then it's malformed. Respond with failure
error_msg = "XQueue received invalid grade request"
self._send_immediate_response(False, message=error_msg)
except ValueError:
# If we could not decode the body or header,
# respond with failure
error_msg = "XQueue could not decode grade request"
self._send_immediate_response(False, message=error_msg)
else:
# Send an immediate response of success
# The grade request is formed correctly
self._send_immediate_response(True)
# Wait a bit before POSTing back to the callback url with the
# grade result configured by the server
# Otherwise, the problem will not realize it's
# queued and it will keep waiting for a response indefinitely
delayed_grade_func = lambda: self._send_grade_response(
callback_url, xqueue_header, self.post_dict['xqueue_body']
)
delay = self.server.config.get('response_delay', self.DEFAULT_RESPONSE_DELAY)
Timer(delay, delayed_grade_func).start()
# If we get a request that's not to the grading submission
# URL, return an error
else:
self._send_immediate_response(False, message="Invalid request URL")
def _send_immediate_response(self, success, message=""):
"""
Send an immediate success/failure message
back to the client
"""
# Send the response indicating success/failure
response_str = json.dumps(
{'return_code': 0 if success else 1, 'content': message}
)
if self._is_grade_request():
self.send_response(
200, content=response_str, headers={'Content-type': 'text/plain'}
)
self.log_message("XQueue: sent response {0}".format(response_str))
else:
self.send_response(500)
def _send_grade_response(self, postback_url, xqueue_header, xqueue_body_json):
"""
POST the grade response back to the client
using the response provided by the server configuration.
Uses the server configuration to determine what response to send:
1) Specific response for submissions containing matching text in `xqueue_body`
2) Default response configured by the client
3) Built-in default response
`postback_url` is the URL the client told us to post back to
`xqueue_header` (dict) is the full header the client sent us, which we will send back
to the client so it can authenticate us.
`xqueue_body_json` (json-encoded string) is the body of the submission the client sent us.
"""
# First check if we have a configured response that matches the submission body
grade_response = None
# This matches the pattern against the JSON-encoded xqueue_body
# This is very simplistic, but sufficient to associate a student response
# with a grading response.
# There is a danger here that a submission will match multiple response patterns.
# Rather than fail silently (which could cause unpredictable behavior in tests)
# we abort and log a debugging message.
for pattern, response in self.server.queue_responses:
if pattern in xqueue_body_json:
if grade_response is None:
grade_response = response
# Multiple matches, so abort and log an error
else:
self.log_error(
"Multiple response patterns matched '{0}'".format(xqueue_body_json),
)
return
# Fall back to the default grade response configured for this queue,
# then to the default response.
if grade_response is None:
grade_response = self.server.config.get(
'default', copy.deepcopy(self.DEFAULT_GRADE_RESPONSE)
)
# Wrap the message in <div> tags to ensure that it is valid XML
if isinstance(grade_response, dict) and 'msg' in grade_response:
grade_response['msg'] = "<div>{0}</div>".format(grade_response['msg'])
data = {
'xqueue_header': json.dumps(xqueue_header),
'xqueue_body': json.dumps(grade_response)
}
post(postback_url, data=data)
self.log_message("XQueue: sent grading response {0} to {1}".format(data, postback_url))
def _register_submission(self, xqueue_body_json):
"""
If configured, send the submission's grader payload to another service.
"""
url = self.server.config.get('register_submission_url')
# If not configured, we do not need to send anything
if url is not None:
try:
xqueue_body = json.loads(xqueue_body_json)
except ValueError:
self.log_error(
"Could not decode XQueue body as JSON: '{0}'".format(xqueue_body_json))
else:
# Retrieve the grader payload, which should be a JSON-encoded dict.
# We pass the payload directly to the service we are notifying, without
# inspecting the contents.
grader_payload = xqueue_body.get('grader_payload')
if grader_payload is not None:
response = post(url, data={'grader_payload': grader_payload})
if not response.ok:
self.log_error(
"Could register submission at URL '{0}'. Status was {1}".format(
url, response.status_code))
else:
self.log_message(
"XQueue body is missing 'grader_payload' key: '{0}'".format(xqueue_body)
)
def _is_grade_request(self):
"""
Return a boolean indicating whether the request was sent to the grading submission URL.
"""
return 'xqueue/submit' in self.path
class StubXQueueService(StubHttpService):
"""
A stub XQueue grading server that responds to POST requests to localhost.
"""
HANDLER_CLASS = StubXQueueHandler
NON_QUEUE_CONFIG_KEYS = ['default', 'register_submission_url']
@property
def queue_responses(self):
"""
Returns a list of (pattern, response) tuples, where `pattern` is a pattern
to match in the XQueue body, and `response` is a dictionary to return
as the response from the grader.
Every configuration key is a queue name,
except for 'default' and 'register_submission_url', which have special meaning.
"""
return {
key: value
for key, value in self.config.iteritems()
if key not in self.NON_QUEUE_CONFIG_KEYS
}.items()
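# Minimal usage sketch, assuming the StubHttpService base class accepts a port
# and exposes the mutable `config` dict this module reads from (both are
# assumptions based on how the attributes are used above):
#
#   server = StubXQueueService(port=8040)
#   server.config['default'] = {'correct': True, 'score': 1, 'msg': 'ok'}
#   server.config['response_delay'] = 0
#   # POST requests to http://localhost:8040/xqueue/submit now get an
#   # immediate ack, followed by a delayed grade callback.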
|
wunderlins/learning
|
refs/heads/master
|
python/zodb/lib/linux64/setuptools/command/develop.py
|
114
|
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os
import glob
import io
from setuptools.extern import six
from pkg_resources import Distribution, PathMetadata, normalize_path
from setuptools.command.easy_install import easy_install
import setuptools
class develop(easy_install):
"""Set up package for development"""
description = "install package in 'development mode'"
user_options = easy_install.user_options + [
("uninstall", "u", "Uninstall this source package"),
("egg-path=", None, "Set the path to be used in the .egg-link file"),
]
boolean_options = easy_install.boolean_options + ['uninstall']
command_consumes_arguments = False # override base
def run(self):
if self.uninstall:
self.multi_version = True
self.uninstall_link()
else:
self.install_for_development()
self.warn_deprecated_options()
def initialize_options(self):
self.uninstall = None
self.egg_path = None
easy_install.initialize_options(self)
self.setup_path = None
self.always_copy_from = '.' # always copy eggs installed in curdir
def finalize_options(self):
ei = self.get_finalized_command("egg_info")
if ei.broken_egg_info:
template = "Please rename %r to %r before using 'develop'"
args = ei.egg_info, ei.broken_egg_info
raise DistutilsError(template % args)
self.args = [ei.egg_name]
easy_install.finalize_options(self)
self.expand_basedirs()
self.expand_dirs()
# pick up setup-dir .egg files only: no .egg-info
self.package_index.scan(glob.glob('*.egg'))
egg_link_fn = ei.egg_name + '.egg-link'
self.egg_link = os.path.join(self.install_dir, egg_link_fn)
self.egg_base = ei.egg_base
if self.egg_path is None:
self.egg_path = os.path.abspath(ei.egg_base)
target = normalize_path(self.egg_base)
egg_path = normalize_path(os.path.join(self.install_dir,
self.egg_path))
if egg_path != target:
raise DistutilsOptionError(
"--egg-path must be a relative path from the install"
" directory to " + target
)
# Make a distribution for the package's source
self.dist = Distribution(
target,
PathMetadata(target, os.path.abspath(ei.egg_info)),
project_name=ei.egg_name
)
p = self.egg_base.replace(os.sep, '/')
if p != os.curdir:
p = '../' * (p.count('/') + 1)
self.setup_path = p
p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
if p != normalize_path(os.curdir):
raise DistutilsOptionError(
"Can't get a consistent path to setup script from"
" installation directory", p, normalize_path(os.curdir))
def install_for_development(self):
if six.PY3 and getattr(self.distribution, 'use_2to3', False):
# If we run 2to3 we cannot do this in place:
# Ensure metadata is up-to-date
self.reinitialize_command('build_py', inplace=0)
self.run_command('build_py')
bpy_cmd = self.get_finalized_command("build_py")
build_path = normalize_path(bpy_cmd.build_lib)
# Build extensions
self.reinitialize_command('egg_info', egg_base=build_path)
self.run_command('egg_info')
self.reinitialize_command('build_ext', inplace=0)
self.run_command('build_ext')
# Fixup egg-link and easy-install.pth
ei_cmd = self.get_finalized_command("egg_info")
self.egg_path = build_path
self.dist.location = build_path
# XXX
self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')
# Build extensions in-place
self.reinitialize_command('build_ext', inplace=1)
self.run_command('build_ext')
self.install_site_py() # ensure that target dir is site-safe
if setuptools.bootstrap_install_from:
self.easy_install(setuptools.bootstrap_install_from)
setuptools.bootstrap_install_from = None
# create an .egg-link in the installation dir, pointing to our egg
log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
if not self.dry_run:
with open(self.egg_link, "w") as f:
f.write(self.egg_path + "\n" + self.setup_path)
# postprocess the installed distro, fixing up .pth, installing scripts,
# and handling requirements
self.process_distribution(None, self.dist, not self.no_deps)
def uninstall_link(self):
if os.path.exists(self.egg_link):
log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
egg_link_file = open(self.egg_link)
contents = [line.rstrip() for line in egg_link_file]
egg_link_file.close()
if contents not in ([self.egg_path],
[self.egg_path, self.setup_path]):
log.warn("Link points to %s: uninstall aborted", contents)
return
if not self.dry_run:
os.unlink(self.egg_link)
if not self.dry_run:
self.update_pth(self.dist) # remove any .pth link to us
if self.distribution.scripts:
# XXX should also check for entry point scripts!
log.warn("Note: you must uninstall or replace scripts manually!")
def install_egg_scripts(self, dist):
if dist is not self.dist:
# Installing a dependency, so fall back to normal behavior
return easy_install.install_egg_scripts(self, dist)
# create wrapper scripts in the script dir, pointing to dist.scripts
# new-style...
self.install_wrapper_scripts(dist)
# ...and old-style
for script_name in self.distribution.scripts or []:
script_path = os.path.abspath(convert_path(script_name))
script_name = os.path.basename(script_path)
with io.open(script_path) as strm:
script_text = strm.read()
self.install_script(dist, script_name, script_text, script_path)
def install_wrapper_scripts(self, dist):
dist = VersionlessRequirement(dist)
return easy_install.install_wrapper_scripts(self, dist)
class VersionlessRequirement(object):
"""
Adapt a pkg_resources.Distribution to simply return the project
name as the 'requirement' so that scripts will work across
multiple versions.
>>> dist = Distribution(project_name='foo', version='1.0')
>>> str(dist.as_requirement())
'foo==1.0'
>>> adapted_dist = VersionlessRequirement(dist)
>>> str(adapted_dist.as_requirement())
'foo'
"""
def __init__(self, dist):
self.__dist = dist
def __getattr__(self, name):
return getattr(self.__dist, name)
def as_requirement(self):
return self.project_name
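# For reference, the .egg-link file written by install_for_development() above
# holds two lines: the egg path and the relative setup path. For a project
# checked out at a hypothetical /home/user/src/foo, the file would contain:
#
#   /home/user/src/foo
#   .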
|
aperigault/ansible
|
refs/heads/devel
|
lib/ansible/modules/messaging/rabbitmq/rabbitmq_user.py
|
24
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Chatham Financial <oss@chathamfinancial.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rabbitmq_user
short_description: Manage RabbitMQ users
description:
- Add or remove RabbitMQ users and assign permissions.
version_added: "1.1"
author: Chris Hoffman (@chrishoffman)
options:
user:
description:
- Name of user to add
required: true
aliases: [username, name]
password:
description:
- Password of user to add.
- To change the password of an existing user, you must also specify
C(update_password=always).
tags:
description:
- User tags, specified as a comma-delimited string.
permissions:
description:
- A list of dicts, where each dict contains vhost, configure_priv, write_priv,
and read_priv, and represents a permission rule for that vhost.
- This option is preferable when you want to manage all of the user's permissions.
- Use the vhost, configure_priv, write_priv, and read_priv options instead
if you only care about permissions for certain vhosts.
default: []
vhost:
description:
- vhost to apply access privileges to.
- This option will be ignored when permissions option is used.
default: /
node:
description:
- Erlang node name of the RabbitMQ node to configure.
default: rabbit
version_added: "1.2"
configure_priv:
description:
- Regular expression to restrict configure actions on a resource
for the specified vhost.
- By default all actions are restricted.
- This option will be ignored when permissions option is used.
default: ^$
write_priv:
description:
- Regular expression to restrict write actions on a resource
for the specified vhost.
- By default all actions are restricted.
- This option will be ignored when permissions option is used.
default: ^$
read_priv:
description:
- Regular expression to restrict read actions on a resource
for the specified vhost.
- By default all actions are restricted.
- This option will be ignored when permissions option is used.
default: ^$
force:
description:
- Deletes and recreates the user.
type: bool
default: 'no'
state:
description:
- Specify if user is to be added or removed
default: present
choices: [present, absent]
update_password:
description:
- C(on_create) will only set the password for newly created users. C(always) will update passwords if they differ.
required: false
default: on_create
choices: [ on_create, always ]
version_added: "2.6"
'''
EXAMPLES = '''
# Add user to server and assign full access control on / vhost.
# The user might have permission rules for other vhosts, but you don't care about those.
- rabbitmq_user:
user: joe
password: changeme
vhost: /
configure_priv: .*
read_priv: .*
write_priv: .*
state: present
# Add user to server and assign full access control on / vhost.
# Any permission rules the user has for other vhosts will be cleared.
- rabbitmq_user:
user: joe
password: changeme
permissions:
- vhost: /
configure_priv: .*
read_priv: .*
write_priv: .*
state: present
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.collections import count
class RabbitMqUser(object):
def __init__(self, module, username, password, tags, permissions,
node, bulk_permissions=False):
self.module = module
self.username = username
self.password = password
self.node = node
if not tags:
self.tags = list()
else:
self.tags = tags.split(',')
self.permissions = permissions
self.bulk_permissions = bulk_permissions
self._tags = None
self._permissions = []
self._rabbitmqctl = module.get_bin_path('rabbitmqctl', True)
def _exec(self, args, run_in_check_mode=False, check_rc=True):
if not self.module.check_mode or run_in_check_mode:
cmd = [self._rabbitmqctl, '-q']
if self.node:
cmd.extend(['-n', self.node])
rc, out, err = self.module.run_command(cmd + args, check_rc=check_rc)
return out.splitlines()
return list()
def get(self):
users = self._exec(['list_users'], True)
for user_tag in users:
if '\t' not in user_tag:
continue
user, tags = user_tag.split('\t')
if user == self.username:
for c in ['[', ']', ' ']:
tags = tags.replace(c, '')
if tags != '':
self._tags = tags.split(',')
else:
self._tags = list()
self._permissions = self._get_permissions()
return True
return False
def _get_permissions(self):
"""Get permissions of the user from RabbitMQ."""
perms_out = [perm for perm in self._exec(['list_user_permissions', self.username], True) if perm.strip()]
perms_list = list()
for perm in perms_out:
vhost, configure_priv, write_priv, read_priv = perm.split('\t')
if not self.bulk_permissions:
if vhost == self.permissions[0]['vhost']:
perms_list.append(dict(vhost=vhost, configure_priv=configure_priv,
write_priv=write_priv, read_priv=read_priv))
break
else:
perms_list.append(dict(vhost=vhost, configure_priv=configure_priv,
write_priv=write_priv, read_priv=read_priv))
return perms_list
def check_password(self):
return self._exec(['authenticate_user', self.username, self.password],
run_in_check_mode=True, check_rc=False)
def add(self):
if self.password is not None:
self._exec(['add_user', self.username, self.password])
else:
self._exec(['add_user', self.username, ''])
self._exec(['clear_password', self.username])
def delete(self):
self._exec(['delete_user', self.username])
def change_password(self):
if self.password is not None:
self._exec(['change_password', self.username, self.password])
else:
self._exec(['clear_password', self.username])
def set_tags(self):
self._exec(['set_user_tags', self.username] + self.tags)
def set_permissions(self):
permissions_to_clear = [permission for permission in self._permissions if permission not in self.permissions]
permissions_to_add = [permission for permission in self.permissions if permission not in self._permissions]
for permission in permissions_to_clear:
cmd = 'clear_permissions -p {vhost} {username}'.format(username=self.username,
vhost=permission['vhost'])
self._exec(cmd.split(' '))
for permission in permissions_to_add:
cmd = ('set_permissions -p {vhost} {username} {configure_priv} {write_priv} {read_priv}'
.format(username=self.username, **permission))
self._exec(cmd.split(' '))
def has_tags_modifications(self):
return set(self.tags) != set(self._tags)
def has_permissions_modifications(self):
def to_permission_tuple(vhost_permission_dict):
return vhost_permission_dict['vhost'], vhost_permission_dict
def permission_dict(vhost_permission_list):
return dict(map(to_permission_tuple, vhost_permission_list))
return permission_dict(self._permissions) != permission_dict(self.permissions)
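# Sketch of the diff performed by set_permissions() above, using hypothetical
# values. If the user currently has
#   self._permissions = [{'vhost': '/', 'configure_priv': '.*',
#                         'write_priv': '.*', 'read_priv': '.*'}]
# and the desired state is
#   self.permissions = [{'vhost': '/test', 'configure_priv': '^$',
#                        'write_priv': '^$', 'read_priv': '^$'}]
# then the '/' rule is removed via `clear_permissions -p / <user>` and the
# '/test' rule is added via `set_permissions -p /test <user> ^$ ^$ ^$`.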
def main():
arg_spec = dict(
user=dict(required=True, aliases=['username', 'name']),
password=dict(default=None, no_log=True),
tags=dict(default=None),
permissions=dict(default=list(), type='list'),
vhost=dict(default='/'),
configure_priv=dict(default='^$'),
write_priv=dict(default='^$'),
read_priv=dict(default='^$'),
force=dict(default='no', type='bool'),
state=dict(default='present', choices=['present', 'absent']),
node=dict(default='rabbit'),
update_password=dict(default='on_create', choices=['on_create', 'always'])
)
module = AnsibleModule(
argument_spec=arg_spec,
supports_check_mode=True
)
username = module.params['user']
password = module.params['password']
tags = module.params['tags']
permissions = module.params['permissions']
vhost = module.params['vhost']
configure_priv = module.params['configure_priv']
write_priv = module.params['write_priv']
read_priv = module.params['read_priv']
force = module.params['force']
state = module.params['state']
node = module.params['node']
update_password = module.params['update_password']
if permissions:
vhosts = map(lambda permission: permission.get('vhost', '/'), permissions)
if any(map(lambda count: count > 1, count(vhosts).values())):
module.fail_json(msg="Error parsing permissions: You can't have two permission dicts for the same vhost")
bulk_permissions = True
else:
perm = {
'vhost': vhost,
'configure_priv': configure_priv,
'write_priv': write_priv,
'read_priv': read_priv
}
permissions.append(perm)
bulk_permissions = False
rabbitmq_user = RabbitMqUser(module, username, password, tags, permissions,
node, bulk_permissions=bulk_permissions)
result = dict(changed=False, user=username, state=state)
if rabbitmq_user.get():
if state == 'absent':
rabbitmq_user.delete()
result['changed'] = True
else:
if force:
rabbitmq_user.delete()
rabbitmq_user.add()
rabbitmq_user.get()
result['changed'] = True
elif update_password == 'always':
if not rabbitmq_user.check_password():
rabbitmq_user.change_password()
result['changed'] = True
if rabbitmq_user.has_tags_modifications():
rabbitmq_user.set_tags()
result['changed'] = True
if rabbitmq_user.has_permissions_modifications():
rabbitmq_user.set_permissions()
result['changed'] = True
elif state == 'present':
rabbitmq_user.add()
rabbitmq_user.set_tags()
rabbitmq_user.set_permissions()
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
|
mesnardo/snake
|
refs/heads/master
|
examples/petibm/plotForceCoefficientsCompareOther.py
|
2
|
"""
Post-processes the force coefficients from a PetIBM simulation and compares
them to those from another simulation.
This script reads the forces, computes the mean forces within a given range,
computes the Strouhal number within a range, plots the force coefficients,
saves the figure, and prints a data-frame that contains the mean values.
"""
from snake.simulation import Simulation
from snake.petibm.simulation import PetIBMSimulation
simulation = PetIBMSimulation(description='PetIBM (present)')
simulation.read_forces()
time_limits = (32.0, 64.0)
simulation.get_mean_forces(limits=time_limits)
simulation.get_strouhal(limits=time_limits, order=200)
other = Simulation(description='',
directory='',
software='')
other.read_forces()
other.get_mean_forces(limits=time_limits)
other.get_strouhal(limits=time_limits, order=200)
simulation.plot_forces(display_coefficients=True,
coefficient=2.0,
display_extrema=True, order=200,
limits=(0.0, 80.0, 0.0, 3.0),
other_simulations=other,
other_coefficients=2.0,
style='mesnardo',
save_name='forceCoefficientsCompareOther')
dataframe = simulation.create_dataframe_forces(display_strouhal=True,
display_coefficients=True,
coefficient=2.0)
dataframe2 = other.create_dataframe_forces(display_strouhal=True,
display_coefficients=True,
coefficient=2.0)
print(dataframe.append(dataframe2))
|
enkaskal/hello
|
refs/heads/master
|
gtest/test/gtest_shuffle_test.py
|
3023
|
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
return '--gtest_shuffle'
def RandomSeedFlag(n):
return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
"""Runs the test program and returns a list of test lists.
Args:
extra_env: a map from environment variables to their values
args: command line flags to pass to gtest_shuffle_test_
Returns:
A list where the i-th element is the list of tests run in the i-th
test iteration.
"""
test_iterations = []
for line in RunAndReturnOutput(extra_env, args).split('\n'):
if line.startswith('----'):
tests = []
test_iterations.append(tests)
elif line.strip():
tests.append(line.strip()) # 'TestCaseName.TestName'
return test_iterations
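# Illustrative output: if the test binary prints
#
#   ----
#   TestCaseA.TestX
#   TestCaseA.TestY
#   ----
#   TestCaseB.TestZ
#
# this function returns
# [['TestCaseA.TestX', 'TestCaseA.TestY'], ['TestCaseB.TestZ']].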
def GetTestCases(tests):
"""Returns a list of test cases in the given full test names.
Args:
tests: a list of full test names
Returns:
A list of the test cases from 'tests', in order of first appearance.
Duplicates are removed.
"""
test_cases = []
for test in tests:
test_case = test.split('.')[0]
if not test_case in test_cases:
test_cases.append(test_case)
return test_cases
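# For example, GetTestCases(['A.x', 'B.y', 'A.z']) returns ['A', 'B']: each
# test case appears once, at the position of its first occurrence.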
def CalculateTestLists():
"""Calculates the list of tests run under different flags."""
if not ALL_TESTS:
ALL_TESTS.extend(
GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[])[0])
if not SHUFFLED_ALL_TESTS:
SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
{}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_ACTIVE_TESTS:
SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_FILTERED_TESTS:
SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
"""Tests test shuffling."""
def setUp(self):
CalculateTestLists()
def testShufflePreservesNumberOfTests(self):
self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
SHUFFLED_FILTERED_TESTS)
self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
SHUFFLED_SHARDED_TESTS)
def testShuffleChangesTestCaseOrder(self):
self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
GetTestCases(SHUFFLED_ALL_TESTS))
self.assert_(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
GetTestCases(SHUFFLED_ACTIVE_TESTS))
self.assert_(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
GetTestCases(SHUFFLED_FILTERED_TESTS))
self.assert_(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
GetTestCases(SHUFFLED_SHARDED_TESTS))
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
'%s appears more than once' % (test,))
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
for test in SHARDED_TESTS:
self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
self.assert_(not non_death_test_found,
'%s appears after a non-death test' % (test,))
else:
non_death_test_found = True
def _VerifyTestCasesDoNotInterleave(self, tests):
test_cases = []
for test in tests:
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
self.assertEqual(1, test_cases.count(test_case),
'Test case %s is not grouped together in %s' %
(test_case, tests))
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
def testShuffleRestoresOrderAfterEachIteration(self):
# Get the test lists in all 3 iterations, using random seed 1, 2,
# and 3 respectively. Google Test picks a different seed in each
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
[tests_with_seed1] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
[tests_with_seed2] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(2)])
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
[tests_with_seed3] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(3)])
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
self.assert_(tests_in_iteration1 != tests_in_iteration2,
tests_in_iteration1)
self.assert_(tests_in_iteration1 != tests_in_iteration3,
tests_in_iteration1)
self.assert_(tests_in_iteration2 != tests_in_iteration3,
tests_in_iteration2)
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
[tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '0'},
[ShuffleFlag(), RandomSeedFlag(1)])
[tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(20)])
[tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '2'},
[ShuffleFlag(), RandomSeedFlag(25)])
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
sorted_active_tests.extend(ACTIVE_TESTS)
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
gtest_test_utils.Main()
|
stwunsch/gnuradio
|
refs/heads/master
|
gr-digital/python/digital/ofdm_packet_utils.py
|
27
|
#
# Copyright 2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import struct
import numpy
from gnuradio import gru
import crc
def conv_packed_binary_string_to_1_0_string(s):
"""
'\xAF' --> '10101111'
"""
r = []
for ch in s:
x = ord(ch)
for i in range(7,-1,-1):
t = (x >> i) & 0x1
r.append(t)
return ''.join(map(lambda x: chr(x + ord('0')), r))
def conv_1_0_string_to_packed_binary_string(s):
"""
'10101111' -> ('\xAF', False)
Basically the inverse of conv_packed_binary_string_to_1_0_string,
but also returns a flag indicating if we had to pad with leading zeros
to get to a multiple of 8.
"""
if not is_1_0_string(s):
raise ValueError, "Input must be a string containing only 0's and 1's"
# pad to multiple of 8
padded = False
rem = len(s) % 8
if rem != 0:
npad = 8 - rem
s = '0' * npad + s
padded = True
assert len(s) % 8 == 0
r = []
i = 0
while i < len(s):
t = 0
for j in range(8):
t = (t << 1) | (ord(s[i + j]) - ord('0'))
r.append(chr(t))
i += 8
return (''.join(r), padded)
def is_1_0_string(s):
if not isinstance(s, str):
return False
for ch in s:
if not ch in ('0', '1'):
return False
return True
def string_to_hex_list(s):
return map(lambda x: hex(ord(x)), s)
def whiten(s, o):
sa = numpy.fromstring(s, numpy.uint8)
z = sa ^ random_mask_vec8[o:len(sa)+o]
return z.tostring()
def dewhiten(s, o):
return whiten(s, o) # self inverse
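# whiten() and dewhiten() are the same XOR operation: for any byte b and mask
# byte m, (b ^ m) ^ m == b, so applying the mask twice with the same offset
# restores the original string, e.g. dewhiten(whiten(s, 5), 5) == s.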
def make_header(payload_len, whitener_offset=0):
# Upper nibble is offset, lower 12 bits is len
val = ((whitener_offset & 0xf) << 12) | (payload_len & 0x0fff)
#print "offset =", whitener_offset, " len =", payload_len, " val=", val
return struct.pack('!HH', val, val)
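# Worked example of the packing above: payload_len=100, whitener_offset=1
# gives val = (1 << 12) | 100 = 4196 (0x1064), and struct.pack('!HH', val,
# val) yields the 4-byte header '\x10\x64\x10\x64' (the value is sent twice).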
def make_packet(payload, samples_per_symbol, bits_per_symbol,
pad_for_usrp=True, whitener_offset=0, whitening=True):
"""
Build a packet, given the payload and whitener offset
Args:
payload: packet payload, len [0, 4096]
samples_per_symbol: samples per symbol (needed for padding calculation) (int)
bits_per_symbol: (needed for padding calculation) (int)
whitener_offset: offset into whitener string to use [0-16)
whitening: Turn whitener on or off (bool)
Packet will have a 4-byte header (length and whitener offset, packed twice)
at the beginning, followed by the optionally whitened payload, CRC-32, and padding.
"""
if not (0 <= whitener_offset < 16):
raise ValueError, "whitener_offset must be between 0 and 15, inclusive (%i)" % (whitener_offset,)
payload_with_crc = crc.gen_and_append_crc32(payload)
#print "outbound crc =", string_to_hex_list(payload_with_crc[-4:])
L = len(payload_with_crc)
MAXLEN = len(random_mask_tuple)
if L > MAXLEN:
raise ValueError, "len(payload) must be in [0, %d]" % (MAXLEN,)
pkt_hd = make_header(L, whitener_offset)
pkt_dt = ''.join((payload_with_crc, '\x55'))
packet_length = len(pkt_hd) + len(pkt_dt)
if pad_for_usrp:
usrp_packing = _npadding_bytes(packet_length, samples_per_symbol, bits_per_symbol) * '\x55'
pkt_dt = pkt_dt + usrp_packing
if(whitening):
pkt = pkt_hd + whiten(pkt_dt, whitener_offset)
else:
pkt = pkt_hd + pkt_dt
#print "make_packet: len(pkt) =", len(pkt)
return pkt
def _npadding_bytes(pkt_byte_len, samples_per_symbol, bits_per_symbol):
"""
Generate sufficient padding such that each packet ultimately ends
up being a multiple of 512 bytes when sent across the USB. We
send 4-byte samples across the USB (16-bit I and 16-bit Q), thus
we want to pad so that after modulation the resulting packet
is a multiple of 128 samples.
Args:
pkt_byte_len: len in bytes of packet, not including padding.
samples_per_symbol: samples per symbol (int)
bits_per_symbol: bits per symbol (log2(modulation order)) (int)
Returns:
number of bytes of padding to append.
"""
modulus = 128
byte_modulus = gru.lcm(modulus/8, samples_per_symbol) * bits_per_symbol / samples_per_symbol
r = pkt_byte_len % byte_modulus
if r == 0:
return 0
return byte_modulus - r
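# Worked example: pkt_byte_len=42, samples_per_symbol=2, bits_per_symbol=1
# gives byte_modulus = lcm(128/8, 2) * 1 / 2 = 16 / 2 = 8; since 42 % 8 == 2,
# 6 bytes of padding are returned.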
def unmake_packet(whitened_payload_with_crc, whitener_offset=0, dewhitening=1):
"""
Return (ok, payload)
Args:
whitened_payload_with_crc: string
whitener_offset: offset into whitener string to use [0-16)
dewhitening: Turn whitener on or off (bool)
"""
if dewhitening:
payload_with_crc = dewhiten(whitened_payload_with_crc, whitener_offset)
else:
payload_with_crc = whitened_payload_with_crc
ok, payload = crc.check_crc32(payload_with_crc)
if 0:
print "payload_with_crc =", string_to_hex_list(payload_with_crc)
print "ok = %r, len(payload) = %d" % (ok, len(payload))
print "payload =", string_to_hex_list(payload)
return ok, payload
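# Receive-side sketch (hypothetical framing): the 4-byte header produced by
# make_header() is parsed for the payload length L and whitener offset, the
# next L bytes (payload + CRC-32) are extracted, and then
#   ok, payload = unmake_packet(whitened_bytes, whitener_offset)
# reports via `ok` whether the CRC-32 verified. The trailing 0x55 marker and
# any USRP padding are not part of the checked bytes.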
# FYI, this PN code is the output of a 15-bit LFSR
random_mask_tuple = (
255, 63, 0, 16, 0, 12, 0, 5, 192, 3, 16, 1, 204, 0, 85, 192,
63, 16, 16, 12, 12, 5, 197, 195, 19, 17, 205, 204, 85, 149, 255, 47,
0, 28, 0, 9, 192, 6, 208, 2, 220, 1, 153, 192, 106, 208, 47, 28,
28, 9, 201, 198, 214, 210, 222, 221, 152, 89, 170, 186, 255, 51, 0, 21,
192, 15, 16, 4, 12, 3, 69, 193, 243, 16, 69, 204, 51, 21, 213, 207,
31, 20, 8, 15, 70, 132, 50, 227, 85, 137, 255, 38, 192, 26, 208, 11,
28, 7, 73, 194, 182, 209, 182, 220, 118, 217, 230, 218, 202, 219, 23, 27,
78, 139, 116, 103, 103, 106, 170, 175, 63, 60, 16, 17, 204, 12, 85, 197,
255, 19, 0, 13, 192, 5, 144, 3, 44, 1, 221, 192, 89, 144, 58, 236,
19, 13, 205, 197, 149, 147, 47, 45, 220, 29, 153, 201, 170, 214, 255, 30,
192, 8, 80, 6, 188, 2, 241, 193, 132, 80, 99, 124, 41, 225, 222, 200,
88, 86, 186, 190, 243, 48, 69, 212, 51, 31, 85, 200, 63, 22, 144, 14,
236, 4, 77, 195, 117, 145, 231, 44, 74, 157, 247, 41, 134, 158, 226, 232,
73, 142, 182, 228, 118, 203, 102, 215, 106, 222, 175, 24, 124, 10, 161, 199,
56, 82, 146, 189, 173, 177, 189, 180, 113, 183, 100, 118, 171, 102, 255, 106,
192, 47, 16, 28, 12, 9, 197, 198, 211, 18, 221, 205, 153, 149, 170, 239,
63, 12, 16, 5, 204, 3, 21, 193, 207, 16, 84, 12, 63, 69, 208, 51,
28, 21, 201, 207, 22, 212, 14, 223, 68, 88, 51, 122, 149, 227, 47, 9,
220, 6, 217, 194, 218, 209, 155, 28, 107, 73, 239, 118, 204, 38, 213, 218,
223, 27, 24, 11, 74, 135, 119, 34, 166, 153, 186, 234, 243, 15, 5, 196,
3, 19, 65, 205, 240, 85, 132, 63, 35, 80, 25, 252, 10, 193, 199, 16,
82, 140, 61, 165, 209, 187, 28, 115, 73, 229, 246, 203, 6, 215, 66, 222,
177, 152, 116, 106, 167, 111, 58, 172, 19, 61, 205, 209, 149, 156, 111, 41,
236, 30, 205, 200, 85, 150, 191, 46, 240, 28, 68, 9, 243, 70, 197, 242,
211, 5, 157, 195, 41, 145, 222, 236, 88, 77, 250, 181, 131, 55, 33, 214,
152, 94, 234, 184, 79, 50, 180, 21, 183, 79, 54, 180, 22, 247, 78, 198,
180, 82, 247, 125, 134, 161, 162, 248, 121, 130, 162, 225, 185, 136, 114, 230,
165, 138, 251, 39, 3, 90, 129, 251, 32, 67, 88, 49, 250, 148, 67, 47,
113, 220, 36, 89, 219, 122, 219, 99, 27, 105, 203, 110, 215, 108, 94, 173,
248, 125, 130, 161, 161, 184, 120, 114, 162, 165, 185, 187, 50, 243, 85, 133,
255, 35, 0, 25, 192, 10, 208, 7, 28, 2, 137, 193, 166, 208, 122, 220,
35, 25, 217, 202, 218, 215, 27, 30, 139, 72, 103, 118, 170, 166, 255, 58,
192, 19, 16, 13, 204, 5, 149, 195, 47, 17, 220, 12, 89, 197, 250, 211,
3, 29, 193, 201, 144, 86, 236, 62, 205, 208, 85, 156, 63, 41, 208, 30,
220, 8, 89, 198, 186, 210, 243, 29, 133, 201, 163, 22, 249, 206, 194, 212,
81, 159, 124, 104, 33, 238, 152, 76, 106, 181, 239, 55, 12, 22, 133, 206,
227, 20, 73, 207, 118, 212, 38, 223, 90, 216, 59, 26, 147, 75, 45, 247,
93, 134, 185, 162, 242, 249, 133, 130, 227, 33, 137, 216, 102, 218, 170, 219,
63, 27, 80, 11, 124, 7, 97, 194, 168, 81, 190, 188, 112, 113, 228, 36,
75, 91, 119, 123, 102, 163, 106, 249, 239, 2, 204, 1, 149, 192, 111, 16,
44, 12, 29, 197, 201, 147, 22, 237, 206, 205, 148, 85, 175, 127, 60, 32,
17, 216, 12, 90, 133, 251, 35, 3, 89, 193, 250, 208, 67, 28, 49, 201,
212, 86, 223, 126, 216, 32, 90, 152, 59, 42, 147, 95, 45, 248, 29, 130,
137, 161, 166, 248, 122, 194, 163, 17, 185, 204, 114, 213, 229, 159, 11, 40,
7, 94, 130, 184, 97, 178, 168, 117, 190, 167, 48, 122, 148, 35, 47, 89,
220, 58, 217, 211, 26, 221, 203, 25, 151, 74, 238, 183, 12, 118, 133, 230,
227, 10, 201, 199, 22, 210, 142, 221, 164, 89, 187, 122, 243, 99, 5, 233,
195, 14, 209, 196, 92, 83, 121, 253, 226, 193, 137, 144, 102, 236, 42, 205,
223, 21, 152, 15, 42, 132, 31, 35, 72, 25, 246, 138, 198, 231, 18, 202,
141, 151, 37, 174, 155, 60, 107, 81, 239, 124, 76, 33, 245, 216, 71, 26,
178, 139, 53, 167, 87, 58, 190, 147, 48, 109, 212, 45, 159, 93, 168, 57,
190, 146, 240, 109, 132, 45, 163, 93, 185, 249, 178, 194, 245, 145, 135, 44,
98, 157, 233, 169, 142, 254, 228, 64, 75, 112, 55, 100, 22, 171, 78, 255,
116, 64, 39, 112, 26, 164, 11, 59, 71, 83, 114, 189, 229, 177, 139, 52,
103, 87, 106, 190, 175, 48, 124, 20, 33, 207, 88, 84, 58, 191, 83, 48,
61, 212, 17, 159, 76, 104, 53, 238, 151, 12, 110, 133, 236, 99, 13, 233,
197, 142, 211, 36, 93, 219, 121, 155, 98, 235, 105, 143, 110, 228, 44, 75,
93, 247, 121, 134, 162, 226, 249, 137, 130, 230, 225, 138, 200, 103, 22, 170,
142, 255, 36, 64, 27, 112, 11, 100, 7, 107, 66, 175, 113, 188, 36, 113,
219, 100, 91, 107, 123, 111, 99, 108, 41, 237, 222, 205, 152, 85, 170, 191,
63, 48, 16, 20, 12, 15, 69, 196, 51, 19, 85, 205, 255, 21, 128, 15,
32, 4, 24, 3, 74, 129, 247, 32, 70, 152, 50, 234, 149, 143, 47, 36,
28, 27, 73, 203, 118, 215, 102, 222, 170, 216, 127, 26, 160, 11, 56, 7,
82, 130, 189, 161, 177, 184, 116, 114, 167, 101, 186, 171, 51, 63, 85, 208,
63, 28, 16, 9, 204, 6, 213, 194, 223, 17, 152, 12, 106, 133, 239, 35,
12, 25, 197, 202, 211, 23, 29, 206, 137, 148, 102, 239, 106, 204, 47, 21,
220, 15, 25, 196, 10, 211, 71, 29, 242, 137, 133, 166, 227, 58, 201, 211,
22, 221, 206, 217, 148, 90, 239, 123, 12, 35, 69, 217, 243, 26, 197, 203,
19, 23, 77, 206, 181, 148, 119, 47, 102, 156, 42, 233, 223, 14, 216, 4,
90, 131, 123, 33, 227, 88, 73, 250, 182, 195, 54, 209, 214, 220, 94, 217,
248, 90, 194, 187, 17, 179, 76, 117, 245, 231, 7, 10, 130, 135, 33, 162,
152, 121, 170, 162, 255, 57, 128, 18, 224, 13, 136, 5, 166, 131, 58, 225,
211, 8, 93, 198, 185, 146, 242, 237, 133, 141, 163, 37, 185, 219, 50, 219,
85, 155, 127, 43, 96, 31, 104, 8, 46, 134, 156, 98, 233, 233, 142, 206,
228, 84, 75, 127, 119, 96, 38, 168, 26, 254, 139, 0, 103, 64, 42, 176,
31, 52, 8, 23, 70, 142, 178, 228, 117, 139, 103, 39, 106, 154, 175, 43,
60, 31, 81, 200, 60, 86, 145, 254, 236, 64, 77, 240, 53, 132, 23, 35,
78, 153, 244, 106, 199, 111, 18, 172, 13, 189, 197, 177, 147, 52, 109, 215,
109, 158, 173, 168, 125, 190, 161, 176, 120, 116, 34, 167, 89, 186, 186, 243,
51, 5, 213, 195, 31, 17, 200, 12, 86, 133, 254, 227, 0, 73, 192, 54,
208, 22, 220, 14, 217, 196, 90, 211, 123, 29, 227, 73, 137, 246, 230, 198,
202, 210, 215, 29, 158, 137, 168, 102, 254, 170, 192, 127, 16, 32, 12, 24,
5, 202, 131, 23, 33, 206, 152, 84, 106, 191, 111, 48, 44, 20, 29, 207,
73, 148, 54, 239, 86, 204, 62, 213, 208, 95, 28, 56, 9, 210, 134, 221,
162, 217, 185, 154, 242, 235, 5, 143, 67, 36, 49, 219, 84, 91, 127, 123,
96, 35, 104, 25, 238, 138, 204, 103, 21, 234, 143, 15, 36, 4, 27, 67,
75, 113, 247, 100, 70, 171, 114, 255, 101, 128, 43, 32, 31, 88, 8, 58,
134, 147, 34, 237, 217, 141, 154, 229, 171, 11, 63, 71, 80, 50, 188, 21,
177, 207, 52, 84, 23, 127, 78, 160, 52, 120, 23, 98, 142, 169, 164, 126,
251, 96, 67, 104, 49, 238, 148, 76, 111, 117, 236, 39, 13, 218, 133, 155,
35, 43, 89, 223, 122, 216, 35, 26, 153, 203, 42, 215, 95, 30, 184, 8,
114, 134, 165, 162, 251, 57, 131, 82, 225, 253, 136, 65, 166, 176, 122, 244,
35, 7, 89, 194, 186, 209, 179, 28, 117, 201, 231, 22, 202, 142, 215, 36,
94, 155, 120, 107, 98, 175, 105, 188, 46, 241, 220, 68, 89, 243, 122, 197,
227, 19, 9, 205, 198, 213, 146, 223, 45, 152, 29, 170, 137, 191, 38, 240,
26, 196, 11, 19, 71, 77, 242, 181, 133, 183, 35, 54, 153, 214, 234, 222,
207, 24, 84, 10, 191, 71, 48, 50, 148, 21, 175, 79, 60, 52, 17, 215,
76, 94, 181, 248, 119, 2, 166, 129, 186, 224, 115, 8, 37, 198, 155, 18,
235, 77, 143, 117, 164, 39, 59, 90, 147, 123, 45, 227, 93, 137, 249, 166,
194, 250, 209, 131, 28, 97, 201, 232, 86, 206, 190, 212, 112, 95, 100, 56,
43, 82, 159, 125, 168, 33, 190, 152, 112, 106, 164, 47, 59, 92, 19, 121,
205, 226, 213, 137, 159, 38, 232, 26, 206, 139, 20, 103, 79, 106, 180, 47,
55, 92, 22, 185, 206, 242, 212, 69, 159, 115, 40, 37, 222, 155, 24, 107,
74, 175, 119, 60, 38, 145, 218, 236, 91, 13, 251, 69, 131, 115, 33, 229,
216, 75, 26, 183, 75, 54, 183, 86, 246, 190, 198, 240, 82, 196, 61, 147,
81, 173, 252, 125, 129, 225, 160, 72, 120, 54, 162, 150, 249, 174, 194, 252,
81, 129, 252, 96, 65, 232, 48, 78, 148, 52, 111, 87, 108, 62, 173, 208,
125, 156, 33, 169, 216, 126, 218, 160, 91, 56, 59, 82, 147, 125, 173, 225,
189, 136, 113, 166, 164, 122, 251, 99, 3, 105, 193, 238, 208, 76, 92, 53,
249, 215, 2, 222, 129, 152, 96, 106, 168, 47, 62, 156, 16, 105, 204, 46,
213, 220, 95, 25, 248, 10, 194, 135, 17, 162, 140, 121, 165, 226, 251, 9,
131, 70, 225, 242, 200, 69, 150, 179, 46, 245, 220, 71, 25, 242, 138, 197,
167, 19, 58, 141, 211, 37, 157, 219, 41, 155, 94, 235, 120, 79, 98, 180,
41, 183, 94, 246, 184, 70, 242, 178, 197, 181, 147, 55, 45, 214, 157, 158,
233, 168, 78, 254, 180, 64, 119, 112, 38, 164, 26, 251, 75, 3, 119, 65,
230, 176, 74, 244, 55, 7, 86, 130, 190, 225, 176, 72, 116, 54, 167, 86,
250, 190, 195, 48, 81, 212, 60, 95, 81, 248, 60, 66, 145, 241, 172, 68,
125, 243, 97, 133, 232, 99, 14, 169, 196, 126, 211, 96, 93, 232, 57, 142,
146, 228, 109, 139, 109, 167, 109, 186, 173, 179, 61, 181, 209, 183, 28, 118,
137, 230, 230, 202, 202, 215, 23, 30, 142, 136, 100, 102, 171, 106, 255, 111,
0, 44, 0, 29, 192, 9, 144, 6, 236, 2, 205, 193, 149, 144, 111, 44,
44, 29, 221, 201, 153, 150, 234, 238, 207, 12, 84, 5, 255, 67, 0, 49,
192, 20, 80, 15, 124, 4, 33, 195, 88, 81, 250, 188, 67, 49, 241, 212,
68, 95, 115, 120, 37, 226, 155, 9, 171, 70, 255, 114, 192, 37, 144, 27,
44, 11, 93, 199, 121, 146, 162, 237, 185, 141, 178, 229, 181, 139, 55, 39,
86, 154, 190, 235, 48, 79, 84, 52, 63, 87, 80, 62, 188, 16, 113, 204,
36, 85, 219, 127, 27, 96, 11, 104, 7, 110, 130, 172, 97, 189, 232, 113,
142, 164, 100, 123, 107, 99, 111, 105, 236, 46, 205, 220, 85, 153, 255, 42,
192, 31, 16, 8, 12, 6, 133, 194, 227, 17, 137, 204, 102, 213, 234, 223,
15, 24, 4, 10, 131, 71, 33, 242, 152, 69, 170, 179, 63, 53, 208, 23,
28, 14, 137, 196, 102, 211, 106, 221, 239, 25, 140, 10, 229, 199, 11, 18,
135, 77, 162, 181, 185, 183, 50, 246, 149, 134, 239, 34, 204, 25, 149, 202,
239, 23, 12, 14, 133, 196, 99, 19, 105, 205, 238, 213, 140, 95, 37, 248,
27, 2, 139, 65, 167, 112, 122, 164, 35, 59, 89, 211, 122, 221, 227, 25,
137, 202, 230, 215, 10, 222, 135, 24, 98, 138, 169, 167, 62, 250, 144, 67,
44, 49, 221, 212, 89, 159, 122, 232, 35, 14, 153, 196, 106, 211, 111, 29,
236, 9, 141, 198, 229, 146, 203, 45, 151, 93, 174, 185, 188, 114, 241, 229,
132, 75, 35, 119, 89, 230, 186, 202, 243, 23, 5, 206, 131, 20, 97, 207,
104, 84, 46, 191, 92, 112, 57, 228, 18, 203, 77, 151, 117, 174, 167, 60,
122, 145, 227, 44, 73, 221, 246, 217, 134, 218, 226, 219, 9, 155, 70, 235,
114, 207, 101, 148, 43, 47, 95, 92, 56, 57, 210, 146, 221, 173, 153, 189,
170, 241, 191, 4, 112, 3, 100, 1, 235, 64, 79, 112, 52, 36, 23, 91,
78, 187, 116, 115, 103, 101, 234, 171, 15, 63, 68, 16, 51, 76, 21, 245,
207, 7, 20, 2, 143, 65, 164, 48, 123, 84, 35, 127, 89, 224, 58, 200,
19, 22, 141, 206, 229, 148, 75, 47, 119, 92, 38, 185, 218, 242, 219, 5,
155, 67, 43, 113, 223, 100, 88, 43, 122, 159, 99, 40, 41, 222, 158, 216,
104, 90, 174, 187, 60, 115, 81, 229, 252, 75, 1, 247, 64, 70, 176, 50,
244, 21, 135, 79, 34, 180, 25, 183, 74, 246, 183, 6, 246, 130, 198, 225,
146, 200, 109, 150, 173, 174, 253, 188, 65, 177, 240, 116, 68, 39, 115, 90,
165, 251, 59, 3, 83, 65, 253, 240, 65, 132, 48, 99, 84, 41, 255, 94,
192, 56, 80, 18, 188, 13, 177, 197, 180, 83, 55, 125, 214, 161, 158, 248,
104, 66, 174, 177, 188, 116, 113, 231, 100, 74, 171, 119, 63, 102, 144, 42,
236, 31, 13, 200, 5, 150, 131, 46, 225, 220, 72, 89, 246, 186, 198, 243,
18, 197, 205, 147, 21, 173, 207, 61, 148, 17, 175, 76, 124, 53, 225, 215,
8, 94, 134, 184, 98, 242, 169, 133, 190, 227, 48, 73, 212, 54, 223, 86,
216, 62, 218, 144, 91, 44, 59, 93, 211, 121, 157, 226, 233, 137, 142, 230,
228, 74, 203, 119, 23, 102, 142, 170, 228, 127, 11, 96, 7, 104, 2, 174,
129, 188, 96, 113, 232, 36, 78, 155, 116, 107, 103, 111, 106, 172, 47, 61,
220, 17, 153, 204, 106, 213, 239, 31, 12, 8, 5, 198, 131, 18, 225, 205,
136, 85, 166, 191, 58, 240, 19, 4, 13, 195, 69, 145, 243, 44, 69, 221,
243, 25, 133, 202, 227, 23, 9, 206, 134, 212, 98, 223, 105, 152, 46, 234,
156, 79, 41, 244, 30, 199, 72, 82, 182, 189, 182, 241, 182, 196, 118, 211,
102, 221, 234, 217, 143, 26, 228, 11, 11, 71, 71, 114, 178, 165, 181, 187,
55, 51, 86, 149, 254, 239, 0, 76, 0, 53, 192, 23, 16, 14, 140, 4,
101, 195, 107, 17, 239, 76, 76, 53, 245, 215, 7, 30, 130, 136, 97, 166,
168, 122, 254, 163, 0, 121, 192, 34, 208, 25, 156, 10, 233, 199, 14, 210,
132, 93, 163, 121, 185, 226, 242, 201, 133, 150, 227, 46, 201, 220, 86, 217,
254, 218, 192, 91, 16, 59, 76, 19, 117, 205, 231, 21, 138, 143, 39, 36,
26, 155, 75, 43, 119, 95, 102, 184, 42, 242, 159, 5, 168, 3, 62, 129,
208, 96, 92, 40, 57, 222, 146, 216, 109, 154, 173, 171, 61, 191, 81, 176,
60, 116, 17, 231, 76, 74, 181, 247, 55, 6, 150, 130, 238, 225, 140, 72,
101, 246, 171, 6, 255, 66, 192, 49, 144, 20, 108, 15, 109, 196, 45, 147,
93, 173, 249, 189, 130, 241, 161, 132, 120, 99, 98, 169, 233, 190, 206, 240,
84, 68, 63, 115, 80, 37, 252, 27, 1, 203, 64, 87, 112, 62, 164, 16,
123, 76, 35, 117, 217, 231, 26, 202, 139, 23, 39, 78, 154, 180, 107, 55,
111, 86, 172, 62, 253, 208, 65, 156, 48, 105, 212, 46, 223, 92, 88, 57,
250, 146, 195, 45, 145, 221, 172, 89, 189, 250, 241, 131, 4, 97, 195, 104,
81, 238, 188, 76, 113, 245, 228, 71, 11, 114, 135, 101, 162, 171, 57, 191,
82, 240, 61, 132, 17, 163, 76, 121, 245, 226, 199, 9, 146, 134, 237, 162,
205, 185, 149, 178, 239, 53, 140, 23, 37, 206, 155, 20, 107, 79, 111, 116,
44, 39, 93, 218, 185, 155, 50, 235, 85, 143, 127, 36, 32, 27, 88, 11,
122, 135, 99, 34, 169, 217, 190, 218, 240, 91, 4, 59, 67, 83, 113, 253,
228, 65, 139, 112, 103, 100, 42, 171, 95, 63, 120, 16, 34, 140, 25, 165,
202, 251, 23, 3, 78, 129, 244, 96, 71, 104, 50, 174, 149, 188, 111, 49,
236, 20, 77, 207, 117, 148, 39, 47, 90, 156, 59, 41, 211, 94, 221, 248,
89, 130, 186, 225, 179, 8, 117, 198, 167, 18, 250, 141, 131, 37, 161, 219,
56, 91, 82, 187, 125, 179, 97, 181, 232, 119, 14, 166, 132, 122, 227, 99,
9, 233, 198, 206, 210, 212, 93, 159, 121, 168, 34, 254, 153, 128, 106, 224,
47, 8, 28, 6, 137, 194, 230, 209, 138, 220, 103, 25, 234, 138, 207, 39,
20, 26, 143, 75, 36, 55, 91, 86, 187, 126, 243, 96, 69, 232, 51, 14,
149, 196, 111, 19, 108, 13, 237, 197, 141, 147, 37, 173, 219, 61, 155, 81,
171, 124, 127, 97, 224, 40, 72, 30, 182, 136, 118, 230, 166, 202, 250, 215,
3, 30, 129, 200, 96, 86, 168, 62, 254, 144, 64, 108, 48, 45, 212, 29,
159, 73, 168, 54, 254, 150, 192, 110, 208, 44, 92, 29, 249, 201, 130, 214,
225, 158, 200, 104, 86, 174, 190, 252, 112, 65, 228, 48, 75, 84, 55, 127,
86, 160, 62, 248, 16, 66, 140, 49, 165, 212, 123, 31, 99, 72, 41, 246,
158, 198, 232, 82, 206, 189, 148, 113, 175, 100, 124, 43, 97, 223, 104, 88,
46, 186, 156, 115, 41, 229, 222, 203, 24, 87, 74, 190, 183, 48, 118, 148,
38, 239, 90, 204, 59, 21, 211, 79, 29, 244, 9, 135, 70, 226, 178, 201,
181, 150, 247, 46, 198, 156, 82, 233, 253, 142, 193, 164, 80, 123, 124, 35,
97, 217, 232, 90, 206, 187, 20, 115, 79, 101, 244, 43, 7, 95, 66, 184,
49, 178, 148, 117, 175, 103, 60, 42, 145, 223, 44, 88, 29, 250, 137, 131,
38, 225, 218, 200, 91, 22, 187, 78, 243, 116, 69, 231, 115, 10, 165, 199,
59, 18, 147, 77, 173, 245, 189, 135, 49, 162, 148, 121, 175, 98, 252, 41,
129, 222, 224, 88, 72, 58, 182, 147, 54, 237, 214, 205, 158, 213, 168, 95,
62, 184, 16, 114, 140, 37, 165, 219, 59, 27, 83, 75, 125, 247, 97, 134,
168, 98, 254, 169, 128, 126, 224, 32, 72, 24, 54, 138, 150, 231, 46, 202,
156, 87, 41, 254, 158, 192, 104, 80, 46, 188, 28, 113, 201, 228, 86, 203,
126, 215, 96, 94, 168, 56, 126, 146, 160, 109, 184, 45, 178, 157, 181, 169,
183, 62, 246, 144, 70, 236, 50, 205, 213, 149, 159, 47, 40, 28, 30, 137,
200, 102, 214, 170, 222, 255, 24, 64, 10, 176, 7, 52, 2, 151, 65, 174,
176, 124, 116, 33, 231, 88, 74, 186, 183, 51, 54, 149, 214, 239, 30, 204,
8, 85, 198, 191, 18, 240, 13, 132, 5, 163, 67, 57, 241, 210, 196, 93,
147, 121, 173, 226, 253, 137, 129, 166, 224, 122, 200, 35, 22, 153, 206, 234,
212, 79, 31, 116, 8, 39, 70, 154, 178, 235, 53, 143, 87, 36, 62, 155,
80, 107, 124, 47, 97, 220, 40, 89, 222, 186, 216, 115, 26, 165, 203, 59,
23, 83, 78, 189, 244, 113, 135, 100, 98, 171, 105, 191, 110, 240, 44, 68,
29, 243, 73, 133, 246, 227, 6, 201, 194, 214, 209, 158, 220, 104, 89, 238,
186, 204, 115, 21, 229, 207, 11, 20, 7, 79, 66, 180, 49, 183, 84, 118,
191, 102, 240, 42, 196, 31, 19, 72, 13, 246, 133, 134, 227, 34, 201, 217,
150, 218, 238, 219, 12, 91, 69, 251, 115, 3, 101, 193, 235, 16, 79, 76,
52, 53, 215, 87, 30, 190, 136, 112, 102, 164, 42, 251, 95, 3, 120, 1,
226, 128, 73, 160, 54, 248, 22, 194, 142, 209, 164, 92, 123, 121, 227, 98,
201, 233, 150, 206, 238, 212, 76, 95, 117, 248, 39, 2, 154, 129, 171, 32,
127, 88, 32, 58, 152, 19, 42, 141, 223, 37, 152, 27, 42, 139, 95, 39,
120, 26, 162, 139, 57, 167, 82, 250, 189, 131, 49, 161, 212, 120, 95, 98,
184, 41, 178, 158, 245, 168, 71, 62, 178, 144, 117, 172, 39, 61, 218, 145,
155, 44, 107, 93, 239, 121, 140, 34, 229, 217, 139, 26, 231, 75, 10, 183,
71, 54, 178, 150, 245, 174, 199, 60, 82, 145, 253, 172, 65, 189, 240, 113,
132, 36, 99, 91, 105, 251, 110, 195, 108, 81, 237, 252, 77, 129, 245, 160,
71, 56, 50, 146, 149, 173, 175, 61, 188, 17, 177, 204, 116, 85, 231, 127,
10, 160, 7, 56, 2, 146, 129, 173, 160, 125, 184, 33, 178, 152, 117, 170,
167, 63, 58, 144, 19, 44, 13, 221, 197, 153, 147, 42, 237, 223, 13, 152,
5, 170, 131, 63, 33, 208, 24, 92, 10, 185, 199, 50, 210, 149, 157, 175,
41, 188, 30, 241, 200, 68, 86, 179, 126, 245, 224, 71, 8, 50, 134, 149,
162, 239, 57, 140, 18, 229, 205, 139, 21, 167, 79, 58, 180, 19, 55, 77,
214, 181, 158, 247, 40, 70, 158, 178, 232, 117, 142, 167, 36, 122, 155, 99,
43, 105, 223, 110, 216, 44, 90, 157, 251, 41, 131, 94, 225, 248, 72, 66,
182, 177, 182, 244, 118, 199, 102, 210, 170, 221, 191, 25, 176, 10, 244, 7,
7, 66, 130, 177, 161, 180, 120, 119, 98, 166, 169, 186, 254, 243, 0, 69,
192, 51, 16, 21, 204, 15, 21, 196, 15, 19, 68, 13, 243, 69, 133, 243,
35, 5, 217, 195, 26, 209, 203, 28, 87, 73, 254, 182, 192, 118, 208, 38,
220, 26, 217, 203, 26, 215, 75, 30, 183, 72, 118, 182, 166, 246, 250, 198,
195, 18, 209, 205, 156, 85, 169, 255, 62, 192, 16, 80, 12, 60, 5, 209,
195, 28, 81, 201, 252, 86, 193, 254, 208, 64, 92, 48, 57, 212, 18, 223,
77, 152, 53, 170, 151, 63, 46, 144, 28, 108, 9, 237, 198, 205, 146, 213,
173, 159, 61, 168, 17, 190, 140, 112, 101, 228, 43, 11, 95, 71, 120, 50,
162, 149, 185, 175, 50, 252, 21, 129, 207, 32, 84, 24, 63, 74, 144, 55,
44, 22, 157, 206, 233, 148, 78, 239, 116, 76, 39, 117, 218, 167, 27, 58,
139, 83, 39, 125, 218, 161, 155, 56, 107, 82, 175, 125, 188, 33, 177, 216,
116, 90, 167, 123, 58, 163, 83, 57, 253, 210, 193, 157, 144, 105, 172, 46,
253, 220, 65, 153, 240, 106, 196, 47, 19, 92, 13, 249, 197, 130, 211, 33,
157, 216, 105, 154, 174, 235, 60, 79, 81, 244, 60, 71, 81, 242, 188, 69,
177, 243, 52, 69, 215, 115, 30, 165, 200, 123, 22, 163, 78, 249, 244, 66,
199, 113, 146, 164, 109, 187, 109, 179, 109, 181, 237, 183, 13, 182, 133, 182,
227, 54, 201, 214, 214, 222, 222, 216, 88, 90, 186, 187, 51, 51, 255, 63 )
random_mask_vec8 = numpy.array(random_mask_tuple, numpy.uint8)
|
peterayeni/libforensics
|
refs/heads/master
|
code/lf/apps/msoffice/word/metadata.py
|
13
|
# Copyright 2009 Michael Murr
#
# This file is part of LibForensics.
#
# LibForensics is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LibForensics is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with LibForensics. If not, see <http://www.gnu.org/licenses/>.
"""
Metadata from Microsoft Word documents.
.. moduleauthor:: Michael Murr (mmurr@codeforensics.net)
"""
__docformat__ = "restructuredtext en"
__all__ = [
"WordMetadata"
]
from datetime import date
from lf.apps.msoffice.word.objects import Fib, SttbShortUnicode
class WordMetadata():
"""
Represents metadata from a Microsoft Word document.
.. attribute:: magic
The magic number from the FIB.
.. attribute:: version
The file format version from the FIB.
.. attribute:: lang_id
The language identifier from the FIB.
.. attribute:: encryption_key
The encryption key from the FIB.
.. attribute:: is_template
True if the document is a template.
.. attribute:: is_glossary
True if the document is a glossary.
.. attribute:: is_complex
True if the document is in complex fast-saved format.
.. attribute:: has_pictures
True if the document has pictures.
.. attribute:: is_encrypted
True if the document is encrypted.
.. attribute:: is_far_east_encoded
True if the document is encoded for the Far East.
.. attribute:: created_environment
The environment the document was created in.
.. attribute:: saved_mac
True if the document was last saved on a Mac.
.. attribute:: magic_created_by
The magic number of the application that created the document.
.. attribute:: magic_revised_by
The magic number of the application that last revised the document.
.. attribute:: created_build_date
The build date of the application that created the document.
.. attribute:: revised_build_date
The build date of the application that last revised the document.
.. attribute:: last_saved_by
A list of the last authors to save the document.
.. attribute:: last_saved_locations
A list of the last locations the document was saved to (entries
correspond with last_saved_by).
.. attribute:: associated_strings
Associated strings.
.. attribute:: users_roles
A list of (user name, role) pairs for protected content.
"""
def __init__(self, cfb):
"""
Initializes a WordMetadata object.
:parameters:
cfb
A CompoundFile object for the word document.
"""
for entry in cfb.dir_entries.values():
if entry.name == "WordDocument":
stream_id = entry.sid
# end if
# end for
fib = Fib(cfb.get_stream(stream_id))
if fib.header.whichTblStm:
table_name = "1Table"
else:
table_name = "0Table"
# end if
for entry in cfb.dir_entries.values():
if entry.name == table_name:
stream_id = entry.sid
# end if
# end for
table_stream = cfb.get_stream(stream_id, ignore_size=True)
self.magic = fib.header.wIdent
self.version = fib.header.nFib
self.lang_id = fib.header.lid
self.encryption_key = fib.header.lKey
self.is_template = bool(fib.header.dot)
self.is_glossary = bool(fib.header.glsy)
self.is_complex = bool(fib.header.complex)
self.has_pictures = bool(fib.header.hasPic)
self.is_encrypted = bool(fib.header.encrypted)
self.is_far_east_encoded = bool(fib.header.farEast)
self.saved_mac = bool(fib.header.mac)
self.created_environment = fib.header.envr
self.magic_created_by = fib.shorts.wMagicCreated
self.magic_revised_by = fib.shorts.wMagicRevised
created_date = fib.longs.lProductCreated
year = (created_date % 100) + 1900
day = (created_date // 100) % 100
month = (created_date // 10000) % 100
self.created_build_date = date(year, month, day)
revised_date = fib.longs.lProductRevised
year = (revised_date % 100) + 1900
day = (revised_date // 100) % 100
month = (revised_date // 10000) % 100
self.revised_build_date = date(year, month, day)
if fib.fc_lcb.sttbSavedBy.lcb:
saved_by = SttbShortUnicode(
table_stream, fib.fc_lcb.sttbSavedBy.fc
)
last_saved_by = list(saved_by.data[::2])
last_saved_locations = list(saved_by.data[1::2])
else:
last_saved_by = list()
last_saved_locations = list()
# end if
if fib.fc_lcb.sttbfAssoc.lcb:
assoc = SttbShortUnicode(table_stream, fib.fc_lcb.sttbfAssoc.fc)
associated_strings = assoc.data
else:
associated_strings = list()
# end if
if hasattr(fib.fc_lcb, "sttbProtUser"):
if fib.fc_lcb.sttbProtUser.lcb:
prot_users = SttbShortUnicode(
table_stream, fib.fc_lcb.sttbProtUser.fc
)
users_roles = list(zip(prot_users.data, prot_users.extra_data))
else:
users_roles = list()
# end if
else:
users_roles = list()
# end if
self.last_saved_by = last_saved_by
self.last_saved_locations = last_saved_locations
self.associated_strings = associated_strings
self.users_roles = users_roles
# end def __init__
# end class WordMetadata
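# A minimal usage sketch (hedged: the CompoundFile import path and the
# raw-stream helper below are assumptions about the wider LibForensics API,
# not confirmed by this module):
#
# from lf.io import raw
# from lf.apps.msoffice.cfb import CompoundFile
#
# cfb = CompoundFile(raw.open("sample.doc"))
# meta = WordMetadata(cfb)
# print(meta.version, meta.created_build_date, meta.last_saved_by)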
|
andybab/Impala
|
refs/heads/master
|
tests/util/hdfs_util.py
|
6
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Hdfs access utilities
from xml.etree.ElementTree import parse
from pywebhdfs.webhdfs import PyWebHdfsClient, errors, _raise_pywebhdfs_exception
import getpass
import types
import requests, httplib
class PyWebHdfsClientWithChmod(PyWebHdfsClient):
def chmod(self, path, permission):
"""Set the permission of 'path' to 'permission' (specified as an octal string, e.g.
'775'"""
uri = self._create_uri(path, "SETPERMISSION", permission=permission)
response = requests.put(uri, allow_redirects=True)
if response.status_code != httplib.OK:
_raise_pywebhdfs_exception(response.status_code, response.text)
return True
class HdfsConfig(object):
"""Reads an XML configuration file (produced by a mini-cluster) into a dictionary
accessible via get()"""
def __init__(self, filename):
self.conf = {}
tree = parse(filename)
for property in tree.getroot().getiterator('property'):
self.conf[property.find('name').text] = property.find('value').text
def get(self, key):
return self.conf.get(key)
def get_hdfs_client_from_conf(conf):
"""Returns a new HTTP client for an HDFS cluster using an HdfsConfig object"""
hostport = conf.get('dfs.namenode.http-address')
if hostport is None:
raise Exception("dfs.namenode.http-address not found in config")
host, port = hostport.split(":")
return get_hdfs_client(host=host, port=port)
def __pyweb_hdfs_client_exists(self, path):
"""The PyWebHdfsClient doesn't provide an API to cleanly detect if a file or directory
exists. This method is bound to each client that is created so tests can simply call
hdfs_client.exists('path') and get back a bool.
"""
try:
self.get_file_dir_status(path)
except errors.FileNotFound:
return False
return True
def get_hdfs_client(host, port, user_name=getpass.getuser()):
"""Returns a new HTTP client for an HDFS cluster using an explict host:port pair"""
hdfs_client = PyWebHdfsClientWithChmod(host=host, port=port, user_name=user_name)
# Bind our "exists" method to hdfs_client.exists
hdfs_client.exists = types.MethodType(__pyweb_hdfs_client_exists, hdfs_client)
return hdfs_client
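# A minimal usage sketch built only from the helpers defined above (the
# config path and directory name are placeholders; make_dir comes from the
# PyWebHdfsClient base class):
#
# conf = HdfsConfig("/path/to/hdfs-site.xml")
# hdfs = get_hdfs_client_from_conf(conf)
# if not hdfs.exists("/test-warehouse"):
#     hdfs.make_dir("/test-warehouse")
# hdfs.chmod("/test-warehouse", "775")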
|
pkoutsias/SickRage
|
refs/heads/master
|
lib/dogpile/__init__.py
|
916
|
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
dlazz/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/webfaction/webfaction_app.py
|
51
|
#!/usr/bin/python
#
# (c) Quentin Stafford-Fraser 2015, with contributions gratefully acknowledged from:
# * Andy Baker
# * Federico Tarantini
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Create a Webfaction application using Ansible and the Webfaction API
#
# Valid application types can be found by looking here:
# https://docs.webfaction.com/xmlrpc-api/apps.html#application-types
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: webfaction_app
short_description: Add or remove applications on a Webfaction host
description:
- Add or remove applications on a Webfaction host. Further documentation at U(https://github.com/quentinsf/ansible-webfaction).
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
- >
You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API.
The location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as
your host, you may want to add C(serial: 1) to the plays.
- See `the webfaction API <https://docs.webfaction.com/xmlrpc-api/>`_ for more info.
options:
name:
description:
- The name of the application
required: true
state:
description:
- Whether the application should exist
choices: ['present', 'absent']
default: "present"
type:
description:
- The type of application to create. See the Webfaction docs at U(https://docs.webfaction.com/xmlrpc-api/apps.html) for a list.
required: true
autostart:
description:
- Whether the app should restart with an C(autostart.cgi) script
type: bool
default: 'no'
extra_info:
description:
- Any extra parameters required by the app
default: ''
port_open:
description:
- If the port should be opened
type: bool
default: 'no'
login_name:
description:
- The webfaction account to use
required: true
login_password:
description:
- The webfaction password to use
required: true
machine:
description:
- The machine name to use (optional for accounts with only one machine)
'''
EXAMPLES = '''
- name: Create a test app
webfaction_app:
name: "my_wsgi_app1"
state: present
type: mod_wsgi35-python27
login_name: "{{webfaction_user}}"
login_password: "{{webfaction_passwd}}"
machine: "{{webfaction_machine}}"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import xmlrpc_client
webfaction = xmlrpc_client.ServerProxy('https://api.webfaction.com/')
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
state=dict(required=False, choices=['present', 'absent'], default='present'),
type=dict(required=True),
autostart=dict(required=False, type='bool', default=False),
extra_info=dict(required=False, default=""),
port_open=dict(required=False, type='bool', default=False),
login_name=dict(required=True),
login_password=dict(required=True, no_log=True),
machine=dict(required=False, default=None),
),
supports_check_mode=True
)
app_name = module.params['name']
app_type = module.params['type']
app_state = module.params['state']
if module.params['machine']:
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password'],
module.params['machine']
)
else:
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password']
)
app_list = webfaction.list_apps(session_id)
app_map = dict([(i['name'], i) for i in app_list])
existing_app = app_map.get(app_name)
result = {}
# Here's where the real stuff happens
if app_state == 'present':
# Does an app with this name already exist?
if existing_app:
if existing_app['type'] != app_type:
module.fail_json(msg="App already exists with different type. Please fix by hand.")
# If it exists with the right type, we don't change it.
# TODO: should also check the other parameters.
module.exit_json(
changed=False,
result=existing_app,
)
if not module.check_mode:
# If this isn't a dry run, create the app
result.update(
webfaction.create_app(
session_id, app_name, app_type,
module.boolean(module.params['autostart']),
module.params['extra_info'],
module.boolean(module.params['port_open'])
)
)
elif app_state == 'absent':
# If the app's already not there, nothing changed.
if not existing_app:
module.exit_json(
changed=False,
)
if not module.check_mode:
# If this isn't a dry run, delete the app
result.update(
webfaction.delete_app(session_id, app_name)
)
else:
module.fail_json(msg="Unknown state specified: {}".format(app_state))
module.exit_json(
changed=True,
result=result
)
if __name__ == '__main__':
main()
|
nlholdem/icodoom
|
refs/heads/master
|
.venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/url.py
|
713
|
from __future__ import absolute_import
from collections import namedtuple
from ..exceptions import LocationParseError
url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
class Url(namedtuple('Url', url_attrs)):
"""
Datastructure for representing an HTTP URL. Used as a return value for
:func:`parse_url`.
"""
__slots__ = ()
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
query=None, fragment=None):
if path and not path.startswith('/'):
path = '/' + path
return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
query, fragment)
@property
def hostname(self):
"""For backwards-compatibility with urlparse. We're nice like that."""
return self.host
@property
def request_uri(self):
"""Absolute path including the query string."""
uri = self.path or '/'
if self.query is not None:
uri += '?' + self.query
return uri
@property
def netloc(self):
"""Network location including host and port"""
if self.port:
return '%s:%d' % (self.host, self.port)
return self.host
@property
def url(self):
"""
Convert self into a url
This function should more or less round-trip with :func:`.parse_url`. The
returned url may not be exactly the same as the url passed to
:func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
with a blank port will have : removed).
Example: ::
>>> U = parse_url('http://google.com/mail/')
>>> U.url
'http://google.com/mail/'
>>> Url('http', 'username:password', 'host.com', 80,
... '/path', 'query', 'fragment').url
'http://username:password@host.com:80/path?query#fragment'
"""
scheme, auth, host, port, path, query, fragment = self
url = ''
# We use "is not None" we want things to happen with empty strings (or 0 port)
if scheme is not None:
url += scheme + '://'
if auth is not None:
url += auth + '@'
if host is not None:
url += host
if port is not None:
url += ':' + str(port)
if path is not None:
url += path
if query is not None:
url += '?' + query
if fragment is not None:
url += '#' + fragment
return url
def __str__(self):
return self.url
def split_first(s, delims):
"""
Given a string and an iterable of delimiters, split on the first found
delimiter. Return two split parts and the matched delimiter.
If not found, then the first part is the full input string.
Example::
>>> split_first('foo/bar?baz', '?/=')
('foo', 'bar?baz', '/')
>>> split_first('foo/bar?baz', '123')
('foo/bar?baz', '', None)
Scales linearly with the number of delims; not ideal for a large number of them.
"""
min_idx = None
min_delim = None
for d in delims:
idx = s.find(d)
if idx < 0:
continue
if min_idx is None or idx < min_idx:
min_idx = idx
min_delim = d
if min_idx is None or min_idx < 0:
return s, '', None
return s[:min_idx], s[min_idx + 1:], min_delim
def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
Partly backwards-compatible with :mod:`urlparse`.
Example::
>>> parse_url('http://google.com/mail/')
Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
>>> parse_url('google.com:80')
Url(scheme=None, host='google.com', port=80, path=None, ...)
>>> parse_url('/foo?bar')
Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
"""
# While this code has overlap with stdlib's urlparse, it is much
# simplified for our needs and less annoying.
# Additionally, this implementation does silly things to be optimal
# on CPython.
if not url:
# Empty
return Url()
scheme = None
auth = None
host = None
port = None
path = None
fragment = None
query = None
# Scheme
if '://' in url:
scheme, url = url.split('://', 1)
# Find the earliest Authority Terminator
# (http://tools.ietf.org/html/rfc3986#section-3.2)
url, path_, delim = split_first(url, ['/', '?', '#'])
if delim:
# Reassemble the path
path = delim + path_
# Auth
if '@' in url:
# Last '@' denotes end of auth part
auth, url = url.rsplit('@', 1)
# IPv6
if url and url[0] == '[':
host, url = url.split(']', 1)
host += ']'
# Port
if ':' in url:
_host, port = url.split(':', 1)
if not host:
host = _host
if port:
# If given, ports must be integers.
if not port.isdigit():
raise LocationParseError(url)
port = int(port)
else:
# Blank ports are cool, too. (rfc3986#section-3.2.3)
port = None
elif not host and url:
host = url
if not path:
return Url(scheme, auth, host, port, path, query, fragment)
# Fragment
if '#' in path:
path, fragment = path.split('#', 1)
# Query
if '?' in path:
path, query = path.split('?', 1)
return Url(scheme, auth, host, port, path, query, fragment)
def get_host(url):
"""
Deprecated. Use :func:`.parse_url` instead.
"""
p = parse_url(url)
return p.scheme or 'http', p.hostname, p.port
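# A minimal round-trip sketch using only the functions defined above:
#
# u = parse_url('http://user:pass@host.com:80/path?q=1#frag')
# assert u.scheme == 'http' and u.auth == 'user:pass' and u.port == 80
# assert u.request_uri == '/path?q=1'
# assert str(u) == u.url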
|
agripo/website
|
refs/heads/master
|
core/migrations/0031_auto_20151028_1744.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import ckeditor.fields
class Migration(migrations.Migration):
dependencies = [
('core', '0030_auto_20151028_1731'),
]
operations = [
migrations.AlterField(
model_name='news',
name='content',
field=ckeditor.fields.RichTextField(default=None, verbose_name='Contenu', help_text="Texte de l'actualité"),
),
]
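# Illustrative invocation with standard Django tooling (the app label and
# migration number are taken from the dependency above):
#
# python manage.py migrate core 0031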
|
ujfjhz/storm
|
refs/heads/master
|
dev-tools/test-ns.py
|
23
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import sys
import os
os.chdir("storm-core")
ns = sys.argv[1]
pipe = Popen(["mvn", "test", "-DfailIfNoTests=false", "-Dtest=%s"%ns])
pipe.wait()
os.chdir("..")
sys.exit(pipe.returncode)
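# Example invocation (the namespace argument is illustrative; pass whichever
# test namespace you want forwarded to -Dtest):
#
# ./test-ns.py backtype.storm.scheduler-test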
|
MisterPup/Ceilometer-Juno-Extension
|
refs/heads/master
|
ceilometer/tests/storage/test_impl_sqlalchemy.py
|
4
|
#
# Author: John Tran <jhtran@att.com>
# Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for ceilometer/storage/impl_sqlalchemy.py
.. note::
In order to run the tests against real SQL server set the environment
variable CEILOMETER_TEST_SQL_URL to point to a SQL server before running
the tests.
"""
import datetime
import repr
import mock
from oslo.utils import timeutils
from ceilometer.alarm.storage import impl_sqlalchemy as impl_sqla_alarm
from ceilometer.storage import impl_sqlalchemy
from ceilometer.storage import models
from ceilometer.storage.sqlalchemy import models as sql_models
from ceilometer.tests import base as test_base
from ceilometer.tests import db as tests_db
from ceilometer.tests.storage import test_storage_scenarios as scenarios
@tests_db.run_with('sqlite')
class CeilometerBaseTest(tests_db.TestBase):
def test_ceilometer_base(self):
base = sql_models.CeilometerBase()
base['key'] = 'value'
self.assertEqual('value', base['key'])
@tests_db.run_with('sqlite')
class TraitTypeTest(tests_db.TestBase):
# TraitType is a construct specific to sqlalchemy.
# Not applicable to other drivers.
def test_trait_type_exists(self):
tt1 = self.conn._get_or_create_trait_type("foo", 0)
self.assertTrue(tt1.id >= 0)
tt2 = self.conn._get_or_create_trait_type("foo", 0)
self.assertEqual(tt2.id, tt1.id)
self.assertEqual(tt2.desc, tt1.desc)
self.assertEqual(tt2.data_type, tt1.data_type)
def test_new_trait_type(self):
tt1 = self.conn._get_or_create_trait_type("foo", 0)
self.assertTrue(tt1.id >= 0)
tt2 = self.conn._get_or_create_trait_type("blah", 0)
self.assertNotEqual(tt1.id, tt2.id)
self.assertNotEqual(tt1.desc, tt2.desc)
# Test the method __repr__ returns a string
self.assertTrue(repr.repr(tt2))
def test_trait_different_data_type(self):
tt1 = self.conn._get_or_create_trait_type("foo", 0)
self.assertTrue(tt1.id >= 0)
tt2 = self.conn._get_or_create_trait_type("foo", 1)
self.assertNotEqual(tt1.id, tt2.id)
self.assertEqual(tt2.desc, tt1.desc)
self.assertNotEqual(tt1.data_type, tt2.data_type)
# Test the method __repr__ returns a string
self.assertTrue(repr.repr(tt2))
@tests_db.run_with('sqlite')
class EventTypeTest(tests_db.TestBase):
# EventType is a construct specific to sqlalchemy
# Not applicable to other drivers.
def test_event_type_exists(self):
et1 = self.conn._get_or_create_event_type("foo")
self.assertTrue(et1.id >= 0)
et2 = self.conn._get_or_create_event_type("foo")
self.assertEqual(et2.id, et1.id)
self.assertEqual(et2.desc, et1.desc)
def test_event_type_unique(self):
et1 = self.conn._get_or_create_event_type("foo")
self.assertTrue(et1.id >= 0)
et2 = self.conn._get_or_create_event_type("blah")
self.assertNotEqual(et1.id, et2.id)
self.assertNotEqual(et1.desc, et2.desc)
# Test the method __repr__ returns a string
self.assertTrue(repr.repr(et2))
class MyException(Exception):
pass
@tests_db.run_with('sqlite')
class EventTest(tests_db.TestBase):
def test_string_traits(self):
model = models.Trait("Foo", models.Trait.TEXT_TYPE, "my_text")
trait = self.conn._make_trait(model, None)
self.assertEqual(models.Trait.TEXT_TYPE, trait.trait_type.data_type)
self.assertIsNone(trait.t_float)
self.assertIsNone(trait.t_int)
self.assertIsNone(trait.t_datetime)
self.assertEqual("my_text", trait.t_string)
self.assertIsNotNone(trait.trait_type.desc)
def test_int_traits(self):
model = models.Trait("Foo", models.Trait.INT_TYPE, 100)
trait = self.conn._make_trait(model, None)
self.assertEqual(models.Trait.INT_TYPE, trait.trait_type.data_type)
self.assertIsNone(trait.t_float)
self.assertIsNone(trait.t_string)
self.assertIsNone(trait.t_datetime)
self.assertEqual(100, trait.t_int)
self.assertIsNotNone(trait.trait_type.desc)
def test_float_traits(self):
model = models.Trait("Foo", models.Trait.FLOAT_TYPE, 123.456)
trait = self.conn._make_trait(model, None)
self.assertEqual(models.Trait.FLOAT_TYPE, trait.trait_type.data_type)
self.assertIsNone(trait.t_int)
self.assertIsNone(trait.t_string)
self.assertIsNone(trait.t_datetime)
self.assertEqual(123.456, trait.t_float)
self.assertIsNotNone(trait.trait_type.desc)
def test_datetime_traits(self):
now = datetime.datetime.utcnow()
model = models.Trait("Foo", models.Trait.DATETIME_TYPE, now)
trait = self.conn._make_trait(model, None)
self.assertEqual(models.Trait.DATETIME_TYPE,
trait.trait_type.data_type)
self.assertIsNone(trait.t_int)
self.assertIsNone(trait.t_string)
self.assertIsNone(trait.t_float)
self.assertEqual(now, trait.t_datetime)
self.assertIsNotNone(trait.trait_type.desc)
def test_bad_event(self):
now = datetime.datetime.utcnow()
m = [models.Event("1", "Foo", now, []),
models.Event("2", "Zoo", now, [])]
with mock.patch.object(self.conn, "_record_event") as mock_save:
mock_save.side_effect = MyException("Boom")
problem_events = self.conn.record_events(m)
self.assertEqual(2, len(problem_events))
for bad, event in problem_events:
self.assertEqual(bad, models.Event.UNKNOWN_PROBLEM)
def test_get_none_value_traits(self):
model = sql_models.Trait(None, None, 5)
self.assertIsNone(model.get_value())
self.assertTrue(repr.repr(model))
def test_event_repr(self):
ev = sql_models.Event('msg_id', None, False)
ev.id = 100
self.assertTrue(repr.repr(ev))
@tests_db.run_with('sqlite')
class RelationshipTest(scenarios.DBTestBase):
# Note: Do not derive from SQLAlchemyEngineTestBase, since we
# don't want to automatically inherit all the Meter setup.
@mock.patch.object(timeutils, 'utcnow')
def test_clear_metering_data_meta_tables(self, mock_utcnow):
mock_utcnow.return_value = datetime.datetime(2012, 7, 2, 10, 45)
self.conn.clear_expired_metering_data(3 * 60)
session = self.conn._engine_facade.get_session()
meta_tables = [sql_models.MetaText, sql_models.MetaFloat,
sql_models.MetaBigInt, sql_models.MetaBool]
for table in meta_tables:
self.assertEqual(0, (session.query(table)
.filter(~table.id.in_(
session.query(sql_models.Sample.id)
.group_by(sql_models.Sample.id))).count()
))
class CapabilitiesTest(test_base.BaseTestCase):
# Check the returned capabilities list, which is specific to each DB
# driver
def test_capabilities(self):
expected_capabilities = {
'meters': {'pagination': False,
'query': {'simple': True,
'metadata': True,
'complex': False}},
'resources': {'pagination': False,
'query': {'simple': True,
'metadata': True,
'complex': False}},
'samples': {'pagination': True,
'groupby': True,
'query': {'simple': True,
'metadata': True,
'complex': True}},
'statistics': {'pagination': False,
'groupby': True,
'query': {'simple': True,
'metadata': True,
'complex': False},
'aggregation': {'standard': True,
'selectable': {
'max': True,
'min': True,
'sum': True,
'avg': True,
'count': True,
'stddev': True,
'cardinality': True}}
},
'events': {'query': {'simple': True}}
}
actual_capabilities = impl_sqlalchemy.Connection.get_capabilities()
self.assertEqual(expected_capabilities, actual_capabilities)
def test_alarm_capabilities(self):
expected_capabilities = {
'alarms': {'query': {'simple': True,
'complex': True},
'history': {'query': {'simple': True,
'complex': True}}},
}
actual_capabilities = impl_sqla_alarm.Connection.get_capabilities()
self.assertEqual(expected_capabilities, actual_capabilities)
def test_storage_capabilities(self):
expected_capabilities = {
'storage': {'production_ready': True},
}
actual_capabilities = (impl_sqlalchemy.
Connection.get_storage_capabilities())
self.assertEqual(expected_capabilities, actual_capabilities)
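# As the module docstring notes, these tests default to sqlite; to exercise
# them against a real SQL server, export the URL first. Values below are
# illustrative, and the runner invocation is an assumption (use your
# project's usual test runner):
#
# export CEILOMETER_TEST_SQL_URL="mysql://user:pass@localhost/ceilometer"
# python -m testtools.run ceilometer.tests.storage.test_impl_sqlalchemy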
|
yw374cornell/e-mission-server
|
refs/heads/master
|
emission/tests/analysisTests/intakeTests/TestFilterAccuracy.py
|
1
|
# Standard imports
import unittest
import datetime as pydt
import logging
import pymongo
import json
import bson.json_util as bju
import pandas as pd
# Our imports
import emission.analysis.intake.cleaning.filter_accuracy as eaicf
import emission.storage.timeseries.abstract_timeseries as esta
import emission.storage.pipeline_queries as epq
import emission.tests.common as etc
class TestFilterAccuracy(unittest.TestCase):
def setUp(self):
# We need to access the database directly sometimes in order to
# forcibly insert entries for the tests to pass. But we put the import
# in here to reduce the temptation to use the database directly elsewhere.
import emission.core.get_database as edb
import uuid
self.testUUID = uuid.uuid4()
self.entries = json.load(open("emission/tests/data/smoothing_data/tablet_2015-11-03"),
object_hook=bju.object_hook)
tsdb = edb.get_timeseries_db()
for entry in self.entries:
entry["user_id"] = self.testUUID
tsdb.save(entry)
self.ts = esta.TimeSeries.get_time_series(self.testUUID)
def tearDown(self):
import emission.core.get_database as edb
edb.get_timeseries_db().remove({"user_id": self.testUUID})
edb.get_pipeline_state_db().remove({"user_id": self.testUUID})
def testEmptyCallToPriorDuplicate(self):
time_query = epq.get_time_range_for_accuracy_filtering(self.testUUID)
unfiltered_points_df = self.ts.get_data_df("background/location", time_query)
self.assertEqual(len(unfiltered_points_df), 205)
# Check call to check duplicate with a zero length dataframe
entry = unfiltered_points_df.iloc[5]
self.assertEqual(eaicf.check_prior_duplicate(pd.DataFrame(), 0, entry), False)
def testEmptyCall(self):
# Check call to the entire filter accuracy with a zero length timeseries
import emission.core.get_database as edb
edb.get_timeseries_db().remove({"user_id": self.testUUID})
# We expect that this should not throw
eaicf.filter_accuracy(self.testUUID)
self.assertEqual(len(self.ts.get_data_df("background/location")), 0)
def testCheckPriorDuplicate(self):
time_query = epq.get_time_range_for_accuracy_filtering(self.testUUID)
unfiltered_points_df = self.ts.get_data_df("background/location", time_query)
self.assertEqual(len(unfiltered_points_df), 205)
entry = unfiltered_points_df.iloc[5]
unfiltered_appended_df = pd.DataFrame([entry] * 5).append(unfiltered_points_df).reset_index()
logging.debug("unfiltered_appended_df = %s" % unfiltered_appended_df[["fmt_time"]].head())
self.assertEqual(eaicf.check_prior_duplicate(unfiltered_appended_df, 0, entry), False)
self.assertEqual(eaicf.check_prior_duplicate(unfiltered_appended_df, 5, entry), True)
self.assertEqual(eaicf.check_prior_duplicate(unfiltered_points_df, 5, entry), False)
def testConvertToFiltered(self):
time_query = epq.get_time_range_for_accuracy_filtering(self.testUUID)
unfiltered_points_df = self.ts.get_data_df("background/location", time_query)
self.assertEqual(len(unfiltered_points_df), 205)
entry_from_df = unfiltered_points_df.iloc[5]
entry_copy = eaicf.convert_to_filtered(self.ts.get_entry_at_ts("background/location",
"metadata.write_ts",
entry_from_df.metadata_write_ts))
self.assertNotIn("_id", entry_copy)
self.assertEquals(entry_copy["metadata"]["key"], "background/filtered_location")
def testExistingFilteredLocation(self):
time_query = epq.get_time_range_for_accuracy_filtering(self.testUUID)
unfiltered_points_df = self.ts.get_data_df("background/location", time_query)
self.assertEqual(len(unfiltered_points_df), 205)
entry_from_df = unfiltered_points_df.iloc[5]
self.assertEqual(eaicf.check_existing_filtered_location(self.ts, entry_from_df), False)
entry_copy = self.ts.get_entry_at_ts("background/location", "metadata.write_ts",
entry_from_df.metadata_write_ts)
self.ts.insert(eaicf.convert_to_filtered(entry_copy))
self.assertEqual(eaicf.check_existing_filtered_location(self.ts, entry_from_df), True)
def testFilterAccuracy(self):
unfiltered_points_df = self.ts.get_data_df("background/location", None)
self.assertEqual(len(unfiltered_points_df), 205)
pre_filtered_points_df = self.ts.get_data_df("background/filtered_location", None)
self.assertEqual(len(pre_filtered_points_df), 0)
eaicf.filter_accuracy(self.testUUID)
filtered_points_df = self.ts.get_data_df("background/filtered_location", None)
self.assertEqual(len(filtered_points_df), 124)
if __name__ == '__main__':
etc.configLogging()
unittest.main()
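# Illustrative direct run (assumes a local MongoDB reachable by
# emission.core.get_database, and that the repo root is on PYTHONPATH):
#
# python emission/tests/analysisTests/intakeTests/TestFilterAccuracy.py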
|
ywcui1990/htmresearch
|
refs/heads/master
|
tests/frameworks/layers/l2l4_network_creation_test.py
|
3
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2016, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import unittest
import random
from htmresearch.support.register_regions import registerAllResearchRegions
from htmresearch.frameworks.layers.laminar_network import createNetwork
networkConfig1 = {
"networkType": "L4L2Column",
"externalInputSize": 1024,
"sensorInputSize": 1024,
"L4RegionType": "py.ApicalTMPairRegion",
"L4Params": {
"columnCount": 1024,
"cellsPerColumn": 8,
"learn": True,
"learnOnOneCell": False,
"initialPermanence": 0.51,
"connectedPermanence": 0.6,
"permanenceIncrement": 0.1,
"permanenceDecrement": 0.02,
"minThreshold": 10,
"basalPredictedSegmentDecrement": 0.004,
"activationThreshold": 13,
"sampleSize": 20,
},
"L2Params": {
"inputWidth": 1024 * 8,
"cellCount": 4096,
"sdrSize": 40,
"synPermProximalInc": 0.1,
"synPermProximalDec": 0.001,
"initialProximalPermanence": 0.6,
"minThresholdProximal": 10,
"sampleSizeProximal": 20,
"connectedPermanenceProximal": 0.5,
"synPermDistalInc": 0.1,
"synPermDistalDec": 0.001,
"initialDistalPermanence": 0.41,
"activationThresholdDistal": 13,
"sampleSizeDistal": 20,
"connectedPermanenceDistal": 0.5,
"distalSegmentInhibitionFactor": 0.6667,
"learningMode": True,
},
}
networkConfig2 = {
"networkType": "MultipleL4L2Columns",
"numCorticalColumns": 3,
"externalInputSize": 1024,
"sensorInputSize": 1024,
"L4RegionType": "py.ApicalTMPairRegion",
"L4Params": {
"columnCount": 1024,
"cellsPerColumn": 8,
"learn": True,
"learnOnOneCell": False,
"initialPermanence": 0.51,
"connectedPermanence": 0.6,
"permanenceIncrement": 0.1,
"permanenceDecrement": 0.02,
"minThreshold": 10,
"basalPredictedSegmentDecrement": 0.004,
"activationThreshold": 13,
"sampleSize": 20,
"seed": 42,
},
"L2Params": {
"inputWidth": 1024 * 8,
"cellCount": 4096,
"sdrSize": 40,
"synPermProximalInc": 0.1,
"synPermProximalDec": 0.001,
"initialProximalPermanence": 0.6,
"minThresholdProximal": 10,
"sampleSizeProximal": 20,
"connectedPermanenceProximal": 0.5,
"synPermDistalInc": 0.1,
"synPermDistalDec": 0.001,
"initialDistalPermanence": 0.41,
"activationThresholdDistal": 13,
"sampleSizeDistal": 20,
"connectedPermanenceDistal": 0.5,
"distalSegmentInhibitionFactor": 0.6667,
"learningMode": True,
}
}
networkConfig3 = {
"networkType": "MultipleL4L2Columns",
"numCorticalColumns": 2,
"externalInputSize": 1024,
"sensorInputSize": 1024,
"L4RegionType": "py.ApicalTMPairRegion",
"L4Params": {
"columnCount": 1024,
"cellsPerColumn": 8,
"learn": True,
"learnOnOneCell": False,
"initialPermanence": 0.51,
"connectedPermanence": 0.6,
"permanenceIncrement": 0.1,
"permanenceDecrement": 0.02,
"minThreshold": 10,
"basalPredictedSegmentDecrement": 0.004,
"activationThreshold": 13,
"sampleSize": 20,
},
"L2Params": {
"inputWidth": 1024 * 8,
"cellCount": 4096,
"sdrSize": 40,
"synPermProximalInc": 0.1,
"synPermProximalDec": 0.001,
"initialProximalPermanence": 0.6,
"minThresholdProximal": 10,
"sampleSizeProximal": 20,
"connectedPermanenceProximal": 0.5,
"synPermDistalInc": 0.1,
"synPermDistalDec": 0.001,
"initialDistalPermanence": 0.41,
"activationThresholdDistal": 13,
"sampleSizeDistal": 20,
"connectedPermanenceDistal": 0.5,
"distalSegmentInhibitionFactor": 0.6667,
"learningMode": True,
}
}
networkConfig4 = {
"networkType": "MultipleL4L2ColumnsWithTopology",
"numCorticalColumns": 5,
"externalInputSize": 1024,
"sensorInputSize": 1024,
"columnPositions": [(0, 0), (1, 0), (2, 0), (2, 1), (2, -1)],
"maxConnectionDistance": 1,
"L4RegionType": "py.ApicalTMPairRegion",
"L4Params": {
"columnCount": 1024,
"cellsPerColumn": 8,
"learn": True,
"learnOnOneCell": False,
"initialPermanence": 0.51,
"connectedPermanence": 0.6,
"permanenceIncrement": 0.1,
"permanenceDecrement": 0.02,
"minThreshold": 10,
"basalPredictedSegmentDecrement": 0.004,
"activationThreshold": 13,
"sampleSize": 20,
"seed": 42,
},
"L2Params": {
"inputWidth": 1024 * 8,
"cellCount": 4096,
"sdrSize": 40,
"synPermProximalInc": 0.1,
"synPermProximalDec": 0.001,
"initialProximalPermanence": 0.6,
"minThresholdProximal": 10,
"sampleSizeProximal": 20,
"connectedPermanenceProximal": 0.5,
"synPermDistalInc": 0.1,
"synPermDistalDec": 0.001,
"initialDistalPermanence": 0.41,
"activationThresholdDistal": 13,
"sampleSizeDistal": 20,
"connectedPermanenceDistal": 0.5,
"distalSegmentInhibitionFactor": 0.6667,
"learningMode": True,
}
}
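# A hedged sketch of deriving one more variant without repeating the
# boilerplate above (copy.deepcopy is stdlib; the tweaked key exists in the
# configs defined in this file):
#
# import copy
# networkConfig5 = copy.deepcopy(networkConfig2)
# networkConfig5["numCorticalColumns"] = 4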
class LaminarNetworkTest(unittest.TestCase):
""" Super simple test of laminar network factory"""
@classmethod
def setUpClass(cls):
random.seed(42)
registerAllResearchRegions()
def testL4L2ColumnCreate(self):
"""
In this simplistic test we just create a network, ensure it has the
right number of regions and try to run some inputs through it without
crashing.
"""
# Create a simple network to test the sensor
net = createNetwork(networkConfig1)
self.assertEqual(len(net.regions.keys()), 4,
"Incorrect number of regions")
# Add some input vectors to the queue
externalInput = net.regions["externalInput_0"].getSelf()
sensorInput = net.regions["sensorInput_0"].getSelf()
# Add 3 input vectors
externalInput.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput.addDataToQueue([2, 42, 1023], 0, 0)
externalInput.addDataToQueue([1, 42, 1022], 0, 0)
sensorInput.addDataToQueue([1, 42, 1022], 0, 0)
externalInput.addDataToQueue([3, 42, 1021], 0, 0)
sensorInput.addDataToQueue([3, 42, 1021], 0, 0)
# Run the network and check outputs are as expected
net.run(3)
def testL4L2ColumnLinks(self):
"""
In this simplistic test we create a network and ensure that it has the
correct links between regions.
"""
# Create a simple network to check its architecture
net = createNetwork(networkConfig1)
links = net.getLinks()
# These are all the links we're hoping to find
desired_links=set(["sensorInput_0.dataOut-->L4Column_0.activeColumns",
"L2Column_0.feedForwardOutput-->L4Column_0.apicalInput",
"externalInput_0.dataOut-->L4Column_0.basalInput",
"L4Column_0.predictedActiveCells-->L2Column_0.feedforwardGrowthCandidates",
"L4Column_0.activeCells-->L2Column_0.feedforwardInput",
"sensorInput_0.resetOut-->L2Column_0.resetIn",
"sensorInput_0.resetOut-->L4Column_0.resetIn",
"externalInput_0.dataOut-->L4Column_0.basalGrowthCandidates"])
# This gets textual representations of the links.
links = set([link.second.getMoniker() for link in links])
# Build a descriptive error message to pass to the user
error_message = "Error: Links incorrectly formed in simple L2L4 network: \n"
for link in desired_links:
if link not in links:
error_message += "Failed to find link: {}\n".format(link)
for link in links:
if link not in desired_links:
error_message += "Found unexpected link: {}\n".format(link)
self.assertSetEqual(desired_links, links, error_message)
def testMultipleL4L2ColumnsCreate(self):
"""
In this simplistic test we create a network with 3 L4L2Columns, ensure it
has the right number of regions and try to run some inputs through it
without crashing.
"""
net = createNetwork(networkConfig2)
self.assertEqual(len(net.regions.keys()), 4*3,
"Incorrect number of regions")
# Add some input vectors to the queue
externalInput0 = net.regions["externalInput_0"].getSelf()
sensorInput0 = net.regions["sensorInput_0"].getSelf()
externalInput1 = net.regions["externalInput_1"].getSelf()
sensorInput1 = net.regions["sensorInput_1"].getSelf()
externalInput2 = net.regions["externalInput_2"].getSelf()
sensorInput2 = net.regions["sensorInput_2"].getSelf()
externalInput0.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput0.addDataToQueue([2, 42, 1023], 0, 0)
externalInput1.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput1.addDataToQueue([2, 42, 1023], 0, 0)
externalInput2.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput2.addDataToQueue([2, 42, 1023], 0, 0)
# Run the network and check outputs are as expected
net.run(1)
# Spotcheck some of the phases
self.assertEqual(net.getPhases("externalInput_0"), (0,),
"Incorrect phase externalInput_0")
self.assertEqual(net.getPhases("externalInput_1"), (0,),
"Incorrect phase for externalInput_1")
self.assertEqual(net.getPhases("sensorInput_0"), (0,),
"Incorrect phase for sensorInput_0")
self.assertEqual(net.getPhases("sensorInput_1"), (0,),
"Incorrect phase for sensorInput_1")
self.assertEqual(net.getPhases("L4Column_0"), (2,),
"Incorrect phase for L4Column_0")
self.assertEqual(net.getPhases("L4Column_1"), (2,),
"Incorrect phase for L4Column_1")
def testMultipleL4L2ColumnLinks(self):
"""
In this simplistic test we create a network with 3 L4L2 columns, and
ensure that it has the correct links between regions.
"""
# Create a simple network to check its architecture
net = createNetwork(networkConfig2)
links = net.getLinks()
# These are all the links we're hoping to find
desired_links=set(["sensorInput_0.dataOut-->L4Column_0.activeColumns",
"L2Column_0.feedForwardOutput-->L4Column_0.apicalInput",
"externalInput_0.dataOut-->L4Column_0.basalInput",
"L4Column_0.predictedActiveCells-->"+
"L2Column_0.feedforwardGrowthCandidates",
"L4Column_0.activeCells-->L2Column_0.feedforwardInput",
"sensorInput_0.resetOut-->L2Column_0.resetIn",
"sensorInput_0.resetOut-->L4Column_0.resetIn",
"sensorInput_1.dataOut-->L4Column_1.activeColumns",
"L2Column_1.feedForwardOutput-->L4Column_1.apicalInput",
"externalInput_1.dataOut-->L4Column_1.basalInput",
"L4Column_1.predictedActiveCells-->"+
"L2Column_1.feedforwardGrowthCandidates",
"L4Column_1.activeCells-->L2Column_1.feedforwardInput",
"sensorInput_1.resetOut-->L2Column_1.resetIn",
"sensorInput_1.resetOut-->L4Column_1.resetIn",
"sensorInput_2.dataOut-->L4Column_2.activeColumns",
"L2Column_2.feedForwardOutput-->L4Column_2.apicalInput",
"externalInput_2.dataOut-->L4Column_2.basalInput",
"L4Column_2.predictedActiveCells-->"+
"L2Column_2.feedforwardGrowthCandidates",
"L4Column_2.activeCells-->L2Column_2.feedforwardInput",
"sensorInput_2.resetOut-->L2Column_2.resetIn",
"sensorInput_2.resetOut-->L4Column_2.resetIn",
"L2Column_0.feedForwardOutput-->L2Column_1.lateralInput",
"L2Column_0.feedForwardOutput-->L2Column_2.lateralInput",
"L2Column_1.feedForwardOutput-->L2Column_0.lateralInput",
"L2Column_1.feedForwardOutput-->L2Column_2.lateralInput",
"L2Column_2.feedForwardOutput-->L2Column_0.lateralInput",
"L2Column_2.feedForwardOutput-->L2Column_1.lateralInput",
"externalInput_0.dataOut-->L4Column_0.basalGrowthCandidates",
"externalInput_1.dataOut-->L4Column_1.basalGrowthCandidates",
"externalInput_2.dataOut-->L4Column_2.basalGrowthCandidates"])
# This gets textual representations of the links.
links = set([link.second.getMoniker() for link in links])
# Build a descriptive error message to pass to the user
error_message = "Links incorrectly formed in multicolumn L2L4 network: \n"
for link in desired_links:
if link not in links:
error_message += "Failed to find link: {}\n".format(link)
for link in links:
if link not in desired_links:
error_message += "Found unexpected link: {}\n".format(link)
self.assertSetEqual(desired_links, links, error_message)
def testMultipleL4L2ColumnsWithTopologyCreate(self):
"""
In this simplistic test we create a network with 5 L4L2Columns and
topological lateral connections, ensure it has the right number of regions,
and try to run some inputs through it without crashing.
"""
net = createNetwork(networkConfig4)
self.assertEqual(len(net.regions.keys()), 20, "Incorrect number of regions")
# Add some input vectors to the queue
externalInput0 = net.regions["externalInput_0"].getSelf()
sensorInput0 = net.regions["sensorInput_0"].getSelf()
externalInput1 = net.regions["externalInput_1"].getSelf()
sensorInput1 = net.regions["sensorInput_1"].getSelf()
externalInput2 = net.regions["externalInput_2"].getSelf()
sensorInput2 = net.regions["sensorInput_2"].getSelf()
externalInput3 = net.regions["externalInput_3"].getSelf()
sensorInput3 = net.regions["sensorInput_3"].getSelf()
externalInput4 = net.regions["externalInput_4"].getSelf()
sensorInput4 = net.regions["sensorInput_4"].getSelf()
externalInput0.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput0.addDataToQueue([2, 42, 1023], 0, 0)
externalInput1.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput1.addDataToQueue([2, 42, 1023], 0, 0)
externalInput2.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput2.addDataToQueue([2, 42, 1023], 0, 0)
externalInput3.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput3.addDataToQueue([2, 42, 1023], 0, 0)
externalInput4.addDataToQueue([2, 42, 1023], 0, 9)
sensorInput4.addDataToQueue([2, 42, 1023], 0, 0)
# Run the network and check outputs are as expected
net.run(1)
# Spotcheck some of the phases
self.assertEqual(net.getPhases("externalInput_0"), (0,),
"Incorrect phase externalInput_0")
self.assertEqual(net.getPhases("externalInput_1"), (0,),
"Incorrect phase for externalInput_1")
self.assertEqual(net.getPhases("L4Column_0"), (2,),
"Incorrect phase for L4Column_0")
self.assertEqual(net.getPhases("L4Column_1"), (2,),
"Incorrect phase for L4Column_1")
def testMultipleL4L2ColumnsWithTopologyLinks(self):
"""
In this simplistic test we create a network with 5 L4L2Columns and
topological lateral connections, and ensure that it has the correct links
between regions. The network is laid out as follows:
3
|
0---1---2
|
4
"""
net = createNetwork(networkConfig4)
links = net.getLinks()
# These are all the links we're hoping to find
desired_links=set(["sensorInput_0.dataOut-->L4Column_0.activeColumns",
"L2Column_0.feedForwardOutput-->L4Column_0.apicalInput",
"externalInput_0.dataOut-->L4Column_0.basalInput",
"L4Column_0.predictedActiveCells-->"+
"L2Column_0.feedforwardGrowthCandidates",
"L4Column_0.activeCells-->L2Column_0.feedforwardInput",
"sensorInput_0.resetOut-->L2Column_0.resetIn",
"sensorInput_0.resetOut-->L4Column_0.resetIn",
"sensorInput_1.dataOut-->L4Column_1.activeColumns",
"L2Column_1.feedForwardOutput-->L4Column_1.apicalInput",
"externalInput_1.dataOut-->L4Column_1.basalInput",
"L4Column_1.predictedActiveCells-->"+
"L2Column_1.feedforwardGrowthCandidates",
"L4Column_1.activeCells-->L2Column_1.feedforwardInput",
"sensorInput_1.resetOut-->L2Column_1.resetIn",
"sensorInput_1.resetOut-->L4Column_1.resetIn",
"sensorInput_2.dataOut-->L4Column_2.activeColumns",
"L2Column_2.feedForwardOutput-->L4Column_2.apicalInput",
"externalInput_2.dataOut-->L4Column_2.basalInput",
"L4Column_2.predictedActiveCells-->"+
"L2Column_2.feedforwardGrowthCandidates",
"L4Column_2.activeCells-->L2Column_2.feedforwardInput",
"sensorInput_2.resetOut-->L2Column_2.resetIn",
"sensorInput_2.resetOut-->L4Column_2.resetIn",
"sensorInput_3.dataOut-->L4Column_3.activeColumns",
"L2Column_3.feedForwardOutput-->L4Column_3.apicalInput",
"externalInput_3.dataOut-->L4Column_3.basalInput",
"L4Column_3.predictedActiveCells-->"+
"L2Column_3.feedforwardGrowthCandidates",
"L4Column_3.activeCells-->L2Column_3.feedforwardInput",
"sensorInput_3.resetOut-->L2Column_3.resetIn",
"sensorInput_3.resetOut-->L4Column_3.resetIn",
"sensorInput_4.dataOut-->L4Column_4.activeColumns",
"L2Column_4.feedForwardOutput-->L4Column_4.apicalInput",
"externalInput_4.dataOut-->L4Column_4.basalInput",
"L4Column_4.predictedActiveCells-->"+
"L2Column_4.feedforwardGrowthCandidates",
"L4Column_4.activeCells-->L2Column_4.feedforwardInput",
"sensorInput_4.resetOut-->L2Column_4.resetIn",
"sensorInput_4.resetOut-->L4Column_4.resetIn",
"L2Column_0.feedForwardOutput-->L2Column_1.lateralInput",
"L2Column_1.feedForwardOutput-->L2Column_0.lateralInput",
"L2Column_1.feedForwardOutput-->L2Column_2.lateralInput",
"L2Column_2.feedForwardOutput-->L2Column_1.lateralInput",
"L2Column_2.feedForwardOutput-->L2Column_3.lateralInput",
"L2Column_2.feedForwardOutput-->L2Column_4.lateralInput",
"L2Column_3.feedForwardOutput-->L2Column_2.lateralInput",
"L2Column_4.feedForwardOutput-->L2Column_2.lateralInput",
"externalInput_0.dataOut-->L4Column_0.basalGrowthCandidates",
"externalInput_1.dataOut-->L4Column_1.basalGrowthCandidates",
"externalInput_2.dataOut-->L4Column_2.basalGrowthCandidates",
"externalInput_3.dataOut-->L4Column_3.basalGrowthCandidates",
"externalInput_4.dataOut-->L4Column_4.basalGrowthCandidates"])
# This gets textual representations of the links.
links = set([link.second.getMoniker() for link in links])
# Build a descriptive error message to pass to the user
error_message = "Links incorrectly formed in multicolumn L2L4 network: \n"
for link in desired_links:
if link not in links:
error_message += "Failed to find link: {}\n".format(link)
for link in links:
if link not in desired_links:
error_message += "Found unexpected link: {}\n".format(link)
self.assertSetEqual(desired_links, links, error_message)
@unittest.skip("Need to implement")
def testMultipleL4L2ColumnsSPCreate(self):
"""
In this simplistic test we create a network with 3 L4L2Columns, with spatial
poolers. We ensure it has the right number of regions, that spatial poolers
are named appropriately, and try to run some inputs through it without
crashing.
"""
pass
def testCustomParameters(self):
"""
This test creates a network with custom parameters and tests that the
network gets correctly constructed.
"""
customConfig = {
"networkType": "L4L2Column",
"externalInputSize": 256,
"sensorInputSize": 512,
"L4RegionType": "py.ApicalTMPairRegion",
"L4Params": {
"columnCount": 512,
"cellsPerColumn": 16,
"learn": True,
"learnOnOneCell": False,
"initialPermanence": 0.23,
"connectedPermanence": 0.75,
"permanenceIncrement": 0.45,
"permanenceDecrement": 0.1,
"minThreshold": 15,
"basalPredictedSegmentDecrement": 0.21,
"activationThreshold": 16,
"sampleSize": 24,
},
"L2Params": {
"inputWidth": 512 * 8,
"cellCount": 2048,
"sdrSize": 30,
"synPermProximalInc": 0.12,
"synPermProximalDec": 0.011,
"initialProximalPermanence": 0.8,
"minThresholdProximal": 8,
"sampleSizeProximal": 17,
"connectedPermanenceProximal": 0.6,
"synPermDistalInc": 0.09,
"synPermDistalDec": 0.002,
"initialDistalPermanence": 0.52,
"activationThresholdDistal": 15,
"sampleSizeDistal": 25,
"connectedPermanenceDistal": 0.6,
"distalSegmentInhibitionFactor": 0.8333,
"learningMode": True,
},
}
net = createNetwork(customConfig)
self.assertEqual(
len(net.regions.keys()), 4,
"Incorrect number of regions"
)
# Get various regions
externalInput = net.regions["externalInput_0"].getSelf()
sensorInput = net.regions["sensorInput_0"].getSelf()
L4Column = net.regions["L4Column_0"].getSelf()
L2Column = net.regions["L2Column_0"].getSelf()
# We need to run a first compute so that the various elements get constructed
sensorInput.addDataToQueue([], 0, 0)
externalInput.addDataToQueue([], 0, 0)
net.run(1)
# check that parameters are correct in L4
for param, value in customConfig["L4Params"].iteritems():
self.assertEqual(L4Column.getParameter(param), value)
# check that parameters are correct in L2
# some parameters are in the tm members
for param, value in customConfig["L2Params"].iteritems():
self.assertEqual(L2Column.getParameter(param), value)
# check that the configured input sizes are correct
self.assertEqual(externalInput.outputWidth,
customConfig["externalInputSize"])
self.assertEqual(sensorInput.outputWidth,
customConfig["sensorInputSize"])
def testSingleColumnL4L2DataFlow(self):
"""
This test trains a network with a few (feature, location) pairs and checks
the data flows correctly, and that each intermediate representation is
correct.
"""
# Create a simple network to test the sensor
net = createNetwork(networkConfig1)
self.assertEqual(
len(net.regions.keys()), 4,
"Incorrect number of regions"
)
# Get various regions
externalInput = net.regions["externalInput_0"].getSelf()
sensorInput = net.regions["sensorInput_0"].getSelf()
L4Column = net.regions["L4Column_0"].getSelf()
L2Column = net.regions["L2Column_0"].getSelf()
# create a feature and location pool
features = [self.generatePattern(1024, 20) for _ in xrange(2)]
locations = [self.generatePattern(1024, 20) for _ in xrange(3)]
# train with following pairs:
# (F0, L0) (F1, L1) on object A
# (F0, L2) (F1, L1) on object B
# Object A
# start with an object 1 input to get L2 representation for object 1
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[0], 0, 0)
net.run(1)
# get L2 representation for object A
L2RepresentationA = self.getCurrentL2Representation(L2Column)
self.assertEqual(len(L2RepresentationA), 40)
for _ in xrange(4):
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[0], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column),
L2RepresentationA
)
sensorInput.addDataToQueue(features[1], 0, 0)
externalInput.addDataToQueue(locations[1], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column),
L2RepresentationA
)
# get L4 representations when they are stable
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[0], 0, 0)
net.run(1)
L4Representation00 = self.getL4PredictedActiveCells(L4Column)
self.assertEqual(len(L4Representation00), 20)
# send reset signal
sensorInput.addResetToQueue(0)
externalInput.addResetToQueue(0)
net.run(1)
# Object B
# start with empty input
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[2], 0, 0)
net.run(1)
# get L2 representation for object B
L2RepresentationB = self.getCurrentL2Representation(L2Column)
self.assertEqual(len(L2RepresentationB), 40)
# check that it is very different from object A
self.assertLessEqual(len(L2RepresentationA & L2RepresentationB), 5)
for _ in xrange(4):
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[2], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column),
L2RepresentationB
)
sensorInput.addDataToQueue(features[1], 0, 0)
externalInput.addDataToQueue(locations[1], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column),
L2RepresentationB
)
# get L4 representations when they are stable
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[2], 0, 0)
net.run(1)
L4Representation02 = self.getL4PredictedActiveCells(L4Column)
self.assertEqual(len(L4Representation02), 20)
sensorInput.addDataToQueue(features[1], 0, 0)
externalInput.addDataToQueue(locations[1], 0, 0)
net.run(1)
L4Representation11 = self.getL4PredictedActiveCells(L4Column)
self.assertEqual(len(L4Representation11), 20)
# send reset signal
sensorInput.addResetToQueue(0)
externalInput.addResetToQueue(0)
net.run(1)
# check inference with each (feature, location) pair
L2Column.setParameter("learningMode", 0, False)
L4Column.setParameter("learn", 0, False)
# (F0, L0)
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[0], 0, 0)
net.run(1)
# check L2 representation, L4 representation, no bursting
self.assertEqual(
self.getCurrentL2Representation(L2Column),
L2RepresentationA
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column),
L4Representation00
)
self.assertEqual(len(self.getL4BurstingCells(L4Column)), 0)
# send reset signal
sensorInput.addResetToQueue(0)
externalInput.addResetToQueue(0)
net.run(1)
# (F0, L2)
sensorInput.addDataToQueue(features[0], 0, 0)
externalInput.addDataToQueue(locations[2], 0, 0)
net.run(1)
# check L2 representation, L4 representation, no bursting
self.assertEqual(
self.getCurrentL2Representation(L2Column),
L2RepresentationB
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column),
L4Representation02
)
self.assertEqual(len(self.getL4BurstingCells(L4Column)), 0)
# send reset signal
sensorInput.addResetToQueue(0)
externalInput.addResetToQueue(0)
net.run(1)
# (F1, L1)
sensorInput.addDataToQueue(features[1], 0, 0)
externalInput.addDataToQueue(locations[1], 0, 0)
net.run(1)
# check L2 representation, L4 representation, no bursting
self.assertEqual(
self.getCurrentL2Representation(L2Column),
L2RepresentationA | L2RepresentationB
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column),
L4Representation11
)
self.assertEqual(len(self.getL4BurstingCells(L4Column)), 0)
sensorInput.addDataToQueue(features[1], 0, 0)
externalInput.addDataToQueue(locations[2], 0, 0)
net.run(1)
# check bursting in L4: nothing is predicted for this unknown
# (feature, location) pair, so all 20 active minicolumns burst
# (8 cells each); the L2 representation should look like random SP output
self.assertEqual(len(self.getL4PredictedActiveCells(L4Column)), 0)
self.assertEqual(len(self.getL4BurstingCells(L4Column)), 20 * 8)
def testTwoColumnsL4L2DataFlow(self):
"""
This test trains a network with a few (feature, location) pairs and checks
the data flows correctly, and that each intermediate representation is
correct.
Indices 0 and 1 in variable names refer to cortical column number.
"""
# Create a simple two-column network
net = createNetwork(networkConfig3)
self.assertEqual(
len(net.regions.keys()), 4 * 2,
"Incorrect number of regions, expected {} but had {}".format(
4 * 2, len(net.regions.keys()))
)
# Get various regions
externalInput0 = net.regions["externalInput_0"].getSelf()
sensorInput0 = net.regions["sensorInput_0"].getSelf()
L4Column0 = net.regions["L4Column_0"].getSelf()
L2Column0 = net.regions["L2Column_0"].getSelf()
externalInput1 = net.regions["externalInput_1"].getSelf()
sensorInput1 = net.regions["sensorInput_1"].getSelf()
L4Column1 = net.regions["L4Column_1"].getSelf()
L2Column1 = net.regions["L2Column_1"].getSelf()
# create a feature and location pool for column 0
features0 = [self.generatePattern(1024, 20) for _ in xrange(2)]
locations0 = [self.generatePattern(1024, 20) for _ in xrange(3)]
# create a feature and location pool for column 1
features1 = [self.generatePattern(1024, 20) for _ in xrange(2)]
locations1 = [self.generatePattern(1024, 20) for _ in xrange(3)]
# train with the following (feature, location) pairs:
# (F0, L0) (F1, L1) on object A
# (F0, L2) (F1, L1) on object B
# Object A
# start with an (F0, L0) input to get the L2 representations for object A
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[0], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[0], 0, 0)
net.run(1)
# get L2 representations for object A
L2RepresentationA0 = self.getCurrentL2Representation(L2Column0)
L2RepresentationA1 = self.getCurrentL2Representation(L2Column1)
self.assertEqual(len(L2RepresentationA0), 40)
self.assertEqual(len(L2RepresentationA1), 40)
for _ in xrange(3):
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[0], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[0], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column0),
L2RepresentationA0
)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column1),
L2RepresentationA1
)
sensorInput0.addDataToQueue(features0[1], 0, 0)
externalInput0.addDataToQueue(locations0[1], 0, 0)
sensorInput1.addDataToQueue(features1[1], 0, 0)
externalInput1.addDataToQueue(locations1[1], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column0),
L2RepresentationA0
)
self.assertEqual(
self.getCurrentL2Representation(L2Column1),
L2RepresentationA1
)
# get L4 representations when they are stable
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[0], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[0], 0, 0)
net.run(1)
L4Representation00_0 = self.getL4PredictedActiveCells(L4Column0)
L4Representation00_1 = self.getL4PredictedActiveCells(L4Column1)
self.assertEqual(len(L4Representation00_0), 20)
self.assertEqual(len(L4Representation00_1), 20)
# send reset signal
sensorInput0.addResetToQueue(0)
externalInput0.addResetToQueue(0)
sensorInput1.addResetToQueue(0)
externalInput1.addResetToQueue(0)
net.run(1)
# Object B
# start with an (F0, L2) input to get the L2 representations for object B
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[2], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[2], 0, 0)
net.run(1)
# get L2 representations for object B
L2RepresentationB0 = self.getCurrentL2Representation(L2Column0)
L2RepresentationB1 = self.getCurrentL2Representation(L2Column1)
self.assertEqual(len(L2RepresentationB0), 40)
self.assertEqual(len(L2RepresentationB1), 40)
# check that it is very different from object A
self.assertLessEqual(len(L2RepresentationA0 & L2RepresentationB0), 5)
self.assertLessEqual(len(L2RepresentationA1 & L2RepresentationB1), 5)
for _ in xrange(3):
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[2], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[2], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column0),
L2RepresentationB0
)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column1),
L2RepresentationB1
)
sensorInput0.addDataToQueue(features0[1], 0, 0)
externalInput0.addDataToQueue(locations0[1], 0, 0)
sensorInput1.addDataToQueue(features1[1], 0, 0)
externalInput1.addDataToQueue(locations1[1], 0, 0)
net.run(1)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column0),
L2RepresentationB0
)
# check L2
self.assertEqual(
self.getCurrentL2Representation(L2Column1),
L2RepresentationB1
)
# get L4 representations when they are stable
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[2], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[2], 0, 0)
net.run(1)
L4Representation02_0 = self.getL4PredictedActiveCells(L4Column0)
L4Representation02_1 = self.getL4PredictedActiveCells(L4Column1)
self.assertEqual(len(L4Representation02_0), 20)
self.assertEqual(len(L4Representation02_1), 20)
sensorInput0.addDataToQueue(features0[1], 0, 0)
externalInput0.addDataToQueue(locations0[1], 0, 0)
sensorInput1.addDataToQueue(features1[1], 0, 0)
externalInput1.addDataToQueue(locations1[1], 0, 0)
net.run(1)
L4Representation11_0 = self.getL4PredictedActiveCells(L4Column0)
L4Representation11_1 = self.getL4PredictedActiveCells(L4Column1)
self.assertEqual(len(L4Representation11_0), 20)
self.assertEqual(len(L4Representation11_1), 20)
sensorInput0.addResetToQueue(0)
externalInput0.addResetToQueue(0)
sensorInput1.addResetToQueue(0)
externalInput1.addResetToQueue(0)
net.run(1)
# check inference with each (feature, location) pair
L2Column0.setParameter("learningMode", 0, False)
L4Column0.setParameter("learn", 0, False)
L2Column1.setParameter("learningMode", 0, False)
L4Column1.setParameter("learn", 0, False)
# (F0, L0)
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[0], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[0], 0, 0)
net.run(1)
# check L2 representations, L4 representations, no bursting
self.assertLessEqual(
len(self.getCurrentL2Representation(L2Column0) - L2RepresentationA0),
5
)
self.assertGreaterEqual(
len(self.getCurrentL2Representation(L2Column0) & L2RepresentationA0),
35
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column0),
L4Representation00_0
)
self.assertEqual(len(self.getL4BurstingCells(L4Column0)), 0)
# be a little tolerant on these checks as well
self.assertLessEqual(
len(self.getCurrentL2Representation(L2Column1) - L2RepresentationA1),
5
)
self.assertGreaterEqual(
len(self.getCurrentL2Representation(L2Column1) & L2RepresentationA1),
35
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column1),
L4Representation00_1
)
self.assertEqual(len(self.getL4BurstingCells(L4Column1)), 0)
# (F0, L2)
# It is fed twice for the ambiguous prediction test, because of the
# off-by-one error in distal predictions
# FIXME when this is changed in ColumnPooler
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[2], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[2], 0, 0)
sensorInput0.addDataToQueue(features0[0], 0, 0)
externalInput0.addDataToQueue(locations0[2], 0, 0)
sensorInput1.addDataToQueue(features1[0], 0, 0)
externalInput1.addDataToQueue(locations1[2], 0, 0)
net.run(2)
# check L2 representation, L4 representation, no bursting
self.assertEqual(
self.getCurrentL2Representation(L2Column0),
L2RepresentationB0
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column0),
L4Representation02_0
)
self.assertEqual(len(self.getL4BurstingCells(L4Column0)), 0)
self.assertEqual(
self.getCurrentL2Representation(L2Column1),
L2RepresentationB1
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column1),
L4Representation02_1
)
self.assertEqual(len(self.getL4BurstingCells(L4Column1)), 0)
# ambiguous pattern: (F1, L1)
sensorInput0.addDataToQueue(features0[1], 0, 0)
externalInput0.addDataToQueue(locations0[1], 0, 0)
sensorInput1.addDataToQueue(features1[1], 0, 0)
externalInput1.addDataToQueue(locations1[1], 0, 0)
net.run(1)
# check L2 representation, L4 representation, no bursting
# unlike in the single-column test above, the representation is not ambiguous
self.assertEqual(
self.getCurrentL2Representation(L2Column0),
L2RepresentationB0
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column0),
L4Representation11_0
)
self.assertEqual(len(self.getL4BurstingCells(L4Column0)), 0)
self.assertEqual(
self.getCurrentL2Representation(L2Column1),
L2RepresentationB1
)
self.assertEqual(
self.getL4PredictedActiveCells(L4Column1),
L4Representation11_1
)
self.assertEqual(len(self.getL4BurstingCells(L4Column1)), 0)
# unknown signal
sensorInput0.addDataToQueue(features0[1], 0, 0)
externalInput0.addDataToQueue(locations0[2], 0, 0)
sensorInput1.addDataToQueue(features1[1], 0, 0)
externalInput1.addDataToQueue(locations1[2], 0, 0)
net.run(1)
# check bursting in L4: almost nothing is predicted for this unknown
# (feature, location) pair, so nearly all active cells burst; the L2
# representation should look like random SP output
self.assertLessEqual(len(self.getL4PredictedActiveCells(L4Column0)), 3)
self.assertGreaterEqual(len(self.getL4BurstingCells(L4Column0)), 20 * 7)
self.assertLessEqual(len(self.getL4PredictedActiveCells(L4Column1)), 3)
self.assertGreaterEqual(len(self.getL4BurstingCells(L4Column1)), 20 * 7)
def generatePattern(self, numCells, size):
"""Generates a random pattern of `size` active cell indices out of `numCells`."""
cellIndices = range(numCells)
random.shuffle(cellIndices)
return cellIndices[:size]
def getL4PredictedCells(self, column):
"""
Returns the cells in L4 that were predicted at the beginning of the last
call to 'compute'.
"""
return set(column._tm.getPredictedCells())
def getL4PredictedActiveCells(self, column):
"""Returns the predicted active cells in L4."""
activeCells = set(column._tm.getActiveCells())
predictedCells = set(column._tm.getPredictedCells())
return activeCells & predictedCells
def getL4BurstingCells(self, column):
"""Returns the bursting cells in L4."""
activeCells = set(column._tm.getActiveCells())
predictedCells = set(column._tm.getPredictedCells())
return activeCells - predictedCells
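# Illustrative relationship between the two L4 helpers above, using
# hypothetical cell indices (not taken from the actual test data):
#   activeCells = {1, 2, 3}, predictedCells = {2, 3, 4}
#   predicted-active cells: activeCells & predictedCells == {2, 3}
#   bursting cells:         activeCells - predictedCells == {1}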
def getCurrentL2Representation(self, column):
"""Returns the current active representation in a given L2 column."""
return set(column._pooler.activeCells)
if __name__ == "__main__":
unittest.main()
|
RydrDojo/Ridr
|
refs/heads/master
|
pylotVenv/lib/python2.7/site-packages/requests/models.py
|
148
|
# -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.url)
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
return data
elif hasattr(data, '__iter__'):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return urlencode(result, doseq=True)
else:
return data
@staticmethod
def _encode_files(files, data):
"""Build the body for a multipart/form-data request.
Will successfully encode files when passed as a dict or a list of
tuples. Order is retained if data is a list of tuples but arbitrary
if parameters are supplied as a dict.
The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
or 4-tuples (filename, fileobj, contentype, custom_headers).
"""
if (not files):
raise ValueError("Files must be provided.")
elif isinstance(data, basestring):
raise ValueError("Data must not be a string.")
new_fields = []
fields = to_key_val_list(data or {})
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'):
val = [val]
for v in val:
if v is not None:
# Don't call str() on bytestrings: in Py3 it all goes wrong.
if not isinstance(v, bytes):
v = str(v)
new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field,
v.encode('utf-8') if isinstance(v, str) else v))
for (k, v) in files:
# support for explicit filename
ft = None
fh = None
if isinstance(v, (tuple, list)):
if len(v) == 2:
fn, fp = v
elif len(v) == 3:
fn, fp, ft = v
else:
fn, fp, ft, fh = v
else:
fn = guess_filename(v) or k
fp = v
if isinstance(fp, (str, bytes, bytearray)):
fdata = fp
else:
fdata = fp.read()
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
body, content_type = encode_multipart_formdata(new_fields)
return body, content_type
class RequestHooksMixin(object):
def register_hook(self, event, hook):
"""Properly register a hook."""
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
if isinstance(hook, collections.Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
Returns True if the hook existed, False if not.
"""
try:
self.hooks[event].remove(hook)
return True
except ValueError:
return False
class Request(RequestHooksMixin):
"""A user-created :class:`Request <Request>` object.
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
:param method: HTTP method to use.
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
:param json: json for the body to attach to the request (if files or data is not specified).
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
def __init__(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
self.method = method
self.url = url
self.headers = headers
self.files = files
self.data = data
self.json = json
self.params = params
self.auth = auth
self.cookies = cookies
def __repr__(self):
return '<Request [%s]>' % (self.method)
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
p = PreparedRequest()
p.prepare(
method=self.method,
url=self.url,
headers=self.headers,
files=self.files,
data=self.data,
json=self.json,
params=self.params,
auth=self.auth,
cookies=self.cookies,
hooks=self.hooks,
)
return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
def __init__(self):
#: HTTP verb to send to the server.
self.method = None
#: HTTP URL to send the request to.
self.url = None
#: dictionary of HTTP headers.
self.headers = None
# The `CookieJar` used to create the Cookie header will be stored here
# after prepare_cookies is called
self._cookies = None
#: request body to send to the server.
self.body = None
#: dictionary of callback hooks, for internal usage.
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
self.prepare_url(url, params)
self.prepare_headers(headers)
self.prepare_cookies(cookies)
self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
# This MUST go after prepare_auth. Authenticators could add a hook
self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body
p.hooks = self.hooks
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = to_native_string(self.method.upper())
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
#: We're unable to blindly call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
else:
url = unicode(url) if is_py2 else str(url)
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
try:
scheme, auth, host, port, path, query, fragment = parse_url(url)
except LocationParseError as e:
raise InvalidURL(*e.args)
if not scheme:
error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
if isinstance(params, (str, bytes)):
params = to_native_string(params)
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files, json=None):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
if not data and json is not None:
content_type = 'application/json'
body = complexjson.dumps(json)
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, dict))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data:
body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
curr_pos = body.tell()
body.seek(0, 2)
end_pos = body.tell()
self.headers['Content-Length'] = builtin_str(max(0, end_pos - curr_pos))
body.seek(curr_pos, 0)
elif body is not None:
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data.
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
can only be called once for the life of the
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand."""
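# Illustrative consequence (hypothetical usage, not from the library
# source): to re-prepare cookies on an already-prepared request, the
# stale header must be dropped first, e.g.:
#   prepared.headers.pop('Cookie', None)
#   prepared.prepare_cookies(new_jar)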
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
__attrs__ = [
'_content', 'status_code', 'headers', 'url', 'history',
'encoding', 'reason', 'cookies', 'elapsed', 'request'
]
def __init__(self):
super(Response, self).__init__()
self._content = False
self._content_consumed = False
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
# This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
self.url = None
#: Encoding to decode with when accessing r.text.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here. The list is sorted from the oldest to the most recent request.
self.history = []
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
self.reason = None
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta).
#: This property specifically measures the time taken between sending
#: the first byte of the request and finishing parsing the headers. It
#: is therefore unaffected by consuming the response content or the
#: value of the ``stream`` keyword argument.
self.elapsed = datetime.timedelta(0)
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
#: is a response.
self.request = None
def __getstate__(self):
# Consume everything; accessing the content attribute makes
# sure the content has been fully read.
if not self._content_consumed:
self.content
return dict(
(attr, getattr(self, attr, None))
for attr in self.__attrs__
)
def __setstate__(self, state):
for name, value in state.items():
setattr(self, name, value)
# pickled objects do not have .raw
setattr(self, '_content_consumed', True)
setattr(self, 'raw', None)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __iter__(self):
"""Allows you to use a response as an iterator."""
return self.iter_content(128)
@property
def ok(self):
try:
self.raise_for_status()
except HTTPError:
return False
return True
@property
def is_redirect(self):
"""True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`).
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
@property
def is_permanent_redirect(self):
"""True if this Response one of the permanent versions of redirect"""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library"""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
# Special case for urllib3.
if hasattr(self.raw, 'stream'):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError()
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
stream_chunks = generate()
chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
chunks = stream_decode_response_unicode(chunks, self)
return chunks
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
.. note:: This method is not reentrant safe.
"""
pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
if delimiter:
lines = chunk.split(delimiter)
else:
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is False:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
# since we exhausted the data.
return self._content
@property
def text(self):
"""Content of the response, in unicode.
If Response.encoding is None, encoding will be guessed using
``chardet``.
The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
non-HTTP knowledge to make a better guess at the encoding, you should
set ``r.encoding`` appropriately before accessing this property.
"""
# Try charset from content-type
content = None
encoding = self.encoding
if not self.content:
return str('')
# Fallback to auto-detected encoding.
if self.encoding is None:
encoding = self.apparent_encoding
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
return content
def json(self, **kwargs):
"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
"""
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return complexjson.loads(
self.content.decode(encoding), **kwargs
)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
return complexjson.loads(self.text, **kwargs)
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers.get('link')
# l = MultiDict()
l = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
return l
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
def close(self):
"""Releases the connection back to the pool. Once this method has been
called the underlying ``raw`` object must not be accessed again.
*Note: Should not normally need to be called explicitly.*
"""
if not self._content_consumed:
return self.raw.close()
return self.raw.release_conn()
|
zachcp/bioconda-recipes
|
refs/heads/master
|
recipes/womtool/womtool.py
|
19
|
#!/usr/bin/env python
#
# Wrapper script for invoking the jar.
#
# This script is written for use with the Conda package manager and is ported
# from a bash script that does the same thing, adapting the style in
# the peptide-shaker wrapper
# (https://github.com/bioconda/bioconda-recipes/blob/master/recipes/peptide-shaker/peptide-shaker.py)
import subprocess
import sys
import os
from os import access, getenv, path, X_OK
# Expected name of the womtool JAR file.
JAR_NAME = 'womtool.jar'
PKG_NAME = 'womtool'
# Default options passed to the `java` executable.
DEFAULT_JVM_MEM_OPTS = ['-Xms512m', '-Xmx1g']
def real_dirname(in_path):
"""Returns the path to the JAR file"""
realPath = os.path.dirname(os.path.realpath(in_path))
newPath = os.path.realpath(os.path.join(realPath, "..", "share", PKG_NAME))
return newPath
def java_executable():
"""Returns the name of the Java executable."""
java_home = getenv('JAVA_HOME')
java_bin = path.join('bin', 'java')
env_prefix = os.path.dirname(os.path.dirname(real_dirname(sys.argv[0])))
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
# Use Java installed with Anaconda to ensure correct version
return os.path.join(env_prefix, 'bin', 'java')
def jvm_opts(argv, default_mem_opts=DEFAULT_JVM_MEM_OPTS):
"""Constructs a list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
The return value is a 3-tuple lists of strings of the form:
(memory_options, prop_options, passthrough_options)
"""
mem_opts, prop_opts, pass_args = [], [], []
for arg in argv:
if arg.startswith('-D') or arg.startswith('-XX'):
opts_list = prop_opts
elif arg.startswith('-Xm'):
opts_list = mem_opts
else:
opts_list = pass_args
opts_list.append(arg)
if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
mem_opts = default_mem_opts
return (mem_opts, prop_opts, pass_args)
def main():
java = java_executable()
jar_dir = real_dirname(sys.argv[0])
(mem_opts, prop_opts, pass_args) = jvm_opts(sys.argv[1:])
if pass_args != [] and pass_args[0].startswith('org'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = path.join(jar_dir, JAR_NAME)
java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
sys.exit(subprocess.call(java_args))
if __name__ == "__main__":
main()
|
rizumu/pinax-starter-app
|
refs/heads/master
|
pinax/app_name/tests/tests.py
|
29
|
from django.test import TestCase
class Tests(TestCase):
def setUp(self):
pass
|
MoyTW/roguebasin_rpas
|
refs/heads/master
|
example.py
|
1
|
__author__ = 'Travis Moy'
import random
import rpas
# Map generation parameters
prob_is_obstructed = .1
map_size = 20
# False values are obstructed; True are unobstructed.
grid = [[prob_is_obstructed < random.uniform(0.0, 1.0) for _ in range(map_size)] for _ in range(map_size)]
# Normally this would be some class' function, accessing that class' data.
def is_unobstructed(x, y):
try:
return grid[x][y]
except IndexError:
return False
center_x = map_size / 2
center_y = map_size / 2
radius = 10
def print_row(row, cells):
print_str = ''
for col in range(map_size):
if col == center_x and row == center_y:
print_str += '@'
elif (col, row) in cells:
if is_unobstructed(col, row):
print_str += '.'
else:
print_str += 'X'
else:
print_str += ' '
print print_str
def print_map_vision(cells):
for row in range(map_size):
print_row(row, cells)
fov = rpas.FOVCalc()
# FOV settings parameters; most restrictive
fov.NOT_VISIBLE_BLOCKS_VISION = True
fov.RESTRICTIVENESS = 2
fov.VISIBLE_ON_EQUAL = False
cells = fov.calc_visible_cells_from(center_x, center_y, radius, is_unobstructed)
print "Most restrictive settings:"
print_map_vision(cells)
# FOV settings parameters; least restrictive
fov.NOT_VISIBLE_BLOCKS_VISION = False
fov.RESTRICTIVENESS = 0
fov.VISIBLE_ON_EQUAL = True
cells = fov.calc_visible_cells_from(center_x, center_y, radius, is_unobstructed)
print "Least restrictive settings:"
print_map_vision(cells)
|
joelpet/android-quill
|
refs/heads/master
|
jni/libhpdf-2.3.0RC2/if/python/demo/text_demo.py
|
32
|
###
## * << Haru Free PDF Library 2.0.0 >> -- text_demo.c
## *
## * Copyright (c) 1999-2006 Takeshi Kanno <takeshi_kanno@est.hi-ho.ne.jp>
## *
## * Permission to use, copy, modify, distribute and sell this software
## * and its documentation for any purpose is hereby granted without fee,
## * provided that the above copyright notice appear in all copies and
## * that both that copyright notice and this permission notice appear
## * in supporting documentation.
## * It is provided "as is" without express or implied warranty.
## *
##
## port to python by Li Jun
## http://groups.google.com/group/pythoncia
import os, sys
from ctypes import *
up=2
def setlibpath(up):
import sys
path = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), *([os.pardir] * up)))
if path not in sys.path:
sys.path.append(path)
setlibpath(up)
from haru import *
from haru.c_func import *
from haru.hpdf_errorcode import *
from grid_sheet import *
from math import *
@HPDF_Error_Handler(None, HPDF_UINT, HPDF_UINT, c_void_p)
def error_handler (error_no, detail_no, user_data):
global pdf
printf ("ERROR: %s, detail_no=%u\n", error_detail[error_no],
detail_no)
HPDF_Free (pdf)
sys.exit(1)
def show_stripe_pattern (page, x, y):
iy = 0
while (iy < 50):
HPDF_Page_SetRGBStroke (page, 0.0, 0.0, 0.5)
HPDF_Page_SetLineWidth (page, 1)
HPDF_Page_MoveTo (page, x, y + iy)
HPDF_Page_LineTo (page, x + HPDF_Page_TextWidth (page, "ABCabc123"),
y + iy)
HPDF_Page_Stroke (page)
iy += 3
HPDF_Page_SetLineWidth (page, 2.5)
def show_description (page, x, y, text):
fsize = HPDF_Page_GetCurrentFontSize (page)
font = HPDF_Page_GetCurrentFont (page)
c = HPDF_Page_GetRGBFill (page)
HPDF_Page_BeginText (page)
HPDF_Page_SetRGBFill (page, 0, 0, 0)
HPDF_Page_SetTextRenderingMode (page, HPDF_FILL)
HPDF_Page_SetFontAndSize (page, font, 10)
HPDF_Page_TextOut (page, x, y - 12, text)
HPDF_Page_EndText (page)
HPDF_Page_SetFontAndSize (page, font, fsize)
HPDF_Page_SetRGBFill (page, c.r, c.g, c.b)
def main ():
global pdf
page_title = "Text Demo"
samp_text = "abcdefgABCDEFG123!#$%&+-@?"
samp_text2 = "The quick brown fox jumps over the lazy dog."
fname=os.path.realpath(sys.argv[0])
fname=fname[:fname.rfind('.')]+'.pdf'
pdf = HPDF_New (error_handler, NULL)
if (not pdf):
printf ("error: cannot create PdfDoc object\n")
return 1
# set compression mode
HPDF_SetCompressionMode (pdf, HPDF_COMP_ALL)
# create default-font
font = HPDF_GetFont (pdf, "Helvetica", NULL)
# add a new page object.
page = HPDF_AddPage (pdf)
# draw grid to the page
print_grid (pdf, page)
# print the lines of the page.
HPDF_Page_SetLineWidth (page, 1)
HPDF_Page_Rectangle (page, 50, 50, HPDF_Page_GetWidth(page) - 100,
HPDF_Page_GetHeight (page) - 110)
HPDF_Page_Stroke (page)
# print the title of the page (with positioning center).
HPDF_Page_SetFontAndSize (page, font, 24)
tw = HPDF_Page_TextWidth (page, page_title)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, (HPDF_Page_GetWidth(page) - tw) / 2,
HPDF_Page_GetHeight (page) - 50, page_title)
HPDF_Page_EndText (page)
HPDF_Page_BeginText (page)
HPDF_Page_MoveTextPos (page, 60, HPDF_Page_GetHeight(page) - 60)
# font size
fsize = 8
while (fsize < 60):
# set style and size of font.
HPDF_Page_SetFontAndSize(page, font, fsize)
# set the position of the text.
HPDF_Page_MoveTextPos (page, 0, -5 - fsize)
# measure the number of characters that fit within the page width.
length = HPDF_Page_MeasureText (page, samp_text,
HPDF_Page_GetWidth(page) - 120, HPDF_FALSE, NULL)
# truncate the text to the measured length.
buf = samp_text[:int(length)]
HPDF_Page_ShowText (page, buf)
# print the description.
HPDF_Page_MoveTextPos (page, 0, -10)
HPDF_Page_SetFontAndSize(page, font, 8)
buf="Fontsize=%.0f" %fsize
HPDF_Page_ShowText (page, buf)
fsize *= 1.5
# font color
HPDF_Page_SetFontAndSize(page, font, 8)
HPDF_Page_MoveTextPos (page, 0, -30)
HPDF_Page_ShowText (page, "Font color")
HPDF_Page_SetFontAndSize (page, font, 18)
HPDF_Page_MoveTextPos (page, 0, -20)
length = len (samp_text)
for i in range(length):
buf=[None ,None]
r = i / float(length)
g = 1 - (i / float(length))
buf[0] = samp_text[i]
buf[1] = '\0'
HPDF_Page_SetRGBFill (page, r, g, 0.0)
HPDF_Page_ShowText (page, buf)
HPDF_Page_MoveTextPos (page, 0, -25)
for i in range(length):
buf=[None ,None]
r = i / float(length)
b = 1 - (i / float(length))
buf[0] = samp_text[i]
buf[1] = '\0'
HPDF_Page_SetRGBFill (page, r, 0.0, b)
HPDF_Page_ShowText (page, buf)
HPDF_Page_MoveTextPos (page, 0, -25)
for i in range(length):
buf=[None ,None]
b = i / float(length)
g = 1 - (i / float(length))
buf[0] = samp_text[i]
buf[1] = '\0'
HPDF_Page_SetRGBFill (page, 0.0, g, b)
HPDF_Page_ShowText (page, buf)
HPDF_Page_EndText (page)
ypos = 450
#
# Font rendering mode
#
HPDF_Page_SetFontAndSize(page, font, 32)
HPDF_Page_SetRGBFill (page, 0.5, 0.5, 0.0)
HPDF_Page_SetLineWidth (page, 1.5)
# PDF_FILL
show_description (page, 60, ypos,
"RenderingMode=PDF_FILL")
HPDF_Page_SetTextRenderingMode (page, HPDF_FILL)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, ypos, "ABCabc123")
HPDF_Page_EndText (page)
# PDF_STROKE
show_description (page, 60, ypos - 50,
"RenderingMode=PDF_STROKE")
HPDF_Page_SetTextRenderingMode (page, HPDF_STROKE)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, ypos - 50, "ABCabc123")
HPDF_Page_EndText (page)
# PDF_FILL_THEN_STROKE
show_description (page, 60, ypos - 100,
"RenderingMode=PDF_FILL_THEN_STROKE")
HPDF_Page_SetTextRenderingMode (page, HPDF_FILL_THEN_STROKE)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, ypos - 100, "ABCabc123")
HPDF_Page_EndText (page)
# PDF_FILL_CLIPPING
show_description (page, 60, ypos - 150,
"RenderingMode=PDF_FILL_CLIPPING")
HPDF_Page_GSave (page)
HPDF_Page_SetTextRenderingMode (page, HPDF_FILL_CLIPPING)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, ypos - 150, "ABCabc123")
HPDF_Page_EndText (page)
show_stripe_pattern (page, 60, ypos - 150)
HPDF_Page_GRestore (page)
# PDF_STROKE_CLIPPING
show_description (page, 60, ypos - 200,
"RenderingMode=PDF_STROKE_CLIPPING")
HPDF_Page_GSave (page)
HPDF_Page_SetTextRenderingMode (page, HPDF_STROKE_CLIPPING)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, ypos - 200, "ABCabc123")
HPDF_Page_EndText (page)
show_stripe_pattern (page, 60, ypos - 200)
HPDF_Page_GRestore (page)
# PDF_FILL_STROKE_CLIPPING
show_description (page, 60, ypos - 250,
"RenderingMode=PDF_FILL_STROKE_CLIPPING")
HPDF_Page_GSave (page)
HPDF_Page_SetTextRenderingMode (page, HPDF_FILL_STROKE_CLIPPING)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, ypos - 250, "ABCabc123")
HPDF_Page_EndText (page)
show_stripe_pattern (page, 60, ypos - 250)
HPDF_Page_GRestore (page)
# Reset text attributes
HPDF_Page_SetTextRenderingMode (page, HPDF_FILL)
HPDF_Page_SetRGBFill (page, 0, 0, 0)
HPDF_Page_SetFontAndSize(page, font, 30)
#
# Rotating text
#
angle1 = 30  # A rotation of 30 degrees.
rad1 = angle1 / 180.0 * 3.141592  # Calculate the radian value (degrees * pi / 180).
show_description (page, 320, ypos - 60, "Rotating text")
HPDF_Page_BeginText (page)
HPDF_Page_SetTextMatrix (page, cos(rad1), sin(rad1), -sin(rad1), cos(rad1),
330, ypos - 60)
HPDF_Page_ShowText (page, "ABCabc123")
HPDF_Page_EndText (page)
#
# Skewing text.
#
show_description (page, 320, ypos - 120, "Skewing text")
HPDF_Page_BeginText (page)
angle1 = 10
angle2 = 20
rad1 = angle1 / 180.0 * 3.141592
rad2 = angle2 / 180.0 * 3.141592
HPDF_Page_SetTextMatrix (page, 1, tan(rad1), tan(rad2), 1, 320, ypos - 120)
HPDF_Page_ShowText (page, "ABCabc123")
HPDF_Page_EndText (page)
#
# scaling text (X direction)
#
show_description (page, 320, ypos - 175, "Scaling text (X direction)")
HPDF_Page_BeginText (page)
HPDF_Page_SetTextMatrix (page, 1.5, 0, 0, 1, 320, ypos - 175)
HPDF_Page_ShowText (page, "ABCabc12")
HPDF_Page_EndText (page)
#
# scaling text (Y direction)
#
show_description (page, 320, ypos - 250, "Scaling text (Y direction)")
HPDF_Page_BeginText (page)
HPDF_Page_SetTextMatrix (page, 1, 0, 0, 2, 320, ypos - 250)
HPDF_Page_ShowText (page, "ABCabc123")
HPDF_Page_EndText (page)
#
# char spacing, word spacing
#
show_description (page, 60, 140, "char-spacing 0")
show_description (page, 60, 100, "char-spacing 1.5")
show_description (page, 60, 60, "char-spacing 1.5, word-spacing 2.5")
HPDF_Page_SetFontAndSize (page, font, 20)
HPDF_Page_SetRGBFill (page, 0.1, 0.3, 0.1)
## char-spacing 0
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, 140, samp_text2)
HPDF_Page_EndText (page)
# char-spacing 1.5
HPDF_Page_SetCharSpace (page, 1.5)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, 100, samp_text2)
HPDF_Page_EndText (page)
# char-spacing 1.5, word-spacing 2.5
HPDF_Page_SetWordSpace (page, 2.5)
HPDF_Page_BeginText (page)
HPDF_Page_TextOut (page, 60, 60, samp_text2)
HPDF_Page_EndText (page)
# save the document to a file
HPDF_SaveToFile (pdf, fname)
# clean up
HPDF_Free (pdf)
return 0
main()
|
Vimos/scikit-learn
|
refs/heads/master
|
benchmarks/bench_tree.py
|
131
|
"""
To run this, you'll need to have the following installed:
* scikit-learn
This script runs two benchmarks.
First, we fix the dimensionality, increase the number of training samples,
and plot the time taken to fit and predict as a function of the number of
samples.
In the second benchmark, we increase the number of dimensions of the
training set, classify a sample and plot the time taken as a function
of the number of dimensions.
"""
import numpy as np
import matplotlib.pyplot as plt
import gc
from datetime import datetime
# to store the results
scikit_classifier_results = []
scikit_regressor_results = []
mu_second = 0.0 + 10 ** 6 # number of microseconds in a second
def bench_scikit_tree_classifier(X, Y):
"""Benchmark with scikit-learn decision tree classifier"""
from sklearn.tree import DecisionTreeClassifier
gc.collect()
# start time
tstart = datetime.now()
clf = DecisionTreeClassifier()
clf.fit(X, Y).predict(X)
delta = (datetime.now() - tstart)
# stop time
scikit_classifier_results.append(
delta.seconds + delta.microseconds / mu_second)
def bench_scikit_tree_regressor(X, Y):
"""Benchmark with scikit-learn decision tree regressor"""
from sklearn.tree import DecisionTreeRegressor
gc.collect()
# start time
tstart = datetime.now()
clf = DecisionTreeRegressor()
clf.fit(X, Y).predict(X)
delta = (datetime.now() - tstart)
# stop time
scikit_regressor_results.append(
delta.seconds + delta.microseconds / mu_second)
if __name__ == '__main__':
print('============================================')
print('Warning: this is going to take a looong time')
print('============================================')
n = 10
step = 10000
n_samples = 10000
dim = 10
n_classes = 10
for i in range(n):
print('============================================')
print('Entering iteration %s of %s' % (i, n))
print('============================================')
n_samples += step
X = np.random.randn(n_samples, dim)
Y = np.random.randint(0, n_classes, (n_samples,))
bench_scikit_tree_classifier(X, Y)
Y = np.random.randn(n_samples)
bench_scikit_tree_regressor(X, Y)
xx = range(0, n * step, step)
plt.figure('scikit-learn tree benchmark results')
plt.subplot(211)
plt.title('Learning with varying number of samples')
plt.plot(xx, scikit_classifier_results, 'g-', label='classification')
plt.plot(xx, scikit_regressor_results, 'r-', label='regression')
plt.legend(loc='upper left')
plt.xlabel('number of samples')
plt.ylabel('Time (s)')
scikit_classifier_results = []
scikit_regressor_results = []
n = 10
step = 500
start_dim = 500
n_classes = 10
dim = start_dim
for i in range(0, n):
print('============================================')
print('Entering iteration %s of %s' % (i, n))
print('============================================')
dim += step
X = np.random.randn(100, dim)
Y = np.random.randint(0, n_classes, (100,))
bench_scikit_tree_classifier(X, Y)
Y = np.random.randn(100)
bench_scikit_tree_regressor(X, Y)
xx = np.arange(start_dim, start_dim + n * step, step)
plt.subplot(212)
plt.title('Learning in high dimensional spaces')
plt.plot(xx, scikit_classifier_results, 'g-', label='classification')
plt.plot(xx, scikit_regressor_results, 'r-', label='regression')
plt.legend(loc='upper left')
plt.xlabel('number of dimensions')
plt.ylabel('Time (s)')
plt.axis('tight')
plt.show()
|
opengeogroep/inasafe
|
refs/heads/master
|
realtime/test/test_shake_data.py
|
1
|
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid and World Bank
- **Tests Shake Data functionality related to shakemaps.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'tim@linfiniti.com'
__version__ = '0.5.0'
__date__ = '30/07/2012'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import os
import shutil
import unittest
from realtime.shake_data import ShakeData
from realtime.utils import (
shakemap_zip_dir,
purge_working_data,
shakemap_extract_dir)
# Clear away working dirs so we can be sure they are
# actually created
purge_working_data()
class TestShakeMap(unittest.TestCase):
"""Testing for the shakemap class"""
#noinspection PyPep8Naming
def setUp(self):
"""Copy our cached dataset from the fixture dir to the cache dir"""
output_file = '20120726022003.out.zip'
input_file = '20120726022003.inp.zip'
output_path = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'../fixtures',
output_file))
input_path = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'../fixtures',
input_file))
shutil.copyfile(
output_path,
os.path.join(shakemap_zip_dir(),
output_file))
shutil.copyfile(
input_path,
os.path.join(shakemap_zip_dir(),
input_file))
#TODO Downloaded data should be removed before each test
def test_get_shake_map_input(self):
"""Check that we can retrieve a shakemap 'inp' input file"""
shake_event = '20110413170148'
shake_data = ShakeData(shake_event)
shakemap_file = shake_data.fetch_input()
expected_file = os.path.join(shakemap_zip_dir(),
shake_event + '.inp.zip')
message = 'Expected path for downloaded shakemap INP not received'
self.assertEqual(shakemap_file, expected_file, message)
def test_get_shake_map_output(self):
"""Check that we can retrieve a shakemap 'out' input file"""
event_id = '20110413170148'
shake_data = ShakeData(event_id)
shakemap_file = shake_data.fetch_output()
expected_file = os.path.join(shakemap_zip_dir(),
event_id + '.out.zip')
message = 'Expected path for downloaded shakemap OUT not received'
self.assertEqual(shakemap_file, expected_file, message)
def test_get_remote_shake_map(self):
"""Check that we can retrieve both input and output from ftp at once"""
shake_event = '20110413170148'
shake_data = ShakeData(shake_event)
expected_input_file = os.path.join(
shakemap_zip_dir(),
shake_event + '.inp.zip')
expected_output_file = os.path.join(
shakemap_zip_dir(),
shake_event + '.out.zip')
if os.path.exists(expected_input_file):
os.remove(expected_input_file)
if os.path.exists(expected_output_file):
os.remove(expected_output_file)
input_file, output_file = shake_data.fetch_event()
message = ('Expected path for downloaded shakemap INP not received'
'\nExpected: %s\nGot: %s' %
(expected_input_file, input_file))
self.assertEqual(input_file, expected_input_file, message)
message = ('Expected path for downloaded shakemap OUT not received'
'\nExpected: %s\nGot: %s' %
(expected_output_file, output_file))
self.assertEqual(output_file, expected_output_file, message)
assert os.path.exists(expected_input_file)
assert os.path.exists(expected_output_file)
def test_get_cached_shake_map(self):
"""Check that we can retrieve both input and output from ftp at once"""
shake_event = '20120726022003'
expected_input_file = os.path.join(shakemap_zip_dir(),
shake_event + '.inp.zip')
expected_output_file = os.path.join(shakemap_zip_dir(),
shake_event + '.out.zip')
shake_data = ShakeData(shake_event)
input_file, output_file = shake_data.fetch_event()
message = ('Expected path for downloaded shakemap INP not received'
'\nExpected: %s\nGot: %s' %
(expected_input_file, input_file))
self.assertEqual(input_file, expected_input_file, message)
message = ('Expected path for downloaded shakemap OUT not received'
'\nExpected: %s\nGot: %s' %
(expected_output_file, output_file))
self.assertEqual(output_file, expected_output_file, message)
def test_get_latest_shake_map(self):
"""Check that we can retrieve the latest shake event"""
# Simply don't set the event id in the ctor to get the latest event
shake_data = ShakeData()
input_file, output_file = shake_data.fetch_event()
event_id = shake_data.event_id
expected_input_file = os.path.join(shakemap_zip_dir(),
event_id + '.inp.zip')
expected_output_file = os.path.join(shakemap_zip_dir(),
event_id + '.out.zip')
message = ('Expected path for downloaded shakemap INP not received'
'\nExpected: %s\nGot: %s' %
(expected_input_file, input_file))
self.assertEqual(input_file, expected_input_file, message)
message = ('Expected path for downloaded shakemap OUT not received'
'\nExpected: %s\nGot: %s' %
(expected_output_file, output_file))
self.assertEqual(output_file, expected_output_file, message)
def test_extract_shake_map(self):
"""Test that we can extract the shakemap inp and out files"""
shake_event = '20120726022003'
shake_data = ShakeData(shake_event)
grid_xml = shake_data.extract(force_flag=True)
extract_dir = shakemap_extract_dir()
expected_grid_xml = (os.path.join(extract_dir,
'20120726022003/grid.xml'))
message = 'Expected: %s\nGot: %s\n' % (expected_grid_xml, grid_xml)
assert grid_xml == expected_grid_xml, message
assert os.path.exists(grid_xml)
def test_check_event_is_on_server(self):
"""Test that we can check if an event is on the server."""
shake_event = '20120726022003'
shake_data = ShakeData(shake_event)
self.assertTrue(shake_data.is_on_server(),
('Data for %s should be on the server' % shake_event))
def test_cache_paths(self):
"""Check we compute local cache paths properly."""
shake_event = '20120726022003'
shake_data = ShakeData(shake_event)
expected_input_path = ('/tmp/inasafe/realtime/shakemaps-zipped/'
'20120726022003.inp.zip')
expected_output_path = ('/tmp/inasafe/realtime/shakemaps-zipped/'
'20120726022003.out.zip')
input_path, output_path = shake_data.cache_paths()
message = 'Expected: %s\nGot: %s' % (expected_input_path, input_path)
assert input_path == expected_input_path, message
message = 'Expected: %s\nGot: %s' % (expected_output_path, output_path)
assert output_path == expected_output_path, message
def test_file_names(self):
"""Check we compute file names properly."""
shake_event = '20120726022003'
shake_data = ShakeData(shake_event)
expected_input_file_name = '20120726022003.inp.zip'
expected_output_file_name = '20120726022003.out.zip'
input_file_name, output_file_name = shake_data.file_names()
message = 'Expected: %s\nGot: %s' % (
expected_input_file_name, input_file_name)
assert input_file_name == expected_input_file_name, message
message = 'Expected: %s\nGot: %s' % (
expected_output_file_name, output_file_name)
assert output_file_name == expected_output_file_name, message
if __name__ == '__main__':
unittest.main()
|
elpaso/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/qgis/TopoColors.py
|
31
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
TopoColors.py
--------------
Date : February 2017
Copyright : (C) 2017 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'February 2017'
__copyright__ = '(C) 2017, Nyall Dawson'
import os
import operator
import sys
from collections import defaultdict
from qgis.core import (QgsField,
QgsFeatureSink,
QgsGeometry,
QgsSpatialIndex,
QgsPointXY,
NULL,
QgsProcessing,
QgsProcessingException,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterDistance,
QgsProcessingParameterNumber,
QgsProcessingParameterEnum,
QgsProcessingParameterFeatureSink)
from qgis.PyQt.QtCore import (QVariant)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class TopoColor(QgisAlgorithm):
INPUT = 'INPUT'
MIN_COLORS = 'MIN_COLORS'
MIN_DISTANCE = 'MIN_DISTANCE'
BALANCE = 'BALANCE'
OUTPUT = 'OUTPUT'
def tags(self):
return self.tr('topocolor,colors,graph,adjacent,assign').split(',')
def group(self):
return self.tr('Cartography')
def groupId(self):
return 'cartography'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Input layer'),
[QgsProcessing.TypeVectorPolygon]))
self.addParameter(QgsProcessingParameterNumber(self.MIN_COLORS,
self.tr('Minimum number of colors'), minValue=1, maxValue=1000,
defaultValue=4))
self.addParameter(QgsProcessingParameterDistance(self.MIN_DISTANCE,
self.tr('Minimum distance between features'),
parentParameterName=self.INPUT, minValue=0.0,
defaultValue=0.0))
balance_by = [self.tr('By feature count'),
self.tr('By assigned area'),
self.tr('By distance between colors')]
self.addParameter(QgsProcessingParameterEnum(
self.BALANCE,
self.tr('Balance color assignment'),
options=balance_by, defaultValue=0))
self.addParameter(
QgsProcessingParameterFeatureSink(self.OUTPUT, self.tr('Colored'), QgsProcessing.TypeVectorPolygon))
def name(self):
return 'topologicalcoloring'
def displayName(self):
return self.tr('Topological coloring')
def processAlgorithm(self, parameters, context, feedback):
source = self.parameterAsSource(parameters, self.INPUT, context)
if source is None:
raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
min_colors = self.parameterAsInt(parameters, self.MIN_COLORS, context)
balance_by = self.parameterAsEnum(parameters, self.BALANCE, context)
min_distance = self.parameterAsDouble(parameters, self.MIN_DISTANCE, context)
fields = source.fields()
fields.append(QgsField('color_id', QVariant.Int))
(sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
fields, source.wkbType(), source.sourceCrs())
if sink is None:
raise QgsProcessingException(self.invalidSinkError(parameters, self.OUTPUT))
features = {f.id(): f for f in source.getFeatures()}
topology, id_graph = self.compute_graph(features, feedback, min_distance=min_distance)
feature_colors = ColoringAlgorithm.balanced(features,
balance=balance_by,
graph=topology,
feedback=feedback,
min_colors=min_colors)
if len(feature_colors) == 0:
return {self.OUTPUT: dest_id}
max_colors = max(feature_colors.values())
feedback.pushInfo(self.tr('{} colors required').format(max_colors))
total = 20.0 / len(features)
current = 0
for feature_id, input_feature in features.items():
if feedback.isCanceled():
break
output_feature = input_feature
attributes = input_feature.attributes()
if feature_id in feature_colors:
attributes.append(feature_colors[feature_id])
else:
attributes.append(NULL)
output_feature.setAttributes(attributes)
sink.addFeature(output_feature, QgsFeatureSink.FastInsert)
current += 1
feedback.setProgress(80 + int(current * total))
return {self.OUTPUT: dest_id}
@staticmethod
def compute_graph(features, feedback, create_id_graph=False, min_distance=0):
""" compute topology from a layer/field """
s = Graph(sort_graph=False)
id_graph = None
if create_id_graph:
id_graph = Graph(sort_graph=True)
# skip features without geometry
features_with_geometry = {f_id: f for (f_id, f) in features.items() if f.hasGeometry()}
total = 70.0 / len(features_with_geometry) if features_with_geometry else 1
index = QgsSpatialIndex()
i = 0
for feature_id, f in features_with_geometry.items():
if feedback.isCanceled():
break
g = f.geometry()
if min_distance > 0:
g = g.buffer(min_distance, 5)
engine = QgsGeometry.createGeometryEngine(g.constGet())
engine.prepareGeometry()
feature_bounds = g.boundingBox()
# grow bounds a little so we get touching features
feature_bounds.grow(feature_bounds.width() * 0.01)
intersections = index.intersects(feature_bounds)
for l2 in intersections:
f2 = features_with_geometry[l2]
if engine.intersects(f2.geometry().constGet()):
s.add_edge(f.id(), f2.id())
s.add_edge(f2.id(), f.id())
if id_graph:
id_graph.add_edge(f.id(), f2.id())
index.addFeature(f)
i += 1
feedback.setProgress(int(i * total))
for feature_id, f in features_with_geometry.items():
if feedback.isCanceled():
break
if feature_id not in s.node_edge:
s.add_edge(feature_id, None)
return s, id_graph
class ColoringAlgorithm:
@staticmethod
def balanced(features, graph, feedback, balance=0, min_colors=4):
feature_colors = {}
# start with minimum number of colors in pool
color_pool = set(range(1, min_colors + 1))
# calculate count of neighbours
neighbour_count = defaultdict(int)
for feature_id, neighbours in graph.node_edge.items():
neighbour_count[feature_id] += len(neighbours)
# sort features by neighbour count - we want to handle those with more neighbours first
        sorted_by_count = sorted(neighbour_count.items(),
                                 key=operator.itemgetter(1),
                                 reverse=True)
# counts for each color already assigned
color_counts = defaultdict(int)
color_areas = defaultdict(float)
for c in color_pool:
color_counts[c] = 0
color_areas[c] = 0
total = 10.0 / len(sorted_by_count) if sorted_by_count else 1
i = 0
for (feature_id, n) in sorted_by_count:
if feedback.isCanceled():
break
# first work out which already assigned colors are adjacent to this feature
adjacent_colors = set()
for neighbour in graph.node_edge[feature_id]:
if neighbour in feature_colors:
adjacent_colors.add(feature_colors[neighbour])
# from the existing colors, work out which are available (ie non-adjacent)
available_colors = color_pool.difference(adjacent_colors)
feature_color = -1
if len(available_colors) == 0:
# no existing colors available for this feature, so add new color to pool and repeat
min_colors += 1
return ColoringAlgorithm.balanced(features, graph, feedback, balance, min_colors)
else:
if balance == 0:
# choose least used available color
counts = [(c, v) for c, v in color_counts.items() if c in available_colors]
feature_color = sorted(counts, key=operator.itemgetter(1))[0][0]
color_counts[feature_color] += 1
elif balance == 1:
areas = [(c, v) for c, v in color_areas.items() if c in available_colors]
feature_color = sorted(areas, key=operator.itemgetter(1))[0][0]
color_areas[feature_color] += features[feature_id].geometry().area()
elif balance == 2:
min_distances = {c: sys.float_info.max for c in available_colors}
this_feature_centroid = features[feature_id].geometry().centroid().constGet()
# find features for all available colors
other_features = {f_id: c for (f_id, c) in feature_colors.items() if c in available_colors}
# loop through these, and calculate the minimum distance from this feature to the nearest
# feature with each assigned color
for other_feature_id, c in other_features.items():
if feedback.isCanceled():
break
other_geometry = features[other_feature_id].geometry()
other_centroid = other_geometry.centroid().constGet()
distance = this_feature_centroid.distanceSquared(other_centroid)
if distance < min_distances[c]:
min_distances[c] = distance
                    # choose the color whose minimum distance is maximised,
                    # i.e. we want MAXIMAL separation between features with
                    # the same color
feature_color = sorted(min_distances, key=min_distances.__getitem__, reverse=True)[0]
feature_colors[feature_id] = feature_color
i += 1
feedback.setProgress(70 + int(i * total))
return feature_colors
class Graph:
def __init__(self, sort_graph=True):
self.sort_graph = sort_graph
self.node_edge = {}
def add_edge(self, i, j):
ij = [i, j]
if self.sort_graph:
ij.sort()
(i, j) = ij
if i in self.node_edge:
self.node_edge[i].add(j)
else:
self.node_edge[i] = {j}
def make_full(self):
g = Graph(sort_graph=False)
for k in self.node_edge.keys():
for v in self.node_edge[k]:
g.add_edge(v, k)
g.add_edge(k, v)
return g
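# A minimal usage sketch for the Graph helper above (the integer ids are
# hypothetical stand-ins for real feature ids):
#
#   g = Graph(sort_graph=False)
#   g.add_edge(1, 2)
#   g.add_edge(2, 1)
#   # g.node_edge is now {1: {2}, 2: {1}}; make_full() would return a new
#   # Graph containing every edge in both directions.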
|
plowman/python-mcparseface
|
refs/heads/master
|
models/syntaxnet/tensorflow/tensorflow/python/ops/gradients.py
|
8
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implements the graph generation for computation of gradients."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import contextlib
import warnings
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_grad # pylint: disable=unused-import
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import control_flow_grad # pylint: disable=unused-import
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import image_grad # pylint: disable=unused-import
from tensorflow.python.ops import logging_ops # pylint: disable=unused-import
from tensorflow.python.ops import linalg_grad # pylint: disable=unused-import
from tensorflow.python.ops import math_grad # pylint: disable=unused-import
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.platform import tf_logging as logging
# Warn the user if we convert a sparse representation to dense with at
# least this number of elements.
_LARGE_SPARSE_NUM_ELEMENTS = 100000000
def _IndexedSlicesToTensor(value, dtype=None, name=None, as_ref=False):
"""Converts an IndexedSlices object `value` to a Tensor.
NOTE(mrry): This function is potentially expensive.
Args:
value: An ops.IndexedSlices object.
dtype: The dtype of the Tensor to be returned.
name: Optional name to use for the returned Tensor.
as_ref: True if a ref is requested.
Returns:
A dense Tensor representing the values in the given IndexedSlices.
Raises:
ValueError: If the IndexedSlices does not have the same dtype.
"""
_ = as_ref
if dtype and not dtype.is_compatible_with(value.dtype):
raise ValueError(
"Tensor conversion requested dtype %s for IndexedSlices with dtype %s" %
(dtype.name, value.dtype.name))
if value.dense_shape is None:
raise ValueError(
"Tensor conversion requested for IndexedSlices without dense_shape: %s"
% str(value))
# TODO(mrry): Consider adding static shape information to
# IndexedSlices, to avoid using numpy here.
dense_shape_value = tensor_util.constant_value(value.dense_shape)
if dense_shape_value is not None:
num_elements = np.prod(dense_shape_value)
if num_elements >= _LARGE_SPARSE_NUM_ELEMENTS:
warnings.warn(
"Converting sparse IndexedSlices to a dense Tensor with %d elements. "
"This may consume a large amount of memory." % num_elements)
else:
warnings.warn(
"Converting sparse IndexedSlices to a dense Tensor of unknown shape. "
"This may consume a large amount of memory.")
return math_ops.unsorted_segment_sum(value.values,
value.indices,
value.dense_shape[0],
name=name)
ops.register_tensor_conversion_function(ops.IndexedSlices,
_IndexedSlicesToTensor)
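# A hedged worked example of the conversion above, with hypothetical values:
# an IndexedSlices with values=[[1., 2.], [3., 4.]], indices=[0, 2] and
# dense_shape=[4, 2] densifies via unsorted_segment_sum to
#   [[1., 2.],
#    [0., 0.],
#    [3., 4.],
#    [0., 0.]]
# i.e. each row of `values` is scattered to the output row named by `indices`,
# and rows sharing an index are summed.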
def _MarkReachedOps(from_ops, reached_ops):
"""Mark all ops reached from "from_ops".
Args:
from_ops: list of Operations.
reached_ops: list of booleans, indexed by operation id.
"""
queue = collections.deque()
queue.extend(from_ops)
while queue:
op = queue.popleft()
if not reached_ops[op._id]:
reached_ops[op._id] = True
for output in op.outputs:
queue.extend(output.consumers())
def _GatherInputs(to_ops, reached_ops):
"""List all inputs of to_ops that are in reached_ops.
Args:
to_ops: list of Operations.
reached_ops: list of booleans, indexed by operation id.
Returns:
The list of all inputs of to_ops that are in reached_ops.
That list includes all elements of to_ops.
"""
inputs = []
queue = collections.deque()
queue.extend(to_ops)
while queue:
op = queue.popleft()
# We are interested in this op.
if reached_ops[op._id]:
inputs.append(op)
# Clear the boolean so we won't add the inputs again.
reached_ops[op._id] = False
for inp in op.inputs:
queue.append(inp.op)
return inputs
def _PendingCount(graph, to_ops, from_ops):
"""Initialize the pending count for ops between two lists of Operations.
'pending_count[op._id]' indicates the number of backprop inputs
to this operation.
Args:
graph: a Graph.
to_ops: list of Operations.
from_ops: list of Operations.
Returns:
A tuple containing: (1) a list of integers indexed by operation id,
indicating the number of backprop inputs to this operation, and (2)
a boolean which is True if any of the ops in between from_ops and to_ops
contain control flow loops.
"""
# Mark reachable ops from from_ops.
reached_ops = [False] * (graph._last_id + 1)
for op in to_ops:
reached_ops[op._id] = True
_MarkReachedOps(from_ops, reached_ops)
# Mark between ops.
between_ops = [False] * (graph._last_id + 1)
between_op_list = []
queue = collections.deque()
queue.extend(to_ops)
while queue:
op = queue.popleft()
# We are interested in this op.
if reached_ops[op._id]:
between_ops[op._id] = True
between_op_list.append(op)
# Clear the boolean so we won't add the inputs again.
reached_ops[op._id] = False
for inp in op.inputs:
queue.append(inp.op)
# 'loop_state' is None if there are no while loops.
loop_state = control_flow_ops.MaybeCreateControlFlowState(between_op_list,
between_ops)
# Initialize pending count for between ops.
pending_count = [0] * (graph._last_id + 1)
for op in between_op_list:
for x in op.inputs:
if between_ops[x.op._id]:
pending_count[x.op._id] += 1
for x in op.control_inputs:
if between_ops[x._id]:
pending_count[x._id] += 1
return pending_count, loop_state
def _AsList(x):
return x if isinstance(x, (list, tuple)) else [x]
def _DefaultGradYs(grad_ys, ys, colocate_gradients_with_ops):
"""Fill in default values for grad_ys.
Args:
grad_ys: List of gradients, can contain None.
ys: List of tensors.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
Returns:
A list of gradients to use, without None.
Raises:
ValueError: If one of the grad_ys is invalid.
"""
if len(grad_ys) != len(ys):
raise ValueError("Passed %d grad_ys for %d ys" % (len(grad_ys), len(ys)))
grad_ys = ops.convert_n_to_tensor_or_indexed_slices(grad_ys, name="grad_y")
for i in xrange(len(grad_ys)):
grad_y = grad_ys[i]
y = ys[i]
if grad_y is None:
with _maybe_colocate_with(y.op, colocate_gradients_with_ops):
grad_ys[i] = array_ops.fill(
array_ops.shape(y),
constant_op.constant(1, dtype=y.dtype))
else:
if grad_y.dtype != y.dtype:
raise ValueError("Y and ys_grad must be of the same type, "
"not y: %s, ys_grad: %s " %
(dtypes.as_dtype(y.dtype).name,
dtypes.as_dtype(grad_y.dtype).name))
return grad_ys
def _IsFloat(tensor):
dtype = dtypes.as_dtype(tensor.dtype)
return dtype.base_dtype in (dtypes.float32, dtypes.float64)
def _VerifyGeneratedGradients(grads, op):
"""Verify that gradients are valid in number and type.
Args:
grads: List of generated gradients.
op: Operation for which the gradients where generated.
Raises:
ValueError: if the gradients are invalid.
"""
if len(grads) != len(op.inputs):
raise ValueError("Num gradients %d generated for op %s do not match num "
"inputs %d" % (len(grads), op.node_def, len(op.inputs)))
for i in xrange(len(grads)):
grad = grads[i]
inp = op.inputs[i]
if grad is not None:
if not grad.dtype.is_compatible_with(inp.dtype):
raise ValueError("Gradient type %s generated for op %s does "
"not match input type %s" %
(dtypes.as_dtype(grad.dtype).name, op.node_def,
dtypes.as_dtype(inp.dtype).name))
def _StopOps(from_ops, pending_count):
"""The set of ops that terminate the gradient computation.
This computes the frontier of the forward graph *before* which backprop
should stop. Operations in the returned set will not be differentiated.
This set is defined as the subset of `from_ops` containing ops that have
no predecessor in `from_ops`. `pending_count` is the result of
`_PendingCount(g, xs, from_ops)`. An 'op' has predecessors in `from_ops`
iff pending_count[op._id] > 0.
Args:
from_ops: list of Operations.
pending_count: List of integers, indexed by operation id.
Returns:
The set of operations.
"""
stop_ops = set()
for op in from_ops:
is_stop_op = True
for inp in op.inputs:
if pending_count[inp.op._id] > 0:
is_stop_op = False
break
if is_stop_op:
stop_ops.add(op._id)
return stop_ops
@contextlib.contextmanager
def _maybe_colocate_with(op, colocate_gradients_with_ops):
"""Context to colocate with `op` if `colocate_gradients_with_ops`."""
if colocate_gradients_with_ops:
with ops.colocate_with(op):
yield
else:
yield
def gradients(ys,
xs,
grad_ys=None,
name="gradients",
colocate_gradients_with_ops=False,
gate_gradients=False,
aggregation_method=None):
"""Constructs symbolic partial derivatives of sum of `ys` w.r.t. x in `xs`.
`ys` and `xs` are each a `Tensor` or a list of tensors. `grad_ys`
is a list of `Tensor`, holding the gradients received by the
`ys`. The list must be the same length as `ys`.
`gradients()` adds ops to the graph to output the partial
derivatives of `ys` with respect to `xs`. It returns a list of
`Tensor` of length `len(xs)` where each tensor is the `sum(dy/dx)`
for y in `ys`.
`grad_ys` is a list of tensors of the same length as `ys` that holds
the initial gradients for each y in `ys`. When `grad_ys` is None,
we fill in a tensor of '1's of the shape of y for each y in `ys`. A
user can provide their own initial `grad_ys` to compute the
derivatives using a different initial gradient for each y (e.g., if
one wanted to weight the gradient differently for each value in
each y).
Args:
ys: A `Tensor` or list of tensors to be differentiated.
xs: A `Tensor` or list of tensors to be used for differentiation.
grad_ys: Optional. A `Tensor` or list of tensors the same size as
`ys` and holding the gradients computed for each y in `ys`.
    name: Optional name to use for grouping all the gradient ops together;
      defaults to 'gradients'.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
    gate_gradients: If True, add a tuple around the gradients returned
      for an operation. This avoids some race conditions.
aggregation_method: Specifies the method used to combine gradient terms.
Accepted values are constants defined in the class `AggregationMethod`.
Returns:
A list of `sum(dy/dx)` for each x in `xs`.
Raises:
LookupError: if one of the operations between `x` and `y` does not
have a registered gradient function.
ValueError: if the arguments are invalid.
"""
ys = _AsList(ys)
xs = _AsList(xs)
if grad_ys is None:
grad_ys = [None] * len(ys)
else:
grad_ys = _AsList(grad_ys)
with ops.op_scope(ys + xs + grad_ys, name, "gradients"):
ys = ops.convert_n_to_tensor_or_indexed_slices(ys, name="y")
xs = ops.convert_n_to_tensor_or_indexed_slices(xs, name="x")
grad_ys = _DefaultGradYs(grad_ys, ys, colocate_gradients_with_ops)
# The approach we take here is as follows: Create a list of all ops in the
# subgraph between the ys and xs. Visit these ops in reverse order of ids
# to ensure that when we visit an op the gradients w.r.t its outputs have
# been collected. Then aggregate these gradients if needed, call the op's
# gradient function, and add the generated gradients to the gradients for
# its input.
# Initialize the pending count for ops in the connected subgraph from ys
# to the xs.
to_ops = [t.op for t in ys]
from_ops = [t.op for t in xs]
pending_count, loop_state = _PendingCount(ops.get_default_graph(),
to_ops, from_ops)
# Iterate over the collected ops.
#
# grads: op => list of gradients received on each output endpoint of the
# op. The gradients for each endpoint are initially collected as a list.
# When it is time to call the op's gradient function, for each endpoint we
# aggregate the list of received gradients into a Add() Operation if there
# is more than one.
grads = {}
# Add the initial gradients for the ys.
for y, grad_y in zip(ys, grad_ys):
_SetGrad(grads, y, grad_y)
# Initialize queue with to_ops.
queue = collections.deque()
# Add the ops in 'to_ops' into the queue.
to_ops_set = set()
for op in to_ops:
# 'ready' handles the case where one output gradient relies on
# another output's gradient.
# pylint: disable=protected-access
ready = (pending_count[op._id] == 0)
if ready and op._id not in to_ops_set:
to_ops_set.add(op._id)
queue.append(op)
if loop_state:
# The "unused" exits of the loops are added to ys. As an example,
# people often write:
# v1, _ = While(p, b, [x1, x2])
# result = gradients(v1, x1)
# The exit node of x2 is not included by the betweenness analysis.
# But we need it if x2 is involved in computing v1. So we add it
# back in backprop with a zeros_like gradient.
loop_exits = loop_state.GetAllLoopExits()
for y in loop_exits:
if pending_count[y.op._id] == 0 and y.op._id not in to_ops_set:
if _IsFloat(y):
# Floating-point outputs get a zero gradient.
_SetGrad(grads, y, loop_state.ZerosLikeForExit(y))
queue.append(y.op)
# The set of 'from_ops'.
stop_ops = _StopOps(from_ops, pending_count)
while queue:
# generate gradient subgraph for op.
op = queue.popleft()
with _maybe_colocate_with(op, colocate_gradients_with_ops):
if loop_state:
loop_state.EnterGradWhileContext(op, before=True)
out_grads = _AggregatedGrads(grads, op, loop_state, aggregation_method)
if loop_state:
loop_state.ExitGradWhileContext(op, before=True)
grad_fn = None
# pylint: disable=protected-access
is_func_call = ops.get_default_graph()._is_function(op.type)
if not is_func_call and any(
isinstance(g, ops.Tensor) or g for g in out_grads) and (
op._id not in stop_ops):
# pylint: enable=protected-access
# A grad_fn must be defined, either as a function or as None
# for ops that do not have gradients.
try:
grad_fn = ops.get_gradient_function(op)
except LookupError:
raise LookupError(
"No gradient defined for operation '%s' (op type: %s)" %
(op.name, op.type))
if loop_state:
loop_state.EnterGradWhileContext(op, before=False)
if (grad_fn or is_func_call) and any(
isinstance(g, ops.Tensor) or g for g in out_grads):
# NOTE: If _AggregatedGrads didn't compute a value for the i'th
# output, it means that the cost does not depend on output[i],
# therefore dC/doutput[i] is 0.
for i, out_grad in enumerate(out_grads):
if (not isinstance(out_grad, ops.Tensor)
and not out_grad) and _IsFloat(op.outputs[i]):
# Only floating-point outputs get a zero gradient. Gradient
# functions should ignore the gradient for other outputs.
if loop_state:
out_grads[i] = loop_state.ZerosLike(op, i)
else:
out_grads[i] = control_flow_ops.ZerosLikeOutsideLoop(op, i)
with ops.name_scope(op.name + "_grad"):
# pylint: disable=protected-access
with ops.get_default_graph()._original_op(op):
# pylint: enable=protected-access
if is_func_call:
# For function call ops, we add a 'SymbolicGradient'
# node to the graph to compute gradients.
f_in = [x for x in op.inputs] + out_grads
f_types = [x.dtype for x in op.inputs]
# pylint: disable=protected-access
in_grads = _AsList(functional_ops._symbolic_gradient(
f_in, f_types, op.type))
# pylint: enable=protected-access
else:
in_grads = _AsList(grad_fn(op, *out_grads))
_VerifyGeneratedGradients(in_grads, op)
if gate_gradients and len(
[x for x in in_grads if x is not None]) > 1:
in_grads = control_flow_ops.tuple(in_grads)
logging.vlog(1, "Gradient for '" + op.name + "'")
def _FilterGrad(x):
if x is None:
return False
if isinstance(x, (list, tuple)):
return bool(x)
else:
return True
logging.vlog(1, " in --> %s",
", ".join([x.name for x in out_grads if _FilterGrad(x)]))
logging.vlog(1, " out --> %s",
", ".join([x.name for x in in_grads if _FilterGrad(x)]))
else:
# If no grad_fn is defined or none of out_grads is available,
# just propagates a list of None backwards.
in_grads = [None] * len(op.inputs)
for t_in, in_grad in zip(op.inputs, in_grads):
if in_grad is not None:
if isinstance(in_grad, ops.Tensor):
in_grad.set_shape(t_in.get_shape())
_SetGrad(grads, t_in, in_grad)
if loop_state:
loop_state.ExitGradWhileContext(op, before=False)
# update pending count for the inputs of op.
# pylint: disable=protected-access
for x in op.inputs:
pending_count[x.op._id] -= 1
ready = (pending_count[x.op._id] == 0)
if loop_state and not ready:
ready = (pending_count[x.op._id] > 0 and
control_flow_ops.IsLoopSwitch(x.op))
if ready:
queue.append(x.op)
for x in op.control_inputs:
pending_count[x._id] -= 1
        if pending_count[x._id] == 0:
queue.append(x)
# pylint: enable=protected-access
return [_GetGrad(grads, x) for x in xs]
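# A minimal usage sketch (illustrative names, not part of this module):
#
#   x = constant_op.constant(3.0)
#   y = math_ops.square(x)      # y = x^2
#   dy_dx, = gradients(y, [x])  # symbolic dy/dx, i.e. 2*x
#
# `dy_dx` is itself a graph Tensor; evaluating it in a session would yield
# 6.0 for x == 3.0.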
def _SetGrad(grads, t, grad):
"""Sets gradient "grad" in "grads" for tensor "t"."""
op = t.op
op_grads = grads.get(op)
if not op_grads:
op_grads = [[] for _ in xrange(len(op.outputs))]
grads[op] = op_grads
t_grads = op_grads[t.value_index]
if isinstance(t_grads, list):
t_grads.append(grad)
else:
assert control_flow_ops.IsLoopSwitch(op)
op_grads[t.value_index] = grad
def _GetGrad(grads, t):
"""Gets gradient for tensor "t"."""
op = t.op
op_grads = grads.get(op)
if not op_grads:
return None
t_grad = op_grads[t.value_index]
assert not isinstance(t_grad, list), (
"gradients list should have been aggregated by now.")
return t_grad
def _GetGrads(grads, op):
"""Gets all gradients for op."""
if op in grads:
return grads[op]
else:
return [[] for _ in xrange(len(op.outputs))]
def _HandleNestedIndexedSlices(grad):
assert isinstance(grad, ops.IndexedSlices)
if isinstance(grad.values, ops.Tensor):
return grad
else:
assert isinstance(grad.values, ops.IndexedSlices)
g = _HandleNestedIndexedSlices(grad.values)
return ops.IndexedSlices(
g.values, array_ops.gather(grad.indices, g.indices), g.dense_shape)
def _AccumulatorShape(inputs):
shape = tensor_shape.unknown_shape()
for i in inputs:
if isinstance(i, ops.Tensor):
shape = shape.merge_with(i.get_shape())
return shape
class AggregationMethod(object):
"""A class listing aggregation methods used to combine gradients.
Computing partial derivatives can require aggregating gradient
contributions. This class lists the various methods that can
be used to combine gradients in the graph:
* `ADD_N`: All of the gradient terms are summed as part of one
operation using the "AddN" op. It has the property that all
gradients must be ready before any aggregation is performed.
* `DEFAULT`: The system-chosen default aggregation method.
"""
ADD_N = 0
DEFAULT = ADD_N
# The following are experimental and may not be supported in future releases.
EXPERIMENTAL_TREE = 1
EXPERIMENTAL_ACCUMULATE_N = 2
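# A hedged usage sketch: callers opt in to an aggregation strategy by passing
# one of the constants above through gradients() (illustrative names):
#
#   grads = gradients(ys, xs,
#                     aggregation_method=AggregationMethod.EXPERIMENTAL_TREE)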
def _AggregatedGrads(grads, op, loop_state, aggregation_method=None):
"""Get the aggregated gradients for op.
Args:
grads: The map of memoized gradients.
op: The op to get gradients for.
loop_state: An object for maintaining the state of the while loops in the
graph. It is of type ControlFlowState. None if the graph
contains no while loops.
aggregation_method: Specifies the method used to combine gradient terms.
Accepted values are constants defined in the class `AggregationMethod`.
Returns:
A list of gradients, one per each output of `op`. If the gradients
for a particular output is a list, this function aggregates it
before returning.
Raises:
TypeError: if the incoming grads are not Tensors or IndexedSlices.
ValueError: if the arguments are invalid.
"""
if aggregation_method is None:
aggregation_method = AggregationMethod.DEFAULT
if aggregation_method not in [AggregationMethod.ADD_N,
AggregationMethod.EXPERIMENTAL_TREE,
AggregationMethod.EXPERIMENTAL_ACCUMULATE_N]:
raise ValueError(
"Invalid aggregation_method specified %s." % aggregation_method)
out_grads = _GetGrads(grads, op)
for i, out_grad in enumerate(out_grads):
if loop_state:
if isinstance(out_grad, (ops.Tensor, ops.IndexedSlices)):
assert control_flow_ops.IsLoopSwitch(op)
continue
# Grads have to be Tensors or IndexedSlices
if not all([isinstance(g, (ops.Tensor, ops.IndexedSlices))
for g in out_grad if g is not None]):
raise TypeError("gradients have to be either all Tensors "
"or all IndexedSlices")
# Aggregate multiple gradients, and convert [] to None.
if out_grad:
if len(out_grad) < 2:
used = "nop"
out_grads[i] = out_grad[0]
elif all([isinstance(g, ops.Tensor) for g in out_grad if g is not None]):
tensor_shape = _AccumulatorShape(out_grad)
if (aggregation_method == AggregationMethod.EXPERIMENTAL_ACCUMULATE_N
and len(out_grad) > 2 and tensor_shape.is_fully_defined()):
# The benefit of using AccumulateN is that its inputs can be combined
# in any order and this can allow the expression to be evaluated with
# a smaller memory footprint. When used with gpu_allocator_retry,
# it is possible to compute a sum of terms which are much larger than
# total GPU memory.
# AccumulateN can currently only be used if we know the shape for
# an accumulator variable. If this is not known, or if we only have
# 2 grads then we fall through to the "tree" case below.
used = "accumulate_n"
out_grads[i] = math_ops.accumulate_n(out_grad)
elif aggregation_method in [AggregationMethod.EXPERIMENTAL_TREE,
AggregationMethod.EXPERIMENTAL_ACCUMULATE_N
]:
# Aggregate all gradients by doing pairwise sums: this may
# reduce performance, but it can improve memory because the
# gradients can be released earlier.
#
# TODO(vrv): Consider replacing this with a version of
# tf.AddN() that eagerly frees its inputs as soon as they are
# ready, so the order of this tree does not become a problem.
used = "tree"
with ops.name_scope(op.name + "_gradient_sum"):
running_sum = out_grad[0]
for grad in out_grad[1:]:
running_sum = math_ops.add_n([running_sum, grad])
out_grads[i] = running_sum
else:
used = "add_n"
out_grads[i] = math_ops.add_n(out_grad)
logging.vlog(2, " _AggregatedGrads %d x %s using %s", len(out_grad),
tensor_shape, used)
else:
out_grad = math_ops._as_indexed_slices_list([g for g in out_grad
if g is not None])
out_grad = [_HandleNestedIndexedSlices(x) for x in out_grad]
# Form IndexedSlices out of the concatenated values and
# indices.
out_grads[i] = ops.IndexedSlices(
array_ops.concat(0, [x.values for x in out_grad]),
array_ops.concat(0, [x.indices
for x in out_grad]), out_grad[0].dense_shape)
else:
out_grads[i] = []
return out_grads
# TODO(vrv): Make this available when we want to make it public.
def _hessian_vector_product(ys, xs, v):
"""Multiply the Hessian of `ys` wrt `xs` by `v`.
This is an efficient construction that uses a backprop-like approach
to compute the product between the Hessian and another vector. The
Hessian is usually too large to be explicitly computed or even
represented, but this method allows us to at least multiply by it
for the same big-O cost as backprop.
Implicit Hessian-vector products are the main practical, scalable way
of using second derivatives with neural networks. They allow us to
do things like construct Krylov subspaces and approximate conjugate
gradient descent.
Example: if `y` = 1/2 `x`^T A `x`, then `hessian_vector_product(y,
x, v)` will return an expression that evaluates to the same values
as (A + A.T) `v`.
Args:
ys: A scalar value, or a tensor or list of tensors to be summed to
yield a scalar.
xs: A list of tensors that we should construct the Hessian over.
v: A list of tensors, with the same shapes as xs, that we want to
multiply by the Hessian.
Returns:
A list of tensors (or if the list would be length 1, a single tensor)
containing the product between the Hessian and `v`.
Raises:
ValueError: `xs` and `v` have different length.
"""
# Validate the input
length = len(xs)
if len(v) != length:
raise ValueError("xs and v must have the same length.")
# First backprop
grads = gradients(ys, xs)
assert len(grads) == length
elemwise_products = [math_ops.mul(grad_elem, array_ops.stop_gradient(v_elem))
for grad_elem, v_elem in zip(grads, v)
if grad_elem is not None]
# Second backprop
return gradients(elemwise_products, xs)
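# A minimal sketch of the quadratic example from the docstring above
# (illustrative names; A, x and v are assumed tensors of matching shapes):
#
#   y = 0.5 * math_ops.matmul(array_ops.transpose(x), math_ops.matmul(A, x))
#   hvp = _hessian_vector_product(y, [x], [v])
#
# Evaluating the result should match (A + A.T) v, per the docstring.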
|
2014c2g2/teamwork
|
refs/heads/master
|
exts/w2/static/Brython2.0.0-20140209-164925/Lib/logging/handlers.py
|
736
|
# Copyright 2001-2013 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Additional handlers for the logging package for Python. The core package is
based on PEP 282 and comments thereto in comp.lang.python.
Copyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging.handlers' and log away!
"""
import errno, logging, socket, os, pickle, struct, time, re
from codecs import BOM_UTF8
from stat import ST_DEV, ST_INO, ST_MTIME
import queue
try:
import threading
except ImportError: #pragma: no cover
threading = None
#
# Some constants...
#
DEFAULT_TCP_LOGGING_PORT = 9020
DEFAULT_UDP_LOGGING_PORT = 9021
DEFAULT_HTTP_LOGGING_PORT = 9022
DEFAULT_SOAP_LOGGING_PORT = 9023
SYSLOG_UDP_PORT = 514
SYSLOG_TCP_PORT = 514
_MIDNIGHT = 24 * 60 * 60 # number of seconds in a day
class BaseRotatingHandler(logging.FileHandler):
"""
Base class for handlers that rotate log files at a certain point.
Not meant to be instantiated directly. Instead, use RotatingFileHandler
or TimedRotatingFileHandler.
"""
def __init__(self, filename, mode, encoding=None, delay=False):
"""
Use the specified filename for streamed logging
"""
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
self.mode = mode
self.encoding = encoding
self.namer = None
self.rotator = None
def emit(self, record):
"""
Emit a record.
Output the record to the file, catering for rollover as described
in doRollover().
"""
try:
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
def rotation_filename(self, default_name):
"""
Modify the filename of a log file when rotating.
This is provided so that a custom filename can be provided.
The default implementation calls the 'namer' attribute of the
handler, if it's callable, passing the default name to
it. If the attribute isn't callable (the default is None), the name
is returned unchanged.
:param default_name: The default name for the log file.
"""
if not callable(self.namer):
result = default_name
else:
result = self.namer(default_name)
return result
def rotate(self, source, dest):
"""
When rotating, rotate the current log.
The default implementation calls the 'rotator' attribute of the
handler, if it's callable, passing the source and dest arguments to
it. If the attribute isn't callable (the default is None), the source
is simply renamed to the destination.
:param source: The source filename. This is normally the base
filename, e.g. 'test.log'
:param dest: The destination filename. This is normally
what the source is rotated to, e.g. 'test.log.1'.
"""
if not callable(self.rotator):
# Issue 18940: A file may not have been created if delay is True.
if os.path.exists(source):
os.rename(source, dest)
else:
self.rotator(source, dest)
class RotatingFileHandler(BaseRotatingHandler):
"""
Handler for logging to a set of files, which switches from one file
to the next when the current file reaches a certain size.
"""
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
"""
Open the specified file and use it as the stream for logging.
By default, the file grows indefinitely. You can specify particular
values of maxBytes and backupCount to allow the file to rollover at
a predetermined size.
Rollover occurs whenever the current log file is nearly maxBytes in
length. If backupCount is >= 1, the system will successively create
new files with the same pathname as the base file, but with extensions
".1", ".2" etc. appended to it. For example, with a backupCount of 5
and a base file name of "app.log", you would get "app.log",
"app.log.1", "app.log.2", ... through to "app.log.5". The file being
written to is always "app.log" - when it gets filled up, it is closed
and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc.
exist, then they are renamed to "app.log.2", "app.log.3" etc.
respectively.
If maxBytes is zero, rollover never occurs.
"""
# If rotation/rollover is wanted, it doesn't make sense to use another
# mode. If for example 'w' were specified, then if there were multiple
# runs of the calling application, the logs from previous runs would be
# lost if the 'w' is respected, because the log file would be truncated
# on each run.
if maxBytes > 0:
mode = 'a'
BaseRotatingHandler.__init__(self, filename, mode, encoding, delay)
self.maxBytes = maxBytes
self.backupCount = backupCount
def doRollover(self):
"""
Do a rollover, as described in __init__().
"""
if self.stream:
self.stream.close()
self.stream = None
if self.backupCount > 0:
for i in range(self.backupCount - 1, 0, -1):
sfn = self.rotation_filename("%s.%d" % (self.baseFilename, i))
dfn = self.rotation_filename("%s.%d" % (self.baseFilename,
i + 1))
if os.path.exists(sfn):
if os.path.exists(dfn):
os.remove(dfn)
os.rename(sfn, dfn)
dfn = self.rotation_filename(self.baseFilename + ".1")
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
if not self.delay:
self.stream = self._open()
def shouldRollover(self, record):
"""
Determine if rollover should occur.
Basically, see if the supplied record would cause the file to exceed
the size limit we have.
"""
if self.stream is None: # delay was set...
self.stream = self._open()
if self.maxBytes > 0: # are we rolling over?
msg = "%s\n" % self.format(record)
self.stream.seek(0, 2) #due to non-posix-compliant Windows feature
if self.stream.tell() + len(msg) >= self.maxBytes:
return 1
return 0
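# A hedged usage sketch for the class above (file names are illustrative):
#
#   handler = RotatingFileHandler('app.log', maxBytes=1024 * 1024,
#                                 backupCount=5)
#   logging.getLogger('demo').addHandler(handler)
#
# With these settings the live file is always app.log, and rollovers cascade
# through app.log.1 ... app.log.5 as described in __init__ above.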
class TimedRotatingFileHandler(BaseRotatingHandler):
"""
Handler for logging to a file, rotating the log file at certain timed
intervals.
If backupCount is > 0, when rollover is done, no more than backupCount
files are kept - the oldest ones are deleted.
"""
def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False):
BaseRotatingHandler.__init__(self, filename, 'a', encoding, delay)
self.when = when.upper()
self.backupCount = backupCount
self.utc = utc
# Calculate the real rollover interval, which is just the number of
# seconds between rollovers. Also set the filename suffix used when
# a rollover occurs. Current 'when' events supported:
# S - Seconds
# M - Minutes
# H - Hours
# D - Days
# midnight - roll over at midnight
# W{0-6} - roll over on a certain day; 0 - Monday
#
# Case of the 'when' specifier is not important; lower or upper case
# will work.
if self.when == 'S':
self.interval = 1 # one second
self.suffix = "%Y-%m-%d_%H-%M-%S"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'M':
self.interval = 60 # one minute
self.suffix = "%Y-%m-%d_%H-%M"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'H':
self.interval = 60 * 60 # one hour
self.suffix = "%Y-%m-%d_%H"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}(\.\w+)?$"
elif self.when == 'D' or self.when == 'MIDNIGHT':
self.interval = 60 * 60 * 24 # one day
self.suffix = "%Y-%m-%d"
self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
elif self.when.startswith('W'):
self.interval = 60 * 60 * 24 * 7 # one week
if len(self.when) != 2:
raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
if self.when[1] < '0' or self.when[1] > '6':
raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
self.dayOfWeek = int(self.when[1])
self.suffix = "%Y-%m-%d"
self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
else:
raise ValueError("Invalid rollover interval specified: %s" % self.when)
self.extMatch = re.compile(self.extMatch, re.ASCII)
self.interval = self.interval * interval # multiply by units requested
if os.path.exists(filename):
t = os.stat(filename)[ST_MTIME]
else:
t = int(time.time())
self.rolloverAt = self.computeRollover(t)
def computeRollover(self, currentTime):
"""
Work out the rollover time based on the specified time.
"""
result = currentTime + self.interval
# If we are rolling over at midnight or weekly, then the interval is already known.
# What we need to figure out is WHEN the next interval is. In other words,
# if you are rolling over at midnight, then your base interval is 1 day,
# but you want to start that one day clock at midnight, not now. So, we
# have to fudge the rolloverAt value in order to trigger the first rollover
# at the right time. After that, the regular interval will take care of
# the rest. Note that this code doesn't care about leap seconds. :)
if self.when == 'MIDNIGHT' or self.when.startswith('W'):
# This could be done with less code, but I wanted it to be clear
if self.utc:
t = time.gmtime(currentTime)
else:
t = time.localtime(currentTime)
currentHour = t[3]
currentMinute = t[4]
currentSecond = t[5]
# r is the number of seconds left between now and midnight
r = _MIDNIGHT - ((currentHour * 60 + currentMinute) * 60 +
currentSecond)
result = currentTime + r
# If we are rolling over on a certain day, add in the number of days until
# the next rollover, but offset by 1 since we just calculated the time
# until the next day starts. There are three cases:
# Case 1) The day to rollover is today; in this case, do nothing
# Case 2) The day to rollover is further in the interval (i.e., today is
# day 2 (Wednesday) and rollover is on day 6 (Sunday). Days to
# next rollover is simply 6 - 2 - 1, or 3.
# Case 3) The day to rollover is behind us in the interval (i.e., today
# is day 5 (Saturday) and rollover is on day 3 (Thursday).
# Days to rollover is 6 - 5 + 3, or 4. In this case, it's the
# number of days left in the current week (1) plus the number
# of days in the next week until the rollover day (3).
# The calculations described in 2) and 3) above need to have a day added.
# This is because the above time calculation takes us to midnight on this
# day, i.e. the start of the next day.
if self.when.startswith('W'):
day = t[6] # 0 is Monday
if day != self.dayOfWeek:
if day < self.dayOfWeek:
daysToWait = self.dayOfWeek - day
else:
daysToWait = 6 - day + self.dayOfWeek + 1
newRolloverAt = result + (daysToWait * (60 * 60 * 24))
if not self.utc:
dstNow = t[-1]
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
result = newRolloverAt
return result
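    # Worked example (hypothetical clock values): for when='W4' (Friday) with
    # the current day a Tuesday (t[6] == 1), daysToWait is 4 - 1 == 3, so the
    # rollover fires at the midnight that closes Friday, three days after the
    # midnight already stored in `result`.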
def shouldRollover(self, record):
"""
Determine if rollover should occur.
record is not used, as we are just comparing times, but it is needed so
the method signatures are the same
"""
t = int(time.time())
if t >= self.rolloverAt:
return 1
return 0
def getFilesToDelete(self):
"""
Determine the files to delete when rolling over.
More specific than the earlier method, which just used glob.glob().
"""
dirName, baseName = os.path.split(self.baseFilename)
fileNames = os.listdir(dirName)
result = []
prefix = baseName + "."
plen = len(prefix)
for fileName in fileNames:
if fileName[:plen] == prefix:
suffix = fileName[plen:]
if self.extMatch.match(suffix):
result.append(os.path.join(dirName, fileName))
result.sort()
if len(result) < self.backupCount:
result = []
else:
result = result[:len(result) - self.backupCount]
return result
def doRollover(self):
"""
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
"""
if self.stream:
self.stream.close()
self.stream = None
# get the time that this sequence started at and make it a TimeTuple
currentTime = int(time.time())
dstNow = time.localtime(currentTime)[-1]
t = self.rolloverAt - self.interval
if self.utc:
timeTuple = time.gmtime(t)
else:
timeTuple = time.localtime(t)
dstThen = timeTuple[-1]
if dstNow != dstThen:
if dstNow:
addend = 3600
else:
addend = -3600
timeTuple = time.localtime(t + addend)
dfn = self.rotation_filename(self.baseFilename + "." +
time.strftime(self.suffix, timeTuple))
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
if self.backupCount > 0:
for s in self.getFilesToDelete():
os.remove(s)
if not self.delay:
self.stream = self._open()
newRolloverAt = self.computeRollover(currentTime)
while newRolloverAt <= currentTime:
newRolloverAt = newRolloverAt + self.interval
#If DST changes and midnight or weekly rollover, adjust for this.
if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
self.rolloverAt = newRolloverAt
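# A hedged usage sketch for the class above (names are illustrative):
#
#   handler = TimedRotatingFileHandler('app.log', when='midnight',
#                                      backupCount=7)
#
# This rolls the log at each midnight and keeps one week of dated files named
# app.log.YYYY-MM-DD alongside the live app.log.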
class WatchedFileHandler(logging.FileHandler):
"""
A handler for logging to a file, which watches the file
to see if it has changed while in use. This can happen because of
usage of programs such as newsyslog and logrotate which perform
log file rotation. This handler, intended for use under Unix,
watches the file to see if it has changed since the last emit.
(A file has changed if its device or inode have changed.)
If it has changed, the old file stream is closed, and the file
opened to get a new stream.
This handler is not appropriate for use under Windows, because
under Windows open files cannot be moved or renamed - logging
opens the files with exclusive locks - and so there is no need
for such a handler. Furthermore, ST_INO is not supported under
Windows; stat always returns zero for this value.
This handler is based on a suggestion and patch by Chad J.
Schroeder.
"""
def __init__(self, filename, mode='a', encoding=None, delay=False):
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
self.dev, self.ino = -1, -1
self._statstream()
def _statstream(self):
if self.stream:
sres = os.fstat(self.stream.fileno())
self.dev, self.ino = sres[ST_DEV], sres[ST_INO]
def emit(self, record):
"""
Emit a record.
First check if the underlying file has changed, and if it
has, close the old stream and reopen the file to get the
current stream.
"""
# Reduce the chance of race conditions by stat'ing by path only
# once and then fstat'ing our new fd if we opened a new log stream.
# See issue #14632: Thanks to John Mulligan for the problem report
# and patch.
try:
# stat the file by path, checking for existence
sres = os.stat(self.baseFilename)
except OSError as err:
if err.errno == errno.ENOENT:
sres = None
else:
raise
# compare file system stat with that of our stream file handle
if not sres or sres[ST_DEV] != self.dev or sres[ST_INO] != self.ino:
if self.stream is not None:
# we have an open file handle, clean it up
self.stream.flush()
self.stream.close()
# open a new file handle and get new stat info from that fd
self.stream = self._open()
self._statstream()
logging.FileHandler.emit(self, record)
class SocketHandler(logging.Handler):
"""
A handler class which writes logging records, in pickle format, to
a streaming socket. The socket is kept open across logging calls.
If the peer resets it, an attempt is made to reconnect on the next call.
The pickle which is sent is that of the LogRecord's attribute dictionary
(__dict__), so that the receiver does not need to have the logging module
installed in order to process the logging event.
To unpickle the record at the receiving end into a LogRecord, use the
makeLogRecord function.
"""
def __init__(self, host, port):
"""
Initializes the handler with a specific host address and port.
        If the attribute *closeOnError* is set to True, a socket error will
        cause the socket to be silently closed and then reopened on the next
        logging call.
"""
logging.Handler.__init__(self)
self.host = host
self.port = port
self.sock = None
self.closeOnError = False
self.retryTime = None
#
# Exponential backoff parameters.
#
self.retryStart = 1.0
self.retryMax = 30.0
self.retryFactor = 2.0
def makeSocket(self, timeout=1):
"""
A factory method which allows subclasses to define the precise
type of socket they want.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if hasattr(s, 'settimeout'):
s.settimeout(timeout)
try:
s.connect((self.host, self.port))
return s
except socket.error:
s.close()
raise
def createSocket(self):
"""
Try to create a socket, using an exponential backoff with
a max retry time. Thanks to Robert Olson for the original patch
(SF #815911) which has been slightly refactored.
"""
now = time.time()
# Either retryTime is None, in which case this
# is the first time back after a disconnect, or
# we've waited long enough.
if self.retryTime is None:
attempt = True
else:
attempt = (now >= self.retryTime)
if attempt:
try:
self.sock = self.makeSocket()
self.retryTime = None # next time, no delay before trying
except socket.error:
#Creation failed, so set the retry time and return.
if self.retryTime is None:
self.retryPeriod = self.retryStart
else:
self.retryPeriod = self.retryPeriod * self.retryFactor
if self.retryPeriod > self.retryMax:
self.retryPeriod = self.retryMax
self.retryTime = now + self.retryPeriod
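    # With the default parameters above, successive failed connection attempts
    # are spaced 1, 2, 4, 8, 16 and then 30 seconds apart: retryPeriod doubles
    # each time and is capped at retryMax.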
def send(self, s):
"""
Send a pickled string to the socket.
This function allows for partial sends which can happen when the
network is busy.
"""
if self.sock is None:
self.createSocket()
#self.sock can be None either because we haven't reached the retry
#time yet, or because we have reached the retry time and retried,
#but are still unable to connect.
if self.sock:
try:
if hasattr(self.sock, "sendall"):
self.sock.sendall(s)
else: #pragma: no cover
sentsofar = 0
left = len(s)
while left > 0:
sent = self.sock.send(s[sentsofar:])
sentsofar = sentsofar + sent
left = left - sent
except socket.error: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
def makePickle(self, record):
"""
Pickles the record in binary format with a length prefix, and
returns it ready for transmission across the socket.
"""
ei = record.exc_info
if ei:
# just to get traceback text into record.exc_text ...
dummy = self.format(record)
# See issue #14436: If msg or args are objects, they may not be
# available on the receiving end. So we convert the msg % args
# to a string, save it as msg and zap the args.
d = dict(record.__dict__)
d['msg'] = record.getMessage()
d['args'] = None
d['exc_info'] = None
s = pickle.dumps(d, 1)
slen = struct.pack(">L", len(s))
return slen + s
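    # A hedged sketch of the matching receive side (not part of this module;
    # `connection` is an illustrative accepted socket): read the 4-byte
    # big-endian length prefix, then the payload.
    #
    #   slen = struct.unpack(">L", connection.recv(4))[0]
    #   data = connection.recv(slen)
    #   record = logging.makeLogRecord(pickle.loads(data))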
def handleError(self, record):
"""
Handle an error during logging.
An error has occurred during logging. Most likely cause -
connection lost. Close the socket so that we can retry on the
next event.
"""
if self.closeOnError and self.sock:
self.sock.close()
self.sock = None #try to reconnect next time
else:
logging.Handler.handleError(self, record)
def emit(self, record):
"""
Emit a record.
Pickles the record and writes it to the socket in binary format.
If there is an error with the socket, silently drop the packet.
If there was a problem with the socket, re-establishes the
socket.
"""
try:
s = self.makePickle(record)
self.send(s)
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
def close(self):
"""
Closes the socket.
"""
self.acquire()
try:
if self.sock:
self.sock.close()
self.sock = None
logging.Handler.close(self)
finally:
self.release()
class DatagramHandler(SocketHandler):
"""
A handler class which writes logging records, in pickle format, to
a datagram socket. The pickle which is sent is that of the LogRecord's
attribute dictionary (__dict__), so that the receiver does not need to
have the logging module installed in order to process the logging event.
To unpickle the record at the receiving end into a LogRecord, use the
makeLogRecord function.
"""
def __init__(self, host, port):
"""
Initializes the handler with a specific host address and port.
"""
SocketHandler.__init__(self, host, port)
self.closeOnError = False
def makeSocket(self):
"""
The factory method of SocketHandler is here overridden to create
a UDP socket (SOCK_DGRAM).
"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return s
def send(self, s):
"""
Send a pickled string to a socket.
This function no longer allows for partial sends which can happen
when the network is busy - UDP does not guarantee delivery and
can deliver packets out of sequence.
"""
if self.sock is None:
self.createSocket()
self.sock.sendto(s, (self.host, self.port))
class SysLogHandler(logging.Handler):
"""
A handler class which sends formatted logging records to a syslog
server. Based on Sam Rushing's syslog module:
http://www.nightmare.com/squirl/python-ext/misc/syslog.py
Contributed by Nicolas Untz (after which minor refactoring changes
have been made).
"""
# from <linux/sys/syslog.h>:
# ======================================================================
# priorities/facilities are encoded into a single 32-bit quantity, where
# the bottom 3 bits are the priority (0-7) and the top 28 bits are the
# facility (0-big number). Both the priorities and the facilities map
# roughly one-to-one to strings in the syslogd(8) source code. This
# mapping is included in this file.
#
# priorities (these are ordered)
LOG_EMERG = 0 # system is unusable
LOG_ALERT = 1 # action must be taken immediately
LOG_CRIT = 2 # critical conditions
LOG_ERR = 3 # error conditions
LOG_WARNING = 4 # warning conditions
LOG_NOTICE = 5 # normal but significant condition
LOG_INFO = 6 # informational
LOG_DEBUG = 7 # debug-level messages
# facility codes
LOG_KERN = 0 # kernel messages
LOG_USER = 1 # random user-level messages
LOG_MAIL = 2 # mail system
LOG_DAEMON = 3 # system daemons
LOG_AUTH = 4 # security/authorization messages
LOG_SYSLOG = 5 # messages generated internally by syslogd
LOG_LPR = 6 # line printer subsystem
LOG_NEWS = 7 # network news subsystem
LOG_UUCP = 8 # UUCP subsystem
LOG_CRON = 9 # clock daemon
LOG_AUTHPRIV = 10 # security/authorization messages (private)
LOG_FTP = 11 # FTP daemon
# other codes through 15 reserved for system use
LOG_LOCAL0 = 16 # reserved for local use
LOG_LOCAL1 = 17 # reserved for local use
LOG_LOCAL2 = 18 # reserved for local use
LOG_LOCAL3 = 19 # reserved for local use
LOG_LOCAL4 = 20 # reserved for local use
LOG_LOCAL5 = 21 # reserved for local use
LOG_LOCAL6 = 22 # reserved for local use
LOG_LOCAL7 = 23 # reserved for local use
priority_names = {
"alert": LOG_ALERT,
"crit": LOG_CRIT,
"critical": LOG_CRIT,
"debug": LOG_DEBUG,
"emerg": LOG_EMERG,
"err": LOG_ERR,
"error": LOG_ERR, # DEPRECATED
"info": LOG_INFO,
"notice": LOG_NOTICE,
"panic": LOG_EMERG, # DEPRECATED
"warn": LOG_WARNING, # DEPRECATED
"warning": LOG_WARNING,
}
facility_names = {
"auth": LOG_AUTH,
"authpriv": LOG_AUTHPRIV,
"cron": LOG_CRON,
"daemon": LOG_DAEMON,
"ftp": LOG_FTP,
"kern": LOG_KERN,
"lpr": LOG_LPR,
"mail": LOG_MAIL,
"news": LOG_NEWS,
"security": LOG_AUTH, # DEPRECATED
"syslog": LOG_SYSLOG,
"user": LOG_USER,
"uucp": LOG_UUCP,
"local0": LOG_LOCAL0,
"local1": LOG_LOCAL1,
"local2": LOG_LOCAL2,
"local3": LOG_LOCAL3,
"local4": LOG_LOCAL4,
"local5": LOG_LOCAL5,
"local6": LOG_LOCAL6,
"local7": LOG_LOCAL7,
}
#The map below appears to be trivially lowercasing the key. However,
#there's more to it than meets the eye - in some locales, lowercasing
#gives unexpected results. See SF #1524081: in the Turkish locale,
#"INFO".lower() != "info"
priority_map = {
"DEBUG" : "debug",
"INFO" : "info",
"WARNING" : "warning",
"ERROR" : "error",
"CRITICAL" : "critical"
}
def __init__(self, address=('localhost', SYSLOG_UDP_PORT),
facility=LOG_USER, socktype=None):
"""
Initialize a handler.
If address is specified as a string, a UNIX socket is used. To log to a
local syslogd, "SysLogHandler(address="/dev/log")" can be used.
If facility is not specified, LOG_USER is used.
"""
logging.Handler.__init__(self)
self.address = address
self.facility = facility
self.socktype = socktype
if isinstance(address, str):
self.unixsocket = True
self._connect_unixsocket(address)
else:
self.unixsocket = False
if socktype is None:
socktype = socket.SOCK_DGRAM
self.socket = socket.socket(socket.AF_INET, socktype)
if socktype == socket.SOCK_STREAM:
self.socket.connect(address)
self.socktype = socktype
self.formatter = None
def _connect_unixsocket(self, address):
use_socktype = self.socktype
if use_socktype is None:
use_socktype = socket.SOCK_DGRAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except socket.error:
self.socket.close()
if self.socktype is not None:
# user didn't specify falling back, so fail
raise
use_socktype = socket.SOCK_STREAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except socket.error:
self.socket.close()
raise
def encodePriority(self, facility, priority):
"""
Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
priority_names mapping dictionaries are used to convert them to
integers.
"""
if isinstance(facility, str):
facility = self.facility_names[facility]
if isinstance(priority, str):
priority = self.priority_names[priority]
return (facility << 3) | priority
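    # Worked example: encodePriority('user', 'info') is
    # (LOG_USER << 3) | LOG_INFO == (1 << 3) | 6 == 14, which emit()
    # renders as the '<14>' prefix on the wire.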
def close (self):
"""
Closes the socket.
"""
self.acquire()
try:
self.socket.close()
logging.Handler.close(self)
finally:
self.release()
def mapPriority(self, levelName):
"""
Map a logging level name to a key in the priority_names map.
This is useful in two scenarios: when custom levels are being
used, and in the case where you can't do a straightforward
mapping by lowercasing the logging level name because of locale-
specific issues (see SF #1524081).
"""
return self.priority_map.get(levelName, "warning")
ident = '' # prepended to all messages
append_nul = True # some old syslog daemons expect a NUL terminator
def emit(self, record):
"""
Emit a record.
The record is formatted, and then sent to the syslog server. If
exception information is present, it is NOT sent to the server.
"""
msg = self.format(record)
if self.ident:
msg = self.ident + msg
if self.append_nul:
msg += '\000'
"""
We need to convert record level to lowercase, maybe this will
change in the future.
"""
prio = '<%d>' % self.encodePriority(self.facility,
self.mapPriority(record.levelname))
prio = prio.encode('utf-8')
# Message is a string. Convert to bytes as required by RFC 5424
msg = msg.encode('utf-8')
msg = prio + msg
try:
if self.unixsocket:
try:
self.socket.send(msg)
except socket.error:
self.socket.close()
self._connect_unixsocket(self.address)
self.socket.send(msg)
elif self.socktype == socket.SOCK_DGRAM:
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
class SMTPHandler(logging.Handler):
"""
A handler class which sends an SMTP email for each logging event.
"""
def __init__(self, mailhost, fromaddr, toaddrs, subject,
credentials=None, secure=None, timeout=5.0):
"""
Initialize the handler.
Initialize the instance with the from and to addresses and subject
line of the email. To specify a non-standard SMTP port, use the
(host, port) tuple format for the mailhost argument. To specify
authentication credentials, supply a (username, password) tuple
for the credentials argument. To specify the use of a secure
protocol (TLS), pass in a tuple for the secure argument. This will
only be used when authentication credentials are supplied. The tuple
will be either an empty tuple, or a single-value tuple with the name
of a keyfile, or a 2-value tuple with the names of the keyfile and
certificate file. (This tuple is passed to the `starttls` method).
        A timeout in seconds can be specified for the SMTP connection (the
        default is 5 seconds).
"""
logging.Handler.__init__(self)
if isinstance(mailhost, tuple):
self.mailhost, self.mailport = mailhost
else:
self.mailhost, self.mailport = mailhost, None
if isinstance(credentials, tuple):
self.username, self.password = credentials
else:
self.username = None
self.fromaddr = fromaddr
if isinstance(toaddrs, str):
toaddrs = [toaddrs]
self.toaddrs = toaddrs
self.subject = subject
self.secure = secure
self.timeout = timeout
def getSubject(self, record):
"""
Determine the subject for the email.
If you want to specify a subject line which is record-dependent,
override this method.
"""
return self.subject
def emit(self, record):
"""
Emit a record.
Format the record and send it to the specified addressees.
"""
try:
import smtplib
from email.utils import formatdate
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port, timeout=self.timeout)
msg = self.format(record)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
self.fromaddr,
",".join(self.toaddrs),
self.getSubject(record),
formatdate(), msg)
if self.username:
if self.secure is not None:
smtp.ehlo()
smtp.starttls(*self.secure)
smtp.ehlo()
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
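# Usage sketch for SMTPHandler (host names, addresses and credentials are
# illustrative, not part of this module):
#
#   handler = SMTPHandler(mailhost=('smtp.example.com', 25),
#                         fromaddr='app@example.com',
#                         toaddrs=['ops@example.com'],
#                         subject='Application error',
#                         credentials=('user', 'password'),
#                         secure=())
#   handler.setLevel(logging.ERROR)
#   logging.getLogger().addHandler(handler)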
class NTEventLogHandler(logging.Handler):
"""
A handler class which sends events to the NT Event Log. Adds a
registry entry for the specified application name. If no dllname is
provided, win32service.pyd (which contains some basic message
placeholders) is used. Note that use of these placeholders will make
your event logs big, as the entire message source is held in the log.
If you want slimmer logs, you have to pass in the name of your own DLL
which contains the message definitions you want to use in the event log.
"""
def __init__(self, appname, dllname=None, logtype="Application"):
logging.Handler.__init__(self)
try:
import win32evtlogutil, win32evtlog
self.appname = appname
self._welu = win32evtlogutil
if not dllname:
dllname = os.path.split(self._welu.__file__)
dllname = os.path.split(dllname[0])
dllname = os.path.join(dllname[0], r'win32service.pyd')
self.dllname = dllname
self.logtype = logtype
self._welu.AddSourceToRegistry(appname, dllname, logtype)
self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE
self.typemap = {
logging.DEBUG : win32evtlog.EVENTLOG_INFORMATION_TYPE,
logging.INFO : win32evtlog.EVENTLOG_INFORMATION_TYPE,
logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE,
logging.ERROR : win32evtlog.EVENTLOG_ERROR_TYPE,
logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE,
}
except ImportError:
print("The Python Win32 extensions for NT (service, event "\
"logging) appear not to be available.")
self._welu = None
def getMessageID(self, record):
"""
Return the message ID for the event record. If you are using your
own messages, you could do this by having the msg passed to the
logger being an ID rather than a formatting string. Then, in here,
you could use a dictionary lookup to get the message ID. This
version returns 1, which is the base message ID in win32service.pyd.
"""
return 1
def getEventCategory(self, record):
"""
Return the event category for the record.
Override this if you want to specify your own categories. This version
returns 0.
"""
return 0
def getEventType(self, record):
"""
Return the event type for the record.
Override this if you want to specify your own types. This version does
a mapping using the handler's typemap attribute, which is set up in
__init__() to a dictionary which contains mappings for DEBUG, INFO,
WARNING, ERROR and CRITICAL. If you are using your own levels you will
either need to override this method or place a suitable dictionary in
the handler's typemap attribute.
"""
return self.typemap.get(record.levelno, self.deftype)
def emit(self, record):
"""
Emit a record.
Determine the message ID, event category and event type. Then
log the message in the NT event log.
"""
if self._welu:
try:
id = self.getMessageID(record)
cat = self.getEventCategory(record)
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
def close(self):
"""
Clean up this handler.
You can remove the application name from the registry as a
source of event log entries. However, if you do this, you will
not be able to see the events as you intended in the Event Log
Viewer - it needs to be able to access the registry to get the
DLL name.
"""
#self._welu.RemoveSourceFromRegistry(self.appname, self.logtype)
logging.Handler.close(self)
class HTTPHandler(logging.Handler):
"""
A class which sends records to a Web server, using either GET or
POST semantics.
"""
def __init__(self, host, url, method="GET", secure=False, credentials=None):
"""
Initialize the instance with the host, the request URL, and the method
("GET" or "POST")
"""
logging.Handler.__init__(self)
method = method.upper()
if method not in ["GET", "POST"]:
raise ValueError("method must be GET or POST")
self.host = host
self.url = url
self.method = method
self.secure = secure
self.credentials = credentials
def mapLogRecord(self, record):
"""
Default implementation of mapping the log record into a dict
that is sent as the CGI data. Overwrite in your class.
Contributed by Franz Glasner.
"""
return record.__dict__
def emit(self, record):
"""
Emit a record.
Send the record to the Web server as a percent-encoded dictionary
"""
try:
import http.client, urllib.parse
host = self.host
if self.secure:
h = http.client.HTTPSConnection(host)
else:
h = http.client.HTTPConnection(host)
url = self.url
data = urllib.parse.urlencode(self.mapLogRecord(record))
if self.method == "GET":
if (url.find('?') >= 0):
sep = '&'
else:
sep = '?'
url = url + "%c%s" % (sep, data)
h.putrequest(self.method, url)
# support multiple hosts on one IP address...
# need to strip optional :port from host, if present
i = host.find(":")
if i >= 0:
host = host[:i]
h.putheader("Host", host)
if self.method == "POST":
h.putheader("Content-type",
"application/x-www-form-urlencoded")
h.putheader("Content-length", str(len(data)))
if self.credentials:
import base64
                s = ('%s:%s' % self.credentials).encode('utf-8')
                s = 'Basic ' + base64.b64encode(s).strip().decode('ascii')
h.putheader('Authorization', s)
h.endheaders()
if self.method == "POST":
h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
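# Usage sketch for HTTPHandler (host and path are illustrative):
#
#   handler = HTTPHandler('logs.example.com:8080', '/log', method='POST')
#   logging.getLogger().addHandler(handler)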
class BufferingHandler(logging.Handler):
"""
A handler class which buffers logging records in memory. Whenever each
record is added to the buffer, a check is made to see if the buffer should
be flushed. If it should, then flush() is expected to do what's needed.
"""
def __init__(self, capacity):
"""
Initialize the handler with the buffer size.
"""
logging.Handler.__init__(self)
self.capacity = capacity
self.buffer = []
def shouldFlush(self, record):
"""
Should the handler flush its buffer?
Returns true if the buffer is up to capacity. This method can be
overridden to implement custom flushing strategies.
"""
return (len(self.buffer) >= self.capacity)
def emit(self, record):
"""
Emit a record.
Append the record. If shouldFlush() tells us to, call flush() to process
the buffer.
"""
self.buffer.append(record)
if self.shouldFlush(record):
self.flush()
def flush(self):
"""
Override to implement custom flushing behaviour.
This version just zaps the buffer to empty.
"""
self.acquire()
try:
self.buffer = []
finally:
self.release()
def close(self):
"""
Close the handler.
This version just flushes and chains to the parent class' close().
"""
self.flush()
logging.Handler.close(self)
class MemoryHandler(BufferingHandler):
"""
A handler class which buffers logging records in memory, periodically
flushing them to a target handler. Flushing occurs whenever the buffer
is full, or when an event of a certain severity or greater is seen.
"""
def __init__(self, capacity, flushLevel=logging.ERROR, target=None):
"""
Initialize the handler with the buffer size, the level at which
flushing should occur and an optional target.
Note that without a target being set either here or via setTarget(),
a MemoryHandler is no use to anyone!
"""
BufferingHandler.__init__(self, capacity)
self.flushLevel = flushLevel
self.target = target
def shouldFlush(self, record):
"""
Check for buffer full or a record at the flushLevel or higher.
"""
return (len(self.buffer) >= self.capacity) or \
(record.levelno >= self.flushLevel)
def setTarget(self, target):
"""
Set the target handler for this handler.
"""
self.target = target
def flush(self):
"""
For a MemoryHandler, flushing means just sending the buffered
records to the target, if there is one. Override if you want
different behaviour.
The record buffer is also cleared by this operation.
"""
self.acquire()
try:
if self.target:
for record in self.buffer:
self.target.handle(record)
self.buffer = []
finally:
self.release()
def close(self):
"""
Flush, set the target to None and lose the buffer.
"""
self.flush()
self.acquire()
try:
self.target = None
BufferingHandler.close(self)
finally:
self.release()
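# Usage sketch for MemoryHandler (the file name is illustrative): buffer up
# to 100 records in memory and only write them out once an ERROR (or higher)
# record arrives, or the buffer fills:
#
#   target = logging.FileHandler('app.log')
#   handler = MemoryHandler(capacity=100, flushLevel=logging.ERROR,
#                           target=target)
#   logging.getLogger().addHandler(handler)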
class QueueHandler(logging.Handler):
"""
This handler sends events to a queue. Typically, it would be used together
with a multiprocessing Queue to centralise logging to file in one process
(in a multi-process application), so as to avoid file write contention
between processes.
This code is new in Python 3.2, but this class can be copy pasted into
user code for use with earlier Python versions.
"""
def __init__(self, queue):
"""
Initialise an instance, using the passed queue.
"""
logging.Handler.__init__(self)
self.queue = queue
def enqueue(self, record):
"""
Enqueue a record.
The base implementation uses put_nowait. You may want to override
this method if you want to use blocking, timeouts or custom queue
implementations.
"""
self.queue.put_nowait(record)
def prepare(self, record):
"""
Prepares a record for queuing. The object returned by this method is
enqueued.
The base implementation formats the record to merge the message
and arguments, and removes unpickleable items from the record
in-place.
You might want to override this method if you want to convert
the record to a dict or JSON string, or send a modified copy
of the record while leaving the original intact.
"""
# The format operation gets traceback text into record.exc_text
# (if there's exception data), and also puts the message into
# record.message. We can then use this to replace the original
# msg + args, as these might be unpickleable. We also zap the
# exc_info attribute, as it's no longer needed and, if not None,
# will typically not be pickleable.
self.format(record)
record.msg = record.message
record.args = None
record.exc_info = None
return record
def emit(self, record):
"""
Emit a record.
Writes the LogRecord to the queue, preparing it for pickling first.
"""
try:
self.enqueue(self.prepare(record))
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
if threading:
class QueueListener(object):
"""
This class implements an internal threaded listener which watches for
LogRecords being added to a queue, removes them and passes them to a
list of handlers for processing.
"""
_sentinel = None
def __init__(self, queue, *handlers):
"""
Initialise an instance with the specified queue and
handlers.
"""
self.queue = queue
self.handlers = handlers
self._stop = threading.Event()
self._thread = None
def dequeue(self, block):
"""
Dequeue a record and return it, optionally blocking.
The base implementation uses get. You may want to override this method
if you want to use timeouts or work with custom queue implementations.
"""
return self.queue.get(block)
def start(self):
"""
Start the listener.
This starts up a background thread to monitor the queue for
LogRecords to process.
"""
self._thread = t = threading.Thread(target=self._monitor)
t.setDaemon(True)
t.start()
        def prepare(self, record):
"""
Prepare a record for handling.
This method just returns the passed-in record. You may want to
override this method if you need to do any custom marshalling or
manipulation of the record before passing it to the handlers.
"""
return record
def handle(self, record):
"""
Handle a record.
This just loops through the handlers offering them the record
to handle.
"""
record = self.prepare(record)
for handler in self.handlers:
handler.handle(record)
def _monitor(self):
"""
Monitor the queue for records, and ask the handler
to deal with them.
This method runs on a separate, internal thread.
The thread will terminate if it sees a sentinel object in the queue.
"""
q = self.queue
has_task_done = hasattr(q, 'task_done')
while not self._stop.isSet():
try:
record = self.dequeue(True)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
pass
# There might still be records in the queue.
while True:
try:
record = self.dequeue(False)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
break
def enqueue_sentinel(self):
"""
This is used to enqueue the sentinel record.
The base implementation uses put_nowait. You may want to override this
method if you want to use timeouts or work with custom queue
implementations.
"""
self.queue.put_nowait(self._sentinel)
def stop(self):
"""
Stop the listener.
This asks the thread to terminate, and then waits for it to do so.
Note that if you don't call this before your application exits, there
may be some records still left on the queue, which won't be processed.
"""
self._stop.set()
self.enqueue_sentinel()
self._thread.join()
self._thread = None
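# Usage sketch pairing QueueHandler with QueueListener (the queue and handler
# choices are illustrative):
#
#   import queue
#   log_queue = queue.Queue(-1)  # unbounded
#   logging.getLogger().addHandler(QueueHandler(log_queue))
#   listener = QueueListener(log_queue, logging.StreamHandler())
#   listener.start()
#   # ... application threads log as usual; records flow through the queue
#   listener.stop()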
|
longmen21/edx-platform
|
refs/heads/master
|
common/lib/xmodule/xmodule/lti_module.py
|
16
|
"""
Learning Tools Interoperability (LTI) module.
Resources
---------
Theoretical background and detailed specifications of LTI can be found on:
http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html
This module is based on the version 1.1.1 of the LTI specifications by the
IMS Global authority. For authentication, it uses OAuth1.
When responding back to the LTI tool provider, we must issue a correct
response. Types of responses and their message payload is available at:
Table A1.2 Interpretation of the 'CodeMajor/severity' matrix.
http://www.imsglobal.org/gws/gwsv1p0/imsgws_wsdlBindv1p0.html
A resource to test the LTI protocol (PHP realization):
http://www.imsglobal.org/developers/LTI/test/v1p1/lms.php
We have also begun to add support for LTI 1.2/2.0. We will keep this
docstring in sync with what support is available. The first LTI 2.0
feature to be supported is the REST API results service, see specification
at
http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
What is supported:
------------------
1.) Display of simple LTI in iframe or a new window.
2.) Multiple LTI components on a single page.
3.) The use of multiple LTI providers per course.
4.) Use of advanced LTI component that provides back a grade.
A) LTI 1.1.1 XML endpoint
a.) The LTI provider sends back a grade to a specified URL.
b.) Currently only action "update" is supported. "Read", and "delete"
actions initially weren't required.
B) LTI 2.0 Result Service JSON REST endpoint
(http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html)
a.) Discovery of all such LTI http endpoints for a course. External tools GET from this discovery
endpoint and receive URLs for interacting with individual grading units.
(see lms/djangoapps/courseware/views/views.py:get_course_lti_endpoints)
b.) GET, PUT and DELETE in LTI Result JSON binding
(http://www.imsglobal.org/lti/ltiv2p0/mediatype/application/vnd/ims/lis/v2/result+json/index.html)
        for a provider to synchronize grades into edx-platform. Reading, setting, and deleting
        numeric grades between 0 and 1, and text + basic HTML feedback comments, are supported via
        GET / PUT / DELETE HTTP methods respectively.
"""
import datetime
from django.utils.timezone import UTC
import logging
import oauthlib.oauth1
from oauthlib.oauth1.rfc5849 import signature
import hashlib
import base64
import urllib
import textwrap
import bleach
from lxml import etree
from webob import Response
import mock
from xml.sax.saxutils import escape
from xmodule.editing_module import MetadataOnlyEditingDescriptor
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.x_module import XModule, module_attr
from xmodule.lti_2_util import LTI20ModuleMixin, LTIError
from pkg_resources import resource_string
from xblock.core import String, Scope, List, XBlock
from xblock.fields import Boolean, Float
log = logging.getLogger(__name__)
DOCS_ANCHOR_TAG_OPEN = (
"<a target='_blank' "
"href='http://edx.readthedocs.io/projects/edx-partner-course-staff/en/latest/exercises_tools/lti_component.html'>"
)
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class LTIFields(object):
"""
Fields to define and obtain LTI tool from provider are set here,
except credentials, which should be set in course settings::
    `lti_id` is the ID used to connect the tool with credentials in course settings. It should not contain :: (double colon).
`launch_url` is launch URL of tool.
`custom_parameters` are additional parameters to navigate to proper book and book page.
For example, for Vitalsource provider, `launch_url` should be
*https://bc-staging.vitalsource.com/books/book*,
and to get to proper book and book page, you should set custom parameters as::
vbid=put_book_id_here
book_location=page/put_page_number_here
    A default non-empty URL for `launch_url` is needed due to an oauthlib requirement (a URL scheme must be present)::
https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
"""
display_name = String(
display_name=_("Display Name"),
help=_(
"Enter the name that students see for this component. "
"Analytics reports may also use the display name to identify this component."
),
scope=Scope.settings,
default="LTI",
)
lti_id = String(
display_name=_("LTI ID"),
help=_(
"Enter the LTI ID for the external LTI provider. "
"This value must be the same LTI ID that you entered in the "
"LTI Passports setting on the Advanced Settings page."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='',
scope=Scope.settings
)
launch_url = String(
display_name=_("LTI URL"),
help=_(
"Enter the URL of the external tool that this component launches. "
"This setting is only used when Hide External Tool is set to False."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='http://www.example.com',
scope=Scope.settings)
custom_parameters = List(
display_name=_("Custom Parameters"),
help=_(
"Add the key/value pair for any custom parameters, such as the page your e-book should open to or "
"the background color for this component."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
scope=Scope.settings)
open_in_a_new_page = Boolean(
display_name=_("Open in New Page"),
help=_(
"Select True if you want students to click a link that opens the LTI tool in a new window. "
"Select False if you want the LTI content to open in an IFrame in the current page. "
"This setting is only used when Hide External Tool is set to False. "
),
default=True,
scope=Scope.settings
)
has_score = Boolean(
display_name=_("Scored"),
help=_(
"Select True if this component will receive a numerical score from the external LTI system."
),
default=False,
scope=Scope.settings
)
weight = Float(
display_name=_("Weight"),
help=_(
"Enter the number of points possible for this component. "
"The default value is 1.0. "
"This setting is only used when Scored is set to True."
),
default=1.0,
scope=Scope.settings,
values={"min": 0},
)
module_score = Float(
help=_("The score kept in the xblock KVS -- duplicate of the published score in django DB"),
default=None,
scope=Scope.user_state
)
score_comment = String(
help=_("Comment as returned from grader, LTI2.0 spec"),
default="",
scope=Scope.user_state
)
hide_launch = Boolean(
display_name=_("Hide External Tool"),
help=_(
"Select True if you want to use this component as a placeholder for syncing with an external grading "
"system rather than launch an external tool. "
"This setting hides the Launch button and any IFrames for this component."
),
default=False,
scope=Scope.settings
)
# Users will be presented with a message indicating that their e-mail/username would be sent to a third
# party application. When "Open in New Page" is not selected, the tool automatically appears without any user action.
ask_to_send_username = Boolean(
display_name=_("Request user's username"),
# Translators: This is used to request the user's username for a third party service.
help=_("Select True to request the user's username."),
default=False,
scope=Scope.settings
)
ask_to_send_email = Boolean(
display_name=_("Request user's email"),
# Translators: This is used to request the user's email for a third party service.
help=_("Select True to request the user's email address."),
default=False,
scope=Scope.settings
)
description = String(
display_name=_("LTI Application Information"),
help=_(
"Enter a description of the third party application. If requesting username and/or email, use this text box to inform users "
"why their username and/or email will be forwarded to a third party application."
),
default="",
scope=Scope.settings
)
button_text = String(
display_name=_("Button Text"),
help=_(
"Enter the text on the button used to launch the third party application."
),
default="",
scope=Scope.settings
)
accept_grades_past_due = Boolean(
display_name=_("Accept grades past deadline"),
help=_("Select True to allow third party systems to post grades past the deadline."),
default=True,
scope=Scope.settings
)
class LTIModule(LTIFields, LTI20ModuleMixin, XModule):
"""
    Module provides LTI integration for a course.
    In addition to the usual XModule structure, it performs OAuth signing.
    How it works::
    1. Get credentials from course settings.
    2. There is a minimal set of parameters that need to be signed (shown here for Vitalsource)::
user_id
oauth_callback
lis_outcome_service_url
lis_result_sourcedid
launch_presentation_return_url
lti_message_type
lti_version
roles
*+ all custom parameters*
These parameters should be encoded and signed by *OAuth1* together with
`launch_url` and *POST* request type.
3. Signing proceeds with client key/secret pair obtained from course settings.
That pair should be obtained from LTI provider and set into course settings by course author.
After that signature and other OAuth data are generated.
OAuth data which is generated after signing is usual::
oauth_callback
oauth_nonce
oauth_consumer_key
oauth_signature_method
oauth_timestamp
oauth_version
4. All that data is passed to form and sent to LTI provider server by browser via
autosubmit via JavaScript.
Form example::
<form
action="${launch_url}"
name="ltiLaunchForm-${element_id}"
class="ltiLaunchForm"
method="post"
target="ltiLaunchFrame-${element_id}"
encType="application/x-www-form-urlencoded"
>
<input name="launch_presentation_return_url" value="" />
<input name="lis_outcome_service_url" value="" />
<input name="lis_result_sourcedid" value="" />
<input name="lti_message_type" value="basic-lti-launch-request" />
<input name="lti_version" value="LTI-1p0" />
<input name="oauth_callback" value="about:blank" />
<input name="oauth_consumer_key" value="${oauth_consumer_key}" />
<input name="oauth_nonce" value="${oauth_nonce}" />
<input name="oauth_signature_method" value="HMAC-SHA1" />
<input name="oauth_timestamp" value="${oauth_timestamp}" />
<input name="oauth_version" value="1.0" />
<input name="user_id" value="${user_id}" />
<input name="role" value="student" />
<input name="oauth_signature" value="${oauth_signature}" />
<input name="custom_1" value="${custom_param_1_value}" />
<input name="custom_2" value="${custom_param_2_value}" />
<input name="custom_..." value="${custom_param_..._value}" />
<input type="submit" value="Press to Launch" />
</form>
    5. The LTI provider has the same secret key; it signs the data string via *OAuth1* and compares signatures.
        If the signatures match, the LTI provider redirects the iframe source to the LTI tool web page,
        and the LTI tool is rendered in the iframe inside the course.
        Otherwise, an error message from the LTI provider is shown.
"""
js = {
'js': [
resource_string(__name__, 'js/src/lti/lti.js')
]
}
css = {'scss': [resource_string(__name__, 'css/lti/lti.scss')]}
js_module_name = "LTI"
def get_input_fields(self):
# LTI provides a list of default parameters that might be passed as
# part of the POST data. These parameters should not be prefixed.
        # Likewise, the creator of an LTI link can add custom key/value parameters
        # to a launch which are to be included with the launch of the LTI link.
        # In this case, we automatically add a `custom_` prefix before these parameters.
# See http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html#_Toc316828520
PARAMETERS = [
"lti_message_type",
"lti_version",
"resource_link_title",
"resource_link_description",
"user_image",
"lis_person_name_given",
"lis_person_name_family",
"lis_person_name_full",
"lis_person_contact_email_primary",
"lis_person_sourcedid",
"role_scope_mentor",
"context_type",
"context_title",
"context_label",
"launch_presentation_locale",
"launch_presentation_document_target",
"launch_presentation_css_url",
"launch_presentation_width",
"launch_presentation_height",
"launch_presentation_return_url",
"tool_consumer_info_product_family_code",
"tool_consumer_info_version",
"tool_consumer_instance_guid",
"tool_consumer_instance_name",
"tool_consumer_instance_description",
"tool_consumer_instance_url",
"tool_consumer_instance_contact_email",
]
client_key, client_secret = self.get_client_key_secret()
# parsing custom parameters to dict
custom_parameters = {}
for custom_parameter in self.custom_parameters:
try:
param_name, param_value = [p.strip() for p in custom_parameter.split('=', 1)]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse custom parameter: {custom_parameter}. Should be "x=y" string.').format(
custom_parameter="{0!r}".format(custom_parameter)
)
raise LTIError(msg)
            # LTI specs: a 'custom_' prefix must be prepended to each custom parameter, as noted in the link above.
if param_name not in PARAMETERS:
param_name = 'custom_' + param_name
custom_parameters[unicode(param_name)] = unicode(param_value)
return self.oauth_params(
custom_parameters,
client_key,
client_secret,
)
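    # For example (values are illustrative), a custom_parameters entry of
    # "vbid=book_id" becomes {u'custom_vbid': u'book_id'} before signing,
    # while a reserved name from PARAMETERS such as "lti_version=LTI-1p0"
    # is passed through without the 'custom_' prefix.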
def get_context(self):
"""
Returns a context.
"""
# use bleach defaults. see https://github.com/jsocol/bleach/blob/master/bleach/__init__.py
# ALLOWED_TAGS are
# ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul']
#
# ALLOWED_ATTRIBUTES are
# 'a': ['href', 'title'],
# 'abbr': ['title'],
# 'acronym': ['title'],
#
# This lets all plaintext through.
sanitized_comment = bleach.clean(self.score_comment)
return {
'input_fields': self.get_input_fields(),
# These parameters do not participate in OAuth signing.
'launch_url': self.launch_url.strip(),
'element_id': self.location.html_id(),
'element_class': self.category,
'open_in_a_new_page': self.open_in_a_new_page,
'display_name': self.display_name,
'form_url': self.runtime.handler_url(self, 'preview_handler').rstrip('/?'),
'hide_launch': self.hide_launch,
'has_score': self.has_score,
'weight': self.weight,
'module_score': self.module_score,
'comment': sanitized_comment,
'description': self.description,
'ask_to_send_username': self.ask_to_send_username,
'ask_to_send_email': self.ask_to_send_email,
'button_text': self.button_text,
'accept_grades_past_due': self.accept_grades_past_due,
}
def get_html(self):
"""
Renders parameters to template.
"""
return self.system.render_template('lti.html', self.get_context())
@XBlock.handler
def preview_handler(self, _, __):
"""
This is called to get context with new oauth params to iframe.
"""
template = self.system.render_template('lti_form.html', self.get_context())
return Response(template, content_type='text/html')
def get_user_id(self):
user_id = self.runtime.anonymous_student_id
assert user_id is not None
return unicode(urllib.quote(user_id))
def get_outcome_service_url(self, service_name="grade_handler"):
"""
Return URL for storing grades.
        To test LTI on a sandbox we must use the http scheme.
        While testing locally and on Jenkins, mock_lti_server uses http.referer
        to obtain the scheme, so either http or https is fine.
The scheme logic is handled in lms/lib/xblock/runtime.py
"""
return self.runtime.handler_url(self, service_name, thirdparty=True).rstrip('/?')
def get_resource_link_id(self):
"""
This is an opaque unique identifier that the TC guarantees will be unique
within the TC for every placement of the link.
If the tool / activity is placed multiple times in the same context,
each of those placements will be distinct.
This value will also change if the item is exported from one system or
context and imported into another system or context.
This parameter is required.
Example: u'edx.org-i4x-2-3-lti-31de800015cf4afb973356dbe81496df'
        The hostname, edx.org,
        makes resource_link_id change on import to another system.
        The last part of the location, location.name - 31de800015cf4afb973356dbe81496df,
        is a random hash, updated by course_id;
        this makes resource_link_id unique inside a single course.
        The first part of the location is tag-org-course-category, i4x-2-3-lti.
        Location.name itself does not change on import to another course,
        but org and course_id do.
        So together with org and course_id, in the form
        i4x-2-3-lti-31de800015cf4afb973356dbe81496df, this part of resource_link_id
        makes resource_link_id unique among courses inside the same system.
"""
return unicode(urllib.quote("{}-{}".format(self.system.hostname, self.location.html_id())))
def get_lis_result_sourcedid(self):
"""
This field contains an identifier that indicates the LIS Result Identifier (if any)
associated with this launch. This field identifies a unique row and column within the
TC gradebook. This field is unique for every combination of context_id / resource_link_id / user_id.
This value may change for a particular resource_link_id / user_id from one launch to the next.
The TP should only retain the most recent value for this field for a particular resource_link_id / user_id.
This field is generally optional, but is required for grading.
"""
return "{context}:{resource_link}:{user_id}".format(
context=urllib.quote(self.context_id),
resource_link=self.get_resource_link_id(),
user_id=self.get_user_id()
)
def get_course(self):
"""
Return course by course id.
"""
return self.descriptor.runtime.modulestore.get_course(self.course_id)
@property
def context_id(self):
"""
Return context_id.
context_id is an opaque identifier that uniquely identifies the context (e.g., a course)
that contains the link being launched.
"""
return self.course_id.to_deprecated_string()
@property
def role(self):
"""
Get system user role and convert it to LTI role.
"""
roles = {
'student': u'Student',
'staff': u'Administrator',
'instructor': u'Instructor',
}
return roles.get(self.system.get_user_role(), u'Student')
def oauth_params(self, custom_parameters, client_key, client_secret):
"""
        Signs the request and returns the signature and OAuth parameters.
        `custom_parameters` is a dict parsed from the `custom_parameters` field.
        `client_key` and `client_secret` are the LTI tool credentials.
        The *anonymous student id* is also passed to the template and therefore to the LTI provider.
"""
client = oauthlib.oauth1.Client(
client_key=unicode(client_key),
client_secret=unicode(client_secret)
)
# Must have parameters for correct signing from LTI:
body = {
u'user_id': self.get_user_id(),
u'oauth_callback': u'about:blank',
u'launch_presentation_return_url': '',
u'lti_message_type': u'basic-lti-launch-request',
u'lti_version': 'LTI-1p0',
u'roles': self.role,
# Parameters required for grading:
u'resource_link_id': self.get_resource_link_id(),
u'lis_result_sourcedid': self.get_lis_result_sourcedid(),
u'context_id': self.context_id,
}
if self.has_score:
body.update({
u'lis_outcome_service_url': self.get_outcome_service_url()
})
self.user_email = ""
self.user_username = ""
        # Username and email can't be sent in Studio mode, because the user object is not defined.
        # To test this functionality, test in the LMS.
if callable(self.runtime.get_real_user):
real_user_object = self.runtime.get_real_user(self.runtime.anonymous_student_id)
try:
self.user_email = real_user_object.email
except AttributeError:
self.user_email = ""
try:
self.user_username = real_user_object.username
except AttributeError:
self.user_username = ""
if self.ask_to_send_username and self.user_username:
body["lis_person_sourcedid"] = self.user_username
if self.ask_to_send_email and self.user_email:
body["lis_person_contact_email_primary"] = self.user_email
# Appending custom parameter for signing.
body.update(custom_parameters)
headers = {
# This is needed for body encoding:
'Content-Type': 'application/x-www-form-urlencoded',
}
try:
__, headers, __ = client.sign(
unicode(self.launch_url.strip()),
http_method=u'POST',
body=body,
headers=headers)
except ValueError: # Scheme not in url.
# https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
# Stubbing headers for now:
log.info(
u"LTI module %s in course %s does not have oauth parameters correctly configured.",
self.location,
self.location.course_key,
)
headers = {
u'Content-Type': u'application/x-www-form-urlencoded',
u'Authorization': u'OAuth oauth_nonce="80966668944732164491378916897", \
oauth_timestamp="1378916897", oauth_version="1.0", oauth_signature_method="HMAC-SHA1", \
oauth_consumer_key="", oauth_signature="frVp4JuvT1mVXlxktiAUjQ7%2F1cw%3D"'}
params = headers['Authorization']
# Parse headers to pass to template as part of context:
params = dict([param.strip().replace('"', '').split('=') for param in params.split(',')])
params[u'oauth_nonce'] = params[u'OAuth oauth_nonce']
del params[u'OAuth oauth_nonce']
# oauthlib encodes signature with
# 'Content-Type': 'application/x-www-form-urlencoded'
# so '='' becomes '%3D'.
# We send form via browser, so browser will encode it again,
# So we need to decode signature back:
params[u'oauth_signature'] = urllib.unquote(params[u'oauth_signature']).decode('utf8')
# Add LTI parameters to OAuth parameters for sending in form.
params.update(body)
return params
def max_score(self):
return self.weight if self.has_score else None
@XBlock.handler
def grade_handler(self, request, suffix): # pylint: disable=unused-argument
"""
This is called by courseware.module_render, to handle an AJAX call.
Used only for grading. Returns XML response.
Example of request body from LTI provider::
<?xml version = "1.0" encoding = "UTF-8"?>
<imsx_POXEnvelopeRequest xmlns = "some_link (may be not required)">
<imsx_POXHeader>
<imsx_POXRequestHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>528243ba5241b</imsx_messageIdentifier>
</imsx_POXRequestHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>
<replaceResultRequest>
<resultRecord>
<sourcedGUID>
<sourcedId>feb-123-456-2929::28883</sourcedId>
</sourcedGUID>
<result>
<resultScore>
<language>en-us</language>
<textString>0.4</textString>
</resultScore>
</result>
</resultRecord>
</replaceResultRequest>
</imsx_POXBody>
</imsx_POXEnvelopeRequest>
Example of correct/incorrect answer XML body:: see response_xml_template.
"""
response_xml_template = textwrap.dedent("""\
<?xml version="1.0" encoding="UTF-8"?>
<imsx_POXEnvelopeResponse xmlns = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0">
<imsx_POXHeader>
<imsx_POXResponseHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>{imsx_messageIdentifier}</imsx_messageIdentifier>
<imsx_statusInfo>
<imsx_codeMajor>{imsx_codeMajor}</imsx_codeMajor>
<imsx_severity>status</imsx_severity>
<imsx_description>{imsx_description}</imsx_description>
<imsx_messageRefIdentifier>
</imsx_messageRefIdentifier>
</imsx_statusInfo>
</imsx_POXResponseHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>{response}</imsx_POXBody>
</imsx_POXEnvelopeResponse>
""")
# Returns when `action` is unsupported.
# Supported actions:
# - replaceResultRequest.
unsupported_values = {
'imsx_codeMajor': 'unsupported',
'imsx_description': 'Target does not support the requested operation.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
# Returns if:
# - past due grades are not accepted and grade is past due
# - score is out of range
# - can't parse response from TP;
# - can't verify OAuth signing or OAuth signing is incorrect.
failure_values = {
'imsx_codeMajor': 'failure',
'imsx_description': 'The request has failed.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
if not self.accept_grades_past_due and self.is_past_due():
failure_values['imsx_description'] = "Grade is past due"
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
try:
imsx_messageIdentifier, sourcedId, score, action = self.parse_grade_xml_body(request.body)
except Exception as e:
error_message = "Request body XML parsing error: " + escape(e.message)
log.debug("[LTI]: " + error_message)
failure_values['imsx_description'] = error_message
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
# Verify OAuth signing.
try:
self.verify_oauth_body_sign(request)
except (ValueError, LTIError) as e:
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
error_message = "OAuth verification error: " + escape(e.message)
failure_values['imsx_description'] = error_message
log.debug("[LTI]: " + error_message)
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
real_user = self.system.get_real_user(urllib.unquote(sourcedId.split(':')[-1]))
        if not real_user:  # we can't save to the database, as we do not have a real user id
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
failure_values['imsx_description'] = "User not found."
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
if action == 'replaceResultRequest':
self.set_user_module_score(real_user, score, self.max_score())
values = {
'imsx_codeMajor': 'success',
'imsx_description': 'Score for {sourced_id} is now {score}'.format(sourced_id=sourcedId, score=score),
'imsx_messageIdentifier': escape(imsx_messageIdentifier),
'response': '<replaceResultResponse/>'
}
log.debug("[LTI]: Grade is saved.")
return Response(response_xml_template.format(**values), content_type="application/xml")
unsupported_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
log.debug("[LTI]: Incorrect action.")
return Response(response_xml_template.format(**unsupported_values), content_type='application/xml')
@classmethod
def parse_grade_xml_body(cls, body):
"""
        Parses XML from request.body and returns the parsed data.
        The XML body should contain an nsmap with the namespace specified in the LTI specs.
        Returns a tuple: imsx_messageIdentifier, sourcedId, score, action.
        Raises an exception if the body can't be parsed.
"""
lti_spec_namespace = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0"
namespaces = {'def': lti_spec_namespace}
data = body.strip().encode('utf-8')
parser = etree.XMLParser(ns_clean=True, recover=True, encoding='utf-8')
root = etree.fromstring(data, parser=parser)
imsx_messageIdentifier = root.xpath("//def:imsx_messageIdentifier", namespaces=namespaces)[0].text or ''
sourcedId = root.xpath("//def:sourcedId", namespaces=namespaces)[0].text
score = root.xpath("//def:textString", namespaces=namespaces)[0].text
action = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0].getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '')
        # Raise an exception if the score is not a float or is outside the 0.0-1.0 range required by the spec.
score = float(score)
if not 0 <= score <= 1:
raise LTIError('score value outside the permitted range of 0-1.')
return imsx_messageIdentifier, sourcedId, score, action
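    # Example (a sketch, reusing the sample request body shown in the
    # grade_handler docstring): parse_grade_xml_body(body) would return
    # ('528243ba5241b', 'feb-123-456-2929::28883', 0.4, 'replaceResultRequest').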
def verify_oauth_body_sign(self, request, content_type='application/x-www-form-urlencoded'):
"""
Verify grade request from LTI provider using OAuth body signing.
Uses http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html::
This specification extends the OAuth signature to include integrity checks on HTTP request bodies
with content types other than application/x-www-form-urlencoded.
Arguments:
request: DjangoWebobRequest.
Raises:
LTIError if request is incorrect.
"""
client_key, client_secret = self.get_client_key_secret()
headers = {
'Authorization': unicode(request.headers.get('Authorization')),
'Content-Type': content_type,
}
sha1 = hashlib.sha1()
sha1.update(request.body)
oauth_body_hash = base64.b64encode(sha1.digest())
oauth_params = signature.collect_parameters(headers=headers, exclude_oauth_signature=False)
oauth_headers = dict(oauth_params)
oauth_signature = oauth_headers.pop('oauth_signature')
mock_request_lti_1 = mock.Mock(
uri=unicode(urllib.unquote(self.get_outcome_service_url())),
http_method=unicode(request.method),
params=oauth_headers.items(),
signature=oauth_signature
)
mock_request_lti_2 = mock.Mock(
uri=unicode(urllib.unquote(request.url)),
http_method=unicode(request.method),
params=oauth_headers.items(),
signature=oauth_signature
)
if oauth_body_hash != oauth_headers.get('oauth_body_hash'):
log.error(
"OAuth body hash verification failed, provided: {}, "
"calculated: {}, for url: {}, body is: {}".format(
oauth_headers.get('oauth_body_hash'),
oauth_body_hash,
self.get_outcome_service_url(),
request.body
)
)
raise LTIError("OAuth body hash verification is failed.")
if (not signature.verify_hmac_sha1(mock_request_lti_1, client_secret) and not
signature.verify_hmac_sha1(mock_request_lti_2, client_secret)):
log.error("OAuth signature verification failed, for "
"headers:{} url:{} method:{}".format(
oauth_headers,
self.get_outcome_service_url(),
unicode(request.method)
))
raise LTIError("OAuth signature verification has failed.")
def get_client_key_secret(self):
"""
Obtains client_key and client_secret credentials from current course.
"""
course = self.get_course()
for lti_passport in course.lti_passports:
try:
lti_id, key, secret = [i.strip() for i in lti_passport.split(':')]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse LTI passport: {lti_passport}. Should be "id:key:secret" string.').format(
lti_passport='{0!r}'.format(lti_passport)
)
raise LTIError(msg)
if lti_id == self.lti_id.strip():
return key, secret
return '', ''
def is_past_due(self):
"""
Is it now past this problem's due date, including grace period?
"""
due_date = self.due # pylint: disable=no-member
if self.graceperiod is not None and due_date: # pylint: disable=no-member
close_date = due_date + self.graceperiod # pylint: disable=no-member
else:
close_date = due_date
return close_date is not None and datetime.datetime.now(UTC()) > close_date
class LTIDescriptor(LTIFields, MetadataOnlyEditingDescriptor, EmptyDataRawDescriptor):
"""
Descriptor for LTI Xmodule.
"""
module_class = LTIModule
resources_dir = None
grade_handler = module_attr('grade_handler')
preview_handler = module_attr('preview_handler')
lti_2_0_result_rest_handler = module_attr('lti_2_0_result_rest_handler')
clear_user_module_score = module_attr('clear_user_module_score')
get_outcome_service_url = module_attr('get_outcome_service_url')
|
sphereflow/space_combat
|
refs/heads/master
|
src/billboard_old.py
|
1
|
from texture import *
import state_machine as StateMachine
from math_3d import *
from movement import *
from bound_collidable import *
class Billboard(BoundCollidable) :
    def __init__(self) :
        self.m = Movement()
        self.tex = None  # set later via set_texture()
        self.vbos = None
        self.list_index = -1
        self.vao = None
def copy(self) :
ret = Billboard()
ret.set_texture(self.tex)
ret.m.r.width = self.m.r.width
ret.m.r.height = self.m.r.height
return ret
def render(self) :
StateMachine.set_model_view(self.m.get_mpos())
self.tex.set_tex()
        if self.vao is not None :
            glBindVertexArray(self.vao)
            glDrawArrays(GL_TRIANGLES, 0, 6)  # the quad is two triangles = 6 vertices
return
        if self.vbos is not None :
glBindBuffer(GL_ARRAY_BUFFER, self.vbos[0])
glVertexPointer(2, GL_FLOAT, 0, None)
glBindBuffer(GL_ARRAY_BUFFER,self.vbos[1])
glTexCoordPointer(2, GL_FLOAT, 0, None)
glDrawArrays(GL_TRIANGLES, 0, 6)
return
if self.list_index >= 0 :
glCallList(self.list_index)
return
glBegin(GL_TRIANGLES)
glTexCoord2i(0, 1)
glVertex2f(-self.m.r.width * 0.5, -self.m.r.height * 0.5)
glTexCoord2i(1, 1)
glVertex2f(self.m.r.width * 0.5, -self.m.r.height * 0.5)
glTexCoord2i(1, 0)
glVertex2f(self.m.r.width * 0.5, self.m.r.height * 0.5)
glTexCoord2i(0, 1)
glVertex2f(-self.m.r.width * 0.5, -self.m.r.height * 0.5)
glTexCoord2i(1, 0)
glVertex2f(self.m.r.width * 0.5, self.m.r.height * 0.5)
glTexCoord2i(0, 0)
glVertex2f(-self.m.r.width * 0.5, self.m.r.height * 0.5)
glEnd()
    def get_rect(self, r=None) :
        # NOTE : rebinding the parameter had no effect for the caller,
        # so return the rect instead
        return self.m.r
def set_texture(self, t) :
if not t :
return
self.tex = t
def get_texture(self) :
return self.tex
    def gen_dl(self) :
        # store the list index on the instance so render() can find it
        self.list_index = glGenLists(1)
        glNewList(self.list_index, GL_COMPILE)
glBegin(GL_TRIANGLES)
glTexCoord2i(0, 1)
glVertex2f(-self.m.r.width * 0.5, -self.m.r.height * 0.5)
glTexCoord2i(1, 1)
glVertex2f(self.m.r.width * 0.5, -self.m.r.height * 0.5)
glTexCoord2i(1, 0)
glVertex2f(self.m.r.width * 0.5, self.m.r.height * 0.5)
glTexCoord2i(0, 1)
glVertex2f(-self.m.r.width * 0.5, -self.m.r.height * 0.5)
glTexCoord2i(1, 0)
glVertex2f(self.m.r.width * 0.5, self.m.r.height * 0.5)
glTexCoord2i(0, 0)
glVertex2f(-self.m.r.width * 0.5, self.m.r.height * 0.5)
glEnd()
glEndList()
    def gen_va(self) :
        self.vao = glGenVertexArrays(1)
        glBindVertexArray(self.vao)
        self.gen_vbo()
        # gen_vbo() leaves vbos[1] bound, so rebind each buffer before
        # setting its attribute pointer: positions live in vbos[0],
        # texture coordinates in vbos[1]
        glBindBuffer(GL_ARRAY_BUFFER, self.vbos[0])
        glEnableVertexAttribArray(0)
        glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, None)
        glBindBuffer(GL_ARRAY_BUFFER, self.vbos[1])
        glEnableVertexAttribArray(1)
        glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, None)
# TODO : this sets the modelview, too. prevent this from happening
def gen_vbo(self) :
self.vbos = glGenBuffers(2)
d = np.array([-self.m.r.width * 0.5, -self.m.r.height * 0.5,
self.m.r.width * 0.5, -self.m.r.height * 0.5,
self.m.r.width * 0.5, self.m.r.height * 0.5,
-self.m.r.width * 0.5, -self.m.r.height * 0.5,
self.m.r.width * 0.5, self.m.r.height * 0.5,
-self.m.r.width * 0.5, self.m.r.height * 0.5], dtype = 'float32')
glBindBuffer(GL_ARRAY_BUFFER, self.vbos[0])
glBufferData(GL_ARRAY_BUFFER, d, GL_STATIC_DRAW)
d = np.array([0.0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0], dtype = 'float32')
glBindBuffer(GL_ARRAY_BUFFER, self.vbos[1])
glBufferData(GL_ARRAY_BUFFER, d, GL_STATIC_DRAW)
|
supergis/micropython
|
refs/heads/master
|
tests/basics/parser.py
|
62
|
# parser tests
try:
compile
except NameError:
print("SKIP")
import sys
sys.exit()
# completely empty string
# uPy and CPy differ for this case
#try:
# compile("", "stdin", "single")
#except SyntaxError:
# print("SyntaxError")
try:
compile("", "stdin", "eval")
except SyntaxError:
print("SyntaxError")
compile("", "stdin", "exec")
# empty continued line
try:
compile("\\\n", "stdin", "single")
except SyntaxError:
print("SyntaxError")
try:
compile("\\\n", "stdin", "eval")
except SyntaxError:
print("SyntaxError")
compile("\\\n", "stdin", "exec")
|
EUDAT-B2SHARE/invenio-old
|
refs/heads/next
|
modules/bibformat/lib/elements/bfe_authority_institution.py
|
3
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints institution data from an Authority Record.
"""
__revision__ = "$Id$"
from invenio.config import CFG_SITE_URL
from invenio.bibauthority_config import \
CFG_BIBAUTHORITY_RECORD_CONTROL_NUMBER_FIELD, \
CFG_BIBAUTHORITY_AUTHORITY_COLLECTION_NAME
from invenio.bibauthority_engine import \
get_control_nos_from_recID, \
guess_main_name_from_authority_recID
from invenio.search_engine import \
perform_request_search, \
get_record
def format_element(bfo, detail='no'):
""" Prints the data of an institution authority record in HTML. By default prints
brief version.
    @param detail: whether to use the 'detailed' rather than the 'brief' format
    @type detail: 'yes' or 'no'
"""
from invenio.messages import gettext_set_language
_ = gettext_set_language(bfo.lang) # load the right message language
# return value
out = ""
# brief
main_dicts = bfo.fields('110%%')
if len(main_dicts):
main = main_dicts[0].get('a') or ""
out += "<p>" + "<strong>" + _("Main %s name") % _("institution") + "</strong>" + ": " + main + "</p>"
# detail
if detail.lower() == "yes":
sees = [see_dict['a'] for see_dict in bfo.fields('410%%') if 'a' in see_dict]
sees = filter(None, sees) # fastest way to remove empty ""s
if len(sees):
out += "<p>" + "<strong>" + _("Variant(s)") + "</strong>" + ": " + ", ".join(sees) + "</p>"
see_also_dicts = bfo.fields('510%%')
cc_val = CFG_BIBAUTHORITY_AUTHORITY_COLLECTION_NAME
c_val = "Authority Institution"
record_url_pattern = "/record/" + "%s"
search_url_pattern = "/search?" + \
"cc=" + "%s" + \
"&c=" + "%s" + \
"&p=" + "%s" + \
"&sc=" + "%s"
link_pattern = "<a href='" + CFG_SITE_URL + '%s' + "'>" + '%s' + "</a>"
# populate the first 3 lists
parent_htmls, predecessor_htmls, successor_htmls = \
get_main_htmls(see_also_dicts, cc_val, c_val, record_url_pattern,
search_url_pattern, link_pattern)
# populate the list of children
child_htmls = \
get_child_htmls(bfo.recID, cc_val, c_val, record_url_pattern,
link_pattern)
# put it all together
if len(parent_htmls):
out += "<p>" + "<strong>" + _("Parent") + "</strong>" + ": " + ", ".join(parent_htmls) + "</p>"
if len(child_htmls):
out += "<p>" + "<strong>" + _("Children") + "</strong>" + ": " + ", ".join(child_htmls) + "</p>"
if len(predecessor_htmls):
out += "<p>" + "<strong>" + _("Predecessor") + "</strong>" + ": " + ", ".join(predecessor_htmls) + "</p>"
if len(successor_htmls):
out += "<p>" + "<strong>" + _("Successor") + "</strong>" + ": " + ", ".join(successor_htmls) + "</p>"
# return
return out
def get_main_htmls(see_also_dicts, cc_val, c_val, record_url_pattern,
search_url_pattern, link_pattern):
"""parent_htmls, predecessor_htmls, successor_htmls can all be deduced
directly from the metadata of the record"""
# reusable vars
f_val = CFG_BIBAUTHORITY_RECORD_CONTROL_NUMBER_FIELD
sc_val = "1"
parent_htmls = []
predecessor_htmls = []
successor_htmls = []
# start processing
for see_also_dict in see_also_dicts:
if 'w' in see_also_dict:
# $w contains 'a' for predecessor, 'b' for successor, etc.
w_subfield = see_also_dict.get('w')
# $4 contains control_no of linked authority record
_4_subfield = see_also_dict.get('4')
# $a contains the name of the linked institution
out_string = see_also_dict.get('a') or _4_subfield
# if we have something to display
if out_string:
url = ''
# if we have a control number
if _4_subfield:
p_val = _4_subfield
# if CFG_BIBAUTHORITY_PREFIX_SEP in _4_subfield:
# unused, p_val = _4_subfield.split(CFG_BIBAUTHORITY_PREFIX_SEP);
recIDs = perform_request_search(cc=cc_val,
c=c_val,
p=p_val,
f=f_val)
if len(recIDs) == 1:
url = record_url_pattern % (recIDs[0])
elif len(recIDs) > 1:
p_val = "recid:" + \
" or recid:".join([str(r) for r in recIDs])
url = search_url_pattern % (cc_val,
c_val,
p_val,
sc_val)
# if we found one or multiple records for the control_no,
# make the out_string a clickable url towards those records
if url:
out_string = link_pattern % (url, out_string)
# add the out_string to the appropriate list
if w_subfield == 't':
parent_htmls.append(out_string)
elif w_subfield == 'a':
predecessor_htmls.append(out_string)
elif w_subfield == 'b':
successor_htmls.append(out_string)
# return
return parent_htmls, predecessor_htmls, successor_htmls
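# Illustrative note (hypothetical sample data): a 510 see-also dict such as
# {'a': 'CERN', 'w': 'a', '4': 'some_control_no'} would be classified as a
# predecessor, since $w is 'a'; 't' marks a parent and 'b' a successor (see
# the w_subfield checks above).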
def get_child_htmls(this_recID, cc_val, c_val, record_url_pattern,
link_pattern):
"""children aren'r referenced by parents, so we need special treatment to find
them"""
control_nos = get_control_nos_from_recID(this_recID)
for control_no in control_nos:
url = ''
p_val = '510%4:"' + control_no + '" and 510%w:t'
# find a first, fuzzy result set
# narrowing down on a few possible recIDs
recIDs = perform_request_search(cc=cc_val,
c=c_val,
p=p_val)
# now filter to find the ones where the subfield conditions of p_val
# are both true within the exact same field
sf_req = [('w', 't'), ('4', control_no)]
recIDs = filter(lambda x:
match_all_subfields_for_tag(x, '510', sf_req),
recIDs)
# proceed with assembling the html link
child_htmls = []
for recID in recIDs:
url = record_url_pattern % str(recID)
display = guess_main_name_from_authority_recID(recID) or str(recID)
out_html = link_pattern % (url, display)
child_htmls.append(out_html)
return child_htmls
def match_all_subfields_for_tag(recID, field_tag, subfields_required=[]):
"""
Tests whether the record with recID has at least one field with 'field_tag'
where all of the required subfields in subfields_required match a subfield
in the given field both in code and value
@param recID: record ID
@type recID: int
@param field_tag: a 3 digit code for the field tag code
@type field_tag: string
@param subfields_required: a list of subfield code/value tuples
@type subfields_required: list of tuples of strings.
same format as in get_record():
e.g. [('w', 't'),
('4', 'XYZ123')]
@return: boolean
"""
rec = get_record(recID)
for field in rec[field_tag]:
subfields_present = field[0]
intersection = set(subfields_present) & set(subfields_required)
if set(subfields_required) == intersection:
return True
return False
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
|
tpazderka/pysaml2
|
refs/heads/master
|
tests/idp_conf.py
|
1
|
from saml2 import BINDING_SOAP
from saml2 import BINDING_HTTP_REDIRECT
from saml2 import BINDING_HTTP_POST
from saml2.saml import NAMEID_FORMAT_PERSISTENT
from saml2.saml import NAME_FORMAT_URI
from pathutils import full_path
from pathutils import xmlsec_path
BASE = "http://localhost:8088"
CONFIG = {
"entityid": "urn:mace:example.com:saml:roland:idp",
"name": "Rolands IdP",
"service": {
"idp": {
"endpoints": {
"single_sign_on_service": [
("%s/sso" % BASE, BINDING_HTTP_REDIRECT)],
"single_logout_service": [
("%s/slo" % BASE, BINDING_SOAP),
("%s/slop" % BASE, BINDING_HTTP_POST)]
},
"policy": {
"default": {
"lifetime": {"minutes": 15},
"attribute_restrictions": None, # means all I have
"name_form": NAME_FORMAT_URI,
},
"urn:mace:example.com:saml:roland:sp": {
"lifetime": {"minutes": 5},
"nameid_format": NAMEID_FORMAT_PERSISTENT,
# "attribute_restrictions":{
# "givenName": None,
# "surName": None,
# }
}
},
"subject_data": full_path("subject_data.db"),
#"domain": "umu.se",
#"name_qualifier": ""
},
},
"debug": 1,
"key_file": full_path("test.key"),
"cert_file": full_path("test.pem"),
"xmlsec_binary": xmlsec_path,
"metadata": [{
"class": "saml2.mdstore.MetaDataFile",
"metadata": [(full_path("metadata_sp_1.xml"), ),
(full_path("vo_metadata.xml"), )],
}],
"attribute_map_dir": full_path("attributemaps"),
"organization": {
"name": "Exempel AB",
"display_name": [("Exempel AB", "se"), ("Example Co.", "en")],
"url": "http://www.example.com/roland",
},
"contact_person": [
{
"given_name": "John",
"sur_name": "Smith",
"email_address": ["john.smith@example.com"],
"contact_type": "technical",
},
],
}
|
tark-hidden/jinja2
|
refs/heads/master
|
examples/basic/inheritance.py
|
83
|
from jinja2 import Environment
from jinja2.loaders import DictLoader
env = Environment(loader=DictLoader({
'a': '''[A[{% block body %}{% endblock %}]]''',
'b': '''{% extends 'a' %}{% block body %}[B]{% endblock %}''',
'c': '''{% extends 'b' %}{% block body %}###{{ super() }}###{% endblock %}'''
}))
print env.get_template('c').render()
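# Expected output: template 'c' wraps its parent's block body via super(),
# so this should print: [A[###[B]###]]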
|
ewollesen/AutobahnPython
|
refs/heads/master
|
examples/websocket/streaming/frame_based_client.py
|
15
|
###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from ranstring import randomByteString
from twisted.internet import reactor
from autobahn.websocket import WebSocketProtocol, \
WebSocketClientFactory, \
WebSocketClientProtocol, \
connectWS
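# Each WebSocket frame carries 1 MiB (2**20 bytes) of random payload.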
FRAME_SIZE = 1 * 2**20
class FrameBasedHashClientProtocol(WebSocketClientProtocol):
"""
Frame-based WebSocket client that generates a stream of random octets and
sends them to the WebSocket server as a sequence of frames, all within one
message. The server responds with the SHA-256 computed over each frame.
When we receive a response, we repeat by sending a new frame.
"""
def sendOneFrame(self):
data = randomByteString(FRAME_SIZE)
self.sendMessageFrame(data)
def onOpen(self):
self.count = 0
self.beginMessage(opcode = WebSocketProtocol.MESSAGE_TYPE_BINARY)
self.sendOneFrame()
def onMessage(self, message, binary):
print "Digest for frame %d computed by server: %s" \
% (self.count, message)
self.count += 1
self.sendOneFrame()
if __name__ == '__main__':
factory = WebSocketClientFactory("ws://localhost:9000")
factory.protocol = FrameBasedHashClientProtocol
connectWS(factory)
reactor.run()
|
vgupta6/Project-2
|
refs/heads/master
|
modules/s3/pyvttbl/stats/jsci.py
|
16
|
#!/usr/bin/python
"""Basic statistics utility functions.
The implementation of Student's t distribution inverse CDF was ported to Python
from JSci. The parameters are set to only be accurate to approximately 5
decimal places.
The JSci port comes first. "New" code is near the bottom.
JSci information:
http://jsci.sourceforge.net/
Original Author: Mark Hale
Original Licence: LGPL
"""
import math
# Relative machine precision.
EPS = 2.22e-16
# The smallest positive floating-point number such that 1/xminin is machine representable.
XMININ = 2.23e-308
# Square root of 2 * pi
SQRT2PI = 2.5066282746310005024157652848110452530069867406099
LOGSQRT2PI = math.log(SQRT2PI);
# Rough estimate of the fourth root of logGamma_xBig
lg_frtbig = 2.25e76
pnt68 = 0.6796875
# lower value = higher precision
PRECISION = 4.0*EPS
def betaFraction(x, p, q):
"""Evaluates of continued fraction part of incomplete beta function.
Based on an idea from Numerical Recipes (W.H. Press et al, 1992)."""
sum_pq = p + q
p_plus = p + 1.0
p_minus = p - 1.0
h = 1.0-sum_pq*x/p_plus;
if abs(h) < XMININ:
h = XMININ
h = 1.0/h
frac = h
m = 1
delta = 0.0
c = 1.0
while m <= MAX_ITERATIONS and abs(delta-1.0) > PRECISION:
m2 = 2*m
# even index for d
d=m*(q-m)*x/((p_minus+m2)*(p+m2))
h=1.0+d*h
if abs(h) < XMININ: h=XMININ
h=1.0/h;
c=1.0+d/c;
if abs(c) < XMININ: c=XMININ
frac *= h*c;
# odd index for d
d = -(p+m)*(sum_pq+m)*x/((p+m2)*(p_plus+m2))
h=1.0+d*h
if abs(h) < XMININ: h=XMININ;
h=1.0/h
c=1.0+d/c
if abs(c) < XMININ: c = XMININ
delta=h*c
frac *= delta
m += 1
return frac
# The largest argument for which <code>logGamma(x)</code> is representable in the machine.
LOG_GAMMA_X_MAX_VALUE = 2.55e305
# Log Gamma related constants
lg_d1 = -0.5772156649015328605195174;
lg_d2 = 0.4227843350984671393993777;
lg_d4 = 1.791759469228055000094023;
lg_p1 = [ 4.945235359296727046734888,
201.8112620856775083915565, 2290.838373831346393026739,
11319.67205903380828685045, 28557.24635671635335736389,
38484.96228443793359990269, 26377.48787624195437963534,
7225.813979700288197698961 ]
lg_q1 = [ 67.48212550303777196073036,
1113.332393857199323513008, 7738.757056935398733233834,
27639.87074403340708898585, 54993.10206226157329794414,
61611.22180066002127833352, 36351.27591501940507276287,
8785.536302431013170870835 ]
lg_p2 = [ 4.974607845568932035012064,
542.4138599891070494101986, 15506.93864978364947665077,
184793.2904445632425417223, 1088204.76946882876749847,
3338152.967987029735917223, 5106661.678927352456275255,
3074109.054850539556250927 ]
lg_q2 = [ 183.0328399370592604055942,
7765.049321445005871323047, 133190.3827966074194402448,
1136705.821321969608938755, 5267964.117437946917577538,
13467014.54311101692290052, 17827365.30353274213975932,
9533095.591844353613395747 ]
lg_p4 = [ 14745.02166059939948905062,
2426813.369486704502836312, 121475557.4045093227939592,
2663432449.630976949898078, 29403789566.34553899906876,
170266573776.5398868392998, 492612579337.743088758812,
560625185622.3951465078242 ]
lg_q4 = [ 2690.530175870899333379843,
639388.5654300092398984238, 41355999.30241388052042842,
1120872109.61614794137657, 14886137286.78813811542398,
101680358627.2438228077304, 341747634550.7377132798597,
446315818741.9713286462081 ]
lg_c = [ -0.001910444077728,8.4171387781295e-4,
-5.952379913043012e-4, 7.93650793500350248e-4,
-0.002777777777777681622553, 0.08333333333333333331554247,
0.0057083835261 ]
def logGamma(x):
"""The natural logarithm of the gamma function.
Based on public domain NETLIB (Fortran) code by W. J. Cody and L. Stoltz<BR>
Applied Mathematics Division<BR>
Argonne National Laboratory<BR>
Argonne, IL 60439<BR>
<P>
References:
<OL>
<LI>W. J. Cody and K. E. Hillstrom, 'Chebyshev Approximations for the Natural Logarithm of the Gamma Function,' Math. Comp. 21, 1967, pp. 198-203.
<LI>K. E. Hillstrom, ANL/AMD Program ANLC366S, DGAMMA/DLGAMA, May, 1969.
<LI>Hart, Et. Al., Computer Approximations, Wiley and sons, New York, 1968.
</OL></P><P>
From the original documentation:
</P><P>
This routine calculates the LOG(GAMMA) function for a positive real argument X.
Computation is based on an algorithm outlined in references 1 and 2.
The program uses rational functions that theoretically approximate LOG(GAMMA)
to at least 18 significant decimal digits. The approximation for X > 12 is from reference 3,
while approximations for X < 12.0 are similar to those in reference 1, but are unpublished.
The accuracy achieved depends on the arithmetic system, the compiler, the intrinsic functions,
and proper selection of the machine-dependent constants.
</P><P>
Error returns:<BR>
The program returns the value XINF for X .LE. 0.0 or when overflow would occur.
The computation is believed to be free of underflow and overflow."""
y = x
if y < 0.0 or y > LOG_GAMMA_X_MAX_VALUE:
# Bad arguments
return float("inf")
if y <= EPS:
return -math.log(y)
if y <= 1.5:
if (y < pnt68):
corr = -math.log(y)
xm1 = y
else:
corr = 0.0;
xm1 = y - 1.0;
if y <= 0.5 or y >= pnt68:
xden = 1.0;
xnum = 0.0;
for i in xrange(8):
xnum = xnum * xm1 + lg_p1[i];
xden = xden * xm1 + lg_q1[i];
return corr + xm1 * (lg_d1 + xm1 * (xnum / xden));
else:
xm2 = y - 1.0;
xden = 1.0;
xnum = 0.0;
for i in xrange(8):
xnum = xnum * xm2 + lg_p2[i];
xden = xden * xm2 + lg_q2[i];
return corr + xm2 * (lg_d2 + xm2 * (xnum / xden));
if (y <= 4.0):
xm2 = y - 2.0;
xden = 1.0;
xnum = 0.0;
for i in xrange(8):
xnum = xnum * xm2 + lg_p2[i];
xden = xden * xm2 + lg_q2[i];
return xm2 * (lg_d2 + xm2 * (xnum / xden));
if y <= 12.0:
xm4 = y - 4.0;
xden = -1.0;
xnum = 0.0;
for i in xrange(8):
xnum = xnum * xm4 + lg_p4[i];
xden = xden * xm4 + lg_q4[i];
return lg_d4 + xm4 * (xnum / xden);
assert y <= lg_frtbig
res = lg_c[6];
ysq = y * y;
for i in xrange(6):
res = res / ysq + lg_c[i];
res /= y;
corr = math.log(y);
res = res + LOGSQRT2PI - 0.5 * corr;
res += y * (corr - 1.0);
return res
def logBeta(p, q):
"""The natural logarithm of the beta function."""
assert p > 0
assert q > 0
if p <= 0 or q <= 0 or p + q > LOG_GAMMA_X_MAX_VALUE:
return 0
return logGamma(p)+logGamma(q)-logGamma(p+q)
def incompleteBeta(x, p, q):
"""Incomplete beta function.
The computation is based on formulas from Numerical Recipes, Chapter 6.4 (W.H. Press et al, 1992).
Ported from Java: http://jsci.sourceforge.net/"""
assert 0 <= x <= 1
assert p > 0
assert q > 0
# Range checks at the endpoints; these also sidestep numerical stability issues
if x <= 0.0:
return 0.0
if x >= 1.0:
return 1.0
if p <= 0.0 or q <= 0.0 or (p+q) > LOG_GAMMA_X_MAX_VALUE:
return 0.0
beta_gam = math.exp(-logBeta(p,q) + p*math.log(x) + q*math.log(1.0-x))
if x < (p+1.0)/(p+q+2.0):
return beta_gam*betaFraction(x, p, q)/p
else:
return 1.0-(beta_gam*betaFraction(1.0-x,q,p)/q)
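# For example, incompleteBeta(x, 1.0, 1.0) reduces to x, the CDF of the
# uniform distribution on [0, 1].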
ACCURACY = 10**-7
MAX_ITERATIONS = 10000
def findRoot(value, x_low, x_high, function):
"""Use the bisection method to find root such that function(root) == value."""
guess = (x_high + x_low) / 2.0
v = function(guess)
difference = v - value
i = 0
while abs(difference) > ACCURACY and i < MAX_ITERATIONS:
i += 1
if difference > 0:
x_high = guess
else:
x_low = guess
guess = (x_high + x_low) / 2.0
v = function(guess)
difference = v - value
return guess
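# For example, findRoot(0.25, 0.0, 1.0, lambda x: x * x) converges to 0.5,
# since 0.5 ** 2 == 0.25.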
def StudentTCDF(degree_of_freedom, X):
"""Student's T distribution CDF. Returns probability that a value x < X.
Ported from Java: http://jsci.sourceforge.net/"""
A = 0.5 * incompleteBeta(degree_of_freedom/(degree_of_freedom+X*X), 0.5*degree_of_freedom, 0.5)
if X > 0:
return 1 - A
return A
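# Note: StudentTCDF(df, 0) == 0.5 for any df, by the symmetry of the t
# distribution around zero.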
def InverseStudentT(degree_of_freedom, probability):
"""Inverse of Student's T distribution CDF. Returns the value x such that CDF(x) = probability.
Ported from Java: http://jsci.sourceforge.net/
This is not the best algorithm in the world. SciPy has a Fortran version
(see special.stdtrit):
http://svn.scipy.org/svn/scipy/trunk/scipy/stats/distributions.py
http://svn.scipy.org/svn/scipy/trunk/scipy/special/cdflib/cdft.f
Very detailed information:
http://www.maths.ox.ac.uk/~shaww/finpapers/tdist.pdf
"""
assert 0 <= probability <= 1
if probability == 1:
return float("inf")
if probability == 0:
return float("-inf")
if probability == 0.5:
return 0.0
def f(x):
return StudentTCDF(degree_of_freedom, x)
return findRoot(probability, -10**4, 10**4, f)
def tinv(p, degree_of_freedom):
"""Similar to the TINV function in Excel
p: 1-confidence (eg. 0.05 = 95% confidence)"""
assert 0 <= p <= 1
confidence = 1 - p
return InverseStudentT(degree_of_freedom, (1+confidence)/2.0)
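# For example, tinv(0.05, 9) is approximately 2.262, the familiar two-sided
# 95% critical value for 9 degrees of freedom.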
def memoize(function):
cache = {}
def closure(*args):
if args not in cache:
cache[args] = function(*args)
return cache[args]
return closure
# Cache tinv results, since we typically call it with the same args over and over
cached_tinv = memoize(tinv)
def stats(r, confidence_interval=0.05):
"""Returns statistics about a sequence of numbers.
By default it computes the 95% confidence interval.
Returns (average, median, standard deviation, min, max, confidence interval)"""
total = sum(r)
average = total/float(len(r))
sum_deviation_squared = sum([(i-average)**2 for i in r])
standard_deviation = math.sqrt(sum_deviation_squared/(len(r)-1 or 1))
s = list(r)
s.sort()
median = s[len(s)/2]
minimum = s[0]
maximum = s[-1]
# See: http://davidmlane.com/hyperstat/
# confidence_95 = 1.959963984540051 * standard_deviation / math.sqrt(len(r))
# We must estimate both using the t distribution:
# http://davidmlane.com/hyperstat/B7483.html
# s_m = s / sqrt(N)
s_m = standard_deviation / math.sqrt(len(r))
# Degrees of freedom = n-1
# t = tinv(0.05, degrees_of_freedom)
# confidence = +/- t * s_m
confidence = cached_tinv(confidence_interval, len(r)-1) * s_m
return average, median, standard_deviation, minimum, maximum, confidence
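# Minimal usage sketch (not part of the original module; values quoted are
# approximate). Runs only when the file is executed directly:
if __name__ == '__main__':
    # Two-sided 95% critical value for 9 degrees of freedom (roughly 2.262).
    print "tinv(0.05, 9) = %f" % cached_tinv(0.05, 9)
    # Descriptive statistics with a 95% confidence interval half-width.
    avg, med, std, mn, mx, half_width = stats([1.0, 2.0, 3.0, 4.0, 5.0])
    print "avg=%f median=%f stdev=%f min=%f max=%f +/-%f" % (
        avg, med, std, mn, mx, half_width)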
|
quamilek/django
|
refs/heads/master
|
django/contrib/gis/db/backends/postgis/adapter.py
|
373
|
"""
This object provides quoting for GEOS geometries into PostgreSQL/PostGIS.
"""
from __future__ import unicode_literals
from psycopg2 import Binary
from psycopg2.extensions import ISQLQuote
class PostGISAdapter(object):
def __init__(self, geom, geography=False):
"Initializes on the geometry."
# Getting the WKB (in string form, to allow easy pickling of
# the adaptor) and the SRID from the geometry.
self.ewkb = bytes(geom.ewkb)
self.srid = geom.srid
self.geography = geography
self._adapter = Binary(self.ewkb)
def __conform__(self, proto):
# Does the given protocol conform to what Psycopg2 expects?
if proto == ISQLQuote:
return self
else:
raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?')
def __eq__(self, other):
if not isinstance(other, PostGISAdapter):
return False
return (self.ewkb == other.ewkb) and (self.srid == other.srid)
def __hash__(self):
return hash((self.ewkb, self.srid))
def __str__(self):
return self.getquoted()
def prepare(self, conn):
"""
This method allows escaping the binary in the style required by the
server's `standard_conforming_string` setting.
"""
self._adapter.prepare(conn)
def getquoted(self):
"Returns a properly quoted string for use in PostgreSQL/PostGIS."
# psycopg will figure out whether to use E'\\000' or '\000'
return str('%s(%s)' % (
'ST_GeogFromWKB' if self.geography else 'ST_GeomFromEWKB',
self._adapter.getquoted().decode())
)
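# Rough usage sketch (hypothetical; "geom" stands for any GEOS geometry with
# an SRID set). After prepare() has been called with a live psycopg2
# connection, PostGISAdapter(geom).getquoted() returns a string along the
# lines of "ST_GeomFromEWKB('\\x0101...'::bytea)" ready to embed in SQL.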
|
dkodnik/arp
|
refs/heads/master
|
openerp/addons/test_exceptions/models.py
|
46
|
# -*- coding: utf-8 -*-
import openerp
class m(openerp.osv.osv.Model):
""" This model exposes a few methods that will raise the different
exceptions that must be handled by the server (and its RPC layer)
and the clients.
"""
_name = 'test.exceptions.model'
def generate_except_osv(self, cr, uid, ids, context=None):
# title is ignored in the new (6.1) exceptions
raise openerp.osv.osv.except_osv('title', 'description')
def generate_except_orm(self, cr, uid, ids, context=None):
# title is ignored in the new (6.1) exceptions
raise openerp.osv.orm.except_orm('title', 'description')
def generate_warning(self, cr, uid, ids, context=None):
raise openerp.exceptions.Warning('description')
def generate_redirect_warning(self, cr, uid, ids, context=None):
dummy, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'test_exceptions', 'action_test_exceptions')
raise openerp.exceptions.RedirectWarning('description', action_id, 'go to the redirection')
def generate_access_denied(self, cr, uid, ids, context=None):
raise openerp.exceptions.AccessDenied()
def generate_access_error(self, cr, uid, ids, context=None):
raise openerp.exceptions.AccessError('description')
def generate_exc_access_denied(self, cr, uid, ids, context=None):
raise Exception('AccessDenied')
def generate_undefined(self, cr, uid, ids, context=None):
self.surely_undefined_symbol
def generate_except_osv_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_except_osv, context)
def generate_except_orm_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_except_orm, context)
def generate_warning_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_warning, context)
def generate_redirect_warning_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_redirect_warning, context)
def generate_access_denied_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_access_denied, context)
def generate_access_error_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_access_error, context)
def generate_exc_access_denied_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_exc_access_denied, context)
def generate_undefined_safe_eval(self, cr, uid, ids, context=None):
self.generate_safe_eval(cr, uid, ids, self.generate_undefined, context)
def generate_safe_eval(self, cr, uid, ids, f, context):
globals_dict = { 'generate': lambda *args: f(cr, uid, ids, context) }
openerp.tools.safe_eval.safe_eval("generate()", mode='exec', globals_dict=globals_dict)
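# The *_safe_eval variants above check that each exception raised inside
# sandboxed safe_eval code propagates unchanged to the caller (and thus to
# the RPC layer).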
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
drnextgis/QGIS
|
refs/heads/master
|
tests/src/python/test_qgsserver_accesscontrol.py
|
4
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Stephane Brunner'
__date__ = '28/08/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
print('CTEST_FULL_OUTPUT')
import qgis # NOQA
import os
from shutil import copyfile
from math import sqrt
from qgis.testing import unittest
from utilities import unitTestDataPath
from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly
from qgis.server import QgsServer, QgsAccessControlFilter
from qgis.core import QgsRenderChecker, QgsApplication
from qgis.PyQt.QtCore import QSize
import tempfile
import urllib.request
import urllib.parse
import urllib.error
import base64
XML_NS = \
'service="WFS" version="1.0.0" ' \
'xmlns:wfs="http://www.opengis.net/wfs" ' \
'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' \
'xmlns:ogc="http://www.opengis.net/ogc" ' \
'xmlns="http://www.opengis.net/wfs" updateSequence="0" ' \
'xmlns:xlink="http://www.w3.org/1999/xlink" ' \
'xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.0.0/WFS-capabilities.xsd" ' \
'xmlns:gml="http://www.opengis.net/gml" ' \
'xmlns:ows="http://www.opengis.net/ows" '
WFS_TRANSACTION_INSERT = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:Transaction {xml_ns}>
<wfs:Insert idgen="GenerateNew">
<qgs:db_point>
<qgs:geometry>
<gml:Point srsDimension="2" srsName="http://www.opengis.net/def/crs/EPSG/0/4326">
<gml:coordinates decimal="." cs="," ts=" ">{x},{y}</gml:coordinates>
</gml:Point>
</qgs:geometry>
<qgs:name>{name}</qgs:name>
<qgs:color>{color}</qgs:color>
</qgs:db_point>
</wfs:Insert>
</wfs:Transaction>""".format(x=1000, y=2000, name="test", color="{color}", xml_ns=XML_NS)
WFS_TRANSACTION_UPDATE = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:Transaction {xml_ns}>
<wfs:Update typeName="db_point">
<wfs:Property>
<wfs:Name>color</wfs:Name>
<wfs:Value>{color}</wfs:Value>
</wfs:Property>
<ogc:Filter>
<ogc:FeatureId fid="{id}"/>
</ogc:Filter>
</wfs:Update>
</wfs:Transaction>"""
WFS_TRANSACTION_DELETE = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:Transaction {xml_ns}>
<wfs:Delete typeName="db_point">
<ogc:Filter>
<ogc:FeatureId fid="{id}"/>
</ogc:Filter>
</wfs:Delete>
</wfs:Transaction>"""
class RestrictedAccessControl(QgsAccessControlFilter):
""" Used to have restriction access """
# Be able to deactivate the access control to have a reference point
_active = False
def __init__(self, server_iface):
super(RestrictedAccessControl, self).__init__(server_iface)
def layerFilterExpression(self, layer):
""" Return an additional expression filter """
if not self._active:
return super(RestrictedAccessControl, self).layerFilterExpression(layer)
return "$id = 1" if layer.name() == "Hello" else None
def layerFilterSubsetString(self, layer):
""" Return an additional subset string (typically SQL) filter """
if not self._active:
return super(RestrictedAccessControl, self).layerFilterSubsetString(layer)
if layer.name() == "Hello_SubsetString":
return "pk = 1"
elif layer.name() == "Hello_Project_SubsetString":
return "pkuid = 6 or pkuid = 7"
elif layer.name() == "Hello_Filter_SubsetString":
return "pkuid = 6 or pkuid = 7"
else:
return None
def layerPermissions(self, layer):
""" Return the layer rights """
if not self._active:
return super(RestrictedAccessControl, self).layerPermissions(layer)
rh = self.serverInterface().requestHandler()
rights = QgsAccessControlFilter.LayerPermissions()
# Used to test WFS transactions
if rh.parameter("LAYER_PERM") == "no" and rh.parameterMap()["LAYER_PERM"] == "no":
return rights
# Used to test the WCS
if rh.parameter("TEST") == "dem" and rh.parameterMap()["TEST"] == "dem":
rights.canRead = layer.name() != "dem"
else:
rights.canRead = layer.name() != "Country"
if layer.name() == "db_point":
rights.canRead = rights.canInsert = rights.canUpdate = rights.canDelete = True
return rights
def authorizedLayerAttributes(self, layer, attributes):
""" Return the authorised layer attributes """
if not self._active:
return super(RestrictedAccessControl, self).authorizedLayerAttributes(layer, attributes)
if "colour" in attributes:
attributes.remove("colour")
return attributes
def allowToEdit(self, layer, feature):
""" Are we authorise to modify the following geometry """
if not self._active:
return super(RestrictedAccessControl, self).allowToEdit(layer, feature)
return feature.attribute("color") in ["red", "yellow"]
def cacheKey(self):
return "r" if self._active else "f"
class TestQgsServerAccessControl(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
cls._app = QgsApplication([], False)
cls._server = QgsServer()
cls._server.handleRequest()
cls._server_iface = cls._server.serverInterface()
cls._accesscontrol = RestrictedAccessControl(cls._server_iface)
cls._server_iface.registerAccessControl(cls._accesscontrol, 100)
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
del cls._server
cls._app.exitQgis()
def setUp(self):
self.testdata_path = unitTestDataPath("qgis_server_accesscontrol")
dataFile = os.path.join(self.testdata_path, "helloworld.db")
self.assertTrue(os.path.isfile(dataFile), 'Could not find data file "{}"'.format(dataFile))
copyfile(dataFile, os.path.join(self.testdata_path, "_helloworld.db"))
for k in ["QUERY_STRING", "QGIS_PROJECT_FILE"]:
if k in os.environ:
del os.environ[k]
self.projectPath = os.path.join(self.testdata_path, "project.qgs")
self.assertTrue(os.path.isfile(self.projectPath), 'Could not find project file "{}"'.format(self.projectPath))
def tearDown(self):
copyfile(os.path.join(self.testdata_path, "_helloworld.db"), os.path.join(self.testdata_path, "helloworld.db"))
# # WMS # # WMS # # WMS # #
def test_wms_getcapabilities(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetCapabilities"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<Name>Hello</Name>") != -1,
"No Hello layer in GetCapabilities\n%s" % response)
self.assertTrue(
str(response).find("<Name>Country</Name>") != -1,
"No Country layer in GetCapabilities\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<Name>Hello</Name>") != -1,
"No Hello layer in GetCapabilities\n%s" % response)
self.assertFalse(
str(response).find("<Name>Country</Name>") != -1,
"Country layer in GetCapabilities\n%s" % response)
def test_wms_describelayer_hello(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "DescribeLayer",
"LAYERS": "Hello",
"SLD_VERSION": "1.1.0"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<se:FeatureTypeName>Hello</se:FeatureTypeName>") != -1,
"No Hello layer in DescribeLayer\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<se:FeatureTypeName>Hello</se:FeatureTypeName>") != -1,
"No Hello layer in DescribeLayer\n%s" % response)
def test_wms_describelayer_country(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "DescribeLayer",
"LAYERS": "Country",
"SLD_VERSION": "1.1.0"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<se:FeatureTypeName>Country</se:FeatureTypeName>") != -1,
"No Country layer in DescribeLayer\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<se:FeatureTypeName>Country</se:FeatureTypeName>") != -1,
"Country layer in DescribeLayer\n%s" % response)
def test_wms_getlegendgraphic_hello(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYERS": "Hello",
"FORMAT": "image/png"
}.items())])
response, headers = self._get_fullaccess(query_string)
self._img_diff_error(response, headers, "WMS_GetLegendGraphic_Hello", 250, QSize(10, 10))
response, headers = self._get_restricted(query_string)
self._img_diff_error(response, headers, "WMS_GetLegendGraphic_Hello", 250, QSize(10, 10))
def test_wms_getlegendgraphic_country(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetLegendGraphic",
"LAYERS": "Country",
"FORMAT": "image/png"
}.items())])
response, headers = self._get_fullaccess(query_string)
self._img_diff_error(response, headers, "WMS_GetLegendGraphic_Country", 250, QSize(10, 10))
response, headers = self._get_restricted(query_string)
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find('<ServiceException code="Security">') != -1,
"Not allowed GetLegendGraphic"
)
def test_wms_getmap(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Country,Hello",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_fullaccess(query_string)
self._img_diff_error(response, headers, "WMS_GetMap")
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Hello",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_restricted(query_string)
self._img_diff_error(response, headers, "Restricted_WMS_GetMap")
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Country",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_restricted(query_string)
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find('<ServiceException code="Security">') != -1,
"Not allowed do a GetMap on Country"
)
def test_wms_getfeatureinfo_hello(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Country,Hello",
"QUERY_LAYERS": "Hello",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "56",
"Y": "144"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeatureInfo\n%s" % response)
self.assertTrue(
str(response).find("<qgs:colour>red</qgs:colour>") != -1,
"No color in result of GetFeatureInfo\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeatureInfo\n%s" % response)
self.assertFalse(
str(response).find("<qgs:colour>red</qgs:colour>") != -1,
"Unexpected color in result of GetFeatureInfo\n%s" % response)
self.assertFalse(
str(response).find("<qgs:colour>NULL</qgs:colour>") != -1,
"Unexpected color NULL in result of GetFeatureInfo\n%s" % response)
def test_wms_getfeatureinfo_hello2(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Country,Hello",
"QUERY_LAYERS": "Hello",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "146",
"Y": "160"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No result in GetFeatureInfo\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"Unexpected result in GetFeatureInfo\n%s" % response)
def test_wms_getfeatureinfo_country(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Country,Hello",
"QUERY_LAYERS": "Country",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "56",
"Y": "144"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeatureInfo\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpected result in GetFeatureInfo\n%s" % response)
# # WFS # # WFS # # WFS # #
def test_wfs_getcapabilities(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.1.0",
"REQUEST": "GetCapabilities"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<Name>Hello</Name>") != -1,
"No Hello layer in WFS/GetCapabilities\n%s" % response)
self.assertTrue(
str(response).find("<Name>Country</Name>") != -1,
"No Country layer in WFS/GetCapabilities\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<Name>Hello</Name>") != -1,
"No Hello layer in WFS/GetCapabilities\n%s" % response)
self.assertFalse(
str(response).find("<Name>Country</Name>") != -1,
"Unexpected Country layer in WFS/GetCapabilities\n%s" % response)
def test_wfs_describefeaturetype_hello(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.1.0",
"REQUEST": "DescribeFeatureType",
"TYPENAME": "Hello"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find('name="Hello"') != -1,
"No Hello layer in DescribeFeatureType\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find('name="Hello"') != -1,
"No Hello layer in DescribeFeatureType\n%s" % response)
def test_wfs_describefeaturetype_country(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WFS",
"VERSION": "1.1.0",
"REQUEST": "DescribeFeatureType",
"TYPENAME": "Country"
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find('name="Country"') != -1,
"No Country layer in DescribeFeatureType\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find('name="Country"') != -1,
"Unexpected Country layer in DescribeFeatureType\n%s" % response)
def test_wfs_getfeature_hello(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:colour>red</qgs:colour>") != -1,
"No color in result of GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:colour>red</qgs:colour>") != -1,
"Unexpected color in result of GetFeature\n%s" % response)
self.assertFalse(
str(response).find("<qgs:colour>NULL</qgs:colour>") != -1,
"Unexpected color NULL in result of GetFeature\n%s" % response)
def test_wfs_getfeature_hello2(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>2</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_country(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Country" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pk</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No result in GetFeatureInfo\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"Unexpeced result in GetFeatureInfo\n%s" % response)
# # WCS # # WCS # # WCS # #
def test_wcs_getcapabilities(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WCS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities",
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<name>dem</name>") != -1,
"No dem layer in WCS/GetCapabilities\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<name>dem</name>") != -1,
"No dem layer in WCS/GetCapabilities\n%s" % response)
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WCS",
"VERSION": "1.0.0",
"REQUEST": "GetCapabilities",
"TEST": "dem",
}.items())])
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<name>dem</name>") != -1,
"Unexpected dem layer in WCS/GetCapabilities\n%s" % response)
def test_wcs_describecoverage(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WCS",
"VERSION": "1.0.0",
"REQUEST": "DescribeCoverage",
"COVERAGE": "dem",
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<name>dem</name>") != -1,
"No dem layer in DescribeCoverage\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<name>dem</name>") != -1,
"No dem layer in DescribeCoverage\n%s" % response)
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WCS",
"VERSION": "1.0.0",
"REQUEST": "DescribeCoverage",
"COVERAGE": "dem",
"TEST": "dem",
}.items())])
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<name>dem</name>") != -1,
"Unexpected dem layer in DescribeCoverage\n%s" % response)
def test_wcs_getcoverage(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WCS",
"VERSION": "1.0.0",
"REQUEST": "GetCoverage",
"COVERAGE": "dem",
"CRS": "EPSG:3857",
"BBOX": "-1387454,4252256,431091,5458375",
"HEIGHT": "100",
"WIDTH": "100",
"FORMAT": "GTiff",
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertEqual(
headers.get("Content-Type"), "image/tiff",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
self._geo_img_diff(response, "WCS_GetCoverage.geotiff") == 0,
"Image for GetCoverage is wrong")
response, headers = self._get_restricted(query_string)
self.assertEqual(
headers.get("Content-Type"), "image/tiff",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
self._geo_img_diff(response, "WCS_GetCoverage.geotiff") == 0,
"Image for GetCoverage is wrong")
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WCS",
"VERSION": "1.0.0",
"REQUEST": "GetCoverage",
"COVERAGE": "dem",
"CRS": "EPSG:3857",
"BBOX": "-1387454,4252256,431091,5458375",
"HEIGHT": "100",
"WIDTH": "100",
"FORMAT": "GTiff",
"TEST": "dem",
}.items())])
response, headers = self._get_restricted(query_string)
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find('<ServiceException code="Security">') != -1,
"Not allowed GetCoverage")
# # WFS/Transactions # #
def test_wfstransaction_insert(self):
data = WFS_TRANSACTION_INSERT.format(x=1000, y=2000, name="test", color="{color}", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_fullaccess(data.format(color="red"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Insert don't succeed\n%s" % response)
self._test_colors({2: "red"})
response, headers = self._post_restricted(data.format(color="blue"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">Feature modify permission denied</ServiceException>') != -1,
"WFS/Transactions Insert succeed\n%s" % response)
response, headers = self._post_restricted(data.format(color="red"), "LAYER_PERM=no")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">Feature insert permission denied</ServiceException>') != -1,
"WFS/Transactions Insert succeed\n%s" % response)
response, headers = self._post_restricted(data.format(color="yellow"), "LAYER_PERM=yes")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Insert don't succeed\n%s" % response)
self._test_colors({3: "yellow"})
def test_wfstransaction_update(self):
data = WFS_TRANSACTION_UPDATE.format(id="1", color="{color}", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_restricted(data.format(color="yellow"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">Feature modify permission denied</ServiceException>') != -1,
"WFS/Transactions Update succeed\n%s" % response)
self._test_colors({1: "blue"})
response, headers = self._post_fullaccess(data.format(color="red"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Update don't succeed\n%s" % response)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data.format(color="blue"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">Feature modify permission denied</ServiceException>') != -1,
"WFS/Transactions Update succeed\n%s" % response)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data.format(color="yellow"), "LAYER_PERM=no")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">Feature update permission denied</ServiceException>') != -1,
"WFS/Transactions Update succeed\n%s" % response)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data.format(color="yellow"), "LAYER_PERM=yes")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Update don't succeed\n%s" % response)
self._test_colors({1: "yellow"})
def test_wfstransaction_delete_fullaccess(self):
data = WFS_TRANSACTION_DELETE.format(id="1", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_fullaccess(data)
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Delete don't succeed\n%s" % response)
def test_wfstransaction_delete_restricted(self):
data = WFS_TRANSACTION_DELETE.format(id="1", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_restricted(data)
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">Feature modify permission denied</ServiceException>') != -1,
"WFS/Transactions Delete succeed\n%s" % response)
data_update = WFS_TRANSACTION_UPDATE.format(id="1", color="red", xml_ns=XML_NS)
response, headers = self._post_fullaccess(data_update)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data, "LAYER_PERM=no")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">Feature delete permission denied</ServiceException>') != -1,
"WFS/Transactions Delete succeed\n%s" % response)
response, headers = self._post_restricted(data, "LAYER_PERM=yes")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Delete don't succeed\n%s" % response)
# # Subset String # #
# # WMS # # WMS # # WMS # #
def test_wms_getmap_subsetstring(self):
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Country,Hello_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_fullaccess(query_string)
self._img_diff_error(response, headers, "WMS_GetMap")
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Hello_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_restricted(query_string)
self._img_diff_error(response, headers, "Restricted_WMS_GetMap")
def test_wms_getmap_subsetstring_with_filter(self):
""" test that request filter and access control subsetStrings are correctly combined. Note that for this
test we reuse the projectsubsetstring reference images as we are using filter requests to set the same
filter " pkuid in (7,8) " as the project subsetstring uses for its test.
"""
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Hello_Filter_SubsetString",
"FILTER": "Hello_Filter_SubsetString:\"pkuid\" IN ( 7 , 8 )",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_fullaccess(query_string)
self._img_diff_error(response, headers, "WMS_GetMap_projectsubstring")
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Hello_Filter_SubsetString",
"FILTER": "Hello_Filter_SubsetString:\"pkuid\" IN ( 7 , 8 )",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_restricted(query_string)
self._img_diff_error(response, headers, "Restricted_WMS_GetMap_projectsubstring")
def test_wms_getmap_projectsubsetstring(self):
""" test that project set layer subsetStrings are honored"""
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Hello_Project_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_fullaccess(query_string)
self._img_diff_error(response, headers, "WMS_GetMap_projectsubstring")
query_string = "&".join(["%s=%s" % i for i in list({
"MAP": urllib.parse.quote(self.projectPath),
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetMap",
"LAYERS": "Hello_Project_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857"
}.items())])
response, headers = self._get_restricted(query_string)
self._img_diff_error(response, headers, "Restricted_WMS_GetMap_projectsubstring")
def test_wms_getfeatureinfo_subsetstring(self):
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Country,Hello_SubsetString",
"QUERY_LAYERS": "Hello_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "56",
"Y": "144",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeatureInfo Hello/1\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No good result in GetFeatureInfo Hello/1\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeatureInfo Hello/1\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No good result in GetFeatureInfo Hello/1\n%s" % response)
def test_wms_getfeatureinfo_subsetstring2(self):
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Country,Hello_SubsetString",
"QUERY_LAYERS": "Hello_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "146",
"Y": "160",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result result in GetFeatureInfo Hello/2\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No good result result in GetFeatureInfo Hello/2\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Unexpected result result in GetFeatureInfo Hello/2\n%s" % response)
def test_wms_getfeatureinfo_projectsubsetstring(self):
"""test that layer subsetStrings set in projects are honored. This test checks for a feature which should be filtered
out by the project set layer subsetString
"""
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Hello_Project_SubsetString",
"QUERY_LAYERS": "Hello_Project_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "56",
"Y": "144",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Project set layer subsetString not honored in WMS GetFeatureInfo/1\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Project set layer subsetString not honored in WMS GetFeatureInfo when access control applied/1\n%s" % response)
def test_wms_getfeatureinfo_projectsubsetstring5(self):
"""test that layer subsetStrings set in projects are honored. This test checks for a feature which should pass
both project set layer subsetString and access control filters
"""
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Hello_Project_SubsetString",
"QUERY_LAYERS": "Hello_Project_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-1623412,3146330,-1603412,3166330",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "146",
"Y": "160",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result result in GetFeatureInfo Hello/2\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"No good result result in GetFeatureInfo Hello/2\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result result in GetFeatureInfo Hello/2\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"No good result result in GetFeatureInfo Hello/2\n%s" % response)
def test_wms_getfeatureinfo_projectsubsetstring3(self):
"""test that layer subsetStrings set in projects are honored. This test checks for a feature which should pass
the project set layer subsetString but fail the access control checks
"""
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Hello_Project_SubsetString",
"QUERY_LAYERS": "Hello_Project_SubsetString",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "3415650,2018968,3415750,2019968",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "146",
"Y": "160",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result result in GetFeatureInfo Hello/2\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>8</qgs:pk>") != -1,
"No good result result in GetFeatureInfo Hello/2\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Unexpected result from GetFeatureInfo Hello/2\n%s" % response)
def test_wms_getfeatureinfo_subsetstring_with_filter(self):
"""test that request filters are honored. This test checks for a feature which should be filtered
out by the request filter
"""
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Hello_Filter_SubsetString",
"QUERY_LAYERS": "Hello_Filter_SubsetString",
"FILTER": "Hello_Filter_SubsetString:\"pkuid\" IN ( 7 , 8 )",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-16817707,-4710778,5696513,14587125",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "56",
"Y": "144",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Request filter not honored in WMS GetFeatureInfo/1\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Request filter not honored in WMS GetFeatureInfo when access control applied/1\n%s" % response)
def test_wms_getfeatureinfo_projectsubsetstring4(self):
"""test that request filters are honored. This test checks for a feature which should pass
both request filter and access control filters
"""
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Hello_Filter_SubsetString",
"QUERY_LAYERS": "Hello_Filter_SubsetString",
"FILTER": "Hello_Filter_SubsetString:\"pkuid\" IN ( 7 , 8 )",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "-1623412,3146330,-1603412,3166330",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "146",
"Y": "160",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result result in GetFeatureInfo Hello/2\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"No good result result in GetFeatureInfo Hello/2\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result result in GetFeatureInfo Hello/2\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"No good result result in GetFeatureInfo Hello/2\n%s" % response)
def test_wms_getfeatureinfo_projectsubsetstring2(self):
"""test that request filters are honored. This test checks for a feature which should pass
the request filter but fail the access control checks
"""
query_string = "&".join(["%s=%s" % i for i in list({
"SERVICE": "WMS",
"VERSION": "1.1.1",
"REQUEST": "GetFeatureInfo",
"LAYERS": "Hello_Filter_SubsetString",
"QUERY_LAYERS": "Hello_Filter_SubsetString",
"FILTER": "Hello_Filter_SubsetString:\"pkuid\" IN ( 7 , 8 )",
"STYLES": "",
"FORMAT": "image/png",
"BBOX": "3415650,2018968,3415750,2019968",
"HEIGHT": "500",
"WIDTH": "500",
"SRS": "EPSG:3857",
"FEATURE_COUNT": "10",
"INFO_FORMAT": "application/vnd.ogc.gml",
"X": "146",
"Y": "160",
"MAP": urllib.parse.quote(self.projectPath)
}.items())])
response, headers = self._get_fullaccess(query_string)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result result in GetFeatureInfo Hello/2\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>8</qgs:pk>") != -1,
"No good result result in GetFeatureInfo Hello/2\n%s" % response)
response, headers = self._get_restricted(query_string)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Unexpected result from GetFeatureInfo Hello/2\n%s" % response)
# # WFS # # WFS # # WFS # #
def test_wfs_getfeature_subsetstring(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>1</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No good result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>1</qgs:pk>") != -1,
"No good result in GetFeature\n%s" % response)
def test_wfs_getfeature_subsetstring2(self):
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>2</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>2</qgs:pk>") != -1,
"No good result in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Unexpected result in GetFeature\n%s" % response)
def test_wfs_getfeature_project_subsetstring(self):
"""Tests access control with a subset string already applied to a layer in a project
'Hello_Project_SubsetString' layer has a subsetString of "pkuid in (7,8)"
        This test checks retrieving a feature which should be available both with and without access control
"""
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Project_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>7</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
# should be one result
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Feature with pkuid=7 not found in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>7</qgs:pk>") != -1,
"Feature with pkuid=7 not found in GetFeature, has been incorrectly filtered out by access controls\n%s" % response)
def test_wfs_getfeature_project_subsetstring2(self):
"""Tests access control with a subset string already applied to a layer in a project
'Hello_Project_SubsetString' layer has a subsetString of "pkuid in (7,8)"
This test checks for a feature which should be filtered out by access controls
"""
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Project_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>8</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
# should be one result
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") != -1,
"No result in GetFeature\n%s" % response)
self.assertTrue(
str(response).find("<qgs:pk>8</qgs:pk>") != -1,
"Feature with pkuid=8 not found in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Feature with pkuid=8 was found in GetFeature, but should have been filtered out by access controls\n%s" % response)
def test_wfs_getfeature_project_subsetstring3(self):
"""Tests access control with a subset string already applied to a layer in a project
'Hello_Project_SubsetString' layer has a subsetString of "pkuid in (7,8)"
        This test checks for a feature which should be filtered out by the project subsetString.
        E.g. pkuid 6 passes the access control checks, but should not be shown because of the project layer subsetString
"""
data = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="Hello_Project_SubsetString" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>pkuid</ogc:PropertyName>
<ogc:Literal>6</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(xml_ns=XML_NS)
        # should be no results, since pkuid 6 should be filtered out by the project subsetString
response, headers = self._post_fullaccess(data)
self.assertTrue(
str(response).find("<qgs:pk>") == -1,
"Project based layer subsetString not respected in GetFeature\n%s" % response)
response, headers = self._post_restricted(data)
self.assertFalse(
str(response).find("<qgs:pk>") != -1,
"Project based layer subsetString not respected in GetFeature with restricted access\n%s" % response)
def _handle_request(self, restricted, *args):
self._accesscontrol._active = restricted
result = self._result(self._server.handleRequest(*args))
return result
def _result(self, data):
headers = {}
for line in data[0].decode('UTF-8').split("\n"):
if line != "":
                # split only on the first ':' so header values may contain colons
                header = line.split(":", 1)
self.assertEqual(len(header), 2, line)
headers[str(header[0])] = str(header[1]).strip()
return data[1], headers
def _get_fullaccess(self, query_string):
self._server.putenv("REQUEST_METHOD", "GET")
result = self._handle_request(False, query_string)
self._server.putenv("REQUEST_METHOD", '')
return result
def _get_restricted(self, query_string):
self._server.putenv("REQUEST_METHOD", "GET")
result = self._handle_request(True, query_string)
self._server.putenv("REQUEST_METHOD", '')
return result
def _post_fullaccess(self, data, query_string=None):
self._server.putenv("REQUEST_METHOD", "POST")
self._server.putenv("REQUEST_BODY", data)
self._server.putenv("QGIS_PROJECT_FILE", self.projectPath)
result = self._handle_request(False, query_string)
self._server.putenv("REQUEST_METHOD", '')
self._server.putenv("REQUEST_BODY", '')
self._server.putenv("QGIS_PROJECT_FILE", '')
return result
def _post_restricted(self, data, query_string=None):
self._server.putenv("REQUEST_METHOD", "POST")
self._server.putenv("REQUEST_BODY", data)
self._server.putenv("QGIS_PROJECT_FILE", self.projectPath)
result = self._handle_request(True, query_string)
self._server.putenv("REQUEST_METHOD", '')
self._server.putenv("REQUEST_BODY", '')
self._server.putenv("QGIS_PROJECT_FILE", '')
return result
def _img_diff(self, image, control_image, max_diff, max_size_diff=QSize()):
temp_image = os.path.join(tempfile.gettempdir(), "%s_result.png" % control_image)
with open(temp_image, "wb") as f:
f.write(image)
control = QgsRenderChecker()
control.setControlPathPrefix("qgis_server_accesscontrol")
control.setControlName(control_image)
control.setRenderedImage(temp_image)
if max_size_diff.isValid():
control.setSizeTolerance(max_size_diff.width(), max_size_diff.height())
return control.compareImages(control_image), control.report()
def _img_diff_error(self, response, headers, image, max_diff=10, max_size_diff=QSize()):
self.assertEqual(
headers.get("Content-Type"), "image/png",
"Content type is wrong: %s" % headers.get("Content-Type"))
test, report = self._img_diff(response, image, max_diff, max_size_diff)
with open(os.path.join(tempfile.gettempdir(), image + "_result.png"), "rb") as rendered_file:
encoded_rendered_file = base64.b64encode(rendered_file.read())
message = "Image is wrong\n%s\nImage:\necho '%s' | base64 -d >%s/%s_result.png" % (
report, encoded_rendered_file.strip(), tempfile.gettempdir(), image
)
with open(os.path.join(tempfile.gettempdir(), image + "_result_diff.png"), "rb") as diff_file:
encoded_diff_file = base64.b64encode(diff_file.read())
message += "\nDiff:\necho '%s' | base64 -d > %s/%s_result_diff.png" % (
encoded_diff_file.strip(), tempfile.gettempdir(), image
)
self.assertTrue(test, message)
def _geo_img_diff(self, image_1, image_2):
if os.name == 'nt':
# Not supported on Windows due to #13061
return 0
with open(os.path.join(tempfile.gettempdir(), image_2), "wb") as f:
f.write(image_1)
image_1 = gdal.Open(os.path.join(tempfile.gettempdir(), image_2), GA_ReadOnly)
assert image_1, "No output image written: " + image_2
        expected_image_path = os.path.join(self.testdata_path, "results", image_2)
        image_2 = gdal.Open(expected_image_path, GA_ReadOnly)
        assert image_2, "No expected image found: " + expected_image_path
if image_1.RasterXSize != image_2.RasterXSize or image_1.RasterYSize != image_2.RasterYSize:
image_1 = None
image_2 = None
return 1000 # wrong size
        # Read each raster once and sum the squared per-pixel differences
        data_1 = image_1.ReadAsArray().astype(float)
        data_2 = image_2.ReadAsArray().astype(float)
        square_sum = ((data_1 - data_2) ** 2).sum()
# Explicitly close GDAL datasets
image_1 = None
image_2 = None
return sqrt(square_sum)
def _test_colors(self, colors):
for id, color in list(colors.items()):
response, headers = self._post_fullaccess(
"""<?xml version="1.0" encoding="UTF-8"?>
<wfs:GetFeature {xml_ns}>
<wfs:Query typeName="db_point" srsName="EPSG:3857" xmlns:feature="http://www.qgis.org/gml">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc"><ogc:PropertyIsEqualTo>
<ogc:PropertyName>gid</ogc:PropertyName>
<ogc:Literal>{id}</ogc:Literal>
</ogc:PropertyIsEqualTo></ogc:Filter></wfs:Query></wfs:GetFeature>""".format(id=id, xml_ns=XML_NS)
)
self.assertTrue(
str(response).find("<qgs:color>{color}</qgs:color>".format(color=color)) != -1,
"Wrong color in result\n%s" % response)
if __name__ == "__main__":
unittest.main()
|
sql-machine-learning/sqlflow
|
refs/heads/develop
|
python/runtime/pai/submitter_evaluate.py
|
1
|
# Copyright 2020 The SQLFlow Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import runtime.temp_file as temp_file
from runtime import db
from runtime.diagnostics import SQLFlowDiagnostic
from runtime.model import EstimatorType
from runtime.pai import cluster_conf, pai_model, table_ops
from runtime.pai.get_pai_tf_cmd import (ENTRY_FILE, JOB_ARCHIVE_FILE,
PARAMS_FILE, get_pai_tf_cmd)
from runtime.pai.prepare_archive import prepare_archive
from runtime.pai.submit_pai_task import submit_pai_task
from runtime.pai_local.try_run import try_pai_local_run
from runtime.step.create_result_table import create_evaluate_table
def submit_pai_evaluate(datasource,
original_sql,
select,
label_name,
model,
model_params,
result_table,
user=""):
"""Submit a PAI evaluation task
Args:
datasource: string
Like: maxcompute://ak:sk@domain.com/api?
curr_project=test_ci&scheme=http
        original_sql: string
            Original "TO EVALUATE" statement.
        select: string
            SQL statement to get the evaluation data set.
        model: string
            Model to load and evaluate.
        label_name: string
            The label name to evaluate.
        model_params: dict
            Params for evaluation, corresponding to the WITH clause.
        result_table: string
            The table name to save the evaluation result.
        user: string
            A string to identify the user, used to load the model from the
            user's directory.
"""
params = dict(locals())
project = table_ops.get_project(datasource)
if result_table.count(".") == 0:
result_table = "%s.%s" % (project, result_table)
params["result_table"] = result_table
oss_model_path = pai_model.get_oss_model_save_path(datasource,
model,
user=user)
model_type, estimator = pai_model.get_saved_model_type_and_estimator(
datasource, model)
if model_type == EstimatorType.PAIML:
raise SQLFlowDiagnostic("PAI model evaluation is not supported yet.")
if model_type == EstimatorType.XGBOOST:
params["entry_type"] = "evaluate_xgb"
validation_metrics = model_params.get("validation.metrics",
"accuracy_score")
else:
params["entry_type"] = "evaluate_tf"
validation_metrics = model_params.get("validation.metrics", "Accuracy")
validation_metrics = [m.strip() for m in validation_metrics.split(",")]
with db.connect_with_data_source(datasource) as conn:
result_column_names = create_evaluate_table(conn, result_table,
validation_metrics)
with table_ops.create_tmp_tables_guard(select, datasource) as data_table:
params["pai_table"] = data_table
params["result_column_names"] = result_column_names
if try_pai_local_run(params, oss_model_path):
return
conf = cluster_conf.get_cluster_config(model_params)
with temp_file.TemporaryDirectory(prefix="sqlflow", dir="/tmp") as cwd:
prepare_archive(cwd, estimator, oss_model_path, params)
cmd = get_pai_tf_cmd(
conf, "file://" + os.path.join(cwd, JOB_ARCHIVE_FILE),
"file://" + os.path.join(cwd, PARAMS_FILE), ENTRY_FILE, model,
oss_model_path, data_table, "", result_table, project)
submit_pai_task(cmd, datasource)
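# A hypothetical invocation sketch (not part of the original module; the
# datasource URI, model and table names below are made up for illustration):
#
#   submit_pai_evaluate(
#       datasource="maxcompute://ak:sk@domain.com/api?curr_project=test_ci&scheme=http",
#       original_sql='SELECT * FROM test_table TO EVALUATE my_model '
#                    'WITH validation.metrics="accuracy_score" '
#                    'LABEL class INTO evaluate_result;',
#       select="SELECT * FROM test_table",
#       label_name="class",
#       model="my_model",
#       model_params={"validation.metrics": "accuracy_score"},
#       result_table="evaluate_result")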
|
tkerola/chainer
|
refs/heads/master
|
tests/chainer_tests/functions_tests/array_tests/test_get_item.py
|
8
|
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
_backend_params = (
# CPU tests
testing.product({
'use_cuda': [False],
'use_ideep': ['never', 'always'],
})
# GPU tests
+ [{'use_cuda': True}]
# ChainerX tests
+ [
{'use_chainerx': True, 'chainerx_device': 'native:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:1'},
])
@testing.inject_backend_tests(None, _backend_params)
@testing.parameterize(*testing.product_dict(
[{'dtype': numpy.float16},
{'dtype': numpy.float32},
{'dtype': numpy.float64},
],
[{'axes': [1, 2], 'offsets': 0},
{'axes': [1, 2], 'offsets': [0, 1, 1]},
{'axes': 1, 'offsets': 1},
{'axes': 1, 'offsets': [0, 1, 1]},
{'axes': [], 'offsets': 0, 'new_axes': 0},
{'axes': [], 'offsets': 0, 'new_axes': 2},
{'axes': [], 'offsets': 0, 'new_axes': 3},
{'slices': (1, -1, 0)},
{'slices': (1, -1)},
{'slices': (1, Ellipsis, -1)},
{'slices': (1, None, Ellipsis, None, -1)},
]
))
class TestGetItem(testing.FunctionTestCase):
def setUp(self):
shape = (4, 2, 1)
if not hasattr(self, 'slices'):
axes = self.axes
offsets = self.offsets
# Convert axes, offsets and shape to slices
if isinstance(offsets, int):
offsets = tuple([offsets] * len(shape))
if isinstance(axes, int):
axes = tuple([axes])
slices = [slice(None)] * len(shape)
for axis in axes:
slices[axis] = slice(
offsets[axis], offsets[axis] + shape[axis])
if hasattr(self, 'new_axes'):
slices.insert(self.new_axes, None)
self.axes = axes
self.offsets = offsets
self.slices = tuple(slices)
self.check_backward_options.update({'atol': 5e-4, 'rtol': 5e-4})
self.check_double_backward_options.update({'atol': 1e-3, 'rtol': 1e-3})
def generate_inputs(self):
x = numpy.random.uniform(-1, 1, (4, 3, 2)).astype(self.dtype)
return x,
def forward(self, inputs, device):
x, = inputs
y = functions.get_item(x, self.slices)
return y,
def forward_expected(self, inputs):
x, = inputs
y = x[self.slices]
return numpy.asarray(y),
@testing.inject_backend_tests(None, _backend_params)
@testing.parameterize(*testing.product_dict(
[{'dtype': numpy.float16},
{'dtype': numpy.float32},
{'dtype': numpy.float64},
],
[{'slices': []},
{'slices': ([],)},
{'slices': ([[]],)},
{'slices': numpy.array([], dtype=numpy.bool)},
{'slices': (1, [1])},
{'slices': ([1], slice(1, 2))},
{'slices': [1, 0]},
{'slices': ([1, 0],)},
{'slices': numpy.array([[1, 0], [2, 3]])},
{'slices': ([1, 0], [1, 1])},
{'slices': ([1, 0], slice(None), [[1, 1], [1, 1]])},
{'slices': ([1, 0], slice(1, 2), [0, 0])},
{'slices': ([[1, 1], [1, 0]], slice(1, 2), 1)},
{'slices': numpy.array([True] * 18 + [False] * 6).reshape(4, 3, 2)},
{'slices': numpy.array([True, False, False, True])},
{'slices': (slice(None), numpy.array([True, False, True]))},
{'slices': numpy.array([False, False, False, False])},
{'slices': (3, 2, Ellipsis, 1)},
{'slices': (numpy.array(False)), 'input_shape': ()},
{'slices': (numpy.array(True)), 'input_shape': ()},
]
))
class TestGetItemAdvanced(testing.FunctionTestCase):
input_shape = (4, 3, 2)
def setUp(self):
self.check_backward_options.update({'atol': 5e-4, 'rtol': 5e-4})
self.check_double_backward_options.update({'atol': 1e-3, 'rtol': 1e-3})
def generate_inputs(self):
x = numpy.random.uniform(-1, 1, self.input_shape).astype(self.dtype)
return x,
def _convert_slices(self, slices, device):
# Converts advanced indexing slices (of numpy.ndarray) to respective
# backend arrays.
if isinstance(slices, list):
return [self._convert_slices(a, device) for a in slices]
if isinstance(slices, tuple):
return tuple([self._convert_slices(a, device) for a in slices])
if isinstance(slices, numpy.ndarray):
return device.send(slices)
return slices
def forward(self, inputs, device):
x, = inputs
slices = self._convert_slices(self.slices, device)
y = functions.get_item(x, slices)
return y,
def forward_expected(self, inputs):
x, = inputs
y = x[self.slices]
return numpy.asarray(y),
@testing.parameterize(
{'slices': ([1, 0], [1, 1]), 'sliced_shape': (2, 2)},
{'slices': ([1, 0], slice(None), [[1, 1], [1, 1]]),
'sliced_shape': (2, 2, 3)},
{'slices': ([1, 0], [1, 1], [0, 0]), 'sliced_shape': (2,)},
{'slices': (slice(None), numpy.array([True, False, True])),
'sliced_shape': (4, 2, 2)},
)
class TestCupyIndicesGetItem(unittest.TestCase):
def setUp(self):
self.x_data = numpy.random.uniform(
-1, 1, (4, 3, 2)).astype(numpy.float32)
self.gy_data = numpy.random.uniform(
-1, 1, self.sliced_shape).astype(numpy.float32)
def check_forward(self, x_data):
slices = []
for i, s in enumerate(self.slices):
if isinstance(s, numpy.ndarray):
s = chainer.backends.cuda.cupy.array(s)
if isinstance(s, list):
s = chainer.backends.cuda.cupy.array(s, dtype=numpy.int32)
slices.append(s)
slices = tuple(slices)
x = chainer.Variable(x_data)
y = functions.get_item(x, slices)
self.assertEqual(y.data.dtype, numpy.float32)
numpy.testing.assert_equal(cuda.to_cpu(x_data)[self.slices],
cuda.to_cpu(y.data))
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x_data))
def check_backward(self, x_data, y_grad):
slices = []
for i, s in enumerate(self.slices):
if isinstance(s, numpy.ndarray):
s = chainer.backends.cuda.cupy.array(s)
if isinstance(s, list):
s = chainer.backends.cuda.cupy.array(s, dtype=numpy.int32)
slices.append(s)
slices = tuple(slices)
def f(x):
return functions.get_item(x, slices)
gradient_check.check_backward(
f, (x_data,), y_grad, dtype='d')
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x_data),
cuda.to_gpu(self.gy_data))
class TestInvalidGetItem(unittest.TestCase):
def setUp(self):
self.default_debug = chainer.is_debug()
chainer.set_debug(True)
self.x_data = numpy.random.uniform(-1, 1, (4, 3, 2))
def tearDown(self):
chainer.set_debug(self.default_debug)
def test_multiple_ellipsis(self):
with self.assertRaises(ValueError):
functions.get_item(self.x_data, (Ellipsis, Ellipsis))
testing.run_module(__name__, __file__)
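# Quick reference, as a sketch (the shapes here are illustrative, not taken
# from the tests above): functions.get_item mirrors NumPy basic/advanced
# indexing on a chainer.Variable, e.g.
#
#   x = chainer.Variable(numpy.arange(24, dtype=numpy.float32).reshape(4, 3, 2))
#   y = functions.get_item(x, (slice(None), [0, 2]))  # y.shape == (4, 2, 2)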
|
indirectlylit/kolibri
|
refs/heads/develop
|
kolibri/core/device/test/prefixed_locale_middleware_urls.py
|
4
|
from __future__ import unicode_literals
from django.conf.urls import include
from django.conf.urls import url
from .locale_middleware_urls import patterns
path_prefix = "test/"
urlpatterns = [url(path_prefix, include(patterns))]
|
agimofcarmen/xen-api
|
refs/heads/master
|
scripts/mtcerrno-to-ocaml.py
|
34
|
#!/usr/bin/env python
# Convert the MTC exit codes into a disjoint union type. Each line in the file looks like:
# errdef, MTC_EXIT_SUCCESS, 0, 0, "",
# Usage:
# cat ../xha.hg/include/mtcerrno.def | ./scripts/mtcerrno-to-ocaml.py > ocaml/xapi/xha_errno.ml
import sys
def parse(file):
all = []
while True:
line = file.readline()
if line == "":
return all
if line.startswith("errdef, MTC_EXIT"):
bits = line.split(",")
name = bits[1].strip()
code = bits[2].strip()
desc = bits[4].strip()
this = { "name": name, "code": code, "desc": desc }
all.append(this)
def ctor_name(x):
ctor = x['name']
return ctor[0].upper() + ctor[1:].lower()
def make_datatype(all):
print "type code = "
for x in all:
print "| %s" % ctor_name(x)
def to_string(all):
print "let to_string : code -> string = function"
for x in all:
print "| %s -> \"%s\"" % (ctor_name(x), x['name'])
def to_description_string(all):
print "let to_description_string : code -> string = function"
for x in all:
print "| %s -> %s" % (ctor_name(x), x['desc'])
def of_int(all):
print "let of_int : int -> code = function"
for x in all:
print "| %s -> %s" % (x['code'], ctor_name(x))
print "| x -> failwith (Printf.sprintf \"Unrecognised MTC exit code: %d\" x)"
if __name__ == "__main__":
all = parse(sys.stdin)
print "(* Autogenerated by %s -- do not edit *)" % (sys.argv[0])
make_datatype(all)
to_string(all)
to_description_string(all)
of_int(all)
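# Worked example, as a sketch (the input line below is hypothetical): given
#   errdef, MTC_EXIT_SUCCESS, 0, 0, "success",
# the generated OCaml would contain:
#   | Mtc_exit_success                          (in the datatype)
#   | Mtc_exit_success -> "MTC_EXIT_SUCCESS"    (in to_string)
#   | Mtc_exit_success -> "success"             (in to_description_string)
#   | 0 -> Mtc_exit_success                     (in of_int)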
|
shixiaobo8/yjy_django_omsa
|
refs/heads/master
|
Myapp/aliyun/oss.py
|
1
|
#! /usr/bin/env python
# -*- coding:utf8 -*-
"""
Aliyun SDK wrapper for OSS.
For installation steps, see the official docs: https://develop.aliyun.com/sdk/python?spm=5176.doc25699.2.2.seCDuq
"""
from __future__ import print_function
import oss2
import sys
class oss():
    def __init__(self):
        self.endpoint = 'oss-cn-beijing.aliyuncs.com'
        # oss2.Auth is the credentials class; pass the instance to Service
        self.auth = oss2.Auth('LTAIYj7b9Fm1rrH2', '6rWkgQX8yiIDrOY70vcy19EUuHvjW2')
        self.service = oss2.Service(self.auth, self.endpoint)
def getBuckets(self):
return oss2.BucketIterator(self.service)
#return self.service.list_buckets()
def newBucket(self,bucketname='',endpoint='oss-cn-beijing.aliyuncs.com'):
        # Create a bucket on the given endpoint
        # param: endpoint: defaults to North China 2 (Beijing)
        # param: bucketname: name of the bucket to create
bucket = oss2.Bucket(self.auth,endpoint,bucketname)
bucket.create_bucket(oss2.models.BUCKET_ACL_PUBLIC_READ)
def putFiles(self,onputfilename='',local_filename='',bucketname='',endpoint='oss-cn-beijing.aliyuncs.com'):
bucket = oss2.Bucket(self.auth,endpoint,bucketname)
        bucket.put_object_from_file(onputfilename, local_filename, progress_callback=self.percentage)
def percentage(self,consumed_bytes, total_bytes):
if total_bytes:
rate = int(100 * (float(consumed_bytes) / float(total_bytes)))
print('\r{0}% '.format(rate), end='')
sys.stdout.flush()
def download(self,endpoint,bucketname,remote_filename,local_filename):
bucket = oss2.Bucket(self.auth,endpoint,bucketname)
        bucket.get_object_to_file(remote_filename, local_filename, progress_callback=self.percentage)
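# Minimal usage sketch (the bucket and file names below are hypothetical and
# assume the credentials configured above are valid):
if __name__ == '__main__':
    client = oss()
    # List all buckets visible to this account.
    for bucket_info in client.getBuckets():
        print(bucket_info.name)
    # Upload a local file into a bucket, reporting progress.
    client.putFiles(onputfilename='remote.txt',
                    local_filename='local.txt',
                    bucketname='my-bucket')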
|
bloomreach/briefly
|
refs/heads/master
|
src/briefly/dag.py
|
1
|
#
# Copyright 2013-2015 BloomReach, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from properties import Properties
class GraphNode(object):
  ''' Internal graph node data structure used by this module. '''
def __init__(self, attributes=None, **kwargs):
''' self.parents and self.children cannot be reassigned by mistake. '''
self.__dict__['parents'] = set()
self.__dict__['children'] = set()
self.attributes = attributes
self.set_attributes(attributes, **kwargs)
def __setattr__(self, name, value):
''' Override setattr to control the access. '''
if name in ('parents', 'children'):
raise AttributeError("%s is an immutable attribute." % (name,))
super(GraphNode, self).__setattr__(name, value)
def set_attributes(self, attributes={}, **kwargs):
''' Setting the attributes. '''
if isinstance(self.attributes, dict):
self.attributes.update(attributes)
self.attributes.update(kwargs)
else:
self.attributes = attributes
def unpack(self, node_key):
    ''' Unpack the node into (node_key, attributes) if attributes were provided,
    or simply return node_key if no attributes were previously provided.
'''
if isinstance(self.attributes, dict):
if self.attributes:
return node_key, self.attributes
else:
return node_key
if self.attributes is not None:
return node_key, self.attributes
return node_key
def __repr__(self):
''' Implement repr() protocol.'''
parents_str = ','.join(map(repr, self.parents))
children_str = ','.join(map(repr, self.children))
return '(p: %s, c: %s)' % (parents_str,
children_str)
class DependencyGraph(object):
  ''' Directed acyclic graph.
Note that the data structure does not impose synchronization.
The client has to synchronize the access of the graph.
'''
def __init__(self):
''' self.node_map represents { node_key : GraphNode }. '''
self.clear()
def clear(self):
''' Clear the entire graph.'''
self.node_map = {}
def __contains__(self, node):
    ''' Implement the 'in' protocol.'''
return self.has_node(node)
def __iter__(self):
    ''' Implement the iterator protocol.'''
return self.node_map.iterkeys()
def __len__(self):
    ''' Implement the len() protocol.'''
return len(self.node_map)
def __repr__(self):
''' Implement repr() protocol.'''
return repr(self.node_map)
@property
def nodes(self):
''' Return all nodes in the graph.'''
nodes = []
for k, graph_node in self.node_map.iteritems():
nodes.append(graph_node.unpack(k))
return nodes
def add_edges(self, edges):
''' Add edges into graph. '''
for edge in edges:
self.add_edge(*edge)
def add_edge(self, parent, child):
''' Add an edge parent -> child.'''
if parent is child:
# degenerate case, do nothing.
      assert parent in self.node_map, 'node %s does not exist.' % (repr(parent),)
      return
    assert parent in self.node_map, 'node %s does not exist.' % (repr(parent),)
    assert child in self.node_map, 'node %s does not exist.' % (repr(child),)
self.get_children(parent).add(child)
self.get_parents(child).add(parent)
def add_node(self, node_key, attrs_or_property=None, **argd):
''' Add a node into the graph. '''
self.node_map.setdefault(node_key,
GraphNode(attrs_or_property, **argd))
def get_node_set(self, node_key, relation='parents'):
''' Common methods called by get_parents and get_children. '''
graph_node = self.node_map.get(node_key)
return getattr(graph_node, relation)
def get_parents(self, node_key):
''' Get a set of parents node keys from node_key.'''
return self.get_node_set(node_key, relation='parents')
def get_children(self, node_key):
''' Get a set of children node keys from node_key.'''
return self.get_node_set(node_key, relation='children')
def get_start_nodes(self):
    ''' Get start nodes (nodes that have no incoming edges) from the graph. '''
nodes = []
for node_key, graph_node in self.node_map.iteritems():
if not self.has_parent(node_key):
nodes.append(graph_node.unpack(node_key))
return nodes
def get_bridge_nodes(self, node_key):
    ''' Get bridge nodes given a node_key (nodes that have node_key as their only parent).
    If node_key is removed from the graph, bridge nodes become the next 'start nodes'.
'''
nodes = []
for c in self.get_children(node_key):
parents = self.get_parents(c)
if len(parents) == 1 and node_key in parents:
graph_node = self.node_map[c]
nodes.append(graph_node.unpack(c))
return nodes
def remove(self, node_key):
    ''' Remove a node from the graph. Throws an exception if the node does not exist.'''
for p in self.get_parents(node_key):
self.get_children(p).remove(node_key)
for c in self.get_children(node_key):
self.get_parents(c).remove(node_key)
if node_key in self.node_map:
self.node_map.pop(node_key)
def has_parent(self, node_key):
''' Determine if a node has parent(s).'''
return len(self.get_parents(node_key)) > 0
def has_child(self, node_key):
''' Determine if a node has child(ren).'''
return len(self.get_children(node_key)) > 0
def has_node(self, node_key):
''' Check if the graph has the node_key.'''
return node_key in self.node_map
def has_edge(self, parent, child):
''' Check if the graph has an edge parent -> child.'''
assert self.has_node(parent)
assert self.has_node(child)
has_cinp = (child in self.get_children(parent))
has_pinc = (parent in self.get_parents(child))
assert has_cinp == has_pinc
return has_cinp
def is_empty(self):
    ''' Check if the graph contains no nodes.'''
return len(self.node_map) == 0
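# A minimal usage sketch (not part of the original module): exercises
# add_node/add_edges and the start/bridge node queries defined above.
if __name__ == '__main__':
  g = DependencyGraph()
  for key in ('a', 'b', 'c'):
    g.add_node(key)
  g.add_edges([('a', 'b'), ('a', 'c')])
  assert g.get_start_nodes() == ['a']
  assert sorted(g.get_bridge_nodes('a')) == ['b', 'c']
  g.remove('a')
  assert sorted(g.get_start_nodes()) == ['b', 'c']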
|
perGENIE/pergenie-web
|
refs/heads/master
|
pergenie/misc/apps/population/models.py
|
3
|
from lib.r.r import projection
from django.conf import settings
def project_new_person(scale, info):
"""Project new person onto PCA coordinate.
args: str(scale)
          info: {'user_id': '', 'name': '', ...}
retval: {'position': [x, y], 'label': '', 'map_label': ''}
"""
# TODO: currently only for `global` scale
    # note the trailing comma: ('global') would be a plain string, not a tuple
    if scale in ('global',):
record = {'position': projection(scale, info),
'label': info['name'],
'map_label': ''}
else:
record = None
return record
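# Example call, as a sketch (the info dict below is hypothetical):
#   project_new_person('global', {'user_id': 'u1', 'name': 'Alice'})
#   # -> {'position': [x, y], 'label': 'Alice', 'map_label': ''}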
def get_people(scale):
"""Get points of people in PCA coordinate.
args: str(scale)
retval: list(list(), ...)
"""
popcode2global = {'CHB': 'EastAsia', 'JPT': 'EastAsia', 'CHS': 'EastAsia',
'CEU': 'Europe', 'TSI': 'Europe', 'GBR': 'Europe', 'FIN': 'Europe', 'IBS': 'Europe',
'YRI': 'Africa', 'LWK': 'Africa', 'ASW': 'Africa',
'MXL': 'Americas', 'CLM': 'Americas', 'PUR': 'Americas'}
# with pymongo.MongoClient(host=settings.MONGO_URI) as c:
# db = c['pergenie']
# col = db['population_pca'][scale]
# if scale == 'global':
# records = [{'position': rec['position'],
# 'label': popcode2global[rec['popcode']],
# 'map_label': rec['popcode']} for rec in col.find()]
# else:
# records = [{'position': rec['position'],
# 'label': rec['popcode'],
# 'map_label': rec['popcode']} for rec in col.find()]
# return records
|
abdellatifkarroum/odoo
|
refs/heads/8.0
|
addons/account/project/__init__.py
|
427
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project
import report
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
shakamunyi/tensorflow
|
refs/heads/master
|
tensorflow/python/layers/convolutional.py
|
4
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# pylint: disable=unused-import,g-bad-import-order
"""Contains the convolutional layer classes and their functional aliases.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
class _Conv(base.Layer):
"""Abstract nD convolution layer (private, used as implementation base).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
rank: An integer, the rank of the convolution, e.g. "2" for 2D convolution.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of n integers, specifying the
length of the convolution window.
strides: An integer or tuple/list of n integers,
specifying the stride length of the convolution.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, ..., channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, ...)`.
dilation_rate: An integer or tuple/list of n integers, specifying
the dilation rate to use for dilated convolution.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any `strides` value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, rank,
filters,
kernel_size,
strides=1,
padding='valid',
data_format='channels_last',
dilation_rate=1,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(_Conv, self).__init__(trainable=trainable, name=name,
activity_regularizer=activity_regularizer,
**kwargs)
self.rank = rank
self.filters = filters
self.kernel_size = utils.normalize_tuple(kernel_size, rank, 'kernel_size')
self.strides = utils.normalize_tuple(strides, rank, 'strides')
self.padding = utils.normalize_padding(padding)
self.data_format = utils.normalize_data_format(data_format)
self.dilation_rate = utils.normalize_tuple(
dilation_rate, rank, 'dilation_rate')
self.activation = activation
self.use_bias = use_bias
self.kernel_initializer = kernel_initializer
self.bias_initializer = bias_initializer
self.kernel_regularizer = kernel_regularizer
self.bias_regularizer = bias_regularizer
self.kernel_constraint = kernel_constraint
self.bias_constraint = bias_constraint
self.input_spec = base.InputSpec(ndim=self.rank + 2)
def build(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis].value is None:
raise ValueError('The channel dimension of the inputs '
'should be defined. Found `None`.')
input_dim = input_shape[channel_axis].value
kernel_shape = self.kernel_size + (input_dim, self.filters)
self.kernel = self.add_variable(name='kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(name='bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
else:
self.bias = None
self.input_spec = base.InputSpec(ndim=self.rank + 2,
axes={channel_axis: input_dim})
with ops.name_scope(None, 'convolution', [self.kernel]) as name:
self._convolution_op = nn_ops.Convolution(
input_shape,
filter_shape=self.kernel.get_shape(),
dilation_rate=self.dilation_rate,
strides=self.strides,
padding=self.padding.upper(),
data_format=utils.convert_data_format(self.data_format,
self.rank + 2),
name=name)
self.built = True
def call(self, inputs):
# TODO(agarwal): do we need this name_scope ?
with ops.name_scope(None, 'convolution', [inputs, self.kernel]):
outputs = self._convolution_op(inputs, self.kernel)
if self.use_bias:
if self.data_format == 'channels_first':
if self.rank == 1:
# nn.bias_add does not accept a 1D input tensor.
bias = array_ops.reshape(self.bias, (1, self.filters, 1))
outputs += bias
if self.rank == 2:
outputs = nn.bias_add(outputs, self.bias, data_format='NCHW')
if self.rank == 3:
# As of Mar 2017, direct addition is significantly slower than
# bias_add when computing gradients. To use bias_add, we collapse Z
# and Y into a single dimension to obtain a 4D input tensor.
outputs_shape = outputs.shape.as_list()
outputs_4d = array_ops.reshape(outputs,
[outputs_shape[0], outputs_shape[1],
outputs_shape[2] * outputs_shape[3],
outputs_shape[4]])
outputs_4d = nn.bias_add(outputs_4d, self.bias, data_format='NCHW')
outputs = array_ops.reshape(outputs_4d, outputs_shape)
else:
outputs = nn.bias_add(outputs, self.bias, data_format='NHWC')
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_last':
space = input_shape[1:-1]
new_space = []
for i in range(len(space)):
new_dim = utils.conv_output_length(
space[i],
self.kernel_size[i],
padding=self.padding,
stride=self.strides[i],
dilation=self.dilation_rate[i])
new_space.append(new_dim)
return tensor_shape.TensorShape([input_shape[0]] + new_space +
[self.filters])
else:
space = input_shape[2:]
new_space = []
for i in range(len(space)):
new_dim = utils.conv_output_length(
space[i],
self.kernel_size[i],
padding=self.padding,
stride=self.strides[i],
dilation=self.dilation_rate[i])
new_space.append(new_dim)
return tensor_shape.TensorShape([input_shape[0], self.filters] +
new_space)
class Conv1D(_Conv):
"""1D convolution layer (e.g. temporal convolution).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of a single integer, specifying the
length of the 1D convolution window.
strides: An integer or tuple/list of a single integer,
specifying the stride length of the convolution.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
dilation_rate: An integer or tuple/list of a single integer, specifying
the dilation rate to use for dilated convolution.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any `strides` value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=1,
padding='valid',
data_format='channels_last',
dilation_rate=1,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
    super(Conv1D, self).__init__(
rank=1,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name, **kwargs)
def conv1d(inputs,
filters,
kernel_size,
strides=1,
padding='valid',
data_format='channels_last',
dilation_rate=1,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for 1D convolution layer (e.g. temporal convolution).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
inputs: Tensor input.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of a single integer, specifying the
length of the 1D convolution window.
strides: An integer or tuple/list of a single integer,
specifying the stride length of the convolution.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
dilation_rate: An integer or tuple/list of a single integer, specifying
the dilation rate to use for dilated convolution.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any `strides` value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
"""
layer = Conv1D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
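# Usage sketch for the functional interface above (assumes a TF1-style graph;
# the shapes are illustrative):
#
#   import tensorflow as tf
#   x = tf.placeholder(tf.float32, shape=(None, 128, 3))  # (batch, length, channels)
#   y = tf.layers.conv1d(x, filters=16, kernel_size=5, padding='same')
#   # With 'same' padding and stride 1, y has shape (None, 128, 16).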
class Conv2D(_Conv):
"""2D convolution layer (e.g. spatial convolution over images).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, no bias will
be applied.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv2D, self).__init__(
rank=2,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name, **kwargs)
def conv2d(inputs,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the 2D convolution layer.
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
inputs: Tensor input.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 2 integers, specifying the
height and width of the 2D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the convolution along the height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, the
      default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
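  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`; with
    `padding='same'` and unit strides the spatial dimensions are preserved):
      images = tf.placeholder(tf.float32, [None, 28, 28, 3])
      features = conv2d(images, filters=32, kernel_size=3, padding='same',
                        activation=tf.nn.relu)  # shape: (None, 28, 28, 32)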
"""
layer = Conv2D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
class Conv3D(_Conv):
"""3D convolution layer (e.g. spatial convolution over volumes).
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 3 integers, specifying the
depth, height and width of the 3D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the convolution along the depth,
height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
dilation_rate: An integer or tuple/list of 3 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, the
      default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
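  Example:
    A minimal usage sketch (assumes `import tensorflow as tf` and a 5-D
    `float32` input in `channels_last` format):
      video = tf.placeholder(tf.float32, [None, 16, 112, 112, 3])
      layer = Conv3D(filters=8, kernel_size=3, padding='same')
      features = layer.apply(video)  # shape: (None, 16, 112, 112, 8)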
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv3D, self).__init__(
rank=3,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name, **kwargs)
def conv3d(inputs,
filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1, 1),
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the 3D convolution layer.
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True (and a `bias_initializer` is provided),
a bias vector is created and added to the outputs. Finally, if
`activation` is not `None`, it is applied to the outputs as well.
Arguments:
inputs: Tensor input.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 3 integers, specifying the
depth, height and width of the 3D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the convolution along the depth,
height and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
dilation_rate: An integer or tuple/list of 3 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, the
      default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
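  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`):
      video = tf.placeholder(tf.float32, [None, 16, 112, 112, 3])
      features = conv3d(video, filters=8, kernel_size=3,
                        padding='same')  # shape: (None, 16, 112, 112, 8)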
"""
layer = Conv3D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
      name=name,
      dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
class SeparableConv2D(Conv2D):
"""Depthwise separable 2D convolution.
This layer performs a depthwise convolution that acts separately on
channels, followed by a pointwise convolution that mixes channels.
If `use_bias` is True and a bias initializer is provided,
it adds a bias vector to the output.
It then optionally applies an activation function to produce the final output.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any `stride` value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
depth_multiplier: The number of depthwise convolution output channels for
each input channel. The total number of depthwise convolution output
channels will be equal to `num_filters_in * depth_multiplier`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
depthwise_initializer: An initializer for the depthwise convolution kernel.
pointwise_initializer: An initializer for the pointwise convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, the
      default initializer will be used.
depthwise_regularizer: Optional regularizer for the depthwise
convolution kernel.
pointwise_regularizer: Optional regularizer for the pointwise
convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
depthwise_constraint: Optional projection function to be applied to the
depthwise kernel after being updated by an `Optimizer` (e.g. used for
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
pointwise_constraint: Optional projection function to be applied to the
pointwise kernel after being updated by an `Optimizer`.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
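  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`). With
    `depth_multiplier=1` and a 3-channel input, the depthwise kernel has
    shape (3, 3, 3, 1) and the pointwise kernel (1, 1, 3, 64):
      images = tf.placeholder(tf.float32, [None, 32, 32, 3])
      layer = SeparableConv2D(filters=64, kernel_size=3, padding='same')
      features = layer.apply(images)  # shape: (None, 32, 32, 64)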
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
depth_multiplier=1,
activation=None,
use_bias=True,
depthwise_initializer=None,
pointwise_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
depthwise_regularizer=None,
pointwise_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
depthwise_constraint=None,
pointwise_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(SeparableConv2D, self).__init__(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
        bias_initializer=bias_initializer,
        bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
**kwargs)
self.depth_multiplier = depth_multiplier
self.depthwise_initializer = depthwise_initializer
self.pointwise_initializer = pointwise_initializer
self.depthwise_regularizer = depthwise_regularizer
self.pointwise_regularizer = pointwise_regularizer
self.depthwise_constraint = depthwise_constraint
self.pointwise_constraint = pointwise_constraint
def build(self, input_shape):
if len(input_shape) < 4:
      raise ValueError('Inputs to `SeparableConv2D` should have rank 4. '
                       'Received input shape: ' + str(input_shape))
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = 3
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs to '
'`SeparableConv2D` '
'should be defined. Found `None`.')
input_dim = int(input_shape[channel_axis])
self.input_spec = base.InputSpec(ndim=4, axes={channel_axis: input_dim})
depthwise_kernel_shape = (self.kernel_size[0],
self.kernel_size[1],
input_dim,
self.depth_multiplier)
pointwise_kernel_shape = (1, 1,
self.depth_multiplier * input_dim,
self.filters)
self.depthwise_kernel = self.add_variable(
name='depthwise_kernel',
shape=depthwise_kernel_shape,
initializer=self.depthwise_initializer,
regularizer=self.depthwise_regularizer,
constraint=self.depthwise_constraint,
trainable=True,
dtype=self.dtype)
self.pointwise_kernel = self.add_variable(
name='pointwise_kernel',
shape=pointwise_kernel_shape,
initializer=self.pointwise_initializer,
regularizer=self.pointwise_regularizer,
constraint=self.pointwise_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(name='bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
else:
self.bias = None
self.built = True
def call(self, inputs):
# Apply the actual ops.
if self.data_format == 'channels_last':
strides = (1,) + self.strides + (1,)
else:
strides = (1, 1) + self.strides
outputs = nn.separable_conv2d(
inputs,
self.depthwise_kernel,
self.pointwise_kernel,
strides=strides,
padding=self.padding.upper(),
rate=self.dilation_rate,
data_format=utils.convert_data_format(self.data_format, ndim=4))
if self.use_bias:
outputs = nn.bias_add(
outputs,
self.bias,
data_format=utils.convert_data_format(self.data_format, ndim=4))
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
rows = input_shape[2]
cols = input_shape[3]
else:
rows = input_shape[1]
cols = input_shape[2]
rows = utils.conv_output_length(rows, self.kernel_size[0],
self.padding, self.strides[0])
cols = utils.conv_output_length(cols, self.kernel_size[1],
self.padding, self.strides[1])
if self.data_format == 'channels_first':
return tensor_shape.TensorShape(
[input_shape[0], self.filters, rows, cols])
else:
return tensor_shape.TensorShape(
[input_shape[0], rows, cols, self.filters])
def separable_conv2d(inputs,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
dilation_rate=(1, 1),
depth_multiplier=1,
activation=None,
use_bias=True,
depthwise_initializer=None,
pointwise_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
depthwise_regularizer=None,
pointwise_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
depthwise_constraint=None,
pointwise_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the depthwise separable 2D convolution layer.
This layer performs a depthwise convolution that acts separately on
channels, followed by a pointwise convolution that mixes channels.
If `use_bias` is True and a bias initializer is provided,
it adds a bias vector to the output.
It then optionally applies an activation function to produce the final output.
Arguments:
inputs: Input tensor.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any `stride` value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
dilation_rate: An integer or tuple/list of 2 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
depth_multiplier: The number of depthwise convolution output channels for
each input channel. The total number of depthwise convolution output
channels will be equal to `num_filters_in * depth_multiplier`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
depthwise_initializer: An initializer for the depthwise convolution kernel.
pointwise_initializer: An initializer for the pointwise convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, the
      default initializer will be used.
depthwise_regularizer: Optional regularizer for the depthwise
convolution kernel.
pointwise_regularizer: Optional regularizer for the pointwise
convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
depthwise_constraint: Optional projection function to be applied to the
depthwise kernel after being updated by an `Optimizer` (e.g. used for
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
pointwise_constraint: Optional projection function to be applied to the
pointwise kernel after being updated by an `Optimizer`.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
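  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`):
      images = tf.placeholder(tf.float32, [None, 32, 32, 3])
      features = separable_conv2d(images, filters=64, kernel_size=3,
                                  padding='same')  # (None, 32, 32, 64)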
"""
layer = SeparableConv2D(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
depth_multiplier=depth_multiplier,
activation=activation,
use_bias=use_bias,
depthwise_initializer=depthwise_initializer,
pointwise_initializer=pointwise_initializer,
bias_initializer=bias_initializer,
depthwise_regularizer=depthwise_regularizer,
pointwise_regularizer=pointwise_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
depthwise_constraint=depthwise_constraint,
pointwise_constraint=pointwise_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
      name=name,
      dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
class Conv2DTranspose(Conv2D):
"""Transposed 2D convolution layer (sometimes called 2D Deconvolution).
The need for transposed convolutions generally arises
from the desire to use a transformation going in the opposite direction
of a normal convolution, i.e., from something that has the shape of the
output of some convolution to something that has the shape of its input
while maintaining a connectivity pattern that is compatible with
said convolution.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 positive integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
padding: one of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, the
      default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
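  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`). With
    `padding='same'` and stride 2, each spatial dimension is doubled:
      codes = tf.placeholder(tf.float32, [None, 7, 7, 64])
      layer = Conv2DTranspose(filters=32, kernel_size=3, strides=2,
                              padding='same')
      upsampled = layer.apply(codes)  # shape: (None, 14, 14, 32)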
"""
def __init__(self, filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv2DTranspose, self).__init__(
filters,
kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
**kwargs)
self.input_spec = base.InputSpec(ndim=4)
def build(self, input_shape):
if len(input_shape) != 4:
      raise ValueError('Inputs should have rank 4. '
                       'Received input shape: ' + str(input_shape))
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs '
'should be defined. Found `None`.')
input_dim = input_shape[channel_axis]
self.input_spec = base.InputSpec(ndim=4, axes={channel_axis: input_dim})
kernel_shape = self.kernel_size + (self.filters, input_dim)
self.kernel = self.add_variable(name='kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(name='bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
else:
self.bias = None
self.built = True
def call(self, inputs):
inputs_shape = array_ops.shape(inputs)
batch_size = inputs_shape[0]
if self.data_format == 'channels_first':
c_axis, h_axis, w_axis = 1, 2, 3
else:
c_axis, h_axis, w_axis = 3, 1, 2
height, width = inputs_shape[h_axis], inputs_shape[w_axis]
kernel_h, kernel_w = self.kernel_size
stride_h, stride_w = self.strides
# Infer the dynamic output shape:
out_height = utils.deconv_output_length(height,
kernel_h,
self.padding,
stride_h)
out_width = utils.deconv_output_length(width,
kernel_w,
self.padding,
stride_w)
if self.data_format == 'channels_first':
output_shape = (batch_size, self.filters, out_height, out_width)
strides = (1, 1, stride_h, stride_w)
else:
output_shape = (batch_size, out_height, out_width, self.filters)
strides = (1, stride_h, stride_w, 1)
output_shape_tensor = array_ops.stack(output_shape)
outputs = nn.conv2d_transpose(
inputs,
self.kernel,
output_shape_tensor,
strides,
padding=self.padding.upper(),
data_format=utils.convert_data_format(self.data_format, ndim=4))
if context.in_graph_mode():
# Infer the static output shape:
out_shape = inputs.get_shape().as_list()
out_shape[c_axis] = self.filters
out_shape[h_axis] = utils.deconv_output_length(out_shape[h_axis],
kernel_h,
self.padding,
stride_h)
out_shape[w_axis] = utils.deconv_output_length(out_shape[w_axis],
kernel_w,
self.padding,
stride_w)
outputs.set_shape(out_shape)
if self.use_bias:
outputs = nn.bias_add(
outputs,
self.bias,
data_format=utils.convert_data_format(self.data_format, ndim=4))
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
output_shape = list(input_shape)
if self.data_format == 'channels_first':
c_axis, h_axis, w_axis = 1, 2, 3
else:
c_axis, h_axis, w_axis = 3, 1, 2
kernel_h, kernel_w = self.kernel_size
stride_h, stride_w = self.strides
output_shape[c_axis] = self.filters
output_shape[h_axis] = utils.deconv_output_length(
output_shape[h_axis], kernel_h, self.padding, stride_h)
output_shape[w_axis] = utils.deconv_output_length(
output_shape[w_axis], kernel_w, self.padding, stride_w)
return tensor_shape.TensorShape(output_shape)
def conv2d_transpose(inputs,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for transposed 2D convolution layer.
The need for transposed convolutions generally arises
from the desire to use a transformation going in the opposite direction
of a normal convolution, i.e., from something that has the shape of the
output of some convolution to something that has the shape of its input
while maintaining a connectivity pattern that is compatible with
said convolution.
Arguments:
inputs: Input tensor.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 2 positive integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 2 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
padding: one of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
activation: Activation function. Set it to `None` to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If `None`, the
      default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
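  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`):
      codes = tf.placeholder(tf.float32, [None, 7, 7, 64])
      upsampled = conv2d_transpose(codes, filters=32, kernel_size=3,
                                   strides=2, padding='same')
      # upsampled shape: (None, 14, 14, 32)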
"""
layer = Conv2DTranspose(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
class Conv3DTranspose(Conv3D):
"""Transposed 3D convolution layer (sometimes called 3D Deconvolution).
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: An integer or tuple/list of 3 integers, specifying the
depth, height and width of the 3D convolution window.
Can be a single integer to specify the same value for all spatial
dimensions.
strides: An integer or tuple/list of 3 integers, specifying the strides
of the convolution along the depth, height and width.
Can be a single integer to specify the same value for all spatial
dimensions.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
activation: Activation function. Set it to `None` to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If `None`, the
      default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
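  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`):
      codes = tf.placeholder(tf.float32, [None, 4, 4, 4, 64])
      layer = Conv3DTranspose(filters=32, kernel_size=3, strides=2,
                              padding='same')
      upsampled = layer.apply(codes)  # shape: (None, 8, 8, 8, 32)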
"""
def __init__(self,
filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
**kwargs):
super(Conv3DTranspose, self).__init__(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
**kwargs)
self.input_spec = base.InputSpec(ndim=5)
def build(self, input_shape):
if len(input_shape) != 5:
      raise ValueError('Inputs should have rank 5. '
                       'Received input shape: ' + str(input_shape))
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs '
'should be defined, found None: ' + str(input_shape))
input_dim = input_shape[channel_axis]
kernel_shape = self.kernel_size + (self.filters, input_dim)
self.kernel = self.add_variable(
'kernel',
shape=kernel_shape,
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
trainable=True,
dtype=self.dtype)
if self.use_bias:
self.bias = self.add_variable(
'bias',
shape=(self.filters,),
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
constraint=self.bias_constraint,
trainable=True,
dtype=self.dtype)
    else:
      self.bias = None
    self.built = True
def call(self, inputs):
inputs_shape = array_ops.shape(inputs)
batch_size = inputs_shape[0]
if self.data_format == 'channels_first':
c_axis, d_axis, h_axis, w_axis = 1, 2, 3, 4
else:
c_axis, d_axis, h_axis, w_axis = 4, 1, 2, 3
self.input_spec = base.InputSpec(ndim=5,
axes={c_axis: inputs_shape[c_axis]})
depth = inputs_shape[d_axis]
height = inputs_shape[h_axis]
width = inputs_shape[w_axis]
kernel_d, kernel_h, kernel_w = self.kernel_size
stride_d, stride_h, stride_w = self.strides
# Infer the dynamic output shape:
out_depth = utils.deconv_output_length(depth,
kernel_d,
self.padding,
stride_d)
out_height = utils.deconv_output_length(height,
kernel_h,
self.padding,
stride_h)
out_width = utils.deconv_output_length(width,
kernel_w,
self.padding,
stride_w)
if self.data_format == 'channels_first':
output_shape = (batch_size, self.filters, out_depth, out_height,
out_width)
strides = (1, 1, stride_d, stride_h, stride_w)
else:
output_shape = (batch_size, out_depth, out_height, out_width,
self.filters)
strides = (1, stride_d, stride_h, stride_w, 1)
output_shape_tensor = array_ops.stack(output_shape)
outputs = nn.conv3d_transpose(
inputs,
self.kernel,
output_shape_tensor,
strides,
data_format=utils.convert_data_format(self.data_format, ndim=5),
padding=self.padding.upper())
if context.in_graph_mode():
# Infer the static output shape:
out_shape = inputs.get_shape().as_list()
out_shape[c_axis] = self.filters
out_shape[d_axis] = utils.deconv_output_length(out_shape[d_axis],
kernel_d,
self.padding,
stride_d)
out_shape[h_axis] = utils.deconv_output_length(out_shape[h_axis],
kernel_h,
self.padding,
stride_h)
out_shape[w_axis] = utils.deconv_output_length(out_shape[w_axis],
kernel_w,
self.padding,
stride_w)
outputs.set_shape(out_shape)
if self.use_bias:
outputs_shape = outputs.shape.as_list()
if self.data_format == 'channels_first':
outputs_4d = array_ops.reshape(outputs, [
outputs_shape[0], outputs_shape[1],
outputs_shape[2] * outputs_shape[3], outputs_shape[4]
])
else:
outputs_4d = array_ops.reshape(outputs, [
outputs_shape[0], outputs_shape[1] * outputs_shape[2],
outputs_shape[3], outputs_shape[4]
])
outputs_4d = nn.bias_add(
outputs_4d,
self.bias,
data_format=utils.convert_data_format(self.data_format, ndim=4))
outputs = array_ops.reshape(outputs_4d, outputs_shape)
if self.activation is not None:
return self.activation(outputs)
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
output_shape = list(input_shape)
if self.data_format == 'channels_first':
c_axis, d_axis, h_axis, w_axis = 1, 2, 3, 4
else:
c_axis, d_axis, h_axis, w_axis = 4, 1, 2, 3
kernel_d, kernel_h, kernel_w = self.kernel_size
stride_d, stride_h, stride_w = self.strides
output_shape[c_axis] = self.filters
    output_shape[d_axis] = utils.deconv_output_length(
        output_shape[d_axis], kernel_d, self.padding, stride_d)
    output_shape[h_axis] = utils.deconv_output_length(
        output_shape[h_axis], kernel_h, self.padding, stride_h)
    output_shape[w_axis] = utils.deconv_output_length(
        output_shape[w_axis], kernel_w, self.padding, stride_w)
return tensor_shape.TensorShape(output_shape)
def conv3d_transpose(inputs,
filters,
kernel_size,
strides=(1, 1, 1),
padding='valid',
data_format='channels_last',
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for transposed 3D convolution layer.
Arguments:
inputs: Input tensor.
filters: Integer, the dimensionality of the output space (i.e. the number
of filters in the convolution).
kernel_size: A tuple or list of 3 positive integers specifying the spatial
dimensions of the filters. Can be a single integer to specify the same
value for all spatial dimensions.
strides: A tuple or list of 3 positive integers specifying the strides
of the convolution. Can be a single integer to specify the same value for
all spatial dimensions.
padding: one of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
activation: Activation function. Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
    bias_initializer: An initializer for the bias vector. If None, the
      default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
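  Example:
    A minimal usage sketch (assumes `import tensorflow as tf`):
      codes = tf.placeholder(tf.float32, [None, 4, 4, 4, 64])
      upsampled = conv3d_transpose(codes, filters=32, kernel_size=3,
                                   strides=2, padding='same')
      # upsampled shape: (None, 8, 8, 8, 32)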
"""
layer = Conv3DTranspose(
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
      name=name,
      dtype=inputs.dtype.base_dtype,
_reuse=reuse,
_scope=name)
return layer.apply(inputs)
# Aliases
Convolution1D = Conv1D
Convolution2D = Conv2D
Convolution3D = Conv3D
SeparableConvolution2D = SeparableConv2D
Convolution2DTranspose = Deconvolution2D = Deconv2D = Conv2DTranspose
Convolution3DTranspose = Deconvolution3D = Deconv3D = Conv3DTranspose
convolution1d = conv1d
convolution2d = conv2d
convolution3d = conv3d
separable_convolution2d = separable_conv2d
convolution2d_transpose = deconvolution2d = deconv2d = conv2d_transpose
convolution3d_transpose = deconvolution3d = deconv3d = conv3d_transpose